blob: e3010f9669a3f506fe4dcdab97bff49ff30a1790 [file] [log] [blame]
Chris Dalton5ba36ba2018-05-09 01:08:38 -06001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "GrCCDrawPathsOp.h"
9
Robert Phillips7c525e62018-06-12 10:11:12 -040010#include "GrMemoryPool.h"
Chris Dalton5ba36ba2018-05-09 01:08:38 -060011#include "GrOpFlushState.h"
12#include "ccpr/GrCCPerFlushResources.h"
13#include "ccpr/GrCoverageCountingPathRenderer.h"
14
Chris Dalton1c548942018-05-22 13:09:48 -060015static bool has_coord_transforms(const GrPaint& paint) {
16 GrFragmentProcessor::Iter iter(paint);
17 while (const GrFragmentProcessor* fp = iter.next()) {
18 if (!fp->coordTransforms().empty()) {
19 return true;
20 }
21 }
22 return false;
23}
24
Robert Phillipsc994a932018-06-19 13:09:54 -040025std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(GrContext* context,
26 const SkIRect& clipIBounds,
27 const SkMatrix& m,
28 const GrShape& shape,
29 const SkRect& devBounds,
30 GrPaint&& paint) {
Chris Dalton4da70192018-06-18 09:51:36 -060031 bool canStashPathMask = true;
Chris Dalton42c21152018-06-13 15:28:19 -060032 SkIRect looseClippedIBounds;
33 devBounds.roundOut(&looseClippedIBounds); // GrCCPathParser might find slightly tighter bounds.
Chris Dalton4da70192018-06-18 09:51:36 -060034 if (!clipIBounds.contains(looseClippedIBounds)) {
35 canStashPathMask = false;
36 if (!looseClippedIBounds.intersect(clipIBounds)) {
37 return nullptr;
38 }
Chris Dalton42c21152018-06-13 15:28:19 -060039 }
Robert Phillipsc994a932018-06-19 13:09:54 -040040
41 GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();
42
43 return pool->allocate<GrCCDrawPathsOp>(looseClippedIBounds, m, shape, canStashPathMask,
44 devBounds, std::move(paint));
Chris Dalton42c21152018-06-13 15:28:19 -060045}
46
// Constructs a single-path draw op. Captures the view matrix only when the paint's
// fragment processors actually use local coords, and records the op's first (and at
// construction time, only) SingleDraw. NOTE: fDraws reads paint.getColor() before
// fProcessors consumes the paint — initializer order here is load-bearing.
GrCCDrawPathsOp::GrCCDrawPathsOp(const SkIRect& looseClippedIBounds, const SkMatrix& m,
                                 const GrShape& shape, bool canStashPathMask,
                                 const SkRect& devBounds, GrPaint&& paint)
        : GrDrawOp(ClassID())
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fSRGBFlags(GrPipeline::SRGBFlagsFromPaint(paint))
        , fDraws({looseClippedIBounds, m, shape, paint.getColor(), nullptr, nullptr, {0, 0},
                  canStashPathMask, nullptr})
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);  // setupResources() asserts this sentinel, then sets it.
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}
61
62GrCCDrawPathsOp::~GrCCDrawPathsOp() {
Chris Daltond7e22272018-05-23 10:17:17 -060063 if (fOwningPerOpListPaths) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -060064 // Remove CCPR's dangling pointer to this Op before deleting it.
Chris Daltond7e22272018-05-23 10:17:17 -060065 fOwningPerOpListPaths->fDrawOps.remove(this);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060066 }
Chris Dalton5ba36ba2018-05-09 01:08:38 -060067}
68
Chris Dalton4da70192018-06-18 09:51:36 -060069GrCCDrawPathsOp::SingleDraw::~SingleDraw() {
70 if (fCacheEntry) {
71 // All currFlushAtlas references must be reset back to null before the flush is finished.
72 fCacheEntry->setCurrFlushAtlas(nullptr);
73 }
74}
75
Chris Dalton5ba36ba2018-05-09 01:08:38 -060076GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
77 const GrAppliedClip* clip,
78 GrPixelConfigIsClamped dstIsClamped) {
Chris Dalton4da70192018-06-18 09:51:36 -060079 SkASSERT(1 == fNumDraws); // There should only be one single path draw in this Op right now.
Chris Dalton5ba36ba2018-05-09 01:08:38 -060080 GrProcessorSet::Analysis analysis =
Chris Dalton4bfb50b2018-05-21 09:10:53 -060081 fProcessors.finalize(fDraws.head().fColor, GrProcessorAnalysisCoverage::kSingleChannel,
82 clip, false, caps, dstIsClamped, &fDraws.head().fColor);
Chris Dalton4da70192018-06-18 09:51:36 -060083 return RequiresDstTexture(analysis.requiresDstTexture());
Chris Dalton5ba36ba2018-05-09 01:08:38 -060084}
85
Chris Dalton4da70192018-06-18 09:51:36 -060086bool GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -060087 GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
Chris Daltond7e22272018-05-23 10:17:17 -060088 SkASSERT(fOwningPerOpListPaths);
Chris Dalton4bfb50b2018-05-21 09:10:53 -060089 SkASSERT(fNumDraws);
Chris Daltond7e22272018-05-23 10:17:17 -060090 SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
Chris Dalton4bfb50b2018-05-21 09:10:53 -060091 SkASSERT(that->fNumDraws);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060092
Chris Daltondaef06a2018-05-23 17:11:09 -060093 if (fSRGBFlags != that->fSRGBFlags || fProcessors != that->fProcessors ||
Chris Dalton1c548942018-05-22 13:09:48 -060094 fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -060095 return false;
96 }
97
Chris Daltond7e22272018-05-23 10:17:17 -060098 fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060099 this->joinBounds(*that);
100
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600101 SkDEBUGCODE(fNumDraws += that->fNumDraws);
102 SkDEBUGCODE(that->fNumDraws = 0);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600103 return true;
104}
105
Chris Daltond7e22272018-05-23 10:17:17 -0600106void GrCCDrawPathsOp::wasRecorded(GrCCPerOpListPaths* owningPerOpListPaths) {
Chris Daltonf104fec2018-05-22 16:17:48 -0600107 SkASSERT(1 == fNumDraws);
Chris Daltond7e22272018-05-23 10:17:17 -0600108 SkASSERT(!fOwningPerOpListPaths);
109 owningPerOpListPaths->fDrawOps.addToTail(this);
110 fOwningPerOpListPaths = owningPerOpListPaths;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600111}
112
// For each path in this op, decides how it will be satisfied this flush and tallies
// that choice into 'specs': (1) already resident in a cached atlas texture, (2)
// copyable out of the atlas stashed from last flush, or (3) must be re-rendered into
// a new coverage-count atlas. Also populates each draw's fCacheEntry/fCachedAtlasProxy.
void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         const GrUniqueKey& stashedAtlasKey,
                                         GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;

    for (SingleDraw& draw : fDraws) {
        SkASSERT(!draw.fCacheEntry);

        SkPath path;
        draw.fShape.asPath(&path);

        // Only create a new cache entry when we intend to stash this path's mask.
        MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
        draw.fCacheEntry = pathCache->find(draw.fShape, m, CreateIfAbsent(draw.fCanStashPathMask));
        if (auto cacheEntry = draw.fCacheEntry.get()) {
            SkASSERT(!cacheEntry->currFlushAtlas());  // Shouldn't be set until setupResources().
            if (cacheEntry->atlasKey().isValid()) {
                // Does the path already exist in a cached atlas?
                if (cacheEntry->hasCachedAtlas() &&
                    (draw.fCachedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(
                                                     cacheEntry->atlasKey(),
                                                     GrCCAtlas::kTextureOrigin))) {
                    ++specs->fNumCachedPaths;
                    continue;
                }

                // Does the path exist in the atlas that we stashed away from last flush? If so we
                // can copy it into a new 8-bit atlas and keep it in the resource cache.
                if (stashedAtlasKey.isValid() && stashedAtlasKey == cacheEntry->atlasKey()) {
                    SkASSERT(!cacheEntry->hasCachedAtlas());
                    ++specs->fNumCopiedPaths;
                    specs->fCopyPathStats.statPath(path);
                    specs->fCopyAtlasSpecs.accountForSpace(cacheEntry->width(),
                                                           cacheEntry->height());
                    continue;
                }

                // Whatever atlas the path used to reside in, it no longer exists.
                cacheEntry->resetAtlasKeyAndInfo();
            }

            if (!draw.fCanStashPathMask) {
                // No point in keeping this cache entry around anymore if we aren't going to try
                // and stash the rendered path mask after flush.
                draw.fCacheEntry = nullptr;
                pathCache->evict(cacheEntry);
            }
        }

        // Fall-through: this path must be rendered into an atlas this flush.
        ++specs->fNumRenderedPaths;
        specs->fRenderedPathStats.statPath(path);
        specs->fRenderedAtlasSpecs.accountForSpace(draw.fLooseClippedIBounds.width(),
                                                   draw.fLooseClippedIBounds.height());
    }
}
169
// Emits one draw-path instance per SingleDraw into the per-flush resources, sourcing
// each path's mask from (in priority order): an existing cached atlas proxy, an atlas
// already built earlier this flush, a copy out of last flush's stashed atlas (when
// 'doCopies' permits), or a fresh render into a coverage-count atlas. Instances are
// grouped into fInstanceRanges by atlas proxy via recordInstance().
void GrCCDrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                     GrCCPerFlushResources* resources, DoCopiesToCache doCopies) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);  // Sentinel from the constructor: set exactly once.
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        // Only even-odd and winding fills are expected here.
        auto doEvenOddFill = DoEvenOddFill(SkPath::kEvenOdd_FillType == path.getFillType());
        SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
                 SkPath::kWinding_FillType == path.getFillType());

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            // Does the path already exist in a cached atlas texture?
            if (auto proxy = draw.fCachedAtlasProxy.get()) {
                SkASSERT(!cacheEntry->currFlushAtlas());
                this->recordInstance(proxy, resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                continue;
            }

            // Have we already encountered this path during the flush? (i.e. was the same SkPath
            // drawn more than once during the same flush, with a compatible matrix?)
            if (auto atlas = cacheEntry->currFlushAtlas()) {
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(
                        *cacheEntry, draw.fCachedMaskShift, draw.fColor,
                        cacheEntry->hasCachedAtlas() ? DoEvenOddFill::kNo : doEvenOddFill);
                continue;
            }

            // If the cache entry still has a valid atlas key at this point, it means the path
            // exists in the atlas that we stashed away from last flush. Copy it into a permanent
            // 8-bit atlas in the resource cache.
            if (DoCopiesToCache::kYes == doCopies && cacheEntry->atlasKey().isValid()) {
                SkIVector newOffset;
                GrCCAtlas* atlas =
                        resources->copyPathToCachedAtlas(*cacheEntry, doEvenOddFill, &newOffset);
                cacheEntry->updateToCachedAtlas(atlasKeyAndInfoArgsBelowAreOrderSensitive:
                                                atlas->getOrAssignUniqueKey(onFlushRP),
                                                newOffset, atlas->refOrMakeCachedAtlasInfo());
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
                continue;
            }
        }

        // Render the raw path into a coverage count atlas. renderPathInAtlas() gives us two tight
        // bounding boxes: One in device space, as well as a second one rotated an additional 45
        // degrees. The path vertex shader uses these two bounding boxes to generate an octagon that
        // circumscribes the path.
        SkASSERT(!draw.fCachedAtlasProxy);
        SkRect devBounds, devBounds45;
        SkIRect devIBounds;
        SkIVector devToAtlasOffset;
        if (auto atlas = resources->renderPathInAtlas(draw.fLooseClippedIBounds, draw.fMatrix, path,
                                                      &devBounds, &devBounds45, &devIBounds,
                                                      &devToAtlasOffset)) {
            this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
            resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                    draw.fColor, doEvenOddFill);
            // If this atlas will be stashed after the flush, remember where the mask landed so
            // a future flush can copy it into a permanent cached atlas.
            if (draw.fCacheEntry && draw.fCanStashPathMask &&
                resources->nextAtlasToStash() == atlas) {
                const GrUniqueKey& atlasKey =
                        resources->nextAtlasToStash()->getOrAssignUniqueKey(onFlushRP);
                draw.fCacheEntry->initAsStashedAtlas(atlasKey, devToAtlasOffset, devBounds,
                                                     devBounds45, devIBounds,
                                                     draw.fCachedMaskShift);
                // Remember this atlas in case we encounter the path again during the same flush.
                draw.fCacheEntry->setCurrFlushAtlas(atlas);
            }
            continue;
        }
        // renderPathInAtlas() failed (e.g. atlas space exhausted): the path is dropped.
    }

    // Close out the final instance range now that all instances have been appended.
    if (!fInstanceRanges.empty()) {
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}
255
256inline void GrCCDrawPathsOp::recordInstance(const GrTextureProxy* atlasProxy, int instanceIdx) {
257 if (fInstanceRanges.empty()) {
258 fInstanceRanges.push_back({atlasProxy, instanceIdx});
259 return;
260 }
261 if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
262 fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
263 fInstanceRanges.push_back({atlasProxy, instanceIdx});
264 return;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600265 }
266}
267
// Issues the actual GPU draws: builds one pipeline for the whole op, then draws each
// InstanceRange with a path processor bound to that range's atlas proxy.
void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return;  // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fFlags = fSRGBFlags;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    // fProcessors is consumed here; onExecute runs at most once per op.
    GrPipeline pipeline(initArgs, std::move(fProcessors), flushState->detachAppliedClip());

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0);  // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        // Each range draws from a single atlas, so the processor is rebuilt per range.
        GrCCPathProcessor pathProc(flushState->resourceProvider(), sk_ref_sp(range.fAtlasProxy),
                                   fViewMatrixIfUsingLocalCoords);
        pathProc.drawPaths(flushState, pipeline, *resources, baseInstance, range.fEndInstanceIdx,
                           this->bounds());

        // The next range begins where this one ended.
        baseInstance = range.fEndInstanceIdx;
    }
}