/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPerFlushResources.h"

#include "GrClip.h"
#include "GrMemoryPool.h"
#include "GrOnFlushResourceProvider.h"
#include "GrRecordingContext.h"
#include "GrRecordingContextPriv.h"
#include "GrRenderTargetContext.h"
#include "GrShape.h"
#include "GrSurfaceContextPriv.h"
#include "SkMakeUnique.h"
#include "ccpr/GrCCPathCache.h"

using FillBatchID = GrCCFiller::BatchID;
using StrokeBatchID = GrCCStroker::BatchID;
using PathInstance = GrCCPathProcessor::Instance;

static constexpr int kFillIdx = GrCCPerFlushResourceSpecs::kFillIdx;
static constexpr int kStrokeIdx = GrCCPerFlushResourceSpecs::kStrokeIdx;

namespace {

// Base class for an Op that renders a CCPR atlas.
class AtlasOp : public GrDrawOp {
public:
    FixedFunctionFlags fixedFunctionFlags() const override { return FixedFunctionFlags::kNone; }
    GrProcessorSet::Analysis finalize(
            const GrCaps&, const GrAppliedClip*, GrFSAAType, GrClampType) override {
        return GrProcessorSet::EmptySetAnalysis();
    }
    CombineResult onCombineIfPossible(GrOp* other, const GrCaps&) override {
        // We will only make multiple copy ops if they have different source proxies.
        // TODO: make use of texture chaining.
        return CombineResult::kCannotCombine;
    }
    void onPrepare(GrOpFlushState*) override {}

protected:
    AtlasOp(uint32_t classID, sk_sp<const GrCCPerFlushResources> resources,
            const SkISize& drawBounds)
            : GrDrawOp(classID)
            , fResources(std::move(resources)) {
        this->setBounds(SkRect::MakeIWH(drawBounds.width(), drawBounds.height()),
                        GrOp::HasAABloat::kNo, GrOp::IsZeroArea::kNo);
    }

    const sk_sp<const GrCCPerFlushResources> fResources;
};

// Copies paths from a cached coverage count atlas into an 8-bit literal-coverage atlas.
class CopyAtlasOp : public AtlasOp {
public:
    DEFINE_OP_CLASS_ID

    static std::unique_ptr<GrDrawOp> Make(GrRecordingContext* context,
                                          sk_sp<const GrCCPerFlushResources> resources,
                                          sk_sp<GrTextureProxy> copyProxy, int baseInstance,
                                          int endInstance, const SkISize& drawBounds) {
        GrOpMemoryPool* pool = context->priv().opMemoryPool();

        return pool->allocate<CopyAtlasOp>(std::move(resources), std::move(copyProxy),
                                           baseInstance, endInstance, drawBounds);
    }

    const char* name() const override { return "CopyAtlasOp (CCPR)"; }
    void visitProxies(const VisitProxyFunc& fn, VisitorType) const override { fn(fSrcProxy.get()); }

    void onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) override {
        SkASSERT(fSrcProxy);
        GrPipeline::FixedDynamicState dynamicState;
        auto srcProxy = fSrcProxy.get();
        dynamicState.fPrimitiveProcessorTextures = &srcProxy;

        GrPipeline pipeline(GrScissorTest::kDisabled, SkBlendMode::kSrc);
        GrCCPathProcessor pathProc(srcProxy);
        pathProc.drawPaths(flushState, pipeline, &dynamicState, *fResources, fBaseInstance,
                           fEndInstance, this->bounds());
    }

private:
    friend class ::GrOpMemoryPool; // for ctor

    CopyAtlasOp(sk_sp<const GrCCPerFlushResources> resources, sk_sp<GrTextureProxy> srcProxy,
                int baseInstance, int endInstance, const SkISize& drawBounds)
            : AtlasOp(ClassID(), std::move(resources), drawBounds)
            , fSrcProxy(srcProxy)
            , fBaseInstance(baseInstance)
            , fEndInstance(endInstance) {
    }
    sk_sp<GrTextureProxy> fSrcProxy;
    const int fBaseInstance;
    const int fEndInstance;
};

// Renders coverage counts to a CCPR atlas using the resources' pre-filled GrCCFiller and
// GrCCStroker.
class RenderAtlasOp : public AtlasOp {
public:
    DEFINE_OP_CLASS_ID

    static std::unique_ptr<GrDrawOp> Make(GrRecordingContext* context,
                                          sk_sp<const GrCCPerFlushResources> resources,
                                          FillBatchID fillBatchID, StrokeBatchID strokeBatchID,
                                          const SkISize& drawBounds) {
        GrOpMemoryPool* pool = context->priv().opMemoryPool();

        return pool->allocate<RenderAtlasOp>(std::move(resources), fillBatchID, strokeBatchID,
                                             drawBounds);
    }

    // GrDrawOp interface.
    const char* name() const override { return "RenderAtlasOp (CCPR)"; }

    void onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) override {
        fResources->filler().drawFills(flushState, fFillBatchID, fDrawBounds);
        fResources->stroker().drawStrokes(flushState, fStrokeBatchID, fDrawBounds);
    }

private:
    friend class ::GrOpMemoryPool; // for ctor

    RenderAtlasOp(sk_sp<const GrCCPerFlushResources> resources, FillBatchID fillBatchID,
                  StrokeBatchID strokeBatchID, const SkISize& drawBounds)
            : AtlasOp(ClassID(), std::move(resources), drawBounds)
            , fFillBatchID(fillBatchID)
            , fStrokeBatchID(strokeBatchID)
            , fDrawBounds(SkIRect::MakeWH(drawBounds.width(), drawBounds.height())) {
    }

    const FillBatchID fFillBatchID;
    const StrokeBatchID fStrokeBatchID;
    const SkIRect fDrawBounds;
};

}

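// Number of PathInstances to allocate in the per-flush instance buffer.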
static int inst_buffer_count(const GrCCPerFlushResourceSpecs& specs) {
    return specs.fNumCachedPaths +
           // Copies get two instances per draw: 1 copy + 1 draw.
           (specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]) * 2 +
           specs.fNumRenderedPaths[kFillIdx] + specs.fNumRenderedPaths[kStrokeIdx];
           // No clips in instance buffers.
}

GrCCPerFlushResources::GrCCPerFlushResources(GrOnFlushResourceProvider* onFlushRP,
                                             const GrCCPerFlushResourceSpecs& specs)
        // Overallocate by one point so we can call Sk4f::Store at the final SkPoint in the array.
        // (See transform_path_pts below.)
        // FIXME: instead use built-in instructions to write only the first two lanes of an Sk4f.
        : fLocalDevPtsBuffer(SkTMax(specs.fRenderedPathStats[kFillIdx].fMaxPointsPerPath,
                                    specs.fRenderedPathStats[kStrokeIdx].fMaxPointsPerPath) + 1)
        , fFiller(specs.fNumRenderedPaths[kFillIdx] + specs.fNumClipPaths,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalSkPoints,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalSkVerbs,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalConicWeights)
        , fStroker(specs.fNumRenderedPaths[kStrokeIdx],
                   specs.fRenderedPathStats[kStrokeIdx].fNumTotalSkPoints,
                   specs.fRenderedPathStats[kStrokeIdx].fNumTotalSkVerbs)
        , fCopyAtlasStack(GrCCAtlas::CoverageType::kA8_LiteralCoverage, specs.fCopyAtlasSpecs,
                          onFlushRP->caps())
        , fRenderedAtlasStack(GrCCAtlas::CoverageType::kFP16_CoverageCount,
                              specs.fRenderedAtlasSpecs, onFlushRP->caps())
        , fIndexBuffer(GrCCPathProcessor::FindIndexBuffer(onFlushRP))
        , fVertexBuffer(GrCCPathProcessor::FindVertexBuffer(onFlushRP))
        , fInstanceBuffer(onFlushRP->makeBuffer(GrGpuBufferType::kVertex,
                                                inst_buffer_count(specs) * sizeof(PathInstance)))
        , fNextCopyInstanceIdx(0)
        , fNextPathInstanceIdx(specs.fNumCopiedPaths[kFillIdx] +
                               specs.fNumCopiedPaths[kStrokeIdx]) {
    if (!fIndexBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR index buffer. No paths will be drawn.\n");
        return;
    }
    if (!fVertexBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR vertex buffer. No paths will be drawn.\n");
        return;
    }
    if (!fInstanceBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR instance buffer. No paths will be drawn.\n");
        return;
    }
    fPathInstanceData = static_cast<PathInstance*>(fInstanceBuffer->map());
    SkASSERT(fPathInstanceData);
    SkDEBUGCODE(fEndCopyInstance =
                        specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]);
    SkDEBUGCODE(fEndPathInstance = inst_buffer_count(specs));
}

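// Upgrades a path that is currently cached in a 16-bit coverage count atlas: reserves space for
// it in the 8-bit literal coverage copy atlas stack, records a copy instance for it, and, if the
// upgrade evicted the old atlas texture from the cache, stashes that texture so it can be
// recycled as the backing store for a new atlas later in the flush.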
void GrCCPerFlushResources::upgradeEntryToLiteralCoverageAtlas(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP, GrCCPathCacheEntry* entry,
        GrCCPathProcessor::DoEvenOddFill evenOdd) {
    using ReleaseAtlasResult = GrCCPathCacheEntry::ReleaseAtlasResult;
    SkASSERT(this->isMapped());
    SkASSERT(fNextCopyInstanceIdx < fEndCopyInstance);

    const GrCCCachedAtlas* cachedAtlas = entry->cachedAtlas();
    SkASSERT(cachedAtlas);
    SkASSERT(cachedAtlas->getOnFlushProxy());

    if (GrCCAtlas::CoverageType::kA8_LiteralCoverage == cachedAtlas->coverageType()) {
        // This entry has already been upgraded to literal coverage. The path must have been drawn
        // multiple times during the flush.
        SkDEBUGCODE(--fEndCopyInstance);
        return;
    }

    SkIVector newAtlasOffset;
    if (GrCCAtlas* retiredAtlas = fCopyAtlasStack.addRect(entry->devIBounds(), &newAtlasOffset)) {
        // We did not fit in the previous copy atlas and it was retired. We will render the ranges
        // up until fCopyPathRanges.count() into the retired atlas during finalize().
        retiredAtlas->setFillBatchID(fCopyPathRanges.count());
        fCurrCopyAtlasRangesIdx = fCopyPathRanges.count();
    }

    this->recordCopyPathInstance(*entry, newAtlasOffset, evenOdd,
                                 sk_ref_sp(cachedAtlas->getOnFlushProxy()));

    sk_sp<GrTexture> previousAtlasTexture =
            sk_ref_sp(cachedAtlas->getOnFlushProxy()->peekTexture());
    GrCCAtlas* newAtlas = &fCopyAtlasStack.current();
    if (ReleaseAtlasResult::kDidInvalidateFromCache ==
            entry->upgradeToLiteralCoverageAtlas(pathCache, onFlushRP, newAtlas, newAtlasOffset)) {
        // This texture just got booted out of the cache. Keep it around, in case we might be able
        // to recycle it for a new atlas. We can recycle it because copying happens before rendering
        // new paths, and every path from the atlas that we're planning to use this flush will be
        // copied to a new atlas. We'll never copy some and leave others.
        fRecyclableAtlasTextures.push_back(std::move(previousAtlasTexture));
    }
}

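// Inserts a new element at "idx", shifting any later elements up one slot with a raw memcpy
// rather than move construction. This relies on T being safely relocatable by memcpy, which the
// CopyPathRange entries this helper is used with satisfy.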
template<typename T, typename... Args>
static void emplace_at_memcpy(SkTArray<T>* array, int idx, Args&&... args) {
    if (int moveCount = array->count() - idx) {
        array->push_back();
        T* location = array->begin() + idx;
        memcpy(location+1, location, moveCount * sizeof(T));
        new (location) T(std::forward<Args>(args)...);
    } else {
        array->emplace_back(std::forward<Args>(args)...);
    }
}

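// Appends a copy instance for "entry", keeping instances that share the same source proxy
// contiguous in the instance buffer. Each contiguous run is tracked as a CopyPathRange so that
// finalize() can issue one CopyAtlasOp draw per source proxy; if no range with this proxy exists
// yet for the current copy atlas, a new one is inserted at fCurrCopyAtlasRangesIdx.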
void GrCCPerFlushResources::recordCopyPathInstance(const GrCCPathCacheEntry& entry,
                                                   const SkIVector& newAtlasOffset,
                                                   GrCCPathProcessor::DoEvenOddFill evenOdd,
                                                   sk_sp<GrTextureProxy> srcProxy) {
    SkASSERT(fNextCopyInstanceIdx < fEndCopyInstance);

    // Write the instance at the back of the array.
    int currentInstanceIdx = fNextCopyInstanceIdx++;
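    // kWhite packs four SK_Half1 values (opaque white as RGBA half-floats), the color written
    // into every copy instance.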
    constexpr uint64_t kWhite = (((uint64_t) SK_Half1) <<  0) |
                                (((uint64_t) SK_Half1) << 16) |
                                (((uint64_t) SK_Half1) << 32) |
                                (((uint64_t) SK_Half1) << 48);
    fPathInstanceData[currentInstanceIdx].set(entry, newAtlasOffset, kWhite, evenOdd);

    // Percolate the instance forward until it's contiguous with other instances that share the
    // same proxy.
    for (int i = fCopyPathRanges.count() - 1; i >= fCurrCopyAtlasRangesIdx; --i) {
        if (fCopyPathRanges[i].fSrcProxy == srcProxy) {
            ++fCopyPathRanges[i].fCount;
            return;
        }
        int rangeFirstInstanceIdx = currentInstanceIdx - fCopyPathRanges[i].fCount;
        std::swap(fPathInstanceData[rangeFirstInstanceIdx], fPathInstanceData[currentInstanceIdx]);
        currentInstanceIdx = rangeFirstInstanceIdx;
    }

    // An instance with this particular proxy did not yet exist in the array. Add a range for it.
    emplace_at_memcpy(&fCopyPathRanges, fCurrCopyAtlasRangesIdx, std::move(srcProxy), 1);
}

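// Maps the path's points into device space in a single SIMD pass, writing the results to
// outDevPts while simultaneously accumulating both the axis-aligned device bounds and the
// "45 degree" bounds used for the octagonal bounding geometry. Returns false if the mapped
// bounds come out infinite or NaN.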
static bool transform_path_pts(const SkMatrix& m, const SkPath& path,
                               const SkAutoSTArray<32, SkPoint>& outDevPts, SkRect* devBounds,
                               SkRect* devBounds45) {
    const SkPoint* pts = SkPathPriv::PointData(path);
    int numPts = path.countPoints();
    SkASSERT(numPts + 1 <= outDevPts.count());
    SkASSERT(numPts);

    // m45 transforms path points into "45 degree" device space. A bounding box in this space gives
    // the circumscribing octagon's diagonals. We could use SK_ScalarRoot2Over2, but an orthonormal
    // transform is not necessary as long as the shader uses the correct inverse.
    SkMatrix m45;
    m45.setSinCos(1, 1);
    m45.preConcat(m);

    // X,Y,T are two parallel view matrices that accumulate two bounding boxes as they map points:
    // device-space bounds and "45 degree" device-space bounds (| 1 -1 | * devCoords).
    //                                                          | 1  1 |
    Sk4f X = Sk4f(m.getScaleX(), m.getSkewY(), m45.getScaleX(), m45.getSkewY());
    Sk4f Y = Sk4f(m.getSkewX(), m.getScaleY(), m45.getSkewX(), m45.getScaleY());
    Sk4f T = Sk4f(m.getTranslateX(), m.getTranslateY(), m45.getTranslateX(), m45.getTranslateY());

    // Map the path's points to device space and accumulate bounding boxes.
    Sk4f devPt = SkNx_fma(Y, Sk4f(pts[0].y()), T);
    devPt = SkNx_fma(X, Sk4f(pts[0].x()), devPt);
    Sk4f topLeft = devPt;
    Sk4f bottomRight = devPt;

    // Store all 4 values [dev.x, dev.y, dev45.x, dev45.y]. We are only interested in the first two,
    // and will overwrite [dev45.x, dev45.y] with the next point. This is why the dst buffer must
    // be at least one larger than the number of points.
    devPt.store(&outDevPts[0]);

    for (int i = 1; i < numPts; ++i) {
        devPt = SkNx_fma(Y, Sk4f(pts[i].y()), T);
        devPt = SkNx_fma(X, Sk4f(pts[i].x()), devPt);
        topLeft = Sk4f::Min(topLeft, devPt);
        bottomRight = Sk4f::Max(bottomRight, devPt);
        devPt.store(&outDevPts[i]);
    }

    if (!(Sk4f(0) == topLeft*0).allTrue() || !(Sk4f(0) == bottomRight*0).allTrue()) {
        // The bounds are infinite or NaN.
        return false;
    }

    SkPoint topLeftPts[2], bottomRightPts[2];
    topLeft.store(topLeftPts);
    bottomRight.store(bottomRightPts);
    devBounds->setLTRB(topLeftPts[0].x(), topLeftPts[0].y(), bottomRightPts[0].x(),
                       bottomRightPts[0].y());
    devBounds45->setLTRB(topLeftPts[1].x(), topLeftPts[1].y(), bottomRightPts[1].x(),
                         bottomRightPts[1].y());
    return true;
}

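// Transforms the given shape into device space, outsets its bounds to account for any stroke,
// finds it a location in a coverage count atlas, and hands the path off to the filler or stroker
// for rendering. Returns the atlas the path will be rendered into, or null if the path was empty,
// had non-finite bounds, or was clipped away.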
GrCCAtlas* GrCCPerFlushResources::renderShapeInAtlas(
        const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
        SkRect* devBounds, SkRect* devBounds45, SkIRect* devIBounds, SkIVector* devToAtlasOffset) {
    SkASSERT(this->isMapped());
    SkASSERT(fNextPathInstanceIdx < fEndPathInstance);

    SkPath path;
    shape.asPath(&path);
    if (path.isEmpty()) {
        SkDEBUGCODE(--fEndPathInstance);
        return nullptr;
    }
    if (!transform_path_pts(m, path, fLocalDevPtsBuffer, devBounds, devBounds45)) {
        // The transformed path had infinite or NaN bounds.
        SkDEBUGCODE(--fEndPathInstance);
        return nullptr;
    }

    const SkStrokeRec& stroke = shape.style().strokeRec();
    if (!stroke.isFillStyle()) {
        float r = SkStrokeRec::GetInflationRadius(stroke.getJoin(), stroke.getMiter(),
                                                  stroke.getCap(), strokeDevWidth);
        devBounds->outset(r, r);
        // devBounds45 is in (| 1 -1 | * devCoords) space.
        //                    | 1  1 |
        devBounds45->outset(r*SK_ScalarSqrt2, r*SK_ScalarSqrt2);
    }
    devBounds->roundOut(devIBounds);

    GrScissorTest scissorTest;
    SkIRect clippedPathIBounds;
    if (!this->placeRenderedPathInAtlas(clipIBounds, *devIBounds, &scissorTest, &clippedPathIBounds,
                                        devToAtlasOffset)) {
        SkDEBUGCODE(--fEndPathInstance);
        return nullptr;  // Path was degenerate or clipped away.
    }

    if (stroke.isFillStyle()) {
        SkASSERT(0 == strokeDevWidth);
        fFiller.parseDeviceSpaceFill(path, fLocalDevPtsBuffer.begin(), scissorTest,
                                     clippedPathIBounds, *devToAtlasOffset);
    } else {
        // Stroke-and-fill is not yet supported.
        SkASSERT(SkStrokeRec::kStroke_Style == stroke.getStyle() || stroke.isHairlineStyle());
        SkASSERT(!stroke.isHairlineStyle() || 1 == strokeDevWidth);
        fStroker.parseDeviceSpaceStroke(path, fLocalDevPtsBuffer.begin(), stroke, strokeDevWidth,
                                        scissorTest, clippedPathIBounds, *devToAtlasOffset);
    }
    return &fRenderedAtlasStack.current();
}

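// Like renderShapeInAtlas(), but for a fill path that is already in device space. This entry
// point is used for clip paths, which sample the atlas directly; no PathInstance is written for
// them (see inst_buffer_count() above).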
const GrCCAtlas* GrCCPerFlushResources::renderDeviceSpacePathInAtlas(
        const SkIRect& clipIBounds, const SkPath& devPath, const SkIRect& devPathIBounds,
        SkIVector* devToAtlasOffset) {
    SkASSERT(this->isMapped());

    if (devPath.isEmpty()) {
        return nullptr;
    }

    GrScissorTest scissorTest;
    SkIRect clippedPathIBounds;
    if (!this->placeRenderedPathInAtlas(clipIBounds, devPathIBounds, &scissorTest,
                                        &clippedPathIBounds, devToAtlasOffset)) {
        return nullptr;
    }

    fFiller.parseDeviceSpaceFill(devPath, SkPathPriv::PointData(devPath), scissorTest,
                                 clippedPathIBounds, *devToAtlasOffset);
    return &fRenderedAtlasStack.current();
}

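// Clips pathIBounds against clipIBounds, decides whether the draw needs a scissor, and assigns
// the path a location in the current coverage count atlas. If the path did not fit and a new
// atlas had to be started, the retired atlas has its fill/stroke batch IDs closed so finalize()
// can render everything parsed so far into it. Returns false if the path is clipped away.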
bool GrCCPerFlushResources::placeRenderedPathInAtlas(const SkIRect& clipIBounds,
                                                     const SkIRect& pathIBounds,
                                                     GrScissorTest* scissorTest,
                                                     SkIRect* clippedPathIBounds,
                                                     SkIVector* devToAtlasOffset) {
    if (clipIBounds.contains(pathIBounds)) {
        *clippedPathIBounds = pathIBounds;
        *scissorTest = GrScissorTest::kDisabled;
    } else if (clippedPathIBounds->intersect(clipIBounds, pathIBounds)) {
        *scissorTest = GrScissorTest::kEnabled;
    } else {
        return false;
    }

    if (GrCCAtlas* retiredAtlas =
                fRenderedAtlasStack.addRect(*clippedPathIBounds, devToAtlasOffset)) {
        // We did not fit in the previous coverage count atlas and it was retired. Close the path
        // parser's current batch (which does not yet include the path we just parsed). We will
        // render this batch into the retired atlas during finalize().
        retiredAtlas->setFillBatchID(fFiller.closeCurrentBatch());
        retiredAtlas->setStrokeBatchID(fStroker.closeCurrentBatch());
    }
    return true;
}

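// Unmaps the instance buffer, closes the final fill/stroke batches, builds the filler's and
// stroker's GPU buffers, then creates one render target context per atlas: CopyAtlasOps that
// transfer cached coverage count paths into 8-bit literal coverage atlases, followed by
// RenderAtlasOps that render the new coverage count atlases. The resulting RTCs are appended
// to "out".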
bool GrCCPerFlushResources::finalize(GrOnFlushResourceProvider* onFlushRP,
                                     SkTArray<sk_sp<GrRenderTargetContext>>* out) {
    SkASSERT(this->isMapped());
    SkASSERT(fNextPathInstanceIdx == fEndPathInstance);
    SkASSERT(fNextCopyInstanceIdx == fEndCopyInstance);

    fInstanceBuffer->unmap();
    fPathInstanceData = nullptr;

    if (!fCopyAtlasStack.empty()) {
        fCopyAtlasStack.current().setFillBatchID(fCopyPathRanges.count());
        fCurrCopyAtlasRangesIdx = fCopyPathRanges.count();
    }
    if (!fRenderedAtlasStack.empty()) {
        fRenderedAtlasStack.current().setFillBatchID(fFiller.closeCurrentBatch());
        fRenderedAtlasStack.current().setStrokeBatchID(fStroker.closeCurrentBatch());
    }

    // Build the GPU buffers to render path coverage counts. (This must not happen until after the
    // final calls to fFiller/fStroker.closeCurrentBatch().)
    if (!fFiller.prepareToDraw(onFlushRP)) {
        return false;
    }
    if (!fStroker.prepareToDraw(onFlushRP)) {
        return false;
    }

    // Draw the copies from the cached 16-bit coverage count atlas(es) into the new 8-bit literal
    // coverage atlas(es).
    int copyRangeIdx = 0;
    int baseCopyInstance = 0;
    for (GrCCAtlasStack::Iter atlas(fCopyAtlasStack); atlas.next();) {
        int endCopyRange = atlas->getFillBatchID();
        SkASSERT(endCopyRange > copyRangeIdx);

        sk_sp<GrRenderTargetContext> rtc = atlas->makeRenderTargetContext(onFlushRP);
        for (; copyRangeIdx < endCopyRange; ++copyRangeIdx) {
            const CopyPathRange& copyRange = fCopyPathRanges[copyRangeIdx];
            int endCopyInstance = baseCopyInstance + copyRange.fCount;
            if (rtc) {
                auto op = CopyAtlasOp::Make(rtc->surfPriv().getContext(), sk_ref_sp(this),
                                            copyRange.fSrcProxy, baseCopyInstance, endCopyInstance,
                                            atlas->drawBounds());
                rtc->addDrawOp(GrNoClip(), std::move(op));
            }
            baseCopyInstance = endCopyInstance;
        }
        out->push_back(std::move(rtc));
    }
    SkASSERT(fCopyPathRanges.count() == copyRangeIdx);
    SkASSERT(fNextCopyInstanceIdx == baseCopyInstance);
    SkASSERT(baseCopyInstance == fEndCopyInstance);

    // Render the coverage count atlas(es).
    for (GrCCAtlasStack::Iter atlas(fRenderedAtlasStack); atlas.next();) {
        // Copies will be finished by the time we get to rendering new atlases. See if we can
        // recycle any previously invalidated atlas textures instead of creating new ones.
        sk_sp<GrTexture> backingTexture;
        for (sk_sp<GrTexture>& texture : fRecyclableAtlasTextures) {
            if (texture && atlas->currentHeight() == texture->height() &&
                    atlas->currentWidth() == texture->width()) {
                backingTexture = skstd::exchange(texture, nullptr);
                break;
            }
        }

        if (auto rtc = atlas->makeRenderTargetContext(onFlushRP, std::move(backingTexture))) {
            auto op = RenderAtlasOp::Make(rtc->surfPriv().getContext(), sk_ref_sp(this),
                                          atlas->getFillBatchID(), atlas->getStrokeBatchID(),
                                          atlas->drawBounds());
            rtc->addDrawOp(GrNoClip(), std::move(op));
            out->push_back(std::move(rtc));
        }
    }

    return true;
}

void GrCCPerFlushResourceSpecs::cancelCopies() {
    // Convert copies to cached draws.
    fNumCachedPaths += fNumCopiedPaths[kFillIdx] + fNumCopiedPaths[kStrokeIdx];
    fNumCopiedPaths[kFillIdx] = fNumCopiedPaths[kStrokeIdx] = 0;
    fCopyPathStats[kFillIdx] = fCopyPathStats[kStrokeIdx] = GrCCRenderedPathStats();
    fCopyAtlasSpecs = GrCCAtlas::Specs();
}