/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCCPerFlushResources.h"

#include "include/private/GrRecordingContext.h"
#include "src/core/SkMakeUnique.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrOnFlushResourceProvider.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/GrShape.h"
#include "src/gpu/GrSurfaceContextPriv.h"
#include "src/gpu/ccpr/GrCCPathCache.h"
#include "src/gpu/ccpr/GrGSCoverageProcessor.h"
#include "src/gpu/ccpr/GrVSCoverageProcessor.h"

using FillBatchID = GrCCFiller::BatchID;
using StrokeBatchID = GrCCStroker::BatchID;
using PathInstance = GrCCPathProcessor::Instance;

static constexpr int kFillIdx = GrCCPerFlushResourceSpecs::kFillIdx;
static constexpr int kStrokeIdx = GrCCPerFlushResourceSpecs::kStrokeIdx;

namespace {

// Base class for an Op that renders a CCPR atlas.
class AtlasOp : public GrDrawOp {
public:
    FixedFunctionFlags fixedFunctionFlags() const override { return FixedFunctionFlags::kNone; }
    GrProcessorSet::Analysis finalize(
            const GrCaps&, const GrAppliedClip*, GrFSAAType, GrClampType) override {
        return GrProcessorSet::EmptySetAnalysis();
    }
    CombineResult onCombineIfPossible(GrOp* other, const GrCaps&) override {
        // We will only make multiple copy ops if they have different source proxies.
        // TODO: make use of texture chaining.
        return CombineResult::kCannotCombine;
    }
    void onPrepare(GrOpFlushState*) override {}

protected:
    AtlasOp(uint32_t classID, sk_sp<const GrCCPerFlushResources> resources,
            const SkISize& drawBounds)
            : GrDrawOp(classID)
            , fResources(std::move(resources)) {
        this->setBounds(SkRect::MakeIWH(drawBounds.width(), drawBounds.height()),
                        GrOp::HasAABloat::kNo, GrOp::IsZeroArea::kNo);
    }

    const sk_sp<const GrCCPerFlushResources> fResources;
};

// Copies paths from a cached coverage count atlas into an 8-bit literal-coverage atlas.
class CopyAtlasOp : public AtlasOp {
public:
    DEFINE_OP_CLASS_ID

    static std::unique_ptr<GrDrawOp> Make(GrRecordingContext* context,
                                          sk_sp<const GrCCPerFlushResources> resources,
                                          sk_sp<GrTextureProxy> copyProxy, int baseInstance,
                                          int endInstance, const SkISize& drawBounds) {
        GrOpMemoryPool* pool = context->priv().opMemoryPool();

        return pool->allocate<CopyAtlasOp>(std::move(resources), std::move(copyProxy),
                                           baseInstance, endInstance, drawBounds);
    }

    const char* name() const override { return "CopyAtlasOp (CCPR)"; }
    void visitProxies(const VisitProxyFunc& fn, VisitorType) const override { fn(fSrcProxy.get()); }

    void onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) override {
        SkASSERT(fSrcProxy);
        auto srcProxy = fSrcProxy.get();
        SkASSERT(srcProxy->isInstantiated());

        GrCCPathProcessor pathProc(srcProxy->peekTexture(), srcProxy->origin());

        GrPipeline pipeline(GrScissorTest::kDisabled, SkBlendMode::kSrc);
        GrPipeline::FixedDynamicState dynamicState;
        dynamicState.fPrimitiveProcessorTextures = &srcProxy;

        pathProc.drawPaths(flushState, pipeline, &dynamicState, *fResources, fBaseInstance,
                           fEndInstance, this->bounds());
    }

private:
    friend class ::GrOpMemoryPool; // for ctor

    CopyAtlasOp(sk_sp<const GrCCPerFlushResources> resources, sk_sp<GrTextureProxy> srcProxy,
                int baseInstance, int endInstance, const SkISize& drawBounds)
            : AtlasOp(ClassID(), std::move(resources), drawBounds)
            , fSrcProxy(srcProxy)
            , fBaseInstance(baseInstance)
            , fEndInstance(endInstance) {
    }
    sk_sp<GrTextureProxy> fSrcProxy;
    const int fBaseInstance;
    const int fEndInstance;
};

// Renders coverage counts to a CCPR atlas using the resources' pre-filled GrCCFiller and
// GrCCStroker.
template<typename ProcessorType> class RenderAtlasOp : public AtlasOp {
public:
    DEFINE_OP_CLASS_ID

    static std::unique_ptr<GrDrawOp> Make(GrRecordingContext* context,
                                          sk_sp<const GrCCPerFlushResources> resources,
                                          FillBatchID fillBatchID, StrokeBatchID strokeBatchID,
                                          const SkISize& drawBounds) {
        GrOpMemoryPool* pool = context->priv().opMemoryPool();

        return pool->allocate<RenderAtlasOp>(std::move(resources), fillBatchID, strokeBatchID,
                                             drawBounds);
    }

    // GrDrawOp interface.
    const char* name() const override { return "RenderAtlasOp (CCPR)"; }

    void onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) override {
        ProcessorType proc;
        fResources->filler().drawFills(flushState, &proc, fFillBatchID, fDrawBounds);
        fResources->stroker().drawStrokes(flushState, &proc, fStrokeBatchID, fDrawBounds);
    }

private:
    friend class ::GrOpMemoryPool; // for ctor

    RenderAtlasOp(sk_sp<const GrCCPerFlushResources> resources, FillBatchID fillBatchID,
                  StrokeBatchID strokeBatchID, const SkISize& drawBounds)
            : AtlasOp(ClassID(), std::move(resources), drawBounds)
            , fFillBatchID(fillBatchID)
            , fStrokeBatchID(strokeBatchID)
            , fDrawBounds(SkIRect::MakeWH(drawBounds.width(), drawBounds.height())) {
    }

    const FillBatchID fFillBatchID;
    const StrokeBatchID fStrokeBatchID;
    const SkIRect fDrawBounds;
};

}  // namespace

static int inst_buffer_count(const GrCCPerFlushResourceSpecs& specs) {
    return specs.fNumCachedPaths +
           // Copies get two instances per draw: 1 copy + 1 draw.
           (specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]) * 2 +
           specs.fNumRenderedPaths[kFillIdx] + specs.fNumRenderedPaths[kStrokeIdx];
           // No clips in instance buffers.
}

GrCCPerFlushResources::GrCCPerFlushResources(GrOnFlushResourceProvider* onFlushRP,
                                             const GrCCPerFlushResourceSpecs& specs)
        // Overallocate by one point so we can call Sk4f::Store at the final SkPoint in the array.
        // (See transform_path_pts below.)
        // FIXME: instead use built-in instructions to write only the first two lanes of an Sk4f.
        : fLocalDevPtsBuffer(SkTMax(specs.fRenderedPathStats[kFillIdx].fMaxPointsPerPath,
                                    specs.fRenderedPathStats[kStrokeIdx].fMaxPointsPerPath) + 1)
        , fFiller(specs.fNumRenderedPaths[kFillIdx] + specs.fNumClipPaths,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalSkPoints,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalSkVerbs,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalConicWeights)
        , fStroker(specs.fNumRenderedPaths[kStrokeIdx],
                   specs.fRenderedPathStats[kStrokeIdx].fNumTotalSkPoints,
                   specs.fRenderedPathStats[kStrokeIdx].fNumTotalSkVerbs)
        , fCopyAtlasStack(GrCCAtlas::CoverageType::kA8_LiteralCoverage, specs.fCopyAtlasSpecs,
                          onFlushRP->caps())
        , fRenderedAtlasStack(GrCCAtlas::CoverageType::kFP16_CoverageCount,
                              specs.fRenderedAtlasSpecs, onFlushRP->caps())
        , fIndexBuffer(GrCCPathProcessor::FindIndexBuffer(onFlushRP))
        , fVertexBuffer(GrCCPathProcessor::FindVertexBuffer(onFlushRP))
        , fInstanceBuffer(onFlushRP->makeBuffer(GrGpuBufferType::kVertex,
                                                inst_buffer_count(specs) * sizeof(PathInstance)))
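        // The instance buffer is laid out with the copy instances first, followed by the
        // rendered-path instances, so the path instance cursor starts where the copies end.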
        , fNextCopyInstanceIdx(0)
        , fNextPathInstanceIdx(specs.fNumCopiedPaths[kFillIdx] +
                               specs.fNumCopiedPaths[kStrokeIdx]) {
    if (!fIndexBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR index buffer. No paths will be drawn.\n");
        return;
    }
    if (!fVertexBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR vertex buffer. No paths will be drawn.\n");
        return;
    }
    if (!fInstanceBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR instance buffer. No paths will be drawn.\n");
        return;
    }
    fPathInstanceData = static_cast<PathInstance*>(fInstanceBuffer->map());
    SkASSERT(fPathInstanceData);
    SkDEBUGCODE(fEndCopyInstance =
                        specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]);
    SkDEBUGCODE(fEndPathInstance = inst_buffer_count(specs));
}

void GrCCPerFlushResources::upgradeEntryToLiteralCoverageAtlas(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP, GrCCPathCacheEntry* entry,
        GrCCPathProcessor::DoEvenOddFill evenOdd) {
    using ReleaseAtlasResult = GrCCPathCacheEntry::ReleaseAtlasResult;
    SkASSERT(this->isMapped());
    SkASSERT(fNextCopyInstanceIdx < fEndCopyInstance);

    const GrCCCachedAtlas* cachedAtlas = entry->cachedAtlas();
    SkASSERT(cachedAtlas);
    SkASSERT(cachedAtlas->getOnFlushProxy());

    if (GrCCAtlas::CoverageType::kA8_LiteralCoverage == cachedAtlas->coverageType()) {
        // This entry has already been upgraded to literal coverage. The path must have been drawn
        // multiple times during the flush.
        SkDEBUGCODE(--fEndCopyInstance);
        return;
    }

    SkIVector newAtlasOffset;
    if (GrCCAtlas* retiredAtlas = fCopyAtlasStack.addRect(entry->devIBounds(), &newAtlasOffset)) {
        // We did not fit in the previous copy atlas and it was retired. We will render the ranges
        // up until fCopyPathRanges.count() into the retired atlas during finalize().
        retiredAtlas->setFillBatchID(fCopyPathRanges.count());
        fCurrCopyAtlasRangesIdx = fCopyPathRanges.count();
    }

    this->recordCopyPathInstance(*entry, newAtlasOffset, evenOdd,
                                 sk_ref_sp(cachedAtlas->getOnFlushProxy()));

    sk_sp<GrTexture> previousAtlasTexture =
            sk_ref_sp(cachedAtlas->getOnFlushProxy()->peekTexture());
    GrCCAtlas* newAtlas = &fCopyAtlasStack.current();
    if (ReleaseAtlasResult::kDidInvalidateFromCache ==
            entry->upgradeToLiteralCoverageAtlas(pathCache, onFlushRP, newAtlas, newAtlasOffset)) {
        // This texture just got booted out of the cache. Keep it around, in case we might be able
        // to recycle it for a new atlas. We can recycle it because copying happens before
        // rendering new paths, and every path from the atlas that we're planning to use this
        // flush will be copied to a new atlas. We'll never copy some and leave others.
        fRecyclableAtlasTextures.push_back(std::move(previousAtlasTexture));
    }
}

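// Inserts a new element at "idx" by shifting the trailing elements up one slot with a raw memcpy
// and then constructing the new element in place, rather than move-constructing each element.
// This assumes T tolerates bitwise relocation, as with the CopyPathRange entries it is used for
// below (a proxy ref plus an instance count).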
template<typename T, typename... Args>
static void emplace_at_memcpy(SkTArray<T>* array, int idx, Args&&... args) {
    if (int moveCount = array->count() - idx) {
        array->push_back();
        T* location = array->begin() + idx;
        memcpy(location+1, location, moveCount * sizeof(T));
        new (location) T(std::forward<Args>(args)...);
    } else {
        array->emplace_back(std::forward<Args>(args)...);
    }
}

void GrCCPerFlushResources::recordCopyPathInstance(const GrCCPathCacheEntry& entry,
                                                   const SkIVector& newAtlasOffset,
                                                   GrCCPathProcessor::DoEvenOddFill evenOdd,
                                                   sk_sp<GrTextureProxy> srcProxy) {
    SkASSERT(fNextCopyInstanceIdx < fEndCopyInstance);

    // Write the instance at the back of the array.
    int currentInstanceIdx = fNextCopyInstanceIdx++;
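    // Solid white, encoded as four packed 16-bit half floats (R, G, B, A all equal to 1).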
    constexpr uint64_t kWhite = (((uint64_t) SK_Half1) <<  0) |
                                (((uint64_t) SK_Half1) << 16) |
                                (((uint64_t) SK_Half1) << 32) |
                                (((uint64_t) SK_Half1) << 48);
    fPathInstanceData[currentInstanceIdx].set(entry, newAtlasOffset, kWhite, evenOdd);

    // Percolate the instance forward until it's contiguous with other instances that share the
    // same proxy.
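    // Each iteration either merges the instance into a matching range, or swaps it past the
    // non-matching range ahead of it (shifting that range up one slot) as it walks toward the
    // front of the current atlas' ranges.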
    for (int i = fCopyPathRanges.count() - 1; i >= fCurrCopyAtlasRangesIdx; --i) {
        if (fCopyPathRanges[i].fSrcProxy == srcProxy) {
            ++fCopyPathRanges[i].fCount;
            return;
        }
        int rangeFirstInstanceIdx = currentInstanceIdx - fCopyPathRanges[i].fCount;
        std::swap(fPathInstanceData[rangeFirstInstanceIdx], fPathInstanceData[currentInstanceIdx]);
        currentInstanceIdx = rangeFirstInstanceIdx;
    }

    // An instance with this particular proxy did not yet exist in the array. Add a range for it.
    emplace_at_memcpy(&fCopyPathRanges, fCurrCopyAtlasRangesIdx, std::move(srcProxy), 1);
}

static bool transform_path_pts(const SkMatrix& m, const SkPath& path,
                               const SkAutoSTArray<32, SkPoint>& outDevPts, SkRect* devBounds,
                               SkRect* devBounds45) {
    const SkPoint* pts = SkPathPriv::PointData(path);
    int numPts = path.countPoints();
    SkASSERT(numPts + 1 <= outDevPts.count());
    SkASSERT(numPts);

    // m45 transforms path points into "45 degree" device space. A bounding box in this space
    // gives the circumscribing octagon's diagonals. We could use SK_ScalarRoot2Over2, but an
    // orthonormal transform is not necessary as long as the shader uses the correct inverse.
    SkMatrix m45;
    m45.setSinCos(1, 1);
    m45.preConcat(m);

    // X,Y,T are two parallel view matrices that accumulate two bounding boxes as they map points:
    // device-space bounds and "45 degree" device-space bounds (| 1 -1 | * devCoords).
    //                                                          | 1  1 |
    Sk4f X = Sk4f(m.getScaleX(), m.getSkewY(), m45.getScaleX(), m45.getSkewY());
    Sk4f Y = Sk4f(m.getSkewX(), m.getScaleY(), m45.getSkewX(), m45.getScaleY());
    Sk4f T = Sk4f(m.getTranslateX(), m.getTranslateY(), m45.getTranslateX(), m45.getTranslateY());

    // Map the path's points to device space and accumulate bounding boxes.
    Sk4f devPt = SkNx_fma(Y, Sk4f(pts[0].y()), T);
    devPt = SkNx_fma(X, Sk4f(pts[0].x()), devPt);
    Sk4f topLeft = devPt;
    Sk4f bottomRight = devPt;

    // Store all 4 values [dev.x, dev.y, dev45.x, dev45.y]. We are only interested in the first
    // two, and will overwrite [dev45.x, dev45.y] with the next point. This is why the dst buffer
    // must be at least one larger than the number of points.
    devPt.store(&outDevPts[0]);

    for (int i = 1; i < numPts; ++i) {
        devPt = SkNx_fma(Y, Sk4f(pts[i].y()), T);
        devPt = SkNx_fma(X, Sk4f(pts[i].x()), devPt);
        topLeft = Sk4f::Min(topLeft, devPt);
        bottomRight = Sk4f::Max(bottomRight, devPt);
        devPt.store(&outDevPts[i]);
    }

    if (!(Sk4f(0) == topLeft*0).allTrue() || !(Sk4f(0) == bottomRight*0).allTrue()) {
        // The bounds are infinite or NaN.
        return false;
    }

    SkPoint topLeftPts[2], bottomRightPts[2];
    topLeft.store(topLeftPts);
    bottomRight.store(bottomRightPts);
    devBounds->setLTRB(topLeftPts[0].x(), topLeftPts[0].y(), bottomRightPts[0].x(),
                       bottomRightPts[0].y());
    devBounds45->setLTRB(topLeftPts[1].x(), topLeftPts[1].y(), bottomRightPts[1].x(),
                         bottomRightPts[1].y());
    return true;
}

GrCCAtlas* GrCCPerFlushResources::renderShapeInAtlas(
        const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
        SkRect* devBounds, SkRect* devBounds45, SkIRect* devIBounds, SkIVector* devToAtlasOffset) {
    SkASSERT(this->isMapped());
    SkASSERT(fNextPathInstanceIdx < fEndPathInstance);

    SkPath path;
    shape.asPath(&path);
    if (path.isEmpty()) {
        SkDEBUGCODE(--fEndPathInstance);
        return nullptr;
    }
    if (!transform_path_pts(m, path, fLocalDevPtsBuffer, devBounds, devBounds45)) {
        // The transformed path had infinite or NaN bounds.
        SkDEBUGCODE(--fEndPathInstance);
        return nullptr;
    }

    const SkStrokeRec& stroke = shape.style().strokeRec();
    if (!stroke.isFillStyle()) {
        float r = SkStrokeRec::GetInflationRadius(stroke.getJoin(), stroke.getMiter(),
                                                  stroke.getCap(), strokeDevWidth);
        devBounds->outset(r, r);
        // devBounds45 is in (| 1 -1 | * devCoords) space.
        //                    | 1  1 |
        devBounds45->outset(r*SK_ScalarSqrt2, r*SK_ScalarSqrt2);
    }
    devBounds->roundOut(devIBounds);

    GrScissorTest scissorTest;
    SkIRect clippedPathIBounds;
    if (!this->placeRenderedPathInAtlas(clipIBounds, *devIBounds, &scissorTest, &clippedPathIBounds,
                                        devToAtlasOffset)) {
        SkDEBUGCODE(--fEndPathInstance);
        return nullptr;  // Path was degenerate or clipped away.
    }

    if (stroke.isFillStyle()) {
        SkASSERT(0 == strokeDevWidth);
        fFiller.parseDeviceSpaceFill(path, fLocalDevPtsBuffer.begin(), scissorTest,
                                     clippedPathIBounds, *devToAtlasOffset);
    } else {
        // Stroke-and-fill is not yet supported.
        SkASSERT(SkStrokeRec::kStroke_Style == stroke.getStyle() || stroke.isHairlineStyle());
        SkASSERT(!stroke.isHairlineStyle() || 1 == strokeDevWidth);
        fStroker.parseDeviceSpaceStroke(path, fLocalDevPtsBuffer.begin(), stroke, strokeDevWidth,
                                        scissorTest, clippedPathIBounds, *devToAtlasOffset);
    }
    return &fRenderedAtlasStack.current();
}

const GrCCAtlas* GrCCPerFlushResources::renderDeviceSpacePathInAtlas(
        const SkIRect& clipIBounds, const SkPath& devPath, const SkIRect& devPathIBounds,
        SkIVector* devToAtlasOffset) {
    SkASSERT(this->isMapped());

    if (devPath.isEmpty()) {
        return nullptr;
    }

    GrScissorTest scissorTest;
    SkIRect clippedPathIBounds;
    if (!this->placeRenderedPathInAtlas(clipIBounds, devPathIBounds, &scissorTest,
                                        &clippedPathIBounds, devToAtlasOffset)) {
        return nullptr;
    }

    fFiller.parseDeviceSpaceFill(devPath, SkPathPriv::PointData(devPath), scissorTest,
                                 clippedPathIBounds, *devToAtlasOffset);
    return &fRenderedAtlasStack.current();
}

bool GrCCPerFlushResources::placeRenderedPathInAtlas(const SkIRect& clipIBounds,
                                                     const SkIRect& pathIBounds,
                                                     GrScissorTest* scissorTest,
                                                     SkIRect* clippedPathIBounds,
                                                     SkIVector* devToAtlasOffset) {
    if (clipIBounds.contains(pathIBounds)) {
        *clippedPathIBounds = pathIBounds;
        *scissorTest = GrScissorTest::kDisabled;
    } else if (clippedPathIBounds->intersect(clipIBounds, pathIBounds)) {
        *scissorTest = GrScissorTest::kEnabled;
    } else {
        return false;
    }

    if (GrCCAtlas* retiredAtlas =
                fRenderedAtlasStack.addRect(*clippedPathIBounds, devToAtlasOffset)) {
        // We did not fit in the previous coverage count atlas and it was retired. Close the path
        // parser's current batch (which does not yet include the path we just parsed). We will
        // render this batch into the retired atlas during finalize().
        retiredAtlas->setFillBatchID(fFiller.closeCurrentBatch());
        retiredAtlas->setStrokeBatchID(fStroker.closeCurrentBatch());
    }
    return true;
}

bool GrCCPerFlushResources::finalize(GrOnFlushResourceProvider* onFlushRP,
                                     SkTArray<sk_sp<GrRenderTargetContext>>* out) {
    SkASSERT(this->isMapped());
    SkASSERT(fNextPathInstanceIdx == fEndPathInstance);
    SkASSERT(fNextCopyInstanceIdx == fEndCopyInstance);

    fInstanceBuffer->unmap();
    fPathInstanceData = nullptr;

    if (!fCopyAtlasStack.empty()) {
        fCopyAtlasStack.current().setFillBatchID(fCopyPathRanges.count());
        fCurrCopyAtlasRangesIdx = fCopyPathRanges.count();
    }
    if (!fRenderedAtlasStack.empty()) {
        fRenderedAtlasStack.current().setFillBatchID(fFiller.closeCurrentBatch());
        fRenderedAtlasStack.current().setStrokeBatchID(fStroker.closeCurrentBatch());
    }

    // Build the GPU buffers to render path coverage counts. (This must not happen until after the
    // final calls to fFiller/fStroker.closeCurrentBatch().)
    if (!fFiller.prepareToDraw(onFlushRP)) {
        return false;
    }
    if (!fStroker.prepareToDraw(onFlushRP)) {
        return false;
    }

    // Draw the copies from 16-bit coverage count atlas(es) into 8-bit literal-coverage atlas(es).
    int copyRangeIdx = 0;
    int baseCopyInstance = 0;
    for (GrCCAtlasStack::Iter atlas(fCopyAtlasStack); atlas.next();) {
        int endCopyRange = atlas->getFillBatchID();
        SkASSERT(endCopyRange > copyRangeIdx);

        sk_sp<GrRenderTargetContext> rtc = atlas->makeRenderTargetContext(onFlushRP);
        for (; copyRangeIdx < endCopyRange; ++copyRangeIdx) {
            const CopyPathRange& copyRange = fCopyPathRanges[copyRangeIdx];
            int endCopyInstance = baseCopyInstance + copyRange.fCount;
            if (rtc) {
                auto op = CopyAtlasOp::Make(rtc->surfPriv().getContext(), sk_ref_sp(this),
                                            copyRange.fSrcProxy, baseCopyInstance, endCopyInstance,
                                            atlas->drawBounds());
                rtc->addDrawOp(GrNoClip(), std::move(op));
            }
            baseCopyInstance = endCopyInstance;
        }
        out->push_back(std::move(rtc));
    }
    SkASSERT(fCopyPathRanges.count() == copyRangeIdx);
    SkASSERT(fNextCopyInstanceIdx == baseCopyInstance);
    SkASSERT(baseCopyInstance == fEndCopyInstance);

    // Render the coverage count atlas(es).
    for (GrCCAtlasStack::Iter atlas(fRenderedAtlasStack); atlas.next();) {
        // Copies will be finished by the time we get to rendering new atlases. See if we can
        // recycle any previously invalidated atlas textures instead of creating new ones.
        sk_sp<GrTexture> backingTexture;
        for (sk_sp<GrTexture>& texture : fRecyclableAtlasTextures) {
            if (texture && atlas->currentHeight() == texture->height() &&
                    atlas->currentWidth() == texture->width()) {
                backingTexture = skstd::exchange(texture, nullptr);
                break;
            }
        }

        if (auto rtc = atlas->makeRenderTargetContext(onFlushRP, std::move(backingTexture))) {
            std::unique_ptr<GrDrawOp> op;
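            // Use the geometry shader implementation of the coverage processor if the hardware
            // supports geometry shaders; otherwise fall back to the vertex shader implementation.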
            if (onFlushRP->caps()->shaderCaps()->geometryShaderSupport()) {
                op = RenderAtlasOp<GrGSCoverageProcessor>::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), atlas->getFillBatchID(),
                        atlas->getStrokeBatchID(), atlas->drawBounds());
            } else {
                op = RenderAtlasOp<GrVSCoverageProcessor>::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), atlas->getFillBatchID(),
                        atlas->getStrokeBatchID(), atlas->drawBounds());
            }
            rtc->addDrawOp(GrNoClip(), std::move(op));
            out->push_back(std::move(rtc));
        }
    }

    return true;
}

void GrCCPerFlushResourceSpecs::cancelCopies() {
    // Convert copies to cached draws.
    fNumCachedPaths += fNumCopiedPaths[kFillIdx] + fNumCopiedPaths[kStrokeIdx];
    fNumCopiedPaths[kFillIdx] = fNumCopiedPaths[kStrokeIdx] = 0;
    fCopyPathStats[kFillIdx] = fCopyPathStats[kStrokeIdx] = GrCCRenderedPathStats();
    fCopyAtlasSpecs = GrCCAtlas::Specs();
}