/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCCPerFlushResources.h"

#include "include/private/GrRecordingContext.h"
#include "src/core/SkMakeUnique.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrOnFlushResourceProvider.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/GrSurfaceContextPriv.h"
#include "src/gpu/ccpr/GrCCPathCache.h"
#include "src/gpu/ccpr/GrGSCoverageProcessor.h"
#include "src/gpu/ccpr/GrVSCoverageProcessor.h"
#include "src/gpu/geometry/GrShape.h"

using FillBatchID = GrCCFiller::BatchID;
using StrokeBatchID = GrCCStroker::BatchID;
using PathInstance = GrCCPathProcessor::Instance;

static constexpr int kFillIdx = GrCCPerFlushResourceSpecs::kFillIdx;
static constexpr int kStrokeIdx = GrCCPerFlushResourceSpecs::kStrokeIdx;

namespace {

// Base class for an Op that renders a CCPR atlas.
class AtlasOp : public GrDrawOp {
public:
    FixedFunctionFlags fixedFunctionFlags() const override { return FixedFunctionFlags::kNone; }
    GrProcessorSet::Analysis finalize(const GrCaps&, const GrAppliedClip*,
                                      bool hasMixedSampledCoverage, GrClampType) override {
        return GrProcessorSet::EmptySetAnalysis();
    }
    CombineResult onCombineIfPossible(GrOp* other, const GrCaps&) override {
        // We will only make multiple copy ops if they have different source proxies.
        // TODO: make use of texture chaining.
        return CombineResult::kCannotCombine;
    }
    void onPrepare(GrOpFlushState*) override {}

protected:
    AtlasOp(uint32_t classID, sk_sp<const GrCCPerFlushResources> resources,
            const SkISize& drawBounds)
            : GrDrawOp(classID)
            , fResources(std::move(resources)) {
        this->setBounds(SkRect::MakeIWH(drawBounds.width(), drawBounds.height()),
                        GrOp::HasAABloat::kNo, GrOp::IsZeroArea::kNo);
    }

    const sk_sp<const GrCCPerFlushResources> fResources;
};

// Copies paths from a cached coverage count atlas into an 8-bit literal-coverage atlas.
class CopyAtlasOp : public AtlasOp {
public:
    DEFINE_OP_CLASS_ID

    static std::unique_ptr<GrDrawOp> Make(GrRecordingContext* context,
                                          sk_sp<const GrCCPerFlushResources> resources,
                                          sk_sp<GrTextureProxy> copyProxy, int baseInstance,
                                          int endInstance, const SkISize& drawBounds) {
        GrOpMemoryPool* pool = context->priv().opMemoryPool();

        return pool->allocate<CopyAtlasOp>(std::move(resources), std::move(copyProxy),
                                           baseInstance, endInstance, drawBounds);
    }

    const char* name() const override { return "CopyAtlasOp (CCPR)"; }

    void visitProxies(const VisitProxyFunc& fn) const override {
        fn(fSrcProxy.get(), GrMipMapped::kNo);
    }

    void onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) override {
        SkASSERT(fSrcProxy);
        auto srcProxy = fSrcProxy.get();
        SkASSERT(srcProxy->isInstantiated());

        GrCCPathProcessor pathProc(srcProxy->peekTexture(), srcProxy->textureSwizzle(),
                                   srcProxy->origin());

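        // Copy draws overwrite their destination, so the pipeline uses src blending with no
        // scissor test.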
        GrPipeline pipeline(GrScissorTest::kDisabled, SkBlendMode::kSrc,
                            flushState->drawOpArgs().fOutputSwizzle);
        GrPipeline::FixedDynamicState dynamicState;
        dynamicState.fPrimitiveProcessorTextures = &srcProxy;

        pathProc.drawPaths(flushState, pipeline, &dynamicState, *fResources, fBaseInstance,
                           fEndInstance, this->bounds());
    }

private:
    friend class ::GrOpMemoryPool; // for ctor

    CopyAtlasOp(sk_sp<const GrCCPerFlushResources> resources, sk_sp<GrTextureProxy> srcProxy,
                int baseInstance, int endInstance, const SkISize& drawBounds)
            : AtlasOp(ClassID(), std::move(resources), drawBounds)
            , fSrcProxy(srcProxy)
            , fBaseInstance(baseInstance)
            , fEndInstance(endInstance) {
    }
    sk_sp<GrTextureProxy> fSrcProxy;
    const int fBaseInstance;
    const int fEndInstance;
};

// Renders coverage counts to a CCPR atlas using the resources' pre-filled GrCCFiller and
// GrCCStroker.
template<typename ProcessorType> class RenderAtlasOp : public AtlasOp {
public:
    DEFINE_OP_CLASS_ID

    static std::unique_ptr<GrDrawOp> Make(GrRecordingContext* context,
                                          sk_sp<const GrCCPerFlushResources> resources,
                                          FillBatchID fillBatchID, StrokeBatchID strokeBatchID,
                                          const SkISize& drawBounds) {
        GrOpMemoryPool* pool = context->priv().opMemoryPool();

        return pool->allocate<RenderAtlasOp>(std::move(resources), fillBatchID, strokeBatchID,
                                             drawBounds);
    }

    // GrDrawOp interface.
    const char* name() const override { return "RenderAtlasOp (CCPR)"; }

    void onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) override {
        ProcessorType proc;
        fResources->filler().drawFills(flushState, &proc, fFillBatchID, fDrawBounds);
        fResources->stroker().drawStrokes(flushState, &proc, fStrokeBatchID, fDrawBounds);
    }

private:
    friend class ::GrOpMemoryPool; // for ctor

    RenderAtlasOp(sk_sp<const GrCCPerFlushResources> resources, FillBatchID fillBatchID,
                  StrokeBatchID strokeBatchID, const SkISize& drawBounds)
            : AtlasOp(ClassID(), std::move(resources), drawBounds)
            , fFillBatchID(fillBatchID)
            , fStrokeBatchID(strokeBatchID)
            , fDrawBounds(SkIRect::MakeWH(drawBounds.width(), drawBounds.height())) {
    }

    const FillBatchID fFillBatchID;
    const StrokeBatchID fStrokeBatchID;
    const SkIRect fDrawBounds;
};

}

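// Total number of PathInstances the per-flush instance buffer must hold: one per cached or newly
// rendered path, two per copied path, and none for clip paths.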
static int inst_buffer_count(const GrCCPerFlushResourceSpecs& specs) {
    return specs.fNumCachedPaths +
           // Copies get two instances per draw: 1 copy + 1 draw.
           (specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]) * 2 +
           specs.fNumRenderedPaths[kFillIdx] + specs.fNumRenderedPaths[kStrokeIdx];
           // No clips in instance buffers.
}

GrCCPerFlushResources::GrCCPerFlushResources(GrOnFlushResourceProvider* onFlushRP,
                                             const GrCCPerFlushResourceSpecs& specs)
        // Overallocate by one point so we can call Sk4f::Store at the final SkPoint in the array.
        // (See transform_path_pts below.)
        // FIXME: instead use built-in instructions to write only the first two lanes of an Sk4f.
        : fLocalDevPtsBuffer(SkTMax(specs.fRenderedPathStats[kFillIdx].fMaxPointsPerPath,
                                    specs.fRenderedPathStats[kStrokeIdx].fMaxPointsPerPath) + 1)
        , fFiller(specs.fNumRenderedPaths[kFillIdx] + specs.fNumClipPaths,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalSkPoints,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalSkVerbs,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalConicWeights)
        , fStroker(specs.fNumRenderedPaths[kStrokeIdx],
                   specs.fRenderedPathStats[kStrokeIdx].fNumTotalSkPoints,
                   specs.fRenderedPathStats[kStrokeIdx].fNumTotalSkVerbs)
        , fCopyAtlasStack(GrCCAtlas::CoverageType::kA8_LiteralCoverage, specs.fCopyAtlasSpecs,
                          onFlushRP->caps())
        , fRenderedAtlasStack(GrCCAtlas::CoverageType::kFP16_CoverageCount,
                              specs.fRenderedAtlasSpecs, onFlushRP->caps())
        , fIndexBuffer(GrCCPathProcessor::FindIndexBuffer(onFlushRP))
        , fVertexBuffer(GrCCPathProcessor::FindVertexBuffer(onFlushRP))
        , fInstanceBuffer(onFlushRP->makeBuffer(GrGpuBufferType::kVertex,
                                                inst_buffer_count(specs) * sizeof(PathInstance)))
        , fNextCopyInstanceIdx(0)
        , fNextPathInstanceIdx(specs.fNumCopiedPaths[kFillIdx] +
                               specs.fNumCopiedPaths[kStrokeIdx]) {
    if (!fIndexBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR index buffer. No paths will be drawn.\n");
        return;
    }
    if (!fVertexBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR vertex buffer. No paths will be drawn.\n");
        return;
    }
    if (!fInstanceBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR instance buffer. No paths will be drawn.\n");
        return;
    }
    fPathInstanceData = static_cast<PathInstance*>(fInstanceBuffer->map());
    SkASSERT(fPathInstanceData);
    SkDEBUGCODE(fEndCopyInstance =
                        specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]);
    SkDEBUGCODE(fEndPathInstance = inst_buffer_count(specs));
}

void GrCCPerFlushResources::upgradeEntryToLiteralCoverageAtlas(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP, GrCCPathCacheEntry* entry,
        GrCCPathProcessor::DoEvenOddFill evenOdd) {
    using ReleaseAtlasResult = GrCCPathCacheEntry::ReleaseAtlasResult;
    SkASSERT(this->isMapped());
    SkASSERT(fNextCopyInstanceIdx < fEndCopyInstance);

    const GrCCCachedAtlas* cachedAtlas = entry->cachedAtlas();
    SkASSERT(cachedAtlas);
    SkASSERT(cachedAtlas->getOnFlushProxy());

    if (GrCCAtlas::CoverageType::kA8_LiteralCoverage == cachedAtlas->coverageType()) {
        // This entry has already been upgraded to literal coverage. The path must have been drawn
        // multiple times during the flush.
        SkDEBUGCODE(--fEndCopyInstance);
        return;
    }

    SkIVector newAtlasOffset;
    if (GrCCAtlas* retiredAtlas = fCopyAtlasStack.addRect(entry->devIBounds(), &newAtlasOffset)) {
        // We did not fit in the previous copy atlas and it was retired. We will render the ranges
        // up until fCopyPathRanges.count() into the retired atlas during finalize().
        retiredAtlas->setFillBatchID(fCopyPathRanges.count());
        fCurrCopyAtlasRangesIdx = fCopyPathRanges.count();
    }

    this->recordCopyPathInstance(*entry, newAtlasOffset, evenOdd,
                                 sk_ref_sp(cachedAtlas->getOnFlushProxy()));

    sk_sp<GrTexture> previousAtlasTexture =
            sk_ref_sp(cachedAtlas->getOnFlushProxy()->peekTexture());
    GrCCAtlas* newAtlas = &fCopyAtlasStack.current();
    if (ReleaseAtlasResult::kDidInvalidateFromCache ==
            entry->upgradeToLiteralCoverageAtlas(pathCache, onFlushRP, newAtlas, newAtlasOffset)) {
        // This texture just got booted out of the cache. Keep it around, in case we might be able
        // to recycle it for a new atlas. We can recycle it because copying happens before
        // rendering new paths, and every path from the atlas that we're planning to use this
        // flush will be copied to a new atlas. We'll never copy some and leave others.
        fRecyclableAtlasTextures.push_back(std::move(previousAtlasTexture));
    }
}

template<typename T, typename... Args>
static void emplace_at_memcpy(SkTArray<T>* array, int idx, Args&&... args) {
    if (int moveCount = array->count() - idx) {
        array->push_back();
        T* location = array->begin() + idx;
        memmove(location + 1, location, moveCount * sizeof(T));
        new (location) T(std::forward<Args>(args)...);
    } else {
        array->emplace_back(std::forward<Args>(args)...);
    }
}

void GrCCPerFlushResources::recordCopyPathInstance(const GrCCPathCacheEntry& entry,
                                                   const SkIVector& newAtlasOffset,
                                                   GrCCPathProcessor::DoEvenOddFill evenOdd,
                                                   sk_sp<GrTextureProxy> srcProxy) {
    SkASSERT(fNextCopyInstanceIdx < fEndCopyInstance);

    // Write the instance at the back of the array.
    int currentInstanceIdx = fNextCopyInstanceIdx++;
    constexpr uint64_t kWhite = (((uint64_t) SK_Half1) <<  0) |
                                (((uint64_t) SK_Half1) << 16) |
                                (((uint64_t) SK_Half1) << 32) |
                                (((uint64_t) SK_Half1) << 48);
    fPathInstanceData[currentInstanceIdx].set(entry, newAtlasOffset, kWhite, evenOdd);

    // Percolate the instance forward until it's contiguous with other instances that share the
    // same proxy.
    for (int i = fCopyPathRanges.count() - 1; i >= fCurrCopyAtlasRangesIdx; --i) {
        if (fCopyPathRanges[i].fSrcProxy == srcProxy) {
            ++fCopyPathRanges[i].fCount;
            return;
        }
        int rangeFirstInstanceIdx = currentInstanceIdx - fCopyPathRanges[i].fCount;
        std::swap(fPathInstanceData[rangeFirstInstanceIdx], fPathInstanceData[currentInstanceIdx]);
        currentInstanceIdx = rangeFirstInstanceIdx;
    }

    // An instance with this particular proxy did not yet exist in the array. Add a range for it.
    emplace_at_memcpy(&fCopyPathRanges, fCurrCopyAtlasRangesIdx, std::move(srcProxy), 1);
}

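// Maps the path's points into device space with SIMD, accumulating both the axis-aligned and
// "45 degree" bounding boxes in a single pass, and writes the device-space points to outDevPts.
// Returns false if the transformed bounds are not finite.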
static bool transform_path_pts(
        const SkMatrix& m, const SkPath& path, const SkAutoSTArray<32, SkPoint>& outDevPts,
        GrOctoBounds* octoBounds) {
    const SkPoint* pts = SkPathPriv::PointData(path);
    int numPts = path.countPoints();
    SkASSERT(numPts + 1 <= outDevPts.count());
    SkASSERT(numPts);

    // m45 transforms path points into "45 degree" device space. A bounding box in this space gives
    // the circumscribing octagon's diagonals. We could use SK_ScalarRoot2Over2, but an orthonormal
    // transform is not necessary as long as the shader uses the correct inverse.
    SkMatrix m45;
    m45.setSinCos(1, 1);
    m45.preConcat(m);

    // X,Y,T are two parallel view matrices that accumulate two bounding boxes as they map points:
    // device-space bounds and "45 degree" device-space bounds (| 1 -1 | * devCoords).
    //                                                          | 1  1 |
    Sk4f X = Sk4f(m.getScaleX(), m.getSkewY(), m45.getScaleX(), m45.getSkewY());
    Sk4f Y = Sk4f(m.getSkewX(), m.getScaleY(), m45.getSkewX(), m45.getScaleY());
    Sk4f T = Sk4f(m.getTranslateX(), m.getTranslateY(), m45.getTranslateX(), m45.getTranslateY());

    // Map the path's points to device space and accumulate bounding boxes.
    Sk4f devPt = SkNx_fma(Y, Sk4f(pts[0].y()), T);
    devPt = SkNx_fma(X, Sk4f(pts[0].x()), devPt);
    Sk4f topLeft = devPt;
    Sk4f bottomRight = devPt;

    // Store all 4 values [dev.x, dev.y, dev45.x, dev45.y]. We are only interested in the first
    // two, and will overwrite [dev45.x, dev45.y] with the next point. This is why the dst buffer
    // must be at least one larger than the number of points.
    devPt.store(&outDevPts[0]);

    for (int i = 1; i < numPts; ++i) {
        devPt = SkNx_fma(Y, Sk4f(pts[i].y()), T);
        devPt = SkNx_fma(X, Sk4f(pts[i].x()), devPt);
        topLeft = Sk4f::Min(topLeft, devPt);
        bottomRight = Sk4f::Max(bottomRight, devPt);
        devPt.store(&outDevPts[i]);
    }

    if (!(Sk4f(0) == topLeft*0).allTrue() || !(Sk4f(0) == bottomRight*0).allTrue()) {
        // The bounds are infinite or NaN.
        return false;
    }

    SkPoint topLeftPts[2], bottomRightPts[2];
    topLeft.store(topLeftPts);
    bottomRight.store(bottomRightPts);

    const SkRect& devBounds = SkRect::MakeLTRB(
            topLeftPts[0].x(), topLeftPts[0].y(), bottomRightPts[0].x(), bottomRightPts[0].y());
    const SkRect& devBounds45 = SkRect::MakeLTRB(
            topLeftPts[1].x(), topLeftPts[1].y(), bottomRightPts[1].x(), bottomRightPts[1].y());

    octoBounds->set(devBounds, devBounds45);
    return true;
}

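// Parses the given shape's fill or stroke into the filler/stroker and reserves space for it in
// the current coverage count atlas. Returns that atlas, or null if the path is empty, has
// non-finite bounds, or lies entirely outside clipIBounds.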
GrCCAtlas* GrCCPerFlushResources::renderShapeInAtlas(
        const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
        GrOctoBounds* octoBounds, SkIRect* devIBounds, SkIVector* devToAtlasOffset) {
    SkASSERT(this->isMapped());
    SkASSERT(fNextPathInstanceIdx < fEndPathInstance);

    SkPath path;
    shape.asPath(&path);
    if (path.isEmpty()) {
        SkDEBUGCODE(--fEndPathInstance);
        return nullptr;
    }
    if (!transform_path_pts(m, path, fLocalDevPtsBuffer, octoBounds)) {
        // The transformed path had infinite or NaN bounds.
        SkDEBUGCODE(--fEndPathInstance);
        return nullptr;
    }

    const SkStrokeRec& stroke = shape.style().strokeRec();
    if (!stroke.isFillStyle()) {
        float r = SkStrokeRec::GetInflationRadius(
                stroke.getJoin(), stroke.getMiter(), stroke.getCap(), strokeDevWidth);
        octoBounds->outset(r);
    }

    GrScissorTest enableScissorInAtlas;
    if (clipIBounds.contains(octoBounds->bounds())) {
        enableScissorInAtlas = GrScissorTest::kDisabled;
    } else if (octoBounds->clip(clipIBounds)) {
        enableScissorInAtlas = GrScissorTest::kEnabled;
    } else {
        // The clip and octo bounds do not intersect. Draw nothing.
        SkDEBUGCODE(--fEndPathInstance);
        return nullptr;
    }
    octoBounds->roundOut(devIBounds);
    SkASSERT(clipIBounds.contains(*devIBounds));

    this->placeRenderedPathInAtlas(*devIBounds, enableScissorInAtlas, devToAtlasOffset);

    if (stroke.isFillStyle()) {
        SkASSERT(0 == strokeDevWidth);
        fFiller.parseDeviceSpaceFill(path, fLocalDevPtsBuffer.begin(), enableScissorInAtlas,
                                     *devIBounds, *devToAtlasOffset);
    } else {
        // Stroke-and-fill is not yet supported.
        SkASSERT(SkStrokeRec::kStroke_Style == stroke.getStyle() || stroke.isHairlineStyle());
        SkASSERT(!stroke.isHairlineStyle() || 1 == strokeDevWidth);
        fStroker.parseDeviceSpaceStroke(
                path, fLocalDevPtsBuffer.begin(), stroke, strokeDevWidth, enableScissorInAtlas,
                *devIBounds, *devToAtlasOffset);
    }
    return &fRenderedAtlasStack.current();
}

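// Renders a path that is already in device space (e.g. a clip path) into the current coverage
// count atlas. No PathInstance is recorded; the caller samples the atlas directly.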
const GrCCAtlas* GrCCPerFlushResources::renderDeviceSpacePathInAtlas(
        const SkIRect& clipIBounds, const SkPath& devPath, const SkIRect& devPathIBounds,
        SkIVector* devToAtlasOffset) {
    SkASSERT(this->isMapped());

    if (devPath.isEmpty()) {
        return nullptr;
    }

    GrScissorTest enableScissorInAtlas;
    SkIRect clippedPathIBounds;
    if (clipIBounds.contains(devPathIBounds)) {
        clippedPathIBounds = devPathIBounds;
        enableScissorInAtlas = GrScissorTest::kDisabled;
    } else if (clippedPathIBounds.intersect(clipIBounds, devPathIBounds)) {
        enableScissorInAtlas = GrScissorTest::kEnabled;
    } else {
        // The clip and path bounds do not intersect. Draw nothing.
        return nullptr;
    }

    this->placeRenderedPathInAtlas(clippedPathIBounds, enableScissorInAtlas, devToAtlasOffset);
    fFiller.parseDeviceSpaceFill(devPath, SkPathPriv::PointData(devPath), enableScissorInAtlas,
                                 clippedPathIBounds, *devToAtlasOffset);
    return &fRenderedAtlasStack.current();
}

void GrCCPerFlushResources::placeRenderedPathInAtlas(
        const SkIRect& clippedPathIBounds, GrScissorTest scissorTest, SkIVector* devToAtlasOffset) {
    if (GrCCAtlas* retiredAtlas =
                fRenderedAtlasStack.addRect(clippedPathIBounds, devToAtlasOffset)) {
        // We did not fit in the previous coverage count atlas and it was retired. Close the
        // filler and stroker's current batches (which do not yet include the path we just
        // parsed). We will render these batches into the retired atlas during finalize().
        retiredAtlas->setFillBatchID(fFiller.closeCurrentBatch());
        retiredAtlas->setStrokeBatchID(fStroker.closeCurrentBatch());
    }
}

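// Unmaps the instance buffer, closes the final fill/stroke batches, and builds the draw ops that
// copy cached paths and render new coverage counts into their atlases. The resulting render
// target contexts are appended to 'out' so the caller can flush them before the atlases are
// sampled.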
bool GrCCPerFlushResources::finalize(GrOnFlushResourceProvider* onFlushRP,
                                     SkTArray<sk_sp<GrRenderTargetContext>>* out) {
    SkASSERT(this->isMapped());
    SkASSERT(fNextPathInstanceIdx == fEndPathInstance);
    SkASSERT(fNextCopyInstanceIdx == fEndCopyInstance);

    fInstanceBuffer->unmap();
    fPathInstanceData = nullptr;

    if (!fCopyAtlasStack.empty()) {
        fCopyAtlasStack.current().setFillBatchID(fCopyPathRanges.count());
        fCurrCopyAtlasRangesIdx = fCopyPathRanges.count();
    }
    if (!fRenderedAtlasStack.empty()) {
        fRenderedAtlasStack.current().setFillBatchID(fFiller.closeCurrentBatch());
        fRenderedAtlasStack.current().setStrokeBatchID(fStroker.closeCurrentBatch());
    }

    // Build the GPU buffers to render path coverage counts. (This must not happen until after the
    // final calls to fFiller/fStroker.closeCurrentBatch().)
    if (!fFiller.prepareToDraw(onFlushRP)) {
        return false;
    }
    if (!fStroker.prepareToDraw(onFlushRP)) {
        return false;
    }

    // Draw the copies from the 16-bit coverage count atlas(es) into the 8-bit literal-coverage
    // (cached) atlas(es).
    int copyRangeIdx = 0;
    int baseCopyInstance = 0;
    for (GrCCAtlasStack::Iter atlas(fCopyAtlasStack); atlas.next();) {
        int endCopyRange = atlas->getFillBatchID();
        SkASSERT(endCopyRange > copyRangeIdx);

        sk_sp<GrRenderTargetContext> rtc = atlas->makeRenderTargetContext(onFlushRP);
        for (; copyRangeIdx < endCopyRange; ++copyRangeIdx) {
            const CopyPathRange& copyRange = fCopyPathRanges[copyRangeIdx];
            int endCopyInstance = baseCopyInstance + copyRange.fCount;
            if (rtc) {
                auto op = CopyAtlasOp::Make(rtc->surfPriv().getContext(), sk_ref_sp(this),
                                            copyRange.fSrcProxy, baseCopyInstance, endCopyInstance,
                                            atlas->drawBounds());
                rtc->addDrawOp(GrNoClip(), std::move(op));
            }
            baseCopyInstance = endCopyInstance;
        }
        out->push_back(std::move(rtc));
    }
    SkASSERT(fCopyPathRanges.count() == copyRangeIdx);
    SkASSERT(fNextCopyInstanceIdx == baseCopyInstance);
    SkASSERT(baseCopyInstance == fEndCopyInstance);

    // Render the coverage count atlas(es).
    for (GrCCAtlasStack::Iter atlas(fRenderedAtlasStack); atlas.next();) {
        // Copies will be finished by the time we get to rendering new atlases. See if we can
        // recycle any previous invalidated atlas textures instead of creating new ones.
        sk_sp<GrTexture> backingTexture;
        for (sk_sp<GrTexture>& texture : fRecyclableAtlasTextures) {
            if (texture && atlas->currentHeight() == texture->height() &&
                    atlas->currentWidth() == texture->width()) {
                backingTexture = skstd::exchange(texture, nullptr);
                break;
            }
        }

        if (auto rtc = atlas->makeRenderTargetContext(onFlushRP, std::move(backingTexture))) {
            std::unique_ptr<GrDrawOp> op;
            if (onFlushRP->caps()->shaderCaps()->geometryShaderSupport()) {
                op = RenderAtlasOp<GrGSCoverageProcessor>::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), atlas->getFillBatchID(),
                        atlas->getStrokeBatchID(), atlas->drawBounds());
            } else {
                op = RenderAtlasOp<GrVSCoverageProcessor>::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), atlas->getFillBatchID(),
                        atlas->getStrokeBatchID(), atlas->drawBounds());
            }
            rtc->addDrawOp(GrNoClip(), std::move(op));
            out->push_back(std::move(rtc));
        }
    }

    return true;
}

void GrCCPerFlushResourceSpecs::cancelCopies() {
    // Convert copies to cached draws.
    fNumCachedPaths += fNumCopiedPaths[kFillIdx] + fNumCopiedPaths[kStrokeIdx];
    fNumCopiedPaths[kFillIdx] = fNumCopiedPaths[kStrokeIdx] = 0;
    fCopyPathStats[kFillIdx] = fCopyPathStats[kStrokeIdx] = GrCCRenderedPathStats();
    fCopyAtlasSpecs = GrCCAtlas::Specs();
}