/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCCPerFlushResources.h"

#include "include/private/GrRecordingContext.h"
#include "src/core/SkMakeUnique.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrOnFlushResourceProvider.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/GrSurfaceContextPriv.h"
#include "src/gpu/ccpr/GrCCPathCache.h"
#include "src/gpu/ccpr/GrGSCoverageProcessor.h"
#include "src/gpu/ccpr/GrSampleMaskProcessor.h"
#include "src/gpu/ccpr/GrVSCoverageProcessor.h"
#include "src/gpu/geometry/GrShape.h"

using CoverageType = GrCCAtlas::CoverageType;
using FillBatchID = GrCCFiller::BatchID;
using StrokeBatchID = GrCCStroker::BatchID;
using PathInstance = GrCCPathProcessor::Instance;

static constexpr int kFillIdx = GrCCPerFlushResourceSpecs::kFillIdx;
static constexpr int kStrokeIdx = GrCCPerFlushResourceSpecs::kStrokeIdx;

namespace {

// Base class for an Op that renders a CCPR atlas.
class AtlasOp : public GrDrawOp {
public:
    FixedFunctionFlags fixedFunctionFlags() const override { return FixedFunctionFlags::kNone; }
    GrProcessorSet::Analysis finalize(const GrCaps&, const GrAppliedClip*,
                                      bool hasMixedSampledCoverage, GrClampType) override {
        return GrProcessorSet::EmptySetAnalysis();
    }
    CombineResult onCombineIfPossible(GrOp* other, const GrCaps&) override {
        // We will only make multiple copy ops if they have different source proxies.
        // TODO: make use of texture chaining.
        return CombineResult::kCannotCombine;
    }
    void onPrepare(GrOpFlushState*) override {}

protected:
    AtlasOp(uint32_t classID, sk_sp<const GrCCPerFlushResources> resources,
            const SkISize& drawBounds)
            : GrDrawOp(classID)
            , fResources(std::move(resources)) {
        this->setBounds(SkRect::MakeIWH(drawBounds.width(), drawBounds.height()),
                        GrOp::HasAABloat::kNo, GrOp::IsZeroArea::kNo);
    }

    const sk_sp<const GrCCPerFlushResources> fResources;
};

// Copies paths from a cached coverage count or msaa atlas into an 8-bit literal-coverage atlas.
class CopyAtlasOp : public AtlasOp {
public:
    DEFINE_OP_CLASS_ID

    static std::unique_ptr<GrDrawOp> Make(
            GrRecordingContext* context, sk_sp<const GrCCPerFlushResources> resources,
            sk_sp<GrTextureProxy> copyProxy, int baseInstance, int endInstance,
            const SkISize& drawBounds) {
        GrOpMemoryPool* pool = context->priv().opMemoryPool();

        return pool->allocate<CopyAtlasOp>(std::move(resources), std::move(copyProxy), baseInstance,
                                           endInstance, drawBounds);
    }

    const char* name() const override { return "CopyAtlasOp (CCPR)"; }

    void visitProxies(const VisitProxyFunc& fn) const override {
        fn(fSrcProxy.get(), GrMipMapped::kNo);
    }

    void onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) override {
        SkASSERT(fSrcProxy);
        auto srcProxy = fSrcProxy.get();
        SkASSERT(srcProxy->isInstantiated());

        auto coverageMode = GrCCPathProcessor::GetCoverageMode(
                fResources->renderedPathCoverageType());
        GrCCPathProcessor pathProc(coverageMode, srcProxy->peekTexture(),
                                   srcProxy->textureSwizzle(), srcProxy->origin());

        GrPipeline pipeline(GrScissorTest::kDisabled, SkBlendMode::kSrc,
                            flushState->drawOpArgs().fOutputSwizzle);
        GrPipeline::FixedDynamicState dynamicState;
        dynamicState.fPrimitiveProcessorTextures = &srcProxy;

        pathProc.drawPaths(flushState, pipeline, &dynamicState, *fResources, fBaseInstance,
                           fEndInstance, this->bounds());
    }

private:
    friend class ::GrOpMemoryPool; // for ctor

    CopyAtlasOp(sk_sp<const GrCCPerFlushResources> resources, sk_sp<GrTextureProxy> srcProxy,
                int baseInstance, int endInstance, const SkISize& drawBounds)
            : AtlasOp(ClassID(), std::move(resources), drawBounds)
            , fSrcProxy(srcProxy)
            , fBaseInstance(baseInstance)
            , fEndInstance(endInstance) {
    }
    sk_sp<GrTextureProxy> fSrcProxy;
    const int fBaseInstance;
    const int fEndInstance;
};

// Renders coverage counts to a CCPR atlas using the resources' pre-filled GrCCFiller and
// GrCCStroker.
template<typename ProcessorType> class RenderAtlasOp : public AtlasOp {
public:
    DEFINE_OP_CLASS_ID

    static std::unique_ptr<GrDrawOp> Make(
            GrRecordingContext* context, sk_sp<const GrCCPerFlushResources> resources,
            FillBatchID fillBatchID, StrokeBatchID strokeBatchID, const SkISize& drawBounds) {
        GrOpMemoryPool* pool = context->priv().opMemoryPool();

        return pool->allocate<RenderAtlasOp>(
                std::move(resources), fillBatchID, strokeBatchID, drawBounds);
    }

    // GrDrawOp interface.
    const char* name() const override { return "RenderAtlasOp (CCPR)"; }

    void onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) override {
        ProcessorType proc;
        GrPipeline pipeline(GrScissorTest::kEnabled, SkBlendMode::kPlus,
                            flushState->drawOpArgs().fOutputSwizzle);
        fResources->filler().drawFills(flushState, &proc, pipeline, fFillBatchID, fDrawBounds);
        fResources->stroker().drawStrokes(flushState, &proc, fStrokeBatchID, fDrawBounds);
    }

private:
    friend class ::GrOpMemoryPool; // for ctor

    RenderAtlasOp(sk_sp<const GrCCPerFlushResources> resources, FillBatchID fillBatchID,
                  StrokeBatchID strokeBatchID, const SkISize& drawBounds)
            : AtlasOp(ClassID(), std::move(resources), drawBounds)
            , fFillBatchID(fillBatchID)
            , fStrokeBatchID(strokeBatchID)
            , fDrawBounds(SkIRect::MakeWH(drawBounds.width(), drawBounds.height())) {
    }

    const FillBatchID fFillBatchID;
    const StrokeBatchID fStrokeBatchID;
    const SkIRect fDrawBounds;
};

}

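// Total number of PathInstances the per-flush instance buffer must hold: one per cached path, two
// per copied path (1 copy + 1 draw), and one per newly rendered path.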
static int inst_buffer_count(const GrCCPerFlushResourceSpecs& specs) {
    return specs.fNumCachedPaths +
           // Copies get two instances per draw: 1 copy + 1 draw.
           (specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]) * 2 +
           specs.fNumRenderedPaths[kFillIdx] + specs.fNumRenderedPaths[kStrokeIdx];
           // No clips in instance buffers.
}

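// Sets up the filler, stroker, atlas stacks, and GPU buffers this flush will need, sized from the
// counts and path stats accumulated in 'specs'. In MSAA mode, an additional buffer of
// stencil-resolve rect instances is allocated as well.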
GrCCPerFlushResources::GrCCPerFlushResources(
        GrOnFlushResourceProvider* onFlushRP, CoverageType coverageType,
        const GrCCPerFlushResourceSpecs& specs)
        // Overallocate by one point so we can call Sk4f::Store at the final SkPoint in the array.
        // (See transform_path_pts below.)
        // FIXME: instead use built-in instructions to write only the first two lanes of an Sk4f.
        : fLocalDevPtsBuffer(SkTMax(specs.fRenderedPathStats[kFillIdx].fMaxPointsPerPath,
                                    specs.fRenderedPathStats[kStrokeIdx].fMaxPointsPerPath) + 1)
        , fFiller((CoverageType::kFP16_CoverageCount == coverageType)
                          ? GrCCFiller::Algorithm::kCoverageCount
                          : GrCCFiller::Algorithm::kStencilWindingCount,
                  specs.fNumRenderedPaths[kFillIdx] + specs.fNumClipPaths,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalSkPoints,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalSkVerbs,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalConicWeights)
        , fStroker(specs.fNumRenderedPaths[kStrokeIdx],
                   specs.fRenderedPathStats[kStrokeIdx].fNumTotalSkPoints,
                   specs.fRenderedPathStats[kStrokeIdx].fNumTotalSkVerbs)
        , fCopyAtlasStack(CoverageType::kA8_LiteralCoverage, specs.fCopyAtlasSpecs,
                          onFlushRP->caps())
        , fRenderedAtlasStack(coverageType, specs.fRenderedAtlasSpecs, onFlushRP->caps())
        , fIndexBuffer(GrCCPathProcessor::FindIndexBuffer(onFlushRP))
        , fVertexBuffer(GrCCPathProcessor::FindVertexBuffer(onFlushRP))
        , fInstanceBuffer(onFlushRP->makeBuffer(GrGpuBufferType::kVertex,
                                                inst_buffer_count(specs) * sizeof(PathInstance)))
        , fNextCopyInstanceIdx(0)
        , fNextPathInstanceIdx(
                specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]) {
    if (!fIndexBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR index buffer. No paths will be drawn.\n");
        return;
    }
    if (!fVertexBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR vertex buffer. No paths will be drawn.\n");
        return;
    }
    if (!fInstanceBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR instance buffer. No paths will be drawn.\n");
        return;
    }
    fPathInstanceData = static_cast<PathInstance*>(fInstanceBuffer->map());
    SkASSERT(fPathInstanceData);

    if (CoverageType::kA8_Multisample == coverageType) {
        int numRenderedPaths =
                specs.fNumRenderedPaths[kFillIdx] + specs.fNumRenderedPaths[kStrokeIdx] +
                specs.fNumClipPaths;
        fStencilResolveBuffer = onFlushRP->makeBuffer(
                GrGpuBufferType::kVertex,
                numRenderedPaths * sizeof(GrStencilAtlasOp::ResolveRectInstance));
        fStencilResolveInstanceData = static_cast<GrStencilAtlasOp::ResolveRectInstance*>(
                fStencilResolveBuffer->map());
        SkASSERT(fStencilResolveInstanceData);
        SkDEBUGCODE(fEndStencilResolveInstance = numRenderedPaths);
    }

    SkDEBUGCODE(fEndCopyInstance =
                        specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]);
    SkDEBUGCODE(fEndPathInstance = inst_buffer_count(specs));
}

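// Upgrades a cached path that currently lives in a coverage-count or MSAA atlas: records a copy of
// it into the 8-bit literal-coverage copy atlas and points the cache entry at its new location. If
// the entry was already upgraded earlier in this flush, this is a no-op.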
void GrCCPerFlushResources::upgradeEntryToLiteralCoverageAtlas(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP, GrCCPathCacheEntry* entry,
        GrFillRule fillRule) {
    using ReleaseAtlasResult = GrCCPathCacheEntry::ReleaseAtlasResult;
    SkASSERT(this->isMapped());
    SkASSERT(fNextCopyInstanceIdx < fEndCopyInstance);

    const GrCCCachedAtlas* cachedAtlas = entry->cachedAtlas();
    SkASSERT(cachedAtlas);
    SkASSERT(cachedAtlas->getOnFlushProxy());

    if (CoverageType::kA8_LiteralCoverage == cachedAtlas->coverageType()) {
        // This entry has already been upgraded to literal coverage. The path must have been drawn
        // multiple times during the flush.
        SkDEBUGCODE(--fEndCopyInstance);
        return;
    }

    SkIVector newAtlasOffset;
    if (GrCCAtlas* retiredAtlas = fCopyAtlasStack.addRect(entry->devIBounds(), &newAtlasOffset)) {
        // We did not fit in the previous copy atlas and it was retired. We will render the ranges
        // up until fCopyPathRanges.count() into the retired atlas during finalize().
        retiredAtlas->setFillBatchID(fCopyPathRanges.count());
        fCurrCopyAtlasRangesIdx = fCopyPathRanges.count();
    }

    this->recordCopyPathInstance(
            *entry, newAtlasOffset, fillRule, sk_ref_sp(cachedAtlas->getOnFlushProxy()));

    sk_sp<GrTexture> previousAtlasTexture =
            sk_ref_sp(cachedAtlas->getOnFlushProxy()->peekTexture());
    GrCCAtlas* newAtlas = &fCopyAtlasStack.current();
    if (ReleaseAtlasResult::kDidInvalidateFromCache ==
            entry->upgradeToLiteralCoverageAtlas(pathCache, onFlushRP, newAtlas, newAtlasOffset)) {
        // This texture just got booted out of the cache. Keep it around, in case we can recycle it
        // for a new atlas. Recycling is safe because copying happens before rendering new paths,
        // and every path from this atlas that we plan to use this flush will be copied to a new
        // atlas. We will never copy some and leave others.
        fRecyclableAtlasTextures.push_back(std::move(previousAtlasTexture));
    }
}

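// Inserts a new T at 'idx', shifting the tail of the array back by one element with memcpy and
// constructing the new element in place. Assumes T is safe to relocate with memcpy.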
template<typename T, typename... Args>
static void emplace_at_memcpy(SkTArray<T>* array, int idx, Args&&... args) {
    if (int moveCount = array->count() - idx) {
        array->push_back();
        T* location = array->begin() + idx;
        memcpy(location+1, location, moveCount * sizeof(T));
        new (location) T(std::forward<Args>(args)...);
    } else {
        array->emplace_back(std::forward<Args>(args)...);
    }
}

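// Writes the copy instance for 'entry' into the instance buffer and merges it into the existing
// run of copy instances that share the same source proxy, creating a new range if none exists yet.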
void GrCCPerFlushResources::recordCopyPathInstance(
        const GrCCPathCacheEntry& entry, const SkIVector& newAtlasOffset, GrFillRule fillRule,
        sk_sp<GrTextureProxy> srcProxy) {
    SkASSERT(fNextCopyInstanceIdx < fEndCopyInstance);

    // Write the instance at the back of the array.
    int currentInstanceIdx = fNextCopyInstanceIdx++;
    constexpr uint64_t kWhite = (((uint64_t) SK_Half1) << 0) |
                                (((uint64_t) SK_Half1) << 16) |
                                (((uint64_t) SK_Half1) << 32) |
                                (((uint64_t) SK_Half1) << 48);
    fPathInstanceData[currentInstanceIdx].set(entry, newAtlasOffset, kWhite, fillRule);

    // Percolate the instance forward until it's contiguous with other instances that share the
    // same proxy.
    for (int i = fCopyPathRanges.count() - 1; i >= fCurrCopyAtlasRangesIdx; --i) {
        if (fCopyPathRanges[i].fSrcProxy == srcProxy) {
            ++fCopyPathRanges[i].fCount;
            return;
        }
        int rangeFirstInstanceIdx = currentInstanceIdx - fCopyPathRanges[i].fCount;
        std::swap(fPathInstanceData[rangeFirstInstanceIdx], fPathInstanceData[currentInstanceIdx]);
        currentInstanceIdx = rangeFirstInstanceIdx;
    }

    // An instance with this particular proxy did not yet exist in the array. Add a range for it.
    emplace_at_memcpy(&fCopyPathRanges, fCurrCopyAtlasRangesIdx, std::move(srcProxy), 1);
}

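// Maps the path's points into device space with 'm' using SIMD, writing the transformed points to
// 'outDevPts' and accumulating both the axis-aligned and "45 degree" bounding boxes into
// 'octoBounds'. Returns false if the resulting bounds are infinite or NaN.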
static bool transform_path_pts(
        const SkMatrix& m, const SkPath& path, const SkAutoSTArray<32, SkPoint>& outDevPts,
        GrOctoBounds* octoBounds) {
    const SkPoint* pts = SkPathPriv::PointData(path);
    int numPts = path.countPoints();
    SkASSERT(numPts + 1 <= outDevPts.count());
    SkASSERT(numPts);

    // m45 transforms path points into "45 degree" device space. A bounding box in this space gives
    // the circumscribing octagon's diagonals. We could use SK_ScalarRoot2Over2, but an orthonormal
    // transform is not necessary as long as the shader uses the correct inverse.
    SkMatrix m45;
    m45.setSinCos(1, 1);
    m45.preConcat(m);

    // X,Y,T are two parallel view matrices that accumulate two bounding boxes as they map points:
    // device-space bounds and "45 degree" device-space bounds (| 1 -1 | * devCoords).
    //                                                          | 1  1 |
    Sk4f X = Sk4f(m.getScaleX(), m.getSkewY(), m45.getScaleX(), m45.getSkewY());
    Sk4f Y = Sk4f(m.getSkewX(), m.getScaleY(), m45.getSkewX(), m45.getScaleY());
    Sk4f T = Sk4f(m.getTranslateX(), m.getTranslateY(), m45.getTranslateX(), m45.getTranslateY());

    // Map the path's points to device space and accumulate bounding boxes.
    Sk4f devPt = SkNx_fma(Y, Sk4f(pts[0].y()), T);
    devPt = SkNx_fma(X, Sk4f(pts[0].x()), devPt);
    Sk4f topLeft = devPt;
    Sk4f bottomRight = devPt;

    // Store all 4 values [dev.x, dev.y, dev45.x, dev45.y]. We are only interested in the first
    // two, and will overwrite [dev45.x, dev45.y] with the next point. This is why the dst buffer
    // must be at least one larger than the number of points.
    devPt.store(&outDevPts[0]);

    for (int i = 1; i < numPts; ++i) {
        devPt = SkNx_fma(Y, Sk4f(pts[i].y()), T);
        devPt = SkNx_fma(X, Sk4f(pts[i].x()), devPt);
        topLeft = Sk4f::Min(topLeft, devPt);
        bottomRight = Sk4f::Max(bottomRight, devPt);
        devPt.store(&outDevPts[i]);
    }

    if (!(Sk4f(0) == topLeft*0).allTrue() || !(Sk4f(0) == bottomRight*0).allTrue()) {
        // The bounds are infinite or NaN.
        return false;
    }

    SkPoint topLeftPts[2], bottomRightPts[2];
    topLeft.store(topLeftPts);
    bottomRight.store(bottomRightPts);

    const SkRect& devBounds = SkRect::MakeLTRB(
            topLeftPts[0].x(), topLeftPts[0].y(), bottomRightPts[0].x(), bottomRightPts[0].y());
    const SkRect& devBounds45 = SkRect::MakeLTRB(
            topLeftPts[1].x(), topLeftPts[1].y(), bottomRightPts[1].x(), bottomRightPts[1].y());

    octoBounds->set(devBounds, devBounds45);
    return true;
}

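// Transforms the given shape into device space, parses its fill or stroke into the filler/stroker,
// and reserves a location for it in the current rendered atlas. Returns that atlas, or null if the
// shape is empty, has non-finite bounds, or falls entirely outside the clip.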
GrCCAtlas* GrCCPerFlushResources::renderShapeInAtlas(
        const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
        GrOctoBounds* octoBounds, SkIRect* devIBounds, SkIVector* devToAtlasOffset) {
    SkASSERT(this->isMapped());
    SkASSERT(fNextPathInstanceIdx < fEndPathInstance);

    SkPath path;
    shape.asPath(&path);
    if (path.isEmpty()) {
        SkDEBUGCODE(--fEndPathInstance);
        SkDEBUGCODE(--fEndStencilResolveInstance);
        return nullptr;
    }
    if (!transform_path_pts(m, path, fLocalDevPtsBuffer, octoBounds)) {
        // The transformed path had infinite or NaN bounds.
        SkDEBUGCODE(--fEndPathInstance);
        SkDEBUGCODE(--fEndStencilResolveInstance);
        return nullptr;
    }

    const SkStrokeRec& stroke = shape.style().strokeRec();
    if (!stroke.isFillStyle()) {
        float r = SkStrokeRec::GetInflationRadius(
                stroke.getJoin(), stroke.getMiter(), stroke.getCap(), strokeDevWidth);
        octoBounds->outset(r);
    }

    GrScissorTest enableScissorInAtlas;
    if (clipIBounds.contains(octoBounds->bounds())) {
        enableScissorInAtlas = GrScissorTest::kDisabled;
    } else if (octoBounds->clip(clipIBounds)) {
        enableScissorInAtlas = GrScissorTest::kEnabled;
    } else {
        // The clip and octo bounds do not intersect. Draw nothing.
        SkDEBUGCODE(--fEndPathInstance);
        SkDEBUGCODE(--fEndStencilResolveInstance);
        return nullptr;
    }
    octoBounds->roundOut(devIBounds);
    SkASSERT(clipIBounds.contains(*devIBounds));

    this->placeRenderedPathInAtlas(*devIBounds, enableScissorInAtlas, devToAtlasOffset);

    GrFillRule fillRule;
    if (stroke.isFillStyle()) {
        SkASSERT(0 == strokeDevWidth);
        fFiller.parseDeviceSpaceFill(path, fLocalDevPtsBuffer.begin(), enableScissorInAtlas,
                                     *devIBounds, *devToAtlasOffset);
        fillRule = GrFillRuleForSkPath(path);
    } else {
        // Stroke-and-fill is not yet supported.
        SkASSERT(SkStrokeRec::kStroke_Style == stroke.getStyle() || stroke.isHairlineStyle());
        SkASSERT(!stroke.isHairlineStyle() || 1 == strokeDevWidth);
        fStroker.parseDeviceSpaceStroke(
                path, fLocalDevPtsBuffer.begin(), stroke, strokeDevWidth, enableScissorInAtlas,
                *devIBounds, *devToAtlasOffset);
        fillRule = GrFillRule::kNonzero;
    }

    if (GrCCAtlas::CoverageType::kA8_Multisample == this->renderedPathCoverageType()) {
        this->recordStencilResolveInstance(*devIBounds, *devToAtlasOffset, fillRule);
    }

    return &fRenderedAtlasStack.current();
}

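// Parses a path that is already in device space into the filler and reserves a location for it in
// the current rendered atlas. Returns that atlas, or null if the path is empty or entirely clipped
// out.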
const GrCCAtlas* GrCCPerFlushResources::renderDeviceSpacePathInAtlas(
        const SkIRect& clipIBounds, const SkPath& devPath, const SkIRect& devPathIBounds,
        GrFillRule fillRule, SkIVector* devToAtlasOffset) {
    SkASSERT(this->isMapped());

    if (devPath.isEmpty()) {
        SkDEBUGCODE(--fEndStencilResolveInstance);
        return nullptr;
    }

    GrScissorTest enableScissorInAtlas;
    SkIRect clippedPathIBounds;
    if (clipIBounds.contains(devPathIBounds)) {
        clippedPathIBounds = devPathIBounds;
        enableScissorInAtlas = GrScissorTest::kDisabled;
    } else if (clippedPathIBounds.intersect(clipIBounds, devPathIBounds)) {
        enableScissorInAtlas = GrScissorTest::kEnabled;
    } else {
        // The clip and path bounds do not intersect. Draw nothing.
        SkDEBUGCODE(--fEndStencilResolveInstance);
        return nullptr;
    }

    this->placeRenderedPathInAtlas(clippedPathIBounds, enableScissorInAtlas, devToAtlasOffset);
    fFiller.parseDeviceSpaceFill(devPath, SkPathPriv::PointData(devPath), enableScissorInAtlas,
                                 clippedPathIBounds, *devToAtlasOffset);

    // In MSAA mode we also record an internal draw instance that will be used to resolve stencil
    // winding values to coverage when the atlas is generated.
    if (GrCCAtlas::CoverageType::kA8_Multisample == this->renderedPathCoverageType()) {
        this->recordStencilResolveInstance(clippedPathIBounds, *devToAtlasOffset, fillRule);
    }

    return &fRenderedAtlasStack.current();
}

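// Reserves a rectangle for the path in the current rendered atlas. If the rectangle does not fit,
// the current atlas is retired (closing the filler's and stroker's current batches) and a new one
// is started.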
void GrCCPerFlushResources::placeRenderedPathInAtlas(
        const SkIRect& clippedPathIBounds, GrScissorTest scissorTest, SkIVector* devToAtlasOffset) {
    if (GrCCAtlas* retiredAtlas =
                fRenderedAtlasStack.addRect(clippedPathIBounds, devToAtlasOffset)) {
        // We did not fit in the previous coverage count atlas and it was retired. Close the
        // filler's and stroker's current batches (which do not yet include the path we are about
        // to parse). We will render these batches into the retired atlas during finalize().
        retiredAtlas->setFillBatchID(fFiller.closeCurrentBatch());
        retiredAtlas->setStrokeBatchID(fStroker.closeCurrentBatch());
        retiredAtlas->setEndStencilResolveInstance(fNextStencilResolveInstanceIdx);
    }
}

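// MSAA mode only: records the rect for an instanced "resolve" draw that converts stencil winding
// values into coverage for this path once the atlas is rendered. Even/odd rects are reversed so
// the two-sided stencil settings select the even/odd (back-face) state.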
void GrCCPerFlushResources::recordStencilResolveInstance(
        const SkIRect& clippedPathIBounds, const SkIVector& devToAtlasOffset, GrFillRule fillRule) {
    SkASSERT(GrCCAtlas::CoverageType::kA8_Multisample == this->renderedPathCoverageType());
    SkASSERT(fNextStencilResolveInstanceIdx < fEndStencilResolveInstance);

    SkIRect atlasIBounds = clippedPathIBounds.makeOffset(
            devToAtlasOffset.x(), devToAtlasOffset.y());
    if (GrFillRule::kEvenOdd == fillRule) {
        // Make even/odd fills counterclockwise. The resolve draw uses two-sided stencil, with
        // "nonzero" settings in front and "even/odd" settings in back.
        std::swap(atlasIBounds.fLeft, atlasIBounds.fRight);
    }
    fStencilResolveInstanceData[fNextStencilResolveInstanceIdx++] = {
            (int16_t)atlasIBounds.left(), (int16_t)atlasIBounds.top(),
            (int16_t)atlasIBounds.right(), (int16_t)atlasIBounds.bottom()};
}

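// Unmaps the instance buffers, closes out the final atlases, builds the filler's and stroker's GPU
// buffers, then issues the draw ops that copy cached paths and render the new atlases. Returns
// false if the fill or stroke buffers could not be prepared.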
bool GrCCPerFlushResources::finalize(GrOnFlushResourceProvider* onFlushRP) {
    SkASSERT(this->isMapped());
    SkASSERT(fNextPathInstanceIdx == fEndPathInstance);
    SkASSERT(fNextCopyInstanceIdx == fEndCopyInstance);
    SkASSERT(GrCCAtlas::CoverageType::kA8_Multisample != this->renderedPathCoverageType() ||
             fNextStencilResolveInstanceIdx == fEndStencilResolveInstance);

    fInstanceBuffer->unmap();
    fPathInstanceData = nullptr;

    if (fStencilResolveBuffer) {
        fStencilResolveBuffer->unmap();
        fStencilResolveInstanceData = nullptr;
    }

    if (!fCopyAtlasStack.empty()) {
        fCopyAtlasStack.current().setFillBatchID(fCopyPathRanges.count());
        fCurrCopyAtlasRangesIdx = fCopyPathRanges.count();
    }
    if (!fRenderedAtlasStack.empty()) {
        fRenderedAtlasStack.current().setFillBatchID(fFiller.closeCurrentBatch());
        fRenderedAtlasStack.current().setStrokeBatchID(fStroker.closeCurrentBatch());
        fRenderedAtlasStack.current().setEndStencilResolveInstance(fNextStencilResolveInstanceIdx);
    }

    // Build the GPU buffers to render path coverage counts. (This must not happen until after the
    // final calls to fFiller/fStroker.closeCurrentBatch().)
    if (!fFiller.prepareToDraw(onFlushRP)) {
        return false;
    }
    if (!fStroker.prepareToDraw(onFlushRP)) {
        return false;
    }

    // Draw the copies from coverage count or msaa atlas(es) into 8-bit cached atlas(es).
    int copyRangeIdx = 0;
    int baseCopyInstance = 0;
    for (GrCCAtlasStack::Iter atlas(fCopyAtlasStack); atlas.next();) {
        int endCopyRange = atlas->getFillBatchID();
        SkASSERT(endCopyRange > copyRangeIdx);

        auto rtc = atlas->makeRenderTargetContext(onFlushRP);
        for (; copyRangeIdx < endCopyRange; ++copyRangeIdx) {
            const CopyPathRange& copyRange = fCopyPathRanges[copyRangeIdx];
            int endCopyInstance = baseCopyInstance + copyRange.fCount;
            if (rtc) {
                auto op = CopyAtlasOp::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), copyRange.fSrcProxy,
                        baseCopyInstance, endCopyInstance, atlas->drawBounds());
                rtc->addDrawOp(GrNoClip(), std::move(op));
            }
            baseCopyInstance = endCopyInstance;
        }
    }
    SkASSERT(fCopyPathRanges.count() == copyRangeIdx);
    SkASSERT(fNextCopyInstanceIdx == baseCopyInstance);
    SkASSERT(baseCopyInstance == fEndCopyInstance);

    // Render the coverage count atlas(es).
    int baseStencilResolveInstance = 0;
    for (GrCCAtlasStack::Iter atlas(fRenderedAtlasStack); atlas.next();) {
        // Copies will be finished by the time we get to rendering new atlases. See if we can
        // recycle any previous invalidated atlas textures instead of creating new ones.
        sk_sp<GrTexture> backingTexture;
        for (sk_sp<GrTexture>& texture : fRecyclableAtlasTextures) {
            if (texture && atlas->currentHeight() == texture->height() &&
                    atlas->currentWidth() == texture->width()) {
                backingTexture = skstd::exchange(texture, nullptr);
                break;
            }
        }

        if (auto rtc = atlas->makeRenderTargetContext(onFlushRP, std::move(backingTexture))) {
            std::unique_ptr<GrDrawOp> op;
            if (CoverageType::kA8_Multisample == fRenderedAtlasStack.coverageType()) {
                op = GrStencilAtlasOp::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), atlas->getFillBatchID(),
                        atlas->getStrokeBatchID(), baseStencilResolveInstance,
                        atlas->getEndStencilResolveInstance(), atlas->drawBounds());
            } else if (onFlushRP->caps()->shaderCaps()->geometryShaderSupport()) {
                op = RenderAtlasOp<GrGSCoverageProcessor>::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), atlas->getFillBatchID(),
                        atlas->getStrokeBatchID(), atlas->drawBounds());
            } else {
                op = RenderAtlasOp<GrVSCoverageProcessor>::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), atlas->getFillBatchID(),
                        atlas->getStrokeBatchID(), atlas->drawBounds());
            }
            rtc->addDrawOp(GrNoClip(), std::move(op));
            if (rtc->proxy()->requiresManualMSAAResolve()) {
                onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->proxy()->asTextureProxy()),
                                                 GrSurfaceProxy::ResolveFlags::kMSAA);
            }
        }

        SkASSERT(atlas->getEndStencilResolveInstance() >= baseStencilResolveInstance);
        baseStencilResolveInstance = atlas->getEndStencilResolveInstance();
    }
    SkASSERT(GrCCAtlas::CoverageType::kA8_Multisample != this->renderedPathCoverageType() ||
             baseStencilResolveInstance == fEndStencilResolveInstance);

    return true;
}

void GrCCPerFlushResourceSpecs::cancelCopies() {
    // Convert copies to cached draws.
    fNumCachedPaths += fNumCopiedPaths[kFillIdx] + fNumCopiedPaths[kStrokeIdx];
    fNumCopiedPaths[kFillIdx] = fNumCopiedPaths[kStrokeIdx] = 0;
    fCopyPathStats[kFillIdx] = fCopyPathStats[kStrokeIdx] = GrCCRenderedPathStats();
    fCopyAtlasSpecs = GrCCAtlas::Specs();
}