/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCCPerFlushResources.h"

#include "include/private/GrRecordingContext.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrOnFlushResourceProvider.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/GrSurfaceContextPriv.h"
#include "src/gpu/ccpr/GrCCPathCache.h"
#include "src/gpu/ccpr/GrGSCoverageProcessor.h"
#include "src/gpu/ccpr/GrSampleMaskProcessor.h"
#include "src/gpu/ccpr/GrVSCoverageProcessor.h"
#include "src/gpu/geometry/GrShape.h"
#include <algorithm>

using CoverageType = GrCCAtlas::CoverageType;
using FillBatchID = GrCCFiller::BatchID;
using StrokeBatchID = GrCCStroker::BatchID;
using PathInstance = GrCCPathProcessor::Instance;

static constexpr int kFillIdx = GrCCPerFlushResourceSpecs::kFillIdx;
static constexpr int kStrokeIdx = GrCCPerFlushResourceSpecs::kStrokeIdx;

namespace {

// Base class for an Op that renders a CCPR atlas.
class AtlasOp : public GrDrawOp {
public:
    FixedFunctionFlags fixedFunctionFlags() const override { return FixedFunctionFlags::kNone; }
    GrProcessorSet::Analysis finalize(const GrCaps&, const GrAppliedClip*,
                                      bool hasMixedSampledCoverage, GrClampType) override {
        return GrProcessorSet::EmptySetAnalysis();
    }
    CombineResult onCombineIfPossible(GrOp* other, GrRecordingContext::Arenas*,
                                      const GrCaps&) override {
        // We will only make multiple copy ops if they have different source proxies.
        // TODO: make use of texture chaining.
        return CombineResult::kCannotCombine;
    }

protected:
    AtlasOp(uint32_t classID, sk_sp<const GrCCPerFlushResources> resources,
            const SkISize& drawBounds)
            : GrDrawOp(classID)
            , fResources(std::move(resources)) {
        this->setBounds(SkRect::MakeIWH(drawBounds.width(), drawBounds.height()),
                        GrOp::HasAABloat::kNo, GrOp::IsHairline::kNo);
    }

    const sk_sp<const GrCCPerFlushResources> fResources;

private:
    void onPrePrepare(GrRecordingContext*,
                      const GrSurfaceProxyView* writeView,
                      GrAppliedClip*,
                      const GrXferProcessor::DstProxyView&) final {}
    void onPrepare(GrOpFlushState*) final {}
};

// Copies paths from a cached coverage count or msaa atlas into an 8-bit literal-coverage atlas.
class CopyAtlasOp : public AtlasOp {
public:
    DEFINE_OP_CLASS_ID

    static std::unique_ptr<GrDrawOp> Make(
            GrRecordingContext* context, sk_sp<const GrCCPerFlushResources> resources,
            sk_sp<GrTextureProxy> copyProxy, int baseInstance, int endInstance,
            const SkISize& drawBounds) {
        GrOpMemoryPool* pool = context->priv().opMemoryPool();

        return pool->allocate<CopyAtlasOp>(std::move(resources), std::move(copyProxy), baseInstance,
                                           endInstance, drawBounds);
    }

    const char* name() const override { return "CopyAtlasOp (CCPR)"; }

    void visitProxies(const VisitProxyFunc& fn) const override {
        fn(fSrcProxy.get(), GrMipMapped::kNo);
    }

    void onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) override {
        SkASSERT(fSrcProxy);
        SkASSERT(fSrcProxy->isInstantiated());

        auto coverageMode = GrCCAtlas::CoverageTypeToPathCoverageMode(
                fResources->renderedPathCoverageType());
        GrColorType ct = GrCCAtlas::CoverageTypeToColorType(fResources->renderedPathCoverageType());
        GrSwizzle swizzle = flushState->caps().getReadSwizzle(fSrcProxy->backendFormat(), ct);
        GrCCPathProcessor pathProc(coverageMode, fSrcProxy->peekTexture(), swizzle,
                                   GrCCAtlas::kTextureOrigin);

        GrPipeline pipeline(GrScissorTest::kDisabled, SkBlendMode::kSrc,
                            flushState->drawOpArgs().writeSwizzle());

        pathProc.drawPaths(flushState, pipeline, *fSrcProxy, *fResources, fBaseInstance,
                           fEndInstance, this->bounds());
    }

private:
    friend class ::GrOpMemoryPool; // for ctor

    CopyAtlasOp(sk_sp<const GrCCPerFlushResources> resources, sk_sp<GrTextureProxy> srcProxy,
                int baseInstance, int endInstance, const SkISize& drawBounds)
            : AtlasOp(ClassID(), std::move(resources), drawBounds)
            , fSrcProxy(srcProxy)
            , fBaseInstance(baseInstance)
            , fEndInstance(endInstance) {
    }
    sk_sp<GrTextureProxy> fSrcProxy;
    const int fBaseInstance;
    const int fEndInstance;
};

// Renders coverage counts to a CCPR atlas using the resources' pre-filled GrCCPathParser.
template<typename ProcessorType> class RenderAtlasOp : public AtlasOp {
public:
    DEFINE_OP_CLASS_ID

    static std::unique_ptr<GrDrawOp> Make(
            GrRecordingContext* context, sk_sp<const GrCCPerFlushResources> resources,
            FillBatchID fillBatchID, StrokeBatchID strokeBatchID, const SkISize& drawBounds) {
        GrOpMemoryPool* pool = context->priv().opMemoryPool();

        return pool->allocate<RenderAtlasOp>(
                std::move(resources), fillBatchID, strokeBatchID, drawBounds);
    }

    // GrDrawOp interface.
    const char* name() const override { return "RenderAtlasOp (CCPR)"; }

    void onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) override {
        ProcessorType proc;
        GrPipeline pipeline(GrScissorTest::kEnabled, SkBlendMode::kPlus,
                            flushState->drawOpArgs().writeSwizzle());
        fResources->filler().drawFills(flushState, &proc, pipeline, fFillBatchID, fDrawBounds);
        fResources->stroker().drawStrokes(flushState, &proc, fStrokeBatchID, fDrawBounds);
    }

private:
    friend class ::GrOpMemoryPool; // for ctor

    RenderAtlasOp(sk_sp<const GrCCPerFlushResources> resources, FillBatchID fillBatchID,
                  StrokeBatchID strokeBatchID, const SkISize& drawBounds)
            : AtlasOp(ClassID(), std::move(resources), drawBounds)
            , fFillBatchID(fillBatchID)
            , fStrokeBatchID(strokeBatchID)
            , fDrawBounds(SkIRect::MakeWH(drawBounds.width(), drawBounds.height())) {
    }

    const FillBatchID fFillBatchID;
    const StrokeBatchID fStrokeBatchID;
    const SkIRect fDrawBounds;
};

}
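// Total number of PathInstance slots the per-flush instance buffer must hold for these specs.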
static int inst_buffer_count(const GrCCPerFlushResourceSpecs& specs) {
    return specs.fNumCachedPaths +
           // Copies get two instances per draw: 1 copy + 1 draw.
           (specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]) * 2 +
           specs.fNumRenderedPaths[kFillIdx] + specs.fNumRenderedPaths[kStrokeIdx];
           // No clips in instance buffers.
}

GrCCPerFlushResources::GrCCPerFlushResources(
        GrOnFlushResourceProvider* onFlushRP, CoverageType coverageType,
        const GrCCPerFlushResourceSpecs& specs)
        // Overallocate by one point so we can call Sk4f::Store at the final SkPoint in the array.
        // (See transform_path_pts below.)
        // FIXME: instead use built-in instructions to write only the first two lanes of an Sk4f.
        : fLocalDevPtsBuffer(std::max(specs.fRenderedPathStats[kFillIdx].fMaxPointsPerPath,
                                      specs.fRenderedPathStats[kStrokeIdx].fMaxPointsPerPath) + 1)
        , fFiller((CoverageType::kFP16_CoverageCount == coverageType)
                          ? GrCCFiller::Algorithm::kCoverageCount
                          : GrCCFiller::Algorithm::kStencilWindingCount,
                  specs.fNumRenderedPaths[kFillIdx] + specs.fNumClipPaths,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalSkPoints,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalSkVerbs,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalConicWeights)
        , fStroker(specs.fNumRenderedPaths[kStrokeIdx],
                   specs.fRenderedPathStats[kStrokeIdx].fNumTotalSkPoints,
                   specs.fRenderedPathStats[kStrokeIdx].fNumTotalSkVerbs)
        , fCopyAtlasStack(CoverageType::kA8_LiteralCoverage, specs.fCopyAtlasSpecs,
                          onFlushRP->caps())
        , fRenderedAtlasStack(coverageType, specs.fRenderedAtlasSpecs, onFlushRP->caps())
        , fIndexBuffer(GrCCPathProcessor::FindIndexBuffer(onFlushRP))
        , fVertexBuffer(GrCCPathProcessor::FindVertexBuffer(onFlushRP))
        , fNextCopyInstanceIdx(0)
        , fNextPathInstanceIdx(
                specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]) {
    if (!fIndexBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR index buffer. No paths will be drawn.\n");
        return;
    }
    if (!fVertexBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR vertex buffer. No paths will be drawn.\n");
        return;
    }
    fPathInstanceBuffer.resetAndMapBuffer(onFlushRP,
                                          inst_buffer_count(specs) * sizeof(PathInstance));
    if (!fPathInstanceBuffer.gpuBuffer()) {
        SkDebugf("WARNING: failed to allocate CCPR instance buffer. No paths will be drawn.\n");
        return;
    }

    if (CoverageType::kA8_Multisample == coverageType) {
        int numRenderedPaths =
                specs.fNumRenderedPaths[kFillIdx] + specs.fNumRenderedPaths[kStrokeIdx] +
                specs.fNumClipPaths;
        fStencilResolveBuffer.resetAndMapBuffer(
                onFlushRP, numRenderedPaths * sizeof(GrStencilAtlasOp::ResolveRectInstance));
        if (!fStencilResolveBuffer.gpuBuffer()) {
            SkDebugf("WARNING: failed to allocate CCPR stencil resolve buffer. "
                     "No paths will be drawn.\n");
            return;
        }
        SkDEBUGCODE(fEndStencilResolveInstance = numRenderedPaths);
    }

    SkDEBUGCODE(fEndCopyInstance =
                        specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]);
    SkDEBUGCODE(fEndPathInstance = inst_buffer_count(specs));
}

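// Converts a cached path entry that currently lives in a coverage-count or MSAA atlas so it will
// instead be drawn from an 8-bit literal-coverage atlas, recording the copy instance that performs
// the transfer during finalize().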
void GrCCPerFlushResources::upgradeEntryToLiteralCoverageAtlas(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP, GrCCPathCacheEntry* entry,
        GrFillRule fillRule) {
    using ReleaseAtlasResult = GrCCPathCacheEntry::ReleaseAtlasResult;
    SkASSERT(this->isMapped());
    SkASSERT(fNextCopyInstanceIdx < fEndCopyInstance);

    const GrCCCachedAtlas* cachedAtlas = entry->cachedAtlas();
    SkASSERT(cachedAtlas);
    SkASSERT(cachedAtlas->getOnFlushProxy());

    if (CoverageType::kA8_LiteralCoverage == cachedAtlas->coverageType()) {
        // This entry has already been upgraded to literal coverage. The path must have been drawn
        // multiple times during the flush.
        SkDEBUGCODE(--fEndCopyInstance);
        return;
    }

    SkIVector newAtlasOffset;
    if (GrCCAtlas* retiredAtlas = fCopyAtlasStack.addRect(entry->devIBounds(), &newAtlasOffset)) {
        // We did not fit in the previous copy atlas and it was retired. We will render the ranges
        // up until fCopyPathRanges.count() into the retired atlas during finalize().
        retiredAtlas->setFillBatchID(fCopyPathRanges.count());
        fCurrCopyAtlasRangesIdx = fCopyPathRanges.count();
    }

    this->recordCopyPathInstance(
            *entry, newAtlasOffset, fillRule, sk_ref_sp(cachedAtlas->getOnFlushProxy()));

    sk_sp<GrTexture> previousAtlasTexture =
            sk_ref_sp(cachedAtlas->getOnFlushProxy()->peekTexture());
    GrCCAtlas* newAtlas = &fCopyAtlasStack.current();
    if (ReleaseAtlasResult::kDidInvalidateFromCache ==
                entry->upgradeToLiteralCoverageAtlas(pathCache, onFlushRP, newAtlas, newAtlasOffset)) {
        // This texture just got booted out of the cache. Keep it around, in case we might be able
        // to recycle it for a new atlas. We can recycle it because copying happens before rendering
        // new paths, and every path from the atlas that we're planning to use this flush will be
        // copied to a new atlas. We'll never copy some and leave others.
        fRecyclableAtlasTextures.push_back(std::move(previousAtlasTexture));
    }
}

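// Appends a copy instance for the given cached path, then groups it with any existing instances
// that read from the same source proxy so each CopyAtlasOp can draw one contiguous range.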
void GrCCPerFlushResources::recordCopyPathInstance(
        const GrCCPathCacheEntry& entry, const SkIVector& newAtlasOffset, GrFillRule fillRule,
        sk_sp<GrTextureProxy> srcProxy) {
    SkASSERT(fNextCopyInstanceIdx < fEndCopyInstance);

    // Write the instance at the back of the array.
    int currentInstanceIdx = fNextCopyInstanceIdx++;
    constexpr uint64_t kWhite = (((uint64_t) SK_Half1) <<  0) |
                                (((uint64_t) SK_Half1) << 16) |
                                (((uint64_t) SK_Half1) << 32) |
                                (((uint64_t) SK_Half1) << 48);
    fPathInstanceBuffer[currentInstanceIdx].set(entry, newAtlasOffset, kWhite, fillRule);

    // Percolate the instance forward until it's contiguous with other instances that share the same
    // proxy.
    for (int i = fCopyPathRanges.count() - 1; i >= fCurrCopyAtlasRangesIdx; --i) {
        if (fCopyPathRanges[i].fSrcProxy == srcProxy) {
            ++fCopyPathRanges[i].fCount;
            return;
        }
        int rangeFirstInstanceIdx = currentInstanceIdx - fCopyPathRanges[i].fCount;
        std::swap(fPathInstanceBuffer[rangeFirstInstanceIdx],
                  fPathInstanceBuffer[currentInstanceIdx]);
        currentInstanceIdx = rangeFirstInstanceIdx;
    }

    // An instance with this particular proxy did not yet exist in the array. Add a range for it,
    // first moving any later ranges back to make space for it at fCurrCopyAtlasRangesIdx.
    fCopyPathRanges.push_back();
    std::move_backward(fCopyPathRanges.begin() + fCurrCopyAtlasRangesIdx,
                       fCopyPathRanges.end() - 1,
                       fCopyPathRanges.end());
    fCopyPathRanges[fCurrCopyAtlasRangesIdx] = {std::move(srcProxy), 1};
}

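// Maps the path's points into device space with the matrix 'm', writing the results to outDevPts,
// and accumulates both the axis-aligned and 45-degree-rotated device-space bounding boxes that
// define the path's circumscribing octagon. Returns false if the bounds are non-finite.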
static bool transform_path_pts(
        const SkMatrix& m, const SkPath& path, const SkAutoSTArray<32, SkPoint>& outDevPts,
        GrOctoBounds* octoBounds) {
    const SkPoint* pts = SkPathPriv::PointData(path);
    int numPts = path.countPoints();
    SkASSERT(numPts + 1 <= outDevPts.count());
    SkASSERT(numPts);

    // m45 transforms path points into "45 degree" device space. A bounding box in this space gives
    // the circumscribing octagon's diagonals. We could use SK_ScalarRoot2Over2, but an orthonormal
    // transform is not necessary as long as the shader uses the correct inverse.
    SkMatrix m45;
    m45.setSinCos(1, 1);
    m45.preConcat(m);

    // X,Y,T are two parallel view matrices that accumulate two bounding boxes as they map points:
    // device-space bounds and "45 degree" device-space bounds (| 1 -1 | * devCoords).
    //                                                          | 1  1 |
    Sk4f X = Sk4f(m.getScaleX(), m.getSkewY(), m45.getScaleX(), m45.getSkewY());
    Sk4f Y = Sk4f(m.getSkewX(), m.getScaleY(), m45.getSkewX(), m45.getScaleY());
    Sk4f T = Sk4f(m.getTranslateX(), m.getTranslateY(), m45.getTranslateX(), m45.getTranslateY());

    // Map the path's points to device space and accumulate bounding boxes.
    Sk4f devPt = SkNx_fma(Y, Sk4f(pts[0].y()), T);
    devPt = SkNx_fma(X, Sk4f(pts[0].x()), devPt);
    Sk4f topLeft = devPt;
    Sk4f bottomRight = devPt;

    // Store all 4 values [dev.x, dev.y, dev45.x, dev45.y]. We are only interested in the first two,
    // and will overwrite [dev45.x, dev45.y] with the next point. This is why the dst buffer must
    // be at least one larger than the number of points.
    devPt.store(&outDevPts[0]);

    for (int i = 1; i < numPts; ++i) {
        devPt = SkNx_fma(Y, Sk4f(pts[i].y()), T);
        devPt = SkNx_fma(X, Sk4f(pts[i].x()), devPt);
        topLeft = Sk4f::Min(topLeft, devPt);
        bottomRight = Sk4f::Max(bottomRight, devPt);
        devPt.store(&outDevPts[i]);
    }

    if (!(Sk4f(0) == topLeft*0).allTrue() || !(Sk4f(0) == bottomRight*0).allTrue()) {
        // The bounds are infinite or NaN.
        return false;
    }

    SkPoint topLeftPts[2], bottomRightPts[2];
    topLeft.store(topLeftPts);
    bottomRight.store(bottomRightPts);

    const SkRect& devBounds = SkRect::MakeLTRB(
            topLeftPts[0].x(), topLeftPts[0].y(), bottomRightPts[0].x(), bottomRightPts[0].y());
    const SkRect& devBounds45 = SkRect::MakeLTRB(
            topLeftPts[1].x(), topLeftPts[1].y(), bottomRightPts[1].x(), bottomRightPts[1].y());

    octoBounds->set(devBounds, devBounds45);
    return true;
}

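// Transforms the given fill or stroke shape into device space, parses it into the filler or
// stroker, and assigns it a location in the current rendered atlas. Returns null if the shape is
// empty, has non-finite bounds, or lies entirely outside the clip.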
GrCCAtlas* GrCCPerFlushResources::renderShapeInAtlas(
        const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
        GrOctoBounds* octoBounds, SkIRect* devIBounds, SkIVector* devToAtlasOffset) {
    SkASSERT(this->isMapped());
    SkASSERT(fNextPathInstanceIdx < fEndPathInstance);

    SkPath path;
    shape.asPath(&path);
    if (path.isEmpty()) {
        SkDEBUGCODE(--fEndPathInstance);
        SkDEBUGCODE(--fEndStencilResolveInstance);
        return nullptr;
    }
    if (!transform_path_pts(m, path, fLocalDevPtsBuffer, octoBounds)) {
        // The transformed path had infinite or NaN bounds.
        SkDEBUGCODE(--fEndPathInstance);
        SkDEBUGCODE(--fEndStencilResolveInstance);
        return nullptr;
    }

    const SkStrokeRec& stroke = shape.style().strokeRec();
    if (!stroke.isFillStyle()) {
        float r = SkStrokeRec::GetInflationRadius(
                stroke.getJoin(), stroke.getMiter(), stroke.getCap(), strokeDevWidth);
        octoBounds->outset(r);
    }

    GrScissorTest enableScissorInAtlas;
    if (clipIBounds.contains(octoBounds->bounds())) {
        enableScissorInAtlas = GrScissorTest::kDisabled;
    } else if (octoBounds->clip(clipIBounds)) {
        enableScissorInAtlas = GrScissorTest::kEnabled;
    } else {
        // The clip and octo bounds do not intersect. Draw nothing.
        SkDEBUGCODE(--fEndPathInstance);
        SkDEBUGCODE(--fEndStencilResolveInstance);
        return nullptr;
    }
    octoBounds->roundOut(devIBounds);
    SkASSERT(clipIBounds.contains(*devIBounds));

    this->placeRenderedPathInAtlas(*devIBounds, enableScissorInAtlas, devToAtlasOffset);

    GrFillRule fillRule;
    if (stroke.isFillStyle()) {
        SkASSERT(0 == strokeDevWidth);
        fFiller.parseDeviceSpaceFill(path, fLocalDevPtsBuffer.begin(), enableScissorInAtlas,
                                     *devIBounds, *devToAtlasOffset);
        fillRule = GrFillRuleForSkPath(path);
    } else {
        // Stroke-and-fill is not yet supported.
        SkASSERT(SkStrokeRec::kStroke_Style == stroke.getStyle() || stroke.isHairlineStyle());
        SkASSERT(!stroke.isHairlineStyle() || 1 == strokeDevWidth);
        fStroker.parseDeviceSpaceStroke(
                path, fLocalDevPtsBuffer.begin(), stroke, strokeDevWidth, enableScissorInAtlas,
                *devIBounds, *devToAtlasOffset);
        fillRule = GrFillRule::kNonzero;
    }

    if (GrCCAtlas::CoverageType::kA8_Multisample == this->renderedPathCoverageType()) {
        this->recordStencilResolveInstance(*devIBounds, *devToAtlasOffset, fillRule);
    }

    return &fRenderedAtlasStack.current();
}

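// Renders a path that is already in device space (e.g. a clip path) into the current rendered
// atlas, clipping its bounds to clipIBounds. Returns null if the path is empty or falls entirely
// outside the clip.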
const GrCCAtlas* GrCCPerFlushResources::renderDeviceSpacePathInAtlas(
        const SkIRect& clipIBounds, const SkPath& devPath, const SkIRect& devPathIBounds,
        GrFillRule fillRule, SkIVector* devToAtlasOffset) {
    SkASSERT(this->isMapped());

    if (devPath.isEmpty()) {
        SkDEBUGCODE(--fEndStencilResolveInstance);
        return nullptr;
    }

    GrScissorTest enableScissorInAtlas;
    SkIRect clippedPathIBounds;
    if (clipIBounds.contains(devPathIBounds)) {
        clippedPathIBounds = devPathIBounds;
        enableScissorInAtlas = GrScissorTest::kDisabled;
    } else if (clippedPathIBounds.intersect(clipIBounds, devPathIBounds)) {
        enableScissorInAtlas = GrScissorTest::kEnabled;
    } else {
        // The clip and path bounds do not intersect. Draw nothing.
        SkDEBUGCODE(--fEndStencilResolveInstance);
        return nullptr;
    }

    this->placeRenderedPathInAtlas(clippedPathIBounds, enableScissorInAtlas, devToAtlasOffset);
    fFiller.parseDeviceSpaceFill(devPath, SkPathPriv::PointData(devPath), enableScissorInAtlas,
                                 clippedPathIBounds, *devToAtlasOffset);

    // In MSAA mode we also record an internal draw instance that will be used to resolve stencil
    // winding values to coverage when the atlas is generated.
    if (GrCCAtlas::CoverageType::kA8_Multisample == this->renderedPathCoverageType()) {
        this->recordStencilResolveInstance(clippedPathIBounds, *devToAtlasOffset, fillRule);
    }

    return &fRenderedAtlasStack.current();
}

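// Assigns the given bounds a location in the current rendered atlas. If the rect does not fit, the
// current atlas is retired and its fill/stroke batches and stencil-resolve range are closed off.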
void GrCCPerFlushResources::placeRenderedPathInAtlas(
        const SkIRect& clippedPathIBounds, GrScissorTest scissorTest, SkIVector* devToAtlasOffset) {
    if (GrCCAtlas* retiredAtlas =
                fRenderedAtlasStack.addRect(clippedPathIBounds, devToAtlasOffset)) {
        // We did not fit in the previous coverage count atlas and it was retired. Close the path
        // parser's current batch (which does not yet include the path we just parsed). We will
        // render this batch into the retired atlas during finalize().
        retiredAtlas->setFillBatchID(fFiller.closeCurrentBatch());
        retiredAtlas->setStrokeBatchID(fStroker.closeCurrentBatch());
        retiredAtlas->setEndStencilResolveInstance(fNextStencilResolveInstanceIdx);
    }
}

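// Records the rectangle that the stencil-resolve draw will use to convert this path's stencil
// winding values into coverage once its MSAA atlas has been rendered.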
void GrCCPerFlushResources::recordStencilResolveInstance(
        const SkIRect& clippedPathIBounds, const SkIVector& devToAtlasOffset, GrFillRule fillRule) {
    SkASSERT(GrCCAtlas::CoverageType::kA8_Multisample == this->renderedPathCoverageType());
    SkASSERT(fNextStencilResolveInstanceIdx < fEndStencilResolveInstance);

    SkIRect atlasIBounds = clippedPathIBounds.makeOffset(devToAtlasOffset);
    if (GrFillRule::kEvenOdd == fillRule) {
        // Make even/odd fills counterclockwise. The resolve draw uses two-sided stencil, with
        // "nonzero" settings in front and "even/odd" settings in back.
        std::swap(atlasIBounds.fLeft, atlasIBounds.fRight);
    }
    fStencilResolveBuffer[fNextStencilResolveInstanceIdx++] = {
            (int16_t)atlasIBounds.left(), (int16_t)atlasIBounds.top(),
            (int16_t)atlasIBounds.right(), (int16_t)atlasIBounds.bottom()};
}

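// Unmaps the instance buffers, builds the filler/stroker GPU buffers, and records the draw ops
// that copy cached paths and render this flush's atlases.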
bool GrCCPerFlushResources::finalize(GrOnFlushResourceProvider* onFlushRP) {
    SkASSERT(this->isMapped());
    SkASSERT(fNextPathInstanceIdx == fEndPathInstance);
    SkASSERT(fNextCopyInstanceIdx == fEndCopyInstance);
    SkASSERT(GrCCAtlas::CoverageType::kA8_Multisample != this->renderedPathCoverageType() ||
             fNextStencilResolveInstanceIdx == fEndStencilResolveInstance);

    fPathInstanceBuffer.unmapBuffer();

    if (fStencilResolveBuffer.gpuBuffer()) {
        fStencilResolveBuffer.unmapBuffer();
    }

    if (!fCopyAtlasStack.empty()) {
        fCopyAtlasStack.current().setFillBatchID(fCopyPathRanges.count());
        fCurrCopyAtlasRangesIdx = fCopyPathRanges.count();
    }
    if (!fRenderedAtlasStack.empty()) {
        fRenderedAtlasStack.current().setFillBatchID(fFiller.closeCurrentBatch());
        fRenderedAtlasStack.current().setStrokeBatchID(fStroker.closeCurrentBatch());
        fRenderedAtlasStack.current().setEndStencilResolveInstance(fNextStencilResolveInstanceIdx);
    }

    // Build the GPU buffers to render path coverage counts. (This must not happen until after the
    // final calls to fFiller/fStroker.closeCurrentBatch().)
    if (!fFiller.prepareToDraw(onFlushRP)) {
        return false;
    }
    if (!fStroker.prepareToDraw(onFlushRP)) {
        return false;
    }

    // Draw the copies from coverage count or msaa atlas(es) into 8-bit cached atlas(es).
    int copyRangeIdx = 0;
    int baseCopyInstance = 0;
    for (GrCCAtlas& atlas : fCopyAtlasStack.atlases()) {
        int endCopyRange = atlas.getFillBatchID();
        SkASSERT(endCopyRange > copyRangeIdx);

        auto rtc = atlas.instantiate(onFlushRP);
        for (; copyRangeIdx < endCopyRange; ++copyRangeIdx) {
            const CopyPathRange& copyRange = fCopyPathRanges[copyRangeIdx];
            int endCopyInstance = baseCopyInstance + copyRange.fCount;
            if (rtc) {
                auto op = CopyAtlasOp::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), copyRange.fSrcProxy,
                        baseCopyInstance, endCopyInstance, atlas.drawBounds());
                rtc->addDrawOp(GrNoClip(), std::move(op));
            }
            baseCopyInstance = endCopyInstance;
        }
    }
    SkASSERT(fCopyPathRanges.count() == copyRangeIdx);
    SkASSERT(fNextCopyInstanceIdx == baseCopyInstance);
    SkASSERT(baseCopyInstance == fEndCopyInstance);

    // Render the coverage count atlas(es).
    int baseStencilResolveInstance = 0;
    for (GrCCAtlas& atlas : fRenderedAtlasStack.atlases()) {
        // Copies will be finished by the time we get to rendering new atlases. See if we can
        // recycle any previous invalidated atlas textures instead of creating new ones.
        sk_sp<GrTexture> backingTexture;
        for (sk_sp<GrTexture>& texture : fRecyclableAtlasTextures) {
            if (texture && atlas.currentHeight() == texture->height() &&
                    atlas.currentWidth() == texture->width()) {
                backingTexture = skstd::exchange(texture, nullptr);
                break;
            }
        }

        if (auto rtc = atlas.instantiate(onFlushRP, std::move(backingTexture))) {
            std::unique_ptr<GrDrawOp> op;
            if (CoverageType::kA8_Multisample == fRenderedAtlasStack.coverageType()) {
                op = GrStencilAtlasOp::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), atlas.getFillBatchID(),
                        atlas.getStrokeBatchID(), baseStencilResolveInstance,
                        atlas.getEndStencilResolveInstance(), atlas.drawBounds());
            } else if (onFlushRP->caps()->shaderCaps()->geometryShaderSupport()) {
                op = RenderAtlasOp<GrGSCoverageProcessor>::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), atlas.getFillBatchID(),
                        atlas.getStrokeBatchID(), atlas.drawBounds());
            } else {
                op = RenderAtlasOp<GrVSCoverageProcessor>::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), atlas.getFillBatchID(),
                        atlas.getStrokeBatchID(), atlas.drawBounds());
            }
            rtc->addDrawOp(GrNoClip(), std::move(op));
            if (rtc->asSurfaceProxy()->requiresManualMSAAResolve()) {
                onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->asTextureProxy()),
                                                 GrSurfaceProxy::ResolveFlags::kMSAA);
            }
        }

        SkASSERT(atlas.getEndStencilResolveInstance() >= baseStencilResolveInstance);
        baseStencilResolveInstance = atlas.getEndStencilResolveInstance();
    }
    SkASSERT(GrCCAtlas::CoverageType::kA8_Multisample != this->renderedPathCoverageType() ||
             baseStencilResolveInstance == fEndStencilResolveInstance);

    return true;
}

void GrCCPerFlushResourceSpecs::cancelCopies() {
    // Convert copies to cached draws.
    fNumCachedPaths += fNumCopiedPaths[kFillIdx] + fNumCopiedPaths[kStrokeIdx];
    fNumCopiedPaths[kFillIdx] = fNumCopiedPaths[kStrokeIdx] = 0;
    fCopyPathStats[kFillIdx] = fCopyPathStats[kStrokeIdx] = GrCCRenderedPathStats();
    fCopyAtlasSpecs = GrCCAtlas::Specs();
}