/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCoverageCountingPathRenderer.h"

#include "GrCaps.h"
#include "GrClip.h"
#include "GrGpu.h"
#include "GrGpuCommandBuffer.h"
#include "GrOpFlushState.h"
#include "GrProxyProvider.h"
#include "GrRenderTargetOpList.h"
#include "GrStyle.h"
#include "GrTexture.h"
#include "SkMakeUnique.h"
#include "SkMatrix.h"
#include "SkPathOps.h"
#include "ccpr/GrCCClipProcessor.h"

// Shorthand for keeping line lengths under control with nested classes...
using CCPR = GrCoverageCountingPathRenderer;

// If a path spans more pixels than this, we need to crop it or else analytic AA can run out of
// fp32 precision.
static constexpr float kPathCropThreshold = 1 << 16;

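// Clips 'path' to 'cropbox' using SkPathOps and writes the result to 'out'.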
static void crop_path(const SkPath& path, const SkIRect& cropbox, SkPath* out) {
    SkPath cropPath;
    cropPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
}

bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    return shaderCaps.integerSupport() && shaderCaps.flatInterpolationSupport() &&
           caps.instanceAttribSupport() && GrCaps::kNone_MapFlags != caps.mapBufferFlags() &&
           caps.isConfigTexturable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_half_GrPixelConfig) &&
           !caps.blacklistCoverageCounting();
}

sk_sp<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps, bool drawCachablePaths) {
    auto ccpr = IsSupported(caps) ? new GrCoverageCountingPathRenderer(drawCachablePaths) : nullptr;
    return sk_sp<GrCoverageCountingPathRenderer>(ccpr);
}

GrPathRenderer::CanDrawPath GrCoverageCountingPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    if (args.fShape->hasUnstyledKey() && !fDrawCachablePaths) {
        return CanDrawPath::kNo;
    }

    if (!args.fShape->style().isSimpleFill() || args.fShape->inverseFilled() ||
        args.fViewMatrix->hasPerspective() || GrAAType::kCoverage != args.fAAType) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    args.fShape->asPath(&path);
    SkRect devBounds;
    SkIRect devIBounds;
    args.fViewMatrix->mapRect(&devBounds, path.getBounds());
    devBounds.roundOut(&devIBounds);
    if (!devIBounds.intersect(*args.fClipConservativeBounds)) {
        // Path is completely clipped away. Our code will eventually notice this before doing any
        // real work.
        return CanDrawPath::kYes;
    }

    if (devIBounds.height() * devIBounds.width() > 256 * 256) {
        // Large paths can blow up the atlas fast, and they are not ideal for a two-pass rendering
        // algorithm. Give the simpler direct renderers a chance before we commit to drawing them.
        return CanDrawPath::kAsBackup;
    }

    if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
        // Complex paths do better cached in an SDF, if the renderer will accept them.
        return CanDrawPath::kAsBackup;
    }

    return CanDrawPath::kYes;
}

bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);
    auto op = skstd::make_unique<DrawPathsOp>(this, args, args.fPaint.getColor());
    args.fRenderTargetContext->addDrawOp(*args.fClip, std::move(op));
    return true;
}

CCPR::DrawPathsOp::DrawPathsOp(GrCoverageCountingPathRenderer* ccpr, const DrawPathArgs& args,
                               GrColor color)
        : INHERITED(ClassID())
        , fCCPR(ccpr)
        , fSRGBFlags(GrPipeline::SRGBFlagsFromPaint(args.fPaint))
        , fProcessors(std::move(args.fPaint))
        , fTailDraw(&fHeadDraw)
        , fOwningRTPendingPaths(nullptr) {
    SkDEBUGCODE(++fCCPR->fPendingDrawOpsCount);
    SkDEBUGCODE(fBaseInstance = -1);
    SkDEBUGCODE(fInstanceCount = 1);
    SkDEBUGCODE(fNumSkippedInstances = 0);
    GrRenderTargetContext* const rtc = args.fRenderTargetContext;

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());
    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &fHeadDraw.fClipIBounds,
                                      nullptr);
    if (SkTMax(devBounds.height(), devBounds.width()) > kPathCropThreshold) {
        // The path is too large. We need to crop it or analytic AA can run out of fp32 precision.
        SkPath path;
        args.fShape->asPath(&path);
        path.transform(*args.fViewMatrix);
        fHeadDraw.fMatrix.setIdentity();
        crop_path(path, fHeadDraw.fClipIBounds, &fHeadDraw.fPath);
        devBounds = fHeadDraw.fPath.getBounds();
    } else {
        fHeadDraw.fMatrix = *args.fViewMatrix;
        args.fShape->asPath(&fHeadDraw.fPath);
    }
    fHeadDraw.fColor = color; // Can't call args.fPaint.getColor() because it has been std::move'd.

    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

CCPR::DrawPathsOp::~DrawPathsOp() {
    if (fOwningRTPendingPaths) {
        // Remove CCPR's dangling pointer to this Op before deleting it.
        fOwningRTPendingPaths->fDrawOps.remove(this);
    }
    SkDEBUGCODE(--fCCPR->fPendingDrawOpsCount);
}

GrDrawOp::RequiresDstTexture CCPR::DrawPathsOp::finalize(const GrCaps& caps,
                                                         const GrAppliedClip* clip,
                                                         GrPixelConfigIsClamped dstIsClamped) {
    SkASSERT(!fCCPR->fFlushing);
    // There should only be one single path draw in this Op right now.
    SkASSERT(1 == fInstanceCount);
    SkASSERT(&fHeadDraw == fTailDraw);
    GrProcessorSet::Analysis analysis =
            fProcessors.finalize(fHeadDraw.fColor, GrProcessorAnalysisCoverage::kSingleChannel,
                                 clip, false, caps, dstIsClamped, &fHeadDraw.fColor);
    return analysis.requiresDstTexture() ? RequiresDstTexture::kYes : RequiresDstTexture::kNo;
}

bool CCPR::DrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps& caps) {
    DrawPathsOp* that = op->cast<DrawPathsOp>();
    SkASSERT(fCCPR == that->fCCPR);
    SkASSERT(!fCCPR->fFlushing);
    SkASSERT(fOwningRTPendingPaths);
    SkASSERT(fInstanceCount);
    SkASSERT(!that->fOwningRTPendingPaths || that->fOwningRTPendingPaths == fOwningRTPendingPaths);
    SkASSERT(that->fInstanceCount);

    if (this->getFillType() != that->getFillType() || fSRGBFlags != that->fSRGBFlags ||
        fProcessors != that->fProcessors) {
        return false;
    }

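    // Merge: copy that Op's head draw into the per-RT allocator and splice its draw list onto the
    // end of ours.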
    fTailDraw->fNext = &fOwningRTPendingPaths->fDrawsAllocator.push_back(that->fHeadDraw);
    fTailDraw = (that->fTailDraw == &that->fHeadDraw) ? fTailDraw->fNext : that->fTailDraw;

    this->joinBounds(*that);

    SkDEBUGCODE(fInstanceCount += that->fInstanceCount);
    SkDEBUGCODE(that->fInstanceCount = 0);
    return true;
}

void CCPR::DrawPathsOp::wasRecorded(GrRenderTargetOpList* opList) {
    SkASSERT(!fCCPR->fFlushing);
    SkASSERT(!fOwningRTPendingPaths);
    fOwningRTPendingPaths = &fCCPR->fRTPendingPathsMap[opList->uniqueID()];
    fOwningRTPendingPaths->fDrawOps.addToTail(this);
}

std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        GrProxyProvider* proxyProvider,
        uint32_t opListID, const SkPath& deviceSpacePath, const SkIRect& accessRect,
        int rtWidth, int rtHeight) {
    using MustCheckBounds = GrCCClipProcessor::MustCheckBounds;

    SkASSERT(!fFlushing);

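    // Clip paths are keyed by the SkPath's generation ID, so repeated uses of the same clip within
    // an opList share a single entry (and a single atlas placement).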
    ClipPath& clipPath = fRTPendingPathsMap[opListID].fClipPaths[deviceSpacePath.getGenerationID()];
    if (clipPath.isUninitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        clipPath.init(proxyProvider, deviceSpacePath, accessRect, rtWidth, rtHeight);
    } else {
        clipPath.addAccess(accessRect);
    }

    bool mustCheckBounds = !clipPath.pathDevIBounds().contains(accessRect);
    return skstd::make_unique<GrCCClipProcessor>(&clipPath, MustCheckBounds(mustCheckBounds),
                                                 deviceSpacePath.getFillType());
}

void CCPR::ClipPath::init(GrProxyProvider* proxyProvider,
                          const SkPath& deviceSpacePath, const SkIRect& accessRect,
                          int rtWidth, int rtHeight) {
    SkASSERT(this->isUninitialized());

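    // The atlas does not exist yet. Create a lazy proxy for its texture; the instantiate callback
    // runs at flush time, after the atlas has been laid out, and records the scale and translate
    // that map this path's device-space coords into atlas texture coords.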
    fAtlasLazyProxy = proxyProvider->createFullyLazyProxy(
            [this](GrResourceProvider* resourceProvider) {
                if (!resourceProvider) {
                    return sk_sp<GrTexture>();
                }
                SkASSERT(fHasAtlas);
                SkASSERT(!fHasAtlasTransform);

                GrTextureProxy* textureProxy = fAtlas ? fAtlas->textureProxy() : nullptr;
                if (!textureProxy || !textureProxy->instantiate(resourceProvider)) {
                    fAtlasScale = fAtlasTranslate = {0, 0};
                    SkDEBUGCODE(fHasAtlasTransform = true);
                    return sk_sp<GrTexture>();
                }

                SkASSERT(kTopLeft_GrSurfaceOrigin == textureProxy->origin());

                fAtlasScale = {1.f / textureProxy->width(), 1.f / textureProxy->height()};
                fAtlasTranslate = {fAtlasOffsetX * fAtlasScale.x(),
                                   fAtlasOffsetY * fAtlasScale.y()};
                SkDEBUGCODE(fHasAtlasTransform = true);

                return sk_ref_sp(textureProxy->priv().peekTexture());
            },
            GrProxyProvider::Renderable::kYes, kTopLeft_GrSurfaceOrigin, kAlpha_half_GrPixelConfig);

    const SkRect& pathDevBounds = deviceSpacePath.getBounds();
    if (SkTMax(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
        // The path is too large. We need to crop it or analytic AA can run out of fp32 precision.
        crop_path(deviceSpacePath, SkIRect::MakeWH(rtWidth, rtHeight), &fDeviceSpacePath);
    } else {
        fDeviceSpacePath = deviceSpacePath;
    }
    deviceSpacePath.getBounds().roundOut(&fPathDevIBounds);
    fAccessRect = accessRect;
}

void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                              const uint32_t* opListIDs, int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* results) {
    using PathInstance = GrCCPathProcessor::Instance;

    SkASSERT(!fFlushing);
    SkASSERT(!fPerFlushIndexBuffer);
    SkASSERT(!fPerFlushVertexBuffer);
    SkASSERT(!fPerFlushInstanceBuffer);
    SkASSERT(!fPerFlushPathParser);
    SkASSERT(fPerFlushAtlases.empty());
    SkDEBUGCODE(fFlushing = true);

    if (fRTPendingPathsMap.empty()) {
        return; // Nothing to draw.
    }

    fPerFlushResourcesAreValid = false;

    // Count the paths that are being flushed.
    int maxTotalPaths = 0, maxPathPoints = 0, numSkPoints = 0, numSkVerbs = 0;
    SkDEBUGCODE(int numClipPaths = 0);
    for (int i = 0; i < numOpListIDs; ++i) {
        auto it = fRTPendingPathsMap.find(opListIDs[i]);
        if (fRTPendingPathsMap.end() == it) {
            continue;
        }
        const RTPendingPaths& rtPendingPaths = it->second;

        SkTInternalLList<DrawPathsOp>::Iter drawOpsIter;
        drawOpsIter.init(rtPendingPaths.fDrawOps,
                         SkTInternalLList<DrawPathsOp>::Iter::kHead_IterStart);
        while (DrawPathsOp* op = drawOpsIter.get()) {
            for (const DrawPathsOp::SingleDraw* draw = op->head(); draw; draw = draw->fNext) {
                ++maxTotalPaths;
                maxPathPoints = SkTMax(draw->fPath.countPoints(), maxPathPoints);
                numSkPoints += draw->fPath.countPoints();
                numSkVerbs += draw->fPath.countVerbs();
            }
            drawOpsIter.next();
        }

        maxTotalPaths += rtPendingPaths.fClipPaths.size();
        SkDEBUGCODE(numClipPaths += rtPendingPaths.fClipPaths.size());
        for (const auto& clipsIter : rtPendingPaths.fClipPaths) {
            const SkPath& path = clipsIter.second.deviceSpacePath();
            maxPathPoints = SkTMax(path.countPoints(), maxPathPoints);
            numSkPoints += path.countPoints();
            numSkVerbs += path.countVerbs();
        }
    }

    if (!maxTotalPaths) {
        return; // Nothing to draw.
    }

    // Allocate GPU buffers.
    fPerFlushIndexBuffer = GrCCPathProcessor::FindIndexBuffer(onFlushRP);
    if (!fPerFlushIndexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path index buffer.\n");
        return;
    }

    fPerFlushVertexBuffer = GrCCPathProcessor::FindVertexBuffer(onFlushRP);
    if (!fPerFlushVertexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path vertex buffer.\n");
        return;
    }

    fPerFlushInstanceBuffer =
            onFlushRP->makeBuffer(kVertex_GrBufferType, maxTotalPaths * sizeof(PathInstance));
    if (!fPerFlushInstanceBuffer) {
        SkDebugf("WARNING: failed to allocate path instance buffer. No paths will be drawn.\n");
        return;
    }

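    // Map the instance buffer up front; each DrawPathsOp writes its path instances directly into
    // it below, in setupResources().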
    PathInstance* pathInstanceData = static_cast<PathInstance*>(fPerFlushInstanceBuffer->map());
    SkASSERT(pathInstanceData);
    int pathInstanceIdx = 0;

    fPerFlushPathParser = sk_make_sp<GrCCPathParser>(maxTotalPaths, maxPathPoints, numSkPoints,
                                                     numSkVerbs);
    SkDEBUGCODE(int skippedTotalPaths = 0);

    // Allocate atlas(es) and fill out GPU instance buffers.
    for (int i = 0; i < numOpListIDs; ++i) {
        auto it = fRTPendingPathsMap.find(opListIDs[i]);
        if (fRTPendingPathsMap.end() == it) {
            continue;
        }
        RTPendingPaths& rtPendingPaths = it->second;

        SkTInternalLList<DrawPathsOp>::Iter drawOpsIter;
        drawOpsIter.init(rtPendingPaths.fDrawOps,
                         SkTInternalLList<DrawPathsOp>::Iter::kHead_IterStart);
        while (DrawPathsOp* op = drawOpsIter.get()) {
            pathInstanceIdx = op->setupResources(onFlushRP, pathInstanceData, pathInstanceIdx);
            drawOpsIter.next();
            SkDEBUGCODE(skippedTotalPaths += op->numSkippedInstances_debugOnly());
        }

        for (auto& clipsIter : rtPendingPaths.fClipPaths) {
            clipsIter.second.placePathInAtlas(this, onFlushRP, fPerFlushPathParser.get());
        }
    }

    fPerFlushInstanceBuffer->unmap();

    SkASSERT(pathInstanceIdx == maxTotalPaths - skippedTotalPaths - numClipPaths);

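    // The final atlas still has an open coverage-count batch; close it so its paths get rendered
    // along with the rest.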
    if (!fPerFlushAtlases.empty()) {
        auto coverageCountBatchID = fPerFlushPathParser->closeCurrentBatch();
        fPerFlushAtlases.back().setCoverageCountBatchID(coverageCountBatchID);
    }

    if (!fPerFlushPathParser->finalize(onFlushRP)) {
        SkDebugf("WARNING: failed to allocate GPU buffers for CCPR. No paths will be drawn.\n");
        return;
    }

    // Draw the atlas(es).
    GrTAllocator<GrCCAtlas>::Iter atlasIter(&fPerFlushAtlases);
    while (atlasIter.next()) {
        if (auto rtc = atlasIter.get()->finalize(onFlushRP, fPerFlushPathParser)) {
            results->push_back(std::move(rtc));
        }
    }

    fPerFlushResourcesAreValid = true;
}

int CCPR::DrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                      GrCCPathProcessor::Instance* pathInstanceData,
                                      int pathInstanceIdx) {
    GrCCPathParser* parser = fCCPR->fPerFlushPathParser.get();
    const GrCCAtlas* currentAtlas = nullptr;
    SkASSERT(fInstanceCount > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = pathInstanceIdx;

    for (const SingleDraw* draw = this->head(); draw; draw = draw->fNext) {
        // parsePath gives us two tight bounding boxes: one in device space, as well as a second
        // one rotated an additional 45 degrees. The path vertex shader uses these two bounding
        // boxes to generate an octagon that circumscribes the path.
        SkRect devBounds, devBounds45;
        parser->parsePath(draw->fMatrix, draw->fPath, &devBounds, &devBounds45);

        SkIRect devIBounds;
        devBounds.roundOut(&devIBounds);

        int16_t offsetX, offsetY;
        GrCCAtlas* atlas = fCCPR->placeParsedPathInAtlas(onFlushRP, draw->fClipIBounds, devIBounds,
                                                         &offsetX, &offsetY);
        if (!atlas) {
            SkDEBUGCODE(++fNumSkippedInstances);
            continue;
        }
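        // A change of atlas means a new instanced draw; end the batch for the previous atlas at
        // the current instance index.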
        if (currentAtlas != atlas) {
            if (currentAtlas) {
                this->addAtlasBatch(currentAtlas, pathInstanceIdx);
            }
            currentAtlas = atlas;
        }

        const SkMatrix& m = draw->fMatrix;
        pathInstanceData[pathInstanceIdx++] = {
                devBounds,
                devBounds45,
                {{m.getScaleX(), m.getSkewY(), m.getSkewX(), m.getScaleY()}},
                {{m.getTranslateX(), m.getTranslateY()}},
                {{offsetX, offsetY}},
                draw->fColor};
    }

    SkASSERT(pathInstanceIdx == fBaseInstance + fInstanceCount - fNumSkippedInstances);
    if (currentAtlas) {
        this->addAtlasBatch(currentAtlas, pathInstanceIdx);
    }

    return pathInstanceIdx;
}

void CCPR::ClipPath::placePathInAtlas(GrCoverageCountingPathRenderer* ccpr,
                                      GrOnFlushResourceProvider* onFlushRP,
                                      GrCCPathParser* parser) {
    SkASSERT(!this->isUninitialized());
    SkASSERT(!fHasAtlas);
    parser->parseDeviceSpacePath(fDeviceSpacePath);
    fAtlas = ccpr->placeParsedPathInAtlas(onFlushRP, fAccessRect, fPathDevIBounds, &fAtlasOffsetX,
                                          &fAtlasOffsetY);
    SkDEBUGCODE(fHasAtlas = true);
}

GrCCAtlas* GrCoverageCountingPathRenderer::placeParsedPathInAtlas(
        GrOnFlushResourceProvider* onFlushRP,
        const SkIRect& clipIBounds,
        const SkIRect& pathIBounds,
        int16_t* atlasOffsetX,
        int16_t* atlasOffsetY) {
    using ScissorMode = GrCCPathParser::ScissorMode;

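    // Decide whether the path must be scissored to the clip bounds when it is rendered into the
    // atlas. A path that falls entirely outside the clip is discarded.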
    ScissorMode scissorMode;
    SkIRect clippedPathIBounds;
    if (clipIBounds.contains(pathIBounds)) {
        clippedPathIBounds = pathIBounds;
        scissorMode = ScissorMode::kNonScissored;
    } else if (clippedPathIBounds.intersect(clipIBounds, pathIBounds)) {
        scissorMode = ScissorMode::kScissored;
    } else {
        fPerFlushPathParser->discardParsedPath();
        return nullptr;
    }

    SkIPoint16 atlasLocation;
    int h = clippedPathIBounds.height(), w = clippedPathIBounds.width();
    if (fPerFlushAtlases.empty() || !fPerFlushAtlases.back().addRect(w, h, &atlasLocation)) {
        if (!fPerFlushAtlases.empty()) {
            // The atlas is out of room and can't grow any bigger.
            auto coverageCountBatchID = fPerFlushPathParser->closeCurrentBatch();
            fPerFlushAtlases.back().setCoverageCountBatchID(coverageCountBatchID);
        }
        fPerFlushAtlases.emplace_back(*onFlushRP->caps(), SkTMax(w, h));
        SkAssertResult(fPerFlushAtlases.back().addRect(w, h, &atlasLocation));
    }

    *atlasOffsetX = atlasLocation.x() - static_cast<int16_t>(clippedPathIBounds.left());
    *atlasOffsetY = atlasLocation.y() - static_cast<int16_t>(clippedPathIBounds.top());
    fPerFlushPathParser->saveParsedPath(scissorMode, clippedPathIBounds, *atlasOffsetX,
                                        *atlasOffsetY);

    return &fPerFlushAtlases.back();
}

void CCPR::DrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fCCPR->fFlushing);
    SkASSERT(flushState->rtCommandBuffer());

    if (!fCCPR->fPerFlushResourcesAreValid) {
        return; // Setup failed.
    }

    SkASSERT(fBaseInstance >= 0); // Make sure setupResources has been called.

    GrPipeline::InitArgs initArgs;
    initArgs.fFlags = fSRGBFlags;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    GrPipeline pipeline(initArgs, std::move(fProcessors), flushState->detachAppliedClip());

    int baseInstance = fBaseInstance;

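    // Issue one instanced draw per atlas batch, advancing the base instance as we go.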
    for (int i = 0; i < fAtlasBatches.count(); baseInstance = fAtlasBatches[i++].fEndInstanceIdx) {
        const AtlasBatch& batch = fAtlasBatches[i];
        SkASSERT(batch.fEndInstanceIdx > baseInstance);

        if (!batch.fAtlas->textureProxy()) {
            continue; // Atlas failed to allocate.
        }

        GrCCPathProcessor pathProc(flushState->resourceProvider(),
                                   sk_ref_sp(batch.fAtlas->textureProxy()), this->getFillType());

        GrMesh mesh(GrCCPathProcessor::MeshPrimitiveType(flushState->caps()));
        mesh.setIndexedInstanced(fCCPR->fPerFlushIndexBuffer.get(),
                                 GrCCPathProcessor::NumIndicesPerInstance(flushState->caps()),
                                 fCCPR->fPerFlushInstanceBuffer.get(),
                                 batch.fEndInstanceIdx - baseInstance, baseInstance);
        mesh.setVertexData(fCCPR->fPerFlushVertexBuffer.get());

        flushState->rtCommandBuffer()->draw(pipeline, pathProc, &mesh, nullptr, 1, this->bounds());
    }

    SkASSERT(baseInstance == fBaseInstance + fInstanceCount - fNumSkippedInstances);
}

void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken, const uint32_t* opListIDs,
                                               int numOpListIDs) {
    SkASSERT(fFlushing);
    fPerFlushAtlases.reset();
    fPerFlushPathParser.reset();
    fPerFlushInstanceBuffer.reset();
    fPerFlushVertexBuffer.reset();
    fPerFlushIndexBuffer.reset();
    // We wait to erase these until after flush, once Ops and FPs are done accessing their data.
    for (int i = 0; i < numOpListIDs; ++i) {
        fRTPendingPathsMap.erase(opListIDs[i]);
    }
    SkDEBUGCODE(fFlushing = false);
}