/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCoverageCountingPathRenderer.h"

#include "GrCaps.h"
#include "GrClip.h"
#include "GrGpu.h"
#include "GrGpuCommandBuffer.h"
#include "GrOpFlushState.h"
#include "GrRenderTargetOpList.h"
#include "GrStyle.h"
#include "GrTexture.h"
#include "SkMakeUnique.h"
#include "SkMatrix.h"
#include "SkPathOps.h"
#include "ccpr/GrCCClipProcessor.h"

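// Rough overview of this renderer, as inferred from the flow below: path draws and clip paths are
// deferred into per-render-target "pending paths" lists, then resolved in preFlush(). At flush
// time, each path's fractional coverage is rendered into a shared alpha-half atlas by a coverage
// op, and the final draws sample that atlas via GrCCPathProcessor (see DrawPathsOp::onExecute).
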
// Shorthand for keeping line lengths under control with nested classes...
using CCPR = GrCoverageCountingPathRenderer;

// If a path spans more pixels than this, we need to crop it or else analytic AA can run out of fp32
// precision.
static constexpr float kPathCropThreshold = 1 << 16;

static void crop_path(const SkPath& path, const SkIRect& cropbox, SkPath* out) {
    SkPath cropPath;
    cropPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
}

bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    return shaderCaps.integerSupport() && shaderCaps.flatInterpolationSupport() &&
           caps.instanceAttribSupport() && GrCaps::kNone_MapFlags != caps.mapBufferFlags() &&
           caps.isConfigTexturable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_half_GrPixelConfig, /*withMSAA=*/false) &&
           !caps.blacklistCoverageCounting();
}

sk_sp<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps, bool drawCachablePaths) {
    auto ccpr = IsSupported(caps) ? new GrCoverageCountingPathRenderer(drawCachablePaths) : nullptr;
    return sk_sp<GrCoverageCountingPathRenderer>(ccpr);
}

GrPathRenderer::CanDrawPath GrCoverageCountingPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    if (args.fShape->hasUnstyledKey() && !fDrawCachablePaths) {
        return CanDrawPath::kNo;
    }

    if (!args.fShape->style().isSimpleFill() || args.fShape->inverseFilled() ||
        args.fViewMatrix->hasPerspective() || GrAAType::kCoverage != args.fAAType) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    args.fShape->asPath(&path);
    if (SkPathPriv::ConicWeightCnt(path)) {
        return CanDrawPath::kNo;
    }

    SkRect devBounds;
    SkIRect devIBounds;
    args.fViewMatrix->mapRect(&devBounds, path.getBounds());
    devBounds.roundOut(&devIBounds);
    if (!devIBounds.intersect(*args.fClipConservativeBounds)) {
        // Path is completely clipped away. Our code will eventually notice this before doing any
        // real work.
        return CanDrawPath::kYes;
    }

    if (devIBounds.height() * devIBounds.width() > 256 * 256) {
        // Large paths can blow up the atlas fast. And they are not ideal for a two-pass rendering
        // algorithm. Give the simpler direct renderers a chance before we commit to drawing it.
        return CanDrawPath::kAsBackup;
    }

    if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
        // Complex paths do better cached in an SDF, if the renderer will accept them.
        return CanDrawPath::kAsBackup;
    }

    return CanDrawPath::kYes;
}

bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);
    auto op = skstd::make_unique<DrawPathsOp>(this, args, args.fPaint.getColor());
    args.fRenderTargetContext->addDrawOp(*args.fClip, std::move(op));
    return true;
}

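// A DrawPathsOp initially records a single path draw (fHeadDraw); additional draws get chained
// onto its list when compatible ops are merged in onCombineIfPossible() below.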
CCPR::DrawPathsOp::DrawPathsOp(GrCoverageCountingPathRenderer* ccpr, const DrawPathArgs& args,
                               GrColor color)
        : INHERITED(ClassID())
        , fCCPR(ccpr)
        , fSRGBFlags(GrPipeline::SRGBFlagsFromPaint(args.fPaint))
        , fProcessors(std::move(args.fPaint))
        , fTailDraw(&fHeadDraw)
        , fOwningRTPendingPaths(nullptr) {
    SkDEBUGCODE(++fCCPR->fPendingDrawOpsCount);
    SkDEBUGCODE(fBaseInstance = -1);
    SkDEBUGCODE(fInstanceCount = 1);
    SkDEBUGCODE(fNumSkippedInstances = 0);
    GrRenderTargetContext* const rtc = args.fRenderTargetContext;

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());
    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &fHeadDraw.fClipIBounds,
                                      nullptr);
    if (SkTMax(devBounds.height(), devBounds.width()) > kPathCropThreshold) {
        // The path is too large. We need to crop it or analytic AA can run out of fp32 precision.
        SkPath path;
        args.fShape->asPath(&path);
        path.transform(*args.fViewMatrix);
        fHeadDraw.fMatrix.setIdentity();
        crop_path(path, fHeadDraw.fClipIBounds, &fHeadDraw.fPath);
        devBounds = fHeadDraw.fPath.getBounds();
    } else {
        fHeadDraw.fMatrix = *args.fViewMatrix;
        args.fShape->asPath(&fHeadDraw.fPath);
    }
    fHeadDraw.fColor = color;  // Can't call args.fPaint.getColor() because it has been std::move'd.

    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

CCPR::DrawPathsOp::~DrawPathsOp() {
    if (fOwningRTPendingPaths) {
        // Remove CCPR's dangling pointer to this Op before deleting it.
        fOwningRTPendingPaths->fDrawOps.remove(this);
    }
    SkDEBUGCODE(--fCCPR->fPendingDrawOpsCount);
}

GrDrawOp::RequiresDstTexture CCPR::DrawPathsOp::finalize(const GrCaps& caps,
                                                         const GrAppliedClip* clip,
                                                         GrPixelConfigIsClamped dstIsClamped) {
    SkASSERT(!fCCPR->fFlushing);
    // There should only be one single path draw in this Op right now.
    SkASSERT(1 == fInstanceCount);
    SkASSERT(&fHeadDraw == fTailDraw);
    GrProcessorSet::Analysis analysis =
            fProcessors.finalize(fHeadDraw.fColor, GrProcessorAnalysisCoverage::kSingleChannel,
                                 clip, false, caps, dstIsClamped, &fHeadDraw.fColor);
    return analysis.requiresDstTexture() ? RequiresDstTexture::kYes : RequiresDstTexture::kNo;
}

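// Merges a compatible DrawPathsOp into this one by splicing its draw list onto ours. The other
// op's head draw is copied into the owning render target's draw allocator so it outlives "that".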
bool CCPR::DrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps& caps) {
    DrawPathsOp* that = op->cast<DrawPathsOp>();
    SkASSERT(fCCPR == that->fCCPR);
    SkASSERT(!fCCPR->fFlushing);
    SkASSERT(fOwningRTPendingPaths);
    SkASSERT(fInstanceCount);
    SkASSERT(!that->fOwningRTPendingPaths || that->fOwningRTPendingPaths == fOwningRTPendingPaths);
    SkASSERT(that->fInstanceCount);

    if (this->getFillType() != that->getFillType() || fSRGBFlags != that->fSRGBFlags ||
        fProcessors != that->fProcessors) {
        return false;
    }

    fTailDraw->fNext = &fOwningRTPendingPaths->fDrawsAllocator.push_back(that->fHeadDraw);
    fTailDraw = (that->fTailDraw == &that->fHeadDraw) ? fTailDraw->fNext : that->fTailDraw;

    this->joinBounds(*that);

    SkDEBUGCODE(fInstanceCount += that->fInstanceCount);
    SkDEBUGCODE(that->fInstanceCount = 0);
    return true;
}

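// Called when this op is recorded into an op list. The op registers itself in the pending-paths
// map for that op list (keyed by the op list's unique ID) so preFlush() can find it later.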
void CCPR::DrawPathsOp::wasRecorded(GrRenderTargetOpList* opList) {
    SkASSERT(!fCCPR->fFlushing);
    SkASSERT(!fOwningRTPendingPaths);
    fOwningRTPendingPaths = &fCCPR->fRTPendingPathsMap[opList->uniqueID()];
    fOwningRTPendingPaths->fDrawOps.addToTail(this);
}

bool GrCoverageCountingPathRenderer::canMakeClipProcessor(const SkPath& deviceSpacePath) const {
    if (!fDrawCachablePaths && !deviceSpacePath.isVolatile()) {
        return false;
    }

    if (SkPathPriv::ConicWeightCnt(deviceSpacePath)) {
        return false;
    }

    return true;
}

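// Returns a fragment processor that applies the given device-space path as a clip. Clip paths are
// cached per op list under the path's generation ID, so repeated uses of the same clip share one
// atlas entry and subsequent calls simply record an additional access rect.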
std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        uint32_t opListID, const SkPath& deviceSpacePath, const SkIRect& accessRect, int rtWidth,
        int rtHeight) {
    using MustCheckBounds = GrCCClipProcessor::MustCheckBounds;

    SkASSERT(!fFlushing);
    SkASSERT(this->canMakeClipProcessor(deviceSpacePath));

    ClipPath& clipPath = fRTPendingPathsMap[opListID].fClipPaths[deviceSpacePath.getGenerationID()];
    if (clipPath.isUninitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        clipPath.init(deviceSpacePath, accessRect, rtWidth, rtHeight);
    } else {
        clipPath.addAccess(accessRect);
    }

    bool mustCheckBounds = !clipPath.pathDevIBounds().contains(accessRect);
    return skstd::make_unique<GrCCClipProcessor>(&clipPath, MustCheckBounds(mustCheckBounds),
                                                 deviceSpacePath.getFillType());
}

void CCPR::ClipPath::init(const SkPath& deviceSpacePath, const SkIRect& accessRect, int rtWidth,
                          int rtHeight) {
    SkASSERT(this->isUninitialized());

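    // The clip FP is created before any atlas exists, so we hand it a fully lazy proxy. The
    // instantiate callback below runs at flush time, after the atlas has been assigned, and also
    // computes the scale/translate that maps device space into atlas texture space.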
    fAtlasLazyProxy = GrSurfaceProxy::MakeFullyLazy(
            [this](GrResourceProvider* resourceProvider, GrSurfaceOrigin* outOrigin) {
                if (!resourceProvider) {
                    return sk_sp<GrTexture>();
                }
                SkASSERT(fHasAtlas);
                SkASSERT(!fHasAtlasTransform);

                GrTextureProxy* textureProxy = fAtlas ? fAtlas->textureProxy() : nullptr;
                if (!textureProxy || !textureProxy->instantiate(resourceProvider)) {
                    fAtlasScale = fAtlasTranslate = {0, 0};
                    SkDEBUGCODE(fHasAtlasTransform = true);
                    return sk_sp<GrTexture>();
                }

                fAtlasScale = {1.f / textureProxy->width(), 1.f / textureProxy->height()};
                fAtlasTranslate = {fAtlasOffsetX * fAtlasScale.x(),
                                   fAtlasOffsetY * fAtlasScale.y()};
                if (kBottomLeft_GrSurfaceOrigin == textureProxy->origin()) {
                    fAtlasScale.fY = -fAtlasScale.y();
                    fAtlasTranslate.fY = 1 - fAtlasTranslate.y();
                }
                SkDEBUGCODE(fHasAtlasTransform = true);

                *outOrigin = textureProxy->origin();
                return sk_ref_sp(textureProxy->priv().peekTexture());
            },
            GrSurfaceProxy::Renderable::kYes, kAlpha_half_GrPixelConfig);

    const SkRect& pathDevBounds = deviceSpacePath.getBounds();
    if (SkTMax(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
        // The path is too large. We need to crop it or analytic AA can run out of fp32 precision.
        crop_path(deviceSpacePath, SkIRect::MakeWH(rtWidth, rtHeight), &fDeviceSpacePath);
    } else {
        fDeviceSpacePath = deviceSpacePath;
    }
    deviceSpacePath.getBounds().roundOut(&fPathDevIBounds);
    fAccessRect = accessRect;
}

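// preFlush is where the deferred work happens: count the pending paths, allocate the per-flush
// index/vertex/instance buffers, assign every path a slot in an atlas, and hand the resulting
// atlas render target contexts back to the flush machinery via 'results'.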
void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                              const uint32_t* opListIDs, int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* results) {
    using PathInstance = GrCCPathProcessor::Instance;

    SkASSERT(!fFlushing);
    SkASSERT(!fPerFlushIndexBuffer);
    SkASSERT(!fPerFlushVertexBuffer);
    SkASSERT(!fPerFlushInstanceBuffer);
    SkASSERT(fPerFlushAtlases.empty());
    SkDEBUGCODE(fFlushing = true);

    if (fRTPendingPathsMap.empty()) {
        return;  // Nothing to draw.
    }

    fPerFlushResourcesAreValid = false;

    // Count the paths that are being flushed.
    int maxTotalPaths = 0, maxPathPoints = 0, numSkPoints = 0, numSkVerbs = 0;
    SkDEBUGCODE(int numClipPaths = 0);
    for (int i = 0; i < numOpListIDs; ++i) {
        auto it = fRTPendingPathsMap.find(opListIDs[i]);
        if (fRTPendingPathsMap.end() == it) {
            continue;
        }
        const RTPendingPaths& rtPendingPaths = it->second;

        SkTInternalLList<DrawPathsOp>::Iter drawOpsIter;
        drawOpsIter.init(rtPendingPaths.fDrawOps,
                         SkTInternalLList<DrawPathsOp>::Iter::kHead_IterStart);
        while (DrawPathsOp* op = drawOpsIter.get()) {
            for (const DrawPathsOp::SingleDraw* draw = op->head(); draw; draw = draw->fNext) {
                ++maxTotalPaths;
                maxPathPoints = SkTMax(draw->fPath.countPoints(), maxPathPoints);
                numSkPoints += draw->fPath.countPoints();
                numSkVerbs += draw->fPath.countVerbs();
            }
            drawOpsIter.next();
        }

        maxTotalPaths += rtPendingPaths.fClipPaths.size();
        SkDEBUGCODE(numClipPaths += rtPendingPaths.fClipPaths.size());
        for (const auto& clipsIter : rtPendingPaths.fClipPaths) {
            const SkPath& path = clipsIter.second.deviceSpacePath();
            maxPathPoints = SkTMax(path.countPoints(), maxPathPoints);
            numSkPoints += path.countPoints();
            numSkVerbs += path.countVerbs();
        }
    }

    if (!maxTotalPaths) {
        return;  // Nothing to draw.
    }

    // Allocate GPU buffers.
    fPerFlushIndexBuffer = GrCCPathProcessor::FindIndexBuffer(onFlushRP);
    if (!fPerFlushIndexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path index buffer.\n");
        return;
    }

    fPerFlushVertexBuffer = GrCCPathProcessor::FindVertexBuffer(onFlushRP);
    if (!fPerFlushVertexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path vertex buffer.\n");
        return;
    }

    fPerFlushInstanceBuffer =
            onFlushRP->makeBuffer(kVertex_GrBufferType, maxTotalPaths * sizeof(PathInstance));
    if (!fPerFlushInstanceBuffer) {
        SkDebugf("WARNING: failed to allocate path instance buffer. No paths will be drawn.\n");
        return;
    }

    PathInstance* pathInstanceData = static_cast<PathInstance*>(fPerFlushInstanceBuffer->map());
    SkASSERT(pathInstanceData);
    int pathInstanceIdx = 0;

    GrCCCoverageOpsBuilder atlasOpsBuilder(maxTotalPaths, maxPathPoints, numSkPoints, numSkVerbs);
    SkDEBUGCODE(int skippedTotalPaths = 0);

    // Allocate atlas(es) and fill out GPU instance buffers.
    for (int i = 0; i < numOpListIDs; ++i) {
        auto it = fRTPendingPathsMap.find(opListIDs[i]);
        if (fRTPendingPathsMap.end() == it) {
            continue;
        }
        RTPendingPaths& rtPendingPaths = it->second;

        SkTInternalLList<DrawPathsOp>::Iter drawOpsIter;
        drawOpsIter.init(rtPendingPaths.fDrawOps,
                         SkTInternalLList<DrawPathsOp>::Iter::kHead_IterStart);
        while (DrawPathsOp* op = drawOpsIter.get()) {
            pathInstanceIdx = op->setupResources(onFlushRP, &atlasOpsBuilder, pathInstanceData,
                                                 pathInstanceIdx);
            drawOpsIter.next();
            SkDEBUGCODE(skippedTotalPaths += op->numSkippedInstances_debugOnly());
        }

        for (auto& clipsIter : rtPendingPaths.fClipPaths) {
            clipsIter.second.placePathInAtlas(this, onFlushRP, &atlasOpsBuilder);
        }
    }

    fPerFlushInstanceBuffer->unmap();

    SkASSERT(pathInstanceIdx == maxTotalPaths - skippedTotalPaths - numClipPaths);

    if (!fPerFlushAtlases.empty()) {
        atlasOpsBuilder.emitOp(fPerFlushAtlases.back().drawBounds());
    }

    SkSTArray<4, std::unique_ptr<GrCCCoverageOp>> atlasOps(fPerFlushAtlases.count());
    if (!atlasOpsBuilder.finalize(onFlushRP, &atlasOps)) {
        SkDebugf("WARNING: failed to allocate ccpr atlas buffers. No paths will be drawn.\n");
        return;
    }
    SkASSERT(atlasOps.count() == fPerFlushAtlases.count());

    // Draw the coverage ops into their respective atlases.
    GrTAllocator<GrCCAtlas>::Iter atlasIter(&fPerFlushAtlases);
    for (std::unique_ptr<GrCCCoverageOp>& atlasOp : atlasOps) {
        SkAssertResult(atlasIter.next());
        GrCCAtlas* atlas = atlasIter.get();
        SkASSERT(atlasOp->bounds() ==
                 SkRect::MakeIWH(atlas->drawBounds().width(), atlas->drawBounds().height()));
        if (auto rtc = atlas->finalize(onFlushRP, std::move(atlasOp))) {
            results->push_back(std::move(rtc));
        }
    }
    SkASSERT(!atlasIter.next());

    fPerFlushResourcesAreValid = true;
}

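// Writes this op's path instances into the mapped per-flush instance buffer and records which
// atlas each contiguous run of instances belongs to (an AtlasBatch), so onExecute can issue one
// instanced draw per atlas. Returns the next unused instance index.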
int CCPR::DrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                      GrCCCoverageOpsBuilder* atlasOpsBuilder,
                                      GrCCPathProcessor::Instance* pathInstanceData,
                                      int pathInstanceIdx) {
    const GrCCAtlas* currentAtlas = nullptr;
    SkASSERT(fInstanceCount > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = pathInstanceIdx;

    for (const SingleDraw* draw = this->head(); draw; draw = draw->fNext) {
        // parsePath gives us two tight bounding boxes: one in device space, as well as a second
        // one rotated an additional 45 degrees. The path vertex shader uses these two bounding
        // boxes to generate an octagon that circumscribes the path.
        SkRect devBounds, devBounds45;
        atlasOpsBuilder->parsePath(draw->fMatrix, draw->fPath, &devBounds, &devBounds45);

        SkIRect devIBounds;
        devBounds.roundOut(&devIBounds);

        int16_t offsetX, offsetY;
        GrCCAtlas* atlas = fCCPR->placeParsedPathInAtlas(onFlushRP, draw->fClipIBounds, devIBounds,
                                                         &offsetX, &offsetY, atlasOpsBuilder);
        if (!atlas) {
            SkDEBUGCODE(++fNumSkippedInstances);
            continue;
        }
        if (currentAtlas != atlas) {
            if (currentAtlas) {
                this->addAtlasBatch(currentAtlas, pathInstanceIdx);
            }
            currentAtlas = atlas;
        }

        const SkMatrix& m = draw->fMatrix;
        pathInstanceData[pathInstanceIdx++] = {
                devBounds,
                devBounds45,
                {{m.getScaleX(), m.getSkewY(), m.getSkewX(), m.getScaleY()}},
                {{m.getTranslateX(), m.getTranslateY()}},
                {{offsetX, offsetY}},
                draw->fColor};
    }

    SkASSERT(pathInstanceIdx == fBaseInstance + fInstanceCount - fNumSkippedInstances);
    if (currentAtlas) {
        this->addAtlasBatch(currentAtlas, pathInstanceIdx);
    }

    return pathInstanceIdx;
}

void CCPR::ClipPath::placePathInAtlas(GrCoverageCountingPathRenderer* ccpr,
                                      GrOnFlushResourceProvider* onFlushRP,
                                      GrCCCoverageOpsBuilder* atlasOpsBuilder) {
    SkASSERT(!this->isUninitialized());
    SkASSERT(!fHasAtlas);
    atlasOpsBuilder->parseDeviceSpacePath(fDeviceSpacePath);
    fAtlas = ccpr->placeParsedPathInAtlas(onFlushRP, fAccessRect, fPathDevIBounds, &fAtlasOffsetX,
                                          &fAtlasOffsetY, atlasOpsBuilder);
    SkDEBUGCODE(fHasAtlas = true);
}

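// Reserves space for a parsed path in an atlas. Paths extending outside the clip are scissored
// (or discarded entirely if fully clipped); when the current atlas can't fit the rect, its
// coverage op is emitted and a new atlas is started. Returns the atlas used, or null if discarded.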
GrCCAtlas* GrCoverageCountingPathRenderer::placeParsedPathInAtlas(
        GrOnFlushResourceProvider* onFlushRP,
        const SkIRect& clipIBounds,
        const SkIRect& pathIBounds,
        int16_t* atlasOffsetX,
        int16_t* atlasOffsetY,
        GrCCCoverageOpsBuilder* atlasOpsBuilder) {
    using ScissorMode = GrCCCoverageOpsBuilder::ScissorMode;

    ScissorMode scissorMode;
    SkIRect clippedPathIBounds;
    if (clipIBounds.contains(pathIBounds)) {
        clippedPathIBounds = pathIBounds;
        scissorMode = ScissorMode::kNonScissored;
    } else if (clippedPathIBounds.intersect(clipIBounds, pathIBounds)) {
        scissorMode = ScissorMode::kScissored;
    } else {
        atlasOpsBuilder->discardParsedPath();
        return nullptr;
    }

    SkIPoint16 atlasLocation;
    const int h = clippedPathIBounds.height(), w = clippedPathIBounds.width();
    if (fPerFlushAtlases.empty() || !fPerFlushAtlases.back().addRect(w, h, &atlasLocation)) {
        if (!fPerFlushAtlases.empty()) {
            // The atlas is out of room and can't grow any bigger.
            atlasOpsBuilder->emitOp(fPerFlushAtlases.back().drawBounds());
        }
        fPerFlushAtlases.emplace_back(*onFlushRP->caps(), w, h).addRect(w, h, &atlasLocation);
    }

    *atlasOffsetX = atlasLocation.x() - static_cast<int16_t>(clippedPathIBounds.left());
    *atlasOffsetY = atlasLocation.y() - static_cast<int16_t>(clippedPathIBounds.top());
    atlasOpsBuilder->saveParsedPath(scissorMode, clippedPathIBounds, *atlasOffsetX, *atlasOffsetY);

    return &fPerFlushAtlases.back();
}

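// Second pass: draw the actual paths into the destination. Each atlas batch becomes one instanced
// draw of GrCCPathProcessor, which rasterizes the octagonal cover geometry (see the comment in
// setupResources) and reads back the coverage that was rendered into the atlas during preFlush.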
void CCPR::DrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fCCPR->fFlushing);
    SkASSERT(flushState->rtCommandBuffer());

    if (!fCCPR->fPerFlushResourcesAreValid) {
        return;  // Setup failed.
    }

    SkASSERT(fBaseInstance >= 0);  // Make sure setupResources has been called.

    GrPipeline::InitArgs initArgs;
    initArgs.fFlags = fSRGBFlags;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    GrPipeline pipeline(initArgs, std::move(fProcessors), flushState->detachAppliedClip());

    int baseInstance = fBaseInstance;

    for (int i = 0; i < fAtlasBatches.count(); baseInstance = fAtlasBatches[i++].fEndInstanceIdx) {
        const AtlasBatch& batch = fAtlasBatches[i];
        SkASSERT(batch.fEndInstanceIdx > baseInstance);

        if (!batch.fAtlas->textureProxy()) {
            continue;  // Atlas failed to allocate.
        }

        GrCCPathProcessor coverProc(flushState->resourceProvider(),
                                    sk_ref_sp(batch.fAtlas->textureProxy()), this->getFillType(),
                                    *flushState->gpu()->caps()->shaderCaps());

        GrMesh mesh(GrPrimitiveType::kTriangles);
        mesh.setIndexedInstanced(fCCPR->fPerFlushIndexBuffer.get(),
                                 GrCCPathProcessor::kPerInstanceIndexCount,
                                 fCCPR->fPerFlushInstanceBuffer.get(),
                                 batch.fEndInstanceIdx - baseInstance, baseInstance);
        mesh.setVertexData(fCCPR->fPerFlushVertexBuffer.get());

        flushState->rtCommandBuffer()->draw(pipeline, coverProc, &mesh, nullptr, 1, this->bounds());
    }

    SkASSERT(baseInstance == fBaseInstance + fInstanceCount - fNumSkippedInstances);
}

void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken, const uint32_t* opListIDs,
                                               int numOpListIDs) {
    SkASSERT(fFlushing);
    fPerFlushAtlases.reset();
    fPerFlushInstanceBuffer.reset();
    fPerFlushVertexBuffer.reset();
    fPerFlushIndexBuffer.reset();
    // We wait to erase these until after flush, once Ops and FPs are done accessing their data.
    for (int i = 0; i < numOpListIDs; ++i) {
        fRTPendingPathsMap.erase(opListIDs[i]);
    }
    SkDEBUGCODE(fFlushing = false);
}