/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCoverageCountingPathRenderer.h"

#include "GrCaps.h"
#include "GrClip.h"
#include "GrGpu.h"
#include "GrGpuCommandBuffer.h"
#include "SkMakeUnique.h"
#include "SkMatrix.h"
#include "SkPathOps.h"
#include "GrOpFlushState.h"
#include "GrRenderTargetOpList.h"
#include "GrTexture.h"
#include "GrStyle.h"
#include "ccpr/GrCCPRClipProcessor.h"

// Shorthand for keeping line lengths under control with nested classes...
using CCPR = GrCoverageCountingPathRenderer;

// If a path spans more pixels than this, we need to crop it or else analytic AA can run out of fp32
// precision.
static constexpr float kPathCropThreshold = 1 << 16;
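// (2^16 = 65536 device-space pixels. Beyond that, the coordinate products in the analytic
// coverage math presumably start losing sub-pixel accuracy in fp32's 24-bit mantissa.)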

static void crop_path(const SkPath& path, const SkIRect& cropbox, SkPath* out) {
    SkPath cropPath;
    cropPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
}

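// CCPR needs the GPU features checked below; the alpha half-float config is used for the
// coverage-count atlas, so it must be both texturable and renderable (without MSAA).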
bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    return shaderCaps.integerSupport() &&
           shaderCaps.flatInterpolationSupport() &&
           caps.instanceAttribSupport() &&
           GrCaps::kNone_MapFlags != caps.mapBufferFlags() &&
           caps.isConfigTexturable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_half_GrPixelConfig, /*withMSAA=*/false) &&
           !caps.blacklistCoverageCounting();
}

sk_sp<GrCoverageCountingPathRenderer>
GrCoverageCountingPathRenderer::CreateIfSupported(const GrCaps& caps, bool drawCachablePaths) {
    auto ccpr = IsSupported(caps) ? new GrCoverageCountingPathRenderer(drawCachablePaths) : nullptr;
    return sk_sp<GrCoverageCountingPathRenderer>(ccpr);
}

GrPathRenderer::CanDrawPath
GrCoverageCountingPathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
    if (args.fShape->hasUnstyledKey() && !fDrawCachablePaths) {
        return CanDrawPath::kNo;
    }

    if (!args.fShape->style().isSimpleFill() ||
        args.fShape->inverseFilled() ||
        args.fViewMatrix->hasPerspective() ||
        GrAAType::kCoverage != args.fAAType) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    args.fShape->asPath(&path);
    if (SkPathPriv::ConicWeightCnt(path)) {
        return CanDrawPath::kNo;
    }

    SkRect devBounds;
    SkIRect devIBounds;
    args.fViewMatrix->mapRect(&devBounds, path.getBounds());
    devBounds.roundOut(&devIBounds);
    if (!devIBounds.intersect(*args.fClipConservativeBounds)) {
        // Path is completely clipped away. Our code will eventually notice this before doing any
        // real work.
        return CanDrawPath::kYes;
    }

    if (devIBounds.height() * devIBounds.width() > 256 * 256) {
        // Large paths can blow up the atlas fast. And they are not ideal for a two-pass rendering
        // algorithm. Give the simpler direct renderers a chance before we commit to drawing it.
        return CanDrawPath::kAsBackup;
    }

    if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
        // Complex paths do better cached in an SDF, if the renderer will accept them.
        return CanDrawPath::kAsBackup;
    }

    return CanDrawPath::kYes;
}

bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);
    auto op = skstd::make_unique<DrawPathsOp>(this, args, args.fPaint.getColor());
    args.fRenderTargetContext->addDrawOp(*args.fClip, std::move(op));
    return true;
}

CCPR::DrawPathsOp::DrawPathsOp(GrCoverageCountingPathRenderer* ccpr, const DrawPathArgs& args,
                               GrColor color)
        : INHERITED(ClassID())
        , fCCPR(ccpr)
        , fSRGBFlags(GrPipeline::SRGBFlagsFromPaint(args.fPaint))
        , fProcessors(std::move(args.fPaint))
        , fTailDraw(&fHeadDraw)
        , fOwningRTPendingPaths(nullptr) {
    SkDEBUGCODE(++fCCPR->fPendingDrawOpsCount);
    SkDEBUGCODE(fBaseInstance = -1);
    SkDEBUGCODE(fInstanceCount = 1;)
    SkDEBUGCODE(fNumSkippedInstances = 0;)
    GrRenderTargetContext* const rtc = args.fRenderTargetContext;

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());
    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &fHeadDraw.fClipIBounds,
                                      nullptr);
    if (SkTMax(devBounds.height(), devBounds.width()) > kPathCropThreshold) {
        // The path is too large. We need to crop it or analytic AA can run out of fp32 precision.
        SkPath path;
        args.fShape->asPath(&path);
        path.transform(*args.fViewMatrix);
        fHeadDraw.fMatrix.setIdentity();
        crop_path(path, fHeadDraw.fClipIBounds, &fHeadDraw.fPath);
        devBounds = fHeadDraw.fPath.getBounds();
    } else {
        fHeadDraw.fMatrix = *args.fViewMatrix;
        args.fShape->asPath(&fHeadDraw.fPath);
    }
    fHeadDraw.fColor = color; // Can't call args.fPaint.getColor() because it has been std::move'd.

    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

CCPR::DrawPathsOp::~DrawPathsOp() {
    if (fOwningRTPendingPaths) {
        // Remove CCPR's dangling pointer to this Op before deleting it.
        fOwningRTPendingPaths->fDrawOps.remove(this);
    }
    SkDEBUGCODE(--fCCPR->fPendingDrawOpsCount);
}

GrDrawOp::RequiresDstTexture CCPR::DrawPathsOp::finalize(const GrCaps& caps,
                                                         const GrAppliedClip* clip,
                                                         GrPixelConfigIsClamped dstIsClamped) {
    SkASSERT(!fCCPR->fFlushing);
    // There should only be one single path draw in this Op right now.
    SkASSERT(1 == fInstanceCount);
    SkASSERT(&fHeadDraw == fTailDraw);
    GrProcessorSet::Analysis analysis = fProcessors.finalize(
            fHeadDraw.fColor, GrProcessorAnalysisCoverage::kSingleChannel, clip, false, caps,
            dstIsClamped, &fHeadDraw.fColor);
    return analysis.requiresDstTexture() ? RequiresDstTexture::kYes : RequiresDstTexture::kNo;
}

bool CCPR::DrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps& caps) {
    DrawPathsOp* that = op->cast<DrawPathsOp>();
    SkASSERT(fCCPR == that->fCCPR);
    SkASSERT(!fCCPR->fFlushing);
    SkASSERT(fOwningRTPendingPaths);
    SkASSERT(fInstanceCount);
    SkASSERT(!that->fOwningRTPendingPaths || that->fOwningRTPendingPaths == fOwningRTPendingPaths);
    SkASSERT(that->fInstanceCount);

    if (this->getFillType() != that->getFillType() ||
        fSRGBFlags != that->fSRGBFlags ||
        fProcessors != that->fProcessors) {
        return false;
    }

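    // Chain "that" Op's single draws onto the end of our list. Its head draw lives inside the Op
    // itself, so copy it into the owning render target's draw allocator before "that" goes away;
    // any remaining draws in its list are already allocator-backed and can be linked directly.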
    fTailDraw->fNext = &fOwningRTPendingPaths->fDrawsAllocator.push_back(that->fHeadDraw);
    fTailDraw = (that->fTailDraw == &that->fHeadDraw) ? fTailDraw->fNext : that->fTailDraw;

    this->joinBounds(*that);

    SkDEBUGCODE(fInstanceCount += that->fInstanceCount;)
    SkDEBUGCODE(that->fInstanceCount = 0);
    return true;
}

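// Called when this Op is recorded into an op list. Registers the Op with the per-render-target
// pending paths so preFlush() can find and process it.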
void CCPR::DrawPathsOp::wasRecorded(GrRenderTargetOpList* opList) {
    SkASSERT(!fCCPR->fFlushing);
    SkASSERT(!fOwningRTPendingPaths);
    fOwningRTPendingPaths = &fCCPR->fRTPendingPathsMap[opList->uniqueID()];
    fOwningRTPendingPaths->fDrawOps.addToTail(this);
}

bool GrCoverageCountingPathRenderer::canMakeClipProcessor(const SkPath& deviceSpacePath) const {
    if (!fDrawCachablePaths && !deviceSpacePath.isVolatile()) {
        return false;
    }

    if (SkPathPriv::ConicWeightCnt(deviceSpacePath)) {
        return false;
    }

    return true;
}

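// Creates a clip FP for the given device-space path. ClipPaths are keyed on the path's generation
// ID within each op list, so repeated clips against the same path share one entry per flush.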
std::unique_ptr<GrFragmentProcessor>
GrCoverageCountingPathRenderer::makeClipProcessor(uint32_t opListID, const SkPath& deviceSpacePath,
                                                  const SkIRect& accessRect, int rtWidth,
                                                  int rtHeight) {
    using MustCheckBounds = GrCCPRClipProcessor::MustCheckBounds;

    SkASSERT(!fFlushing);
    SkASSERT(this->canMakeClipProcessor(deviceSpacePath));

    ClipPath& clipPath = fRTPendingPathsMap[opListID].fClipPaths[deviceSpacePath.getGenerationID()];
    if (clipPath.isUninitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        clipPath.init(deviceSpacePath, accessRect, rtWidth, rtHeight);
    } else {
        clipPath.addAccess(accessRect);
    }

    bool mustCheckBounds = !clipPath.pathDevIBounds().contains(accessRect);
    return skstd::make_unique<GrCCPRClipProcessor>(&clipPath, MustCheckBounds(mustCheckBounds),
                                                   deviceSpacePath.getFillType());
}

void CCPR::ClipPath::init(const SkPath& deviceSpacePath, const SkIRect& accessRect, int rtWidth,
                          int rtHeight) {
    SkASSERT(this->isUninitialized());

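    // The atlas texture does not exist until flush time, so the clip FP samples it through a lazy
    // proxy. The callback below resolves to the atlas texture (if one was allocated) and computes
    // the normalized scale/translate that maps device space into the atlas.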
    fAtlasLazyProxy = GrSurfaceProxy::MakeLazy([this](GrResourceProvider* resourceProvider,
                                                      GrSurfaceOrigin* outOrigin) {
        SkASSERT(fHasAtlas);
        SkASSERT(!fHasAtlasTransform);

        GrTextureProxy* textureProxy = fAtlas ? fAtlas->textureProxy() : nullptr;
        if (!textureProxy || !textureProxy->instantiate(resourceProvider)) {
            fAtlasScale = fAtlasTranslate = {0, 0};
            SkDEBUGCODE(fHasAtlasTransform = true);
            return sk_sp<GrTexture>();
        }

        fAtlasScale = {1.f / textureProxy->width(), 1.f / textureProxy->height()};
        fAtlasTranslate = {fAtlasOffsetX * fAtlasScale.x(), fAtlasOffsetY * fAtlasScale.y()};
        if (kBottomLeft_GrSurfaceOrigin == textureProxy->origin()) {
            fAtlasScale.fY = -fAtlasScale.y();
            fAtlasTranslate.fY = 1 - fAtlasTranslate.y();
        }
        SkDEBUGCODE(fHasAtlasTransform = true);

        *outOrigin = textureProxy->origin();
        return sk_ref_sp(textureProxy->priv().peekTexture());
    }, GrSurfaceProxy::Renderable::kYes, kAlpha_half_GrPixelConfig);

    const SkRect& pathDevBounds = deviceSpacePath.getBounds();
    if (SkTMax(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
        // The path is too large. We need to crop it or analytic AA can run out of fp32 precision.
        crop_path(deviceSpacePath, SkIRect::MakeWH(rtWidth, rtHeight), &fDeviceSpacePath);
    } else {
        fDeviceSpacePath = deviceSpacePath;
    }
    deviceSpacePath.getBounds().roundOut(&fPathDevIBounds);
    fAccessRect = accessRect;
}

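// Called by the on-flush resource provider before the op lists execute. This is where CCPR counts
// the pending paths, allocates the per-flush GPU buffers, packs every path into one or more
// coverage-count atlases, and returns the atlas render target contexts to the caller.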
void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                              const uint32_t* opListIDs, int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* results) {
    using PathInstance = GrCCPRPathProcessor::Instance;

    SkASSERT(!fFlushing);
    SkASSERT(!fPerFlushIndexBuffer);
    SkASSERT(!fPerFlushVertexBuffer);
    SkASSERT(!fPerFlushInstanceBuffer);
    SkASSERT(fPerFlushAtlases.empty());
    SkDEBUGCODE(fFlushing = true;)

    if (fRTPendingPathsMap.empty()) {
        return; // Nothing to draw.
    }

    fPerFlushResourcesAreValid = false;

    // Count the paths that are being flushed.
    int maxTotalPaths = 0, maxPathPoints = 0, numSkPoints = 0, numSkVerbs = 0;
    SkDEBUGCODE(int numClipPaths = 0;)
    for (int i = 0; i < numOpListIDs; ++i) {
        auto it = fRTPendingPathsMap.find(opListIDs[i]);
        if (fRTPendingPathsMap.end() == it) {
            continue;
        }
        const RTPendingPaths& rtPendingPaths = it->second;

        SkTInternalLList<DrawPathsOp>::Iter drawOpsIter;
        drawOpsIter.init(rtPendingPaths.fDrawOps,
                         SkTInternalLList<DrawPathsOp>::Iter::kHead_IterStart);
        while (DrawPathsOp* op = drawOpsIter.get()) {
            for (const DrawPathsOp::SingleDraw* draw = op->head(); draw; draw = draw->fNext) {
                ++maxTotalPaths;
                maxPathPoints = SkTMax(draw->fPath.countPoints(), maxPathPoints);
                numSkPoints += draw->fPath.countPoints();
                numSkVerbs += draw->fPath.countVerbs();
            }
            drawOpsIter.next();
        }

        maxTotalPaths += rtPendingPaths.fClipPaths.size();
        SkDEBUGCODE(numClipPaths += rtPendingPaths.fClipPaths.size());
        for (const auto& clipsIter : rtPendingPaths.fClipPaths) {
            const SkPath& path = clipsIter.second.deviceSpacePath();
            maxPathPoints = SkTMax(path.countPoints(), maxPathPoints);
            numSkPoints += path.countPoints();
            numSkVerbs += path.countVerbs();
        }
    }

    if (!maxTotalPaths) {
        return; // Nothing to draw.
    }

    // Allocate GPU buffers.
    fPerFlushIndexBuffer = GrCCPRPathProcessor::FindIndexBuffer(onFlushRP);
    if (!fPerFlushIndexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path index buffer.\n");
        return;
    }

    fPerFlushVertexBuffer = GrCCPRPathProcessor::FindVertexBuffer(onFlushRP);
    if (!fPerFlushVertexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path vertex buffer.\n");
        return;
    }

    fPerFlushInstanceBuffer = onFlushRP->makeBuffer(kVertex_GrBufferType,
                                                    maxTotalPaths * sizeof(PathInstance));
    if (!fPerFlushInstanceBuffer) {
        SkDebugf("WARNING: failed to allocate path instance buffer. No paths will be drawn.\n");
        return;
    }

    PathInstance* pathInstanceData = static_cast<PathInstance*>(fPerFlushInstanceBuffer->map());
    SkASSERT(pathInstanceData);
    int pathInstanceIdx = 0;

    GrCCPRCoverageOpsBuilder atlasOpsBuilder(maxTotalPaths, maxPathPoints, numSkPoints, numSkVerbs);
    SkDEBUGCODE(int skippedTotalPaths = 0;)

    // Allocate atlas(es) and fill out GPU instance buffers.
    for (int i = 0; i < numOpListIDs; ++i) {
        auto it = fRTPendingPathsMap.find(opListIDs[i]);
        if (fRTPendingPathsMap.end() == it) {
            continue;
        }
        RTPendingPaths& rtPendingPaths = it->second;

        SkTInternalLList<DrawPathsOp>::Iter drawOpsIter;
        drawOpsIter.init(rtPendingPaths.fDrawOps,
                         SkTInternalLList<DrawPathsOp>::Iter::kHead_IterStart);
        while (DrawPathsOp* op = drawOpsIter.get()) {
            pathInstanceIdx = op->setupResources(onFlushRP, &atlasOpsBuilder, pathInstanceData,
                                                 pathInstanceIdx);
            drawOpsIter.next();
            SkDEBUGCODE(skippedTotalPaths += op->numSkippedInstances_debugOnly();)
        }

        for (auto& clipsIter : rtPendingPaths.fClipPaths) {
            clipsIter.second.placePathInAtlas(this, onFlushRP, &atlasOpsBuilder);
        }
    }

    fPerFlushInstanceBuffer->unmap();

    SkASSERT(pathInstanceIdx == maxTotalPaths - skippedTotalPaths - numClipPaths);

    if (!fPerFlushAtlases.empty()) {
        atlasOpsBuilder.emitOp(fPerFlushAtlases.back().drawBounds());
    }

    SkSTArray<4, std::unique_ptr<GrCCPRCoverageOp>> atlasOps(fPerFlushAtlases.count());
    if (!atlasOpsBuilder.finalize(onFlushRP, &atlasOps)) {
        SkDebugf("WARNING: failed to allocate ccpr atlas buffers. No paths will be drawn.\n");
        return;
    }
    SkASSERT(atlasOps.count() == fPerFlushAtlases.count());

    // Draw the coverage ops into their respective atlases.
    GrTAllocator<GrCCPRAtlas>::Iter atlasIter(&fPerFlushAtlases);
    for (std::unique_ptr<GrCCPRCoverageOp>& atlasOp : atlasOps) {
        SkAssertResult(atlasIter.next());
        GrCCPRAtlas* atlas = atlasIter.get();
        SkASSERT(atlasOp->bounds() == SkRect::MakeIWH(atlas->drawBounds().width(),
                                                      atlas->drawBounds().height()));
        if (auto rtc = atlas->finalize(onFlushRP, std::move(atlasOp))) {
            results->push_back(std::move(rtc));
        }
    }
    SkASSERT(!atlasIter.next());

    fPerFlushResourcesAreValid = true;
}

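// Parses this Op's paths into the coverage-op builder, assigns each one an atlas location, and
// writes out its per-path instance data for the final cover draw. Returns the next free index in
// the instance buffer.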
int CCPR::DrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                      GrCCPRCoverageOpsBuilder* atlasOpsBuilder,
                                      GrCCPRPathProcessor::Instance* pathInstanceData,
                                      int pathInstanceIdx) {
    const GrCCPRAtlas* currentAtlas = nullptr;
    SkASSERT(fInstanceCount > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = pathInstanceIdx;

    for (const SingleDraw* draw = this->head(); draw; draw = draw->fNext) {
        // parsePath gives us two tight bounding boxes: one in device space, as well as a second
        // one rotated an additional 45 degrees. The path vertex shader uses these two bounding
        // boxes to generate an octagon that circumscribes the path.
        SkRect devBounds, devBounds45;
        atlasOpsBuilder->parsePath(draw->fMatrix, draw->fPath, &devBounds, &devBounds45);

        SkIRect devIBounds;
        devBounds.roundOut(&devIBounds);

        int16_t offsetX, offsetY;
        GrCCPRAtlas* atlas = fCCPR->placeParsedPathInAtlas(onFlushRP, draw->fClipIBounds,
                                                           devIBounds, &offsetX, &offsetY,
                                                           atlasOpsBuilder);
        if (!atlas) {
            SkDEBUGCODE(++fNumSkippedInstances);
            continue;
        }
        if (currentAtlas != atlas) {
            if (currentAtlas) {
                this->addAtlasBatch(currentAtlas, pathInstanceIdx);
            }
            currentAtlas = atlas;
        }

        const SkMatrix& m = draw->fMatrix;
        pathInstanceData[pathInstanceIdx++] = {
            devBounds,
            devBounds45,
            {{m.getScaleX(), m.getSkewY(), m.getSkewX(), m.getScaleY()}},
            {{m.getTranslateX(), m.getTranslateY()}},
            {{offsetX, offsetY}},
            draw->fColor
        };
    }

    SkASSERT(pathInstanceIdx == fBaseInstance + fInstanceCount - fNumSkippedInstances);
    if (currentAtlas) {
        this->addAtlasBatch(currentAtlas, pathInstanceIdx);
    }

    return pathInstanceIdx;
}

void CCPR::ClipPath::placePathInAtlas(GrCoverageCountingPathRenderer* ccpr,
                                      GrOnFlushResourceProvider* onFlushRP,
                                      GrCCPRCoverageOpsBuilder* atlasOpsBuilder) {
    SkASSERT(!this->isUninitialized());
    SkASSERT(!fHasAtlas);
    atlasOpsBuilder->parseDeviceSpacePath(fDeviceSpacePath);
    fAtlas = ccpr->placeParsedPathInAtlas(onFlushRP, fAccessRect, fPathDevIBounds, &fAtlasOffsetX,
                                          &fAtlasOffsetY, atlasOpsBuilder);
    SkDEBUGCODE(fHasAtlas = true);
}

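// Finds room in an atlas for the most recently parsed path. The path's bounds are clipped against
// clipIBounds (using the scissored mode when the clip cuts into them); if the current atlas can't
// fit the path, its pending coverage op is emitted and a new atlas is started. Returns null if the
// path is clipped out entirely.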
GrCCPRAtlas*
GrCoverageCountingPathRenderer::placeParsedPathInAtlas(GrOnFlushResourceProvider* onFlushRP,
                                                       const SkIRect& clipIBounds,
                                                       const SkIRect& pathIBounds,
                                                       int16_t* atlasOffsetX,
                                                       int16_t* atlasOffsetY,
                                                       GrCCPRCoverageOpsBuilder* atlasOpsBuilder) {
    using ScissorMode = GrCCPRCoverageOpsBuilder::ScissorMode;

    ScissorMode scissorMode;
    SkIRect clippedPathIBounds;
    if (clipIBounds.contains(pathIBounds)) {
        clippedPathIBounds = pathIBounds;
        scissorMode = ScissorMode::kNonScissored;
    } else if (clippedPathIBounds.intersect(clipIBounds, pathIBounds)) {
        scissorMode = ScissorMode::kScissored;
    } else {
        atlasOpsBuilder->discardParsedPath();
        return nullptr;
    }

    SkIPoint16 atlasLocation;
    const int h = clippedPathIBounds.height(), w = clippedPathIBounds.width();
    if (fPerFlushAtlases.empty() || !fPerFlushAtlases.back().addRect(w, h, &atlasLocation)) {
        if (!fPerFlushAtlases.empty()) {
            // The atlas is out of room and can't grow any bigger.
            atlasOpsBuilder->emitOp(fPerFlushAtlases.back().drawBounds());
        }
        fPerFlushAtlases.emplace_back(*onFlushRP->caps(), w, h).addRect(w, h, &atlasLocation);
    }

    *atlasOffsetX = atlasLocation.x() - static_cast<int16_t>(clippedPathIBounds.left());
    *atlasOffsetY = atlasLocation.y() - static_cast<int16_t>(clippedPathIBounds.top());
    atlasOpsBuilder->saveParsedPath(scissorMode, clippedPathIBounds, *atlasOffsetX, *atlasOffsetY);

    return &fPerFlushAtlases.back();
}

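// Issues the final cover draws: one indexed, instanced draw per atlas batch, using the shared
// per-flush index/vertex buffers and this Op's slice of the instance buffer, with the batch's
// atlas bound to a GrCCPRPathProcessor.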
void CCPR::DrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fCCPR->fFlushing);
    SkASSERT(flushState->rtCommandBuffer());

    if (!fCCPR->fPerFlushResourcesAreValid) {
        return; // Setup failed.
    }

    SkASSERT(fBaseInstance >= 0); // Make sure setupResources has been called.

    GrPipeline::InitArgs initArgs;
    initArgs.fFlags = fSRGBFlags;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    GrPipeline pipeline(initArgs, std::move(fProcessors), flushState->detachAppliedClip());

    int baseInstance = fBaseInstance;

    for (int i = 0; i < fAtlasBatches.count(); baseInstance = fAtlasBatches[i++].fEndInstanceIdx) {
        const AtlasBatch& batch = fAtlasBatches[i];
        SkASSERT(batch.fEndInstanceIdx > baseInstance);

        if (!batch.fAtlas->textureProxy()) {
            continue; // Atlas failed to allocate.
        }

        GrCCPRPathProcessor coverProc(flushState->resourceProvider(),
                                      sk_ref_sp(batch.fAtlas->textureProxy()), this->getFillType(),
                                      *flushState->gpu()->caps()->shaderCaps());

        GrMesh mesh(GrPrimitiveType::kTriangles);
        mesh.setIndexedInstanced(fCCPR->fPerFlushIndexBuffer.get(),
                                 GrCCPRPathProcessor::kPerInstanceIndexCount,
                                 fCCPR->fPerFlushInstanceBuffer.get(),
                                 batch.fEndInstanceIdx - baseInstance, baseInstance);
        mesh.setVertexData(fCCPR->fPerFlushVertexBuffer.get());

        flushState->rtCommandBuffer()->draw(pipeline, coverProc, &mesh, nullptr, 1, this->bounds());
    }

    SkASSERT(baseInstance == fBaseInstance + fInstanceCount - fNumSkippedInstances);
}

void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken, const uint32_t* opListIDs,
                                               int numOpListIDs) {
    SkASSERT(fFlushing);
    fPerFlushAtlases.reset();
    fPerFlushInstanceBuffer.reset();
    fPerFlushVertexBuffer.reset();
    fPerFlushIndexBuffer.reset();
    // We wait to erase these until after flush, once Ops and FPs are done accessing their data.
    for (int i = 0; i < numOpListIDs; ++i) {
        fRTPendingPathsMap.erase(opListIDs[i]);
    }
    SkDEBUGCODE(fFlushing = false;)
}