/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCoverageCountingPathRenderer.h"

#include "GrCaps.h"
#include "GrClip.h"
#include "GrGpu.h"
#include "GrGpuCommandBuffer.h"
#include "SkMakeUnique.h"
#include "SkMatrix.h"
#include "SkPathPriv.h" // for SkPathPriv::ConicWeightCnt() in onCanDrawPath()
#include "GrOpFlushState.h"
#include "GrRenderTargetOpList.h"
#include "GrStyle.h"
#include "ccpr/GrCCPRPathProcessor.h"

using DrawPathsOp = GrCoverageCountingPathRenderer::DrawPathsOp;
using ScissorMode = GrCCPRCoverageOpsBuilder::ScissorMode;

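// CCPR needs a fairly modern feature set: geometry shaders, texel buffers, integer and flat
// interpolation support in the shader, instanced attribs, and a texturable, renderable
// half-float alpha config for the coverage-count atlas.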
bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    return shaderCaps.geometryShaderSupport() &&
           shaderCaps.texelBufferSupport() &&
           shaderCaps.integerSupport() &&
           shaderCaps.flatInterpolationSupport() &&
           shaderCaps.maxVertexSamplers() >= 1 &&
           caps.instanceAttribSupport() &&
           caps.isConfigTexturable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_half_GrPixelConfig, /*withMSAA=*/false);
}

sk_sp<GrCoverageCountingPathRenderer>
GrCoverageCountingPathRenderer::CreateIfSupported(const GrCaps& caps) {
    return sk_sp<GrCoverageCountingPathRenderer>(IsSupported(caps) ?
                                                 new GrCoverageCountingPathRenderer : nullptr);
}

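// We only accept simple (non-inverse) fills with coverage AA and no perspective. Conics are
// rejected outright; very large or very complex paths are deferred to other renderers when
// possible.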
GrPathRenderer::CanDrawPath
GrCoverageCountingPathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
    if (!args.fShape->style().isSimpleFill() ||
        args.fShape->inverseFilled() ||
        args.fViewMatrix->hasPerspective() ||
        GrAAType::kCoverage != args.fAAType) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    args.fShape->asPath(&path);
    if (SkPathPriv::ConicWeightCnt(path)) {
        return CanDrawPath::kNo;
    }

    SkRect devBounds;
    SkIRect devIBounds;
    args.fViewMatrix->mapRect(&devBounds, path.getBounds());
    devBounds.roundOut(&devIBounds);
    if (!devIBounds.intersect(*args.fClipConservativeBounds)) {
        // Path is completely clipped away. Our code will eventually notice this before doing any
        // real work.
        return CanDrawPath::kYes;
    }

    if (devIBounds.height() * devIBounds.width() > 256 * 256) {
        // Large paths can blow up the atlas fast. And they are not ideal for a two-pass rendering
        // algorithm. Give the simpler direct renderers a chance before we commit to drawing it.
        return CanDrawPath::kAsBackup;
    }

    if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
        // Complex paths do better cached in an SDF, if the renderer will accept them.
        return CanDrawPath::kAsBackup;
    }

    return CanDrawPath::kYes;
}

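// Drawing just records a DrawPathsOp on the opList. The actual coverage counting and atlas
// rendering are deferred until preFlush().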
bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);
    SkASSERT(!args.fShape->isEmpty());

    auto op = skstd::make_unique<DrawPathsOp>(this, args, args.fPaint.getColor());
    args.fRenderTargetContext->addDrawOp(*args.fClip, std::move(op));

    return true;
}

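// Each DrawPathsOp starts out holding a single path in fHeadDraw. As ops get combined, their
// SingleDraws are chained into a list whose extra nodes live in the owning RTPendingOps'
// fDrawsAllocator.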
GrCoverageCountingPathRenderer::DrawPathsOp::DrawPathsOp(GrCoverageCountingPathRenderer* ccpr,
                                                         const DrawPathArgs& args, GrColor color)
        : INHERITED(ClassID())
        , fCCPR(ccpr)
        , fSRGBFlags(GrPipeline::SRGBFlagsFromPaint(args.fPaint))
        , fProcessors(std::move(args.fPaint))
        , fTailDraw(&fHeadDraw)
        , fOwningRTPendingOps(nullptr) {
    SkDEBUGCODE(fBaseInstance = -1;)
    SkDEBUGCODE(fDebugInstanceCount = 1;)
    SkDEBUGCODE(fDebugSkippedInstances = 0;)

    GrRenderTargetContext* const rtc = args.fRenderTargetContext;

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());

    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &fHeadDraw.fClipBounds, nullptr);
    fHeadDraw.fScissorMode = fHeadDraw.fClipBounds.contains(devBounds) ?
                             ScissorMode::kNonScissored : ScissorMode::kScissored;
    fHeadDraw.fMatrix = *args.fViewMatrix;
    args.fShape->asPath(&fHeadDraw.fPath);
    fHeadDraw.fColor = color; // Can't call args.fPaint.getColor() because it has been std::move'd.

    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

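// finalize() is called while the op still holds exactly one path. GrProcessorSet analysis may
// overwrite that draw's color, and tells us whether the pipeline will need a dst texture.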
GrDrawOp::RequiresDstTexture DrawPathsOp::finalize(const GrCaps& caps, const GrAppliedClip* clip) {
    SingleDraw& onlyDraw = this->getOnlyPathDraw();
    GrProcessorSet::Analysis analysis = fProcessors.finalize(onlyDraw.fColor,
                                                             GrProcessorAnalysisCoverage::kSingleChannel,
                                                             clip, false, caps, &onlyDraw.fColor);
    return analysis.requiresDstTexture() ? RequiresDstTexture::kYes : RequiresDstTexture::kNo;
}

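// Two DrawPathsOps can merge only if they share a fill type, sRGB state, and processor set.
// Merging splices the other op's SingleDraw list onto ours and drops it from the pending list.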
bool DrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps& caps) {
    DrawPathsOp* that = op->cast<DrawPathsOp>();
    SkASSERT(fCCPR == that->fCCPR);
    SkASSERT(fOwningRTPendingOps);
    SkASSERT(fDebugInstanceCount);
    SkASSERT(that->fDebugInstanceCount);

    if (this->getFillType() != that->getFillType() ||
        fSRGBFlags != that->fSRGBFlags ||
        fProcessors != that->fProcessors) {
        return false;
    }

    if (RTPendingOps* owningRTPendingOps = that->fOwningRTPendingOps) {
        SkASSERT(owningRTPendingOps == fOwningRTPendingOps);
        owningRTPendingOps->fOpList.remove(that);
    } else {
        // The op is being combined immediately after creation, before a call to wasRecorded. In
        // this case wasRecorded will never be called, so we count its path here instead.
        const SingleDraw& onlyDraw = that->getOnlyPathDraw();
        ++fOwningRTPendingOps->fNumTotalPaths;
        fOwningRTPendingOps->fNumSkPoints += onlyDraw.fPath.countPoints();
        fOwningRTPendingOps->fNumSkVerbs += onlyDraw.fPath.countVerbs();
    }

    fTailDraw->fNext = &fOwningRTPendingOps->fDrawsAllocator.push_back(that->fHeadDraw);
    fTailDraw = that->fTailDraw == &that->fHeadDraw ? fTailDraw->fNext : that->fTailDraw;

    this->joinBounds(*that);

    SkDEBUGCODE(fDebugInstanceCount += that->fDebugInstanceCount;)
    SkDEBUGCODE(that->fDebugInstanceCount = 0;)
    return true;
}

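// Called once the op has been recorded on an opList. We register with that opList's
// RTPendingOps bucket and bump the counters used later to size the per-flush GPU buffers.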
void DrawPathsOp::wasRecorded(GrRenderTargetOpList* opList) {
    SkASSERT(!fOwningRTPendingOps);
    const SingleDraw& onlyDraw = this->getOnlyPathDraw();
    fOwningRTPendingOps = &fCCPR->fRTPendingOpsMap[opList->uniqueID()];
    ++fOwningRTPendingOps->fNumTotalPaths;
    fOwningRTPendingOps->fNumSkPoints += onlyDraw.fPath.countPoints();
    fOwningRTPendingOps->fNumSkVerbs += onlyDraw.fPath.countVerbs();
    fOwningRTPendingOps->fOpList.addToTail(this);
}

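// preFlush() is our on-flush callback hook. It runs once per flush, before the opLists execute,
// and is where all the coverage counting and atlas rendering actually happens.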
void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                              const uint32_t* opListIDs, int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* results) {
    SkASSERT(!fFlushing);
    SkDEBUGCODE(fFlushing = true;)

    if (fRTPendingOpsMap.empty()) {
        return; // Nothing to draw.
    }

    this->setupPerFlushResources(onFlushRP, opListIDs, numOpListIDs, results);

    // Erase these last, once we are done accessing data from the SingleDraw allocators.
    for (int i = 0; i < numOpListIDs; ++i) {
        fRTPendingOpsMap.erase(opListIDs[i]);
    }
}

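// Builds every per-flush GPU resource: the shared index/vertex buffers for the cover geometry,
// one instance buffer holding a PathInstance per (unclipped) draw, and the series of coverage
// atlases. On success, fPerFlushResourcesAreValid is set and each atlas's render target context
// is handed back to the flush via 'results'.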
void GrCoverageCountingPathRenderer::setupPerFlushResources(GrOnFlushResourceProvider* onFlushRP,
                                                            const uint32_t* opListIDs,
                                                            int numOpListIDs,
                                                            SkTArray<sk_sp<GrRenderTargetContext>>* results) {
    using PathInstance = GrCCPRPathProcessor::Instance;

    SkASSERT(!fPerFlushIndexBuffer);
    SkASSERT(!fPerFlushVertexBuffer);
    SkASSERT(!fPerFlushInstanceBuffer);
    SkASSERT(fPerFlushAtlases.empty());

    fPerFlushResourcesAreValid = false;

    SkTInternalLList<DrawPathsOp> flushingOps;
    int maxTotalPaths = 0, numSkPoints = 0, numSkVerbs = 0;

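    // Gather the flushing ops from every affected opList and total up their path, point, and
    // verb counts so each GPU buffer can be allocated at its final size up front.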
    for (int i = 0; i < numOpListIDs; ++i) {
        auto it = fRTPendingOpsMap.find(opListIDs[i]);
        if (fRTPendingOpsMap.end() != it) {
            RTPendingOps& rtPendingOps = it->second;
            SkASSERT(!rtPendingOps.fOpList.isEmpty());
            flushingOps.concat(std::move(rtPendingOps.fOpList));
            maxTotalPaths += rtPendingOps.fNumTotalPaths;
            numSkPoints += rtPendingOps.fNumSkPoints;
            numSkVerbs += rtPendingOps.fNumSkVerbs;
        }
    }

    SkASSERT(flushingOps.isEmpty() == !maxTotalPaths);
    if (flushingOps.isEmpty()) {
        return; // Nothing to draw.
    }

    fPerFlushIndexBuffer = GrCCPRPathProcessor::FindOrMakeIndexBuffer(onFlushRP);
    if (!fPerFlushIndexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path index buffer.\n");
        return;
    }

    fPerFlushVertexBuffer = GrCCPRPathProcessor::FindOrMakeVertexBuffer(onFlushRP);
    if (!fPerFlushVertexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path vertex buffer.\n");
        return;
    }

    fPerFlushInstanceBuffer = onFlushRP->makeBuffer(kVertex_GrBufferType,
                                                    maxTotalPaths * sizeof(PathInstance));
    if (!fPerFlushInstanceBuffer) {
        SkDebugf("WARNING: failed to allocate path instance buffer. No paths will be drawn.\n");
        return;
    }

    PathInstance* pathInstanceData = static_cast<PathInstance*>(fPerFlushInstanceBuffer->map());
    SkASSERT(pathInstanceData);
    int pathInstanceIdx = 0;

    GrCCPRCoverageOpsBuilder atlasOpsBuilder(maxTotalPaths, numSkPoints, numSkVerbs);
    GrCCPRAtlas* atlas = nullptr;
    SkDEBUGCODE(int skippedTotalPaths = 0;)

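    // Main packing loop: parse each path, reserve its rectangle in the current atlas (starting a
    // new atlas whenever the current one fills up), and write out its PathInstance for the cover
    // pass. Draws whose clipped bounds are empty are skipped entirely.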
    SkTInternalLList<DrawPathsOp>::Iter iter;
    iter.init(flushingOps, SkTInternalLList<DrawPathsOp>::Iter::kHead_IterStart);
    while (DrawPathsOp* drawPathOp = iter.get()) {
        SkASSERT(drawPathOp->fDebugInstanceCount > 0);
        SkASSERT(-1 == drawPathOp->fBaseInstance);
        drawPathOp->fBaseInstance = pathInstanceIdx;

        for (const auto* draw = &drawPathOp->fHeadDraw; draw; draw = draw->fNext) {
            // parsePath gives us two tight bounding boxes: one in device space, as well as a
            // second one rotated an additional 45 degrees. The path vertex shader uses these two
            // bounding boxes to generate an octagon that circumscribes the path.
            SkRect devBounds, devBounds45;
            atlasOpsBuilder.parsePath(draw->fMatrix, draw->fPath, &devBounds, &devBounds45);

            SkRect clippedDevBounds = devBounds;
            if (ScissorMode::kScissored == draw->fScissorMode &&
                !clippedDevBounds.intersect(devBounds, SkRect::Make(draw->fClipBounds))) {
                SkDEBUGCODE(++drawPathOp->fDebugSkippedInstances;)
                atlasOpsBuilder.discardParsedPath();
                continue;
            }

            SkIRect clippedDevIBounds;
            clippedDevBounds.roundOut(&clippedDevIBounds);
            const int h = clippedDevIBounds.height(), w = clippedDevIBounds.width();

            SkIPoint16 atlasLocation;
            if (atlas && !atlas->addRect(w, h, &atlasLocation)) {
                // The atlas is out of room and can't grow any bigger.
                atlasOpsBuilder.emitOp(atlas->drawBounds());
                if (pathInstanceIdx > drawPathOp->fBaseInstance) {
                    drawPathOp->addAtlasBatch(atlas, pathInstanceIdx);
                }
                atlas = nullptr;
            }

            if (!atlas) {
                atlas = &fPerFlushAtlases.emplace_back(*onFlushRP->caps(), w, h);
                SkAssertResult(atlas->addRect(w, h, &atlasLocation));
            }

            const SkMatrix& m = draw->fMatrix;
            const int16_t offsetX = atlasLocation.x() - static_cast<int16_t>(clippedDevIBounds.x()),
                          offsetY = atlasLocation.y() - static_cast<int16_t>(clippedDevIBounds.y());

            pathInstanceData[pathInstanceIdx++] = {
                devBounds,
                devBounds45,
                {{m.getScaleX(), m.getSkewY(), m.getSkewX(), m.getScaleY()}},
                {{m.getTranslateX(), m.getTranslateY()}},
                {{offsetX, offsetY}},
                draw->fColor
            };

            atlasOpsBuilder.saveParsedPath(draw->fScissorMode, clippedDevIBounds, offsetX, offsetY);
        }

        SkASSERT(pathInstanceIdx == drawPathOp->fBaseInstance + drawPathOp->fDebugInstanceCount -
                                    drawPathOp->fDebugSkippedInstances);
        if (pathInstanceIdx > drawPathOp->fBaseInstance) {
            drawPathOp->addAtlasBatch(atlas, pathInstanceIdx);
        }

        iter.next();
        SkDEBUGCODE(skippedTotalPaths += drawPathOp->fDebugSkippedInstances;)
    }
    SkASSERT(pathInstanceIdx == maxTotalPaths - skippedTotalPaths);

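    // Close out the final atlas, if anything was packed into it.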
    if (atlas) {
        atlasOpsBuilder.emitOp(atlas->drawBounds());
    }

    fPerFlushInstanceBuffer->unmap();

    // Draw the coverage ops into their respective atlases.
    SkSTArray<4, std::unique_ptr<GrCCPRCoverageOp>> atlasOps(fPerFlushAtlases.count());
    if (!atlasOpsBuilder.finalize(onFlushRP, &atlasOps)) {
        SkDebugf("WARNING: failed to allocate ccpr atlas buffers. No paths will be drawn.\n");
        return;
    }
    SkASSERT(atlasOps.count() == fPerFlushAtlases.count());

    GrTAllocator<GrCCPRAtlas>::Iter atlasIter(&fPerFlushAtlases);
    for (std::unique_ptr<GrCCPRCoverageOp>& atlasOp : atlasOps) {
        SkAssertResult(atlasIter.next());
        GrCCPRAtlas* atlas = atlasIter.get();
        SkASSERT(atlasOp->bounds() == SkRect::MakeIWH(atlas->drawBounds().width(),
                                                      atlas->drawBounds().height()));
        if (auto rtc = atlas->finalize(onFlushRP, std::move(atlasOp))) {
            results->push_back(std::move(rtc));
        }
    }
    SkASSERT(!atlasIter.next());

    fPerFlushResourcesAreValid = true;
}

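// Executes the cover pass. By this point the atlases have been rendered by the on-flush render
// target contexts; each batch draws instanced cover geometry that samples its atlas to
// composite the paths into the destination.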
void DrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fCCPR->fFlushing);
    SkASSERT(flushState->rtCommandBuffer());

    if (!fCCPR->fPerFlushResourcesAreValid) {
        return; // Setup failed.
    }

    GrPipeline::InitArgs args;
    args.fCaps = &flushState->caps();
    args.fFlags = fSRGBFlags;
    args.fProxy = flushState->drawOpArgs().fProxy;
    args.fDstProxy = flushState->drawOpArgs().fDstProxy;
    GrPipeline pipeline(args, std::move(fProcessors), flushState->detachAppliedClip());

    int baseInstance = fBaseInstance;

    for (int i = 0; i < fAtlasBatches.count(); baseInstance = fAtlasBatches[i++].fEndInstanceIdx) {
        const AtlasBatch& batch = fAtlasBatches[i];
        SkASSERT(batch.fEndInstanceIdx > baseInstance);

        if (!batch.fAtlas->textureProxy()) {
            continue; // Atlas failed to allocate.
        }

        GrCCPRPathProcessor coverProc(flushState->resourceProvider(), batch.fAtlas->textureProxy(),
                                      this->getFillType(),
                                      *flushState->gpu()->caps()->shaderCaps());

        GrMesh mesh(GrPrimitiveType::kTriangles);
        mesh.setIndexedInstanced(fCCPR->fPerFlushIndexBuffer.get(),
                                 GrCCPRPathProcessor::kPerInstanceIndexCount,
                                 fCCPR->fPerFlushInstanceBuffer.get(),
                                 batch.fEndInstanceIdx - baseInstance, baseInstance);
        mesh.setVertexData(fCCPR->fPerFlushVertexBuffer.get());

        flushState->rtCommandBuffer()->draw(pipeline, coverProc, &mesh, nullptr, 1,
                                            this->bounds());
    }

    SkASSERT(baseInstance == fBaseInstance + fDebugInstanceCount - fDebugSkippedInstances);
}

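// Once the flush completes, release every per-flush resource so nothing is held between flushes.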
void GrCoverageCountingPathRenderer::postFlush() {
    SkASSERT(fFlushing);
    fPerFlushAtlases.reset();
    fPerFlushInstanceBuffer.reset();
    fPerFlushVertexBuffer.reset();
    fPerFlushIndexBuffer.reset();
    SkDEBUGCODE(fFlushing = false;)
}