blob: 2bec4ffeb1090e1b6b00bf7b2d24471bdec7d198 [file] [log] [blame]
/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "GrCoverageCountingPathRenderer.h"
9
10#include "GrCaps.h"
11#include "GrClip.h"
12#include "GrGpu.h"
13#include "GrGpuCommandBuffer.h"
14#include "SkMakeUnique.h"
15#include "SkMatrix.h"
16#include "GrOpFlushState.h"
17#include "GrRenderTargetOpList.h"
18#include "GrStyle.h"
19#include "ccpr/GrCCPRPathProcessor.h"
20
// Shorthand for the nested types used throughout this file.
using DrawPathsOp = GrCoverageCountingPathRenderer::DrawPathsOp;
using ScissorMode = GrCCPRCoverageOpsBuilder::ScissorMode;
23
24bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
25 const GrShaderCaps& shaderCaps = *caps.shaderCaps();
26 return shaderCaps.geometryShaderSupport() &&
27 shaderCaps.texelBufferSupport() &&
28 shaderCaps.integerSupport() &&
29 shaderCaps.flatInterpolationSupport() &&
30 shaderCaps.maxVertexSamplers() >= 1 &&
31 caps.instanceAttribSupport() &&
32 caps.isConfigTexturable(kAlpha_half_GrPixelConfig) &&
33 caps.isConfigRenderable(kAlpha_half_GrPixelConfig, /*withMSAA=*/false);
34}
35
36sk_sp<GrCoverageCountingPathRenderer>
37GrCoverageCountingPathRenderer::CreateIfSupported(const GrCaps& caps) {
38 return sk_sp<GrCoverageCountingPathRenderer>(IsSupported(caps) ?
39 new GrCoverageCountingPathRenderer : nullptr);
40}
41
42bool GrCoverageCountingPathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
43 if (!args.fShape->style().isSimpleFill() ||
44 args.fShape->inverseFilled() ||
45 args.fViewMatrix->hasPerspective() ||
46 GrAAType::kCoverage != args.fAAType) {
47 return false;
48 }
49
50 SkPath path;
51 args.fShape->asPath(&path);
52 return !SkPathPriv::ConicWeightCnt(path);
53}
54
55bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
56 SkASSERT(!fFlushing);
57 SkASSERT(!args.fShape->isEmpty());
58
59 auto op = skstd::make_unique<DrawPathsOp>(this, args, args.fPaint.getColor());
60 args.fRenderTargetContext->addDrawOp(*args.fClip, std::move(op));
61
62 return true;
63}
64
// Records a single path draw. The op starts out with exactly one SingleDraw
// (fHeadDraw); additional draws are chained on during onCombineIfPossible.
GrCoverageCountingPathRenderer::DrawPathsOp::DrawPathsOp(GrCoverageCountingPathRenderer* ccpr,
                                                         const DrawPathArgs& args, GrColor color)
        : INHERITED(ClassID())
        , fCCPR(ccpr)
        , fSRGBFlags(GrPipeline::SRGBFlagsFromPaint(args.fPaint))
        , fProcessors(std::move(args.fPaint))
        , fTailDraw(&fHeadDraw)
        , fOwningRTPendingOps(nullptr) {
    SkDEBUGCODE(fBaseInstance = -1);
    SkDEBUGCODE(fDebugInstanceCount = 1;)
    SkDEBUGCODE(fDebugSkippedInstances = 0;)

    GrRenderTargetContext* const rtc = args.fRenderTargetContext;

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());

    // Only scissor when the path's device bounds spill outside the clip's
    // conservative bounds.
    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &fHeadDraw.fClipBounds, nullptr);
    fHeadDraw.fScissorMode = fHeadDraw.fClipBounds.contains(devBounds) ?
                             ScissorMode::kNonScissored : ScissorMode::kScissored;
    fHeadDraw.fMatrix = *args.fViewMatrix;
    args.fShape->asPath(&fHeadDraw.fPath);
    fHeadDraw.fColor = color; // Can't call args.fPaint.getColor() because it has been std::move'd.

    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}
93
94GrDrawOp::RequiresDstTexture DrawPathsOp::finalize(const GrCaps& caps, const GrAppliedClip* clip) {
95 SingleDraw& onlyDraw = this->getOnlyPathDraw();
96 GrProcessorSet::Analysis analysis = fProcessors.finalize(onlyDraw.fColor,
97 GrProcessorAnalysisCoverage::kSingleChannel,
98 clip, false, caps, &onlyDraw.fColor);
99 return analysis.requiresDstTexture() ? RequiresDstTexture::kYes : RequiresDstTexture::kNo;
100}
101
// Merges 'op' (another DrawPathsOp) into this one by splicing its SingleDraw
// chain onto ours. Returns false when the ops cannot be batched (different
// fill type, sRGB flags, or processor sets).
bool DrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps& caps) {
    DrawPathsOp* that = op->cast<DrawPathsOp>();
    SkASSERT(fCCPR == that->fCCPR);
    SkASSERT(fOwningRTPendingOps);
    SkASSERT(fDebugInstanceCount);
    SkASSERT(that->fDebugInstanceCount);

    if (this->getFillType() != that->getFillType() ||
        fSRGBFlags != that->fSRGBFlags ||
        fProcessors != that->fProcessors) {
        return false;
    }

    if (RTPendingOps* owningRTPendingOps = that->fOwningRTPendingOps) {
        // 'that' was already recorded; unlink it from the shared op list since
        // this op now subsumes it.
        SkASSERT(owningRTPendingOps == fOwningRTPendingOps);
        owningRTPendingOps->fOpList.remove(that);
    } else {
        // The Op is being combined immediately after creation, before a call to wasRecorded. In
        // this case wasRecorded will not be called. So we count its path here instead.
        const SingleDraw& onlyDraw = that->getOnlyPathDraw();
        ++fOwningRTPendingOps->fNumTotalPaths;
        fOwningRTPendingOps->fNumSkPoints += onlyDraw.fPath.countPoints();
        fOwningRTPendingOps->fNumSkVerbs += onlyDraw.fPath.countVerbs();
    }

    // Copy that's head draw into the shared allocator and link it onto our
    // tail; if 'that' had more than one draw, adopt the rest of its chain.
    fTailDraw->fNext = &fOwningRTPendingOps->fDrawsAllocator.push_back(that->fHeadDraw);
    fTailDraw = that->fTailDraw == &that->fHeadDraw ? fTailDraw->fNext : that->fTailDraw;

    this->joinBounds(*that);

    SkDEBUGCODE(fDebugInstanceCount += that->fDebugInstanceCount;)
    SkDEBUGCODE(that->fDebugInstanceCount = 0);
    return true;
}
136
137void DrawPathsOp::wasRecorded(GrRenderTargetOpList* opList) {
138 SkASSERT(!fOwningRTPendingOps);
Chris Daltonc1e59632017-09-05 00:30:07 -0600139 const SingleDraw& onlyDraw = this->getOnlyPathDraw();
Chris Dalton1a325d22017-07-14 15:17:41 -0600140 fOwningRTPendingOps = &fCCPR->fRTPendingOpsMap[opList->uniqueID()];
Chris Daltonc1e59632017-09-05 00:30:07 -0600141 ++fOwningRTPendingOps->fNumTotalPaths;
142 fOwningRTPendingOps->fNumSkPoints += onlyDraw.fPath.countPoints();
143 fOwningRTPendingOps->fNumSkVerbs += onlyDraw.fPath.countVerbs();
Chris Dalton1a325d22017-07-14 15:17:41 -0600144 fOwningRTPendingOps->fOpList.addToTail(this);
Chris Dalton1a325d22017-07-14 15:17:41 -0600145}
146
147void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
148 const uint32_t* opListIDs, int numOpListIDs,
149 SkTArray<sk_sp<GrRenderTargetContext>>* results) {
Chris Dalton1a325d22017-07-14 15:17:41 -0600150 SkASSERT(!fFlushing);
151 SkDEBUGCODE(fFlushing = true;)
152
153 if (fRTPendingOpsMap.empty()) {
154 return; // Nothing to draw.
155 }
156
Chris Daltonc1e59632017-09-05 00:30:07 -0600157 this->setupPerFlushResources(onFlushRP, opListIDs, numOpListIDs, results);
158
159 // Erase these last, once we are done accessing data from the SingleDraw allocators.
160 for (int i = 0; i < numOpListIDs; ++i) {
161 fRTPendingOpsMap.erase(opListIDs[i]);
162 }
163}
164
// Builds every per-flush GPU resource needed to draw the pending paths:
// the index/vertex/instance buffers, the coverage-count atlases, and the
// coverage ops that render into them. Sets fPerFlushResourcesAreValid on
// success; any allocation failure leaves it false so onExecute draws nothing.
void GrCoverageCountingPathRenderer::setupPerFlushResources(GrOnFlushResourceProvider* onFlushRP,
                                                            const uint32_t* opListIDs,
                                                            int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* results) {
    using PathInstance = GrCCPRPathProcessor::Instance;

    SkASSERT(!fPerFlushIndexBuffer);
    SkASSERT(!fPerFlushVertexBuffer);
    SkASSERT(!fPerFlushInstanceBuffer);
    SkASSERT(fPerFlushAtlases.empty());

    fPerFlushResourcesAreValid = false;

    // Gather the ops being flushed and tally how much geometry they reference,
    // so the buffers below can be sized up-front in one allocation each.
    SkTInternalLList<DrawPathsOp> flushingOps;
    int maxTotalPaths = 0, numSkPoints = 0, numSkVerbs = 0;

    for (int i = 0; i < numOpListIDs; ++i) {
        auto it = fRTPendingOpsMap.find(opListIDs[i]);
        if (fRTPendingOpsMap.end() != it) {
            RTPendingOps& rtPendingOps = it->second;
            SkASSERT(!rtPendingOps.fOpList.isEmpty());
            flushingOps.concat(std::move(rtPendingOps.fOpList));
            maxTotalPaths += rtPendingOps.fNumTotalPaths;
            numSkPoints += rtPendingOps.fNumSkPoints;
            numSkVerbs += rtPendingOps.fNumSkVerbs;
        }
    }

    SkASSERT(flushingOps.isEmpty() == !maxTotalPaths);
    if (flushingOps.isEmpty()) {
        return; // Nothing to draw.
    }

    fPerFlushIndexBuffer = GrCCPRPathProcessor::FindOrMakeIndexBuffer(onFlushRP);
    if (!fPerFlushIndexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path index buffer.\n");
        return;
    }

    fPerFlushVertexBuffer = GrCCPRPathProcessor::FindOrMakeVertexBuffer(onFlushRP);
    if (!fPerFlushVertexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path vertex buffer.\n");
        return;
    }

    // One instance per path. "maxTotalPaths" is an upper bound: paths whose
    // clipped bounds turn out empty get skipped below.
    fPerFlushInstanceBuffer = onFlushRP->makeBuffer(kVertex_GrBufferType,
                                                    maxTotalPaths * sizeof(PathInstance));
    if (!fPerFlushInstanceBuffer) {
        SkDebugf("WARNING: failed to allocate path instance buffer. No paths will be drawn.\n");
        return;
    }

    PathInstance* pathInstanceData = static_cast<PathInstance*>(fPerFlushInstanceBuffer->map());
    SkASSERT(pathInstanceData);
    int pathInstanceIdx = 0;

    GrCCPRCoverageOpsBuilder atlasOpsBuilder(maxTotalPaths, numSkPoints, numSkVerbs);
    GrCCPRAtlas* atlas = nullptr;
    SkDEBUGCODE(int skippedTotalPaths = 0;)

    // Walk every draw of every flushing op, packing each path into an atlas
    // and writing its per-instance data.
    SkTInternalLList<DrawPathsOp>::Iter iter;
    iter.init(flushingOps, SkTInternalLList<DrawPathsOp>::Iter::kHead_IterStart);
    while (DrawPathsOp* drawPathOp = iter.get()) {
        SkASSERT(drawPathOp->fDebugInstanceCount > 0);
        SkASSERT(-1 == drawPathOp->fBaseInstance);
        drawPathOp->fBaseInstance = pathInstanceIdx;

        for (const auto* draw = &drawPathOp->fHeadDraw; draw; draw = draw->fNext) {
            // parsePath gives us two tight bounding boxes: one in device space, as well as a second
            // one rotated an additional 45 degrees. The path vertex shader uses these two bounding
            // boxes to generate an octagon that circumscribes the path.
            SkRect devBounds, devBounds45;
            atlasOpsBuilder.parsePath(draw->fMatrix, draw->fPath, &devBounds, &devBounds45);

            // Scissored paths entirely outside the clip contribute nothing; skip them.
            SkRect clippedDevBounds = devBounds;
            if (ScissorMode::kScissored == draw->fScissorMode &&
                !clippedDevBounds.intersect(devBounds, SkRect::Make(draw->fClipBounds))) {
                SkDEBUGCODE(++drawPathOp->fDebugSkippedInstances);
                atlasOpsBuilder.discardParsedPath();
                continue;
            }

            SkIRect clippedDevIBounds;
            clippedDevBounds.roundOut(&clippedDevIBounds);
            const int h = clippedDevIBounds.height(), w = clippedDevIBounds.width();

            SkIPoint16 atlasLocation;
            if (atlas && !atlas->addRect(w, h, &atlasLocation)) {
                // The atlas is out of room and can't grow any bigger.
                atlasOpsBuilder.emitOp(atlas->drawBounds());
                if (pathInstanceIdx > drawPathOp->fBaseInstance) {
                    drawPathOp->addAtlasBatch(atlas, pathInstanceIdx);
                }
                atlas = nullptr;
            }

            if (!atlas) {
                atlas = &fPerFlushAtlases.emplace_back(*onFlushRP->caps(), w, h);
                SkAssertResult(atlas->addRect(w, h, &atlasLocation));
            }

            const SkMatrix& m = draw->fMatrix;
            const int16_t offsetX = atlasLocation.x() - static_cast<int16_t>(clippedDevIBounds.x()),
                          offsetY = atlasLocation.y() - static_cast<int16_t>(clippedDevIBounds.y());

            pathInstanceData[pathInstanceIdx++] = {
                devBounds,
                devBounds45,
                {{m.getScaleX(), m.getSkewY(), m.getSkewX(), m.getScaleY()}},
                {{m.getTranslateX(), m.getTranslateY()}},
                {{offsetX, offsetY}},
                draw->fColor
            };

            atlasOpsBuilder.saveParsedPath(draw->fScissorMode, clippedDevIBounds, offsetX, offsetY);
        }

        SkASSERT(pathInstanceIdx == drawPathOp->fBaseInstance + drawPathOp->fDebugInstanceCount -
                                    drawPathOp->fDebugSkippedInstances);
        if (pathInstanceIdx > drawPathOp->fBaseInstance) {
            drawPathOp->addAtlasBatch(atlas, pathInstanceIdx);
        }

        iter.next();
        SkDEBUGCODE(skippedTotalPaths += drawPathOp->fDebugSkippedInstances;)
    }
    SkASSERT(pathInstanceIdx == maxTotalPaths - skippedTotalPaths);

    // Emit the coverage op for the final (still-open) atlas, if any.
    if (atlas) {
        atlasOpsBuilder.emitOp(atlas->drawBounds());
    }

    fPerFlushInstanceBuffer->unmap();

    // Draw the coverage ops into their respective atlases.
    SkSTArray<4, std::unique_ptr<GrCCPRCoverageOp>> atlasOps(fPerFlushAtlases.count());
    if (!atlasOpsBuilder.finalize(onFlushRP, &atlasOps)) {
        SkDebugf("WARNING: failed to allocate ccpr atlas buffers. No paths will be drawn.\n");
        return;
    }
    SkASSERT(atlasOps.count() == fPerFlushAtlases.count());

    // Each coverage op corresponds positionally to one atlas (same creation
    // order); finalize pairs them up and collects the resulting RTCs.
    GrTAllocator<GrCCPRAtlas>::Iter atlasIter(&fPerFlushAtlases);
    for (std::unique_ptr<GrCCPRCoverageOp>& atlasOp : atlasOps) {
        SkAssertResult(atlasIter.next());
        GrCCPRAtlas* atlas = atlasIter.get();
        SkASSERT(atlasOp->bounds() == SkRect::MakeIWH(atlas->drawBounds().width(),
                                                      atlas->drawBounds().height()));
        if (auto rtc = atlas->finalize(onFlushRP, std::move(atlasOp))) {
            results->push_back(std::move(rtc));
        }
    }
    SkASSERT(!atlasIter.next());

    fPerFlushResourcesAreValid = true;
}
321
// Issues the GPU draws for this op's path instances: one indexed-instanced
// mesh per atlas batch, covering the contiguous instance range
// [baseInstance, batch.fEndInstanceIdx) recorded during setup.
void DrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fCCPR->fFlushing);
    SkASSERT(flushState->rtCommandBuffer());

    if (!fCCPR->fPerFlushResourcesAreValid) {
        return; // Setup failed.
    }

    GrPipeline::InitArgs args;
    args.fCaps = &flushState->caps();
    args.fFlags = fSRGBFlags;
    args.fProxy = flushState->drawOpArgs().fProxy;
    args.fDstProxy = flushState->drawOpArgs().fDstProxy;
    GrPipeline pipeline(args, std::move(fProcessors), flushState->detachAppliedClip());

    int baseInstance = fBaseInstance;

    // NOTE: the loop's increment clause advances baseInstance to the previous
    // batch's end index, so it stays correct even across `continue`.
    for (int i = 0; i < fAtlasBatches.count(); baseInstance = fAtlasBatches[i++].fEndInstanceIdx) {
        const AtlasBatch& batch = fAtlasBatches[i];
        SkASSERT(batch.fEndInstanceIdx > baseInstance);

        if (!batch.fAtlas->textureProxy()) {
            continue; // Atlas failed to allocate.
        }

        GrCCPRPathProcessor coverProc(flushState->resourceProvider(), batch.fAtlas->textureProxy(),
                                      this->getFillType(), *flushState->gpu()->caps()->shaderCaps());

        GrMesh mesh(GrPrimitiveType::kTriangles);
        mesh.setIndexedInstanced(fCCPR->fPerFlushIndexBuffer.get(),
                                 GrCCPRPathProcessor::kPerInstanceIndexCount,
                                 fCCPR->fPerFlushInstanceBuffer.get(),
                                 batch.fEndInstanceIdx - baseInstance, baseInstance);
        mesh.setVertexData(fCCPR->fPerFlushVertexBuffer.get());

        flushState->rtCommandBuffer()->draw(pipeline, coverProc, &mesh, nullptr, 1, this->bounds());
    }

    SkASSERT(baseInstance == fBaseInstance + fDebugInstanceCount - fDebugSkippedInstances);
}
362
// Called by the flush machinery once the flush has finished executing.
// Releases all per-flush resources; the next flush rebuilds them from scratch.
void GrCoverageCountingPathRenderer::postFlush() {
    SkASSERT(fFlushing);
    fPerFlushAtlases.reset();
    fPerFlushInstanceBuffer.reset();
    fPerFlushVertexBuffer.reset();
    fPerFlushIndexBuffer.reset();
    SkDEBUGCODE(fFlushing = false;)
}