/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCoverageCountingPathRenderer.h"

#include "GrCaps.h"
#include "GrClip.h"
#include "GrGpu.h"
#include "GrGpuCommandBuffer.h"
#include "SkMakeUnique.h"
#include "SkMatrix.h"
#include "SkPathOps.h"
#include "GrOpFlushState.h"
#include "GrRenderTargetOpList.h"
#include "GrTexture.h"
#include "GrStyle.h"
#include "ccpr/GrCCPRClipProcessor.h"

// Shorthand for keeping line lengths under control with nested classes...
using CCPR = GrCoverageCountingPathRenderer;

// If a path spans more pixels than this, we need to crop it or else analytic AA can run out of
// fp32 precision.
static constexpr float kPathCropThreshold = 1 << 16;

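// Clips "path" to "cropbox" with SkPathOps, writing the result to "out". If the boolean op fails
// (e.g. on NaNs or infinities), "out" is reset to an empty path.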
static void crop_path(const SkPath& path, const SkIRect& cropbox, SkPath* out) {
    SkPath cropPath;
    cropPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
}

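// Requirements for CCPR: geometry shaders, texel buffers, integer and flat interpolation support,
// at least one vertex sampler, instanced attributes, mappable buffers, and a texturable/renderable
// half-float alpha config. The caps can also blacklist coverage counting outright.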
bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    return shaderCaps.geometryShaderSupport() &&
           shaderCaps.texelBufferSupport() &&
           shaderCaps.integerSupport() &&
           shaderCaps.flatInterpolationSupport() &&
           shaderCaps.maxVertexSamplers() >= 1 &&
           caps.instanceAttribSupport() &&
           GrCaps::kNone_MapFlags != caps.mapBufferFlags() &&
           caps.isConfigTexturable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_half_GrPixelConfig, /*withMSAA=*/false) &&
           !caps.blacklistCoverageCounting();
}

sk_sp<GrCoverageCountingPathRenderer>
GrCoverageCountingPathRenderer::CreateIfSupported(const GrCaps& caps, bool drawCachablePaths) {
    auto ccpr = IsSupported(caps) ? new GrCoverageCountingPathRenderer(drawCachablePaths) : nullptr;
    return sk_sp<GrCoverageCountingPathRenderer>(ccpr);
}

GrPathRenderer::CanDrawPath
GrCoverageCountingPathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
    if (args.fShape->hasUnstyledKey() && !fDrawCachablePaths) {
        return CanDrawPath::kNo;
    }

    if (!args.fShape->style().isSimpleFill() ||
        args.fShape->inverseFilled() ||
        args.fViewMatrix->hasPerspective() ||
        GrAAType::kCoverage != args.fAAType) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    args.fShape->asPath(&path);
    if (SkPathPriv::ConicWeightCnt(path)) {
        return CanDrawPath::kNo;
    }

    SkRect devBounds;
    SkIRect devIBounds;
    args.fViewMatrix->mapRect(&devBounds, path.getBounds());
    devBounds.roundOut(&devIBounds);
    if (!devIBounds.intersect(*args.fClipConservativeBounds)) {
        // Path is completely clipped away. Our code will eventually notice this before doing any
        // real work.
        return CanDrawPath::kYes;
    }

    if (devIBounds.height() * devIBounds.width() > 256 * 256) {
        // Large paths can blow up the atlas fast. And they are not ideal for a two-pass rendering
        // algorithm. Give the simpler direct renderers a chance before we commit to drawing it.
        return CanDrawPath::kAsBackup;
    }

    if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
        // Complex paths do better cached in an SDF, if the renderer will accept them.
        return CanDrawPath::kAsBackup;
    }

    return CanDrawPath::kYes;
}

bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);
    auto op = skstd::make_unique<DrawPathsOp>(this, args, args.fPaint.getColor());
    args.fRenderTargetContext->addDrawOp(*args.fClip, std::move(op));
    return true;
}

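// A DrawPathsOp initially records a single path draw ("fHeadDraw"); additional draws are chained
// on as ops get combined. Paths larger than kPathCropThreshold are pre-cropped to the conservative
// clip bounds so analytic AA stays within fp32 precision.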
CCPR::DrawPathsOp::DrawPathsOp(GrCoverageCountingPathRenderer* ccpr, const DrawPathArgs& args,
                               GrColor color)
        : INHERITED(ClassID())
        , fCCPR(ccpr)
        , fSRGBFlags(GrPipeline::SRGBFlagsFromPaint(args.fPaint))
        , fProcessors(std::move(args.fPaint))
        , fTailDraw(&fHeadDraw)
        , fOwningRTPendingPaths(nullptr) {
    SkDEBUGCODE(++fCCPR->fPendingDrawOpsCount);
    SkDEBUGCODE(fBaseInstance = -1);
    SkDEBUGCODE(fInstanceCount = 1;)
    SkDEBUGCODE(fNumSkippedInstances = 0;)
    GrRenderTargetContext* const rtc = args.fRenderTargetContext;

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());
    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &fHeadDraw.fClipIBounds,
                                      nullptr);
    if (SkTMax(devBounds.height(), devBounds.width()) > kPathCropThreshold) {
        // The path is too large. We need to crop it or analytic AA can run out of fp32 precision.
        SkPath path;
        args.fShape->asPath(&path);
        path.transform(*args.fViewMatrix);
        fHeadDraw.fMatrix.setIdentity();
        crop_path(path, fHeadDraw.fClipIBounds, &fHeadDraw.fPath);
        devBounds = fHeadDraw.fPath.getBounds();
    } else {
        fHeadDraw.fMatrix = *args.fViewMatrix;
        args.fShape->asPath(&fHeadDraw.fPath);
    }
    fHeadDraw.fColor = color; // Can't call args.fPaint.getColor() because it has been std::move'd.

    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

CCPR::DrawPathsOp::~DrawPathsOp() {
    if (fOwningRTPendingPaths) {
        // Remove CCPR's dangling pointer to this Op before deleting it.
        fOwningRTPendingPaths->fDrawOps.remove(this);
    }
    SkDEBUGCODE(--fCCPR->fPendingDrawOpsCount);
}

GrDrawOp::RequiresDstTexture CCPR::DrawPathsOp::finalize(const GrCaps& caps,
                                                         const GrAppliedClip* clip,
                                                         GrPixelConfigIsClamped dstIsClamped) {
    SkASSERT(!fCCPR->fFlushing);
    // There should only be one single path draw in this Op right now.
    SkASSERT(1 == fInstanceCount);
    SkASSERT(&fHeadDraw == fTailDraw);
    GrProcessorSet::Analysis analysis = fProcessors.finalize(
            fHeadDraw.fColor, GrProcessorAnalysisCoverage::kSingleChannel, clip, false, caps,
            dstIsClamped, &fHeadDraw.fColor);
    return analysis.requiresDstTexture() ? RequiresDstTexture::kYes : RequiresDstTexture::kNo;
}

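// Chains "that" op's single draw onto this op's draw list when the fill type, sRGB flags, and
// processor set all match. Both ops must belong to the same render target's pending paths.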
bool CCPR::DrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps& caps) {
    DrawPathsOp* that = op->cast<DrawPathsOp>();
    SkASSERT(fCCPR == that->fCCPR);
    SkASSERT(!fCCPR->fFlushing);
    SkASSERT(fOwningRTPendingPaths);
    SkASSERT(fInstanceCount);
    SkASSERT(!that->fOwningRTPendingPaths || that->fOwningRTPendingPaths == fOwningRTPendingPaths);
    SkASSERT(that->fInstanceCount);

    if (this->getFillType() != that->getFillType() ||
        fSRGBFlags != that->fSRGBFlags ||
        fProcessors != that->fProcessors) {
        return false;
    }

    fTailDraw->fNext = &fOwningRTPendingPaths->fDrawsAllocator.push_back(that->fHeadDraw);
    fTailDraw = (that->fTailDraw == &that->fHeadDraw) ? fTailDraw->fNext : that->fTailDraw;

    this->joinBounds(*that);

    SkDEBUGCODE(fInstanceCount += that->fInstanceCount;)
    SkDEBUGCODE(that->fInstanceCount = 0);
    return true;
}

void CCPR::DrawPathsOp::wasRecorded(GrRenderTargetOpList* opList) {
    SkASSERT(!fCCPR->fFlushing);
    SkASSERT(!fOwningRTPendingPaths);
    fOwningRTPendingPaths = &fCCPR->fRTPendingPathsMap[opList->uniqueID()];
    fOwningRTPendingPaths->fDrawOps.addToTail(this);
}

bool GrCoverageCountingPathRenderer::canMakeClipProcessor(const SkPath& deviceSpacePath) const {
    if (!fDrawCachablePaths && !deviceSpacePath.isVolatile()) {
        return false;
    }

    if (SkPathPriv::ConicWeightCnt(deviceSpacePath)) {
        return false;
    }

    return true;
}

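// Looks up (or creates) the pending ClipPath keyed by this path's generation ID on the given op
// list, then returns a GrCCPRClipProcessor that applies the path's coverage from the atlas as a
// clip.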
std::unique_ptr<GrFragmentProcessor>
GrCoverageCountingPathRenderer::makeClipProcessor(uint32_t opListID, const SkPath& deviceSpacePath,
                                                  const SkIRect& accessRect, int rtWidth,
                                                  int rtHeight) {
    using MustCheckBounds = GrCCPRClipProcessor::MustCheckBounds;

    SkASSERT(!fFlushing);
    SkASSERT(this->canMakeClipProcessor(deviceSpacePath));

    ClipPath& clipPath = fRTPendingPathsMap[opListID].fClipPaths[deviceSpacePath.getGenerationID()];
    if (clipPath.isUninitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        clipPath.init(deviceSpacePath, accessRect, rtWidth, rtHeight);
    } else {
        clipPath.addAccess(accessRect);
    }

    bool mustCheckBounds = !clipPath.pathDevIBounds().contains(accessRect);
    return skstd::make_unique<GrCCPRClipProcessor>(&clipPath, MustCheckBounds(mustCheckBounds),
                                                   deviceSpacePath.getFillType());
}

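// Records the clip path and creates a lazy proxy for its atlas texture. The lazy instantiation
// callback resolves the actual atlas texture at flush time and computes the scale/translate that
// map device space into atlas space (flipping Y for bottom-left origins).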
void CCPR::ClipPath::init(const SkPath& deviceSpacePath, const SkIRect& accessRect, int rtWidth,
                          int rtHeight) {
    SkASSERT(this->isUninitialized());

    fAtlasLazyProxy = GrSurfaceProxy::MakeLazy([this](GrResourceProvider* resourceProvider,
                                                      GrSurfaceOrigin* outOrigin) {
        SkASSERT(fHasAtlas);
        SkASSERT(!fHasAtlasTransform);

        GrTextureProxy* textureProxy = fAtlas ? fAtlas->textureProxy() : nullptr;
        if (!textureProxy || !textureProxy->instantiate(resourceProvider)) {
            fAtlasScale = fAtlasTranslate = {0, 0};
            SkDEBUGCODE(fHasAtlasTransform = true);
            return sk_sp<GrTexture>();
        }

        fAtlasScale = {1.f / textureProxy->width(), 1.f / textureProxy->height()};
        fAtlasTranslate = {fAtlasOffsetX * fAtlasScale.x(), fAtlasOffsetY * fAtlasScale.y()};
        if (kBottomLeft_GrSurfaceOrigin == textureProxy->origin()) {
            fAtlasScale.fY = -fAtlasScale.y();
            fAtlasTranslate.fY = 1 - fAtlasTranslate.y();
        }
        SkDEBUGCODE(fHasAtlasTransform = true);

        *outOrigin = textureProxy->origin();
        return sk_ref_sp(textureProxy->priv().peekTexture());
    }, GrSurfaceProxy::Renderable::kYes, kAlpha_half_GrPixelConfig);

    const SkRect& pathDevBounds = deviceSpacePath.getBounds();
    if (SkTMax(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
        // The path is too large. We need to crop it or analytic AA can run out of fp32 precision.
        crop_path(deviceSpacePath, SkIRect::MakeWH(rtWidth, rtHeight), &fDeviceSpacePath);
    } else {
        fDeviceSpacePath = deviceSpacePath;
    }
    deviceSpacePath.getBounds().roundOut(&fPathDevIBounds);
    fAccessRect = accessRect;
}

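// Called once per flush, before the op lists execute. Counts every pending draw and clip path for
// the op lists being flushed, allocates the per-flush index/vertex/instance buffers, places each
// path in an atlas, and hands the resulting coverage-count render target contexts back to the
// caller via "results".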
void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                              const uint32_t* opListIDs, int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* results) {
    using PathInstance = GrCCPRPathProcessor::Instance;

    SkASSERT(!fFlushing);
    SkASSERT(!fPerFlushIndexBuffer);
    SkASSERT(!fPerFlushVertexBuffer);
    SkASSERT(!fPerFlushInstanceBuffer);
    SkASSERT(fPerFlushAtlases.empty());
    SkDEBUGCODE(fFlushing = true;)

    if (fRTPendingPathsMap.empty()) {
        return; // Nothing to draw.
    }

    fPerFlushResourcesAreValid = false;

    // Count the paths that are being flushed.
    int maxTotalPaths = 0, maxPathPoints = 0, numSkPoints = 0, numSkVerbs = 0;
    SkDEBUGCODE(int numClipPaths = 0;)
    for (int i = 0; i < numOpListIDs; ++i) {
        auto it = fRTPendingPathsMap.find(opListIDs[i]);
        if (fRTPendingPathsMap.end() == it) {
            continue;
        }
        const RTPendingPaths& rtPendingPaths = it->second;

        SkTInternalLList<DrawPathsOp>::Iter drawOpsIter;
        drawOpsIter.init(rtPendingPaths.fDrawOps,
                         SkTInternalLList<DrawPathsOp>::Iter::kHead_IterStart);
        while (DrawPathsOp* op = drawOpsIter.get()) {
            for (const DrawPathsOp::SingleDraw* draw = op->head(); draw; draw = draw->fNext) {
                ++maxTotalPaths;
                maxPathPoints = SkTMax(draw->fPath.countPoints(), maxPathPoints);
                numSkPoints += draw->fPath.countPoints();
                numSkVerbs += draw->fPath.countVerbs();
            }
            drawOpsIter.next();
        }

        maxTotalPaths += rtPendingPaths.fClipPaths.size();
        SkDEBUGCODE(numClipPaths += rtPendingPaths.fClipPaths.size());
        for (const auto& clipsIter : rtPendingPaths.fClipPaths) {
            const SkPath& path = clipsIter.second.deviceSpacePath();
            maxPathPoints = SkTMax(path.countPoints(), maxPathPoints);
            numSkPoints += path.countPoints();
            numSkVerbs += path.countVerbs();
        }
    }

    if (!maxTotalPaths) {
        return; // Nothing to draw.
    }

    // Allocate GPU buffers.
    fPerFlushIndexBuffer = GrCCPRPathProcessor::FindOrMakeIndexBuffer(onFlushRP);
    if (!fPerFlushIndexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path index buffer.\n");
        return;
    }

    fPerFlushVertexBuffer = GrCCPRPathProcessor::FindOrMakeVertexBuffer(onFlushRP);
    if (!fPerFlushVertexBuffer) {
        SkDebugf("WARNING: failed to allocate ccpr path vertex buffer.\n");
        return;
    }

    fPerFlushInstanceBuffer = onFlushRP->makeBuffer(kVertex_GrBufferType,
                                                    maxTotalPaths * sizeof(PathInstance));
    if (!fPerFlushInstanceBuffer) {
        SkDebugf("WARNING: failed to allocate path instance buffer. No paths will be drawn.\n");
        return;
    }

    PathInstance* pathInstanceData = static_cast<PathInstance*>(fPerFlushInstanceBuffer->map());
    SkASSERT(pathInstanceData);
    int pathInstanceIdx = 0;

    GrCCPRCoverageOpsBuilder atlasOpsBuilder(maxTotalPaths, maxPathPoints, numSkPoints, numSkVerbs);
    SkDEBUGCODE(int skippedTotalPaths = 0;)

    // Allocate atlas(es) and fill out GPU instance buffers.
    for (int i = 0; i < numOpListIDs; ++i) {
        auto it = fRTPendingPathsMap.find(opListIDs[i]);
        if (fRTPendingPathsMap.end() == it) {
            continue;
        }
        RTPendingPaths& rtPendingPaths = it->second;

        SkTInternalLList<DrawPathsOp>::Iter drawOpsIter;
        drawOpsIter.init(rtPendingPaths.fDrawOps,
                         SkTInternalLList<DrawPathsOp>::Iter::kHead_IterStart);
        while (DrawPathsOp* op = drawOpsIter.get()) {
            pathInstanceIdx = op->setupResources(onFlushRP, &atlasOpsBuilder, pathInstanceData,
                                                 pathInstanceIdx);
            drawOpsIter.next();
            SkDEBUGCODE(skippedTotalPaths += op->numSkippedInstances_debugOnly();)
        }

        for (auto& clipsIter : rtPendingPaths.fClipPaths) {
            clipsIter.second.placePathInAtlas(this, onFlushRP, &atlasOpsBuilder);
        }
    }

    fPerFlushInstanceBuffer->unmap();

    SkASSERT(pathInstanceIdx == maxTotalPaths - skippedTotalPaths - numClipPaths);

    if (!fPerFlushAtlases.empty()) {
        atlasOpsBuilder.emitOp(fPerFlushAtlases.back().drawBounds());
    }

    SkSTArray<4, std::unique_ptr<GrCCPRCoverageOp>> atlasOps(fPerFlushAtlases.count());
    if (!atlasOpsBuilder.finalize(onFlushRP, &atlasOps)) {
        SkDebugf("WARNING: failed to allocate ccpr atlas buffers. No paths will be drawn.\n");
        return;
    }
    SkASSERT(atlasOps.count() == fPerFlushAtlases.count());

    // Draw the coverage ops into their respective atlases.
    GrTAllocator<GrCCPRAtlas>::Iter atlasIter(&fPerFlushAtlases);
    for (std::unique_ptr<GrCCPRCoverageOp>& atlasOp : atlasOps) {
        SkAssertResult(atlasIter.next());
        GrCCPRAtlas* atlas = atlasIter.get();
        SkASSERT(atlasOp->bounds() == SkRect::MakeIWH(atlas->drawBounds().width(),
                                                      atlas->drawBounds().height()));
        if (auto rtc = atlas->finalize(onFlushRP, std::move(atlasOp))) {
            results->push_back(std::move(rtc));
        }
    }
    SkASSERT(!atlasIter.next());

    fPerFlushResourcesAreValid = true;
}

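// Parses this op's paths into the coverage ops builder, assigns each one an atlas location, and
// writes its per-instance data (bounding boxes, matrix, atlas offset, color) into the mapped
// instance buffer. Returns the next free index in that buffer.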
int CCPR::DrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                      GrCCPRCoverageOpsBuilder* atlasOpsBuilder,
                                      GrCCPRPathProcessor::Instance* pathInstanceData,
                                      int pathInstanceIdx) {
    const GrCCPRAtlas* currentAtlas = nullptr;
    SkASSERT(fInstanceCount > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = pathInstanceIdx;

    for (const SingleDraw* draw = this->head(); draw; draw = draw->fNext) {
        // parsePath gives us two tight bounding boxes: one in device space, as well as a second
        // one rotated an additional 45 degrees. The path vertex shader uses these two bounding
        // boxes to generate an octagon that circumscribes the path.
        SkRect devBounds, devBounds45;
        atlasOpsBuilder->parsePath(draw->fMatrix, draw->fPath, &devBounds, &devBounds45);

        SkIRect devIBounds;
        devBounds.roundOut(&devIBounds);

        int16_t offsetX, offsetY;
        GrCCPRAtlas* atlas = fCCPR->placeParsedPathInAtlas(onFlushRP, draw->fClipIBounds,
                                                           devIBounds, &offsetX, &offsetY,
                                                           atlasOpsBuilder);
        if (!atlas) {
            SkDEBUGCODE(++fNumSkippedInstances);
            continue;
        }
        if (currentAtlas != atlas) {
            if (currentAtlas) {
                this->addAtlasBatch(currentAtlas, pathInstanceIdx);
            }
            currentAtlas = atlas;
        }

        const SkMatrix& m = draw->fMatrix;
        pathInstanceData[pathInstanceIdx++] = {
            devBounds,
            devBounds45,
            {{m.getScaleX(), m.getSkewY(), m.getSkewX(), m.getScaleY()}},
            {{m.getTranslateX(), m.getTranslateY()}},
            {{offsetX, offsetY}},
            draw->fColor
        };
    }

    SkASSERT(pathInstanceIdx == fBaseInstance + fInstanceCount - fNumSkippedInstances);
    if (currentAtlas) {
        this->addAtlasBatch(currentAtlas, pathInstanceIdx);
    }

    return pathInstanceIdx;
}

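// Parses the stored device-space clip path and reserves its spot in a per-flush atlas.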
void CCPR::ClipPath::placePathInAtlas(GrCoverageCountingPathRenderer* ccpr,
                                      GrOnFlushResourceProvider* onFlushRP,
                                      GrCCPRCoverageOpsBuilder* atlasOpsBuilder) {
    SkASSERT(!this->isUninitialized());
    SkASSERT(!fHasAtlas);
    atlasOpsBuilder->parseDeviceSpacePath(fDeviceSpacePath);
    fAtlas = ccpr->placeParsedPathInAtlas(onFlushRP, fAccessRect, fPathDevIBounds, &fAtlasOffsetX,
                                          &fAtlasOffsetY, atlasOpsBuilder);
    SkDEBUGCODE(fHasAtlas = true);
}

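// Finds room for the most recently parsed path in the current atlas, starting a new atlas when the
// current one is full. Returns null (and discards the parsed path) if the path is clipped out
// entirely.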
GrCCPRAtlas*
GrCoverageCountingPathRenderer::placeParsedPathInAtlas(GrOnFlushResourceProvider* onFlushRP,
                                                       const SkIRect& clipIBounds,
                                                       const SkIRect& pathIBounds,
                                                       int16_t* atlasOffsetX,
                                                       int16_t* atlasOffsetY,
                                                       GrCCPRCoverageOpsBuilder* atlasOpsBuilder) {
    using ScissorMode = GrCCPRCoverageOpsBuilder::ScissorMode;

    ScissorMode scissorMode;
    SkIRect clippedPathIBounds;
    if (clipIBounds.contains(pathIBounds)) {
        clippedPathIBounds = pathIBounds;
        scissorMode = ScissorMode::kNonScissored;
    } else if (clippedPathIBounds.intersect(clipIBounds, pathIBounds)) {
        scissorMode = ScissorMode::kScissored;
    } else {
        atlasOpsBuilder->discardParsedPath();
        return nullptr;
    }

    SkIPoint16 atlasLocation;
    const int h = clippedPathIBounds.height(), w = clippedPathIBounds.width();
    if (fPerFlushAtlases.empty() || !fPerFlushAtlases.back().addRect(w, h, &atlasLocation)) {
        if (!fPerFlushAtlases.empty()) {
            // The atlas is out of room and can't grow any bigger.
            atlasOpsBuilder->emitOp(fPerFlushAtlases.back().drawBounds());
        }
        fPerFlushAtlases.emplace_back(*onFlushRP->caps(), w, h).addRect(w, h, &atlasLocation);
    }

    *atlasOffsetX = atlasLocation.x() - static_cast<int16_t>(clippedPathIBounds.left());
    *atlasOffsetY = atlasLocation.y() - static_cast<int16_t>(clippedPathIBounds.top());
    atlasOpsBuilder->saveParsedPath(scissorMode, clippedPathIBounds, *atlasOffsetX, *atlasOffsetY);

    return &fPerFlushAtlases.back();
}

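// Issues one instanced draw per atlas batch, using the shared per-flush index/vertex buffers and
// the instance data written in setupResources().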
void CCPR::DrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fCCPR->fFlushing);
    SkASSERT(flushState->rtCommandBuffer());

    if (!fCCPR->fPerFlushResourcesAreValid) {
        return; // Setup failed.
    }

    SkASSERT(fBaseInstance >= 0); // Make sure setupResources has been called.

    GrPipeline::InitArgs initArgs;
    initArgs.fFlags = fSRGBFlags;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    GrPipeline pipeline(initArgs, std::move(fProcessors), flushState->detachAppliedClip());

    int baseInstance = fBaseInstance;

    for (int i = 0; i < fAtlasBatches.count(); baseInstance = fAtlasBatches[i++].fEndInstanceIdx) {
        const AtlasBatch& batch = fAtlasBatches[i];
        SkASSERT(batch.fEndInstanceIdx > baseInstance);

        if (!batch.fAtlas->textureProxy()) {
            continue; // Atlas failed to allocate.
        }

        GrCCPRPathProcessor coverProc(flushState->resourceProvider(),
                                      sk_ref_sp(batch.fAtlas->textureProxy()), this->getFillType(),
                                      *flushState->gpu()->caps()->shaderCaps());

        GrMesh mesh(GrPrimitiveType::kTriangles);
        mesh.setIndexedInstanced(fCCPR->fPerFlushIndexBuffer.get(),
                                 GrCCPRPathProcessor::kPerInstanceIndexCount,
                                 fCCPR->fPerFlushInstanceBuffer.get(),
                                 batch.fEndInstanceIdx - baseInstance, baseInstance);
        mesh.setVertexData(fCCPR->fPerFlushVertexBuffer.get());

        flushState->rtCommandBuffer()->draw(pipeline, coverProc, &mesh, nullptr, 1, this->bounds());
    }

    SkASSERT(baseInstance == fBaseInstance + fInstanceCount - fNumSkippedInstances);
}

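// Called after the flush completes. Releases the per-flush buffers and atlases and erases the
// pending-path maps for the op lists that were just flushed.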
void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken, const uint32_t* opListIDs,
                                               int numOpListIDs) {
    SkASSERT(fFlushing);
    fPerFlushAtlases.reset();
    fPerFlushInstanceBuffer.reset();
    fPerFlushVertexBuffer.reset();
    fPerFlushIndexBuffer.reset();
    // We wait to erase these until after flush, once Ops and FPs are done accessing their data.
    for (int i = 0; i < numOpListIDs; ++i) {
        fRTPendingPathsMap.erase(opListIDs[i]);
    }
    SkDEBUGCODE(fFlushing = false;)
}