/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPathParser.h"

#include "GrCaps.h"
#include "GrGpuCommandBuffer.h"
#include "GrOnFlushResourceProvider.h"
#include "GrOpFlushState.h"
#include "SkMathPriv.h"
#include "SkPath.h"
#include "SkPathPriv.h"
#include "SkPoint.h"
#include "ccpr/GrCCGeometry.h"

using TriPointInstance = GrCCCoverageProcessor::TriPointInstance;
using QuadPointInstance = GrCCCoverageProcessor::QuadPointInstance;

GrCCPathParser::GrCCPathParser(int maxTotalPaths, int maxPathPoints, int numSkPoints,
                               int numSkVerbs)
        : fLocalDevPtsBuffer(maxPathPoints + 1)  // Overallocate by one point to accommodate
                                                 // overflow with Sk4f. (See parsePath.)
        , fGeometry(numSkPoints, numSkVerbs)
        , fPathsInfo(maxTotalPaths)
        , fScissorSubBatches(maxTotalPaths)
        , fTotalPrimitiveCounts{PrimitiveTallies(), PrimitiveTallies()} {
    // Batches decide what to draw by looking where the previous one ended. Define initial batches
    // that "end" at the beginning of the data. These will not be drawn, but will only be read by
    // the first actual batch.
    fScissorSubBatches.push_back() = {PrimitiveTallies(), SkIRect::MakeEmpty()};
    fCoverageCountBatches.push_back() = {PrimitiveTallies(), fScissorSubBatches.count(),
                                         PrimitiveTallies()};
}

void GrCCPathParser::parsePath(const SkMatrix& m, const SkPath& path, SkRect* devBounds,
                               SkRect* devBounds45) {
    const SkPoint* pts = SkPathPriv::PointData(path);
    int numPts = path.countPoints();
    SkASSERT(numPts + 1 <= fLocalDevPtsBuffer.count());

    if (!numPts) {
        devBounds->setEmpty();
        devBounds45->setEmpty();
        this->parsePath(path, nullptr);
        return;
    }

    // m45 transforms path points into "45 degree" device space. A bounding box in this space gives
    // the circumscribing octagon's diagonals. We could use SK_ScalarRoot2Over2, but an orthonormal
    // transform is not necessary as long as the shader uses the correct inverse.
    SkMatrix m45;
    m45.setSinCos(1, 1);
    m45.preConcat(m);

    // X,Y,T are two parallel view matrices that accumulate two bounding boxes as they map points:
    // device-space bounds and "45 degree" device-space bounds (| 1 -1 | * devCoords).
    //                                                          | 1  1 |
    Sk4f X = Sk4f(m.getScaleX(), m.getSkewY(), m45.getScaleX(), m45.getSkewY());
    Sk4f Y = Sk4f(m.getSkewX(), m.getScaleY(), m45.getSkewX(), m45.getScaleY());
    Sk4f T = Sk4f(m.getTranslateX(), m.getTranslateY(), m45.getTranslateX(), m45.getTranslateY());

    // Map the path's points to device space and accumulate bounding boxes.
    Sk4f devPt = SkNx_fma(Y, Sk4f(pts[0].y()), T);
    devPt = SkNx_fma(X, Sk4f(pts[0].x()), devPt);
    Sk4f topLeft = devPt;
    Sk4f bottomRight = devPt;

    // Store all 4 values [dev.x, dev.y, dev45.x, dev45.y]. We are only interested in the first
    // two, and will overwrite [dev45.x, dev45.y] with the next point. This is why the dst buffer
    // must be at least one larger than the number of points.
    devPt.store(&fLocalDevPtsBuffer[0]);

    for (int i = 1; i < numPts; ++i) {
        devPt = SkNx_fma(Y, Sk4f(pts[i].y()), T);
        devPt = SkNx_fma(X, Sk4f(pts[i].x()), devPt);
        topLeft = Sk4f::Min(topLeft, devPt);
        bottomRight = Sk4f::Max(bottomRight, devPt);
        devPt.store(&fLocalDevPtsBuffer[i]);
    }

    SkPoint topLeftPts[2], bottomRightPts[2];
    topLeft.store(topLeftPts);
    bottomRight.store(bottomRightPts);
    devBounds->setLTRB(topLeftPts[0].x(), topLeftPts[0].y(), bottomRightPts[0].x(),
                       bottomRightPts[0].y());
    devBounds45->setLTRB(topLeftPts[1].x(), topLeftPts[1].y(), bottomRightPts[1].x(),
                         bottomRightPts[1].y());

    this->parsePath(path, fLocalDevPtsBuffer.get());
}

void GrCCPathParser::parseDeviceSpacePath(const SkPath& deviceSpacePath) {
    this->parsePath(deviceSpacePath, SkPathPriv::PointData(deviceSpacePath));
}

void GrCCPathParser::parsePath(const SkPath& path, const SkPoint* deviceSpacePts) {
    SkASSERT(!fInstanceBuffer); // Can't call after finalize().
    SkASSERT(!fParsingPath); // Call saveParsedPath() or discardParsedPath() for the last one first.
    SkDEBUGCODE(fParsingPath = true);
    SkASSERT(path.isEmpty() || deviceSpacePts);

    fCurrPathPointsIdx = fGeometry.points().count();
    fCurrPathVerbsIdx = fGeometry.verbs().count();
    fCurrPathPrimitiveCounts = PrimitiveTallies();
    fCurrPathFillType = path.getFillType();

    fGeometry.beginPath();

    if (path.isEmpty()) {
        return;
    }

    int ptsIdx = 0;
    bool insideContour = false;

    for (SkPath::Verb verb : SkPathPriv::Verbs(path)) {
        switch (verb) {
            case SkPath::kMove_Verb:
                this->endContourIfNeeded(insideContour);
                fGeometry.beginContour(deviceSpacePts[ptsIdx]);
                ++ptsIdx;
                insideContour = true;
                continue;
            case SkPath::kClose_Verb:
                this->endContourIfNeeded(insideContour);
                insideContour = false;
                continue;
            case SkPath::kLine_Verb:
                fGeometry.lineTo(deviceSpacePts[ptsIdx]);
                ++ptsIdx;
                continue;
            case SkPath::kQuad_Verb:
                fGeometry.quadraticTo(deviceSpacePts[ptsIdx], deviceSpacePts[ptsIdx + 1]);
                ptsIdx += 2;
                continue;
            case SkPath::kCubic_Verb:
                fGeometry.cubicTo(deviceSpacePts[ptsIdx], deviceSpacePts[ptsIdx + 1],
                                  deviceSpacePts[ptsIdx + 2]);
                ptsIdx += 3;
                continue;
            case SkPath::kConic_Verb:
                SK_ABORT("Conics are not supported.");
            default:
                SK_ABORT("Unexpected path verb.");
        }
    }

    this->endContourIfNeeded(insideContour);
}

void GrCCPathParser::endContourIfNeeded(bool insideContour) {
    if (insideContour) {
        fCurrPathPrimitiveCounts += fGeometry.endContour();
    }
}

void GrCCPathParser::saveParsedPath(ScissorMode scissorMode, const SkIRect& clippedDevIBounds,
                                    int16_t atlasOffsetX, int16_t atlasOffsetY) {
    SkASSERT(fParsingPath);

    fPathsInfo.emplace_back(scissorMode, atlasOffsetX, atlasOffsetY);

    // Tessellate fans from very large and/or simple paths, in order to reduce overdraw.
    int numVerbs = fGeometry.verbs().count() - fCurrPathVerbsIdx - 1;
    int64_t tessellationWork = (int64_t)numVerbs * (32 - SkCLZ(numVerbs)); // N log N.
    int64_t fanningWork = (int64_t)clippedDevIBounds.height() * clippedDevIBounds.width();
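    // (For scale: a path with 64 verbs has tessellationWork == 64*7 == 448, so with the factors
    // below it only gets tessellated once its clipped bounds exceed roughly 1060x1060 pixels.)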
    if (tessellationWork * (50*50) + (100*100) < fanningWork) { // Don't tessellate under 100x100.
        fCurrPathPrimitiveCounts.fTriangles =
                fCurrPathPrimitiveCounts.fWoundTriangles = 0;

        const SkTArray<GrCCGeometry::Verb, true>& verbs = fGeometry.verbs();
        const SkTArray<SkPoint, true>& pts = fGeometry.points();
        int ptsIdx = fCurrPathPointsIdx;

        // Build an SkPath of the Redbook fan.
        SkPath fan;
        fan.setFillType(fCurrPathFillType);
        SkASSERT(GrCCGeometry::Verb::kBeginPath == verbs[fCurrPathVerbsIdx]);
        for (int i = fCurrPathVerbsIdx + 1; i < fGeometry.verbs().count(); ++i) {
            switch (verbs[i]) {
                case GrCCGeometry::Verb::kBeginPath:
                    SK_ABORT("Invalid GrCCGeometry");
                    continue;

                case GrCCGeometry::Verb::kBeginContour:
                    fan.moveTo(pts[ptsIdx++]);
                    continue;

                case GrCCGeometry::Verb::kLineTo:
                    fan.lineTo(pts[ptsIdx++]);
                    continue;

                case GrCCGeometry::Verb::kMonotonicQuadraticTo:
                    fan.lineTo(pts[ptsIdx + 1]);
                    ptsIdx += 2;
                    continue;

                case GrCCGeometry::Verb::kMonotonicCubicTo:
                    fan.lineTo(pts[ptsIdx + 2]);
                    ptsIdx += 3;
                    continue;

                case GrCCGeometry::Verb::kEndClosedContour:
                case GrCCGeometry::Verb::kEndOpenContour:
                    fan.close();
                    continue;
            }
        }
        GrTessellator::WindingVertex* vertices = nullptr;
        int count = GrTessellator::PathToVertices(fan, std::numeric_limits<float>::infinity(),
                                                  SkRect::Make(clippedDevIBounds), &vertices);
        SkASSERT(0 == count % 3);
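        // Sort the tessellated triangles by winding: triangles with |winding| == 1 can use the
        // normal cross-product wind method, while the rest become "wound" triangles that carry
        // their winding number as instance data (see WindMethod::kInstanceData in
        // drawCoverageCount()).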
        for (int i = 0; i < count; i += 3) {
            SkASSERT(vertices[i].fWinding == vertices[i + 1].fWinding);
            SkASSERT(vertices[i].fWinding == vertices[i + 2].fWinding);
            if (1 == abs(vertices[i].fWinding)) {
                // Ensure this triangle's points actually wind in the same direction as fWinding.
                float ax = vertices[i].fPos.fX - vertices[i + 1].fPos.fX;
                float ay = vertices[i].fPos.fY - vertices[i + 1].fPos.fY;
                float bx = vertices[i].fPos.fX - vertices[i + 2].fPos.fX;
                float by = vertices[i].fPos.fY - vertices[i + 2].fPos.fY;
                float wind = ay*bx - ax*by;
                if ((wind > 0) != (vertices[i].fWinding > 0)) {
                    std::swap(vertices[i + 1].fPos, vertices[i + 2].fPos);
                }
                ++fCurrPathPrimitiveCounts.fTriangles;
            } else {
                ++fCurrPathPrimitiveCounts.fWoundTriangles;
            }
        }

        fPathsInfo.back().fFanTessellation.reset(vertices);
        fPathsInfo.back().fFanTessellationCount = count;
    }

    fTotalPrimitiveCounts[(int)scissorMode] += fCurrPathPrimitiveCounts;

    if (ScissorMode::kScissored == scissorMode) {
        fScissorSubBatches.push_back() = {fTotalPrimitiveCounts[(int)ScissorMode::kScissored],
                                          clippedDevIBounds.makeOffset(atlasOffsetX, atlasOffsetY)};
    }

    SkDEBUGCODE(fParsingPath = false);
}

void GrCCPathParser::discardParsedPath() {
    SkASSERT(fParsingPath);
    fGeometry.resize_back(fCurrPathPointsIdx, fCurrPathVerbsIdx);
    SkDEBUGCODE(fParsingPath = false);
}

GrCCPathParser::CoverageCountBatchID GrCCPathParser::closeCurrentBatch() {
    SkASSERT(!fInstanceBuffer);
    SkASSERT(!fCoverageCountBatches.empty());
    const auto& lastBatch = fCoverageCountBatches.back();
    const auto& lastScissorSubBatch = fScissorSubBatches[lastBatch.fEndScissorSubBatchIdx - 1];

    PrimitiveTallies batchTotalCounts = fTotalPrimitiveCounts[(int)ScissorMode::kNonScissored] -
                                        lastBatch.fEndNonScissorIndices;
    batchTotalCounts += fTotalPrimitiveCounts[(int)ScissorMode::kScissored] -
                        lastScissorSubBatch.fEndPrimitiveIndices;

    fCoverageCountBatches.push_back() = {
        fTotalPrimitiveCounts[(int)ScissorMode::kNonScissored],
        fScissorSubBatches.count(),
        batchTotalCounts
    };

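    // The batch needs at most one mesh for its non-scissored instances, plus one mesh per scissor
    // sub-batch added since the previous batch was closed.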
    int maxMeshes = 1 + fScissorSubBatches.count() - lastBatch.fEndScissorSubBatchIdx;
    fMaxMeshesPerDraw = SkTMax(fMaxMeshesPerDraw, maxMeshes);

    return fCoverageCountBatches.count() - 1;
}

// Emits a contour's triangle fan.
//
// Classic Redbook fanning would be the triangles: [0 1 2], [0 2 3], ..., [0 n-2 n-1].
//
// This function emits the triangle: [0 n/3 n*2/3], and then recurses on all three sides. The
// advantage to this approach is that for a convex-ish contour, it generates larger triangles.
// Classic fanning tends to generate long, skinny triangles, which are expensive to draw since they
// have a longer perimeter to rasterize and antialias.
//
// The indices array indexes the fan's points (think: glDrawElements), and must have at least
// log3(indexCount) extra elements past the end for this method to use as scratch space.
//
// Returns the next triangle instance after the final one emitted.
static TriPointInstance* emit_recursive_fan(const SkTArray<SkPoint, true>& pts,
                                            SkTArray<int32_t, true>& indices, int firstIndex,
                                            int indexCount, const Sk2f& atlasOffset,
                                            TriPointInstance out[]) {
    if (indexCount < 3) {
        return out;
    }

    int32_t oneThirdCount = indexCount / 3;
    int32_t twoThirdsCount = (2 * indexCount) / 3;
    out++->set(pts[indices[firstIndex]], pts[indices[firstIndex + oneThirdCount]],
               pts[indices[firstIndex + twoThirdsCount]], atlasOffset);

    out = emit_recursive_fan(pts, indices, firstIndex, oneThirdCount + 1, atlasOffset, out);
    out = emit_recursive_fan(pts, indices, firstIndex + oneThirdCount,
                             twoThirdsCount - oneThirdCount + 1, atlasOffset, out);

310 int32_t oldValue = indices[endIndex];
311 indices[endIndex] = indices[firstIndex];
312 out = emit_recursive_fan(pts, indices, firstIndex + twoThirdsCount,
313 indexCount - twoThirdsCount + 1, atlasOffset, out);
314 indices[endIndex] = oldValue;
315
316 return out;
317}
318
Chris Dalton4138c972018-02-07 13:02:58 -0700319static void emit_tessellated_fan(const GrTessellator::WindingVertex* vertices, int numVertices,
320 const Sk2f& atlasOffset, TriPointInstance* triPointInstanceData,
321 QuadPointInstance* quadPointInstanceData,
322 GrCCGeometry::PrimitiveTallies* indices) {
323 for (int i = 0; i < numVertices; i += 3) {
324 if (1 == abs(vertices[i].fWinding)) {
325 triPointInstanceData[indices->fTriangles++].set(vertices[i].fPos, vertices[i + 1].fPos,
326 vertices[i + 2].fPos, atlasOffset);
327 } else {
328 quadPointInstanceData[indices->fWoundTriangles++].set(
329 vertices[i].fPos, vertices[i+1].fPos, vertices[i + 2].fPos, atlasOffset,
330 static_cast<float>(vertices[i].fWinding));
331 }
332 }
333}
334
Chris Dalton9ca27842018-01-18 12:24:50 -0700335bool GrCCPathParser::finalize(GrOnFlushResourceProvider* onFlushRP) {
336 SkASSERT(!fParsingPath); // Call saveParsedPath() or discardParsedPath().
337 SkASSERT(fCoverageCountBatches.back().fEndNonScissorIndices == // Call closeCurrentBatch().
338 fTotalPrimitiveCounts[(int)ScissorMode::kNonScissored]);
339 SkASSERT(fCoverageCountBatches.back().fEndScissorSubBatchIdx == fScissorSubBatches.count());
340
341 // Here we build a single instance buffer to share with every internal batch.
342 //
343 // CCPR processs 3 different types of primitives: triangles, quadratics, cubics. Each primitive
344 // type is further divided into instances that require a scissor and those that don't. This
345 // leaves us with 3*2 = 6 independent instance arrays to build for the GPU.
346 //
347 // Rather than place each instance array in its own GPU buffer, we allocate a single
348 // megabuffer and lay them all out side-by-side. We can offset the "baseInstance" parameter in
349 // our draw calls to direct the GPU to the applicable elements within a given array.
350 //
351 // We already know how big to make each of the 6 arrays from fTotalPrimitiveCounts, so layout is
352 // straightforward. Start with triangles and quadratics. They both view the instance buffer as
Chris Dalton4138c972018-02-07 13:02:58 -0700353 // an array of TriPointInstance[], so we can begin at zero and lay them out one after the other.
Chris Dalton9ca27842018-01-18 12:24:50 -0700354 fBaseInstances[0].fTriangles = 0;
355 fBaseInstances[1].fTriangles = fBaseInstances[0].fTriangles +
356 fTotalPrimitiveCounts[0].fTriangles;
357 fBaseInstances[0].fQuadratics = fBaseInstances[1].fTriangles +
358 fTotalPrimitiveCounts[1].fTriangles;
359 fBaseInstances[1].fQuadratics = fBaseInstances[0].fQuadratics +
360 fTotalPrimitiveCounts[0].fQuadratics;
361 int triEndIdx = fBaseInstances[1].fQuadratics + fTotalPrimitiveCounts[1].fQuadratics;
362
Chris Dalton4138c972018-02-07 13:02:58 -0700363 // Wound triangles and cubics both view the same instance buffer as an array of
364 // QuadPointInstance[]. So, reinterpreting the instance data as QuadPointInstance[], we start
365 // them on the first index that will not overwrite previous TriPointInstance data.
366 int quadBaseIdx =
367 GR_CT_DIV_ROUND_UP(triEndIdx * sizeof(TriPointInstance), sizeof(QuadPointInstance));
368 fBaseInstances[0].fWoundTriangles = quadBaseIdx;
369 fBaseInstances[1].fWoundTriangles = fBaseInstances[0].fWoundTriangles +
370 fTotalPrimitiveCounts[0].fWoundTriangles;
371 fBaseInstances[0].fCubics = fBaseInstances[1].fWoundTriangles +
372 fTotalPrimitiveCounts[1].fWoundTriangles;
Chris Dalton9ca27842018-01-18 12:24:50 -0700373 fBaseInstances[1].fCubics = fBaseInstances[0].fCubics + fTotalPrimitiveCounts[0].fCubics;
Chris Dalton4138c972018-02-07 13:02:58 -0700374 int quadEndIdx = fBaseInstances[1].fCubics + fTotalPrimitiveCounts[1].fCubics;
Chris Dalton9ca27842018-01-18 12:24:50 -0700375
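    // The buffer is allocated in units of QuadPointInstance; since quadBaseIdx was rounded up
    // above, quadEndIdx * sizeof(QuadPointInstance) is also large enough to hold the
    // TriPointInstance section at the front.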
    fInstanceBuffer = onFlushRP->makeBuffer(kVertex_GrBufferType,
                                            quadEndIdx * sizeof(QuadPointInstance));
    if (!fInstanceBuffer) {
        return false;
    }

    TriPointInstance* triPointInstanceData = static_cast<TriPointInstance*>(fInstanceBuffer->map());
    QuadPointInstance* quadPointInstanceData =
            reinterpret_cast<QuadPointInstance*>(triPointInstanceData);
    SkASSERT(quadPointInstanceData);

    PathInfo* nextPathInfo = fPathsInfo.begin();
    float atlasOffsetX = 0.0, atlasOffsetY = 0.0;
    Sk2f atlasOffset;
    PrimitiveTallies instanceIndices[2] = {fBaseInstances[0], fBaseInstances[1]};
    PrimitiveTallies* currIndices = nullptr;
    SkSTArray<256, int32_t, true> currFan;
    bool currFanIsTessellated = false;

    const SkTArray<SkPoint, true>& pts = fGeometry.points();
    int ptsIdx = -1;
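    // ptsIdx points at the most recent on-curve point in fGeometry's point array; it starts at -1
    // so that kBeginContour's increment below lands on the first point.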

    // Expand the ccpr verbs into GPU instance buffers.
    for (GrCCGeometry::Verb verb : fGeometry.verbs()) {
        switch (verb) {
            case GrCCGeometry::Verb::kBeginPath:
                SkASSERT(currFan.empty());
                currIndices = &instanceIndices[(int)nextPathInfo->fScissorMode];
                atlasOffsetX = static_cast<float>(nextPathInfo->fAtlasOffsetX);
                atlasOffsetY = static_cast<float>(nextPathInfo->fAtlasOffsetY);
                atlasOffset = {atlasOffsetX, atlasOffsetY};
                currFanIsTessellated = nextPathInfo->fFanTessellation.get();
                if (currFanIsTessellated) {
                    emit_tessellated_fan(nextPathInfo->fFanTessellation.get(),
                                         nextPathInfo->fFanTessellationCount, atlasOffset,
                                         triPointInstanceData, quadPointInstanceData, currIndices);
                }
                ++nextPathInfo;
                continue;

            case GrCCGeometry::Verb::kBeginContour:
                SkASSERT(currFan.empty());
                ++ptsIdx;
                if (!currFanIsTessellated) {
                    currFan.push_back(ptsIdx);
                }
                continue;

            case GrCCGeometry::Verb::kLineTo:
                ++ptsIdx;
                if (!currFanIsTessellated) {
                    SkASSERT(!currFan.empty());
                    currFan.push_back(ptsIdx);
                }
                continue;

            case GrCCGeometry::Verb::kMonotonicQuadraticTo:
                triPointInstanceData[currIndices->fQuadratics++].set(&pts[ptsIdx], atlasOffset);
                ptsIdx += 2;
                if (!currFanIsTessellated) {
                    SkASSERT(!currFan.empty());
                    currFan.push_back(ptsIdx);
                }
                continue;

            case GrCCGeometry::Verb::kMonotonicCubicTo:
                quadPointInstanceData[currIndices->fCubics++].set(&pts[ptsIdx], atlasOffsetX,
                                                                  atlasOffsetY);
                ptsIdx += 3;
                if (!currFanIsTessellated) {
                    SkASSERT(!currFan.empty());
                    currFan.push_back(ptsIdx);
                }
                continue;

            case GrCCGeometry::Verb::kEndClosedContour: // endPt == startPt.
                if (!currFanIsTessellated) {
                    SkASSERT(!currFan.empty());
                    currFan.pop_back();
                }
                // fallthru.
            case GrCCGeometry::Verb::kEndOpenContour: // endPt != startPt.
                SkASSERT(!currFanIsTessellated || currFan.empty());
                if (!currFanIsTessellated && currFan.count() >= 3) {
                    int fanSize = currFan.count();
                    // Reserve space for emit_recursive_fan. Technically this can grow to
                    // fanSize + log3(fanSize), but we approximate with log2.
                    currFan.push_back_n(SkNextLog2(fanSize));
                    SkDEBUGCODE(TriPointInstance* end =)
                    emit_recursive_fan(pts, currFan, 0, fanSize, atlasOffset,
                                       triPointInstanceData + currIndices->fTriangles);
                    currIndices->fTriangles += fanSize - 2;
                    SkASSERT(triPointInstanceData + currIndices->fTriangles == end);
                }
                currFan.reset();
                continue;
        }
    }

    fInstanceBuffer->unmap();

    SkASSERT(nextPathInfo == fPathsInfo.end());
    SkASSERT(ptsIdx == pts.count() - 1);
    SkASSERT(instanceIndices[0].fTriangles == fBaseInstances[1].fTriangles);
    SkASSERT(instanceIndices[1].fTriangles == fBaseInstances[0].fQuadratics);
    SkASSERT(instanceIndices[0].fQuadratics == fBaseInstances[1].fQuadratics);
    SkASSERT(instanceIndices[1].fQuadratics == triEndIdx);
    SkASSERT(instanceIndices[0].fWoundTriangles == fBaseInstances[1].fWoundTriangles);
    SkASSERT(instanceIndices[1].fWoundTriangles == fBaseInstances[0].fCubics);
    SkASSERT(instanceIndices[0].fCubics == fBaseInstances[1].fCubics);
    SkASSERT(instanceIndices[1].fCubics == quadEndIdx);

    fMeshesScratchBuffer.reserve(fMaxMeshesPerDraw);
    fDynamicStatesScratchBuffer.reserve(fMaxMeshesPerDraw);

    return true;
}

void GrCCPathParser::drawCoverageCount(GrOpFlushState* flushState, CoverageCountBatchID batchID,
                                       const SkIRect& drawBounds) const {
    using RenderPass = GrCCCoverageProcessor::RenderPass;
    using WindMethod = GrCCCoverageProcessor::WindMethod;

    SkASSERT(fInstanceBuffer);

    const PrimitiveTallies& batchTotalCounts = fCoverageCountBatches[batchID].fTotalPrimitiveCounts;

    GrPipeline pipeline(flushState->drawOpArgs().fProxy, GrPipeline::ScissorState::kEnabled,
                        SkBlendMode::kPlus);

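    // Each primitive's coverage count is accumulated additively into the atlas, which is what the
    // kPlus blend mode above provides.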
    if (batchTotalCounts.fTriangles) {
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kTriangleHulls,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fTriangles, drawBounds);
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kTriangleEdges,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fTriangles,
                             drawBounds); // Might get skipped.
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kTriangleCorners,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fTriangles, drawBounds);
    }

    if (batchTotalCounts.fWoundTriangles) {
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kTriangleHulls,
                             WindMethod::kInstanceData, &PrimitiveTallies::fWoundTriangles,
                             drawBounds);
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kTriangleEdges,
                             WindMethod::kInstanceData, &PrimitiveTallies::fWoundTriangles,
                             drawBounds); // Might get skipped.
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kTriangleCorners,
                             WindMethod::kInstanceData, &PrimitiveTallies::fWoundTriangles,
                             drawBounds);
    }

    if (batchTotalCounts.fQuadratics) {
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kQuadraticHulls,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fQuadratics, drawBounds);
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kQuadraticCorners,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fQuadratics, drawBounds);
    }

    if (batchTotalCounts.fCubics) {
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kCubicHulls,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fCubics, drawBounds);
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kCubicCorners,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fCubics, drawBounds);
    }
}

void GrCCPathParser::drawRenderPass(GrOpFlushState* flushState, const GrPipeline& pipeline,
                                    CoverageCountBatchID batchID,
                                    GrCCCoverageProcessor::RenderPass renderPass,
                                    GrCCCoverageProcessor::WindMethod windMethod,
                                    int PrimitiveTallies::*instanceType,
                                    const SkIRect& drawBounds) const {
    SkASSERT(pipeline.getScissorState().enabled());

    if (!GrCCCoverageProcessor::DoesRenderPass(renderPass, flushState->caps())) {
        return;
    }

    // Don't call reset(), as that also resets the reserve count.
    fMeshesScratchBuffer.pop_back_n(fMeshesScratchBuffer.count());
    fDynamicStatesScratchBuffer.pop_back_n(fDynamicStatesScratchBuffer.count());

    GrCCCoverageProcessor proc(flushState->resourceProvider(), renderPass, windMethod);

    SkASSERT(batchID > 0);
    SkASSERT(batchID < fCoverageCountBatches.count());
    const CoverageCountBatch& previousBatch = fCoverageCountBatches[batchID - 1];
    const CoverageCountBatch& batch = fCoverageCountBatches[batchID];
    SkDEBUGCODE(int totalInstanceCount = 0);

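    // Append the non-scissored instances first. The pipeline always has the scissor test enabled,
    // so their dynamic scissor rect is simply the full draw bounds.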
    if (int instanceCount = batch.fEndNonScissorIndices.*instanceType -
                            previousBatch.fEndNonScissorIndices.*instanceType) {
        SkASSERT(instanceCount > 0);
        int baseInstance = fBaseInstances[(int)ScissorMode::kNonScissored].*instanceType +
                           previousBatch.fEndNonScissorIndices.*instanceType;
        proc.appendMesh(fInstanceBuffer.get(), instanceCount, baseInstance, &fMeshesScratchBuffer);
        fDynamicStatesScratchBuffer.push_back().fScissorRect.setXYWH(0, 0, drawBounds.width(),
                                                                     drawBounds.height());
        SkDEBUGCODE(totalInstanceCount += instanceCount);
    }

    SkASSERT(previousBatch.fEndScissorSubBatchIdx > 0);
    SkASSERT(batch.fEndScissorSubBatchIdx <= fScissorSubBatches.count());
    int baseScissorInstance = fBaseInstances[(int)ScissorMode::kScissored].*instanceType;
    for (int i = previousBatch.fEndScissorSubBatchIdx; i < batch.fEndScissorSubBatchIdx; ++i) {
        const ScissorSubBatch& previousSubBatch = fScissorSubBatches[i - 1];
        const ScissorSubBatch& scissorSubBatch = fScissorSubBatches[i];
        int startIndex = previousSubBatch.fEndPrimitiveIndices.*instanceType;
        int instanceCount = scissorSubBatch.fEndPrimitiveIndices.*instanceType - startIndex;
        if (!instanceCount) {
            continue;
        }
        SkASSERT(instanceCount > 0);
        proc.appendMesh(fInstanceBuffer.get(), instanceCount,
                        baseScissorInstance + startIndex, &fMeshesScratchBuffer);
        fDynamicStatesScratchBuffer.push_back().fScissorRect = scissorSubBatch.fScissor;
        SkDEBUGCODE(totalInstanceCount += instanceCount);
    }

    SkASSERT(fMeshesScratchBuffer.count() == fDynamicStatesScratchBuffer.count());
    SkASSERT(fMeshesScratchBuffer.count() <= fMaxMeshesPerDraw);
    SkASSERT(totalInstanceCount == batch.fTotalPrimitiveCounts.*instanceType);

    if (!fMeshesScratchBuffer.empty()) {
        SkASSERT(flushState->rtCommandBuffer());
        flushState->rtCommandBuffer()->draw(pipeline, proc, fMeshesScratchBuffer.begin(),
                                            fDynamicStatesScratchBuffer.begin(),
                                            fMeshesScratchBuffer.count(), SkRect::Make(drawBounds));
    }
}