/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPathParser.h"

#include "GrCaps.h"
#include "GrGpuCommandBuffer.h"
#include "GrOnFlushResourceProvider.h"
#include "GrOpFlushState.h"
#include "SkMathPriv.h"
#include "SkPath.h"
#include "SkPathPriv.h"
#include "SkPoint.h"
#include "ccpr/GrCCGeometry.h"
#include <stdlib.h>

using TriPointInstance = GrCCCoverageProcessor::TriPointInstance;
using QuadPointInstance = GrCCCoverageProcessor::QuadPointInstance;

GrCCPathParser::GrCCPathParser(int maxTotalPaths, int maxPathPoints, int numSkPoints,
                               int numSkVerbs)
        : fLocalDevPtsBuffer(maxPathPoints + 1)  // Overallocate by one point to accommodate
                                                 // overflow with Sk4f. (See parsePath.)
        , fGeometry(numSkPoints, numSkVerbs)
        , fPathsInfo(maxTotalPaths)
        , fScissorSubBatches(maxTotalPaths)
        , fTotalPrimitiveCounts{PrimitiveTallies(), PrimitiveTallies()} {
    // Batches decide what to draw by looking at where the previous one ended. Define initial
    // batches that "end" at the beginning of the data. These will not be drawn, but will only be
    // read by the first actual batch.
    fScissorSubBatches.push_back() = {PrimitiveTallies(), SkIRect::MakeEmpty()};
    fCoverageCountBatches.push_back() = {PrimitiveTallies(), fScissorSubBatches.count(),
                                         PrimitiveTallies()};
}

void GrCCPathParser::parsePath(const SkMatrix& m, const SkPath& path, SkRect* devBounds,
                               SkRect* devBounds45) {
    const SkPoint* pts = SkPathPriv::PointData(path);
    int numPts = path.countPoints();
    SkASSERT(numPts + 1 <= fLocalDevPtsBuffer.count());

    if (!numPts) {
        devBounds->setEmpty();
        devBounds45->setEmpty();
        this->parsePath(path, nullptr);
        return;
    }

    // m45 transforms path points into "45 degree" device space. A bounding box in this space gives
    // the circumscribing octagon's diagonals. We could use SK_ScalarRoot2Over2, but an orthonormal
    // transform is not necessary as long as the shader uses the correct inverse.
    SkMatrix m45;
    m45.setSinCos(1, 1);
    m45.preConcat(m);

    // X,Y,T are two parallel view matrices that accumulate two bounding boxes as they map points:
    // device-space bounds and "45 degree" device-space bounds (| 1 -1 | * devCoords).
    //                                                          | 1  1 |
    Sk4f X = Sk4f(m.getScaleX(), m.getSkewY(), m45.getScaleX(), m45.getSkewY());
    Sk4f Y = Sk4f(m.getSkewX(), m.getScaleY(), m45.getSkewX(), m45.getScaleY());
    Sk4f T = Sk4f(m.getTranslateX(), m.getTranslateY(), m45.getTranslateX(), m45.getTranslateY());

    // Map the path's points to device space and accumulate bounding boxes.
    Sk4f devPt = SkNx_fma(Y, Sk4f(pts[0].y()), T);
    devPt = SkNx_fma(X, Sk4f(pts[0].x()), devPt);
    Sk4f topLeft = devPt;
    Sk4f bottomRight = devPt;

    // Store all 4 values [dev.x, dev.y, dev45.x, dev45.y]. We are only interested in the first two,
    // and will overwrite [dev45.x, dev45.y] with the next point. This is why the dst buffer must
    // be at least one larger than the number of points.
    devPt.store(&fLocalDevPtsBuffer[0]);

    for (int i = 1; i < numPts; ++i) {
        devPt = SkNx_fma(Y, Sk4f(pts[i].y()), T);
        devPt = SkNx_fma(X, Sk4f(pts[i].x()), devPt);
        topLeft = Sk4f::Min(topLeft, devPt);
        bottomRight = Sk4f::Max(bottomRight, devPt);
        devPt.store(&fLocalDevPtsBuffer[i]);
    }

    SkPoint topLeftPts[2], bottomRightPts[2];
    topLeft.store(topLeftPts);
    bottomRight.store(bottomRightPts);
    devBounds->setLTRB(topLeftPts[0].x(), topLeftPts[0].y(), bottomRightPts[0].x(),
                       bottomRightPts[0].y());
    devBounds45->setLTRB(topLeftPts[1].x(), topLeftPts[1].y(), bottomRightPts[1].x(),
                         bottomRightPts[1].y());

    this->parsePath(path, fLocalDevPtsBuffer.get());
}

void GrCCPathParser::parseDeviceSpacePath(const SkPath& deviceSpacePath) {
    this->parsePath(deviceSpacePath, SkPathPriv::PointData(deviceSpacePath));
}

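// Shared back end for parsePath() and parseDeviceSpacePath(): appends the path's verbs and
// device-space points to fGeometry and begins tallying the new path's primitive counts.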
void GrCCPathParser::parsePath(const SkPath& path, const SkPoint* deviceSpacePts) {
    SkASSERT(!fInstanceBuffer);  // Can't call after finalize().
    SkASSERT(!fParsingPath);  // Call saveParsedPath() or discardParsedPath() for the last one first.
    SkDEBUGCODE(fParsingPath = true);
    SkASSERT(path.isEmpty() || deviceSpacePts);

    fCurrPathPointsIdx = fGeometry.points().count();
    fCurrPathVerbsIdx = fGeometry.verbs().count();
    fCurrPathPrimitiveCounts = PrimitiveTallies();

    fGeometry.beginPath();

    if (path.isEmpty()) {
        return;
    }

    const float* conicWeights = SkPathPriv::ConicWeightData(path);
    int ptsIdx = 0;
    int conicWeightsIdx = 0;
    bool insideContour = false;

    for (SkPath::Verb verb : SkPathPriv::Verbs(path)) {
        switch (verb) {
            case SkPath::kMove_Verb:
                this->endContourIfNeeded(insideContour);
                fGeometry.beginContour(deviceSpacePts[ptsIdx]);
                ++ptsIdx;
                insideContour = true;
                continue;
            case SkPath::kClose_Verb:
                this->endContourIfNeeded(insideContour);
                insideContour = false;
                continue;
            case SkPath::kLine_Verb:
                fGeometry.lineTo(&deviceSpacePts[ptsIdx - 1]);
                ++ptsIdx;
                continue;
            case SkPath::kQuad_Verb:
                fGeometry.quadraticTo(&deviceSpacePts[ptsIdx - 1]);
                ptsIdx += 2;
                continue;
            case SkPath::kCubic_Verb:
                fGeometry.cubicTo(&deviceSpacePts[ptsIdx - 1]);
                ptsIdx += 3;
                continue;
            case SkPath::kConic_Verb:
                fGeometry.conicTo(&deviceSpacePts[ptsIdx - 1], conicWeights[conicWeightsIdx]);
                ptsIdx += 2;
                ++conicWeightsIdx;
                continue;
            default:
                SK_ABORT("Unexpected path verb.");
        }
    }
    SkASSERT(ptsIdx == path.countPoints());
    SkASSERT(conicWeightsIdx == SkPathPriv::ConicWeightCnt(path));

    this->endContourIfNeeded(insideContour);
}

void GrCCPathParser::endContourIfNeeded(bool insideContour) {
    if (insideContour) {
        fCurrPathPrimitiveCounts += fGeometry.endContour();
    }
}

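// Commits the most recently parsed path, recording its scissor mode and atlas offset. If the path
// is large and simple enough, its fan is pre-tessellated here with GrTessellator in order to
// reduce overdraw when the coverage counts are drawn.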
void GrCCPathParser::saveParsedPath(ScissorMode scissorMode, const SkIRect& clippedDevIBounds,
                                    int16_t atlasOffsetX, int16_t atlasOffsetY) {
    SkASSERT(fParsingPath);

    fPathsInfo.emplace_back(scissorMode, atlasOffsetX, atlasOffsetY);

    // Tessellate fans from very large and/or simple paths, in order to reduce overdraw.
    int numVerbs = fGeometry.verbs().count() - fCurrPathVerbsIdx - 1;
    int64_t tessellationWork = (int64_t)numVerbs * (32 - SkCLZ(numVerbs));  // N log N.
    int64_t fanningWork = (int64_t)clippedDevIBounds.height() * clippedDevIBounds.width();
    if (tessellationWork * (50*50) + (100*100) < fanningWork) {  // Don't tessellate under 100x100.
        fCurrPathPrimitiveCounts.fTriangles =
                fCurrPathPrimitiveCounts.fWeightedTriangles = 0;

        const SkTArray<GrCCGeometry::Verb, true>& verbs = fGeometry.verbs();
        const SkTArray<SkPoint, true>& pts = fGeometry.points();
        int ptsIdx = fCurrPathPointsIdx;

        // Build an SkPath of the Redbook fan. We use "winding" fill type right now because we are
        // producing a coverage count, and must fill in every region that has non-zero wind. The
        // path processor will convert coverage count to the appropriate fill type later.
        SkPath fan;
        fan.setFillType(SkPath::kWinding_FillType);
        SkASSERT(GrCCGeometry::Verb::kBeginPath == verbs[fCurrPathVerbsIdx]);
        for (int i = fCurrPathVerbsIdx + 1; i < fGeometry.verbs().count(); ++i) {
            switch (verbs[i]) {
                case GrCCGeometry::Verb::kBeginPath:
                    SK_ABORT("Invalid GrCCGeometry");
                    continue;

                case GrCCGeometry::Verb::kBeginContour:
                    fan.moveTo(pts[ptsIdx++]);
                    continue;

                case GrCCGeometry::Verb::kLineTo:
                    fan.lineTo(pts[ptsIdx++]);
                    continue;

                case GrCCGeometry::Verb::kMonotonicQuadraticTo:
                case GrCCGeometry::Verb::kMonotonicConicTo:
                    fan.lineTo(pts[ptsIdx + 1]);
                    ptsIdx += 2;
                    continue;

                case GrCCGeometry::Verb::kMonotonicCubicTo:
                    fan.lineTo(pts[ptsIdx + 2]);
                    ptsIdx += 3;
                    continue;

                case GrCCGeometry::Verb::kEndClosedContour:
                case GrCCGeometry::Verb::kEndOpenContour:
                    fan.close();
                    continue;
            }
        }
        GrTessellator::WindingVertex* vertices = nullptr;
        int count = GrTessellator::PathToVertices(fan, std::numeric_limits<float>::infinity(),
                                                  SkRect::Make(clippedDevIBounds), &vertices);
        SkASSERT(0 == count % 3);
        for (int i = 0; i < count; i += 3) {
            int tessWinding = vertices[i].fWinding;
            SkASSERT(tessWinding == vertices[i + 1].fWinding);
            SkASSERT(tessWinding == vertices[i + 2].fWinding);

            // Ensure this triangle's points actually wind in the same direction as tessWinding.
            // CCPR shaders use the sign of wind to determine which direction to bloat, so even for
            // "wound" triangles the winding sign and point ordering need to agree.
            float ax = vertices[i].fPos.fX - vertices[i + 1].fPos.fX;
            float ay = vertices[i].fPos.fY - vertices[i + 1].fPos.fY;
            float bx = vertices[i].fPos.fX - vertices[i + 2].fPos.fX;
            float by = vertices[i].fPos.fY - vertices[i + 2].fPos.fY;
            float wind = ax*by - ay*bx;
            if ((wind > 0) != (-tessWinding > 0)) {  // Tessellator has opposite winding sense.
                std::swap(vertices[i + 1].fPos, vertices[i + 2].fPos);
            }

            if (1 == abs(tessWinding)) {
                ++fCurrPathPrimitiveCounts.fTriangles;
            } else {
                ++fCurrPathPrimitiveCounts.fWeightedTriangles;
            }
        }

        fPathsInfo.back().adoptFanTessellation(vertices, count);
    }

    fTotalPrimitiveCounts[(int)scissorMode] += fCurrPathPrimitiveCounts;

    if (ScissorMode::kScissored == scissorMode) {
        fScissorSubBatches.push_back() = {fTotalPrimitiveCounts[(int)ScissorMode::kScissored],
                                          clippedDevIBounds.makeOffset(atlasOffsetX, atlasOffsetY)};
    }

    SkDEBUGCODE(fParsingPath = false);
}

void GrCCPathParser::discardParsedPath() {
    SkASSERT(fParsingPath);
    fGeometry.resize_back(fCurrPathPointsIdx, fCurrPathVerbsIdx);
    SkDEBUGCODE(fParsingPath = false);
}

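// Closes the current batch of paths and returns its ID. A batch records where it ends in the
// primitive tallies and scissor sub-batches; drawCoverageCount() later draws everything between
// the previous batch's end and this one's.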
GrCCPathParser::CoverageCountBatchID GrCCPathParser::closeCurrentBatch() {
    SkASSERT(!fInstanceBuffer);
    SkASSERT(!fCoverageCountBatches.empty());

    const auto& lastBatch = fCoverageCountBatches.back();
    int maxMeshes = 1 + fScissorSubBatches.count() - lastBatch.fEndScissorSubBatchIdx;
    fMaxMeshesPerDraw = SkTMax(fMaxMeshesPerDraw, maxMeshes);

    const auto& lastScissorSubBatch = fScissorSubBatches[lastBatch.fEndScissorSubBatchIdx - 1];
    PrimitiveTallies batchTotalCounts = fTotalPrimitiveCounts[(int)ScissorMode::kNonScissored] -
                                        lastBatch.fEndNonScissorIndices;
    batchTotalCounts += fTotalPrimitiveCounts[(int)ScissorMode::kScissored] -
                        lastScissorSubBatch.fEndPrimitiveIndices;

    // This will invalidate lastBatch.
    fCoverageCountBatches.push_back() = {
        fTotalPrimitiveCounts[(int)ScissorMode::kNonScissored],
        fScissorSubBatches.count(),
        batchTotalCounts
    };
    return fCoverageCountBatches.count() - 1;
}

// Emits a contour's triangle fan.
//
// Classic Redbook fanning would be the triangles: [0 1 2], [0 2 3], ..., [0 n-2 n-1].
//
// This function emits the triangle: [0 n/3 n*2/3], and then recurses on all three sides. The
// advantage to this approach is that for a convex-ish contour, it generates larger triangles.
// Classic fanning tends to generate long, skinny triangles, which are expensive to draw since they
// have a longer perimeter to rasterize and antialias.
//
// The indices array indexes the fan's points (think: glDrawElements), and must have at least log3
// elements past the end for this method to use as scratch space.
//
// Returns the next triangle instance after the final one emitted.
static TriPointInstance* emit_recursive_fan(const SkTArray<SkPoint, true>& pts,
                                            SkTArray<int32_t, true>& indices, int firstIndex,
                                            int indexCount, const Sk2f& atlasOffset,
                                            TriPointInstance out[]) {
    if (indexCount < 3) {
        return out;
    }

    int32_t oneThirdCount = indexCount / 3;
    int32_t twoThirdsCount = (2 * indexCount) / 3;
    out++->set(pts[indices[firstIndex]], pts[indices[firstIndex + oneThirdCount]],
               pts[indices[firstIndex + twoThirdsCount]], atlasOffset);

    out = emit_recursive_fan(pts, indices, firstIndex, oneThirdCount + 1, atlasOffset, out);
    out = emit_recursive_fan(pts, indices, firstIndex + oneThirdCount,
                             twoThirdsCount - oneThirdCount + 1, atlasOffset, out);

    int endIndex = firstIndex + indexCount;
    int32_t oldValue = indices[endIndex];
    indices[endIndex] = indices[firstIndex];
    out = emit_recursive_fan(pts, indices, firstIndex + twoThirdsCount,
                             indexCount - twoThirdsCount + 1, atlasOffset, out);
    indices[endIndex] = oldValue;

    return out;
}

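// Emits the triangles of a pre-tessellated fan (see GrTessellator::PathToVertices). Triangles with
// winding magnitude 1 become ordinary TriPointInstances; triangles with a larger winding number
// become QuadPointInstances that carry the winding value as a weight.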
static void emit_tessellated_fan(const GrTessellator::WindingVertex* vertices, int numVertices,
                                 const Sk2f& atlasOffset, TriPointInstance* triPointInstanceData,
                                 QuadPointInstance* quadPointInstanceData,
                                 GrCCGeometry::PrimitiveTallies* indices) {
    for (int i = 0; i < numVertices; i += 3) {
        if (1 == abs(vertices[i].fWinding)) {
            triPointInstanceData[indices->fTriangles++].set(vertices[i].fPos, vertices[i + 1].fPos,
                                                            vertices[i + 2].fPos, atlasOffset);
        } else {
            quadPointInstanceData[indices->fWeightedTriangles++].setW(
                    vertices[i].fPos, vertices[i + 1].fPos, vertices[i + 2].fPos, atlasOffset,
                    static_cast<float>(abs(vertices[i].fWinding)));
        }
    }
}

bool GrCCPathParser::finalize(GrOnFlushResourceProvider* onFlushRP) {
    SkASSERT(!fParsingPath);  // Call saveParsedPath() or discardParsedPath().
    SkASSERT(fCoverageCountBatches.back().fEndNonScissorIndices ==  // Call closeCurrentBatch().
             fTotalPrimitiveCounts[(int)ScissorMode::kNonScissored]);
    SkASSERT(fCoverageCountBatches.back().fEndScissorSubBatchIdx == fScissorSubBatches.count());

    // Here we build a single instance buffer to share with every internal batch.
    //
    // CCPR processes several different primitive types: triangles, weighted triangles, quadratics,
    // cubics, and conics. Each primitive type is further divided into instances that require a
    // scissor and those that don't. This leaves us with 5*2 = 10 independent instance arrays to
    // build for the GPU.
    //
    // Rather than place each instance array in its own GPU buffer, we allocate a single
    // megabuffer and lay them all out side-by-side. We can offset the "baseInstance" parameter in
    // our draw calls to direct the GPU to the applicable elements within a given array.
    //
    // We already know how big to make each array from fTotalPrimitiveCounts, so layout is
    // straightforward. Start with triangles and quadratics. They both view the instance buffer as
    // an array of TriPointInstance[], so we can begin at zero and lay them out one after the other.
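    //
    // Resulting layout of the megabuffer, in instance indices (each primitive type stores its
    // non-scissored instances first, followed by its scissored ones):
    //
    //   As TriPointInstance[]:  [ triangles | quadratics | ...                                  ]
    //   As QuadPointInstance[]: [ ...skip past the above | weighted triangles | cubics | conics ]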
    fBaseInstances[0].fTriangles = 0;
    fBaseInstances[1].fTriangles = fBaseInstances[0].fTriangles +
                                   fTotalPrimitiveCounts[0].fTriangles;
    fBaseInstances[0].fQuadratics = fBaseInstances[1].fTriangles +
                                    fTotalPrimitiveCounts[1].fTriangles;
    fBaseInstances[1].fQuadratics = fBaseInstances[0].fQuadratics +
                                    fTotalPrimitiveCounts[0].fQuadratics;
    int triEndIdx = fBaseInstances[1].fQuadratics + fTotalPrimitiveCounts[1].fQuadratics;

    // Wound triangles and cubics both view the same instance buffer as an array of
    // QuadPointInstance[]. So, reinterpreting the instance data as QuadPointInstance[], we start
    // them on the first index that will not overwrite previous TriPointInstance data.
    int quadBaseIdx =
            GR_CT_DIV_ROUND_UP(triEndIdx * sizeof(TriPointInstance), sizeof(QuadPointInstance));
    fBaseInstances[0].fWeightedTriangles = quadBaseIdx;
    fBaseInstances[1].fWeightedTriangles = fBaseInstances[0].fWeightedTriangles +
                                           fTotalPrimitiveCounts[0].fWeightedTriangles;
    fBaseInstances[0].fCubics = fBaseInstances[1].fWeightedTriangles +
                                fTotalPrimitiveCounts[1].fWeightedTriangles;
    fBaseInstances[1].fCubics = fBaseInstances[0].fCubics + fTotalPrimitiveCounts[0].fCubics;
    fBaseInstances[0].fConics = fBaseInstances[1].fCubics + fTotalPrimitiveCounts[1].fCubics;
    fBaseInstances[1].fConics = fBaseInstances[0].fConics + fTotalPrimitiveCounts[0].fConics;
    int quadEndIdx = fBaseInstances[1].fConics + fTotalPrimitiveCounts[1].fConics;

    fInstanceBuffer = onFlushRP->makeBuffer(kVertex_GrBufferType,
                                            quadEndIdx * sizeof(QuadPointInstance));
    if (!fInstanceBuffer) {
        return false;
    }

    TriPointInstance* triPointInstanceData = static_cast<TriPointInstance*>(fInstanceBuffer->map());
    QuadPointInstance* quadPointInstanceData =
            reinterpret_cast<QuadPointInstance*>(triPointInstanceData);
    SkASSERT(quadPointInstanceData);

    PathInfo* nextPathInfo = fPathsInfo.begin();
    float atlasOffsetX = 0.0, atlasOffsetY = 0.0;
    Sk2f atlasOffset;
    PrimitiveTallies instanceIndices[2] = {fBaseInstances[0], fBaseInstances[1]};
    PrimitiveTallies* currIndices = nullptr;
    SkSTArray<256, int32_t, true> currFan;
    bool currFanIsTessellated = false;

    const SkTArray<SkPoint, true>& pts = fGeometry.points();
    int ptsIdx = -1;
    int nextConicWeightIdx = 0;

    // Expand the ccpr verbs into GPU instance buffers.
    for (GrCCGeometry::Verb verb : fGeometry.verbs()) {
        switch (verb) {
            case GrCCGeometry::Verb::kBeginPath:
                SkASSERT(currFan.empty());
                currIndices = &instanceIndices[(int)nextPathInfo->scissorMode()];
                atlasOffsetX = static_cast<float>(nextPathInfo->atlasOffsetX());
                atlasOffsetY = static_cast<float>(nextPathInfo->atlasOffsetY());
                atlasOffset = {atlasOffsetX, atlasOffsetY};
                currFanIsTessellated = nextPathInfo->hasFanTessellation();
                if (currFanIsTessellated) {
                    emit_tessellated_fan(nextPathInfo->fanTessellation(),
                                         nextPathInfo->fanTessellationCount(), atlasOffset,
                                         triPointInstanceData, quadPointInstanceData, currIndices);
                }
                ++nextPathInfo;
                continue;

            case GrCCGeometry::Verb::kBeginContour:
                SkASSERT(currFan.empty());
                ++ptsIdx;
                if (!currFanIsTessellated) {
                    currFan.push_back(ptsIdx);
                }
                continue;

            case GrCCGeometry::Verb::kLineTo:
                ++ptsIdx;
                if (!currFanIsTessellated) {
                    SkASSERT(!currFan.empty());
                    currFan.push_back(ptsIdx);
                }
                continue;

            case GrCCGeometry::Verb::kMonotonicQuadraticTo:
                triPointInstanceData[currIndices->fQuadratics++].set(&pts[ptsIdx], atlasOffset);
                ptsIdx += 2;
                if (!currFanIsTessellated) {
                    SkASSERT(!currFan.empty());
                    currFan.push_back(ptsIdx);
                }
                continue;

            case GrCCGeometry::Verb::kMonotonicCubicTo:
                quadPointInstanceData[currIndices->fCubics++].set(&pts[ptsIdx], atlasOffsetX,
                                                                  atlasOffsetY);
                ptsIdx += 3;
                if (!currFanIsTessellated) {
                    SkASSERT(!currFan.empty());
                    currFan.push_back(ptsIdx);
                }
                continue;

            case GrCCGeometry::Verb::kMonotonicConicTo:
                quadPointInstanceData[currIndices->fConics++].setW(
                        &pts[ptsIdx], atlasOffset, fGeometry.getConicWeight(nextConicWeightIdx));
                ptsIdx += 2;
                ++nextConicWeightIdx;
                if (!currFanIsTessellated) {
                    SkASSERT(!currFan.empty());
                    currFan.push_back(ptsIdx);
                }
                continue;

            case GrCCGeometry::Verb::kEndClosedContour:  // endPt == startPt.
                if (!currFanIsTessellated) {
                    SkASSERT(!currFan.empty());
                    currFan.pop_back();
                }
                // fallthru.
            case GrCCGeometry::Verb::kEndOpenContour:  // endPt != startPt.
                SkASSERT(!currFanIsTessellated || currFan.empty());
                if (!currFanIsTessellated && currFan.count() >= 3) {
                    int fanSize = currFan.count();
                    // Reserve space for emit_recursive_fan. Technically this can grow to
                    // fanSize + log3(fanSize), but we approximate with log2.
                    currFan.push_back_n(SkNextLog2(fanSize));
                    SkDEBUGCODE(TriPointInstance* end =)
                            emit_recursive_fan(pts, currFan, 0, fanSize, atlasOffset,
                                               triPointInstanceData + currIndices->fTriangles);
                    currIndices->fTriangles += fanSize - 2;
                    SkASSERT(triPointInstanceData + currIndices->fTriangles == end);
                }
                currFan.reset();
                continue;
        }
    }

    fInstanceBuffer->unmap();

    SkASSERT(nextPathInfo == fPathsInfo.end());
    SkASSERT(ptsIdx == pts.count() - 1);
    SkASSERT(instanceIndices[0].fTriangles == fBaseInstances[1].fTriangles);
    SkASSERT(instanceIndices[1].fTriangles == fBaseInstances[0].fQuadratics);
    SkASSERT(instanceIndices[0].fQuadratics == fBaseInstances[1].fQuadratics);
    SkASSERT(instanceIndices[1].fQuadratics == triEndIdx);
    SkASSERT(instanceIndices[0].fWeightedTriangles == fBaseInstances[1].fWeightedTriangles);
    SkASSERT(instanceIndices[1].fWeightedTriangles == fBaseInstances[0].fCubics);
    SkASSERT(instanceIndices[0].fCubics == fBaseInstances[1].fCubics);
    SkASSERT(instanceIndices[1].fCubics == fBaseInstances[0].fConics);
    SkASSERT(instanceIndices[0].fConics == fBaseInstances[1].fConics);
    SkASSERT(instanceIndices[1].fConics == quadEndIdx);

    fMeshesScratchBuffer.reserve(fMaxMeshesPerDraw);
    fDynamicStatesScratchBuffer.reserve(fMaxMeshesPerDraw);

    return true;
}

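// Draws a batch's accumulated coverage counts into the currently-bound atlas render target, one
// primitive type at a time, blending with SkBlendMode::kPlus.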
void GrCCPathParser::drawCoverageCount(GrOpFlushState* flushState, CoverageCountBatchID batchID,
                                       const SkIRect& drawBounds) const {
    using PrimitiveType = GrCCCoverageProcessor::PrimitiveType;

    SkASSERT(fInstanceBuffer);

    const PrimitiveTallies& batchTotalCounts = fCoverageCountBatches[batchID].fTotalPrimitiveCounts;

    GrPipeline pipeline(flushState->drawOpArgs().fProxy, GrPipeline::ScissorState::kEnabled,
                        SkBlendMode::kPlus);

    if (batchTotalCounts.fTriangles) {
        this->drawPrimitives(flushState, pipeline, batchID, PrimitiveType::kTriangles,
                             &PrimitiveTallies::fTriangles, drawBounds);
    }

    if (batchTotalCounts.fWeightedTriangles) {
        this->drawPrimitives(flushState, pipeline, batchID, PrimitiveType::kWeightedTriangles,
                             &PrimitiveTallies::fWeightedTriangles, drawBounds);
    }

    if (batchTotalCounts.fQuadratics) {
        this->drawPrimitives(flushState, pipeline, batchID, PrimitiveType::kQuadratics,
                             &PrimitiveTallies::fQuadratics, drawBounds);
    }

    if (batchTotalCounts.fCubics) {
        this->drawPrimitives(flushState, pipeline, batchID, PrimitiveType::kCubics,
                             &PrimitiveTallies::fCubics, drawBounds);
    }

    if (batchTotalCounts.fConics) {
        this->drawPrimitives(flushState, pipeline, batchID, PrimitiveType::kConics,
                             &PrimitiveTallies::fConics, drawBounds);
    }
}

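// Draws one primitive type for a batch: a single mesh covers the non-scissored instances
// (scissored to the full drawBounds), followed by one mesh per scissor sub-batch, each with its
// own scissor rect.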
void GrCCPathParser::drawPrimitives(GrOpFlushState* flushState, const GrPipeline& pipeline,
                                    CoverageCountBatchID batchID,
                                    GrCCCoverageProcessor::PrimitiveType primitiveType,
                                    int PrimitiveTallies::*instanceType,
                                    const SkIRect& drawBounds) const {
    SkASSERT(pipeline.getScissorState().enabled());

    // Don't call reset(), as that also resets the reserve count.
    fMeshesScratchBuffer.pop_back_n(fMeshesScratchBuffer.count());
    fDynamicStatesScratchBuffer.pop_back_n(fDynamicStatesScratchBuffer.count());

    GrCCCoverageProcessor proc(flushState->resourceProvider(), primitiveType);

    SkASSERT(batchID > 0);
    SkASSERT(batchID < fCoverageCountBatches.count());
    const CoverageCountBatch& previousBatch = fCoverageCountBatches[batchID - 1];
    const CoverageCountBatch& batch = fCoverageCountBatches[batchID];
    SkDEBUGCODE(int totalInstanceCount = 0);

    if (int instanceCount = batch.fEndNonScissorIndices.*instanceType -
                            previousBatch.fEndNonScissorIndices.*instanceType) {
        SkASSERT(instanceCount > 0);
        int baseInstance = fBaseInstances[(int)ScissorMode::kNonScissored].*instanceType +
                           previousBatch.fEndNonScissorIndices.*instanceType;
        proc.appendMesh(fInstanceBuffer.get(), instanceCount, baseInstance, &fMeshesScratchBuffer);
        fDynamicStatesScratchBuffer.push_back().fScissorRect.setXYWH(0, 0, drawBounds.width(),
                                                                     drawBounds.height());
        SkDEBUGCODE(totalInstanceCount += instanceCount);
    }

    SkASSERT(previousBatch.fEndScissorSubBatchIdx > 0);
    SkASSERT(batch.fEndScissorSubBatchIdx <= fScissorSubBatches.count());
    int baseScissorInstance = fBaseInstances[(int)ScissorMode::kScissored].*instanceType;
    for (int i = previousBatch.fEndScissorSubBatchIdx; i < batch.fEndScissorSubBatchIdx; ++i) {
        const ScissorSubBatch& previousSubBatch = fScissorSubBatches[i - 1];
        const ScissorSubBatch& scissorSubBatch = fScissorSubBatches[i];
        int startIndex = previousSubBatch.fEndPrimitiveIndices.*instanceType;
        int instanceCount = scissorSubBatch.fEndPrimitiveIndices.*instanceType - startIndex;
        if (!instanceCount) {
            continue;
        }
        SkASSERT(instanceCount > 0);
        proc.appendMesh(fInstanceBuffer.get(), instanceCount,
                        baseScissorInstance + startIndex, &fMeshesScratchBuffer);
        fDynamicStatesScratchBuffer.push_back().fScissorRect = scissorSubBatch.fScissor;
        SkDEBUGCODE(totalInstanceCount += instanceCount);
    }

    SkASSERT(fMeshesScratchBuffer.count() == fDynamicStatesScratchBuffer.count());
    SkASSERT(fMeshesScratchBuffer.count() <= fMaxMeshesPerDraw);
    SkASSERT(totalInstanceCount == batch.fTotalPrimitiveCounts.*instanceType);

    if (!fMeshesScratchBuffer.empty()) {
        proc.draw(flushState, pipeline, fMeshesScratchBuffer.begin(),
                  fDynamicStatesScratchBuffer.begin(), fMeshesScratchBuffer.count(),
                  SkRect::Make(drawBounds));
    }
}