/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPathParser.h"

#include "GrCaps.h"
#include "GrGpuCommandBuffer.h"
#include "GrOnFlushResourceProvider.h"
#include "GrOpFlushState.h"
#include "SkMathPriv.h"
#include "SkPath.h"
#include "SkPathPriv.h"
#include "SkPoint.h"
#include "ccpr/GrCCGeometry.h"
#include <stdlib.h>

using TriPointInstance = GrCCCoverageProcessor::TriPointInstance;
using QuadPointInstance = GrCCCoverageProcessor::QuadPointInstance;

GrCCPathParser::GrCCPathParser(int maxTotalPaths, int maxPathPoints, int numSkPoints,
                               int numSkVerbs)
        : fLocalDevPtsBuffer(maxPathPoints + 1) // Overallocate by one point to accommodate
                                                // overflow with Sk4f. (See parsePath.)
        , fGeometry(numSkPoints, numSkVerbs)
        , fPathsInfo(maxTotalPaths)
        , fScissorSubBatches(maxTotalPaths)
        , fTotalPrimitiveCounts{PrimitiveTallies(), PrimitiveTallies()} {
    // Batches decide what to draw by looking where the previous one ended. Define initial batches
    // that "end" at the beginning of the data. These will not be drawn, but will only be read by
    // the first actual batch.
    fScissorSubBatches.push_back() = {PrimitiveTallies(), SkIRect::MakeEmpty()};
    fCoverageCountBatches.push_back() = {PrimitiveTallies(), fScissorSubBatches.count(),
                                         PrimitiveTallies()};
}

void GrCCPathParser::parsePath(const SkMatrix& m, const SkPath& path, SkRect* devBounds,
                               SkRect* devBounds45) {
    const SkPoint* pts = SkPathPriv::PointData(path);
    int numPts = path.countPoints();
    SkASSERT(numPts + 1 <= fLocalDevPtsBuffer.count());

    if (!numPts) {
        devBounds->setEmpty();
        devBounds45->setEmpty();
        this->parsePath(path, nullptr);
        return;
    }

    // m45 transforms path points into "45 degree" device space. A bounding box in this space gives
    // the circumscribing octagon's diagonals. We could use SK_ScalarRoot2Over2, but an orthonormal
    // transform is not necessary as long as the shader uses the correct inverse.
    SkMatrix m45;
    m45.setSinCos(1, 1);
    m45.preConcat(m);

    // X,Y,T are two parallel view matrices that accumulate two bounding boxes as they map points:
    // device-space bounds and "45 degree" device-space bounds (| 1 -1 | * devCoords).
    //                                                          | 1  1 |
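    // Each Sk4f lane is laid out as [dev.x, dev.y, dev45.x, dev45.y], so the fma pair below maps
    // one point through both view matrices at once.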
    Sk4f X = Sk4f(m.getScaleX(), m.getSkewY(), m45.getScaleX(), m45.getSkewY());
    Sk4f Y = Sk4f(m.getSkewX(), m.getScaleY(), m45.getSkewX(), m45.getScaleY());
    Sk4f T = Sk4f(m.getTranslateX(), m.getTranslateY(), m45.getTranslateX(), m45.getTranslateY());

    // Map the path's points to device space and accumulate bounding boxes.
    Sk4f devPt = SkNx_fma(Y, Sk4f(pts[0].y()), T);
    devPt = SkNx_fma(X, Sk4f(pts[0].x()), devPt);
    Sk4f topLeft = devPt;
    Sk4f bottomRight = devPt;

    // Store all 4 values [dev.x, dev.y, dev45.x, dev45.y]. We are only interested in the first two,
    // and will overwrite [dev45.x, dev45.y] with the next point. This is why the dst buffer must
    // be at least one larger than the number of points.
    devPt.store(&fLocalDevPtsBuffer[0]);

    for (int i = 1; i < numPts; ++i) {
        devPt = SkNx_fma(Y, Sk4f(pts[i].y()), T);
        devPt = SkNx_fma(X, Sk4f(pts[i].x()), devPt);
        topLeft = Sk4f::Min(topLeft, devPt);
        bottomRight = Sk4f::Max(bottomRight, devPt);
        devPt.store(&fLocalDevPtsBuffer[i]);
    }

    SkPoint topLeftPts[2], bottomRightPts[2];
    topLeft.store(topLeftPts);
    bottomRight.store(bottomRightPts);
    devBounds->setLTRB(topLeftPts[0].x(), topLeftPts[0].y(), bottomRightPts[0].x(),
                       bottomRightPts[0].y());
    devBounds45->setLTRB(topLeftPts[1].x(), topLeftPts[1].y(), bottomRightPts[1].x(),
                         bottomRightPts[1].y());

    this->parsePath(path, fLocalDevPtsBuffer.get());
}

void GrCCPathParser::parseDeviceSpacePath(const SkPath& deviceSpacePath) {
    this->parsePath(deviceSpacePath, SkPathPriv::PointData(deviceSpacePath));
}

void GrCCPathParser::parsePath(const SkPath& path, const SkPoint* deviceSpacePts) {
    SkASSERT(!fInstanceBuffer); // Can't call after finalize().
    SkASSERT(!fParsingPath); // Call saveParsedPath() or discardParsedPath() for the last one first.
    SkDEBUGCODE(fParsingPath = true);
    SkASSERT(path.isEmpty() || deviceSpacePts);

    fCurrPathPointsIdx = fGeometry.points().count();
    fCurrPathVerbsIdx = fGeometry.verbs().count();
    fCurrPathPrimitiveCounts = PrimitiveTallies();
    fCurrPathFillType = path.getFillType();

    fGeometry.beginPath();

    if (path.isEmpty()) {
        return;
    }

    int ptsIdx = 0;
    bool insideContour = false;

    for (SkPath::Verb verb : SkPathPriv::Verbs(path)) {
        switch (verb) {
            case SkPath::kMove_Verb:
                this->endContourIfNeeded(insideContour);
                fGeometry.beginContour(deviceSpacePts[ptsIdx]);
                ++ptsIdx;
                insideContour = true;
                continue;
            case SkPath::kClose_Verb:
                this->endContourIfNeeded(insideContour);
                insideContour = false;
                continue;
            case SkPath::kLine_Verb:
                fGeometry.lineTo(deviceSpacePts[ptsIdx]);
                ++ptsIdx;
                continue;
            case SkPath::kQuad_Verb:
                fGeometry.quadraticTo(deviceSpacePts[ptsIdx], deviceSpacePts[ptsIdx + 1]);
                ptsIdx += 2;
                continue;
            case SkPath::kCubic_Verb:
                fGeometry.cubicTo(deviceSpacePts[ptsIdx], deviceSpacePts[ptsIdx + 1],
                                  deviceSpacePts[ptsIdx + 2]);
                ptsIdx += 3;
                continue;
            case SkPath::kConic_Verb:
                SK_ABORT("Conics are not supported.");
            default:
                SK_ABORT("Unexpected path verb.");
        }
    }

    this->endContourIfNeeded(insideContour);
}

void GrCCPathParser::endContourIfNeeded(bool insideContour) {
    if (insideContour) {
        fCurrPathPrimitiveCounts += fGeometry.endContour();
    }
}

void GrCCPathParser::saveParsedPath(ScissorMode scissorMode, const SkIRect& clippedDevIBounds,
                                    int16_t atlasOffsetX, int16_t atlasOffsetY) {
    SkASSERT(fParsingPath);

    fPathsInfo.emplace_back(scissorMode, atlasOffsetX, atlasOffsetY);

    // Tessellate fans from very large and/or simple paths, in order to reduce overdraw.
    int numVerbs = fGeometry.verbs().count() - fCurrPathVerbsIdx - 1;
    int64_t tessellationWork = (int64_t)numVerbs * (32 - SkCLZ(numVerbs)); // N log N.
    int64_t fanningWork = (int64_t)clippedDevIBounds.height() * clippedDevIBounds.width();
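    // (Illustrative numbers: a path with 16 verbs has tessellationWork = 16*5 = 80, so it only
    // gets tessellated once its clipped bounds exceed roughly 80*2500 + 10000 = 210,000 px^2,
    // i.e. about 459x459.)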
    if (tessellationWork * (50*50) + (100*100) < fanningWork) { // Don't tessellate under 100x100.
        fCurrPathPrimitiveCounts.fTriangles =
                fCurrPathPrimitiveCounts.fWoundTriangles = 0;

        const SkTArray<GrCCGeometry::Verb, true>& verbs = fGeometry.verbs();
        const SkTArray<SkPoint, true>& pts = fGeometry.points();
        int ptsIdx = fCurrPathPointsIdx;

        // Build an SkPath of the Redbook fan.
        SkPath fan;
        fan.setFillType(fCurrPathFillType);
        SkASSERT(GrCCGeometry::Verb::kBeginPath == verbs[fCurrPathVerbsIdx]);
        for (int i = fCurrPathVerbsIdx + 1; i < fGeometry.verbs().count(); ++i) {
            switch (verbs[i]) {
                case GrCCGeometry::Verb::kBeginPath:
                    SK_ABORT("Invalid GrCCGeometry");
                    continue;

                case GrCCGeometry::Verb::kBeginContour:
                    fan.moveTo(pts[ptsIdx++]);
                    continue;

                case GrCCGeometry::Verb::kLineTo:
                    fan.lineTo(pts[ptsIdx++]);
                    continue;

                case GrCCGeometry::Verb::kMonotonicQuadraticTo:
                    fan.lineTo(pts[ptsIdx + 1]);
                    ptsIdx += 2;
                    continue;

                case GrCCGeometry::Verb::kMonotonicCubicTo:
                    fan.lineTo(pts[ptsIdx + 2]);
                    ptsIdx += 3;
                    continue;

                case GrCCGeometry::Verb::kEndClosedContour:
                case GrCCGeometry::Verb::kEndOpenContour:
                    fan.close();
                    continue;
            }
        }
        GrTessellator::WindingVertex* vertices = nullptr;
        int count = GrTessellator::PathToVertices(fan, std::numeric_limits<float>::infinity(),
                                                  SkRect::Make(clippedDevIBounds), &vertices);
        SkASSERT(0 == count % 3);
        for (int i = 0; i < count; i += 3) {
            SkASSERT(vertices[i].fWinding == vertices[i + 1].fWinding);
            SkASSERT(vertices[i].fWinding == vertices[i + 2].fWinding);
            if (1 == abs(vertices[i].fWinding)) {
                // Ensure this triangle's points actually wind in the same direction as fWinding.
                float ax = vertices[i].fPos.fX - vertices[i + 1].fPos.fX;
                float ay = vertices[i].fPos.fY - vertices[i + 1].fPos.fY;
                float bx = vertices[i].fPos.fX - vertices[i + 2].fPos.fX;
                float by = vertices[i].fPos.fY - vertices[i + 2].fPos.fY;
                float wind = ay*bx - ax*by;
                if ((wind > 0) != (vertices[i].fWinding > 0)) {
                    std::swap(vertices[i + 1].fPos, vertices[i + 2].fPos);
                }
                ++fCurrPathPrimitiveCounts.fTriangles;
            } else {
                ++fCurrPathPrimitiveCounts.fWoundTriangles;
            }
        }

        fPathsInfo.back().fFanTessellation.reset(vertices);
        fPathsInfo.back().fFanTessellationCount = count;
    }

    fTotalPrimitiveCounts[(int)scissorMode] += fCurrPathPrimitiveCounts;

    if (ScissorMode::kScissored == scissorMode) {
        fScissorSubBatches.push_back() = {fTotalPrimitiveCounts[(int)ScissorMode::kScissored],
                                          clippedDevIBounds.makeOffset(atlasOffsetX, atlasOffsetY)};
    }

    SkDEBUGCODE(fParsingPath = false);
}

void GrCCPathParser::discardParsedPath() {
    SkASSERT(fParsingPath);
    fGeometry.resize_back(fCurrPathPointsIdx, fCurrPathVerbsIdx);
    SkDEBUGCODE(fParsingPath = false);
}

GrCCPathParser::CoverageCountBatchID GrCCPathParser::closeCurrentBatch() {
    SkASSERT(!fInstanceBuffer);
    SkASSERT(!fCoverageCountBatches.empty());
    const auto& lastBatch = fCoverageCountBatches.back();
    const auto& lastScissorSubBatch = fScissorSubBatches[lastBatch.fEndScissorSubBatchIdx - 1];

    PrimitiveTallies batchTotalCounts = fTotalPrimitiveCounts[(int)ScissorMode::kNonScissored] -
                                        lastBatch.fEndNonScissorIndices;
    batchTotalCounts += fTotalPrimitiveCounts[(int)ScissorMode::kScissored] -
                        lastScissorSubBatch.fEndPrimitiveIndices;

    fCoverageCountBatches.push_back() = {
        fTotalPrimitiveCounts[(int)ScissorMode::kNonScissored],
        fScissorSubBatches.count(),
        batchTotalCounts
    };

    int maxMeshes = 1 + fScissorSubBatches.count() - lastBatch.fEndScissorSubBatchIdx;
    fMaxMeshesPerDraw = SkTMax(fMaxMeshesPerDraw, maxMeshes);

    return fCoverageCountBatches.count() - 1;
}

// Emits a contour's triangle fan.
//
// Classic Redbook fanning would be the triangles: [0 1 2], [0 2 3], ..., [0 n-2 n-1].
//
// This function emits the triangle: [0 n/3 n*2/3], and then recurses on all three sides. The
// advantage to this approach is that for a convex-ish contour, it generates larger triangles.
// Classic fanning tends to generate long, skinny triangles, which are expensive to draw since they
// have a longer perimeter to rasterize and antialias.
//
// The indices array indexes the fan's points (think: glDrawElements), and must have at least
// log3(indexCount) extra elements past the end for this method to use as scratch space.
//
// Returns the next triangle instance after the final one emitted.
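// (For example, with 9 fan points the first triangle covers indices 0, 3, and 6; the recursion
// then fans the sub-ranges [0..3], [3..6], and [6..8] plus the wrap back to index 0.)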
static TriPointInstance* emit_recursive_fan(const SkTArray<SkPoint, true>& pts,
                                            SkTArray<int32_t, true>& indices, int firstIndex,
                                            int indexCount, const Sk2f& atlasOffset,
                                            TriPointInstance out[]) {
    if (indexCount < 3) {
        return out;
    }

    int32_t oneThirdCount = indexCount / 3;
    int32_t twoThirdsCount = (2 * indexCount) / 3;
    out++->set(pts[indices[firstIndex]], pts[indices[firstIndex + oneThirdCount]],
               pts[indices[firstIndex + twoThirdsCount]], atlasOffset);

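    // The "+ 1" on each recursive count re-includes the corner point shared with the triangle
    // emitted above, so the sub-fans seal against it.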
    out = emit_recursive_fan(pts, indices, firstIndex, oneThirdCount + 1, atlasOffset, out);
    out = emit_recursive_fan(pts, indices, firstIndex + oneThirdCount,
                             twoThirdsCount - oneThirdCount + 1, atlasOffset, out);

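    // The third sub-fan wraps from the last corner back around to the first point. Temporarily
    // write the first index into the scratch slot just past the end (the extra space the caller
    // must reserve), then restore it afterward.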
    int endIndex = firstIndex + indexCount;
    int32_t oldValue = indices[endIndex];
    indices[endIndex] = indices[firstIndex];
    out = emit_recursive_fan(pts, indices, firstIndex + twoThirdsCount,
                             indexCount - twoThirdsCount + 1, atlasOffset, out);
    indices[endIndex] = oldValue;

    return out;
}

static void emit_tessellated_fan(const GrTessellator::WindingVertex* vertices, int numVertices,
                                 const Sk2f& atlasOffset, TriPointInstance* triPointInstanceData,
                                 QuadPointInstance* quadPointInstanceData,
                                 GrCCGeometry::PrimitiveTallies* indices) {
    for (int i = 0; i < numVertices; i += 3) {
        if (1 == abs(vertices[i].fWinding)) {
            triPointInstanceData[indices->fTriangles++].set(vertices[i].fPos, vertices[i + 1].fPos,
                                                            vertices[i + 2].fPos, atlasOffset);
        } else {
            quadPointInstanceData[indices->fWoundTriangles++].set(
                    vertices[i].fPos, vertices[i + 1].fPos, vertices[i + 2].fPos, atlasOffset,
                    static_cast<float>(vertices[i].fWinding));
        }
    }
}

bool GrCCPathParser::finalize(GrOnFlushResourceProvider* onFlushRP) {
    SkASSERT(!fParsingPath); // Call saveParsedPath() or discardParsedPath().
    SkASSERT(fCoverageCountBatches.back().fEndNonScissorIndices == // Call closeCurrentBatch().
             fTotalPrimitiveCounts[(int)ScissorMode::kNonScissored]);
    SkASSERT(fCoverageCountBatches.back().fEndScissorSubBatchIdx == fScissorSubBatches.count());

    // Here we build a single instance buffer to share with every internal batch.
    //
    // CCPR processes 4 different types of primitives: triangles, wound triangles, quadratics, and
    // cubics. Each primitive type is further divided into instances that require a scissor and
    // those that don't. This leaves us with 4*2 = 8 independent instance arrays to build for the
    // GPU.
    //
    // Rather than place each instance array in its own GPU buffer, we allocate a single
    // megabuffer and lay them all out side-by-side. We can offset the "baseInstance" parameter in
    // our draw calls to direct the GPU to the applicable elements within a given array.
    //
    // We already know how big to make each array from fTotalPrimitiveCounts, so layout is
    // straightforward. Start with triangles and quadratics. They both view the instance buffer as
    // an array of TriPointInstance[], so we can begin at zero and lay them out one after the
    // other.
    fBaseInstances[0].fTriangles = 0;
    fBaseInstances[1].fTriangles = fBaseInstances[0].fTriangles +
                                   fTotalPrimitiveCounts[0].fTriangles;
    fBaseInstances[0].fQuadratics = fBaseInstances[1].fTriangles +
                                    fTotalPrimitiveCounts[1].fTriangles;
    fBaseInstances[1].fQuadratics = fBaseInstances[0].fQuadratics +
                                    fTotalPrimitiveCounts[0].fQuadratics;
    int triEndIdx = fBaseInstances[1].fQuadratics + fTotalPrimitiveCounts[1].fQuadratics;

    // Wound triangles and cubics both view the same instance buffer as an array of
    // QuadPointInstance[]. So, reinterpreting the instance data as QuadPointInstance[], we start
    // them on the first index that will not overwrite previous TriPointInstance data.
    int quadBaseIdx =
            GR_CT_DIV_ROUND_UP(triEndIdx * sizeof(TriPointInstance), sizeof(QuadPointInstance));
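    // (quadBaseIdx is triEndIdx converted from TriPointInstance units to QuadPointInstance units,
    // rounded up so the quad-sized data starts past the end of the tri-sized data.)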
    fBaseInstances[0].fWoundTriangles = quadBaseIdx;
    fBaseInstances[1].fWoundTriangles = fBaseInstances[0].fWoundTriangles +
                                        fTotalPrimitiveCounts[0].fWoundTriangles;
    fBaseInstances[0].fCubics = fBaseInstances[1].fWoundTriangles +
                                fTotalPrimitiveCounts[1].fWoundTriangles;
    fBaseInstances[1].fCubics = fBaseInstances[0].fCubics + fTotalPrimitiveCounts[0].fCubics;
    int quadEndIdx = fBaseInstances[1].fCubics + fTotalPrimitiveCounts[1].fCubics;
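    // Resulting layout (one sub-array per ScissorMode for each primitive type):
    //   as TriPointInstance[]:  [triangles][triangles][quadratics][quadratics]
    //   as QuadPointInstance[]: ...[wound triangles][wound triangles][cubics][cubics]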

    fInstanceBuffer = onFlushRP->makeBuffer(kVertex_GrBufferType,
                                            quadEndIdx * sizeof(QuadPointInstance));
    if (!fInstanceBuffer) {
        return false;
    }

    TriPointInstance* triPointInstanceData = static_cast<TriPointInstance*>(fInstanceBuffer->map());
    QuadPointInstance* quadPointInstanceData =
            reinterpret_cast<QuadPointInstance*>(triPointInstanceData);
    SkASSERT(quadPointInstanceData);
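    // Both pointers alias the same mapped buffer; the base indices computed above keep the
    // TriPointInstance and QuadPointInstance regions from overlapping.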

    PathInfo* nextPathInfo = fPathsInfo.begin();
    float atlasOffsetX = 0.0, atlasOffsetY = 0.0;
    Sk2f atlasOffset;
    PrimitiveTallies instanceIndices[2] = {fBaseInstances[0], fBaseInstances[1]};
    PrimitiveTallies* currIndices = nullptr;
    SkSTArray<256, int32_t, true> currFan;
    bool currFanIsTessellated = false;

    const SkTArray<SkPoint, true>& pts = fGeometry.points();
    int ptsIdx = -1;
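    // ptsIdx trails the most recently consumed control point; it starts at -1 because
    // kBeginContour pre-increments it before its first use.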

    // Expand the ccpr verbs into GPU instance buffers.
    for (GrCCGeometry::Verb verb : fGeometry.verbs()) {
        switch (verb) {
            case GrCCGeometry::Verb::kBeginPath:
                SkASSERT(currFan.empty());
                currIndices = &instanceIndices[(int)nextPathInfo->fScissorMode];
                atlasOffsetX = static_cast<float>(nextPathInfo->fAtlasOffsetX);
                atlasOffsetY = static_cast<float>(nextPathInfo->fAtlasOffsetY);
                atlasOffset = {atlasOffsetX, atlasOffsetY};
                currFanIsTessellated = nextPathInfo->fFanTessellation.get();
                if (currFanIsTessellated) {
                    emit_tessellated_fan(nextPathInfo->fFanTessellation.get(),
                                         nextPathInfo->fFanTessellationCount, atlasOffset,
                                         triPointInstanceData, quadPointInstanceData, currIndices);
                }
                ++nextPathInfo;
                continue;

            case GrCCGeometry::Verb::kBeginContour:
                SkASSERT(currFan.empty());
                ++ptsIdx;
                if (!currFanIsTessellated) {
                    currFan.push_back(ptsIdx);
                }
                continue;

            case GrCCGeometry::Verb::kLineTo:
                ++ptsIdx;
                if (!currFanIsTessellated) {
                    SkASSERT(!currFan.empty());
                    currFan.push_back(ptsIdx);
                }
                continue;

            case GrCCGeometry::Verb::kMonotonicQuadraticTo:
                triPointInstanceData[currIndices->fQuadratics++].set(&pts[ptsIdx], atlasOffset);
                ptsIdx += 2;
                if (!currFanIsTessellated) {
                    SkASSERT(!currFan.empty());
                    currFan.push_back(ptsIdx);
                }
                continue;

            case GrCCGeometry::Verb::kMonotonicCubicTo:
                quadPointInstanceData[currIndices->fCubics++].set(&pts[ptsIdx], atlasOffsetX,
                                                                  atlasOffsetY);
                ptsIdx += 3;
                if (!currFanIsTessellated) {
                    SkASSERT(!currFan.empty());
                    currFan.push_back(ptsIdx);
                }
                continue;

            case GrCCGeometry::Verb::kEndClosedContour: // endPt == startPt.
                if (!currFanIsTessellated) {
                    SkASSERT(!currFan.empty());
                    currFan.pop_back();
                }
                // fallthru.
            case GrCCGeometry::Verb::kEndOpenContour: // endPt != startPt.
                SkASSERT(!currFanIsTessellated || currFan.empty());
                if (!currFanIsTessellated && currFan.count() >= 3) {
                    int fanSize = currFan.count();
                    // Reserve space for emit_recursive_fan. Technically this can grow to
                    // fanSize + log3(fanSize), but we approximate with log2.
                    currFan.push_back_n(SkNextLog2(fanSize));
                    SkDEBUGCODE(TriPointInstance* end =)
                    emit_recursive_fan(pts, currFan, 0, fanSize, atlasOffset,
                                       triPointInstanceData + currIndices->fTriangles);
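                    // A fan over fanSize points always yields fanSize - 2 triangles, regardless
                    // of how emit_recursive_fan subdivides it.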
                    currIndices->fTriangles += fanSize - 2;
                    SkASSERT(triPointInstanceData + currIndices->fTriangles == end);
                }
                currFan.reset();
                continue;
        }
    }

    fInstanceBuffer->unmap();

    SkASSERT(nextPathInfo == fPathsInfo.end());
    SkASSERT(ptsIdx == pts.count() - 1);
    SkASSERT(instanceIndices[0].fTriangles == fBaseInstances[1].fTriangles);
    SkASSERT(instanceIndices[1].fTriangles == fBaseInstances[0].fQuadratics);
    SkASSERT(instanceIndices[0].fQuadratics == fBaseInstances[1].fQuadratics);
    SkASSERT(instanceIndices[1].fQuadratics == triEndIdx);
    SkASSERT(instanceIndices[0].fWoundTriangles == fBaseInstances[1].fWoundTriangles);
    SkASSERT(instanceIndices[1].fWoundTriangles == fBaseInstances[0].fCubics);
    SkASSERT(instanceIndices[0].fCubics == fBaseInstances[1].fCubics);
    SkASSERT(instanceIndices[1].fCubics == quadEndIdx);

    fMeshesScratchBuffer.reserve(fMaxMeshesPerDraw);
    fDynamicStatesScratchBuffer.reserve(fMaxMeshesPerDraw);

    return true;
}

void GrCCPathParser::drawCoverageCount(GrOpFlushState* flushState, CoverageCountBatchID batchID,
                                       const SkIRect& drawBounds) const {
    using RenderPass = GrCCCoverageProcessor::RenderPass;
    using WindMethod = GrCCCoverageProcessor::WindMethod;

    SkASSERT(fInstanceBuffer);

    const PrimitiveTallies& batchTotalCounts = fCoverageCountBatches[batchID].fTotalPrimitiveCounts;

    GrPipeline pipeline(flushState->drawOpArgs().fProxy, GrPipeline::ScissorState::kEnabled,
                        SkBlendMode::kPlus);
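    // Coverage counts are accumulated additively into the atlas, hence the kPlus blend mode.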

    if (batchTotalCounts.fTriangles) {
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kTriangleHulls,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fTriangles, drawBounds);
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kTriangleEdges,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fTriangles,
                             drawBounds); // Might get skipped.
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kTriangleCorners,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fTriangles, drawBounds);
    }

    if (batchTotalCounts.fWoundTriangles) {
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kTriangleHulls,
                             WindMethod::kInstanceData, &PrimitiveTallies::fWoundTriangles,
                             drawBounds);
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kTriangleEdges,
                             WindMethod::kInstanceData, &PrimitiveTallies::fWoundTriangles,
                             drawBounds); // Might get skipped.
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kTriangleCorners,
                             WindMethod::kInstanceData, &PrimitiveTallies::fWoundTriangles,
                             drawBounds);
    }

    if (batchTotalCounts.fQuadratics) {
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kQuadraticHulls,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fQuadratics, drawBounds);
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kQuadraticCorners,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fQuadratics, drawBounds);
    }

    if (batchTotalCounts.fCubics) {
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kCubicHulls,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fCubics, drawBounds);
        this->drawRenderPass(flushState, pipeline, batchID, RenderPass::kCubicCorners,
                             WindMethod::kCrossProduct, &PrimitiveTallies::fCubics, drawBounds);
    }
}

void GrCCPathParser::drawRenderPass(GrOpFlushState* flushState, const GrPipeline& pipeline,
                                    CoverageCountBatchID batchID,
                                    GrCCCoverageProcessor::RenderPass renderPass,
                                    GrCCCoverageProcessor::WindMethod windMethod,
                                    int PrimitiveTallies::*instanceType,
                                    const SkIRect& drawBounds) const {
    SkASSERT(pipeline.getScissorState().enabled());

    if (!GrCCCoverageProcessor::DoesRenderPass(renderPass, flushState->caps())) {
        return;
    }

    // Don't call reset(), as that also resets the reserve count.
    fMeshesScratchBuffer.pop_back_n(fMeshesScratchBuffer.count());
    fDynamicStatesScratchBuffer.pop_back_n(fDynamicStatesScratchBuffer.count());

    GrCCCoverageProcessor proc(flushState->resourceProvider(), renderPass, windMethod);

    SkASSERT(batchID > 0);
    SkASSERT(batchID < fCoverageCountBatches.count());
    const CoverageCountBatch& previousBatch = fCoverageCountBatches[batchID - 1];
    const CoverageCountBatch& batch = fCoverageCountBatches[batchID];
    SkDEBUGCODE(int totalInstanceCount = 0);

    if (int instanceCount = batch.fEndNonScissorIndices.*instanceType -
                            previousBatch.fEndNonScissorIndices.*instanceType) {
        SkASSERT(instanceCount > 0);
        int baseInstance = fBaseInstances[(int)ScissorMode::kNonScissored].*instanceType +
                           previousBatch.fEndNonScissorIndices.*instanceType;
        proc.appendMesh(fInstanceBuffer.get(), instanceCount, baseInstance, &fMeshesScratchBuffer);
        fDynamicStatesScratchBuffer.push_back().fScissorRect.setXYWH(0, 0, drawBounds.width(),
                                                                     drawBounds.height());
        SkDEBUGCODE(totalInstanceCount += instanceCount);
    }

    SkASSERT(previousBatch.fEndScissorSubBatchIdx > 0);
    SkASSERT(batch.fEndScissorSubBatchIdx <= fScissorSubBatches.count());
    int baseScissorInstance = fBaseInstances[(int)ScissorMode::kScissored].*instanceType;
    for (int i = previousBatch.fEndScissorSubBatchIdx; i < batch.fEndScissorSubBatchIdx; ++i) {
        const ScissorSubBatch& previousSubBatch = fScissorSubBatches[i - 1];
        const ScissorSubBatch& scissorSubBatch = fScissorSubBatches[i];
        int startIndex = previousSubBatch.fEndPrimitiveIndices.*instanceType;
        int instanceCount = scissorSubBatch.fEndPrimitiveIndices.*instanceType - startIndex;
        if (!instanceCount) {
            continue;
        }
        SkASSERT(instanceCount > 0);
        proc.appendMesh(fInstanceBuffer.get(), instanceCount,
                        baseScissorInstance + startIndex, &fMeshesScratchBuffer);
        fDynamicStatesScratchBuffer.push_back().fScissorRect = scissorSubBatch.fScissor;
        SkDEBUGCODE(totalInstanceCount += instanceCount);
    }

    SkASSERT(fMeshesScratchBuffer.count() == fDynamicStatesScratchBuffer.count());
    SkASSERT(fMeshesScratchBuffer.count() <= fMaxMeshesPerDraw);
    SkASSERT(totalInstanceCount == batch.fTotalPrimitiveCounts.*instanceType);

    if (!fMeshesScratchBuffer.empty()) {
        SkASSERT(flushState->rtCommandBuffer());
        flushState->rtCommandBuffer()->draw(pipeline, proc, fMeshesScratchBuffer.begin(),
                                            fDynamicStatesScratchBuffer.begin(),
                                            fMeshesScratchBuffer.count(), SkRect::Make(drawBounds));
    }
}