/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkPolyUtils.h"

#include "SkPointPriv.h"
#include "SkTArray.h"
#include "SkTemplates.h"
#include "SkTDPQueue.h"
#include "SkTInternalLList.h"

//////////////////////////////////////////////////////////////////////////////////
// Helper data structures and functions

struct OffsetSegment {
    SkPoint fP0;
    SkPoint fP1;
};

// Computes perpDot for point compared to segment.
// A positive value means the point is to the left of the segment,
// negative is to the right, 0 is collinear.
static int compute_side(const SkPoint& s0, const SkPoint& s1, const SkPoint& p) {
    SkVector v0 = s1 - s0;
    SkVector v1 = p - s0;
    SkScalar perpDot = v0.cross(v1);
    if (!SkScalarNearlyZero(perpDot)) {
        return ((perpDot > 0) ? 1 : -1);
    }

    return 0;
}

// returns 1 for ccw, -1 for cw and 0 if degenerate
static int get_winding(const SkPoint* polygonVerts, int polygonSize) {
    // compute area and use sign to determine winding
    SkScalar quadArea = 0;
    for (int curr = 0; curr < polygonSize; ++curr) {
        int next = (curr + 1) % polygonSize;
        quadArea += polygonVerts[curr].cross(polygonVerts[next]);
    }
    if (SkScalarNearlyZero(quadArea)) {
        return 0;
    }
    // 1 == ccw, -1 == cw
    return (quadArea > 0) ? 1 : -1;
}

// Helper function to compute the individual vector for non-equal offsets
inline void compute_offset(SkScalar d, const SkPoint& polyPoint, int side,
                           const SkPoint& outerTangentIntersect, SkVector* v) {
    SkScalar dsq = d * d;
    SkVector dP = outerTangentIntersect - polyPoint;
    SkScalar dPlenSq = SkPointPriv::LengthSqd(dP);
    if (SkScalarNearlyZero(dPlenSq)) {
        v->set(0, 0);
    } else {
        SkScalar discrim = SkScalarSqrt(dPlenSq - dsq);
        v->fX = (dsq*dP.fX - side * d*dP.fY*discrim) / dPlenSq;
        v->fY = (dsq*dP.fY + side * d*dP.fX*discrim) / dPlenSq;
    }
}

// Compute difference vector to offset p0-p1 'd0' and 'd1' units in direction specified by 'side'
bool compute_offset_vectors(const SkPoint& p0, const SkPoint& p1, SkScalar d0, SkScalar d1,
                            int side, SkPoint* vector0, SkPoint* vector1) {
    SkASSERT(side == -1 || side == 1);
    if (SkScalarNearlyEqual(d0, d1)) {
        // if distances are equal, can just outset by the perpendicular
        SkVector perp = SkVector::Make(p0.fY - p1.fY, p1.fX - p0.fX);
        perp.setLength(d0*side);
        *vector0 = perp;
        *vector1 = perp;
    } else {
        SkScalar d0abs = SkTAbs(d0);
        SkScalar d1abs = SkTAbs(d1);
        // Otherwise we need to compute the outer tangent.
        // See: http://www.ambrsoft.com/TrigoCalc/Circles2/Circles2Tangent_.htm
        if (d0abs < d1abs) {
            side = -side;
        }
        SkScalar dD = d0abs - d1abs;
        // if one circle is inside another, we can't compute an offset
        if (dD*dD >= SkPointPriv::DistanceToSqd(p0, p1)) {
            return false;
        }
        SkPoint outerTangentIntersect = SkPoint::Make((p1.fX*d0abs - p0.fX*d1abs) / dD,
                                                      (p1.fY*d0abs - p0.fY*d1abs) / dD);

        compute_offset(d0, p0, side, outerTangentIntersect, vector0);
        compute_offset(d1, p1, side, outerTangentIntersect, vector1);
    }

    return true;
}

// Offset line segment p0-p1 'd0' and 'd1' units in the direction specified by 'side'
bool SkOffsetSegment(const SkPoint& p0, const SkPoint& p1, SkScalar d0, SkScalar d1,
                     int side, SkPoint* offset0, SkPoint* offset1) {
    SkVector v0, v1;
    if (!compute_offset_vectors(p0, p1, d0, d1, side, &v0, &v1)) {
        return false;
    }
    *offset0 = p0 + v0;
    *offset1 = p1 + v1;

    return true;
}
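
// Illustrative usage (a sketch, not exercised anywhere in this file): offsetting the
// horizontal segment (0,0)-(10,0) by a constant 2 units with side == 1 yields the
// parallel segment (0,2)-(10,2), since the perpendicular (p0.fY - p1.fY, p1.fX - p0.fX)
// is (0,10) here and is scaled to length 2.
//
//   SkPoint off0, off1;
//   if (SkOffsetSegment(SkPoint::Make(0, 0), SkPoint::Make(10, 0), 2, 2, 1,
//                       &off0, &off1)) {
//       // off0 == (0, 2), off1 == (10, 2)
//   }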

// Compute the intersection 'p' between segments s0 and s1, if any.
// 's' is the parametric value for the intersection along 's0' & 't' is the same for 's1'.
// Returns false if there is no intersection.
static bool compute_intersection(const OffsetSegment& s0, const OffsetSegment& s1,
                                 SkPoint* p, SkScalar* s, SkScalar* t) {
    // Common cases for polygon chains -- check if endpoints are touching
    if (SkPointPriv::EqualsWithinTolerance(s0.fP1, s1.fP0)) {
        *p = s0.fP1;
        *s = SK_Scalar1;
        *t = 0;
        return true;
    }
    if (SkPointPriv::EqualsWithinTolerance(s1.fP1, s0.fP0)) {
        *p = s1.fP1;
        *s = 0;
        *t = SK_Scalar1;
        return true;
    }

    SkVector v0 = s0.fP1 - s0.fP0;
    SkVector v1 = s1.fP1 - s1.fP0;
    // We should have culled coincident points before this
    SkASSERT(!SkPointPriv::EqualsWithinTolerance(s0.fP0, s0.fP1));
    SkASSERT(!SkPointPriv::EqualsWithinTolerance(s1.fP0, s1.fP1));

    SkVector d = s1.fP0 - s0.fP0;
    SkScalar perpDot = v0.cross(v1);
    SkScalar localS, localT;
    if (SkScalarNearlyZero(perpDot)) {
        // segments are parallel, but not collinear
        if (!SkScalarNearlyZero(d.dot(d), SK_ScalarNearlyZero*SK_ScalarNearlyZero)) {
            return false;
        }

        // project segment1's endpoints onto segment0
        localS = d.fX / v0.fX;
        localT = 0;
        if (localS < 0 || localS > SK_Scalar1) {
            // the first endpoint doesn't lie on segment0, try the other one
            SkScalar oldLocalS = localS;
            localS = (s1.fP1.fX - s0.fP0.fX) / v0.fX;
            localT = SK_Scalar1;
            if (localS < 0 || localS > SK_Scalar1) {
                // it's possible that segment1's interval surrounds segment0
                // this is false if the params have the same signs, and in that case no collision
                if (localS*oldLocalS > 0) {
                    return false;
                }
                // otherwise project segment0's endpoint onto segment1 instead
                localS = 0;
                localT = -d.fX / v1.fX;
            }
        }
    } else {
        localS = d.cross(v1) / perpDot;
        if (localS < 0 || localS > SK_Scalar1) {
            return false;
        }
        localT = d.cross(v0) / perpDot;
        if (localT < 0 || localT > SK_Scalar1) {
            return false;
        }
    }

    v0 *= localS;
    *p = s0.fP0 + v0;
    *s = localS;
    *t = localT;

    return true;
}

// computes the line intersection and then the distance to s0's endpoint
static SkScalar compute_crossing_distance(const OffsetSegment& s0, const OffsetSegment& s1) {
    SkVector v0 = s0.fP1 - s0.fP0;
    SkVector v1 = s1.fP1 - s1.fP0;

    SkScalar perpDot = v0.cross(v1);
    if (SkScalarNearlyZero(perpDot)) {
        // segments are parallel
        return SK_ScalarMax;
    }

    SkVector d = s1.fP0 - s0.fP0;
    SkScalar localS = d.cross(v1) / perpDot;
    if (localS < 0) {
        localS = -localS;
    } else {
        localS -= SK_Scalar1;
    }

    localS *= v0.length();

    return localS;
}

static bool is_convex(const SkTDArray<SkPoint>& poly) {
    if (poly.count() <= 3) {
        return true;
    }

    SkVector v0 = poly[0] - poly[poly.count() - 1];
    SkVector v1 = poly[1] - poly[poly.count() - 1];
    SkScalar winding = v0.cross(v1);

    for (int i = 0; i < poly.count() - 1; ++i) {
        int j = i + 1;
        int k = (i + 2) % poly.count();

        SkVector v0 = poly[j] - poly[i];
        SkVector v1 = poly[k] - poly[i];
        SkScalar perpDot = v0.cross(v1);
        if (winding*perpDot < 0) {
            return false;
        }
    }

    return true;
}

struct EdgeData {
    OffsetSegment fInset;
    SkPoint fIntersection;
    SkScalar fTValue;
    uint16_t fStart;
    uint16_t fEnd;
    uint16_t fIndex;
    bool fValid;

    void init() {
        fIntersection = fInset.fP0;
        fTValue = SK_ScalarMin;
        fStart = 0;
        fEnd = 0;
        fIndex = 0;
        fValid = true;
    }

    void init(uint16_t start, uint16_t end) {
        fIntersection = fInset.fP0;
        fTValue = SK_ScalarMin;
        fStart = start;
        fEnd = end;
        fIndex = start;
        fValid = true;
    }
};

//////////////////////////////////////////////////////////////////////////////////

// The objective here is to inset all of the edges by the given distance, and then
// remove any invalid inset edges by detecting right-hand turns. In a ccw polygon,
// we should only be making left-hand turns (for cw polygons, we use the winding
// parameter to reverse this). We detect this by checking whether the second intersection
// on an edge is closer to its tail than the first one.
//
// We might also have the case that there is no intersection between two neighboring inset edges.
// In this case, one edge will lie to the right of the other and should be discarded along with
// its previous intersection (if any).
//
// Note: the assumption is that inputPolygon is convex and has no coincident points.
//
bool SkInsetConvexPolygon(const SkPoint* inputPolygonVerts, int inputPolygonSize,
                          std::function<SkScalar(const SkPoint&)> insetDistanceFunc,
                          SkTDArray<SkPoint>* insetPolygon) {
    if (inputPolygonSize < 3) {
        return false;
    }

    // get winding direction
    int winding = get_winding(inputPolygonVerts, inputPolygonSize);
    if (0 == winding) {
        return false;
    }

    // set up
    SkAutoSTMalloc<64, EdgeData> edgeData(inputPolygonSize);
    for (int i = 0; i < inputPolygonSize; ++i) {
        int j = (i + 1) % inputPolygonSize;
        int k = (i + 2) % inputPolygonSize;
        // check for convexity just to be sure
        if (compute_side(inputPolygonVerts[i], inputPolygonVerts[j],
                         inputPolygonVerts[k])*winding < 0) {
            return false;
        }
        if (!SkOffsetSegment(inputPolygonVerts[i], inputPolygonVerts[j],
                             insetDistanceFunc(inputPolygonVerts[i]),
                             insetDistanceFunc(inputPolygonVerts[j]),
                             winding,
                             &edgeData[i].fInset.fP0, &edgeData[i].fInset.fP1)) {
            return false;
        }
        edgeData[i].init();
    }

    int prevIndex = inputPolygonSize - 1;
    int currIndex = 0;
    int insetVertexCount = inputPolygonSize;
    int iterations = 0;
    while (prevIndex != currIndex) {
        ++iterations;
        // we should check each edge against each other edge at most once
        if (iterations > inputPolygonSize*inputPolygonSize) {
            return false;
        }

        if (!edgeData[prevIndex].fValid) {
            prevIndex = (prevIndex + inputPolygonSize - 1) % inputPolygonSize;
            continue;
        }

        SkScalar s, t;
        SkPoint intersection;
        if (compute_intersection(edgeData[prevIndex].fInset, edgeData[currIndex].fInset,
                                 &intersection, &s, &t)) {
            // if new intersection is further back on previous inset from the prior intersection
            if (s < edgeData[prevIndex].fTValue) {
                // no point in considering this one again
                edgeData[prevIndex].fValid = false;
                --insetVertexCount;
                // go back one segment
                prevIndex = (prevIndex + inputPolygonSize - 1) % inputPolygonSize;
            // we've already considered this intersection, we're done
            } else if (edgeData[currIndex].fTValue > SK_ScalarMin &&
                       SkPointPriv::EqualsWithinTolerance(intersection,
                                                          edgeData[currIndex].fIntersection,
                                                          1.0e-6f)) {
                break;
            } else {
                // add intersection
                edgeData[currIndex].fIntersection = intersection;
                edgeData[currIndex].fTValue = t;

                // go to next segment
                prevIndex = currIndex;
                currIndex = (currIndex + 1) % inputPolygonSize;
            }
        } else {
            // if prev to right side of curr
            int side = winding*compute_side(edgeData[currIndex].fInset.fP0,
                                            edgeData[currIndex].fInset.fP1,
                                            edgeData[prevIndex].fInset.fP1);
            if (side < 0 && side == winding*compute_side(edgeData[currIndex].fInset.fP0,
                                                         edgeData[currIndex].fInset.fP1,
                                                         edgeData[prevIndex].fInset.fP0)) {
                // no point in considering this one again
                edgeData[prevIndex].fValid = false;
                --insetVertexCount;
                // go back one segment
                prevIndex = (prevIndex + inputPolygonSize - 1) % inputPolygonSize;
            } else {
                // move to next segment
                edgeData[currIndex].fValid = false;
                --insetVertexCount;
                currIndex = (currIndex + 1) % inputPolygonSize;
            }
        }
    }

    // store all the valid intersections that aren't nearly coincident
    // TODO: look at the main algorithm and see if we can detect these better
    static constexpr SkScalar kCleanupTolerance = 0.01f;

    insetPolygon->reset();
    if (insetVertexCount >= 0) {
        insetPolygon->setReserve(insetVertexCount);
    }
    currIndex = -1;
    for (int i = 0; i < inputPolygonSize; ++i) {
        if (edgeData[i].fValid && (currIndex == -1 ||
            !SkPointPriv::EqualsWithinTolerance(edgeData[i].fIntersection,
                                                (*insetPolygon)[currIndex],
                                                kCleanupTolerance))) {
            *insetPolygon->push() = edgeData[i].fIntersection;
            currIndex++;
        }
    }
    // make sure the first and last points aren't coincident
    if (currIndex >= 1 &&
        SkPointPriv::EqualsWithinTolerance((*insetPolygon)[0], (*insetPolygon)[currIndex],
                                           kCleanupTolerance)) {
        insetPolygon->pop();
    }

    return (insetPolygon->count() >= 3 && is_convex(*insetPolygon));
}
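
// Illustrative usage (a sketch with hypothetical caller-side data): inset a unit square
// by a constant 0.1 on every edge by passing a constant distance functor.
//
//   SkPoint square[4] = { {0, 0}, {1, 0}, {1, 1}, {0, 1} };
//   SkTDArray<SkPoint> inset;
//   bool ok = SkInsetConvexPolygon(square, 4,
//                                  [](const SkPoint&) { return 0.1f; },
//                                  &inset);
//   // on success, 'inset' holds a convex polygon whose edges lie 0.1 inside the square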

///////////////////////////////////////////////////////////////////////////////////////////

// compute the number of points needed for a circular join when offsetting a reflex vertex
void SkComputeRadialSteps(const SkVector& v1, const SkVector& v2, SkScalar r,
                          SkScalar* rotSin, SkScalar* rotCos, int* n) {
    const SkScalar kRecipPixelsPerArcSegment = 0.25f;

    SkScalar rCos = v1.dot(v2);
    SkScalar rSin = v1.cross(v2);
    SkScalar theta = SkScalarATan2(rSin, rCos);

    int steps = SkScalarRoundToInt(SkScalarAbs(r*theta*kRecipPixelsPerArcSegment));

    // guard against dividing by zero when the arc is small enough to need no steps
    SkScalar dTheta = (steps > 0) ? theta / steps : 0;
    *rotSin = SkScalarSinCos(dTheta, rotCos);
    *n = steps;
}
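
// Illustrative sketch of how the outputs are consumed (see SkOffsetSimplePolygon below):
// starting from the normal at the beginning of the arc, each arc point is produced by
// rotating the previous normal by the incremental angle via the returned sine/cosine.
// 'startNormal' and 'endNormal' here are assumed to be the offset normals bounding the
// arc, each with length 'radius'.
//
//   SkScalar rotSin, rotCos;
//   int numSteps;
//   SkComputeRadialSteps(startNormal, endNormal, radius, &rotSin, &rotCos, &numSteps);
//   SkVector normal = startNormal;
//   for (int i = 0; i < numSteps; ++i) {
//       normal = SkVector::Make(normal.fX*rotCos - normal.fY*rotSin,
//                               normal.fY*rotCos + normal.fX*rotSin);
//       // vertex + normal is the next point on the circular join
//   }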

// tolerant less-than comparison
static inline bool nearly_lt(SkScalar a, SkScalar b, SkScalar tolerance = SK_ScalarNearlyZero) {
    return a < b - tolerance;
}

// a point is "left" of another if its x coordinate is less or, if the x coordinates are
// equal, its y coordinate is less
static bool left(const SkPoint& p0, const SkPoint& p1) {
    return nearly_lt(p0.fX, p1.fX) ||
           (SkScalarNearlyEqual(p0.fX, p1.fX) && nearly_lt(p0.fY, p1.fY));
}

struct Vertex {
    static bool Left(const Vertex& qv0, const Vertex& qv1) {
        return left(qv0.fPosition, qv1.fPosition);
    }
    // packed to fit into 16 bytes
    SkPoint fPosition;
    uint16_t fIndex; // index in unsorted polygon
    uint16_t fPrevIndex; // indices for previous and next vertex in unsorted polygon
    uint16_t fNextIndex;
    uint16_t fFlags;
};

enum VertexFlags {
    kPrevLeft_VertexFlag = 0x1,
    kNextLeft_VertexFlag = 0x2,
};

struct Edge {
    // returns true if "this" is above "that"
    bool above(const Edge& that, SkScalar tolerance = SK_ScalarNearlyZero) {
        SkASSERT(nearly_lt(this->fSegment.fP0.fX, that.fSegment.fP0.fX, tolerance) ||
                 SkScalarNearlyEqual(this->fSegment.fP0.fX, that.fSegment.fP0.fX, tolerance));
        // The idea here is that if the vector between the origins of the two segments (dv)
        // rotates counterclockwise up to the vector representing the "this" segment (u),
        // then we know that "this" is above that. If the result is clockwise we say it's below.
        SkVector dv = that.fSegment.fP0 - this->fSegment.fP0;
        SkVector u = this->fSegment.fP1 - this->fSegment.fP0;
        SkScalar cross = dv.cross(u);
        if (cross > tolerance) {
            return true;
        } else if (cross < -tolerance) {
            return false;
        }
        // If the result is 0 then either the two origins are equal or the origin of "that"
        // lies on dv. So then we try the same for the vector from the tail of "this"
        // to the head of "that". Again, ccw means "this" is above "that".
        dv = that.fSegment.fP1 - this->fSegment.fP0;
        return (dv.cross(u) > tolerance);
    }

    bool intersect(const Edge& that) const {
        SkPoint intersection;
        SkScalar s, t;
        // check first to see if these edges are neighbors in the polygon
        if (this->fIndex0 == that.fIndex0 || this->fIndex1 == that.fIndex0 ||
            this->fIndex0 == that.fIndex1 || this->fIndex1 == that.fIndex1) {
            return false;
        }
        return compute_intersection(this->fSegment, that.fSegment, &intersection, &s, &t);
    }

    bool operator==(const Edge& that) const {
        return (this->fIndex0 == that.fIndex0 && this->fIndex1 == that.fIndex1);
    }

    bool operator!=(const Edge& that) const {
        return !operator==(that);
    }

    OffsetSegment fSegment;
    int32_t fIndex0; // indices for previous and next vertex
    int32_t fIndex1;
};

class EdgeList {
public:
    void reserve(int count) { fEdges.reserve(count); }

    bool insert(const Edge& newEdge) {
        // linear search for now (expected case is very few active edges)
        int insertIndex = 0;
        while (insertIndex < fEdges.count() && fEdges[insertIndex].above(newEdge)) {
            ++insertIndex;
        }
        // if we intersect with the existing edge above or below us
        // then we know this polygon is not simple, so don't insert, just fail
        if (insertIndex > 0 && newEdge.intersect(fEdges[insertIndex - 1])) {
            return false;
        }
        if (insertIndex < fEdges.count() && newEdge.intersect(fEdges[insertIndex])) {
            return false;
        }

        fEdges.push_back();
        for (int i = fEdges.count() - 1; i > insertIndex; --i) {
            fEdges[i] = fEdges[i - 1];
        }
        fEdges[insertIndex] = newEdge;

        return true;
    }

    bool remove(const Edge& edge) {
        SkASSERT(fEdges.count() > 0);

        // linear search for now (expected case is very few active edges)
        int removeIndex = 0;
        while (removeIndex < fEdges.count() && fEdges[removeIndex] != edge) {
            ++removeIndex;
        }
        // we'd better find it or something is wrong
        SkASSERT(removeIndex < fEdges.count());

        // if we intersect with the edge above or below us
        // then we know this polygon is not simple, so don't remove, just fail
        if (removeIndex > 0 && fEdges[removeIndex].intersect(fEdges[removeIndex - 1])) {
            return false;
        }
        if (removeIndex < fEdges.count() - 1) {
            if (fEdges[removeIndex].intersect(fEdges[removeIndex + 1])) {
                return false;
            }
            // copy over the old entry
            memmove(&fEdges[removeIndex], &fEdges[removeIndex + 1],
                    sizeof(Edge)*(fEdges.count() - removeIndex - 1));
        }

        fEdges.pop_back();
        return true;
    }

private:
    SkSTArray<1, Edge> fEdges;
};

// Here we implement a sweep line algorithm to determine whether the provided points
// represent a simple polygon, i.e., the polygon is non-self-intersecting.
// We first insert the vertices into a priority queue sorting horizontally from left to right.
// Then as we pop the vertices from the queue we generate events which indicate that an edge
// should be added or removed from an edge list. If any intersections are detected in the edge
// list, then we know the polygon is self-intersecting and hence not simple.
bool SkIsSimplePolygon(const SkPoint* polygon, int polygonSize) {
    SkTDPQueue<Vertex, Vertex::Left> vertexQueue;
    EdgeList sweepLine;

    sweepLine.reserve(polygonSize);
    for (int i = 0; i < polygonSize; ++i) {
        Vertex newVertex;
        newVertex.fPosition = polygon[i];
        newVertex.fIndex = i;
        newVertex.fPrevIndex = (i - 1 + polygonSize) % polygonSize;
        newVertex.fNextIndex = (i + 1) % polygonSize;
        newVertex.fFlags = 0;
        if (left(polygon[newVertex.fPrevIndex], polygon[i])) {
            newVertex.fFlags |= kPrevLeft_VertexFlag;
        }
        if (left(polygon[newVertex.fNextIndex], polygon[i])) {
            newVertex.fFlags |= kNextLeft_VertexFlag;
        }
        vertexQueue.insert(newVertex);
    }

    // pop each vertex from the queue and generate events depending on
    // where it lies relative to its neighboring edges
    while (vertexQueue.count() > 0) {
        const Vertex& v = vertexQueue.peek();

        // check edge to previous vertex
        if (v.fFlags & kPrevLeft_VertexFlag) {
            Edge edge{ { polygon[v.fPrevIndex], v.fPosition }, v.fPrevIndex, v.fIndex };
            if (!sweepLine.remove(edge)) {
                break;
            }
        } else {
            Edge edge{ { v.fPosition, polygon[v.fPrevIndex] }, v.fIndex, v.fPrevIndex };
            if (!sweepLine.insert(edge)) {
                break;
            }
        }

        // check edge to next vertex
        if (v.fFlags & kNextLeft_VertexFlag) {
            Edge edge{ { polygon[v.fNextIndex], v.fPosition }, v.fNextIndex, v.fIndex };
            if (!sweepLine.remove(edge)) {
                break;
            }
        } else {
            Edge edge{ { v.fPosition, polygon[v.fNextIndex] }, v.fIndex, v.fNextIndex };
            if (!sweepLine.insert(edge)) {
                break;
            }
        }

        vertexQueue.pop();
    }

    return (vertexQueue.count() == 0);
}
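
// Illustrative usage (hypothetical data): a convex quad is simple, while a "bowtie"
// whose first and third edges cross is not.
//
//   SkPoint square[4] = { {0, 0}, {1, 0}, {1, 1}, {0, 1} };
//   SkPoint bowtie[4] = { {0, 0}, {1, 1}, {1, 0}, {0, 1} };
//   SkASSERT(SkIsSimplePolygon(square, 4));
//   SkASSERT(!SkIsSimplePolygon(bowtie, 4));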

///////////////////////////////////////////////////////////////////////////////////////////

bool SkOffsetSimplePolygon(const SkPoint* inputPolygonVerts, int inputPolygonSize,
                           std::function<SkScalar(const SkPoint&)> offsetDistanceFunc,
                           SkTDArray<SkPoint>* offsetPolygon, SkTDArray<int>* polygonIndices) {
    if (inputPolygonSize < 3) {
        return false;
    }

    // get winding direction
    int winding = get_winding(inputPolygonVerts, inputPolygonSize);
    if (0 == winding) {
        return false;
    }

    // build normals
    SkAutoSTMalloc<64, SkVector> normal0(inputPolygonSize);
    SkAutoSTMalloc<64, SkVector> normal1(inputPolygonSize);
    SkScalar currOffset = offsetDistanceFunc(inputPolygonVerts[0]);
    for (int curr = 0; curr < inputPolygonSize; ++curr) {
        int next = (curr + 1) % inputPolygonSize;
        SkScalar nextOffset = offsetDistanceFunc(inputPolygonVerts[next]);
        if (!compute_offset_vectors(inputPolygonVerts[curr], inputPolygonVerts[next],
                                    currOffset, nextOffset, winding,
                                    &normal0[curr], &normal1[next])) {
            return false;
        }
        currOffset = nextOffset;
    }

    // build initial offset edge list
    SkSTArray<64, EdgeData> edgeData(inputPolygonSize);
    int prevIndex = inputPolygonSize - 1;
    int currIndex = 0;
    int nextIndex = 1;
    while (currIndex < inputPolygonSize) {
        int side = compute_side(inputPolygonVerts[prevIndex],
                                inputPolygonVerts[currIndex],
                                inputPolygonVerts[nextIndex]);
        SkScalar offset = offsetDistanceFunc(inputPolygonVerts[currIndex]);
        // if reflex point, fill in curve
        if (side*winding*offset < 0) {
            SkScalar rotSin, rotCos;
            int numSteps;
            SkVector prevNormal = normal1[currIndex];
            SkComputeRadialSteps(prevNormal, normal0[currIndex], SkScalarAbs(offset),
                                 &rotSin, &rotCos, &numSteps);
            for (int i = 0; i < numSteps - 1; ++i) {
                SkVector currNormal = SkVector::Make(prevNormal.fX*rotCos - prevNormal.fY*rotSin,
                                                     prevNormal.fY*rotCos + prevNormal.fX*rotSin);
                EdgeData& edge = edgeData.push_back();
                edge.fInset.fP0 = inputPolygonVerts[currIndex] + prevNormal;
                edge.fInset.fP1 = inputPolygonVerts[currIndex] + currNormal;
                edge.init(currIndex, currIndex);
                prevNormal = currNormal;
            }
            EdgeData& edge = edgeData.push_back();
            edge.fInset.fP0 = inputPolygonVerts[currIndex] + prevNormal;
            edge.fInset.fP1 = inputPolygonVerts[currIndex] + normal0[currIndex];
            edge.init(currIndex, currIndex);
        }

        // Add the edge
        EdgeData& edge = edgeData.push_back();
        edge.fInset.fP0 = inputPolygonVerts[currIndex] + normal0[currIndex];
        edge.fInset.fP1 = inputPolygonVerts[nextIndex] + normal1[nextIndex];
        edge.init(currIndex, nextIndex);

        prevIndex = currIndex;
        currIndex++;
        nextIndex = (nextIndex + 1) % inputPolygonSize;
    }

    int edgeDataSize = edgeData.count();
    prevIndex = edgeDataSize - 1;
    currIndex = 0;
    int insetVertexCount = edgeDataSize;
    int iterations = 0;
    while (prevIndex != currIndex) {
        ++iterations;
        // we should check each edge against each other edge at most once
        if (iterations > edgeDataSize*edgeDataSize) {
            return false;
        }

        if (!edgeData[prevIndex].fValid) {
            prevIndex = (prevIndex + edgeDataSize - 1) % edgeDataSize;
            continue;
        }
        if (!edgeData[currIndex].fValid) {
            currIndex = (currIndex + 1) % edgeDataSize;
            continue;
        }

        SkScalar s, t;
        SkPoint intersection;
        if (compute_intersection(edgeData[prevIndex].fInset, edgeData[currIndex].fInset,
                                 &intersection, &s, &t)) {
            // if new intersection is further back on previous inset from the prior intersection
            if (s < edgeData[prevIndex].fTValue) {
                // no point in considering this one again
                edgeData[prevIndex].fValid = false;
                --insetVertexCount;
                // go back one segment
                prevIndex = (prevIndex + edgeDataSize - 1) % edgeDataSize;
            // we've already considered this intersection, we're done
            } else if (edgeData[currIndex].fTValue > SK_ScalarMin &&
                       SkPointPriv::EqualsWithinTolerance(intersection,
                                                          edgeData[currIndex].fIntersection,
                                                          1.0e-6f)) {
                break;
            } else {
                // add intersection
                edgeData[currIndex].fIntersection = intersection;
                edgeData[currIndex].fTValue = t;
                edgeData[currIndex].fIndex = edgeData[prevIndex].fEnd;

                // go to next segment
                prevIndex = currIndex;
                currIndex = (currIndex + 1) % edgeDataSize;
            }
        } else {
            // If there is no intersection, we want to minimize the distance between
            // the point where the segment lines cross and the segments themselves.
            int prevPrevIndex = (prevIndex + edgeDataSize - 1) % edgeDataSize;
            int currNextIndex = (currIndex + 1) % edgeDataSize;
            SkScalar dist0 = compute_crossing_distance(edgeData[currIndex].fInset,
                                                       edgeData[prevPrevIndex].fInset);
            SkScalar dist1 = compute_crossing_distance(edgeData[prevIndex].fInset,
                                                       edgeData[currNextIndex].fInset);
            if (dist0 < dist1) {
                edgeData[prevIndex].fValid = false;
                prevIndex = prevPrevIndex;
            } else {
                edgeData[currIndex].fValid = false;
                currIndex = currNextIndex;
            }
            --insetVertexCount;
        }
    }

    // store all the valid intersections that aren't nearly coincident
    // TODO: look at the main algorithm and see if we can detect these better
    static constexpr SkScalar kCleanupTolerance = 0.01f;

    offsetPolygon->reset();
    offsetPolygon->setReserve(insetVertexCount);
    currIndex = -1;
    for (int i = 0; i < edgeData.count(); ++i) {
        if (edgeData[i].fValid && (currIndex == -1 ||
            !SkPointPriv::EqualsWithinTolerance(edgeData[i].fIntersection,
                                                (*offsetPolygon)[currIndex],
                                                kCleanupTolerance))) {
            *offsetPolygon->push() = edgeData[i].fIntersection;
            if (polygonIndices) {
                *polygonIndices->push() = edgeData[i].fIndex;
            }
            currIndex++;
        }
    }
    // make sure the first and last points aren't coincident
    if (currIndex >= 1 &&
        SkPointPriv::EqualsWithinTolerance((*offsetPolygon)[0], (*offsetPolygon)[currIndex],
                                           kCleanupTolerance)) {
        offsetPolygon->pop();
        if (polygonIndices) {
            polygonIndices->pop();
        }
    }

    // check winding of offset polygon (it should be the same as the original polygon)
    int offsetWinding = get_winding(offsetPolygon->begin(), offsetPolygon->count());

    return (winding*offsetWinding > 0 &&
            SkIsSimplePolygon(offsetPolygon->begin(), offsetPolygon->count()));
}
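
// Illustrative usage (a sketch with hypothetical caller-side data): offset every vertex
// of a unit square by a constant distance. The sign convention follows the offsetting
// code above -- the distance functor is applied on the 'winding' side of each edge --
// and 'polygonIndices' receives, for each output point, the index of the original
// vertex it was derived from.
//
//   SkPoint square[4] = { {0, 0}, {1, 0}, {1, 1}, {0, 1} };
//   SkTDArray<SkPoint> offsetPoly;
//   SkTDArray<int> originalIndices;
//   bool ok = SkOffsetSimplePolygon(square, 4,
//                                   [](const SkPoint&) { return 0.1f; },
//                                   &offsetPoly, &originalIndices);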

//////////////////////////////////////////////////////////////////////////////////////////

struct TriangulationVertex {
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(TriangulationVertex);

    enum class VertexType { kConvex, kReflex };

    SkPoint fPosition;
    VertexType fVertexType;
    uint16_t fIndex;
    uint16_t fPrevIndex;
    uint16_t fNextIndex;
};

// test to see if point p is in triangle p0p1p2.
// for now assuming strictly inside -- if on the edge it's outside
static bool point_in_triangle(const SkPoint& p0, const SkPoint& p1, const SkPoint& p2,
                              const SkPoint& p) {
    SkVector v0 = p1 - p0;
    SkVector v1 = p2 - p1;
    SkScalar n = v0.cross(v1);

    SkVector w0 = p - p0;
    if (n*v0.cross(w0) < SK_ScalarNearlyZero) {
        return false;
    }

    SkVector w1 = p - p1;
    if (n*v1.cross(w1) < SK_ScalarNearlyZero) {
        return false;
    }

    SkVector v2 = p0 - p2;
    SkVector w2 = p - p2;
    if (n*v2.cross(w2) < SK_ScalarNearlyZero) {
        return false;
    }

    return true;
}

// Data structure to track reflex vertices and check whether any are inside a given triangle
class ReflexHash {
public:
    void add(TriangulationVertex* v) {
        fReflexList.addToTail(v);
    }

    void remove(TriangulationVertex* v) {
        fReflexList.remove(v);
    }

    bool checkTriangle(const SkPoint& p0, const SkPoint& p1, const SkPoint& p2) {
        for (SkTInternalLList<TriangulationVertex>::Iter reflexIter = fReflexList.begin();
             reflexIter != fReflexList.end(); ++reflexIter) {
            TriangulationVertex* reflexVertex = *reflexIter;
            if (point_in_triangle(p0, p1, p2, reflexVertex->fPosition)) {
                return true;
            }
        }

        return false;
    }

private:
    // TODO: switch to an actual spatial hash
    SkTInternalLList<TriangulationVertex> fReflexList;
};

// Check to see if a reflex vertex has become a convex vertex after clipping an ear
static void reclassify_vertex(TriangulationVertex* p, const SkPoint* polygonVerts,
                              int winding, ReflexHash* reflexHash,
                              SkTInternalLList<TriangulationVertex>* convexList) {
    if (TriangulationVertex::VertexType::kReflex == p->fVertexType) {
        SkVector v0 = p->fPosition - polygonVerts[p->fPrevIndex];
        SkVector v1 = polygonVerts[p->fNextIndex] - p->fPosition;
        if (winding*v0.cross(v1) > SK_ScalarNearlyZero) {
            p->fVertexType = TriangulationVertex::VertexType::kConvex;
            reflexHash->remove(p);
            p->fPrev = p->fNext = nullptr;
            convexList->addToTail(p);
        }
    }
}

bool SkTriangulateSimplePolygon(const SkPoint* polygonVerts, uint16_t* indexMap, int polygonSize,
                                SkTDArray<uint16_t>* triangleIndices) {
    if (polygonSize < 3) {
        return false;
    }
    // need to be able to represent all the vertices in the 16-bit indices
    if (polygonSize >= (1 << 16)) {
        return false;
    }

    // get winding direction
    // TODO: we do this for all the polygon routines -- might be better to have the client
    // compute it and pass it in
    int winding = get_winding(polygonVerts, polygonSize);
    if (0 == winding) {
        return false;
    }

    // Classify initial vertices into a list of convex vertices and a hash of reflex vertices
    // TODO: possibly sort the convexList in some way to get better triangles
    SkTInternalLList<TriangulationVertex> convexList;
    ReflexHash reflexHash;
    SkAutoSTMalloc<64, TriangulationVertex> triangulationVertices(polygonSize);
    int prevIndex = polygonSize - 1;
    int currIndex = 0;
    int nextIndex = 1;
    SkVector v0 = polygonVerts[currIndex] - polygonVerts[prevIndex];
    SkVector v1 = polygonVerts[nextIndex] - polygonVerts[currIndex];
    for (int i = 0; i < polygonSize; ++i) {
        SkDEBUGCODE(memset(&triangulationVertices[currIndex], 0, sizeof(TriangulationVertex)));
        triangulationVertices[currIndex].fPosition = polygonVerts[currIndex];
        triangulationVertices[currIndex].fIndex = currIndex;
        triangulationVertices[currIndex].fPrevIndex = prevIndex;
        triangulationVertices[currIndex].fNextIndex = nextIndex;
        if (winding*v0.cross(v1) > SK_ScalarNearlyZero) {
            triangulationVertices[currIndex].fVertexType = TriangulationVertex::VertexType::kConvex;
            convexList.addToTail(&triangulationVertices[currIndex]);
        } else {
            // We treat near collinear vertices as reflex
            triangulationVertices[currIndex].fVertexType = TriangulationVertex::VertexType::kReflex;
            reflexHash.add(&triangulationVertices[currIndex]);
        }

        prevIndex = currIndex;
        currIndex = nextIndex;
        nextIndex = (currIndex + 1) % polygonSize;
        v0 = v1;
        v1 = polygonVerts[nextIndex] - polygonVerts[currIndex];
    }

    // The general concept: We are trying to find three neighboring vertices where
    // no other vertex lies inside the triangle (an "ear"). If we find one, we clip
    // that ear off, and then repeat on the new polygon. Once we get down to three vertices
    // we have triangulated the entire polygon.
    // In the worst case this is an n^2 algorithm. We can cut down the search space somewhat by
    // noting that only convex vertices can be potential ears, and we only need to check whether
    // any reflex vertices lie inside the ear.
    triangleIndices->setReserve(triangleIndices->count() + 3 * (polygonSize - 2));
    int vertexCount = polygonSize;
    while (vertexCount > 3) {
        bool success = false;
        TriangulationVertex* earVertex = nullptr;
        TriangulationVertex* p0 = nullptr;
        TriangulationVertex* p2 = nullptr;
        // find a convex vertex to clip
        for (SkTInternalLList<TriangulationVertex>::Iter convexIter = convexList.begin();
             convexIter != convexList.end(); ++convexIter) {
            earVertex = *convexIter;
            SkASSERT(TriangulationVertex::VertexType::kReflex != earVertex->fVertexType);

            p0 = &triangulationVertices[earVertex->fPrevIndex];
            p2 = &triangulationVertices[earVertex->fNextIndex];

            // see if any reflex vertices are inside the ear
            bool failed = reflexHash.checkTriangle(p0->fPosition, earVertex->fPosition,
                                                   p2->fPosition);
            if (failed) {
                continue;
            }

            // found one we can clip
            success = true;
            break;
        }
        // If we can't find any ears to clip, this probably isn't a simple polygon
        if (!success) {
            return false;
        }

        // add indices
        auto indices = triangleIndices->append(3);
        indices[0] = indexMap[p0->fIndex];
        indices[1] = indexMap[earVertex->fIndex];
        indices[2] = indexMap[p2->fIndex];

        // clip the ear
        convexList.remove(earVertex);
        --vertexCount;

        // reclassify reflex verts
        p0->fNextIndex = earVertex->fNextIndex;
        reclassify_vertex(p0, polygonVerts, winding, &reflexHash, &convexList);

        p2->fPrevIndex = earVertex->fPrevIndex;
        reclassify_vertex(p2, polygonVerts, winding, &reflexHash, &convexList);
    }

    // output indices
    for (SkTInternalLList<TriangulationVertex>::Iter vertexIter = convexList.begin();
         vertexIter != convexList.end(); ++vertexIter) {
        TriangulationVertex* vertex = *vertexIter;
        *triangleIndices->push() = indexMap[vertex->fIndex];
    }

    return true;
}
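
// Illustrative usage (a sketch with hypothetical caller-side data): triangulate a simple
// quad. The index map is the identity here; callers typically use it to remap the local
// polygon indices into a larger vertex buffer.
//
//   SkPoint quad[4] = { {0, 0}, {2, 0}, {2, 1}, {0, 1} };
//   uint16_t indexMap[4] = { 0, 1, 2, 3 };
//   SkTDArray<uint16_t> triangles;
//   if (SkTriangulateSimplePolygon(quad, indexMap, 4, &triangles)) {
//       // 'triangles' now holds 3*(4 - 2) == 6 indices describing two triangles
//   }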