/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkOffsetPolygon.h"

#include "SkPointPriv.h"
#include "SkTArray.h"
#include "SkTemplates.h"
#include "SkTDPQueue.h"

struct OffsetSegment {
    SkPoint fP0;
    SkPoint fP1;
};

// Computes perpDot for point compared to segment.
// A positive value means the point is to the left of the segment,
// negative is to the right, 0 is collinear.
static int compute_side(const SkPoint& s0, const SkPoint& s1, const SkPoint& p) {
    SkVector v0 = s1 - s0;
    SkVector v1 = p - s0;
    SkScalar perpDot = v0.cross(v1);
    if (!SkScalarNearlyZero(perpDot)) {
        return ((perpDot > 0) ? 1 : -1);
    }

    return 0;
}

// returns 1 for ccw, -1 for cw and 0 if degenerate
static int get_winding(const SkPoint* polygonVerts, int polygonSize) {
    SkPoint p0 = polygonVerts[0];
    SkPoint p1 = polygonVerts[1];

    for (int i = 2; i < polygonSize; ++i) {
        SkPoint p2 = polygonVerts[i];

        // determine if cw or ccw
        int side = compute_side(p0, p1, p2);
        if (0 != side) {
            return ((side > 0) ? 1 : -1);
        }

        // if nearly collinear, treat as straight line and continue
        p1 = p2;
    }

    return 0;
}

// Offset line segment p0-p1 'd0' and 'd1' units in the direction specified by 'side'
bool SkOffsetSegment(const SkPoint& p0, const SkPoint& p1, SkScalar d0, SkScalar d1,
                     int side, SkPoint* offset0, SkPoint* offset1) {
    SkASSERT(side == -1 || side == 1);
    SkVector perp = SkVector::Make(p0.fY - p1.fY, p1.fX - p0.fX);
    if (SkScalarNearlyEqual(d0, d1)) {
        // if distances are equal, can just outset by the perpendicular
        perp.setLength(d0*side);
        *offset0 = p0 + perp;
        *offset1 = p1 + perp;
    } else {
        // Otherwise we need to compute the outer tangent.
        // See: http://www.ambrsoft.com/TrigoCalc/Circles2/Circles2Tangent_.htm
        if (d0 < d1) {
            side = -side;
        }
        SkScalar dD = d0 - d1;
        // if one circle is inside another, we can't compute an offset
        if (dD*dD >= SkPointPriv::DistanceToSqd(p0, p1)) {
            return false;
        }
        SkPoint outerTangentIntersect = SkPoint::Make((p1.fX*d0 - p0.fX*d1) / dD,
                                                      (p1.fY*d0 - p0.fY*d1) / dD);

        SkScalar d0sq = d0*d0;
        SkVector dP = outerTangentIntersect - p0;
        SkScalar dPlenSq = SkPointPriv::LengthSqd(dP);
        SkScalar discrim = SkScalarSqrt(dPlenSq - d0sq);
        offset0->fX = p0.fX + (d0sq*dP.fX - side*d0*dP.fY*discrim) / dPlenSq;
        offset0->fY = p0.fY + (d0sq*dP.fY + side*d0*dP.fX*discrim) / dPlenSq;

        SkScalar d1sq = d1*d1;
        dP = outerTangentIntersect - p1;
        dPlenSq = SkPointPriv::LengthSqd(dP);
        discrim = SkScalarSqrt(dPlenSq - d1sq);
        offset1->fX = p1.fX + (d1sq*dP.fX - side*d1*dP.fY*discrim) / dPlenSq;
        offset1->fY = p1.fY + (d1sq*dP.fY + side*d1*dP.fX*discrim) / dPlenSq;
    }

    return true;
}
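
// Example usage (an illustrative sketch added for clarity, not from the original sources):
// with equal distances the segment is simply translated along its perpendicular, so
// offsetting (0,0)-(10,0) by 2 with side == 1 yields (0,2)-(10,2). With unequal distances
// the endpoints are instead placed on the outer tangent of circles of radius d0 and d1.
//
//     SkPoint off0, off1;
//     bool ok = SkOffsetSegment(SkPoint::Make(0, 0), SkPoint::Make(10, 0),
//                               2, 2, 1, &off0, &off1);
//     // ok == true, off0 == (0, 2), off1 == (10, 2)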

// Compute the intersection 'p' between segments s0 and s1, if any.
// 's' is the parametric value for the intersection along 's0' & 't' is the same for 's1'.
// Returns false if there is no intersection.
static bool compute_intersection(const OffsetSegment& s0, const OffsetSegment& s1,
                                 SkPoint* p, SkScalar* s, SkScalar* t) {
    // Common cases for polygon chains -- check if endpoints are touching
    if (SkPointPriv::EqualsWithinTolerance(s0.fP1, s1.fP0)) {
        *p = s0.fP1;
        *s = SK_Scalar1;
        *t = 0;
        return true;
    }
    if (SkPointPriv::EqualsWithinTolerance(s1.fP1, s0.fP0)) {
        *p = s1.fP1;
        *s = 0;
        *t = SK_Scalar1;
        return true;
    }

    SkVector v0 = s0.fP1 - s0.fP0;
    SkVector v1 = s1.fP1 - s1.fP0;
    // We should have culled coincident points before this
    SkASSERT(!SkPointPriv::EqualsWithinTolerance(s0.fP0, s0.fP1));
    SkASSERT(!SkPointPriv::EqualsWithinTolerance(s1.fP0, s1.fP1));

    SkVector d = s1.fP0 - s0.fP0;
    SkScalar perpDot = v0.cross(v1);
    SkScalar localS, localT;
    if (SkScalarNearlyZero(perpDot)) {
        // segments are parallel, but not collinear
        if (!SkScalarNearlyZero(d.dot(d), SK_ScalarNearlyZero*SK_ScalarNearlyZero)) {
            return false;
        }

        // project segment1's endpoints onto segment0
        localS = d.fX / v0.fX;
        localT = 0;
        if (localS < 0 || localS > SK_Scalar1) {
            // the first endpoint doesn't lie on segment0, try the other one
            SkScalar oldLocalS = localS;
            localS = (s1.fP1.fX - s0.fP0.fX) / v0.fX;
            localT = SK_Scalar1;
            if (localS < 0 || localS > SK_Scalar1) {
                // it's possible that segment1's interval surrounds segment0
                // this is false if the params have the same signs, and in that case no collision
                if (localS*oldLocalS > 0) {
                    return false;
                }
                // otherwise project segment0's endpoint onto segment1 instead
                localS = 0;
                localT = -d.fX / v1.fX;
            }
        }
    } else {
        localS = d.cross(v1) / perpDot;
        if (localS < 0 || localS > SK_Scalar1) {
            return false;
        }
        localT = d.cross(v0) / perpDot;
        if (localT < 0 || localT > SK_Scalar1) {
            return false;
        }
    }

    v0 *= localS;
    *p = s0.fP0 + v0;
    *s = localS;
    *t = localT;

    return true;
}
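
// Worked example (illustrative): for s0 from (0,0) to (2,0) and s1 from (1,-1) to (1,1),
// v0 = (2,0), v1 = (0,2), d = (1,-1), and perpDot = v0.cross(v1) = 4, so
//     s = d.cross(v1) / perpDot = (1*2 - (-1)*0) / 4 = 0.5
//     t = d.cross(v0) / perpDot = (1*0 - (-1)*2) / 4 = 0.5
// giving the intersection point (1,0) at the midpoint of both segments.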

// Compute where the lines through s0 and s1 cross, and return a distance measuring how far
// that crossing lies outside s0: the distance back to fP0 if it lies before the segment,
// otherwise the signed distance past fP1 (negative if the crossing falls within s0).
static SkScalar compute_crossing_distance(const OffsetSegment& s0, const OffsetSegment& s1) {
    SkVector v0 = s0.fP1 - s0.fP0;
    SkVector v1 = s1.fP1 - s1.fP0;

    SkScalar perpDot = v0.cross(v1);
    if (SkScalarNearlyZero(perpDot)) {
        // segments are parallel
        return SK_ScalarMax;
    }

    SkVector d = s1.fP0 - s0.fP0;
    SkScalar localS = d.cross(v1) / perpDot;
    if (localS < 0) {
        localS = -localS;
    } else {
        localS -= SK_Scalar1;
    }

    localS *= v0.length();

    return localS;
}

static bool is_convex(const SkTDArray<SkPoint>& poly) {
    if (poly.count() <= 3) {
        return true;
    }

    SkVector v0 = poly[0] - poly[poly.count() - 1];
    SkVector v1 = poly[1] - poly[poly.count() - 1];
    SkScalar winding = v0.cross(v1);

    for (int i = 0; i < poly.count() - 1; ++i) {
        int j = i + 1;
        int k = (i + 2) % poly.count();

        SkVector v0 = poly[j] - poly[i];
        SkVector v1 = poly[k] - poly[i];
        SkScalar perpDot = v0.cross(v1);
        if (winding*perpDot < 0) {
            return false;
        }
    }

    return true;
}

struct EdgeData {
    OffsetSegment fInset;          // the inset (or offset) segment for this edge
    SkPoint       fIntersection;   // computed intersection with the previous valid edge
    SkScalar      fTValue;         // parametric value of that intersection along fInset
    uint16_t      fStart;          // original vertex index at the start of this edge
    uint16_t      fEnd;            // original vertex index at the end of this edge
    uint16_t      fIndex;          // original vertex index associated with fIntersection
    bool          fValid;          // false once this edge has been discarded

    void init() {
        fIntersection = fInset.fP0;
        fTValue = SK_ScalarMin;
        fStart = 0;
        fEnd = 0;
        fIndex = 0;
        fValid = true;
    }

    void init(uint16_t start, uint16_t end) {
        fIntersection = fInset.fP0;
        fTValue = SK_ScalarMin;
        fStart = start;
        fEnd = end;
        fIndex = start;
        fValid = true;
    }
};

// The objective here is to inset all of the edges by the given distance, and then
// remove any invalid inset edges by detecting right-hand turns. In a ccw polygon,
// we should only be making left-hand turns (for cw polygons, we use the winding
// parameter to reverse this). We detect this by checking whether the second intersection
// on an edge is closer to its tail than the first one.
//
// We might also have the case that there is no intersection between two neighboring inset edges.
// In this case, one edge will lie to the right of the other and should be discarded along with
// its previous intersection (if any).
//
// Note: the assumption is that inputPolygon is convex and has no coincident points.
//
bool SkInsetConvexPolygon(const SkPoint* inputPolygonVerts, int inputPolygonSize,
                          std::function<SkScalar(int index)> insetDistanceFunc,
                          SkTDArray<SkPoint>* insetPolygon) {
    if (inputPolygonSize < 3) {
        return false;
    }

    int winding = get_winding(inputPolygonVerts, inputPolygonSize);
    if (0 == winding) {
        return false;
    }

    // set up
    SkAutoSTMalloc<64, EdgeData> edgeData(inputPolygonSize);
    for (int i = 0; i < inputPolygonSize; ++i) {
        int j = (i + 1) % inputPolygonSize;
        int k = (i + 2) % inputPolygonSize;
        // check for convexity just to be sure
        if (compute_side(inputPolygonVerts[i], inputPolygonVerts[j],
                         inputPolygonVerts[k])*winding < 0) {
            return false;
        }
        if (!SkOffsetSegment(inputPolygonVerts[i], inputPolygonVerts[j],
                             insetDistanceFunc(i), insetDistanceFunc(j),
                             winding,
                             &edgeData[i].fInset.fP0, &edgeData[i].fInset.fP1)) {
            return false;
        }
        edgeData[i].init();
    }

    int prevIndex = inputPolygonSize - 1;
    int currIndex = 0;
    int insetVertexCount = inputPolygonSize;
    while (prevIndex != currIndex) {
        if (!edgeData[prevIndex].fValid) {
            prevIndex = (prevIndex + inputPolygonSize - 1) % inputPolygonSize;
            continue;
        }

        SkScalar s, t;
        SkPoint intersection;
        if (compute_intersection(edgeData[prevIndex].fInset, edgeData[currIndex].fInset,
                                 &intersection, &s, &t)) {
            // if new intersection is further back on previous inset from the prior intersection
            if (s < edgeData[prevIndex].fTValue) {
                // no point in considering this one again
                edgeData[prevIndex].fValid = false;
                --insetVertexCount;
                // go back one segment
                prevIndex = (prevIndex + inputPolygonSize - 1) % inputPolygonSize;
            // we've already considered this intersection, we're done
            } else if (edgeData[currIndex].fTValue > SK_ScalarMin &&
                       SkPointPriv::EqualsWithinTolerance(intersection,
                                                          edgeData[currIndex].fIntersection,
                                                          1.0e-6f)) {
                break;
            } else {
                // add intersection
                edgeData[currIndex].fIntersection = intersection;
                edgeData[currIndex].fTValue = t;

                // go to next segment
                prevIndex = currIndex;
                currIndex = (currIndex + 1) % inputPolygonSize;
            }
        } else {
            // if prev to right side of curr
            int side = winding*compute_side(edgeData[currIndex].fInset.fP0,
                                            edgeData[currIndex].fInset.fP1,
                                            edgeData[prevIndex].fInset.fP1);
            if (side < 0 && side == winding*compute_side(edgeData[currIndex].fInset.fP0,
                                                         edgeData[currIndex].fInset.fP1,
                                                         edgeData[prevIndex].fInset.fP0)) {
                // no point in considering this one again
                edgeData[prevIndex].fValid = false;
                --insetVertexCount;
                // go back one segment
                prevIndex = (prevIndex + inputPolygonSize - 1) % inputPolygonSize;
            } else {
                // move to next segment
                edgeData[currIndex].fValid = false;
                --insetVertexCount;
                currIndex = (currIndex + 1) % inputPolygonSize;
            }
        }
    }

    // store all the valid intersections that aren't nearly coincident
    // TODO: look at the main algorithm and see if we can detect these better
    static constexpr SkScalar kCleanupTolerance = 0.01f;

    insetPolygon->reset();
    insetPolygon->setReserve(insetVertexCount);
    currIndex = -1;
    for (int i = 0; i < inputPolygonSize; ++i) {
        if (edgeData[i].fValid && (currIndex == -1 ||
            !SkPointPriv::EqualsWithinTolerance(edgeData[i].fIntersection,
                                                (*insetPolygon)[currIndex],
                                                kCleanupTolerance))) {
            *insetPolygon->push() = edgeData[i].fIntersection;
            currIndex++;
        }
    }
    // make sure the first and last points aren't coincident
    if (currIndex >= 1 &&
        SkPointPriv::EqualsWithinTolerance((*insetPolygon)[0], (*insetPolygon)[currIndex],
                                           kCleanupTolerance)) {
        insetPolygon->pop();
    }

    return (insetPolygon->count() >= 3 && is_convex(*insetPolygon));
}
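
// Example usage (an illustrative sketch, not from the original sources): insetting a
// 10x10 square by a constant distance of 1 should yield the 8x8 square one pixel inside it.
//
//     SkPoint square[4] = { {0, 0}, {10, 0}, {10, 10}, {0, 10} };
//     SkTDArray<SkPoint> inset;
//     bool ok = SkInsetConvexPolygon(square, 4, [](int) { return SkIntToScalar(1); }, &inset);
//     // on success, 'inset' holds the corners (1,1), (9,1), (9,9), (1,9)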

// compute the number of points needed for a circular join when offsetting a reflex vertex
static void compute_radial_steps(const SkVector& v1, const SkVector& v2, SkScalar r,
                                 SkScalar* rotSin, SkScalar* rotCos, int* n) {
    const SkScalar kRecipPixelsPerArcSegment = 0.25f;

    SkScalar rCos = v1.dot(v2);
    SkScalar rSin = v1.cross(v2);
    SkScalar theta = SkScalarATan2(rSin, rCos);

    int steps = SkScalarRoundToInt(SkScalarAbs(r*theta*kRecipPixelsPerArcSegment));
    // guard against a zero step count for very short arcs, which would otherwise
    // divide by zero below
    if (steps < 1) {
        steps = 1;
    }

    SkScalar dTheta = theta / steps;
    *rotSin = SkScalarSinCos(dTheta, rotCos);
    *n = steps;
}
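
// Worked example (illustrative): for a 90-degree turn between unit normals and r = 8,
// theta = pi/2, so steps = round(8 * 1.5708 * 0.25) = 3 and each step rotates by about
// 30 degrees -- roughly one arc segment per 4 pixels of arc length, as implied by
// kRecipPixelsPerArcSegment = 0.25.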

// tolerant less-than comparison
static inline bool nearly_lt(SkScalar a, SkScalar b, SkScalar tolerance = SK_ScalarNearlyZero) {
    return a < b - tolerance;
}

// A point is "left" of another if its x-coordinate is less or, if the x-coordinates are
// equal, its y-coordinate is less.
static bool left(const SkPoint& p0, const SkPoint& p1) {
    return nearly_lt(p0.fX, p1.fX) ||
           (SkScalarNearlyEqual(p0.fX, p1.fX) && nearly_lt(p0.fY, p1.fY));
}

struct Vertex {
    static bool Left(const Vertex& qv0, const Vertex& qv1) {
        return left(qv0.fPosition, qv1.fPosition);
    }
    // packed to fit into 16 bytes
    SkPoint  fPosition;
    uint16_t fIndex;       // index in unsorted polygon
    uint16_t fPrevIndex;   // indices for previous and next vertex in unsorted polygon
    uint16_t fNextIndex;
    uint16_t fFlags;
};

enum VertexFlags {
    kPrevLeft_VertexFlag = 0x1,
    kNextLeft_VertexFlag = 0x2,
};

struct Edge {
    // returns true if "this" is above "that"
    bool above(const Edge& that, SkScalar tolerance = SK_ScalarNearlyZero) {
        SkASSERT(nearly_lt(this->fSegment.fP0.fX, that.fSegment.fP0.fX, tolerance) ||
                 SkScalarNearlyEqual(this->fSegment.fP0.fX, that.fSegment.fP0.fX, tolerance));
        // The idea here is that if the vector between the origins of the two segments (dv)
        // rotates counterclockwise up to the vector representing the "this" segment (u),
        // then we know that "this" is above "that". If the result is clockwise we say it's below.
        SkVector dv = that.fSegment.fP0 - this->fSegment.fP0;
        SkVector u = this->fSegment.fP1 - this->fSegment.fP0;
        SkScalar cross = dv.cross(u);
        if (cross > tolerance) {
            return true;
        } else if (cross < -tolerance) {
            return false;
        }
        // If the result is 0 then either the two origins are equal or the origin of "that"
        // lies on dv. So then we try the same for the vector from the tail of "this"
        // to the head of "that". Again, ccw means "this" is above "that".
        dv = that.fSegment.fP1 - this->fSegment.fP0;
        return (dv.cross(u) > tolerance);
    }

    bool intersect(const Edge& that) const {
        SkPoint intersection;
        SkScalar s, t;
        // check first to see if these edges are neighbors in the polygon
        if (this->fIndex0 == that.fIndex0 || this->fIndex1 == that.fIndex0 ||
            this->fIndex0 == that.fIndex1 || this->fIndex1 == that.fIndex1) {
            return false;
        }
        return compute_intersection(this->fSegment, that.fSegment, &intersection, &s, &t);
    }

    bool operator==(const Edge& that) const {
        return (this->fIndex0 == that.fIndex0 && this->fIndex1 == that.fIndex1);
    }

    bool operator!=(const Edge& that) const {
        return !operator==(that);
    }

    OffsetSegment fSegment;
    int32_t fIndex0;   // indices of the edge's two endpoints in the original polygon
    int32_t fIndex1;
};

class EdgeList {
public:
    void reserve(int count) { fEdges.reserve(count); }

    bool insert(const Edge& newEdge) {
        // linear search for now (expected case is very few active edges)
        int insertIndex = 0;
        while (insertIndex < fEdges.count() && fEdges[insertIndex].above(newEdge)) {
            ++insertIndex;
        }
        // if we intersect with the existing edge above or below us
        // then we know this polygon is not simple, so don't insert, just fail
        if (insertIndex > 0 && newEdge.intersect(fEdges[insertIndex - 1])) {
            return false;
        }
        if (insertIndex < fEdges.count() && newEdge.intersect(fEdges[insertIndex])) {
            return false;
        }

        fEdges.push_back();
        for (int i = fEdges.count() - 1; i > insertIndex; --i) {
            fEdges[i] = fEdges[i - 1];
        }
        fEdges[insertIndex] = newEdge;

        return true;
    }

    bool remove(const Edge& edge) {
        SkASSERT(fEdges.count() > 0);

        // linear search for now (expected case is very few active edges)
        int removeIndex = 0;
        while (removeIndex < fEdges.count() && fEdges[removeIndex] != edge) {
            ++removeIndex;
        }
        // we'd better find it or something is wrong
        SkASSERT(removeIndex < fEdges.count());

        // if we intersect with the edge above or below us
        // then we know this polygon is not simple, so don't remove, just fail
        if (removeIndex > 0 && fEdges[removeIndex].intersect(fEdges[removeIndex - 1])) {
            return false;
        }
        if (removeIndex < fEdges.count() - 1) {
            if (fEdges[removeIndex].intersect(fEdges[removeIndex + 1])) {
                return false;
            }
            // copy over the old entry
            memmove(&fEdges[removeIndex], &fEdges[removeIndex + 1],
                    sizeof(Edge)*(fEdges.count() - removeIndex - 1));
        }

        fEdges.pop_back();
        return true;
    }

private:
    SkSTArray<1, Edge> fEdges;
};

// Here we implement a sweep line algorithm to determine whether the provided points
// represent a simple polygon, i.e., the polygon is non-self-intersecting.
// We first insert the vertices into a priority queue sorting horizontally from left to right.
// Then as we pop the vertices from the queue we generate events which indicate that an edge
// should be added or removed from an edge list. If any intersections are detected in the edge
// list, then we know the polygon is self-intersecting and hence not simple.
static bool is_simple_polygon(const SkPoint* polygon, int polygonSize) {
    SkTDPQueue<Vertex, Vertex::Left> vertexQueue;
    EdgeList sweepLine;

    sweepLine.reserve(polygonSize);
    for (int i = 0; i < polygonSize; ++i) {
        Vertex newVertex;
        newVertex.fPosition = polygon[i];
        newVertex.fIndex = i;
        newVertex.fPrevIndex = (i - 1 + polygonSize) % polygonSize;
        newVertex.fNextIndex = (i + 1) % polygonSize;
        newVertex.fFlags = 0;
        if (left(polygon[newVertex.fPrevIndex], polygon[i])) {
            newVertex.fFlags |= kPrevLeft_VertexFlag;
        }
        if (left(polygon[newVertex.fNextIndex], polygon[i])) {
            newVertex.fFlags |= kNextLeft_VertexFlag;
        }
        vertexQueue.insert(newVertex);
    }

    // pop each vertex from the queue and generate events depending on
    // where it lies relative to its neighboring edges
    while (vertexQueue.count() > 0) {
        const Vertex& v = vertexQueue.peek();

        // check edge to previous vertex
        if (v.fFlags & kPrevLeft_VertexFlag) {
            Edge edge{ { polygon[v.fPrevIndex], v.fPosition }, v.fPrevIndex, v.fIndex };
            if (!sweepLine.remove(edge)) {
                break;
            }
        } else {
            Edge edge{ { v.fPosition, polygon[v.fPrevIndex] }, v.fIndex, v.fPrevIndex };
            if (!sweepLine.insert(edge)) {
                break;
            }
        }

        // check edge to next vertex
        if (v.fFlags & kNextLeft_VertexFlag) {
            Edge edge{ { polygon[v.fNextIndex], v.fPosition }, v.fNextIndex, v.fIndex };
            if (!sweepLine.remove(edge)) {
                break;
            }
        } else {
            Edge edge{ { v.fPosition, polygon[v.fNextIndex] }, v.fIndex, v.fNextIndex };
            if (!sweepLine.insert(edge)) {
                break;
            }
        }

        vertexQueue.pop();
    }

    return (vertexQueue.count() == 0);
}
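
// For example (illustrative): the convex quad { (0,0), (4,0), (4,4), (0,4) } is simple,
// while the "bowtie" { (0,0), (4,4), (4,0), (0,4) } is not -- its two diagonal edges
// cross at (2,2), so the sweep detects the intersection and the queue is never drained.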

// TODO: assuming a constant offset here -- do we want to support variable offset?
bool SkOffsetSimplePolygon(const SkPoint* inputPolygonVerts, int inputPolygonSize,
                           SkScalar offset, SkTDArray<SkPoint>* offsetPolygon,
                           SkTDArray<int>* polygonIndices) {
    if (inputPolygonSize < 3) {
        return false;
    }

    if (!is_simple_polygon(inputPolygonVerts, inputPolygonSize)) {
        return false;
    }

    // compute area and use sign to determine winding
    // do initial pass to build normals
    SkAutoSTMalloc<64, SkVector> normals(inputPolygonSize);
    SkScalar quadArea = 0;
    for (int curr = 0; curr < inputPolygonSize; ++curr) {
        int next = (curr + 1) % inputPolygonSize;
        SkVector tangent = inputPolygonVerts[next] - inputPolygonVerts[curr];
        SkVector normal = SkVector::Make(-tangent.fY, tangent.fX);
        normals[curr] = normal;
        quadArea += inputPolygonVerts[curr].cross(inputPolygonVerts[next]);
    }
    // reject a degenerate (near-zero-area) polygon before picking a winding
    if (SkScalarNearlyZero(quadArea)) {
        return false;
    }
    // 1 == ccw, -1 == cw
    int winding = (quadArea > 0) ? 1 : -1;

    // resize normals to match offset
    for (int curr = 0; curr < inputPolygonSize; ++curr) {
        normals[curr].setLength(winding*offset);
    }

    // build initial offset edge list
    SkSTArray<64, EdgeData> edgeData(inputPolygonSize);
    int prevIndex = inputPolygonSize - 1;
    int currIndex = 0;
    int nextIndex = 1;
    while (currIndex < inputPolygonSize) {
        int side = compute_side(inputPolygonVerts[prevIndex],
                                inputPolygonVerts[currIndex],
                                inputPolygonVerts[nextIndex]);

        // if reflex point, fill in curve
        if (side*winding*offset < 0) {
            SkScalar rotSin, rotCos;
            int numSteps;
            SkVector prevNormal = normals[prevIndex];
            compute_radial_steps(prevNormal, normals[currIndex], SkScalarAbs(offset),
                                 &rotSin, &rotCos, &numSteps);
            for (int i = 0; i < numSteps - 1; ++i) {
                SkVector currNormal = SkVector::Make(prevNormal.fX*rotCos - prevNormal.fY*rotSin,
                                                     prevNormal.fY*rotCos + prevNormal.fX*rotSin);
                EdgeData& edge = edgeData.push_back();
                edge.fInset.fP0 = inputPolygonVerts[currIndex] + prevNormal;
                edge.fInset.fP1 = inputPolygonVerts[currIndex] + currNormal;
                edge.init(currIndex, currIndex);
                prevNormal = currNormal;
            }
            EdgeData& edge = edgeData.push_back();
            edge.fInset.fP0 = inputPolygonVerts[currIndex] + prevNormal;
            edge.fInset.fP1 = inputPolygonVerts[currIndex] + normals[currIndex];
            edge.init(currIndex, currIndex);
        }

        // Add the edge
        EdgeData& edge = edgeData.push_back();
        edge.fInset.fP0 = inputPolygonVerts[currIndex] + normals[currIndex];
        edge.fInset.fP1 = inputPolygonVerts[nextIndex] + normals[currIndex];
        edge.init(currIndex, nextIndex);

        prevIndex = currIndex;
        currIndex++;
        nextIndex = (nextIndex + 1) % inputPolygonSize;
    }

    int edgeDataSize = edgeData.count();
    prevIndex = edgeDataSize - 1;
    currIndex = 0;
    int insetVertexCount = edgeDataSize;
    while (prevIndex != currIndex) {
        if (!edgeData[prevIndex].fValid) {
            prevIndex = (prevIndex + edgeDataSize - 1) % edgeDataSize;
            continue;
        }
        if (!edgeData[currIndex].fValid) {
            currIndex = (currIndex + 1) % edgeDataSize;
            continue;
        }

        SkScalar s, t;
        SkPoint intersection;
        if (compute_intersection(edgeData[prevIndex].fInset, edgeData[currIndex].fInset,
                                 &intersection, &s, &t)) {
            // if new intersection is further back on previous inset from the prior intersection
            if (s < edgeData[prevIndex].fTValue) {
                // no point in considering this one again
                edgeData[prevIndex].fValid = false;
                --insetVertexCount;
                // go back one segment
                prevIndex = (prevIndex + edgeDataSize - 1) % edgeDataSize;
            // we've already considered this intersection, we're done
            } else if (edgeData[currIndex].fTValue > SK_ScalarMin &&
                       SkPointPriv::EqualsWithinTolerance(intersection,
                                                          edgeData[currIndex].fIntersection,
                                                          1.0e-6f)) {
                break;
            } else {
                // add intersection
                edgeData[currIndex].fIntersection = intersection;
                edgeData[currIndex].fTValue = t;
                edgeData[currIndex].fIndex = edgeData[prevIndex].fEnd;

                // go to next segment
                prevIndex = currIndex;
                currIndex = (currIndex + 1) % edgeDataSize;
            }
        } else {
            // If there is no intersection, we want to minimize the distance between
            // the point where the segment lines cross and the segments themselves.
            int prevPrevIndex = (prevIndex + edgeDataSize - 1) % edgeDataSize;
            int currNextIndex = (currIndex + 1) % edgeDataSize;
            SkScalar dist0 = compute_crossing_distance(edgeData[currIndex].fInset,
                                                       edgeData[prevPrevIndex].fInset);
            SkScalar dist1 = compute_crossing_distance(edgeData[prevIndex].fInset,
                                                       edgeData[currNextIndex].fInset);
            if (dist0 < dist1) {
                edgeData[prevIndex].fValid = false;
                prevIndex = prevPrevIndex;
            } else {
                edgeData[currIndex].fValid = false;
                currIndex = currNextIndex;
            }
            --insetVertexCount;
        }
    }

    // store all the valid intersections that aren't nearly coincident
    // TODO: look at the main algorithm and see if we can detect these better
    static constexpr SkScalar kCleanupTolerance = 0.01f;

    offsetPolygon->reset();
    offsetPolygon->setReserve(insetVertexCount);
    currIndex = -1;
    for (int i = 0; i < edgeData.count(); ++i) {
        if (edgeData[i].fValid && (currIndex == -1 ||
            !SkPointPriv::EqualsWithinTolerance(edgeData[i].fIntersection,
                                                (*offsetPolygon)[currIndex],
                                                kCleanupTolerance))) {
            *offsetPolygon->push() = edgeData[i].fIntersection;
            if (polygonIndices) {
                *polygonIndices->push() = edgeData[i].fIndex;
            }
            currIndex++;
        }
    }
    // make sure the first and last points aren't coincident
    if (currIndex >= 1 &&
        SkPointPriv::EqualsWithinTolerance((*offsetPolygon)[0], (*offsetPolygon)[currIndex],
                                           kCleanupTolerance)) {
        offsetPolygon->pop();
        if (polygonIndices) {
            polygonIndices->pop();
        }
    }

    // compute signed area to check winding (it should be same as the original polygon)
    quadArea = 0;
    for (int curr = 0; curr < offsetPolygon->count(); ++curr) {
        int next = (curr + 1) % offsetPolygon->count();
        quadArea += (*offsetPolygon)[curr].cross((*offsetPolygon)[next]);
    }

    return (winding*quadArea > 0 &&
            is_simple_polygon(offsetPolygon->begin(), offsetPolygon->count()));
}
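
// Example usage (an illustrative sketch, not from the original sources). With the sign
// convention implied by the normal scaling above (normals are set to winding*offset), a
// positive offset moves edges toward the interior and a negative offset moves them outward,
// inserting circular joins wherever the turn is reflex relative to the offset direction:
//
//     SkPoint square[4] = { {0, 0}, {10, 0}, {10, 10}, {0, 10} };
//     SkTDArray<SkPoint> offsetPoly;
//     SkTDArray<int> originatingIndices;
//     bool ok = SkOffsetSimplePolygon(square, 4, -SkIntToScalar(2),
//                                     &offsetPoly, &originatingIndices);
//     // on success, offsetPoly approximates the square grown by 2 with rounded corners,
//     // and originatingIndices maps each result point back to a source vertex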