Brian Salomon | ab664fa | 2017-03-24 16:07:20 +0000 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2017 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
Jim Van Verth | 41964ed | 2018-03-28 10:10:30 -0400 | [diff] [blame] | 8 | #include "SkOffsetPolygon.h" |
Brian Salomon | ab664fa | 2017-03-24 16:07:20 +0000 | [diff] [blame] | 9 | |
Cary Clark | df429f3 | 2017-11-08 11:44:31 -0500 | [diff] [blame] | 10 | #include "SkPointPriv.h" |
Jim Van Verth | 4db18ed | 2018-04-03 10:00:37 -0400 | [diff] [blame] | 11 | #include "SkTArray.h" |
Brian Salomon | ab664fa | 2017-03-24 16:07:20 +0000 | [diff] [blame] | 12 | #include "SkTemplates.h" |
Jim Van Verth | 4db18ed | 2018-04-03 10:00:37 -0400 | [diff] [blame] | 13 | #include "SkTDPQueue.h" |
Brian Salomon | ab664fa | 2017-03-24 16:07:20 +0000 | [diff] [blame] | 14 | |
// A directed line segment produced by offsetting a polygon edge.
struct OffsetSegment {
    SkPoint fP0;  // segment start point
    SkPoint fP1;  // segment end point
};
| 19 | |
| 20 | // Computes perpDot for point compared to segment. |
| 21 | // A positive value means the point is to the left of the segment, |
| 22 | // negative is to the right, 0 is collinear. |
| 23 | static int compute_side(const SkPoint& s0, const SkPoint& s1, const SkPoint& p) { |
| 24 | SkVector v0 = s1 - s0; |
| 25 | SkVector v1 = p - s0; |
| 26 | SkScalar perpDot = v0.cross(v1); |
| 27 | if (!SkScalarNearlyZero(perpDot)) { |
| 28 | return ((perpDot > 0) ? 1 : -1); |
| 29 | } |
| 30 | |
| 31 | return 0; |
| 32 | } |
| 33 | |
| 34 | // returns 1 for ccw, -1 for cw and 0 if degenerate |
| 35 | static int get_winding(const SkPoint* polygonVerts, int polygonSize) { |
| 36 | SkPoint p0 = polygonVerts[0]; |
| 37 | SkPoint p1 = polygonVerts[1]; |
| 38 | |
| 39 | for (int i = 2; i < polygonSize; ++i) { |
| 40 | SkPoint p2 = polygonVerts[i]; |
| 41 | |
| 42 | // determine if cw or ccw |
| 43 | int side = compute_side(p0, p1, p2); |
| 44 | if (0 != side) { |
| 45 | return ((side > 0) ? 1 : -1); |
| 46 | } |
| 47 | |
| 48 | // if nearly collinear, treat as straight line and continue |
| 49 | p1 = p2; |
| 50 | } |
| 51 | |
| 52 | return 0; |
| 53 | } |
| 54 | |
// Offset line segment p0-p1 'd0' and 'd1' units in the direction specified by 'side'
// (+1 == left of p0->p1, -1 == right). When the two distances differ, the result is
// the outer tangent segment of the circles of radius d0 and d1 centered at p0 and p1.
// Returns false if no such tangent exists (one circle contains the other).
bool SkOffsetSegment(const SkPoint& p0, const SkPoint& p1, SkScalar d0, SkScalar d1,
                     int side, SkPoint* offset0, SkPoint* offset1) {
    SkASSERT(side == -1 || side == 1);
    // perpendicular to p0->p1, pointing to its left
    SkVector perp = SkVector::Make(p0.fY - p1.fY, p1.fX - p0.fX);
    if (SkScalarNearlyEqual(d0, d1)) {
        // if distances are equal, can just outset by the perpendicular
        perp.setLength(d0*side);
        *offset0 = p0 + perp;
        *offset1 = p1 + perp;
    } else {
        // Otherwise we need to compute the outer tangent.
        // See: http://www.ambrsoft.com/TrigoCalc/Circles2/Circles2Tangent_.htm
        if (d0 < d1) {
            // the tangent construction below assumes d0 >= d1; flipping the side
            // compensates for the mirrored geometry
            side = -side;
        }
        SkScalar dD = d0 - d1;
        // if one circle is inside another, we can't compute an offset
        if (dD*dD >= SkPointPriv::DistanceToSqd(p0, p1)) {
            return false;
        }
        // the point where the two outer tangent lines intersect
        SkPoint outerTangentIntersect = SkPoint::Make((p1.fX*d0 - p0.fX*d1) / dD,
                                                      (p1.fY*d0 - p0.fY*d1) / dD);

        // tangency point on the circle around p0
        SkScalar d0sq = d0*d0;
        SkVector dP = outerTangentIntersect - p0;
        SkScalar dPlenSq = SkPointPriv::LengthSqd(dP);
        SkScalar discrim = SkScalarSqrt(dPlenSq - d0sq);
        offset0->fX = p0.fX + (d0sq*dP.fX - side*d0*dP.fY*discrim) / dPlenSq;
        offset0->fY = p0.fY + (d0sq*dP.fY + side*d0*dP.fX*discrim) / dPlenSq;

        // tangency point on the circle around p1
        SkScalar d1sq = d1*d1;
        dP = outerTangentIntersect - p1;
        dPlenSq = SkPointPriv::LengthSqd(dP);
        discrim = SkScalarSqrt(dPlenSq - d1sq);
        offset1->fX = p1.fX + (d1sq*dP.fX - side*d1*dP.fY*discrim) / dPlenSq;
        offset1->fY = p1.fY + (d1sq*dP.fY + side*d1*dP.fX*discrim) / dPlenSq;
    }

    return true;
}
| 96 | |
| 97 | // Compute the intersection 'p' between segments s0 and s1, if any. |
| 98 | // 's' is the parametric value for the intersection along 's0' & 't' is the same for 's1'. |
| 99 | // Returns false if there is no intersection. |
Jim Van Verth | 4db18ed | 2018-04-03 10:00:37 -0400 | [diff] [blame] | 100 | static bool compute_intersection(const OffsetSegment& s0, const OffsetSegment& s1, |
Brian Salomon | ab664fa | 2017-03-24 16:07:20 +0000 | [diff] [blame] | 101 | SkPoint* p, SkScalar* s, SkScalar* t) { |
Jim Van Verth | 4db18ed | 2018-04-03 10:00:37 -0400 | [diff] [blame] | 102 | // Common cases for polygon chains -- check if endpoints are touching |
| 103 | if (SkPointPriv::EqualsWithinTolerance(s0.fP1, s1.fP0)) { |
| 104 | *p = s0.fP1; |
| 105 | *s = SK_Scalar1; |
| 106 | *t = 0; |
| 107 | return true; |
| 108 | } |
| 109 | if (SkPointPriv::EqualsWithinTolerance(s1.fP1, s0.fP0)) { |
| 110 | *p = s1.fP1; |
| 111 | *s = 0; |
| 112 | *t = SK_Scalar1; |
| 113 | return true; |
| 114 | } |
| 115 | |
Brian Salomon | ab664fa | 2017-03-24 16:07:20 +0000 | [diff] [blame] | 116 | SkVector v0 = s0.fP1 - s0.fP0; |
| 117 | SkVector v1 = s1.fP1 - s1.fP0; |
Jim Van Verth | 4db18ed | 2018-04-03 10:00:37 -0400 | [diff] [blame] | 118 | // We should have culled coincident points before this |
| 119 | SkASSERT(!SkPointPriv::EqualsWithinTolerance(s0.fP0, s0.fP1)); |
| 120 | SkASSERT(!SkPointPriv::EqualsWithinTolerance(s1.fP0, s1.fP1)); |
Brian Salomon | ab664fa | 2017-03-24 16:07:20 +0000 | [diff] [blame] | 121 | |
| 122 | SkVector d = s1.fP0 - s0.fP0; |
Jim Van Verth | 4db18ed | 2018-04-03 10:00:37 -0400 | [diff] [blame] | 123 | SkScalar perpDot = v0.cross(v1); |
| 124 | SkScalar localS, localT; |
| 125 | if (SkScalarNearlyZero(perpDot)) { |
| 126 | // segments are parallel, but not collinear |
| 127 | if (!SkScalarNearlyZero(d.dot(d), SK_ScalarNearlyZero*SK_ScalarNearlyZero)) { |
| 128 | return false; |
| 129 | } |
| 130 | |
| 131 | // project segment1's endpoints onto segment0 |
| 132 | localS = d.fX / v0.fX; |
| 133 | localT = 0; |
| 134 | if (localS < 0 || localS > SK_Scalar1) { |
| 135 | // the first endpoint doesn't lie on segment0, try the other one |
| 136 | SkScalar oldLocalS = localS; |
| 137 | localS = (s1.fP1.fX - s0.fP0.fX) / v0.fX; |
| 138 | localT = SK_Scalar1; |
| 139 | if (localS < 0 || localS > SK_Scalar1) { |
| 140 | // it's possible that segment1's interval surrounds segment0 |
| 141 | // this is false if the params have the same signs, and in that case no collision |
| 142 | if (localS*oldLocalS > 0) { |
| 143 | return false; |
| 144 | } |
| 145 | // otherwise project segment0's endpoint onto segment1 instead |
| 146 | localS = 0; |
| 147 | localT = -d.fX / v1.fX; |
| 148 | } |
| 149 | } |
| 150 | } else { |
| 151 | localS = d.cross(v1) / perpDot; |
| 152 | if (localS < 0 || localS > SK_Scalar1) { |
| 153 | return false; |
| 154 | } |
| 155 | localT = d.cross(v0) / perpDot; |
| 156 | if (localT < 0 || localT > SK_Scalar1) { |
| 157 | return false; |
| 158 | } |
Brian Salomon | ab664fa | 2017-03-24 16:07:20 +0000 | [diff] [blame] | 159 | } |
| 160 | |
| 161 | v0 *= localS; |
| 162 | *p = s0.fP0 + v0; |
| 163 | *s = localS; |
| 164 | *t = localT; |
| 165 | |
| 166 | return true; |
| 167 | } |
| 168 | |
Jim Van Verth | 4db18ed | 2018-04-03 10:00:37 -0400 | [diff] [blame] | 169 | // computes the line intersection and then the distance to s0's endpoint |
| 170 | static SkScalar compute_crossing_distance(const OffsetSegment& s0, const OffsetSegment& s1) { |
| 171 | SkVector v0 = s0.fP1 - s0.fP0; |
| 172 | SkVector v1 = s1.fP1 - s1.fP0; |
| 173 | |
| 174 | SkScalar perpDot = v0.cross(v1); |
| 175 | if (SkScalarNearlyZero(perpDot)) { |
| 176 | // segments are parallel |
| 177 | return SK_ScalarMax; |
| 178 | } |
| 179 | |
| 180 | SkVector d = s1.fP0 - s0.fP0; |
| 181 | SkScalar localS = d.cross(v1) / perpDot; |
| 182 | if (localS < 0) { |
| 183 | localS = -localS; |
| 184 | } else { |
| 185 | localS -= SK_Scalar1; |
| 186 | } |
| 187 | |
| 188 | localS *= v0.length(); |
| 189 | |
| 190 | return localS; |
| 191 | } |
| 192 | |
Jim Van Verth | 0513f14 | 2017-03-24 14:28:57 -0400 | [diff] [blame] | 193 | static bool is_convex(const SkTDArray<SkPoint>& poly) { |
| 194 | if (poly.count() <= 3) { |
| 195 | return true; |
| 196 | } |
| 197 | |
| 198 | SkVector v0 = poly[0] - poly[poly.count() - 1]; |
| 199 | SkVector v1 = poly[1] - poly[poly.count() - 1]; |
| 200 | SkScalar winding = v0.cross(v1); |
| 201 | |
| 202 | for (int i = 0; i < poly.count() - 1; ++i) { |
| 203 | int j = i + 1; |
| 204 | int k = (i + 2) % poly.count(); |
| 205 | |
| 206 | SkVector v0 = poly[j] - poly[i]; |
| 207 | SkVector v1 = poly[k] - poly[i]; |
| 208 | SkScalar perpDot = v0.cross(v1); |
Jim Van Verth | 291932e | 2017-03-29 14:37:28 -0400 | [diff] [blame] | 209 | if (winding*perpDot < 0) { |
Jim Van Verth | 0513f14 | 2017-03-24 14:28:57 -0400 | [diff] [blame] | 210 | return false; |
| 211 | } |
| 212 | } |
| 213 | |
| 214 | return true; |
| 215 | } |
Jim Van Verth | 0513f14 | 2017-03-24 14:28:57 -0400 | [diff] [blame] | 216 | |
// Bookkeeping for one offset edge while chaining neighbor intersections.
struct EdgeData {
    OffsetSegment fInset;        // the offset segment for this edge
    SkPoint fIntersection;       // latest intersection with the previous valid edge
    SkScalar fTValue;            // parametric position of that intersection along fInset
    bool fValid;                 // cleared once this edge is culled from the result

    void init() {
        // sentinel values: intersection at the segment start, and
        // SK_ScalarMin meaning "no intersection recorded yet"
        fIntersection = fInset.fP0;
        fTValue = SK_ScalarMin;
        fValid = true;
    }
};
| 229 | |
// The objective here is to inset all of the edges by the given distance, and then
// remove any invalid inset edges by detecting right-hand turns. In a ccw polygon,
// we should only be making left-hand turns (for cw polygons, we use the winding
// parameter to reverse this). We detect this by checking whether the second intersection
// on an edge is closer to its tail than the first one.
//
// We might also have the case that there is no intersection between two neighboring inset edges.
// In this case, one edge will lie to the right of the other and should be discarded along with
// its previous intersection (if any).
//
// Note: the assumption is that inputPolygon is convex and has no coincident points.
//
// 'insetDistanceFunc' supplies a per-vertex inset distance.
// Returns false on degenerate/non-convex input or if the inset collapses.
bool SkInsetConvexPolygon(const SkPoint* inputPolygonVerts, int inputPolygonSize,
                          std::function<SkScalar(int index)> insetDistanceFunc,
                          SkTDArray<SkPoint>* insetPolygon) {
    if (inputPolygonSize < 3) {
        return false;
    }

    // winding also selects which side of each edge to offset toward
    int winding = get_winding(inputPolygonVerts, inputPolygonSize);
    if (0 == winding) {
        return false;
    }

    // set up: offset every edge, verifying convexity as we go
    SkAutoSTMalloc<64, EdgeData> edgeData(inputPolygonSize);
    for (int i = 0; i < inputPolygonSize; ++i) {
        int j = (i + 1) % inputPolygonSize;
        int k = (i + 2) % inputPolygonSize;
        // check for convexity just to be sure
        if (compute_side(inputPolygonVerts[i], inputPolygonVerts[j],
                         inputPolygonVerts[k])*winding < 0) {
            return false;
        }
        if (!SkOffsetSegment(inputPolygonVerts[i], inputPolygonVerts[j],
                             insetDistanceFunc(i), insetDistanceFunc(j),
                             winding,
                             &edgeData[i].fInset.fP0, &edgeData[i].fInset.fP1)) {
            return false;
        }
        edgeData[i].init();
    }

    // march around the polygon intersecting each inset edge with its
    // predecessor, culling edges that fall behind earlier intersections;
    // terminates when prev catches up to curr or an intersection repeats
    int prevIndex = inputPolygonSize - 1;
    int currIndex = 0;
    int insetVertexCount = inputPolygonSize;
    while (prevIndex != currIndex) {
        if (!edgeData[prevIndex].fValid) {
            // skip over already-culled edges
            prevIndex = (prevIndex + inputPolygonSize - 1) % inputPolygonSize;
            continue;
        }

        SkScalar s, t;
        SkPoint intersection;
        if (compute_intersection(edgeData[prevIndex].fInset, edgeData[currIndex].fInset,
                                 &intersection, &s, &t)) {
            // if new intersection is further back on previous inset from the prior intersection
            if (s < edgeData[prevIndex].fTValue) {
                // no point in considering this one again
                edgeData[prevIndex].fValid = false;
                --insetVertexCount;
                // go back one segment
                prevIndex = (prevIndex + inputPolygonSize - 1) % inputPolygonSize;
                // we've already considered this intersection, we're done
            } else if (edgeData[currIndex].fTValue > SK_ScalarMin &&
                       SkPointPriv::EqualsWithinTolerance(intersection,
                                                          edgeData[currIndex].fIntersection,
                                                          1.0e-6f)) {
                break;
            } else {
                // add intersection
                edgeData[currIndex].fIntersection = intersection;
                edgeData[currIndex].fTValue = t;

                // go to next segment
                prevIndex = currIndex;
                currIndex = (currIndex + 1) % inputPolygonSize;
            }
        } else {
            // if prev to right side of curr
            int side = winding*compute_side(edgeData[currIndex].fInset.fP0,
                                            edgeData[currIndex].fInset.fP1,
                                            edgeData[prevIndex].fInset.fP1);
            if (side < 0 && side == winding*compute_side(edgeData[currIndex].fInset.fP0,
                                                         edgeData[currIndex].fInset.fP1,
                                                         edgeData[prevIndex].fInset.fP0)) {
                // no point in considering this one again
                edgeData[prevIndex].fValid = false;
                --insetVertexCount;
                // go back one segment
                prevIndex = (prevIndex + inputPolygonSize - 1) % inputPolygonSize;
            } else {
                // move to next segment
                edgeData[currIndex].fValid = false;
                --insetVertexCount;
                currIndex = (currIndex + 1) % inputPolygonSize;
            }
        }
    }

    // store all the valid intersections that aren't nearly coincident
    // TODO: look at the main algorithm and see if we can detect these better
    static constexpr SkScalar kCleanupTolerance = 0.01f;

    insetPolygon->reset();
    insetPolygon->setReserve(insetVertexCount);
    currIndex = -1;    // now tracks the last point written to the output
    for (int i = 0; i < inputPolygonSize; ++i) {
        if (edgeData[i].fValid && (currIndex == -1 ||
            !SkPointPriv::EqualsWithinTolerance(edgeData[i].fIntersection,
                                                (*insetPolygon)[currIndex],
                                                kCleanupTolerance))) {
            *insetPolygon->push() = edgeData[i].fIntersection;
            currIndex++;
        }
    }
    // make sure the first and last points aren't coincident
    if (currIndex >= 1 &&
        SkPointPriv::EqualsWithinTolerance((*insetPolygon)[0], (*insetPolygon)[currIndex],
                                           kCleanupTolerance)) {
        insetPolygon->pop();
    }

    // a valid inset must still be a convex polygon
    return (insetPolygon->count() >= 3 && is_convex(*insetPolygon));
}
Jim Van Verth | 4db18ed | 2018-04-03 10:00:37 -0400 | [diff] [blame] | 355 | |
| 356 | // compute the number of points needed for a circular join when offsetting a reflex vertex |
| 357 | static void compute_radial_steps(const SkVector& v1, const SkVector& v2, SkScalar r, |
| 358 | SkScalar* rotSin, SkScalar* rotCos, int* n) { |
| 359 | const SkScalar kRecipPixelsPerArcSegment = 0.25f; |
| 360 | |
| 361 | SkScalar rCos = v1.dot(v2); |
| 362 | SkScalar rSin = v1.cross(v2); |
| 363 | SkScalar theta = SkScalarATan2(rSin, rCos); |
| 364 | |
| 365 | int steps = SkScalarRoundToInt(SkScalarAbs(r*theta*kRecipPixelsPerArcSegment)); |
| 366 | |
| 367 | SkScalar dTheta = theta / steps; |
| 368 | *rotSin = SkScalarSinCos(dTheta, rotCos); |
| 369 | *n = steps; |
| 370 | } |
| 371 | |
| 372 | // tolerant less-than comparison |
| 373 | static inline bool nearly_lt(SkScalar a, SkScalar b, SkScalar tolerance = SK_ScalarNearlyZero) { |
| 374 | return a < b - tolerance; |
| 375 | } |
| 376 | |
| 377 | // a point is "left" to another if its x coordinate is less, or if equal, its y coordinate |
| 378 | static bool left(const SkPoint& p0, const SkPoint& p1) { |
| 379 | return nearly_lt(p0.fX, p1.fX) || |
| 380 | (SkScalarNearlyEqual(p0.fX, p1.fX) && nearly_lt(p0.fY, p1.fY)); |
| 381 | } |
| 382 | |
// A polygon vertex queued for the left-to-right sweep in is_simple_polygon().
struct Vertex {
    // ordering predicate for the priority queue: leftmost vertex first
    static bool Left(const Vertex& qv0, const Vertex& qv1) {
        return left(qv0.fPosition, qv1.fPosition);
    }
    // packed to fit into 16 bytes (one cache line)
    SkPoint fPosition;
    uint16_t fIndex; // index in unsorted polygon
    uint16_t fPrevIndex; // indices for previous and next vertex in unsorted polygon
    uint16_t fNextIndex;
    uint16_t fFlags; // bitwise-or of VertexFlags, precomputed neighbor comparisons
};
| 394 | |
// Cached results of left() against a vertex's neighbors, so the sweep loop
// can decide insert-vs-remove without recomparing points.
enum VertexFlags {
    kPrevLeft_VertexFlag = 0x1, // the previous vertex lies to this one's left
    kNextLeft_VertexFlag = 0x2, // the next vertex lies to this one's left
};
| 399 | |
| 400 | struct Edge { |
| 401 | // returns true if "this" is above "that" |
| 402 | bool above(const Edge& that, SkScalar tolerance = SK_ScalarNearlyZero) { |
| 403 | SkASSERT(nearly_lt(this->fSegment.fP0.fX, that.fSegment.fP0.fX, tolerance) || |
| 404 | SkScalarNearlyEqual(this->fSegment.fP0.fX, that.fSegment.fP0.fX, tolerance)); |
| 405 | // The idea here is that if the vector between the origins of the two segments (dv) |
| 406 | // rotates counterclockwise up to the vector representing the "this" segment (u), |
| 407 | // then we know that "this" is above that. If the result is clockwise we say it's below. |
| 408 | SkVector dv = that.fSegment.fP0 - this->fSegment.fP0; |
| 409 | SkVector u = this->fSegment.fP1 - this->fSegment.fP0; |
| 410 | SkScalar cross = dv.cross(u); |
| 411 | if (cross > tolerance) { |
| 412 | return true; |
| 413 | } else if (cross < -tolerance) { |
| 414 | return false; |
| 415 | } |
| 416 | // If the result is 0 then either the two origins are equal or the origin of "that" |
| 417 | // lies on dv. So then we try the same for the vector from the tail of "this" |
| 418 | // to the head of "that". Again, ccw means "this" is above "that". |
| 419 | dv = that.fSegment.fP1 - this->fSegment.fP0; |
| 420 | return (dv.cross(u) > tolerance); |
| 421 | } |
| 422 | |
| 423 | bool intersect(const Edge& that) const { |
| 424 | SkPoint intersection; |
| 425 | SkScalar s, t; |
| 426 | // check first to see if these edges are neighbors in the polygon |
| 427 | if (this->fIndex0 == that.fIndex0 || this->fIndex1 == that.fIndex0 || |
| 428 | this->fIndex0 == that.fIndex1 || this->fIndex1 == that.fIndex1) { |
| 429 | return false; |
| 430 | } |
| 431 | return compute_intersection(this->fSegment, that.fSegment, &intersection, &s, &t); |
| 432 | } |
| 433 | |
| 434 | bool operator==(const Edge& that) const { |
| 435 | return (this->fIndex0 == that.fIndex0 && this->fIndex1 == that.fIndex1); |
| 436 | } |
| 437 | |
| 438 | bool operator!=(const Edge& that) const { |
| 439 | return !operator==(that); |
| 440 | } |
| 441 | |
| 442 | OffsetSegment fSegment; |
| 443 | int32_t fIndex0; // indices for previous and next vertex |
| 444 | int32_t fIndex1; |
| 445 | }; |
| 446 | |
// The set of edges currently crossed by the sweep line, kept in vertical
// (top-to-bottom) order. Both insert() and remove() probe the adjacent
// edges for intersections; finding one means the polygon is not simple.
class EdgeList {
public:
    void reserve(int count) { fEdges.reserve(count); }

    // Insert 'newEdge' at its sorted position.
    // Returns false (without inserting) if it intersects a neighbor.
    bool insert(const Edge& newEdge) {
        // linear search for now (expected case is very few active edges)
        int insertIndex = 0;
        while (insertIndex < fEdges.count() && fEdges[insertIndex].above(newEdge)) {
            ++insertIndex;
        }
        // if we intersect with the existing edge above or below us
        // then we know this polygon is not simple, so don't insert, just fail
        if (insertIndex > 0 && newEdge.intersect(fEdges[insertIndex - 1])) {
            return false;
        }
        if (insertIndex < fEdges.count() && newEdge.intersect(fEdges[insertIndex])) {
            return false;
        }

        // grow by one, then shift everything at/after the insertion point down a slot
        fEdges.push_back();
        for (int i = fEdges.count() - 1; i > insertIndex; --i) {
            fEdges[i] = fEdges[i - 1];
        }
        fEdges[insertIndex] = newEdge;

        return true;
    }

    // Remove 'edge' from the list.
    // Returns false (without removing) if it intersects the edge above or below it.
    bool remove(const Edge& edge) {
        SkASSERT(fEdges.count() > 0);

        // linear search for now (expected case is very few active edges)
        int removeIndex = 0;
        while (removeIndex < fEdges.count() && fEdges[removeIndex] != edge) {
            ++removeIndex;
        }
        // we'd better find it or something is wrong
        SkASSERT(removeIndex < fEdges.count());

        // if we intersect with the edge above or below us
        // then we know this polygon is not simple, so don't remove, just fail
        if (removeIndex > 0 && fEdges[removeIndex].intersect(fEdges[removeIndex-1])) {
            return false;
        }
        if (removeIndex < fEdges.count()-1) {
            if (fEdges[removeIndex].intersect(fEdges[removeIndex + 1])) {
                return false;
            }
            // copy over the old entry
            // (memmove is fine here: Edge holds only plain-old-data members)
            memmove(&fEdges[removeIndex], &fEdges[removeIndex + 1],
                    sizeof(Edge)*(fEdges.count() - removeIndex - 1));
        }

        fEdges.pop_back();
        return true;
    }

private:
    SkSTArray<1, Edge> fEdges; // kept sorted by Edge::above()
};
| 507 | |
// Here we implement a sweep line algorithm to determine whether the provided points
// represent a simple polygon, i.e., the polygon is non-self-intersecting.
// We first insert the vertices into a priority queue sorting horizontally from left to right.
// Then as we pop the vertices from the queue we generate events which indicate that an edge
// should be added or removed from an edge list. If any intersections are detected in the edge
// list, then we know the polygon is self-intersecting and hence not simple.
static bool is_simple_polygon(const SkPoint* polygon, int polygonSize) {
    SkTDPQueue <Vertex, Vertex::Left> vertexQueue;
    EdgeList sweepLine;

    sweepLine.reserve(polygonSize);
    for (int i = 0; i < polygonSize; ++i) {
        Vertex newVertex;
        newVertex.fPosition = polygon[i];
        newVertex.fIndex = i;
        newVertex.fPrevIndex = (i - 1 + polygonSize) % polygonSize;
        newVertex.fNextIndex = (i + 1) % polygonSize;
        newVertex.fFlags = 0;
        // cache which neighbors lie to the left, so the loop below knows
        // whether each incident edge opens or closes at this vertex
        if (left(polygon[newVertex.fPrevIndex], polygon[i])) {
            newVertex.fFlags |= kPrevLeft_VertexFlag;
        }
        if (left(polygon[newVertex.fNextIndex], polygon[i])) {
            newVertex.fFlags |= kNextLeft_VertexFlag;
        }
        vertexQueue.insert(newVertex);
    }

    // pop each vertex from the queue and generate events depending on
    // where it lies relative to its neighboring edges
    while (vertexQueue.count() > 0) {
        const Vertex& v = vertexQueue.peek();

        // check edge to previous vertex
        if (v.fFlags & kPrevLeft_VertexFlag) {
            // edge started to our left and terminates here: remove it
            Edge edge{ { polygon[v.fPrevIndex], v.fPosition }, v.fPrevIndex, v.fIndex };
            if (!sweepLine.remove(edge)) {
                break;
            }
        } else {
            // edge starts here and extends to the right: insert it
            Edge edge{ { v.fPosition, polygon[v.fPrevIndex] }, v.fIndex, v.fPrevIndex };
            if (!sweepLine.insert(edge)) {
                break;
            }
        }

        // check edge to next vertex
        if (v.fFlags & kNextLeft_VertexFlag) {
            Edge edge{ { polygon[v.fNextIndex], v.fPosition }, v.fNextIndex, v.fIndex };
            if (!sweepLine.remove(edge)) {
                break;
            }
        } else {
            Edge edge{ { v.fPosition, polygon[v.fNextIndex] }, v.fIndex, v.fNextIndex };
            if (!sweepLine.insert(edge)) {
                break;
            }
        }

        vertexQueue.pop();
    }

    // an early break above leaves vertices in the queue => self-intersecting
    return (vertexQueue.count() == 0);
}
| 571 | |
| 572 | // TODO: assuming a constant offset here -- do we want to support variable offset? |
| 573 | bool SkOffsetSimplePolygon(const SkPoint* inputPolygonVerts, int inputPolygonSize, |
| 574 | SkScalar offset, SkTDArray<SkPoint>* offsetPolygon) { |
| 575 | if (inputPolygonSize < 3) { |
| 576 | return false; |
| 577 | } |
| 578 | |
| 579 | if (!is_simple_polygon(inputPolygonVerts, inputPolygonSize)) { |
| 580 | return false; |
| 581 | } |
| 582 | |
| 583 | // compute area and use sign to determine winding |
| 584 | // do initial pass to build normals |
| 585 | SkAutoSTMalloc<64, SkVector> normals(inputPolygonSize); |
| 586 | SkScalar quadArea = 0; |
| 587 | for (int curr = 0; curr < inputPolygonSize; ++curr) { |
| 588 | int next = (curr + 1) % inputPolygonSize; |
| 589 | SkVector tangent = inputPolygonVerts[next] - inputPolygonVerts[curr]; |
| 590 | SkVector normal = SkVector::Make(-tangent.fY, tangent.fX); |
| 591 | normals[curr] = normal; |
| 592 | quadArea += inputPolygonVerts[curr].cross(inputPolygonVerts[next]); |
| 593 | } |
| 594 | // 1 == ccw, -1 == cw |
| 595 | int winding = (quadArea > 0) ? 1 : -1; |
| 596 | if (0 == winding) { |
| 597 | return false; |
| 598 | } |
| 599 | |
| 600 | // resize normals to match offset |
| 601 | for (int curr = 0; curr < inputPolygonSize; ++curr) { |
| 602 | normals[curr].setLength(winding*offset); |
| 603 | } |
| 604 | |
| 605 | // build initial offset edge list |
| 606 | SkSTArray<64, EdgeData> edgeData(inputPolygonSize); |
| 607 | int prevIndex = inputPolygonSize - 1; |
| 608 | int currIndex = 0; |
| 609 | int nextIndex = 1; |
| 610 | while (currIndex < inputPolygonSize) { |
| 611 | int side = compute_side(inputPolygonVerts[prevIndex], |
| 612 | inputPolygonVerts[currIndex], |
| 613 | inputPolygonVerts[nextIndex]); |
| 614 | |
| 615 | // if reflex point, fill in curve |
| 616 | if (side*winding*offset < 0) { |
| 617 | SkScalar rotSin, rotCos; |
| 618 | int numSteps; |
| 619 | SkVector prevNormal = normals[prevIndex]; |
| 620 | compute_radial_steps(prevNormal, normals[currIndex], SkScalarAbs(offset), |
| 621 | &rotSin, &rotCos, &numSteps); |
| 622 | for (int i = 0; i < numSteps - 1; ++i) { |
| 623 | SkVector currNormal = SkVector::Make(prevNormal.fX*rotCos - prevNormal.fY*rotSin, |
| 624 | prevNormal.fY*rotCos + prevNormal.fX*rotSin); |
| 625 | EdgeData& edge = edgeData.push_back(); |
| 626 | edge.fInset.fP0 = inputPolygonVerts[currIndex] + prevNormal; |
| 627 | edge.fInset.fP1 = inputPolygonVerts[currIndex] + currNormal; |
| 628 | edge.init(); |
| 629 | prevNormal = currNormal; |
| 630 | } |
| 631 | EdgeData& edge = edgeData.push_back(); |
| 632 | edge.fInset.fP0 = inputPolygonVerts[currIndex] + prevNormal; |
| 633 | edge.fInset.fP1 = inputPolygonVerts[currIndex] + normals[currIndex]; |
| 634 | edge.init(); |
| 635 | } |
| 636 | |
| 637 | // Add the edge |
| 638 | EdgeData& edge = edgeData.push_back(); |
| 639 | edge.fInset.fP0 = inputPolygonVerts[currIndex] + normals[currIndex]; |
| 640 | edge.fInset.fP1 = inputPolygonVerts[nextIndex] + normals[currIndex]; |
| 641 | edge.init(); |
| 642 | |
| 643 | prevIndex = currIndex; |
| 644 | currIndex++; |
| 645 | nextIndex = (nextIndex + 1) % inputPolygonSize; |
| 646 | } |
| 647 | |
| 648 | int edgeDataSize = edgeData.count(); |
| 649 | prevIndex = edgeDataSize - 1; |
| 650 | currIndex = 0; |
| 651 | int insetVertexCount = edgeDataSize; |
| 652 | while (prevIndex != currIndex) { |
| 653 | if (!edgeData[prevIndex].fValid) { |
| 654 | prevIndex = (prevIndex + edgeDataSize - 1) % edgeDataSize; |
| 655 | continue; |
| 656 | } |
| 657 | |
| 658 | SkScalar s, t; |
| 659 | SkPoint intersection; |
| 660 | if (compute_intersection(edgeData[prevIndex].fInset, edgeData[currIndex].fInset, |
| 661 | &intersection, &s, &t)) { |
| 662 | // if new intersection is further back on previous inset from the prior intersection |
| 663 | if (s < edgeData[prevIndex].fTValue) { |
| 664 | // no point in considering this one again |
| 665 | edgeData[prevIndex].fValid = false; |
| 666 | --insetVertexCount; |
| 667 | // go back one segment |
| 668 | prevIndex = (prevIndex + edgeDataSize - 1) % edgeDataSize; |
| 669 | // we've already considered this intersection, we're done |
| 670 | } else if (edgeData[currIndex].fTValue > SK_ScalarMin && |
| 671 | SkPointPriv::EqualsWithinTolerance(intersection, |
| 672 | edgeData[currIndex].fIntersection, |
| 673 | 1.0e-6f)) { |
| 674 | break; |
| 675 | } else { |
| 676 | // add intersection |
| 677 | edgeData[currIndex].fIntersection = intersection; |
| 678 | edgeData[currIndex].fTValue = t; |
| 679 | |
| 680 | // go to next segment |
| 681 | prevIndex = currIndex; |
| 682 | currIndex = (currIndex + 1) % edgeDataSize; |
| 683 | } |
| 684 | } else { |
| 685 | // If there is no intersection, we want to minimize the distance between |
| 686 | // the point where the segment lines cross and the segments themselves. |
| 687 | SkScalar prevPrevIndex = (prevIndex + edgeDataSize - 1) % edgeDataSize; |
| 688 | SkScalar currNextIndex = (currIndex + 1) % edgeDataSize; |
| 689 | SkScalar dist0 = compute_crossing_distance(edgeData[currIndex].fInset, |
| 690 | edgeData[prevPrevIndex].fInset); |
| 691 | SkScalar dist1 = compute_crossing_distance(edgeData[prevIndex].fInset, |
| 692 | edgeData[currNextIndex].fInset); |
| 693 | if (dist0 < dist1) { |
| 694 | edgeData[prevIndex].fValid = false; |
| 695 | prevIndex = prevPrevIndex; |
| 696 | } else { |
| 697 | edgeData[currIndex].fValid = false; |
| 698 | currIndex = currNextIndex; |
| 699 | } |
| 700 | --insetVertexCount; |
| 701 | } |
| 702 | } |
| 703 | |
| 704 | // store all the valid intersections that aren't nearly coincident |
| 705 | // TODO: look at the main algorithm and see if we can detect these better |
| 706 | static constexpr SkScalar kCleanupTolerance = 0.01f; |
| 707 | |
| 708 | offsetPolygon->reset(); |
| 709 | offsetPolygon->setReserve(insetVertexCount); |
| 710 | currIndex = -1; |
| 711 | for (int i = 0; i < edgeData.count(); ++i) { |
| 712 | if (edgeData[i].fValid && (currIndex == -1 || |
| 713 | !SkPointPriv::EqualsWithinTolerance(edgeData[i].fIntersection, |
| 714 | (*offsetPolygon)[currIndex], |
| 715 | kCleanupTolerance))) { |
| 716 | *offsetPolygon->push() = edgeData[i].fIntersection; |
| 717 | currIndex++; |
| 718 | } |
| 719 | } |
| 720 | // make sure the first and last points aren't coincident |
| 721 | if (currIndex >= 1 && |
| 722 | SkPointPriv::EqualsWithinTolerance((*offsetPolygon)[0], (*offsetPolygon)[currIndex], |
| 723 | kCleanupTolerance)) { |
| 724 | offsetPolygon->pop(); |
| 725 | } |
| 726 | |
| 727 | // compute signed area to check winding (it should be same as the original polygon) |
| 728 | quadArea = 0; |
| 729 | for (int curr = 0; curr < offsetPolygon->count(); ++curr) { |
| 730 | int next = (curr + 1) % offsetPolygon->count(); |
| 731 | quadArea += (*offsetPolygon)[curr].cross((*offsetPolygon)[next]); |
| 732 | } |
| 733 | |
| 734 | return (winding*quadArea > 0 && |
| 735 | is_simple_polygon(offsetPolygon->begin(), offsetPolygon->count())); |
| 736 | } |
| 737 | |