// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package pointer

// This file defines the constraint generation phase.

// TODO(adonovan): move the constraint definitions and the store() etc
// functions which add them (and are also used by the solver) into a
// new file, constraints.go.

import (
	"fmt"
	"go/token"

	"llvm.org/llgo/third_party/gotools/go/callgraph"
	"llvm.org/llgo/third_party/gotools/go/ssa"
	"llvm.org/llgo/third_party/gotools/go/types"
)

var (
	tEface     = types.NewInterface(nil, nil).Complete()
	tInvalid   = types.Typ[types.Invalid]
	tUnsafePtr = types.Typ[types.UnsafePointer]
)

// ---------- Node creation ----------

// nextNode returns the index of the next unused node.
func (a *analysis) nextNode() nodeid {
	return nodeid(len(a.nodes))
}

// addNodes creates nodes for all scalar elements in type typ, and
// returns the id of the first one, or zero if the type was
// analytically uninteresting.
//
// comment explains the origin of the nodes, as a debugging aid.
//
func (a *analysis) addNodes(typ types.Type, comment string) nodeid {
	id := a.nextNode()
	for _, fi := range a.flatten(typ) {
		a.addOneNode(fi.typ, comment, fi)
	}
	if id == a.nextNode() {
		return 0 // type contained no pointers
	}
	return id
}
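
// For illustration (a sketch, not verbatim solver output): flattening
// a value of type
//
//	struct{ x *int; s struct{ y *string } }
//
// yields a small block of contiguous nodes, roughly one per scalar
// element; struct and array values also carry identity nodes (see the
// NB in offsetAddr below). addNodes returns the id of the first node
// in the block.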

// addOneNode creates a single node with type typ, and returns its id.
//
// typ should generally be scalar (except for tagged.T nodes
// and struct/array identity nodes). Use addNodes for non-scalar types.
//
// comment explains the origin of the nodes, as a debugging aid.
// subelement indicates the subelement, e.g. ".a.b[*].c".
//
func (a *analysis) addOneNode(typ types.Type, comment string, subelement *fieldInfo) nodeid {
	id := a.nextNode()
	a.nodes = append(a.nodes, &node{typ: typ, subelement: subelement, solve: new(solverState)})
	if a.log != nil {
		fmt.Fprintf(a.log, "\tcreate n%d %s for %s%s\n",
			id, typ, comment, subelement.path())
	}
	return id
}

// setValueNode associates node id with the value v.
// cgn identifies the context iff v is a local variable.
//
func (a *analysis) setValueNode(v ssa.Value, id nodeid, cgn *cgnode) {
	if cgn != nil {
		a.localval[v] = id
	} else {
		a.globalval[v] = id
	}
	if a.log != nil {
		fmt.Fprintf(a.log, "\tval[%s] = n%d (%T)\n", v.Name(), id, v)
	}

	// Due to context-sensitivity, we may encounter the same Value
	// in many contexts. We merge them to a canonical node, since
	// that's what all clients want.

	// Record the (v, id) relation if the client has queried pts(v).
	if _, ok := a.config.Queries[v]; ok {
		t := v.Type()
		ptr, ok := a.result.Queries[v]
		if !ok {
			// First time? Create the canonical query node.
			ptr = Pointer{a, a.addNodes(t, "query")}
			a.result.Queries[v] = ptr
		}
		a.copy(ptr.n, id, a.sizeof(t))
	}

	// Record the (*v, id) relation if the client has queried pts(*v).
	if _, ok := a.config.IndirectQueries[v]; ok {
		t := v.Type()
		ptr, ok := a.result.IndirectQueries[v]
		if !ok {
			// First time? Create the canonical indirect query node.
			ptr = Pointer{a, a.addNodes(v.Type(), "query.indirect")}
			a.result.IndirectQueries[v] = ptr
		}
		a.genLoad(cgn, ptr.n, v, 0, a.sizeof(t))
	}
}

// endObject marks the end of a sequence of calls to addNodes denoting
// a single object allocation.
//
// obj is the start node of the object, from a prior call to nextNode.
// Its size, flags and optional data will be updated.
//
func (a *analysis) endObject(obj nodeid, cgn *cgnode, data interface{}) *object {
	// Ensure object is non-empty by padding;
	// the pad will be the object node.
	size := uint32(a.nextNode() - obj)
	if size == 0 {
		a.addOneNode(tInvalid, "padding", nil)
	}
	objNode := a.nodes[obj]
	o := &object{
		size: size, // excludes padding
		cgn:  cgn,
		data: data,
	}
	objNode.obj = o

	return o
}

// makeFunctionObject creates and returns a new function object
// (contour) for fn, and returns the id of its first node. It also
// enqueues fn for subsequent constraint generation.
//
// For a context-sensitive contour, callersite identifies the sole
// callsite; for shared contours, callersite is nil.
//
func (a *analysis) makeFunctionObject(fn *ssa.Function, callersite *callsite) nodeid {
	if a.log != nil {
		fmt.Fprintf(a.log, "\t---- makeFunctionObject %s\n", fn)
	}

	// obj is the function object (identity, params, results).
	obj := a.nextNode()
	cgn := a.makeCGNode(fn, obj, callersite)
	sig := fn.Signature
	a.addOneNode(sig, "func.cgnode", nil) // (scalar with Signature type)
	if recv := sig.Recv(); recv != nil {
		a.addNodes(recv.Type(), "func.recv")
	}
	a.addNodes(sig.Params(), "func.params")
	a.addNodes(sig.Results(), "func.results")
	a.endObject(obj, cgn, fn).flags |= otFunction

	if a.log != nil {
		fmt.Fprintf(a.log, "\t----\n")
	}

	// Queue it up for constraint processing.
	a.genq = append(a.genq, cgn)

	return obj
}
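
// The resulting function object block (a sketch, derived from the
// calls above) is laid out contiguously:
//
//	obj+0     identity node, typed by the Signature
//	obj+1...  receiver block, if sig.Recv() != nil
//	...       params block (P), one node per flattened parameter
//	...       results block (R), one node per flattened result
//
// funcParams and funcResults below compute offsets into this layout.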

// makeTagged creates a tagged object of type typ.
func (a *analysis) makeTagged(typ types.Type, cgn *cgnode, data interface{}) nodeid {
	obj := a.addOneNode(typ, "tagged.T", nil) // NB: type may be non-scalar!
	a.addNodes(typ, "tagged.v")
	a.endObject(obj, cgn, data).flags |= otTagged
	return obj
}
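
// A tagged object therefore occupies nodes [obj, obj+1+sizeof(typ)):
//
//	obj+0     the tag node, whose type records the dynamic type typ
//	obj+1...  the payload, a flattened value of type typ
//
// This is why taggedValue below returns obj+1 as the payload.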

// makeRtype returns the canonical tagged object of type *rtype whose
// payload points to the sole rtype object for T.
//
// TODO(adonovan): move to reflect.go; it's part of the solver really.
//
func (a *analysis) makeRtype(T types.Type) nodeid {
	if v := a.rtypes.At(T); v != nil {
		return v.(nodeid)
	}

	// Create the object for the reflect.rtype itself, which is
	// ordinarily a large struct but here a single node will do.
	obj := a.nextNode()
	a.addOneNode(T, "reflect.rtype", nil)
	a.endObject(obj, nil, T)

	id := a.makeTagged(a.reflectRtypePtr, nil, T)
	a.nodes[id+1].typ = T // trick (each *rtype tagged object is a singleton)
	a.addressOf(a.reflectRtypePtr, id+1, obj)

	a.rtypes.Set(T, id)
	return id
}

// rtypeTaggedValue returns the type of the *reflect.rtype-tagged object obj.
func (a *analysis) rtypeTaggedValue(obj nodeid) types.Type {
	tDyn, t, _ := a.taggedValue(obj)
	if tDyn != a.reflectRtypePtr {
		panic(fmt.Sprintf("not a *reflect.rtype-tagged object: obj=n%d tag=%v payload=n%d", obj, tDyn, t))
	}
	return a.nodes[t].typ
}

// valueNode returns the id of the value node for v, creating it (and
// the association) as needed. It may return zero for uninteresting
// values containing no pointers.
//
func (a *analysis) valueNode(v ssa.Value) nodeid {
	// Value nodes for locals are created en masse by genFunc.
	if id, ok := a.localval[v]; ok {
		return id
	}

	// Value nodes for globals are created on demand.
	id, ok := a.globalval[v]
	if !ok {
		var comment string
		if a.log != nil {
			comment = v.String()
		}
		id = a.addNodes(v.Type(), comment)
		if obj := a.objectNode(nil, v); obj != 0 {
			a.addressOf(v.Type(), id, obj)
		}
		a.setValueNode(v, id, nil)
	}
	return id
}

// valueOffsetNode ascertains the node for tuple/struct value v,
// then returns the node for its subfield #index.
//
func (a *analysis) valueOffsetNode(v ssa.Value, index int) nodeid {
	id := a.valueNode(v)
	if id == 0 {
		panic(fmt.Sprintf("cannot offset within n0: %s = %s", v.Name(), v))
	}
	return id + nodeid(a.offsetOf(v.Type(), index))
}
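
// For example (a sketch): for an *ssa.Field instruction reading field
// .b of a struct value v, genInstr calls valueOffsetNode(v, 1), which
// yields valueNode(v) plus the logical-field offset of .b. Offsets
// are measured in flattened nodes, not bytes.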

// isTaggedObject reports whether object obj is a tagged object.
func (a *analysis) isTaggedObject(obj nodeid) bool {
	return a.nodes[obj].obj.flags&otTagged != 0
}

// taggedValue returns the dynamic type tag, the (first node of the)
// payload, and the indirect flag of the tagged object starting at obj.
// Panic ensues if !isTaggedObject(obj).
//
func (a *analysis) taggedValue(obj nodeid) (tDyn types.Type, v nodeid, indirect bool) {
	n := a.nodes[obj]
	flags := n.obj.flags
	if flags&otTagged == 0 {
		panic(fmt.Sprintf("not a tagged object: n%d", obj))
	}
	return n.typ, obj + 1, flags&otIndirect != 0
}

// funcParams returns the first node of the params (P) block of the
// function whose object node (obj.flags&otFunction) is id.
//
func (a *analysis) funcParams(id nodeid) nodeid {
	n := a.nodes[id]
	if n.obj == nil || n.obj.flags&otFunction == 0 {
		panic(fmt.Sprintf("funcParams(n%d): not a function object block", id))
	}
	return id + 1
}

// funcResults returns the first node of the results (R) block of the
// function whose object node (obj.flags&otFunction) is id.
//
func (a *analysis) funcResults(id nodeid) nodeid {
	n := a.nodes[id]
	if n.obj == nil || n.obj.flags&otFunction == 0 {
		panic(fmt.Sprintf("funcResults(n%d): not a function object block", id))
	}
	sig := n.typ.(*types.Signature)
	id += 1 + nodeid(a.sizeof(sig.Params()))
	if sig.Recv() != nil {
		id += nodeid(a.sizeof(sig.Recv().Type()))
	}
	return id
}

// ---------- Constraint creation ----------

// copy creates a constraint of the form dst = src.
// sizeof is the width (in logical fields) of the copied type.
//
func (a *analysis) copy(dst, src nodeid, sizeof uint32) {
	if src == dst || sizeof == 0 {
		return // trivial
	}
	if src == 0 || dst == 0 {
		panic(fmt.Sprintf("ill-typed copy dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&copyConstraint{dst, src})
		src++
		dst++
	}
}
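
// A multi-field copy is expanded eagerly into one elementary
// constraint per logical field. For example (a sketch), a copy with
// sizeof == 2 emits:
//
//	copy dst   <- src
//	copy dst+1 <- src+1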

// addressOf creates a constraint of the form id = &obj.
// T is the type of the address.
func (a *analysis) addressOf(T types.Type, id, obj nodeid) {
	if id == 0 {
		panic("addressOf: zero id")
	}
	if obj == 0 {
		panic("addressOf: zero obj")
	}
	if a.shouldTrack(T) {
		a.addConstraint(&addrConstraint{id, obj})
	}
}

// load creates a load constraint of the form dst = src[offset].
// offset is the pointer offset in logical fields.
// sizeof is the width (in logical fields) of the loaded type.
//
func (a *analysis) load(dst, src nodeid, offset, sizeof uint32) {
	if dst == 0 {
		return // load of non-pointerlike value
	}
	if src == 0 && dst == 0 {
		return // non-pointerlike operation
	}
	if src == 0 || dst == 0 {
		panic(fmt.Sprintf("ill-typed load dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&loadConstraint{offset, dst, src})
		offset++
		dst++
	}
}

// store creates a store constraint of the form dst[offset] = src.
// offset is the pointer offset in logical fields.
// sizeof is the width (in logical fields) of the stored type.
//
func (a *analysis) store(dst, src nodeid, offset uint32, sizeof uint32) {
	if src == 0 {
		return // store of non-pointerlike value
	}
	if src == 0 && dst == 0 {
		return // non-pointerlike operation
	}
	if src == 0 || dst == 0 {
		panic(fmt.Sprintf("ill-typed store dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&storeConstraint{offset, dst, src})
		offset++
		src++
	}
}

// offsetAddr creates an offsetAddr constraint of the form dst = &src.#offset.
// offset is the field offset in logical fields.
// T is the type of the address.
//
func (a *analysis) offsetAddr(T types.Type, dst, src nodeid, offset uint32) {
	if !a.shouldTrack(T) {
		return
	}
	if offset == 0 {
		// Simplify  dst = &src->f0
		//       to  dst = src
		// (NB: this optimisation is defeated by the identity
		// field prepended to struct and array objects.)
		a.copy(dst, src, 1)
	} else {
		a.addConstraint(&offsetAddrConstraint{offset, dst, src})
	}
}

// typeAssert creates a typeFilter or untag constraint of the form dst = src.(T):
// typeFilter for an interface, untag for a concrete type.
// The exact flag is specified as for untagConstraint.
//
func (a *analysis) typeAssert(T types.Type, dst, src nodeid, exact bool) {
	if isInterface(T) {
		a.addConstraint(&typeFilterConstraint{T, dst, src})
	} else {
		a.addConstraint(&untagConstraint{T, dst, src, exact})
	}
}

// addConstraint adds c to the constraint set.
func (a *analysis) addConstraint(c constraint) {
	a.constraints = append(a.constraints, c)
	if a.log != nil {
		fmt.Fprintf(a.log, "\t%s\n", c)
	}
}

// copyElems generates load/store constraints for *dst = *src,
// where src and dst are slices or *arrays.
//
func (a *analysis) copyElems(cgn *cgnode, typ types.Type, dst, src ssa.Value) {
	tmp := a.addNodes(typ, "copy")
	sz := a.sizeof(typ)
	a.genLoad(cgn, tmp, src, 1, sz)
	a.genStore(cgn, dst, tmp, 1, sz)
}
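
// In other words (a sketch of the constraints generated when neither
// object is known a priori):
//
//	load  tmp <- src[1]    // read the elements of *src
//	store dst[1] <- tmp    // write them into the elements of *dst
//
// Offset 1 skips the identity node at the start of each array object.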

// ---------- Constraint generation ----------

// genConv generates constraints for the conversion operation conv.
func (a *analysis) genConv(conv *ssa.Convert, cgn *cgnode) {
	res := a.valueNode(conv)
	if res == 0 {
		return // result is non-pointerlike
	}

	tSrc := conv.X.Type()
	tDst := conv.Type()

	switch utSrc := tSrc.Underlying().(type) {
	case *types.Slice:
		// []byte/[]rune -> string?
		return

	case *types.Pointer:
		// *T -> unsafe.Pointer?
		if tDst.Underlying() == tUnsafePtr {
			return // we don't model unsafe aliasing (unsound)
		}

	case *types.Basic:
		switch tDst.Underlying().(type) {
		case *types.Pointer:
			// Treat unsafe.Pointer->*T conversions like
			// new(T) and create an unaliased object.
			if utSrc == tUnsafePtr {
				obj := a.addNodes(mustDeref(tDst), "unsafe.Pointer conversion")
				a.endObject(obj, cgn, conv)
				a.addressOf(tDst, res, obj)
				return
			}

		case *types.Slice:
			// string -> []byte/[]rune (or named aliases)?
			if utSrc.Info()&types.IsString != 0 {
				obj := a.addNodes(sliceToArray(tDst), "convert")
				a.endObject(obj, cgn, conv)
				a.addressOf(tDst, res, obj)
				return
			}

		case *types.Basic:
			// All basic-to-basic type conversions are no-ops.
			// This includes uintptr<->unsafe.Pointer conversions,
			// which we (unsoundly) ignore.
			return
		}
	}

	panic(fmt.Sprintf("illegal *ssa.Convert %s -> %s: %s", tSrc, tDst, conv.Parent()))
}

// genAppend generates constraints for a call to append.
func (a *analysis) genAppend(instr *ssa.Call, cgn *cgnode) {
	// Consider z = append(x, y). y is optional.
	// This may allocate a new [1]T array; call its object w.
	// We get the following constraints:
	//	z = x
	//	z = &w
	//	*z = *y

	x := instr.Call.Args[0]

	z := instr
	a.copy(a.valueNode(z), a.valueNode(x), 1) // z = x

	if len(instr.Call.Args) == 1 {
		return // no allocation for z = append(x) or _ = append(x).
	}

	// TODO(adonovan): test append([]byte, ...string) []byte.

	y := instr.Call.Args[1]
	tArray := sliceToArray(instr.Call.Args[0].Type())

	w := a.nextNode()
	a.addNodes(tArray, "append")
	a.endObject(w, cgn, instr)

	a.copyElems(cgn, tArray.Elem(), z, y)        // *z = *y
	a.addressOf(instr.Type(), a.valueNode(z), w) // z = &w
}

// genBuiltinCall generates constraints for a call to a built-in.
func (a *analysis) genBuiltinCall(instr ssa.CallInstruction, cgn *cgnode) {
	call := instr.Common()
	switch call.Value.(*ssa.Builtin).Name() {
	case "append":
		// Safe cast: append cannot appear in a go or defer statement.
		a.genAppend(instr.(*ssa.Call), cgn)

	case "copy":
		tElem := call.Args[0].Type().Underlying().(*types.Slice).Elem()
		a.copyElems(cgn, tElem, call.Args[0], call.Args[1])

	case "panic":
		a.copy(a.panicNode, a.valueNode(call.Args[0]), 1)

	case "recover":
		if v := instr.Value(); v != nil {
			a.copy(a.valueNode(v), a.panicNode, 1)
		}

	case "print":
		// In the tests, the probe might be the sole reference
		// to its arg, so make sure we create nodes for it.
		if len(call.Args) > 0 {
			a.valueNode(call.Args[0])
		}

	case "ssa:wrapnilchk":
		a.copy(a.valueNode(instr.Value()), a.valueNode(call.Args[0]), 1)

	default:
		// No-ops: close len cap real imag complex print println delete.
	}
}

// shouldUseContext defines the context-sensitivity policy. It
// returns true if we should analyse all static calls to fn anew.
//
// Obviously this interface rather limits how much freedom we have to
// choose a policy. The current policy, rather arbitrarily, is true
// for intrinsics and accessor methods (actually: short, single-block,
// call-free functions). This is just a starting point.
//
func (a *analysis) shouldUseContext(fn *ssa.Function) bool {
	if a.findIntrinsic(fn) != nil {
		return true // treat intrinsics context-sensitively
	}
	if len(fn.Blocks) != 1 {
		return false // too expensive
	}
	blk := fn.Blocks[0]
	if len(blk.Instrs) > 10 {
		return false // too expensive
	}
	if fn.Synthetic != "" && (fn.Pkg == nil || fn != fn.Pkg.Func("init")) {
		return true // treat synthetic wrappers context-sensitively
	}
	for _, instr := range blk.Instrs {
		switch instr := instr.(type) {
		case ssa.CallInstruction:
			// Disallow function calls (except to built-ins)
			// because of the danger of unbounded recursion.
			if _, ok := instr.Common().Value.(*ssa.Builtin); !ok {
				return false
			}
		}
	}
	return true
}

// genStaticCall generates constraints for a statically dispatched function call.
func (a *analysis) genStaticCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	fn := call.StaticCallee()

	// Special cases for inlined intrinsics.
	switch fn {
	case a.runtimeSetFinalizer:
		// Inline SetFinalizer so the call appears direct.
		site.targets = a.addOneNode(tInvalid, "SetFinalizer.targets", nil)
		a.addConstraint(&runtimeSetFinalizerConstraint{
			targets: site.targets,
			x:       a.valueNode(call.Args[0]),
			f:       a.valueNode(call.Args[1]),
		})
		return

	case a.reflectValueCall:
		// Inline (reflect.Value).Call so the call appears direct.
		dotdotdot := false
		ret := reflectCallImpl(a, caller, site, a.valueNode(call.Args[0]), a.valueNode(call.Args[1]), dotdotdot)
		if result != 0 {
			a.addressOf(fn.Signature.Results().At(0).Type(), result, ret)
		}
		return
	}

	// Ascertain the context (contour/cgnode) for a particular call.
	var obj nodeid
	if a.shouldUseContext(fn) {
		obj = a.makeFunctionObject(fn, site) // new contour
	} else {
		obj = a.objectNode(nil, fn) // shared contour
	}
	a.callEdge(caller, site, obj)

	sig := call.Signature()

	// Copy receiver, if any.
	params := a.funcParams(obj)
	args := call.Args
	if sig.Recv() != nil {
		sz := a.sizeof(sig.Recv().Type())
		a.copy(params, a.valueNode(args[0]), sz)
		params += nodeid(sz)
		args = args[1:]
	}

	// Copy actual parameters into formal params block.
	// Must loop, since the actuals aren't contiguous.
	for i, arg := range args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(params, a.valueNode(arg), sz)
		params += nodeid(sz)
	}

	// Copy formal results block to actual result.
	if result != 0 {
		a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
	}
}

// genDynamicCall generates constraints for a dynamic function call.
func (a *analysis) genDynamicCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	// pts(targets) will be the set of possible call targets.
	site.targets = a.valueNode(call.Value)

	// We add dynamic closure rules that store the arguments into
	// the P-block and load the results from the R-block of each
	// function discovered in pts(targets).

	sig := call.Signature()
	var offset uint32 = 1 // P/R block starts at offset 1
	for i, arg := range call.Args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.genStore(caller, call.Value, a.valueNode(arg), offset, sz)
		offset += sz
	}
	if result != 0 {
		a.genLoad(caller, result, call.Value, offset, a.sizeof(sig.Results()))
	}
}
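
// For example (a sketch): for a call r = f(a, b) through a function
// value f, where a, b and r are each one logical field wide, this
// emits roughly:
//
//	store f[1] <- a    // into the P-block of each target
//	store f[2] <- b    // (offsets depend on parameter widths)
//	load  r    <- f[3] // from the R-block of each target
//
// Offset 0 is each function object's identity node, hence the blocks
// start at offset 1.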

// genInvoke generates constraints for a dynamic method invocation.
func (a *analysis) genInvoke(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	if call.Value.Type() == a.reflectType {
		a.genInvokeReflectType(caller, site, call, result)
		return
	}

	sig := call.Signature()

	// Allocate a contiguous targets/params/results block for this call.
	block := a.nextNode()
	// pts(targets) will be the set of possible call targets
	site.targets = a.addOneNode(sig, "invoke.targets", nil)
	p := a.addNodes(sig.Params(), "invoke.params")
	r := a.addNodes(sig.Results(), "invoke.results")

	// Copy the actual parameters into the call's params block.
	for i, n := 0, sig.Params().Len(); i < n; i++ {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(p, a.valueNode(call.Args[i]), sz)
		p += nodeid(sz)
	}
	// Copy the call's results block to the actual results.
	if result != 0 {
		a.copy(result, r, a.sizeof(sig.Results()))
	}

	// We add a dynamic invoke constraint that will connect the
	// caller's and the callee's P/R blocks for each discovered
	// call target.
	a.addConstraint(&invokeConstraint{call.Method, a.valueNode(call.Value), block})
}
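
// The per-call-site block allocated above is laid out as (a sketch):
//
//	block+0    targets node (pts = the possible concrete methods)
//	block+1... params block, one node per flattened parameter
//	...        results block, one node per flattened result
//
// The invoke constraint connects this block to the P/R blocks of each
// callee discovered in pts(call.Value) during solving.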

// genInvokeReflectType is a specialization of genInvoke where the
// receiver type is a reflect.Type, under the assumption that there
// can be at most one implementation of this interface, *reflect.rtype.
//
// (Though this may appear to be an instance of a pattern---method
// calls on interfaces known to have exactly one implementation---in
// practice it occurs rarely, so we special case for reflect.Type.)
//
// In effect we treat this:
//	var rt reflect.Type = ...
//	rt.F()
// as this:
//	rt.(*reflect.rtype).F()
//
func (a *analysis) genInvokeReflectType(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	// Unpack receiver into rtype
	rtype := a.addOneNode(a.reflectRtypePtr, "rtype.recv", nil)
	recv := a.valueNode(call.Value)
	a.typeAssert(a.reflectRtypePtr, rtype, recv, true)

	// Look up the concrete method.
	fn := a.prog.LookupMethod(a.reflectRtypePtr, call.Method.Pkg(), call.Method.Name())

	obj := a.makeFunctionObject(fn, site) // new contour for this call
	a.callEdge(caller, site, obj)

	// From now on, it's essentially a static call, but little is
	// gained by factoring together the code for both cases.

	sig := fn.Signature // concrete method
	targets := a.addOneNode(sig, "call.targets", nil)
	a.addressOf(sig, targets, obj) // (a singleton)

	// Copy receiver.
	params := a.funcParams(obj)
	a.copy(params, rtype, 1)
	params++

	// Copy actual parameters into formal P-block.
	// Must loop, since the actuals aren't contiguous.
	for i, arg := range call.Args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(params, a.valueNode(arg), sz)
		params += nodeid(sz)
	}

	// Copy formal R-block to actual R-block.
	if result != 0 {
		a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
	}
}

// genCall generates constraints for call instruction instr.
func (a *analysis) genCall(caller *cgnode, instr ssa.CallInstruction) {
	call := instr.Common()

	// Intrinsic implementations of built-in functions.
	if _, ok := call.Value.(*ssa.Builtin); ok {
		a.genBuiltinCall(instr, caller)
		return
	}

	var result nodeid
	if v := instr.Value(); v != nil {
		result = a.valueNode(v)
	}

	site := &callsite{instr: instr}
	if call.StaticCallee() != nil {
		a.genStaticCall(caller, site, call, result)
	} else if call.IsInvoke() {
		a.genInvoke(caller, site, call, result)
	} else {
		a.genDynamicCall(caller, site, call, result)
	}

	caller.sites = append(caller.sites, site)

	if a.log != nil {
		// TODO(adonovan): debug: improve log message.
		fmt.Fprintf(a.log, "\t%s to targets %s from %s\n", site, site.targets, caller)
	}
}

// objectNode returns the object to which v points, if known.
// In other words, if the points-to set of v is a singleton, it
// returns the sole label, zero otherwise.
//
// We exploit this information to make the generated constraints less
// dynamic. For example, a complex load constraint can be replaced by
// a simple copy constraint when the sole destination is known a priori.
//
// Some SSA instructions always have singleton points-to sets:
//	Alloc, Function, Global, MakeChan, MakeClosure, MakeInterface, MakeMap, MakeSlice.
// Others may be singletons depending on their operands:
//	FreeVar, Const, Convert, FieldAddr, IndexAddr, Slice.
//
// Idempotent. Objects are created as needed, possibly via recursion
// down the SSA value graph, e.g. IndexAddr(FieldAddr(Alloc)).
//
func (a *analysis) objectNode(cgn *cgnode, v ssa.Value) nodeid {
	switch v.(type) {
	case *ssa.Global, *ssa.Function, *ssa.Const, *ssa.FreeVar:
		// Global object.
		obj, ok := a.globalobj[v]
		if !ok {
			switch v := v.(type) {
			case *ssa.Global:
				obj = a.nextNode()
				a.addNodes(mustDeref(v.Type()), "global")
				a.endObject(obj, nil, v)

			case *ssa.Function:
				obj = a.makeFunctionObject(v, nil)

			case *ssa.Const:
				// not addressable

			case *ssa.FreeVar:
				// not addressable
			}

			if a.log != nil {
				fmt.Fprintf(a.log, "\tglobalobj[%s] = n%d\n", v, obj)
			}
			a.globalobj[v] = obj
		}
		return obj
	}

	// Local object.
	obj, ok := a.localobj[v]
	if !ok {
		switch v := v.(type) {
		case *ssa.Alloc:
			obj = a.nextNode()
			a.addNodes(mustDeref(v.Type()), "alloc")
			a.endObject(obj, cgn, v)

		case *ssa.MakeSlice:
			obj = a.nextNode()
			a.addNodes(sliceToArray(v.Type()), "makeslice")
			a.endObject(obj, cgn, v)

		case *ssa.MakeChan:
			obj = a.nextNode()
			a.addNodes(v.Type().Underlying().(*types.Chan).Elem(), "makechan")
			a.endObject(obj, cgn, v)

		case *ssa.MakeMap:
			obj = a.nextNode()
			tmap := v.Type().Underlying().(*types.Map)
			a.addNodes(tmap.Key(), "makemap.key")
			elem := a.addNodes(tmap.Elem(), "makemap.value")

			// To update the value field, MapUpdate
			// generates store-with-offset constraints which
			// the presolver can't model, so we must mark
			// those nodes indirect.
			for id, end := elem, elem+nodeid(a.sizeof(tmap.Elem())); id < end; id++ {
				a.mapValues = append(a.mapValues, id)
			}
			a.endObject(obj, cgn, v)

		case *ssa.MakeInterface:
			tConc := v.X.Type()
			obj = a.makeTagged(tConc, cgn, v)

			// Copy the value into it, if nontrivial.
			if x := a.valueNode(v.X); x != 0 {
				a.copy(obj+1, x, a.sizeof(tConc))
			}

		case *ssa.FieldAddr:
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + nodeid(a.offsetOf(mustDeref(v.X.Type()), v.Field))
			}

		case *ssa.IndexAddr:
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + 1
			}

		case *ssa.Slice:
			obj = a.objectNode(cgn, v.X)

		case *ssa.Convert:
			// TODO(adonovan): opt: handle these cases too:
			// - unsafe.Pointer->*T conversion acts like Alloc
			// - string->[]byte/[]rune conversion acts like MakeSlice
		}

		if a.log != nil {
			fmt.Fprintf(a.log, "\tlocalobj[%s] = n%d\n", v.Name(), obj)
		}
		a.localobj[v] = obj
	}
	return obj
}

// genLoad generates constraints for result = *(ptr + offset).
func (a *analysis) genLoad(cgn *cgnode, result nodeid, ptr ssa.Value, offset, sizeof uint32) {
	if obj := a.objectNode(cgn, ptr); obj != 0 {
		// Pre-apply loadConstraint.solve().
		a.copy(result, obj+nodeid(offset), sizeof)
	} else {
		a.load(result, a.valueNode(ptr), offset, sizeof)
	}
}
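
// For example (a sketch): for v := *p where p is known to point to a
// single Alloc object o, genLoad emits the elementary constraint
//
//	copy v <- o
//
// rather than a load constraint that the solver would re-evaluate as
// pts(p) grows. genOffsetAddr and genStore below apply the same
// pre-solving idea to offsetAddr and store constraints.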

// genOffsetAddr generates constraints for a 'v=ptr.field' (FieldAddr)
// or 'v=ptr[*]' (IndexAddr) instruction v.
func (a *analysis) genOffsetAddr(cgn *cgnode, v ssa.Value, ptr nodeid, offset uint32) {
	dst := a.valueNode(v)
	if obj := a.objectNode(cgn, v); obj != 0 {
		// Pre-apply offsetAddrConstraint.solve().
		a.addressOf(v.Type(), dst, obj)
	} else {
		a.offsetAddr(v.Type(), dst, ptr, offset)
	}
}

// genStore generates constraints for *(ptr + offset) = val.
func (a *analysis) genStore(cgn *cgnode, ptr ssa.Value, val nodeid, offset, sizeof uint32) {
	if obj := a.objectNode(cgn, ptr); obj != 0 {
		// Pre-apply storeConstraint.solve().
		a.copy(obj+nodeid(offset), val, sizeof)
	} else {
		a.store(a.valueNode(ptr), val, offset, sizeof)
	}
}

// genInstr generates constraints for instruction instr in context cgn.
func (a *analysis) genInstr(cgn *cgnode, instr ssa.Instruction) {
	if a.log != nil {
		var prefix string
		if val, ok := instr.(ssa.Value); ok {
			prefix = val.Name() + " = "
		}
		fmt.Fprintf(a.log, "; %s%s\n", prefix, instr)
	}

	switch instr := instr.(type) {
	case *ssa.DebugRef:
		// no-op.

	case *ssa.UnOp:
		switch instr.Op {
		case token.ARROW: // <-x
			// We can ignore instr.CommaOk because the node we're
			// altering is always at zero offset relative to instr
			tElem := instr.X.Type().Underlying().(*types.Chan).Elem()
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(tElem))

		case token.MUL: // *x
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(instr.Type()))

		default:
			// NOT, SUB, XOR: no-op.
		}

	case *ssa.BinOp:
		// All no-ops.

	case ssa.CallInstruction: // *ssa.Call, *ssa.Go, *ssa.Defer
		a.genCall(cgn, instr)

	case *ssa.ChangeType:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.Convert:
		a.genConv(instr, cgn)

	case *ssa.Extract:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.Tuple, instr.Index),
			a.sizeof(instr.Type()))

	case *ssa.FieldAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X),
			a.offsetOf(mustDeref(instr.X.Type()), instr.Field))

	case *ssa.IndexAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X), 1)

	case *ssa.Field:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.X, instr.Field),
			a.sizeof(instr.Type()))

	case *ssa.Index:
		a.copy(a.valueNode(instr), 1+a.valueNode(instr.X), a.sizeof(instr.Type()))

	case *ssa.Select:
		recv := a.valueOffsetNode(instr, 2) // instr : (index, recvOk, recv0, ... recv_n-1)
		for _, st := range instr.States {
			elemSize := a.sizeof(st.Chan.Type().Underlying().(*types.Chan).Elem())
			switch st.Dir {
			case types.RecvOnly:
				a.genLoad(cgn, recv, st.Chan, 0, elemSize)
				recv += nodeid(elemSize)

			case types.SendOnly:
				a.genStore(cgn, st.Chan, a.valueNode(st.Send), 0, elemSize)
			}
		}

	case *ssa.Return:
		results := a.funcResults(cgn.obj)
		for _, r := range instr.Results {
			sz := a.sizeof(r.Type())
			a.copy(results, a.valueNode(r), sz)
			results += nodeid(sz)
		}

	case *ssa.Send:
		a.genStore(cgn, instr.Chan, a.valueNode(instr.X), 0, a.sizeof(instr.X.Type()))

	case *ssa.Store:
		a.genStore(cgn, instr.Addr, a.valueNode(instr.Val), 0, a.sizeof(instr.Val.Type()))

	case *ssa.Alloc, *ssa.MakeSlice, *ssa.MakeChan, *ssa.MakeMap, *ssa.MakeInterface:
		v := instr.(ssa.Value)
		a.addressOf(v.Type(), a.valueNode(v), a.objectNode(cgn, v))

	case *ssa.ChangeInterface:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.TypeAssert:
		a.typeAssert(instr.AssertedType, a.valueNode(instr), a.valueNode(instr.X), true)

	case *ssa.Slice:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.If, *ssa.Jump:
		// no-op.

	case *ssa.Phi:
		sz := a.sizeof(instr.Type())
		for _, e := range instr.Edges {
			a.copy(a.valueNode(instr), a.valueNode(e), sz)
		}

	case *ssa.MakeClosure:
		fn := instr.Fn.(*ssa.Function)
		a.copy(a.valueNode(instr), a.valueNode(fn), 1)
		// Free variables are treated like global variables.
		for i, b := range instr.Bindings {
			a.copy(a.valueNode(fn.FreeVars[i]), a.valueNode(b), a.sizeof(b.Type()))
		}

	case *ssa.RunDefers:
		// The analysis is flow insensitive, so we just "call"
		// defers as we encounter them.

	case *ssa.Range:
		// Do nothing. Next{Iter: *ssa.Range} handles this case.

	case *ssa.Next:
		if !instr.IsString { // map
			// Assumes that Next is always directly applied to a Range result.
			theMap := instr.Iter.(*ssa.Range).X
			tMap := theMap.Type().Underlying().(*types.Map)
			ksize := a.sizeof(tMap.Key())
			vsize := a.sizeof(tMap.Elem())

			// Load from the map's (k,v) into the tuple's (ok, k, v).
			a.genLoad(cgn, a.valueNode(instr)+1, theMap, 0, ksize+vsize)
		}

	case *ssa.Lookup:
		if tMap, ok := instr.X.Type().Underlying().(*types.Map); ok {
			// CommaOk can be ignored: field 0 is a no-op.
			ksize := a.sizeof(tMap.Key())
			vsize := a.sizeof(tMap.Elem())
			a.genLoad(cgn, a.valueNode(instr), instr.X, ksize, vsize)
		}

	case *ssa.MapUpdate:
		tmap := instr.Map.Type().Underlying().(*types.Map)
		ksize := a.sizeof(tmap.Key())
		vsize := a.sizeof(tmap.Elem())
		a.genStore(cgn, instr.Map, a.valueNode(instr.Key), 0, ksize)
		a.genStore(cgn, instr.Map, a.valueNode(instr.Value), ksize, vsize)

	case *ssa.Panic:
		a.copy(a.panicNode, a.valueNode(instr.X), 1)

	default:
		panic(fmt.Sprintf("unimplemented: %T", instr))
	}
}

func (a *analysis) makeCGNode(fn *ssa.Function, obj nodeid, callersite *callsite) *cgnode {
	cgn := &cgnode{fn: fn, obj: obj, callersite: callersite}
	a.cgnodes = append(a.cgnodes, cgn)
	return cgn
}

// genRootCalls generates the synthetic root of the callgraph and the
// initial calls from it to the analysis scope, such as main, a test
// or a library.
//
func (a *analysis) genRootCalls() *cgnode {
	r := a.prog.NewFunction("<root>", new(types.Signature), "root of callgraph")
	root := a.makeCGNode(r, 0, nil)

	// TODO(adonovan): make an ssa utility to construct an actual
	// root function so we don't need to special-case site-less
	// call edges.

	// For each main package, call main.init(), main.main().
	for _, mainPkg := range a.config.Mains {
		main := mainPkg.Func("main")
		if main == nil {
			panic(fmt.Sprintf("%s has no main function", mainPkg))
		}

		targets := a.addOneNode(main.Signature, "root.targets", nil)
		site := &callsite{targets: targets}
		root.sites = append(root.sites, site)
		for _, fn := range [2]*ssa.Function{mainPkg.Func("init"), main} {
			if a.log != nil {
				fmt.Fprintf(a.log, "\troot call to %s:\n", fn)
			}
			a.copy(targets, a.valueNode(fn), 1)
		}
	}

	return root
}

// genFunc generates constraints for function fn.
func (a *analysis) genFunc(cgn *cgnode) {
	fn := cgn.fn

	impl := a.findIntrinsic(fn)

	if a.log != nil {
		fmt.Fprintf(a.log, "\n\n==== Generating constraints for %s, %s\n", cgn, cgn.contour())

		// Hack: don't display body if intrinsic.
		if impl != nil {
			fn2 := *cgn.fn // copy
			fn2.Locals = nil
			fn2.Blocks = nil
			fn2.WriteTo(a.log)
		} else {
			cgn.fn.WriteTo(a.log)
		}
	}

	if impl != nil {
		impl(a, cgn)
		return
	}

	if fn.Blocks == nil {
		// External function with no intrinsic treatment.
		// We'll warn about calls to such functions at the end.
		return
	}

	if a.log != nil {
		fmt.Fprintln(a.log, "; Creating nodes for local values")
	}

	a.localval = make(map[ssa.Value]nodeid)
	a.localobj = make(map[ssa.Value]nodeid)

	// The value nodes for the params are in the func object block.
	params := a.funcParams(cgn.obj)
	for _, p := range fn.Params {
		a.setValueNode(p, params, cgn)
		params += nodeid(a.sizeof(p.Type()))
	}

	// Free variables have global cardinality:
	// the outer function sets them with MakeClosure;
	// the inner function accesses them with FreeVar.
	//
	// TODO(adonovan): treat free vars context-sensitively.

	// Create value nodes for all value instructions
	// since SSA may contain forward references.
	var space [10]*ssa.Value
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			switch instr := instr.(type) {
			case *ssa.Range:
				// do nothing: it has a funky type,
				// and *ssa.Next does all the work.

			case ssa.Value:
				var comment string
				if a.log != nil {
					comment = instr.Name()
				}
				id := a.addNodes(instr.Type(), comment)
				a.setValueNode(instr, id, cgn)
			}

			// Record all address-taken functions (for presolver).
			rands := instr.Operands(space[:0])
			if call, ok := instr.(ssa.CallInstruction); ok && !call.Common().IsInvoke() {
				// Skip CallCommon.Value in "call" mode.
				// TODO(adonovan): fix: relies on unspecified ordering. Specify it.
				rands = rands[1:]
			}
			for _, rand := range rands {
				if atf, ok := (*rand).(*ssa.Function); ok {
					a.atFuncs[atf] = true
				}
			}
		}
	}

	// Generate constraints for instructions.
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			a.genInstr(cgn, instr)
		}
	}

	a.localval = nil
	a.localobj = nil
}

// genMethodsOf generates nodes and constraints for all methods of type T.
func (a *analysis) genMethodsOf(T types.Type) {
	itf := isInterface(T)

	// TODO(adonovan): can we skip this entirely if itf is true?
	// I think so, but the answer may depend on reflection.
	mset := a.prog.MethodSets.MethodSet(T)
	for i, n := 0, mset.Len(); i < n; i++ {
		m := a.prog.Method(mset.At(i))
		a.valueNode(m)

		if !itf {
			// Methods of concrete types are address-taken functions.
			a.atFuncs[m] = true
		}
	}
}

// generate generates offline constraints for the entire program.
func (a *analysis) generate() {
	start("Constraint generation")
	if a.log != nil {
		fmt.Fprintln(a.log, "==== Generating constraints")
	}

	// Create a dummy node since we use the nodeid 0 for
	// non-pointerlike variables.
	a.addNodes(tInvalid, "(zero)")

	// Create the global node for panic values.
	a.panicNode = a.addNodes(tEface, "panic")

	// Create nodes and constraints for all methods of reflect.rtype.
	// (Shared contours are used by dynamic calls to reflect.Type
	// methods---typically just String().)
	if rtype := a.reflectRtypePtr; rtype != nil {
		a.genMethodsOf(rtype)
	}

	root := a.genRootCalls()

	if a.config.BuildCallGraph {
		a.result.CallGraph = callgraph.New(root.fn)
	}

	// Create nodes and constraints for all methods of all types
	// that are dynamically accessible via reflection or interfaces.
	for _, T := range a.prog.RuntimeTypes() {
		a.genMethodsOf(T)
	}

	// Generate constraints for entire program.
	for len(a.genq) > 0 {
		cgn := a.genq[0]
		a.genq = a.genq[1:]
		a.genFunc(cgn)
	}

	// The runtime magically allocates os.Args; so should we.
	if os := a.prog.ImportedPackage("os"); os != nil {
		// In effect: os.Args = new([1]string)[:]
		T := types.NewSlice(types.Typ[types.String])
		obj := a.addNodes(sliceToArray(T), "<command-line args>")
		a.endObject(obj, nil, "<command-line args>")
		a.addressOf(T, a.objectNode(nil, os.Var("Args")), obj)
	}

	// Discard generation state, to avoid confusion after node renumbering.
	a.panicNode = 0
	a.globalval = nil
	a.localval = nil
	a.localobj = nil

	stop("Constraint generation")
}