//===- ssa.go - IR generation from go/ssa ---------------------------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file implements the top-level LLVM IR generation from go/ssa form.
11//
12//===----------------------------------------------------------------------===//
13
14package irgen
15
16import (
17 "fmt"
18 "go/ast"
19 "go/token"
20 "os"
21 "sort"
22
23 "llvm.org/llgo/ssaopt"
24 "llvm.org/llgo/third_party/go.tools/go/ssa"
25 "llvm.org/llgo/third_party/go.tools/go/ssa/ssautil"
26 "llvm.org/llgo/third_party/go.tools/go/types"
27 "llvm.org/llvm/bindings/go/llvm"
28)
29
// A globalInit is used to temporarily store a global's initializer until
// we are ready to build it. It mirrors the aggregate structure of the
// global: val holds a complete constant for this node once one is known,
// while elems holds lazily-allocated per-element initializers.
type globalInit struct {
	val   llvm.Value   // complete constant initializer for this node, if set
	elems []globalInit // per-element initializers (struct fields or array elements)
}
36
// update records a store of val into the initializer of a global of LLVM
// type typ, at the element addressed by indices (as for
// llvm.ConstInsertValue). An empty indices slice replaces this node's
// entire value. Both representations are kept in sync: if a complete
// constant already exists it is updated in place, and the per-element
// tree is always updated so build can reconstruct the constant later.
func (gi *globalInit) update(typ llvm.Type, indices []uint32, val llvm.Value) {
	if len(indices) == 0 {
		// Store targets this node directly: replace the whole value.
		gi.val = val
		return
	}

	if gi.val.C != nil {
		// A complete constant already exists for this node; fold the
		// new element value into it.
		gi.val = llvm.ConstInsertValue(gi.val, val, indices)
	}

	tk := typ.TypeKind()

	// Lazily allocate the per-element slots on first elementwise store.
	if len(gi.elems) == 0 {
		switch tk {
		case llvm.StructTypeKind:
			gi.elems = make([]globalInit, typ.StructElementTypesCount())
		case llvm.ArrayTypeKind:
			gi.elems = make([]globalInit, typ.ArrayLength())
		default:
			panic("unexpected type")
		}
	}

	// Determine the addressed element's type, then recurse with the
	// remaining indices.
	var eltyp llvm.Type
	switch tk {
	case llvm.StructTypeKind:
		eltyp = typ.StructElementTypes()[indices[0]]
	case llvm.ArrayTypeKind:
		eltyp = typ.ElementType()
	default:
		panic("unexpected type")
	}

	gi.elems[indices[0]].update(eltyp, indices[1:], val)
}
72
73func (gi *globalInit) build(typ llvm.Type) llvm.Value {
74 if gi.val.C != nil {
75 return gi.val
76 }
77 if len(gi.elems) == 0 {
78 return llvm.ConstNull(typ)
79 }
80
81 switch typ.TypeKind() {
82 case llvm.StructTypeKind:
83 eltypes := typ.StructElementTypes()
84 elems := make([]llvm.Value, len(eltypes))
85 for i, eltyp := range eltypes {
86 elems[i] = gi.elems[i].build(eltyp)
87 }
88 return llvm.ConstStruct(elems, false)
89 case llvm.ArrayTypeKind:
90 eltyp := typ.ElementType()
91 elems := make([]llvm.Value, len(gi.elems))
92 for i := range gi.elems {
93 elems[i] = gi.elems[i].build(eltyp)
94 }
95 return llvm.ConstArray(eltyp, elems)
96 default:
97 panic("unexpected type")
98 }
99}
100
// A unit holds the state for translating a single *ssa.Package into
// LLVM IR within the enclosing compiler.
type unit struct {
	*compiler
	pkg         *ssa.Package
	globals     map[ssa.Value]llvm.Value   // SSA globals/functions -> LLVM globals
	globalInits map[llvm.Value]*globalInit // pending initializers, built at the end of translatePackage

	// funcDescriptors maps *ssa.Functions to function descriptors,
	// the first-class representation of functions.
	funcDescriptors map[*ssa.Function]llvm.Value

	// undefinedFuncs contains functions that have been resolved
	// (declared) but not defined.
	undefinedFuncs map[*ssa.Function]bool

	// gcRoots accumulates descriptors of pointer-containing globals;
	// they are registered with the runtime by registerGcRoots.
	gcRoots []llvm.Value
}
117
118func newUnit(c *compiler, pkg *ssa.Package) *unit {
119 u := &unit{
120 compiler: c,
121 pkg: pkg,
122 globals: make(map[ssa.Value]llvm.Value),
123 globalInits: make(map[llvm.Value]*globalInit),
124 funcDescriptors: make(map[*ssa.Function]llvm.Value),
125 undefinedFuncs: make(map[*ssa.Function]bool),
126 }
127 return u
128}
129
130type byMemberName []ssa.Member
131
132func (ms byMemberName) Len() int { return len(ms) }
133func (ms byMemberName) Swap(i, j int) {
134 ms[i], ms[j] = ms[j], ms[i]
135}
136func (ms byMemberName) Less(i, j int) bool {
137 return ms[i].Name() < ms[j].Name()
138}
139
140type byFunctionString []*ssa.Function
141
142func (fns byFunctionString) Len() int { return len(fns) }
143func (fns byFunctionString) Swap(i, j int) {
144 fns[i], fns[j] = fns[j], fns[i]
145}
146func (fns byFunctionString) Less(i, j int) bool {
147 return fns[i].String() < fns[j].String()
148}
149
150// Emit functions in order of their fully qualified names. This is so that a
151// bootstrap build can be verified by comparing the stage2 and stage3 binaries.
152func (u *unit) defineFunctionsInOrder(functions map[*ssa.Function]bool) {
153 fns := []*ssa.Function{}
154 for f, _ := range functions {
155 fns = append(fns, f)
156 }
157 sort.Sort(byFunctionString(fns))
158 for _, f := range fns {
159 u.defineFunction(f)
160 }
161}
162
// translatePackage translates an *ssa.Package into an LLVM module, and returns
// the translation unit information.
func (u *unit) translatePackage(pkg *ssa.Package) {
	// Copy the members into a slice and sort by name so that generated
	// IR does not depend on map iteration order.
	ms := make([]ssa.Member, len(pkg.Members))
	i := 0
	for _, m := range pkg.Members {
		ms[i] = m
		i++
	}

	sort.Sort(byMemberName(ms))

	// Initialize global storage and type descriptors for this package.
	// We must create globals regardless of whether they're referenced,
	// hence the duplication in frame.value.
	for _, m := range ms {
		switch v := m.(type) {
		case *ssa.Global:
			elemtyp := deref(v.Type())
			llelemtyp := u.llvmtypes.ToLLVM(elemtyp)
			vname := u.types.mc.mangleGlobalName(v)
			global := llvm.AddGlobal(u.module.Module, llelemtyp, vname)
			if !v.Object().Exported() {
				global.SetLinkage(llvm.InternalLinkage)
			}
			u.addGlobal(global, elemtyp)
			// The map stores the global as a pointer of the Go type.
			global = llvm.ConstBitCast(global, u.llvmtypes.ToLLVM(v.Type()))
			u.globals[v] = global
		case *ssa.Type:
			u.types.getTypeDescriptorPointer(v.Type())
		}
	}

	// Define functions.
	u.defineFunctionsInOrder(ssautil.AllFunctions(pkg.Prog))

	// Emit initializers for type descriptors, which may trigger
	// the resolution of additional functions.
	u.types.emitTypeDescInitializers()

	// Define remaining functions that were resolved during
	// runtime type mapping, but not defined.
	u.defineFunctionsInOrder(u.undefinedFuncs)

	// Set initializers for globals. This happens last because defining
	// functions may record constant stores into the initializers
	// (see maybeStoreInInitializer).
	for global, init := range u.globalInits {
		initval := init.build(global.Type().ElementType())
		global.SetInitializer(initval)
	}
}
213
214func (u *unit) addGlobal(global llvm.Value, ty types.Type) {
215 u.globalInits[global] = new(globalInit)
216
217 if hasPointers(ty) {
218 global = llvm.ConstBitCast(global, llvm.PointerType(llvm.Int8Type(), 0))
219 size := llvm.ConstInt(u.types.inttype, uint64(u.types.Sizeof(ty)), false)
220 root := llvm.ConstStruct([]llvm.Value{global, size}, false)
221 u.gcRoots = append(u.gcRoots, root)
222 }
223}
224
225// ResolveMethod implements MethodResolver.ResolveMethod.
226func (u *unit) ResolveMethod(s *types.Selection) *govalue {
227 m := u.pkg.Prog.Method(s)
228 llfn := u.resolveFunctionGlobal(m)
229 llfn = llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0))
230 return newValue(llfn, m.Signature)
231}
232
233// resolveFunctionDescriptorGlobal returns a reference to the LLVM global
234// storing the function's descriptor.
235func (u *unit) resolveFunctionDescriptorGlobal(f *ssa.Function) llvm.Value {
236 llfd, ok := u.funcDescriptors[f]
237 if !ok {
238 name := u.types.mc.mangleFunctionName(f) + "$descriptor"
239 llfd = llvm.AddGlobal(u.module.Module, llvm.PointerType(llvm.Int8Type(), 0), name)
240 llfd.SetGlobalConstant(true)
241 u.funcDescriptors[f] = llfd
242 }
243 return llfd
244}
245
246// resolveFunctionDescriptor returns a function's
247// first-class value representation.
248func (u *unit) resolveFunctionDescriptor(f *ssa.Function) *govalue {
249 llfd := u.resolveFunctionDescriptorGlobal(f)
250 llfd = llvm.ConstBitCast(llfd, llvm.PointerType(llvm.Int8Type(), 0))
251 return newValue(llfd, f.Signature)
252}
253
254// resolveFunctionGlobal returns an llvm.Value for a function global.
255func (u *unit) resolveFunctionGlobal(f *ssa.Function) llvm.Value {
256 if v, ok := u.globals[f]; ok {
257 return v
258 }
259 name := u.types.mc.mangleFunctionName(f)
260 // It's possible that the function already exists in the module;
261 // for example, if it's a runtime intrinsic that the compiler
262 // has already referenced.
263 llvmFunction := u.module.Module.NamedFunction(name)
264 if llvmFunction.IsNil() {
265 fti := u.llvmtypes.getSignatureInfo(f.Signature)
266 llvmFunction = fti.declare(u.module.Module, name)
267 u.undefinedFuncs[f] = true
268 }
269 u.globals[f] = llvmFunction
270 return llvmFunction
271}
272
273func (u *unit) getFunctionLinkage(f *ssa.Function) llvm.Linkage {
274 switch {
275 case f.Pkg == nil:
276 // Synthetic functions outside packages may appear in multiple packages.
277 return llvm.LinkOnceODRLinkage
278
279 case f.Parent() != nil:
280 // Anonymous.
281 return llvm.InternalLinkage
282
283 case f.Signature.Recv() == nil && !ast.IsExported(f.Name()) &&
284 !(f.Name() == "main" && f.Pkg.Object.Path() == "main") &&
285 f.Name() != "init":
286 // Unexported methods may be referenced as part of an interface method
287 // table in another package. TODO(pcc): detect when this cannot happen.
288 return llvm.InternalLinkage
289
290 default:
291 return llvm.ExternalLinkage
292 }
293}
294
// defineFunction emits the LLVM IR for f: its descriptor (for
// non-methods), prologue (parameter decoding, closure unpacking, local
// allocas), the translated body blocks, phi fixups, and the unwind block
// when the function uses defer/recover.
func (u *unit) defineFunction(f *ssa.Function) {
	// Only define functions from this package, or synthetic
	// wrappers (which do not have a package).
	if f.Pkg != nil && f.Pkg != u.pkg {
		return
	}

	llfn := u.resolveFunctionGlobal(f)
	linkage := u.getFunctionLinkage(f)

	isMethod := f.Signature.Recv() != nil

	// Methods cannot be referred to via a descriptor.
	if !isMethod {
		llfd := u.resolveFunctionDescriptorGlobal(f)
		llfd.SetInitializer(llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0)))
		llfd.SetLinkage(linkage)
	}

	// We only need to emit a descriptor for functions without bodies.
	if len(f.Blocks) == 0 {
		return
	}

	// Lower heap allocations to stack allocations where possible.
	ssaopt.LowerAllocsToStack(f)

	if u.DumpSSA {
		f.WriteTo(os.Stderr)
	}

	fr := newFrame(u, llfn)
	defer fr.dispose()
	fr.addCommonFunctionAttrs(fr.function)
	fr.function.SetLinkage(linkage)

	fr.logf("Define function: %s", f.String())
	fti := u.llvmtypes.getSignatureInfo(f.Signature)
	delete(u.undefinedFuncs, f)
	fr.retInf = fti.retInf

	// Push the compile unit and function onto the debug context.
	if u.GenerateDebug {
		u.debug.PushFunction(fr.function, f.Signature, f.Pos())
		defer u.debug.PopFunction()
		u.debug.SetLocation(fr.builder, f.Pos())
	}

	// If a function calls recover, we create a separate function to
	// hold the real function, and this function calls __go_can_recover
	// and bridges to it.
	if callsRecover(f) {
		fr = fr.bridgeRecoverFunc(fr.function, fti)
	}

	// Create one LLVM basic block per SSA block, named by index and
	// SSA comment for readability of the emitted IR.
	fr.blocks = make([]llvm.BasicBlock, len(f.Blocks))
	fr.lastBlocks = make([]llvm.BasicBlock, len(f.Blocks))
	for i, block := range f.Blocks {
		fr.blocks[i] = llvm.AddBasicBlock(fr.function, fmt.Sprintf(".%d.%s", i, block.Comment))
	}
	fr.builder.SetInsertPointAtEnd(fr.blocks[0])

	// The prologue block holds parameter decoding, closure unpacking
	// and local allocas; it is inserted before the entry block.
	prologueBlock := llvm.InsertBasicBlock(fr.blocks[0], "prologue")
	fr.builder.SetInsertPointAtEnd(prologueBlock)

	// Map parameter positions to indices. We use this
	// when processing locals to map back to parameters
	// when generating debug metadata.
	paramPos := make(map[token.Pos]int)
	for i, param := range f.Params {
		paramPos[param.Pos()] = i
		llparam := fti.argInfos[i].decode(llvm.GlobalContext(), fr.builder, fr.builder)
		if isMethod && i == 0 {
			// A non-pointer receiver arrives as a pointer; bitcast
			// and load the actual receiver value.
			if _, ok := param.Type().Underlying().(*types.Pointer); !ok {
				llparam = fr.builder.CreateBitCast(llparam, llvm.PointerType(fr.types.ToLLVM(param.Type()), 0), "")
				llparam = fr.builder.CreateLoad(llparam, "")
			}
		}
		fr.env[param] = newValue(llparam, param.Type())
	}

	// Load closure, extract free vars.
	if len(f.FreeVars) > 0 {
		for _, fv := range f.FreeVars {
			fr.env[fv] = newValue(llvm.ConstNull(u.llvmtypes.ToLLVM(fv.Type())), fv.Type())
		}
		elemTypes := make([]llvm.Type, len(f.FreeVars)+1)
		elemTypes[0] = llvm.PointerType(llvm.Int8Type(), 0) // function pointer
		for i, fv := range f.FreeVars {
			elemTypes[i+1] = u.llvmtypes.ToLLVM(fv.Type())
		}
		structType := llvm.StructType(elemTypes, false)
		closure := fr.runtime.getClosure.call(fr)[0]
		closure = fr.builder.CreateBitCast(closure, llvm.PointerType(structType, 0), "")
		for i, fv := range f.FreeVars {
			ptr := fr.builder.CreateStructGEP(closure, i+1, "")
			ptr = fr.builder.CreateLoad(ptr, "")
			fr.env[fv] = newValue(ptr, fv.Type())
		}
	}

	// Allocate stack space for locals in the prologue block.
	for _, local := range f.Locals {
		typ := fr.llvmtypes.ToLLVM(deref(local.Type()))
		alloca := fr.builder.CreateAlloca(typ, local.Comment)
		fr.memsetZero(alloca, llvm.SizeOf(typ))
		bcalloca := fr.builder.CreateBitCast(alloca, llvm.PointerType(llvm.Int8Type(), 0), "")
		value := newValue(bcalloca, local.Type())
		fr.env[local] = value
		if fr.GenerateDebug {
			// -1 marks a plain local (not a parameter) for the
			// debug metadata.
			paramIndex, ok := paramPos[local.Pos()]
			if !ok {
				paramIndex = -1
			}
			fr.debug.Declare(fr.builder, local, alloca, paramIndex)
		}
	}

	// If this is the "init" function, enable init-specific optimizations.
	if !isMethod && f.Name() == "init" {
		fr.isInit = true
	}

	// If the function contains any defers, we must first create
	// an unwind block. We can short-circuit the check for defers with
	// f.Recover != nil.
	if f.Recover != nil || hasDefer(f) {
		fr.unwindBlock = llvm.AddBasicBlock(fr.function, "")
		fr.frameptr = fr.builder.CreateAlloca(llvm.Int8Type(), "")
	}

	// Terminate the prologue, and point the alloca builder just before
	// the branch so later allocas land in the prologue.
	term := fr.builder.CreateBr(fr.blocks[0])
	fr.allocaBuilder.SetInsertPointBefore(term)

	for _, block := range f.DomPreorder() {
		fr.translateBlock(block, fr.blocks[block.Index])
	}

	fr.fixupPhis()

	if !fr.unwindBlock.IsNil() {
		fr.setupUnwindBlock(f.Recover, f.Signature.Results())
	}

	// The init function needs to register the GC roots first. We do this
	// after generating code for it because allocations may have caused
	// additional GC roots to be created.
	if fr.isInit {
		fr.builder.SetInsertPointBefore(prologueBlock.FirstInstruction())
		fr.registerGcRoots()
	}
}
446
// A pendingPhi pairs an SSA phi with its LLVM phi node; the incoming
// values are filled in by fixupPhis once all predecessor blocks have
// been translated.
type pendingPhi struct {
	ssa  *ssa.Phi
	llvm llvm.Value
}
451
// A frame holds the per-function state used while translating the body
// of a single *ssa.Function into LLVM IR.
type frame struct {
	*unit
	function               llvm.Value   // the LLVM function being built
	builder, allocaBuilder llvm.Builder // instruction builder; alloca builder pinned into the prologue
	retInf                 retInfo
	blocks                 []llvm.BasicBlock // one LLVM block per SSA block, indexed by block index
	lastBlocks             []llvm.BasicBlock // final insertion block per SSA block (see translateBlock)
	runtimeErrorBlocks     [gccgoRuntimeErrorCount]llvm.BasicBlock
	unwindBlock            llvm.BasicBlock // panic destination; set only when the function has defers
	frameptr               llvm.Value      // i8 alloca passed to the defer/recover runtime calls
	env                    map[ssa.Value]*govalue   // translated values for SSA values
	ptr                    map[ssa.Value]llvm.Value // addresses substituted for avoided loads (see canAvoidLoad)
	tuples                 map[ssa.Value][]*govalue // multi-result call values
	phis                   []pendingPhi // phi nodes awaiting incoming edges (see fixupPhis)
	canRecover             llvm.Value   // __go_can_recover result parameter, in $recover functions
	isInit                 bool         // true while compiling the package "init" function
}
469
470func newFrame(u *unit, fn llvm.Value) *frame {
471 return &frame{
472 unit: u,
473 function: fn,
474 builder: llvm.GlobalContext().NewBuilder(),
475 allocaBuilder: llvm.GlobalContext().NewBuilder(),
476 env: make(map[ssa.Value]*govalue),
477 ptr: make(map[ssa.Value]llvm.Value),
478 tuples: make(map[ssa.Value][]*govalue),
479 }
480}
481
// dispose releases the LLVM builders owned by the frame.
func (fr *frame) dispose() {
	fr.builder.Dispose()
	fr.allocaBuilder.Dispose()
}
486
// bridgeRecoverFunc creates a function that may call recover(), and creates
// a call to it from the current frame. The created function will be called
// with a boolean parameter that indicates whether it may call recover().
//
// The created function will have the same name as the current frame's function
// with "$recover" appended, having the same return types and parameters with
// an additional boolean parameter appended.
//
// A new frame will be returned for the newly created function.
func (fr *frame) bridgeRecoverFunc(llfn llvm.Value, fti functionTypeInfo) *frame {
	// The bridging function must not be inlined, or the return address
	// may not correspond to the source function.
	llfn.AddFunctionAttr(llvm.NoInlineAttribute)

	// Call __go_can_recover, passing in the function's return address.
	entry := llvm.AddBasicBlock(llfn, "entry")
	fr.builder.SetInsertPointAtEnd(entry)
	canRecover := fr.runtime.canRecover.call(fr, fr.returnAddress(0))[0]
	returnType := fti.functionType.ReturnType()
	argTypes := fti.functionType.ParamTypes()
	argTypes = append(argTypes, canRecover.Type())

	// Create and call the $recover function.
	ftiRecover := fti
	ftiRecover.functionType = llvm.FunctionType(returnType, argTypes, false)
	llfnRecover := ftiRecover.declare(fr.module.Module, llfn.Name()+"$recover")
	fr.addCommonFunctionAttrs(llfnRecover)
	llfnRecover.SetLinkage(llvm.InternalLinkage)
	// Forward the bridge's own parameters, plus the __go_can_recover
	// result as the extra trailing argument.
	args := make([]llvm.Value, len(argTypes)-1, len(argTypes))
	for i := range args {
		args[i] = llfn.Param(i)
	}
	args = append(args, canRecover)
	result := fr.builder.CreateCall(llfnRecover, args, "")
	if returnType.TypeKind() == llvm.VoidTypeKind {
		fr.builder.CreateRetVoid()
	} else {
		fr.builder.CreateRet(result)
	}

	// The $recover function must condition calls to __go_recover on
	// the result of __go_can_recover passed in as an argument.
	fr = newFrame(fr.unit, llfnRecover)
	fr.retInf = ftiRecover.retInf
	fr.canRecover = fr.function.Param(len(argTypes) - 1)
	return fr
}
534
535func (fr *frame) registerGcRoots() {
536 if len(fr.gcRoots) != 0 {
537 rootty := fr.gcRoots[0].Type()
538 roots := append(fr.gcRoots, llvm.ConstNull(rootty))
539 rootsarr := llvm.ConstArray(rootty, roots)
540 rootsstruct := llvm.ConstStruct([]llvm.Value{llvm.ConstNull(llvm.PointerType(llvm.Int8Type(), 0)), rootsarr}, false)
541
542 rootsglobal := llvm.AddGlobal(fr.module.Module, rootsstruct.Type(), "")
543 rootsglobal.SetInitializer(rootsstruct)
544 rootsglobal.SetLinkage(llvm.InternalLinkage)
545 fr.runtime.registerGcRoots.callOnly(fr, llvm.ConstBitCast(rootsglobal, llvm.PointerType(llvm.Int8Type(), 0)))
546 }
547}
548
549func (fr *frame) fixupPhis() {
550 for _, phi := range fr.phis {
551 values := make([]llvm.Value, len(phi.ssa.Edges))
552 blocks := make([]llvm.BasicBlock, len(phi.ssa.Edges))
553 block := phi.ssa.Block()
554 for i, edge := range phi.ssa.Edges {
555 values[i] = fr.llvmvalue(edge)
556 blocks[i] = fr.lastBlock(block.Preds[i])
557 }
558 phi.llvm.AddIncoming(values, blocks)
559 }
560}
561
562func (fr *frame) createLandingPad(cleanup bool) llvm.Value {
563 lp := fr.builder.CreateLandingPad(fr.runtime.gccgoExceptionType, fr.runtime.gccgoPersonality, 0, "")
564 if cleanup {
565 lp.SetCleanup(true)
566 } else {
567 lp.AddClause(llvm.ConstNull(llvm.PointerType(llvm.Int8Type(), 0)))
568 }
569 return lp
570}
571
// Runs defers. If a defer panics, check for recovers in later defers.
func (fr *frame) runDefers() {
	loopbb := llvm.AddBasicBlock(fr.function, "")
	fr.builder.CreateBr(loopbb)

	// Landing pad reached when a deferred call panics: ask the runtime
	// to check the remaining defers, then re-enter the defer loop.
	retrylpad := llvm.AddBasicBlock(fr.function, "")
	fr.builder.SetInsertPointAtEnd(retrylpad)
	fr.createLandingPad(false)
	fr.runtime.checkDefer.callOnly(fr, fr.frameptr)
	fr.builder.CreateBr(loopbb)

	// The defer loop: invoke the runtime's undefer, unwinding to the
	// retry landing pad on panic.
	fr.builder.SetInsertPointAtEnd(loopbb)
	fr.runtime.undefer.invoke(fr, retrylpad, fr.frameptr)
}
586
// setupUnwindBlock populates the frame's unwind block: it catches the
// in-flight panic, runs deferred calls, and either transfers control to
// the recover block (rec, or synthesized zero-value returns) or resumes
// unwinding.
func (fr *frame) setupUnwindBlock(rec *ssa.BasicBlock, results *types.Tuple) {
	recoverbb := llvm.AddBasicBlock(fr.function, "")
	if rec != nil {
		// The function has an explicit recover block; translate it.
		fr.translateBlock(rec, recoverbb)
	} else if results.Len() == 0 || results.At(0).Anonymous() {
		// TODO(pcc): Remove this code after https://codereview.appspot.com/87210044/ lands
		fr.builder.SetInsertPointAtEnd(recoverbb)
		values := make([]llvm.Value, results.Len())
		for i := range values {
			values[i] = llvm.ConstNull(fr.llvmtypes.ToLLVM(results.At(i).Type()))
		}
		fr.retInf.encode(llvm.GlobalContext(), fr.allocaBuilder, fr.builder, values)
	} else {
		fr.builder.SetInsertPointAtEnd(recoverbb)
		fr.builder.CreateUnreachable()
	}

	// Cleanup landing pad reached from checkDefer: run remaining
	// defers, then decide whether to recover or resume unwinding.
	checkunwindbb := llvm.AddBasicBlock(fr.function, "")
	fr.builder.SetInsertPointAtEnd(checkunwindbb)
	exc := fr.createLandingPad(true)
	fr.runDefers()

	// NOTE(review): frameptr appears to be set non-zero by the runtime
	// when a defer recovers the panic — confirm against checkDefer.
	frame := fr.builder.CreateLoad(fr.frameptr, "")
	shouldresume := fr.builder.CreateIsNull(frame, "")

	resumebb := llvm.AddBasicBlock(fr.function, "")
	fr.builder.CreateCondBr(shouldresume, resumebb, recoverbb)

	fr.builder.SetInsertPointAtEnd(resumebb)
	fr.builder.CreateResume(exc)

	// The unwind block proper: catch the panic, invoke checkDefer
	// (unwinding to checkunwindbb), run defers, and branch to recover.
	fr.builder.SetInsertPointAtEnd(fr.unwindBlock)
	fr.createLandingPad(false)
	fr.runtime.checkDefer.invoke(fr, checkunwindbb, fr.frameptr)
	fr.runDefers()
	fr.builder.CreateBr(recoverbb)
}
624
// translateBlock emits code for the instructions of b into llb, and
// records the final insertion block (which may differ from llb if
// instruction translation created new blocks) for phi fixup.
func (fr *frame) translateBlock(b *ssa.BasicBlock, llb llvm.BasicBlock) {
	fr.builder.SetInsertPointAtEnd(llb)
	for _, instr := range b.Instrs {
		fr.instruction(instr)
	}
	fr.lastBlocks[b.Index] = fr.builder.GetInsertBlock()
}
632
// block returns the LLVM basic block created for b.
func (fr *frame) block(b *ssa.BasicBlock) llvm.BasicBlock {
	return fr.blocks[b.Index]
}
636
// lastBlock returns the final LLVM block emitted while translating b,
// which may differ from block(b) if translation created extra blocks.
func (fr *frame) lastBlock(b *ssa.BasicBlock) llvm.BasicBlock {
	return fr.lastBlocks[b.Index]
}
640
// value returns the *govalue for the given SSA value, materializing
// functions, constants and external globals on demand. It panics if v is
// an instruction result that has not been translated yet.
func (fr *frame) value(v ssa.Value) (result *govalue) {
	switch v := v.(type) {
	case nil:
		return nil
	case *ssa.Function:
		return fr.resolveFunctionDescriptor(v)
	case *ssa.Const:
		return fr.newValueFromConst(v.Value, v.Type())
	case *ssa.Global:
		if g, ok := fr.globals[v]; ok {
			return newValue(g, v.Type())
		}
		// Create an external global. Globals for this package are defined
		// on entry to translatePackage, and have initialisers.
		llelemtyp := fr.llvmtypes.ToLLVM(deref(v.Type()))
		vname := fr.types.mc.mangleGlobalName(v)
		llglobal := llvm.AddGlobal(fr.module.Module, llelemtyp, vname)
		llglobal = llvm.ConstBitCast(llglobal, fr.llvmtypes.ToLLVM(v.Type()))
		fr.globals[v] = llglobal
		return newValue(llglobal, v.Type())
	}
	// Anything else must have been produced by a previously translated
	// instruction (blocks are visited in dominator preorder).
	if value, ok := fr.env[v]; ok {
		return value
	}

	panic("Instruction not visited yet")
}
668
669func (fr *frame) llvmvalue(v ssa.Value) llvm.Value {
670 if gv := fr.value(v); gv != nil {
671 return gv.value
672 } else {
673 return llvm.Value{nil}
674 }
675}
676
677func (fr *frame) isNonNull(v ssa.Value) bool {
678 switch v.(type) {
679 case
680 // Globals have a fixed (non-nil) address.
681 *ssa.Global,
682 // The language does not specify what happens if an allocation fails.
683 *ssa.Alloc,
684 // These have already been nil checked.
685 *ssa.FieldAddr, *ssa.IndexAddr:
686 return true
687 default:
688 return false
689 }
690}
691
// nilCheck emits a nil-pointer check on llptr, branching to the
// NIL_DEREFERENCE runtime error when it is null. The check is omitted
// when v is statically known to be non-nil (see isNonNull).
func (fr *frame) nilCheck(v ssa.Value, llptr llvm.Value) {
	if !fr.isNonNull(v) {
		ptrnull := fr.builder.CreateIsNull(llptr, "")
		fr.condBrRuntimeError(ptrnull, gccgoRuntimeErrorNIL_DEREFERENCE)
	}
}
698
699func (fr *frame) canAvoidElementLoad(refs []ssa.Instruction) bool {
700 for _, ref := range refs {
701 switch ref.(type) {
702 case *ssa.Field, *ssa.Index:
703 // ok
704 default:
705 return false
706 }
707 }
708
709 return true
710}
711
// If this value is sufficiently large, look through referrers to see if we can
// avoid a load.
func (fr *frame) canAvoidLoad(instr *ssa.UnOp, op llvm.Value) bool {
	if fr.types.Sizeof(instr.Type()) < 16 {
		// Don't bother with small values.
		return false
	}

	// Keep track of whether our pointer may escape. We conservatively assume
	// that MakeInterfaces will escape.
	esc := false

	// We only know how to avoid loads if they are used to create an interface
	// or read an element of the structure. If we see any other referrer, abort.
	for _, ref := range *instr.Referrers() {
		switch ref.(type) {
		case *ssa.MakeInterface:
			esc = true
		case *ssa.Field, *ssa.Index:
			// ok
		default:
			return false
		}
	}

	// Copy the pointed-to value: to the heap when it may escape,
	// otherwise to a stack slot.
	var opcopy llvm.Value
	if esc {
		opcopy = fr.createTypeMalloc(instr.Type())
	} else {
		opcopy = fr.allocaBuilder.CreateAlloca(fr.types.ToLLVM(instr.Type()), "")
	}
	fr.memcpy(opcopy, op, llvm.ConstInt(fr.types.inttype, uint64(fr.types.Sizeof(instr.Type())), false))

	// Record the copy's address so Field/Index referrers can access
	// elements directly instead of loading the whole value.
	fr.ptr[instr] = opcopy
	return true
}
748
749// Return true iff we think it might be beneficial to turn this alloc instruction
750// into a statically allocated global.
751// Precondition: we are compiling the init function.
752func (fr *frame) shouldStaticallyAllocate(alloc *ssa.Alloc) bool {
753 // First, see if the allocated type is an array or struct, and if so determine
754 // the number of elements in the type. If the type is anything else, we
755 // statically allocate unconditionally.
756 var numElems int64
757 switch ty := deref(alloc.Type()).Underlying().(type) {
758 case *types.Array:
759 numElems = ty.Len()
760 case *types.Struct:
761 numElems = int64(ty.NumFields())
762 default:
763 return true
764 }
765
766 // We treat the number of referrers to the alloc instruction as a rough
767 // proxy for the number of elements initialized. If the data structure
768 // is densely initialized (> 1/4 elements initialized), enable the
769 // optimization.
770 return int64(len(*alloc.Referrers()))*4 > numElems
771}
772
// If val is a constant and addr refers to a global variable which is defined in
// this module or an element thereof, simulate the effect of storing val at addr
// in the global variable's initializer and return true, otherwise return false.
// Precondition: we are compiling the init function.
func (fr *frame) maybeStoreInInitializer(val, addr llvm.Value) bool {
	if val.IsAConstant().IsNil() {
		// Non-constant stores must be executed at run time.
		return false
	}

	if !addr.IsAConstantExpr().IsNil() && addr.OperandsCount() >= 2 &&
		// TODO(pcc): Explicitly check that this is a constant GEP.
		// I don't think there are any other kinds of constantexpr which
		// satisfy the conditions we test for here, so this is probably safe.
		!addr.Operand(0).IsAGlobalVariable().IsNil() &&
		addr.Operand(1).IsNull() {
		// addr looks like a GEP into a global defined here: translate
		// its trailing constant indices into the globalInit tree.
		gv := addr.Operand(0)
		globalInit, ok := fr.globalInits[gv]
		if !ok {
			return false
		}
		indices := make([]uint32, addr.OperandsCount()-2)
		for i := range indices {
			op := addr.Operand(i + 2)
			if op.IsAConstantInt().IsNil() {
				// Dynamic index: cannot be folded into the initializer.
				return false
			}
			indices[i] = uint32(op.ZExtValue())
		}
		globalInit.update(gv.Type().ElementType(), indices, val)
		return true
	} else if !addr.IsAGlobalVariable().IsNil() {
		// Direct store to a global defined here: replace the whole
		// initializer value.
		if globalInit, ok := fr.globalInits[addr]; ok {
			globalInit.update(addr.Type().ElementType(), nil, val)
			return true
		}
		return false
	} else {
		return false
	}
}
813
814func (fr *frame) instruction(instr ssa.Instruction) {
815 fr.logf("[%T] %v @ %s\n", instr, instr, fr.pkg.Prog.Fset.Position(instr.Pos()))
816 if fr.GenerateDebug {
817 fr.debug.SetLocation(fr.builder, instr.Pos())
818 }
819
820 switch instr := instr.(type) {
821 case *ssa.Alloc:
822 typ := deref(instr.Type())
823 llvmtyp := fr.llvmtypes.ToLLVM(typ)
824 var value llvm.Value
825 if !instr.Heap {
826 value = fr.env[instr].value
827 fr.memsetZero(value, llvm.SizeOf(llvmtyp))
828 } else if fr.isInit && fr.shouldStaticallyAllocate(instr) {
829 // If this is the init function and we think it may be beneficial,
830 // allocate memory statically in the object file rather than on the
831 // heap. This allows us to optimize constant stores into such
832 // variables as static initializations.
833 global := llvm.AddGlobal(fr.module.Module, llvmtyp, "")
834 global.SetLinkage(llvm.InternalLinkage)
835 fr.addGlobal(global, typ)
836 ptr := llvm.ConstBitCast(global, llvm.PointerType(llvm.Int8Type(), 0))
837 fr.env[instr] = newValue(ptr, instr.Type())
838 } else {
839 value = fr.createTypeMalloc(typ)
840 value.SetName(instr.Comment)
841 value = fr.builder.CreateBitCast(value, llvm.PointerType(llvm.Int8Type(), 0), "")
842 fr.env[instr] = newValue(value, instr.Type())
843 }
844
845 case *ssa.BinOp:
846 lhs, rhs := fr.value(instr.X), fr.value(instr.Y)
847 fr.env[instr] = fr.binaryOp(lhs, instr.Op, rhs)
848
849 case *ssa.Call:
850 tuple := fr.callInstruction(instr)
851 if len(tuple) == 1 {
852 fr.env[instr] = tuple[0]
853 } else {
854 fr.tuples[instr] = tuple
855 }
856
857 case *ssa.ChangeInterface:
858 x := fr.value(instr.X)
859 // The source type must be a non-empty interface,
860 // as ChangeInterface cannot fail (E2I may fail).
861 if instr.Type().Underlying().(*types.Interface).NumMethods() > 0 {
862 x = fr.changeInterface(x, instr.Type(), false)
863 } else {
864 x = fr.convertI2E(x)
865 }
866 fr.env[instr] = x
867
868 case *ssa.ChangeType:
869 value := fr.llvmvalue(instr.X)
870 if _, ok := instr.Type().Underlying().(*types.Pointer); ok {
871 value = fr.builder.CreateBitCast(value, fr.llvmtypes.ToLLVM(instr.Type()), "")
872 }
873 fr.env[instr] = newValue(value, instr.Type())
874
875 case *ssa.Convert:
876 v := fr.value(instr.X)
877 fr.env[instr] = fr.convert(v, instr.Type())
878
879 case *ssa.Defer:
880 fn, arg := fr.createThunk(instr)
881 fr.runtime.Defer.call(fr, fr.frameptr, fn, arg)
882
883 case *ssa.Extract:
884 var elem llvm.Value
885 if t, ok := fr.tuples[instr.Tuple]; ok {
886 elem = t[instr.Index].value
887 } else {
888 tuple := fr.llvmvalue(instr.Tuple)
889 elem = fr.builder.CreateExtractValue(tuple, instr.Index, instr.Name())
890 }
891 elemtyp := instr.Type()
892 fr.env[instr] = newValue(elem, elemtyp)
893
894 case *ssa.Field:
895 fieldtyp := instr.Type()
896 if p, ok := fr.ptr[instr.X]; ok {
897 field := fr.builder.CreateStructGEP(p, instr.Field, instr.Name())
898 if fr.canAvoidElementLoad(*instr.Referrers()) {
899 fr.ptr[instr] = field
900 } else {
901 fr.env[instr] = newValue(fr.builder.CreateLoad(field, ""), fieldtyp)
902 }
903 } else {
904 value := fr.llvmvalue(instr.X)
905 field := fr.builder.CreateExtractValue(value, instr.Field, instr.Name())
906 fr.env[instr] = newValue(field, fieldtyp)
907 }
908
909 case *ssa.FieldAddr:
910 ptr := fr.llvmvalue(instr.X)
911 fr.nilCheck(instr.X, ptr)
912 xtyp := instr.X.Type().Underlying().(*types.Pointer).Elem()
913 ptrtyp := llvm.PointerType(fr.llvmtypes.ToLLVM(xtyp), 0)
914 ptr = fr.builder.CreateBitCast(ptr, ptrtyp, "")
915 fieldptr := fr.builder.CreateStructGEP(ptr, instr.Field, instr.Name())
916 fieldptr = fr.builder.CreateBitCast(fieldptr, llvm.PointerType(llvm.Int8Type(), 0), "")
917 fieldptrtyp := instr.Type()
918 fr.env[instr] = newValue(fieldptr, fieldptrtyp)
919
920 case *ssa.Go:
921 fn, arg := fr.createThunk(instr)
922 fr.runtime.Go.call(fr, fn, arg)
923
924 case *ssa.If:
925 cond := fr.llvmvalue(instr.Cond)
926 block := instr.Block()
927 trueBlock := fr.block(block.Succs[0])
928 falseBlock := fr.block(block.Succs[1])
929 cond = fr.builder.CreateTrunc(cond, llvm.Int1Type(), "")
930 fr.builder.CreateCondBr(cond, trueBlock, falseBlock)
931
932 case *ssa.Index:
933 var arrayptr llvm.Value
934
935 if ptr, ok := fr.ptr[instr.X]; ok {
936 arrayptr = ptr
937 } else {
938 array := fr.llvmvalue(instr.X)
939 arrayptr = fr.allocaBuilder.CreateAlloca(array.Type(), "")
940
941 fr.builder.CreateStore(array, arrayptr)
942 }
943 index := fr.llvmvalue(instr.Index)
944
945 arraytyp := instr.X.Type().Underlying().(*types.Array)
946 arraylen := llvm.ConstInt(fr.llvmtypes.inttype, uint64(arraytyp.Len()), false)
947
948 // The index may not have been promoted to int (for example, if it
949 // came from a composite literal).
950 index = fr.createZExtOrTrunc(index, fr.types.inttype, "")
951
952 // Bounds checking: 0 <= index < len
953 zero := llvm.ConstNull(fr.types.inttype)
954 i0 := fr.builder.CreateICmp(llvm.IntSLT, index, zero, "")
955 li := fr.builder.CreateICmp(llvm.IntSLE, arraylen, index, "")
956
957 cond := fr.builder.CreateOr(i0, li, "")
958
959 fr.condBrRuntimeError(cond, gccgoRuntimeErrorARRAY_INDEX_OUT_OF_BOUNDS)
960
961 addr := fr.builder.CreateGEP(arrayptr, []llvm.Value{zero, index}, "")
962 if fr.canAvoidElementLoad(*instr.Referrers()) {
963 fr.ptr[instr] = addr
964 } else {
965 fr.env[instr] = newValue(fr.builder.CreateLoad(addr, ""), instr.Type())
966 }
967
968 case *ssa.IndexAddr:
969 x := fr.llvmvalue(instr.X)
970 index := fr.llvmvalue(instr.Index)
971 var arrayptr, arraylen llvm.Value
972 var elemtyp types.Type
973 var errcode uint64
974 switch typ := instr.X.Type().Underlying().(type) {
975 case *types.Slice:
976 elemtyp = typ.Elem()
977 arrayptr = fr.builder.CreateExtractValue(x, 0, "")
978 arraylen = fr.builder.CreateExtractValue(x, 1, "")
979 errcode = gccgoRuntimeErrorSLICE_INDEX_OUT_OF_BOUNDS
980 case *types.Pointer: // *array
981 arraytyp := typ.Elem().Underlying().(*types.Array)
982 elemtyp = arraytyp.Elem()
983 fr.nilCheck(instr.X, x)
984 arrayptr = x
985 arraylen = llvm.ConstInt(fr.llvmtypes.inttype, uint64(arraytyp.Len()), false)
986 errcode = gccgoRuntimeErrorARRAY_INDEX_OUT_OF_BOUNDS
987 }
988
989 // The index may not have been promoted to int (for example, if it
990 // came from a composite literal).
991 index = fr.createZExtOrTrunc(index, fr.types.inttype, "")
992
993 // Bounds checking: 0 <= index < len
994 zero := llvm.ConstNull(fr.types.inttype)
995 i0 := fr.builder.CreateICmp(llvm.IntSLT, index, zero, "")
996 li := fr.builder.CreateICmp(llvm.IntSLE, arraylen, index, "")
997
998 cond := fr.builder.CreateOr(i0, li, "")
999
1000 fr.condBrRuntimeError(cond, errcode)
1001
1002 ptrtyp := llvm.PointerType(fr.llvmtypes.ToLLVM(elemtyp), 0)
1003 arrayptr = fr.builder.CreateBitCast(arrayptr, ptrtyp, "")
1004 addr := fr.builder.CreateGEP(arrayptr, []llvm.Value{index}, "")
1005 addr = fr.builder.CreateBitCast(addr, llvm.PointerType(llvm.Int8Type(), 0), "")
1006 fr.env[instr] = newValue(addr, types.NewPointer(elemtyp))
1007
1008 case *ssa.Jump:
1009 succ := instr.Block().Succs[0]
1010 fr.builder.CreateBr(fr.block(succ))
1011
1012 case *ssa.Lookup:
1013 x := fr.value(instr.X)
1014 index := fr.value(instr.Index)
1015 if isString(x.Type().Underlying()) {
1016 fr.env[instr] = fr.stringIndex(x, index)
1017 } else {
1018 v, ok := fr.mapLookup(x, index)
1019 if instr.CommaOk {
1020 fr.tuples[instr] = []*govalue{v, ok}
1021 } else {
1022 fr.env[instr] = v
1023 }
1024 }
1025
1026 case *ssa.MakeChan:
1027 fr.env[instr] = fr.makeChan(instr.Type(), fr.value(instr.Size))
1028
1029 case *ssa.MakeClosure:
1030 llfn := fr.resolveFunctionGlobal(instr.Fn.(*ssa.Function))
1031 llfn = llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0))
1032 fn := newValue(llfn, instr.Fn.(*ssa.Function).Signature)
1033 bindings := make([]*govalue, len(instr.Bindings))
1034 for i, binding := range instr.Bindings {
1035 bindings[i] = fr.value(binding)
1036 }
1037 fr.env[instr] = fr.makeClosure(fn, bindings)
1038
1039 case *ssa.MakeInterface:
1040 // fr.ptr[instr.X] will be set if a pointer load was elided by canAvoidLoad
1041 if ptr, ok := fr.ptr[instr.X]; ok {
1042 fr.env[instr] = fr.makeInterfaceFromPointer(ptr, instr.X.Type(), instr.Type())
1043 } else {
1044 receiver := fr.llvmvalue(instr.X)
1045 fr.env[instr] = fr.makeInterface(receiver, instr.X.Type(), instr.Type())
1046 }
1047
1048 case *ssa.MakeMap:
1049 fr.env[instr] = fr.makeMap(instr.Type(), fr.value(instr.Reserve))
1050
1051 case *ssa.MakeSlice:
1052 length := fr.value(instr.Len)
1053 capacity := fr.value(instr.Cap)
1054 fr.env[instr] = fr.makeSlice(instr.Type(), length, capacity)
1055
1056 case *ssa.MapUpdate:
1057 m := fr.value(instr.Map)
1058 k := fr.value(instr.Key)
1059 v := fr.value(instr.Value)
1060 fr.mapUpdate(m, k, v)
1061
1062 case *ssa.Next:
1063 iter := fr.tuples[instr.Iter]
1064 if instr.IsString {
1065 fr.tuples[instr] = fr.stringIterNext(iter)
1066 } else {
1067 fr.tuples[instr] = fr.mapIterNext(iter)
1068 }
1069
1070 case *ssa.Panic:
1071 arg := fr.value(instr.X)
1072 fr.callPanic(arg)
1073
1074 case *ssa.Phi:
1075 typ := instr.Type()
1076 phi := fr.builder.CreatePHI(fr.llvmtypes.ToLLVM(typ), instr.Comment)
1077 fr.env[instr] = newValue(phi, typ)
1078 fr.phis = append(fr.phis, pendingPhi{instr, phi})
1079
1080 case *ssa.Range:
1081 x := fr.value(instr.X)
1082 switch x.Type().Underlying().(type) {
1083 case *types.Map:
1084 fr.tuples[instr] = fr.mapIterInit(x)
1085 case *types.Basic: // string
1086 fr.tuples[instr] = fr.stringIterInit(x)
1087 default:
1088 panic(fmt.Sprintf("unhandled range for type %T", x.Type()))
1089 }
1090
1091 case *ssa.Return:
1092 vals := make([]llvm.Value, len(instr.Results))
1093 for i, res := range instr.Results {
1094 vals[i] = fr.llvmvalue(res)
1095 }
1096 fr.retInf.encode(llvm.GlobalContext(), fr.allocaBuilder, fr.builder, vals)
1097
1098 case *ssa.RunDefers:
1099 fr.runDefers()
1100
1101 case *ssa.Select:
1102 states := make([]selectState, len(instr.States))
1103 for i, state := range instr.States {
1104 states[i] = selectState{
1105 Dir: state.Dir,
1106 Chan: fr.value(state.Chan),
1107 Send: fr.value(state.Send),
1108 }
1109 }
1110 index, recvOk, recvElems := fr.chanSelect(states, instr.Blocking)
1111 tuple := append([]*govalue{index, recvOk}, recvElems...)
1112 fr.tuples[instr] = tuple
1113
1114 case *ssa.Send:
1115 fr.chanSend(fr.value(instr.Chan), fr.value(instr.X))
1116
1117 case *ssa.Slice:
1118 x := fr.llvmvalue(instr.X)
1119 low := fr.llvmvalue(instr.Low)
1120 high := fr.llvmvalue(instr.High)
1121 max := fr.llvmvalue(instr.Max)
1122 slice := fr.slice(x, instr.X.Type(), low, high, max)
1123 fr.env[instr] = newValue(slice, instr.Type())
1124
1125 case *ssa.Store:
1126 addr := fr.llvmvalue(instr.Addr)
1127 value := fr.llvmvalue(instr.Val)
1128 addr = fr.builder.CreateBitCast(addr, llvm.PointerType(value.Type(), 0), "")
1129 // If this is the init function, see if we can simulate the effect
1130 // of the store in a global's initializer, in which case we can avoid
1131 // generating code for it.
1132 if !fr.isInit || !fr.maybeStoreInInitializer(value, addr) {
1133 fr.nilCheck(instr.Addr, addr)
1134 fr.builder.CreateStore(value, addr)
1135 }
1136
1137 case *ssa.TypeAssert:
1138 x := fr.value(instr.X)
1139 if instr.CommaOk {
1140 v, ok := fr.interfaceTypeCheck(x, instr.AssertedType)
1141 fr.tuples[instr] = []*govalue{v, ok}
1142 } else {
1143 fr.env[instr] = fr.interfaceTypeAssert(x, instr.AssertedType)
1144 }
1145
1146 case *ssa.UnOp:
1147 operand := fr.value(instr.X)
1148 switch instr.Op {
1149 case token.ARROW:
1150 x, ok := fr.chanRecv(operand, instr.CommaOk)
1151 if instr.CommaOk {
1152 fr.tuples[instr] = []*govalue{x, ok}
1153 } else {
1154 fr.env[instr] = x
1155 }
1156 case token.MUL:
1157 fr.nilCheck(instr.X, operand.value)
1158 if !fr.canAvoidLoad(instr, operand.value) {
1159 // The bitcast is necessary to handle recursive pointer loads.
1160 llptr := fr.builder.CreateBitCast(operand.value, llvm.PointerType(fr.llvmtypes.ToLLVM(instr.Type()), 0), "")
1161 fr.env[instr] = newValue(fr.builder.CreateLoad(llptr, ""), instr.Type())
1162 }
1163 default:
1164 fr.env[instr] = fr.unaryOp(operand, instr.Op)
1165 }
1166
1167 default:
1168 panic(fmt.Sprintf("unhandled: %v", instr))
1169 }
1170}
1171
1172func (fr *frame) callBuiltin(typ types.Type, builtin *ssa.Builtin, args []ssa.Value) []*govalue {
1173 switch builtin.Name() {
1174 case "print", "println":
1175 llargs := make([]*govalue, len(args))
1176 for i, arg := range args {
1177 llargs[i] = fr.value(arg)
1178 }
1179 fr.printValues(builtin.Name() == "println", llargs...)
1180 return nil
1181
1182 case "panic":
1183 fr.callPanic(fr.value(args[0]))
1184 return nil
1185
1186 case "recover":
1187 return []*govalue{fr.callRecover(false)}
1188
1189 case "append":
1190 return []*govalue{fr.callAppend(fr.value(args[0]), fr.value(args[1]))}
1191
1192 case "close":
1193 fr.chanClose(fr.value(args[0]))
1194 return nil
1195
1196 case "cap":
1197 return []*govalue{fr.callCap(fr.value(args[0]))}
1198
1199 case "len":
1200 return []*govalue{fr.callLen(fr.value(args[0]))}
1201
1202 case "copy":
1203 return []*govalue{fr.callCopy(fr.value(args[0]), fr.value(args[1]))}
1204
1205 case "delete":
1206 fr.mapDelete(fr.value(args[0]), fr.value(args[1]))
1207 return nil
1208
1209 case "real":
1210 return []*govalue{fr.extractRealValue(fr.value(args[0]))}
1211
1212 case "imag":
1213 return []*govalue{fr.extractImagValue(fr.value(args[0]))}
1214
1215 case "complex":
1216 r := fr.llvmvalue(args[0])
1217 i := fr.llvmvalue(args[1])
1218 cmplx := llvm.Undef(fr.llvmtypes.ToLLVM(typ))
1219 cmplx = fr.builder.CreateInsertValue(cmplx, r, 0, "")
1220 cmplx = fr.builder.CreateInsertValue(cmplx, i, 1, "")
1221 return []*govalue{newValue(cmplx, typ)}
1222
1223 case "ssa:wrapnilchk":
1224 ptr := fr.value(args[0])
1225 fr.nilCheck(args[0], ptr.value)
1226 return []*govalue{ptr}
1227
1228 default:
1229 panic("unimplemented: " + builtin.Name())
1230 }
1231}
1232
1233// callInstruction translates function call instructions.
1234func (fr *frame) callInstruction(instr ssa.CallInstruction) []*govalue {
1235 call := instr.Common()
1236 if builtin, ok := call.Value.(*ssa.Builtin); ok {
1237 var typ types.Type
1238 if v := instr.Value(); v != nil {
1239 typ = v.Type()
1240 }
1241 return fr.callBuiltin(typ, builtin, call.Args)
1242 }
1243
1244 args := make([]*govalue, len(call.Args))
1245 for i, arg := range call.Args {
1246 args[i] = fr.value(arg)
1247 }
1248
1249 var fn *govalue
1250 if call.IsInvoke() {
1251 var recv *govalue
1252 fn, recv = fr.interfaceMethod(fr.llvmvalue(call.Value), call.Value.Type(), call.Method)
1253 args = append([]*govalue{recv}, args...)
1254 } else {
1255 if ssafn, ok := call.Value.(*ssa.Function); ok {
1256 llfn := fr.resolveFunctionGlobal(ssafn)
1257 llfn = llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0))
1258 fn = newValue(llfn, ssafn.Type())
1259 } else {
1260 // First-class function values are stored as *{*fnptr}, so
1261 // we must extract the function pointer. We must also
1262 // call __go_set_closure, in case the function is a closure.
1263 fn = fr.value(call.Value)
1264 fr.runtime.setClosure.call(fr, fn.value)
1265 fnptr := fr.builder.CreateBitCast(fn.value, llvm.PointerType(fn.value.Type(), 0), "")
1266 fnptr = fr.builder.CreateLoad(fnptr, "")
1267 fn = newValue(fnptr, fn.Type())
1268 }
1269 if recv := call.Signature().Recv(); recv != nil {
1270 if _, ok := recv.Type().Underlying().(*types.Pointer); !ok {
1271 recvalloca := fr.allocaBuilder.CreateAlloca(args[0].value.Type(), "")
1272 fr.builder.CreateStore(args[0].value, recvalloca)
1273 args[0] = newValue(recvalloca, types.NewPointer(args[0].Type()))
1274 }
1275 }
1276 }
1277 return fr.createCall(fn, args)
1278}
1279
1280func hasDefer(f *ssa.Function) bool {
1281 for _, b := range f.Blocks {
1282 for _, instr := range b.Instrs {
1283 if _, ok := instr.(*ssa.Defer); ok {
1284 return true
1285 }
1286 }
1287 }
1288 return false
1289}
1290
1291func callsRecover(f *ssa.Function) bool {
1292 for _, b := range f.Blocks {
1293 for _, instr := range b.Instrs {
1294 if instr, ok := instr.(ssa.CallInstruction); ok {
1295 b, ok := instr.Common().Value.(*ssa.Builtin)
1296 if ok && b.Name() == "recover" {
1297 return true
1298 }
1299 }
1300 }
1301 }
1302 return false
1303}