1 // Copyright 2013 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
7 // This file implements the BUILD phase of IR construction.
9 // IR construction has two phases, CREATE and BUILD. In the CREATE phase
10 // (create.go), all packages are constructed and type-checked and
11 // definitions of all package members are created, method-sets are
12 // computed, and wrapper methods are synthesized.
13 // ir.Packages are created in arbitrary order.
15 // In the BUILD phase (builder.go), the builder traverses the AST of
16 // each Go source function and generates IR instructions for the
17 // function body. Initializer expressions for package-level variables
18 // are emitted to the package's init() function in the order specified
19 // by go/types.Info.InitOrder, then code for each function in the
20 // package is generated in lexical order.
22 // The builder's and Program's indices (maps) are populated and
23 // mutated during the CREATE phase, but during the BUILD phase they
24 // remain constant. The sole exception is Prog.methodSets and its
25 // related maps, which are protected by a dedicated mutex.
// opaqueType is a synthetic, non-Go type used for IR values that have
// no source-level spelling (e.g. the hidden "range" iterator state).
// NOTE(review): the struct's field list (including the name field that
// String reads) is elided in this view of the source — confirm against
// the full file.
36 type opaqueType struct {
// String implements fmt.Stringer by returning the opaque type's name.
41 func (t *opaqueType) String() string { return t.name }
// Shared synthetic variables and frequently used types.
// NOTE(review): the enclosing `var (...)` group markers are elided in
// this view of the source.
// varOk/varIndex name the second component of "comma-ok" and
// "index" tuple results.
44 varOk = newVar("ok", tBool)
45 varIndex = newVar("index", tInt)
// Cached references to commonly used basic types, to avoid repeated
// indexing of types.Typ at each use site.
48 tBool = types.Typ[types.Bool]
49 tByte = types.Typ[types.Byte]
50 tInt = types.Typ[types.Int]
51 tInvalid = types.Typ[types.Invalid]
52 tString = types.Typ[types.String]
53 tUntypedNil = types.Typ[types.UntypedNil]
54 tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators
// tEface is the empty interface type, interface{}; Complete() freezes it.
55 tEface = types.NewInterfaceType(nil, nil).Complete()
58 // builder holds state associated with the package currently being built.
59 // Its methods contain all the logic for AST-to-IR conversion.
66 // cond emits to fn code to evaluate boolean condition e and jump
67 // to t or f depending on its value, performing various simplifications.
69 // Postcondition: fn.currentBlock is nil.
// NOTE(review): several `case` labels of the type switch below (likely
// *ast.ParenExpr, *ast.BinaryExpr with LAND/LOR, *ast.UnaryExpr) and
// closing braces are elided in this view — confirm against the full file.
71 func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) *If {
72 switch e := e.(type) {
// Parenthesized condition: recurse on the inner expression.
74 return b.cond(fn, e.X, t, f)
// Short-circuit AND: if e.X is false, jump straight to f;
// otherwise fall through to evaluate e.Y in ltrue.
79 ltrue := fn.newBasicBlock("cond.true")
80 b.cond(fn, e.X, ltrue, f)
81 fn.currentBlock = ltrue
82 return b.cond(fn, e.Y, t, f)
// Short-circuit OR: if e.X is true, jump straight to t;
// otherwise evaluate e.Y in lfalse.
85 lfalse := fn.newBasicBlock("cond.false")
86 b.cond(fn, e.X, t, lfalse)
87 fn.currentBlock = lfalse
88 return b.cond(fn, e.Y, t, f)
// Logical negation: swap the true/false targets.
92 if e.Op == token.NOT {
93 return b.cond(fn, e.X, f, t)
97 // A traditional compiler would simplify "if false" (etc) here
98 // but we do not, for better fidelity to the source code.
100 // The value of a constant condition may be platform-specific,
101 // and may cause blocks that are reachable in some configuration
102 // to be hidden from subsequent analyses such as bug-finding tools.
// General case: materialize the boolean and branch on it.
103 return emitIf(fn, b.expr(fn, e), t, f, e)
106 // logicalBinop emits code to fn to evaluate e, a &&- or
107 // ||-expression whose reified boolean value is wanted.
108 // The value is returned.
// NOTE(review): the switch/case scaffolding selecting between LAND and
// LOR, and several closing braces, are elided in this view of the source.
110 func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
111 rhs := fn.newBasicBlock("binop.rhs")
112 done := fn.newBasicBlock("binop.done")
114 // T(e) = T(e.X) = T(e.Y) after untyped constants have been
116 // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool.
117 t := fn.Pkg.typeOf(e)
119 var short Value // value of the short-circuit path
// LAND: e.X false short-circuits to done with value false.
122 b.cond(fn, e.X, rhs, done)
123 short = emitConst(fn, NewConst(constant.MakeBool(false), t))
// LOR: e.X true short-circuits to done with value true.
126 b.cond(fn, e.X, done, rhs)
127 short = emitConst(fn, NewConst(constant.MakeBool(true), t))
130 // Is rhs unreachable?
131 if rhs.Preds == nil {
132 // Simplify false&&y to false, true||y to true.
133 fn.currentBlock = done
137 // Is done unreachable?
138 if done.Preds == nil {
139 // Simplify true&&y (or false||y) to y.
140 fn.currentBlock = rhs
141 return b.expr(fn, e.Y)
144 // All edges from e.X to done carry the short-circuit value.
146 for range done.Preds {
147 edges = append(edges, short)
150 // The edge from e.Y to done carries the value of e.Y.
151 fn.currentBlock = rhs
152 edges = append(edges, b.expr(fn, e.Y))
153 emitJump(fn, done, e)
154 fn.currentBlock = done
// Merge the short-circuit and rhs values with a phi node.
156 phi := &Phi{Edges: edges}
158 return done.emit(phi, e)
161 // exprN lowers a multi-result expression e to IR form, emitting code
162 // to fn and returning a single Value whose type is a *types.Tuple.
163 // The caller must access the components via Extract.
165 // Multi-result expressions include CallExprs in a multi-value
166 // assignment or return statement, and "value,ok" uses of
167 // TypeAssertExpr, IndexExpr (when X is a map), and Recv.
// NOTE(review): the *ast.ParenExpr, *ast.CallExpr and *ast.IndexExpr
// case labels (and some statements inside them) are elided in this view.
169 func (b *builder) exprN(fn *Function, e ast.Expr) Value {
170 typ := fn.Pkg.typeOf(e).(*types.Tuple)
171 switch e := e.(type) {
// Parenthesized expression: recurse on the inner expression.
173 return b.exprN(fn, e.X)
176 // Currently, no built-in function nor type conversion
177 // has multiple results, so we can avoid some of the
178 // cases for single-valued CallExpr.
180 b.setCall(fn, e, &c.Call)
182 return fn.emit(&c, e)
// Map index in comma-ok position: v, ok := m[k].
185 mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
186 lookup := &MapLookup{
188 Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key(), e),
192 return fn.emit(lookup, e)
194 case *ast.TypeAssertExpr:
// v, ok := x.(T) — emitTypeTest produces the (value, ok) tuple.
195 return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e)
197 case *ast.UnaryExpr: // must be receive <-
// v, ok := <-ch — the true flag requests the comma-ok form.
198 return emitRecv(fn, b.expr(fn, e.X), true, typ, e)
200 panic(fmt.Sprintf("exprN(%T) in %s", e, fn))
203 // builtin emits to fn IR instructions to implement a call to the
204 // built-in function obj with the specified arguments
205 // and return type. It returns the value defined by the result.
207 // The result is nil if no special handling was required; in this case
208 // the caller should treat this like an ordinary library function
// NOTE(review): the outer switch on the builtin's name (make/new/len/
// cap/panic/...), several case labels, and closing braces are elided in
// this view of the source — confirm against the full file.
211 func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, source ast.Node) Value {
// make(T, ...): dispatch on the underlying kind of the result type.
214 switch typ.Underlying().(type) {
// make([]T, n[, m]) — length, then optional capacity.
216 n := b.expr(fn, args[1])
219 m = b.expr(fn, args[2])
221 if m, ok := m.(*Const); ok {
222 // treat make([]T, n, m) as new([m]T)[:n]
224 at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap)
225 alloc := emitNew(fn, at, source)
231 return fn.emit(v, source)
238 return fn.emit(v, source)
// make(map[K]V[, reserve]) — optional size hint.
243 res = b.expr(fn, args[1])
245 v := &MakeMap{Reserve: res}
247 return fn.emit(v, source)
// make(chan T[, size]) — buffer size defaults to 0 (unbuffered).
250 var sz Value = emitConst(fn, intConst(0))
252 sz = b.expr(fn, args[1])
254 v := &MakeChan{Size: sz}
256 return fn.emit(v, source)
// new(T): heap allocation of a zeroed T.
260 alloc := emitNew(fn, deref(typ), source)
264 // Special case: len or cap of an array or *array is
265 // based on the type, not the value which may be nil.
266 // We must still evaluate the value, though. (If it
267 // was side-effect free, the whole call would have
268 // been constant-folded.)
269 t := deref(fn.Pkg.typeOf(args[0])).Underlying()
270 if at, ok := t.(*types.Array); ok {
271 b.expr(fn, args[0]) // for effects only
272 return emitConst(fn, intConst(at.Len()))
274 // Otherwise treat as normal.
// panic(x): the argument is converted to interface{}; control then
// transfers to the function's exit, and subsequent code is placed in
// a fresh unreachable block.
278 X: emitConv(fn, b.expr(fn, args[0]), tEface, source),
280 addEdge(fn.currentBlock, fn.Exit)
281 fn.currentBlock = fn.newBasicBlock("unreachable")
282 return emitConst(fn, NewConst(constant.MakeBool(true), tBool)) // any non-nil Value will do
284 return nil // treat all others as a regular function call
287 // addr lowers a single-result addressable expression e to IR form,
288 // emitting code to fn and returning the location (an lvalue) defined
289 // by the expression.
291 // If escaping is true, addr marks the base variable of the
292 // addressable expression e as being a potentially escaping pointer
293 // value. For example, in this code:
300 // the application of & causes a.b[0].c to have its address taken,
301 // which means that ultimately the local variable a must be
302 // heap-allocated. This is a simple but very conservative escape
305 // Operations forming potentially escaping pointers include:
306 // - &x, including when implicit in method call or composite literals.
307 // - a[:] iff a is an array (not *array)
308 // - references to variables in lexically enclosing functions.
// NOTE(review): case labels (*ast.Ident, *ast.ParenExpr, *ast.IndexExpr,
// *ast.StarExpr), several statements, and closing braces are elided in
// this view of the source.
310 func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
311 switch e := e.(type) {
// Identifier: a package-level var yields its address directly;
// otherwise look up the (possibly escaping) local/free variable.
316 obj := fn.Pkg.objectOf(e)
317 v := fn.Prog.packageLevelValue(obj) // var (address)
319 v = fn.lookup(obj, escaping)
321 return &address{addr: v, expr: e}
323 case *ast.CompositeLit:
// Composite literal in addressable context: allocate storage
// (heap if escaping, else a local) and initialize in place.
324 t := deref(fn.Pkg.typeOf(e))
327 v = emitNew(fn, t, e)
329 v = fn.addLocal(t, e)
332 b.compLit(fn, v, e, true, &sb)
334 return &address{addr: v, expr: e}
// Parenthesized expression: recurse.
337 return b.addr(fn, e.X, escaping)
339 case *ast.SelectorExpr:
340 sel, ok := fn.Pkg.info.Selections[e]
342 // qualified identifier
343 return b.addr(fn, e.Sel, escaping)
345 if sel.Kind() != types.FieldVal {
// Field selection: compute the receiver's address, then the
// address of the final field in the selection path.
349 v := b.receiver(fn, e.X, wantAddr, escaping, sel, e)
350 last := len(sel.Index()) - 1
352 addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel),
// Index expression: dispatch on the container's underlying type.
359 switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
// Array: take the array's own address (potentially escaping).
361 x = b.addr(fn, e.X, escaping).address(fn)
362 et = types.NewPointer(t.Elem())
363 case *types.Pointer: // *array
365 et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())
// Slice: element address through the slice header.
368 et = types.NewPointer(t.Elem())
// Map: a map element lvalue (loadable/storable, not addressable).
372 k: emitConv(fn, b.expr(fn, e.Index), t.Key(), e.Index),
376 panic("unexpected container type in IndexExpr: " + t.String())
380 Index: emitConv(fn, b.expr(fn, e.Index), tInt, e.Index),
383 return &address{addr: fn.emit(v, e), expr: e}
// StarExpr *p: the pointer value itself is the address.
386 return &address{addr: b.expr(fn, e.X), expr: e}
389 panic(fmt.Sprintf("unexpected address expression: %T", e))
// storebuf records a sequence of deferred stores so that all reads of
// an assignment's RHS can occur before any writes to its LHS.
398 type storebuf struct{ stores []store }
// store appends a deferred store of rhs into lhs to the buffer.
400 func (sb *storebuf) store(lhs lvalue, rhs Value, source ast.Node) {
401 sb.stores = append(sb.stores, store{lhs, rhs, source})
// emit performs all buffered stores, in the order they were recorded.
404 func (sb *storebuf) emit(fn *Function) {
405 for _, s := range sb.stores {
406 s.lhs.store(fn, s.rhs, s.source)
410 // assign emits to fn code to initialize the lvalue loc with the value
411 // of expression e. If isZero is true, assign assumes that loc holds
412 // the zero value for its type.
414 // This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate
415 // better code in some cases, e.g., for composite literals in an
416 // addressable location.
418 // If sb is not nil, assign generates code to evaluate expression e, but
419 // not to update loc. Instead, the necessary stores are appended to the
420 // storebuf sb so that they can be executed later. This allows correct
421 // in-place update of existing variables when the RHS is a composite
422 // literal that may reference parts of the LHS.
// NOTE(review): several branch bodies, else arms, and closing braces
// are elided in this view of the source.
424 func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf, source ast.Node) {
425 // Can we initialize it in place?
426 if e, ok := unparen(e).(*ast.CompositeLit); ok {
427 // A CompositeLit never evaluates to a pointer,
428 // so if the type of the location is a pointer,
429 // an &-operation is implied.
430 if _, ok := loc.(blank); !ok { // avoid calling blank.typ()
431 if isPointer(loc.typ()) {
432 ptr := b.addr(fn, e, true).address(fn)
// Deferred vs. immediate store of the implied &T{...} pointer.
435 sb.store(loc, ptr, source)
437 loc.store(fn, ptr, source)
443 if _, ok := loc.(*address); ok {
444 if isInterface(loc.typ()) {
445 // e.g. var x interface{} = T{...}
446 // Can't in-place initialize an interface value.
447 // Fall back to copying.
449 // x = T{...} or x := T{...}
450 addr := loc.address(fn)
// With a caller-supplied storebuf, forward it; otherwise use a
// fresh local buffer (flushed below, per the read-before-write rule).
452 b.compLit(fn, addr, e, isZero, sb)
455 b.compLit(fn, addr, e, isZero, &sb)
459 // Subtle: emit debug ref for aggregate types only;
460 // slice and map are handled by store ops in compLit.
461 switch loc.typ().Underlying().(type) {
462 case *types.Struct, *types.Array:
463 emitDebugRef(fn, e, addr, true)
471 // simple case: just copy
474 sb.store(loc, rhs, source)
476 loc.store(fn, rhs, source)
480 // expr lowers a single-result expression e to IR form, emitting code
481 // to fn and returning the Value defined by the expression.
// NOTE(review): the constant-check conditional and some surrounding
// lines are elided in this view of the source.
483 func (b *builder) expr(fn *Function, e ast.Expr) Value {
486 tv := fn.Pkg.info.Types[e]
488 // Is expression a constant?
// Constants are materialized directly, with no code emitted.
490 return emitConst(fn, NewConst(tv.Value, tv.Type))
494 if tv.Addressable() {
495 // Prefer pointer arithmetic ({Index,Field}Addr) followed
496 // by Load over subelement extraction (e.g. Index, Field),
497 // to avoid large copies.
498 v = b.addr(fn, e, false).load(fn, e)
500 v = b.expr0(fn, e, tv)
// Record a debug reference associating e with the computed value.
503 emitDebugRef(fn, e, v, false)
// expr0 lowers a single-result, non-addressable (or not-yet-loaded)
// expression e to IR, dispatching on the concrete AST node type.
// tv carries e's type-checker information.
// NOTE(review): many case labels, assignments, and closing braces are
// elided throughout this view of the source — confirm against the
// full file before relying on control-flow details.
508 func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
509 switch e := e.(type) {
511 panic("non-constant BasicLit") // unreachable
// Function literal: synthesize an anonymous Function and close over
// any free variables with a MakeClosure.
515 name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)),
516 Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature),
520 functionBody: new(functionBody),
523 fn.AnonFuncs = append(fn.AnonFuncs, fn2)
524 fn2.initHTML(b.printFunc)
526 if fn2.FreeVars == nil {
529 v := &MakeClosure{Fn: fn2}
531 for _, fv := range fn2.FreeVars {
532 v.Bindings = append(v.Bindings, fv.outer)
537 case *ast.TypeAssertExpr: // single-result form only
538 return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e)
541 if fn.Pkg.info.Types[e.Fun].IsType() {
542 // Explicit type conversion, e.g. string(x) or big.Int(x)
543 x := b.expr(fn, e.Args[0])
544 y := emitConv(fn, x, tv.Type, e)
547 // Call to "intrinsic" built-ins, e.g. new, make, panic.
548 if id, ok := unparen(e.Fun).(*ast.Ident); ok {
549 if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok {
550 if v := b.builtin(fn, obj, e.Args, tv.Type, e); v != nil {
555 // Regular function call.
557 b.setCall(fn, e, &v.Call)
559 return fn.emit(&v, e)
563 case token.AND: // &X --- potentially escaping.
564 addr := b.addr(fn, e.X, true)
565 if _, ok := unparen(e.X).(*ast.StarExpr); ok {
566 // &*p must panic if p is nil (http://golang.org/s/go12nil).
567 // For simplicity, we'll just (suboptimally) rely
568 // on the side effects of a load.
569 // TODO(adonovan): emit dedicated nilcheck.
572 return addr.address(fn)
// Unary + is a no-op: the operand's value is the result.
574 return b.expr(fn, e.X)
575 case token.NOT, token.SUB, token.XOR: // ! <- - ^
// Channel receive in single-result position.
583 return emitRecv(fn, b.expr(fn, e.X), false, tv.Type, e)
588 case *ast.BinaryExpr:
590 case token.LAND, token.LOR:
591 return b.logicalBinop(fn, e)
592 case token.SHL, token.SHR:
594 case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
595 return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e)
597 case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ:
598 cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e)
599 // The type of x==y may be UntypedBool.
600 return emitConv(fn, cmp, types.Default(tv.Type), e)
602 panic("illegal op in BinaryExpr: " + e.Op.String())
// Slice expression: optional low/high/max bounds.
606 var low, high, max Value
608 switch fn.Pkg.typeOf(e.X).Underlying().(type) {
610 // Potentially escaping.
611 x = b.addr(fn, e.X, true).address(fn)
612 case *types.Basic, *types.Slice, *types.Pointer: // *array
618 high = b.expr(fn, e.High)
621 low = b.expr(fn, e.Low)
624 max = b.expr(fn, e.Max)
// Identifier: built-in, nil, package-level func/var, or local.
636 obj := fn.Pkg.info.Uses[e]
637 // Universal built-in or nil?
638 switch obj := obj.(type) {
640 return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)}
642 return emitConst(fn, nilConst(tv.Type))
644 // Package-level func or var?
645 if v := fn.Prog.packageLevelValue(obj); v != nil {
646 if _, ok := obj.(*types.Var); ok {
647 return emitLoad(fn, v, e) // var (address)
652 return emitLoad(fn, fn.lookup(obj, false), e) // var (address)
654 case *ast.SelectorExpr:
655 sel, ok := fn.Pkg.info.Selections[e]
657 // qualified identifier
658 return b.expr(fn, e.Sel)
661 case types.MethodExpr:
662 // (*T).f or T.f, the method f from the method-set of type T.
663 // The result is a "thunk".
664 return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type, e)
666 case types.MethodVal:
667 // e.f where e is an expression and f is a method.
668 // The result is a "bound".
669 obj := sel.Obj().(*types.Func)
671 wantAddr := isPointer(rt)
673 v := b.receiver(fn, e.X, wantAddr, escaping, sel, e)
675 // If v has interface type I,
676 // we must emit a check that v is non-nil.
677 // We use: typeassert v.(I).
678 emitTypeAssert(fn, v, rt, e)
681 Fn: makeBound(fn.Prog, obj),
682 Bindings: []Value{v},
// FieldVal: apply any implicit selections, then select the field.
689 indices := sel.Index()
690 last := len(indices) - 1
692 v = emitImplicitSelections(fn, v, indices[:last], e)
693 v = emitFieldSelection(fn, v, indices[last], false, e.Sel)
697 panic("unexpected expression-relative selector")
// IndexExpr: dispatch on the container's underlying type.
700 switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
702 // Non-addressable array (in a register).
705 Index: emitConv(fn, b.expr(fn, e.Index), tInt, e.Index),
711 // Maps are not addressable.
712 mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
715 Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key(), e.Index),
717 v.setType(mapt.Elem())
720 case *types.Basic: // => string
721 // Strings are not addressable.
724 Index: b.expr(fn, e.Index),
729 case *types.Slice, *types.Pointer: // *array
730 // Addressable slice/array; use IndexAddr and Load.
731 return b.addr(fn, e, false).load(fn, e)
734 panic("unexpected container type in IndexExpr: " + t.String())
737 case *ast.CompositeLit, *ast.StarExpr:
738 // Addressable types (lvalues)
739 return b.addr(fn, e, false).load(fn, e)
742 panic(fmt.Sprintf("unexpected expr: %T", e))
745 // stmtList emits to fn code for all statements in list.
// NOTE(review): the loop body (presumably b.stmt(fn, s)) and closing
// braces are elided in this view of the source.
746 func (b *builder) stmtList(fn *Function, list []ast.Stmt) {
747 for _, s := range list {
752 // receiver emits to fn code for expression e in the "receiver"
753 // position of selection e.f (where f may be a field or a method) and
754 // returns the effective receiver after applying the implicit field
755 // selections of sel.
757 // wantAddr requests that the result is an an address. If
758 // !sel.Indirect(), this may require that e be built in addr() mode; it
759 // must thus be addressable.
761 // escaping is defined as per builder.addr().
// NOTE(review): the declaration of v, the else branch building e by
// value, and the final return are elided in this view of the source.
763 func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection, source ast.Node) Value {
// An address is wanted but e is a non-pointer value: build its address.
765 if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) {
766 v = b.addr(fn, e, escaping).address(fn)
// Apply all implicit field selections except the last.
771 last := len(sel.Index()) - 1
772 v = emitImplicitSelections(fn, v, sel.Index()[:last], source)
// If the caller wants a value but we hold a pointer, dereference it.
773 if !wantAddr && isPointer(v.Type()) {
774 v = emitLoad(fn, v, e)
779 // setCallFunc populates the function parts of a CallCommon structure
780 // (Func, Method, Recv, Args[0]) based on the kind of invocation
// NOTE(review): the "invoke"-mode branch (interface method dispatch)
// and several returns/braces are elided in this view of the source.
783 func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
784 // Is this a method call?
785 if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok {
786 sel, ok := fn.Pkg.info.Selections[selector]
787 if ok && sel.Kind() == types.MethodVal {
788 obj := sel.Obj().(*types.Func)
789 recv := recvType(obj)
790 wantAddr := isPointer(recv)
792 v := b.receiver(fn, selector.X, wantAddr, escaping, sel, selector)
793 if isInterface(recv) {
// "call" mode: statically dispatched; the receiver becomes
// the first actual argument.
799 c.Value = fn.Prog.declaredFunc(obj)
800 c.Args = append(c.Args, v)
805 // sel.Kind()==MethodExpr indicates T.f() or (*T).f():
806 // a statically dispatched call to the method f in the
807 // method-set of T or *T. T may be an interface.
809 // e.Fun would evaluate to a concrete method, interface
810 // wrapper function, or promotion wrapper.
812 // For now, we evaluate it in the usual way.
814 // TODO(adonovan): opt: inline expr() here, to make the
815 // call static and to avoid generation of wrappers.
816 // It's somewhat tricky as it may consume the first
817 // actual parameter if the call is "invoke" mode.
820 // type T struct{}; func (T) f() {} // "call" mode
821 // type T interface { f() } // "invoke" mode
823 // type S struct{ T }
829 // Suggested approach:
830 // - consume the first actual parameter expression
831 // and build it with b.expr().
832 // - apply implicit field selections.
833 // - use MethodVal logic to populate fields of c.
836 // Evaluate the function operand in the usual way.
837 c.Value = b.expr(fn, e.Fun)
840 // emitCallArgs emits to f code for the actual parameters of call e to
841 // a (possibly built-in) function of effective type sig.
842 // The argument values are appended to args, which is then returned.
// NOTE(review): the Ellipsis (z...) guard, the final `return args`, and
// several braces are elided in this view of the source.
844 func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value {
845 // f(x, y, z...): pass slice z straight through.
847 for i, arg := range e.Args {
848 v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type(), arg)
849 args = append(args, v)
854 offset := len(args) // 1 if call has receiver, 0 otherwise
856 // Evaluate actual parameter expressions.
858 // If this is a chained call of the form f(g()) where g has
859 // multiple return values (MRV), they are flattened out into
860 // args; a suffix of them may end up in a varargs slice.
861 for _, arg := range e.Args {
863 if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain
864 for i, n := 0, ttuple.Len(); i < n; i++ {
865 args = append(args, emitExtract(fn, v, i, arg))
868 args = append(args, v)
872 // Actual->formal assignability conversions for normal parameters.
873 np := sig.Params().Len() // number of normal parameters
877 for i := 0; i < np; i++ {
878 args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type(), args[offset+i].Source())
881 // Actual->formal assignability conversions for variadic parameter,
882 // and construction of slice.
884 varargs := args[offset+np:]
885 st := sig.Params().At(np).Type().(*types.Slice)
887 if len(varargs) == 0 {
// No variadic actuals: pass a nil slice.
888 args = append(args, emitConst(fn, nilConst(st)))
890 // Replace a suffix of args with a slice containing it.
891 at := types.NewArray(vt, int64(len(varargs)))
892 a := emitNew(fn, at, e)
894 for i, arg := range varargs {
897 Index: emitConst(fn, intConst(int64(i))),
899 iaddr.setType(types.NewPointer(vt))
901 emitStore(fn, iaddr, arg, arg.Source())
// Slice the backing array and substitute it for the suffix.
905 args[offset+np] = fn.emit(s, args[offset+np].Source())
906 args = args[:offset+np+1]
912 // setCall emits to fn code to evaluate all the parameters of a function
913 // call e, and populates *c with those values.
915 func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
916 // First deal with the f(...) part and optional receiver.
917 b.setCallFunc(fn, e, c)
919 // Then append the other actual parameters.
// The comma-ok form tolerates conversions (non-*Signature); the nil
// check below guards against that. NOTE(review): the `if sig == nil`
// line itself is elided in this view.
920 sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature)
922 panic(fmt.Sprintf("no signature for call of %s", e.Fun))
924 c.Args = b.emitCallArgs(fn, sig, e, c.Args)
927 // assignOp emits to fn code to perform loc <op>= val.
// The operand is loaded, combined with val (converted to the operand's
// type), and the result stored back into loc.
928 func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, source ast.Node) {
929 oldv := loc.load(fn, source)
930 loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type(), source), loc.typ(), source), source)
933 // localValueSpec emits to fn code to define all of the vars in the
934 // function-local ValueSpec, spec.
// NOTE(review): the opening `switch {` line and several closing braces
// are elided in this view of the source.
936 func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
938 case len(spec.Values) == len(spec.Names):
939 // e.g. var x, y = 0, 1
941 for i, id := range spec.Names {
942 if !isBlankIdent(id) {
943 fn.addLocalForIdent(id)
// Address the local and initialize it (isZero=true: fresh local).
945 lval := b.addr(fn, id, false) // non-escaping
946 b.assign(fn, lval, spec.Values[i], true, nil, spec)
949 case len(spec.Values) == 0:
951 // Locals are implicitly zero-initialized.
952 for _, id := range spec.Names {
953 if !isBlankIdent(id) {
954 lhs := fn.addLocalForIdent(id)
956 emitDebugRef(fn, id, lhs, true)
// Default: one multi-result RHS initializing several names.
962 // e.g. var x, y = pos()
963 tuple := b.exprN(fn, spec.Values[0])
964 for i, id := range spec.Names {
965 if !isBlankIdent(id) {
966 fn.addLocalForIdent(id)
967 lhs := b.addr(fn, id, false) // non-escaping
968 lhs.store(fn, emitExtract(fn, tuple, i, id), id)
974 // assignStmt emits code to fn for a parallel assignment of rhss to lhss.
975 // isDef is true if this is a short variable declaration (:=).
977 // Note the similarity with localValueSpec.
// NOTE(review): the isDef guard around Defs lookup, the lvals/isZero
// population, the storebuf declaration and its flush (sb.emit), and
// several braces are elided in this view of the source.
979 func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool, source ast.Node) {
980 // Side effects of all LHSs and RHSs must occur in left-to-right order.
981 lvals := make([]lvalue, len(lhss))
982 isZero := make([]bool, len(lhss))
983 for i, lhs := range lhss {
984 var lval lvalue = blank{}
985 if !isBlankIdent(lhs) {
// := may define a new local for this identifier.
987 if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil {
988 fn.addNamedLocal(obj, lhs)
992 lval = b.addr(fn, lhs, false) // non-escaping
996 if len(lhss) == len(rhss) {
997 // Simple assignment: x = f() (!isDef)
998 // Parallel assignment: x, y = f(), g() (!isDef)
999 // or short var decl: x, y := f(), g() (isDef)
1001 // In all cases, the RHSs may refer to the LHSs,
1002 // so we need a storebuf.
1004 for i := range rhss {
1005 b.assign(fn, lvals[i], rhss[i], isZero[i], &sb, source)
// Tuple case: x, y = f() — extract each component.
1009 // e.g. x, y = pos()
1010 tuple := b.exprN(fn, rhss[0])
1011 emitDebugRef(fn, rhss[0], tuple, false)
1012 for i, lval := range lvals {
1013 lval.store(fn, emitExtract(fn, tuple, i, source), source)
1018 // arrayLen returns the length of the array whose composite literal elements are elts.
// NOTE(review): the declarations of the running index/max, the
// non-key-value increment path, and the final return are elided in this
// view of the source. Keyed elements reset the index from the constant key.
1019 func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 {
1022 for _, e := range elts {
1023 if kv, ok := e.(*ast.KeyValueExpr); ok {
1024 i = b.expr(fn, kv.Key).(*Const).Int64()
1035 // compLit emits to fn code to initialize a composite literal e at
1036 // address addr with type typ.
1038 // Nested composite literals are recursively initialized in place
1039 // where possible. If isZero is true, compLit assumes that addr
1040 // holds the zero value for typ.
1042 // Because the elements of a composite literal may refer to the
1043 // variables being updated, as in the second line below,
1046 // all the reads must occur before all the writes. Thus all stores to
1047 // loc are emitted to the storebuf sb for later execution.
1049 // A CompositeLit may have pointer type only in the recursive (nested)
1050 // case when the type name is implicit. e.g. in []*T{{}}, the inner
1051 // literal has type *T behaves like &T{}.
1052 // In that case, addr must hold a T, not a *T.
// NOTE(review): numerous statements (field-index bookkeeping, the
// *types.Slice arm of the inner switch, map-update emission) and
// closing braces are elided in this view of the source.
1054 func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) {
1055 typ := deref(fn.Pkg.typeOf(e))
1056 switch t := typ.Underlying().(type) {
// Struct literal: if partially specified and not already zero,
// first zero the whole location (via the storebuf).
1058 if !isZero && len(e.Elts) != t.NumFields() {
1060 sb.store(&address{addr, nil}, zeroValue(fn, deref(addr.Type()), e), e)
1063 for i, e := range e.Elts {
// Keyed element: locate the named field's index.
1065 if kv, ok := e.(*ast.KeyValueExpr); ok {
1066 fname := kv.Key.(*ast.Ident).Name
1067 for i, n := 0, t.NumFields(); i < n; i++ {
1069 if sf.Name() == fname {
1076 sf := t.Field(fieldIndex)
1077 faddr := &FieldAddr{
1081 faddr.setType(types.NewPointer(sf.Type()))
// Recursively assign into the field's address.
1083 b.assign(fn, &address{addr: faddr, expr: e}, e, isZero, sb, e)
1086 case *types.Array, *types.Slice:
1089 switch t := t.(type) {
// Slice literal: allocate a fresh backing array of inferred length.
1091 at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts))
1092 alloc := emitNew(fn, at, e)
// Partially specified array: zero the destination first.
1098 if !isZero && int64(len(e.Elts)) != at.Len() {
1100 sb.store(&address{array, nil}, zeroValue(fn, deref(array.Type()), e), e)
1105 for _, e := range e.Elts {
// Keyed element resets the running index from the constant key.
1106 if kv, ok := e.(*ast.KeyValueExpr); ok {
1107 idx = b.expr(fn, kv.Key).(*Const)
1112 idxval = idx.Int64() + 1
1114 idx = emitConst(fn, intConst(idxval))
1116 iaddr := &IndexAddr{
1120 iaddr.setType(types.NewPointer(at.Elem()))
1122 if t != at { // slice
1123 // backing array is unaliased => storebuf not needed.
1124 b.assign(fn, &address{addr: iaddr, expr: e}, e, true, nil, e)
1126 b.assign(fn, &address{addr: iaddr, expr: e}, e, true, sb, e)
1130 if t != at { // slice
// Wrap the freshly built array in a slice and defer the store.
1131 s := &Slice{X: array}
1133 sb.store(&address{addr: addr, expr: e}, fn.emit(s, e), e)
// Map literal: create the map with a capacity hint, then insert.
1137 m := &MakeMap{Reserve: emitConst(fn, intConst(int64(len(e.Elts))))}
1140 for _, e := range e.Elts {
1141 e := e.(*ast.KeyValueExpr)
1143 // If a key expression in a map literal is itself a
1144 // composite literal, the type may be omitted.
1146 // map[*struct{}]bool{{}: true}
1147 // An &-operation may be implied:
1148 // map[*struct{}]bool{&struct{}{}: true}
1150 if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) {
1151 // A CompositeLit never evaluates to a pointer,
1152 // so if the type of the location is a pointer,
1153 // an &-operation is implied.
1154 key = b.addr(fn, e.Key, true).address(fn)
1156 key = b.expr(fn, e.Key)
1161 k: emitConv(fn, key, t.Key(), e),
1165 // We call assign() only because it takes care
1166 // of any &-operation required in the recursive
1168 // map[int]*struct{}{0: {}} implies &struct{}{}.
1169 // In-place update is of course impossible,
1170 // and no storebuf is needed.
1171 b.assign(fn, &loc, e.Value, true, nil, e)
// Defer the store of the completed map into addr.
1173 sb.store(&address{addr: addr, expr: e}, m, e)
1176 panic("unexpected CompositeLit type: " + t.String())
// switchStmt emits IR for a switch statement whose case values are all
// constants, lowering it to a ConstantSwitch instruction; switches with
// non-constant cases are delegated to switchStmtDynamic.
// NOTE(review): the guards around both switchStmtDynamic delegations
// (tagless switch / non-constant case detection), the default-clause
// bookkeeping, and many braces are elided in this view of the source.
1180 func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
1182 b.switchStmtDynamic(fn, s, label)
// Scan all case conditions; any non-constant one forces the
// dynamic (if/else chain) lowering.
1186 for _, iclause := range s.Body.List {
1187 clause := iclause.(*ast.CaseClause)
1188 for _, cond := range clause.List {
1189 if fn.Pkg.info.Types[unparen(cond)].Value == nil {
1197 b.switchStmtDynamic(fn, s, label)
// All cases are constant: build head blocks for the multiway dispatch.
1205 entry := fn.currentBlock
1206 tag := b.expr(fn, s.Tag)
1208 heads := make([]*BasicBlock, 0, len(s.Body.List))
1209 bodies := make([]*BasicBlock, len(s.Body.List))
1210 conds := make([]Value, 0, len(s.Body.List))
1213 done := fn.newBasicBlock(fmt.Sprintf("switch.done"))
1217 for i, stmt := range s.Body.List {
1218 body := fn.newBasicBlock(fmt.Sprintf("switch.body.%d", i))
1220 cas := stmt.(*ast.CaseClause)
// A nil List is the explicit default clause (nil cond sentinel).
1221 if cas.List == nil {
1224 head := fn.newBasicBlock(fmt.Sprintf("switch.head.%d", i))
1225 conds = append(conds, nil)
1226 heads = append(heads, head)
1227 fn.currentBlock = head
1228 emitJump(fn, body, cas)
1230 for j, cond := range stmt.(*ast.CaseClause).List {
// Condition values are emitted in the entry block.
1231 fn.currentBlock = entry
1232 head := fn.newBasicBlock(fmt.Sprintf("switch.head.%d.%d", i, j))
1233 conds = append(conds, b.expr(fn, cond))
1234 heads = append(heads, head)
1235 fn.currentBlock = head
1236 emitJump(fn, body, cond)
// Second pass: emit each clause body, wiring up fallthrough targets.
1240 for i, stmt := range s.Body.List {
1241 clause := stmt.(*ast.CaseClause)
1243 fn.currentBlock = body
1245 if i+1 < len(bodies) {
1246 fallthru = bodies[i+1]
1248 fn.targets = &targets{
1251 _fallthrough: fallthru,
1253 b.stmtList(fn, clause.Body)
1254 fn.targets = fn.targets.tail
1255 emitJump(fn, done, stmt)
// No explicit default: synthesize an empty one that jumps to done.
1259 head := fn.newBasicBlock(fmt.Sprintf("switch.head.implicit-default"))
1260 body := fn.newBasicBlock("switch.body.implicit-default")
1261 fn.currentBlock = head
1262 emitJump(fn, body, s)
1263 fn.currentBlock = body
1264 emitJump(fn, done, s)
1265 heads = append(heads, head)
1266 conds = append(conds, nil)
// Invariant: one head block per condition (default included).
1269 if len(heads) != len(conds) {
1270 panic(fmt.Sprintf("internal error: %d heads for %d conds", len(heads), len(conds)))
1272 for _, head := range heads {
1273 addEdge(entry, head)
1275 fn.currentBlock = entry
1276 entry.emit(&ConstantSwitch{
1280 fn.currentBlock = done
1283 // switchStmt emits to fn code for the switch statement s, optionally
1284 // labelled by label.
// NOTE(review): several guards (s.Init, s.Tag presence, label wiring,
// default-clause detection) and closing braces are elided in this view
// of the source.
1286 func (b *builder) switchStmtDynamic(fn *Function, s *ast.SwitchStmt, label *lblock) {
1287 // We treat SwitchStmt like a sequential if-else chain.
1288 // Multiway dispatch can be recovered later by irutil.Switches()
1289 // to those cases that are free of side effects.
1293 kTrue := emitConst(fn, NewConst(constant.MakeBool(true), tBool))
// A tagless switch behaves as `switch true { ... }`.
1295 var tagv Value = kTrue
1296 var tagSource ast.Node = s
1298 tagv = b.expr(fn, s.Tag)
1301 // lifting only considers loads and stores, but we want different
1302 // sigma nodes for the different comparisons. use a temporary and
1303 // load it in every branch.
1304 tag := fn.addLocal(tagv.Type(), tagSource)
1305 emitStore(fn, tag, tagv, tagSource)
1307 done := fn.newBasicBlock("switch.done")
1311 // We pull the default case (if present) down to the end.
1312 // But each fallthrough label must point to the next
1313 // body block in source order, so we preallocate a
1314 // body block (fallthru) for the next case.
1315 // Unfortunately this makes for a confusing block order.
1316 var dfltBody *[]ast.Stmt
1317 var dfltFallthrough *BasicBlock
1318 var fallthru, dfltBlock *BasicBlock
1319 ncases := len(s.Body.List)
1320 for i, clause := range s.Body.List {
1323 body = fn.newBasicBlock("switch.body") // first case only
1326 // Preallocate body block for the next case.
1329 fallthru = fn.newBasicBlock("switch.body")
1332 cc := clause.(*ast.CaseClause)
// Default clause: remember it and emit it after all other cases.
1336 dfltFallthrough = fallthru
1341 var nextCond *BasicBlock
1342 for _, cond := range cc.List {
1343 nextCond = fn.newBasicBlock("switch.next")
1345 // emit a proper if/else chain instead of a comparison
1346 // of a value against true.
1348 // NOTE(dh): adonovan had a todo saying "don't forget
1349 // conversions though". As far as I can tell, there
1350 // aren't any conversions that we need to take care of
1351 // here. `case bool(a) && bool(b)` as well as `case
1352 // bool(a && b)` are being taken care of by b.cond,
1353 // and `case a` where a is not of type bool is
// Tagless switch: branch directly on the case condition.
1355 b.cond(fn, cond, body, nextCond)
// Tagged switch: compare the reloaded tag against the case value.
1357 cond := emitCompare(fn, token.EQL, emitLoad(fn, tag, cond), b.expr(fn, cond), cond)
1358 emitIf(fn, cond, body, nextCond, cond.Source())
1361 fn.currentBlock = nextCond
1363 fn.currentBlock = body
1364 fn.targets = &targets{
1367 _fallthrough: fallthru,
1369 b.stmtList(fn, cc.Body)
1370 fn.targets = fn.targets.tail
1371 emitJump(fn, done, s)
1372 fn.currentBlock = nextCond
// Emit the deferred default clause, if any, at the end of the chain.
1374 if dfltBlock != nil {
1375 // The lack of a Source for the jump doesn't matter, block
1376 // fusing will get rid of the jump later.
1378 emitJump(fn, dfltBlock, s)
1379 fn.currentBlock = dfltBlock
1380 fn.targets = &targets{
1383 _fallthrough: dfltFallthrough,
1385 b.stmtList(fn, *dfltBody)
1386 fn.targets = fn.targets.tail
1388 emitJump(fn, done, s)
1389 fn.currentBlock = done
// typeSwitchStmt emits to fn code for the type switch statement s, optionally
// labelled by label. It lowers the switch to a TypeSwitch instruction (which
// yields a case index plus one asserted value per case) followed by a
// ConstantSwitch dispatching on that index.
1392 func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) {
// Extract the switched-on operand from either "x.(type)" or "y := x.(type)".
1398 switch e := s.Assign.(type) {
1399 case *ast.ExprStmt: // x.(type)
1400 tag = b.expr(fn, unparen(e.X).(*ast.TypeAssertExpr).X)
1401 case *ast.AssignStmt: // y := x.(type)
1402 tag = b.expr(fn, unparen(e.Rhs[0]).(*ast.TypeAssertExpr).X)
1404 panic("unreachable")
// Spill the tag so lifting produces per-branch sigma nodes (same trick as
// switchStmtDynamic).
1406 tagPtr := fn.addLocal(tag.Type(), tag.Source())
1407 emitStore(fn, tagPtr, tag, tag.Source())
1409 // +1 in case there's no explicit default case
1410 heads := make([]*BasicBlock, 0, len(s.Body.List)+1)
1412 entry := fn.currentBlock
1413 done := fn.newBasicBlock("done")
1418 // set up type switch and constant switch, populate their conditions
1419 tswtch := &TypeSwitch{
1420 Tag: emitLoad(fn, tagPtr, tag.Source()),
1421 Conds: make([]types.Type, 0, len(s.Body.List)+1),
1423 cswtch := &ConstantSwitch{
1424 Conds: make([]Value, 0, len(s.Body.List)+1),
// rets collects the result type contributed by each case to the TypeSwitch
// tuple (index 0 of the tuple is the case index, see varIndex below).
1427 rets := make([]types.Type, 0, len(s.Body.List)+1)
1429 var default_ *ast.CaseClause
1430 for _, clause := range s.Body.List {
1431 cc := clause.(*ast.CaseClause)
// "y := x.(type)" implicitly declares a distinct y per clause.
1432 if obj := fn.Pkg.info.Implicits[cc]; obj != nil {
1433 fn.addNamedLocal(obj, cc)
1439 for _, expr := range cc.List {
1440 tswtch.Conds = append(tswtch.Conds, fn.Pkg.typeOf(expr))
1441 cswtch.Conds = append(cswtch.Conds, emitConst(fn, intConst(int64(index))))
// Single-type case: y gets the asserted type; otherwise y keeps the
// interface operand's type.
1444 if len(cc.List) == 1 {
1445 rets = append(rets, fn.Pkg.typeOf(cc.List[0]))
1448 rets = append(rets, tag.Type())
1455 rets = append(rets, tag.Type())
// The TypeSwitch result tuple: (index, value-per-case...).
1457 var vars []*types.Var
1458 vars = append(vars, varIndex)
1459 for _, typ := range rets {
1460 vars = append(vars, anonVar(typ))
1462 tswtch.setType(types.NewTuple(vars...))
1464 fn.currentBlock = entry
// -1 is the constant-switch condition for the default case.
1466 cswtch.Conds = append(cswtch.Conds, emitConst(fn, intConst(int64(-1))))
1467 // in theory we should add a local and stores/loads for tswtch, to
1468 // generate sigma nodes in the branches. however, there isn't any
1469 // useful information we could possibly attach to it.
1470 cswtch.Tag = emitExtract(fn, tswtch, 0, s)
1473 // build heads and bodies
1475 for _, clause := range s.Body.List {
1476 cc := clause.(*ast.CaseClause)
1481 body := fn.newBasicBlock("typeswitch.body")
1482 for _, expr := range cc.List {
1483 head := fn.newBasicBlock("typeswitch.head")
1484 heads = append(heads, head)
1485 fn.currentBlock = head
1487 if obj := fn.Pkg.info.Implicits[cc]; obj != nil {
1488 // In a switch y := x.(type), each case clause
1489 // implicitly declares a distinct object y.
1490 // In a single-type case, y has that type.
1491 // In multi-type cases, 'case nil' and default,
1492 // y has the same type as the interface operand.
1494 l := fn.objects[obj]
1495 if rets[index] == tUntypedNil {
1496 emitStore(fn, l, emitConst(fn, nilConst(tswtch.Tag.Type())), s.Assign)
1498 x := emitExtract(fn, tswtch, index+1, s.Assign)
1499 emitStore(fn, l, x, nil)
1503 emitJump(fn, body, expr)
1506 fn.currentBlock = body
1507 fn.targets = &targets{
1511 b.stmtList(fn, cc.Body)
1512 fn.targets = fn.targets.tail
1513 emitJump(fn, done, clause)
// Without an explicit default, the default edge goes straight to done.
1516 if default_ == nil {
1518 heads = append(heads, done)
1520 body := fn.newBasicBlock("typeswitch.default")
1521 heads = append(heads, body)
1522 fn.currentBlock = body
1523 fn.targets = &targets{
1527 if obj := fn.Pkg.info.Implicits[default_]; obj != nil {
1528 l := fn.objects[obj]
1529 x := emitExtract(fn, tswtch, index+1, s.Assign)
1530 emitStore(fn, l, x, s)
1532 b.stmtList(fn, default_.Body)
1533 fn.targets = fn.targets.tail
1534 emitJump(fn, done, s)
1537 fn.currentBlock = entry
1538 for _, head := range heads {
1539 addEdge(entry, head)
1541 fn.currentBlock = done
1544 // selectStmt emits to fn code for the select statement s, optionally
1545 // labelled by label.
// It reports noreturn=true when the select blocks forever (no cases), in
// which case the caller must start a fresh (unreachable) block.
1547 func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (noreturn bool) {
// select{}: blocks forever; emit Select then Unreachable, edge to Exit.
1548 if len(s.Body.List) == 0 {
1549 instr := &Select{Blocking: true}
1550 instr.setType(types.NewTuple(varIndex, varOk))
1552 fn.emit(new(Unreachable), s)
1553 addEdge(fn.currentBlock, fn.Exit)
1557 // A blocking select of a single case degenerates to a
1558 // simple send or receive.
1559 // TODO(adonovan): opt: is this optimization worth its weight?
1560 if len(s.Body.List) == 1 {
1561 clause := s.Body.List[0].(*ast.CommClause)
1562 if clause.Comm != nil {
1563 b.stmt(fn, clause.Comm)
1564 done := fn.newBasicBlock("select.done")
1568 fn.targets = &targets{
1572 b.stmtList(fn, clause.Body)
1573 fn.targets = fn.targets.tail
1574 emitJump(fn, done, clause)
1575 fn.currentBlock = done
1580 // First evaluate all channels in all cases, and find
1581 // the directions of each state.
1582 var states []*SelectState
1584 debugInfo := fn.debugInfo()
1585 for _, clause := range s.Body.List {
1587 switch comm := clause.(*ast.CommClause).Comm.(type) {
1588 case nil: // default case
1592 case *ast.SendStmt: // ch<- i
1593 ch := b.expr(fn, comm.Chan)
1595 Dir: types.SendOnly,
// Sent value is converted to the channel's element type up front.
1597 Send: emitConv(fn, b.expr(fn, comm.Value),
1598 ch.Type().Underlying().(*types.Chan).Elem(), comm),
1605 case *ast.AssignStmt: // x := <-ch
1606 recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr)
1608 Dir: types.RecvOnly,
1609 Chan: b.expr(fn, recv.X),
1616 case *ast.ExprStmt: // <-ch
1617 recv := unparen(comm.X).(*ast.UnaryExpr)
1619 Dir: types.RecvOnly,
1620 Chan: b.expr(fn, recv.X),
1627 states = append(states, st)
1630 // We dispatch on the (fair) result of Select using a
1631 // switch on the returned index.
// Select's tuple is (index, ok, recv0, recv1, ...): one extra slot per
// receive case, in case order.
1637 var vars []*types.Var
1638 vars = append(vars, varIndex, varOk)
1639 for _, st := range states {
1640 if st.Dir == types.RecvOnly {
1641 tElem := st.Chan.Type().Underlying().(*types.Chan).Elem()
1642 vars = append(vars, anonVar(tElem))
1645 sel.setType(types.NewTuple(vars...))
1647 idx := emitExtract(fn, sel, 0, s)
1649 done := fn.newBasicBlock("select.done")
1654 entry := fn.currentBlock
1655 swtch := &ConstantSwitch{
1657 // one condition per case
1658 Conds: make([]Value, 0, len(s.Body.List)+1),
1660 // note that we don't need heads; a select case can only have a single condition
1661 var bodies []*BasicBlock
1664 r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV
1665 for _, cc := range s.Body.List {
1666 clause := cc.(*ast.CommClause)
// default case: dispatched on the sentinel index -1.
1667 if clause.Comm == nil {
1668 body := fn.newBasicBlock("select.default")
1669 fn.currentBlock = body
1670 bodies = append(bodies, body)
1671 fn.targets = &targets{
1675 b.stmtList(fn, clause.Body)
1676 emitJump(fn, done, s)
1677 fn.targets = fn.targets.tail
1678 swtch.Conds = append(swtch.Conds, emitConst(fn, intConst(-1)))
1681 swtch.Conds = append(swtch.Conds, emitConst(fn, intConst(int64(state))))
1682 body := fn.newBasicBlock("select.body")
1683 fn.currentBlock = body
1684 bodies = append(bodies, body)
1685 fn.targets = &targets{
1689 switch comm := clause.Comm.(type) {
1690 case *ast.ExprStmt: // <-ch
1692 v := emitExtract(fn, sel, r, comm)
1693 emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
1697 case *ast.AssignStmt: // x := <-states[state].Chan
1698 if comm.Tok == token.DEFINE {
1699 fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident))
1701 x := b.addr(fn, comm.Lhs[0], false) // non-escaping
1702 v := emitExtract(fn, sel, r, comm)
1704 emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
1706 x.store(fn, v, comm)
// Two-result receive: ok comes from tuple slot 1.
1708 if len(comm.Lhs) == 2 { // x, ok := ...
1709 if comm.Tok == token.DEFINE {
1710 fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident))
1712 ok := b.addr(fn, comm.Lhs[1], false) // non-escaping
1713 ok.store(fn, emitExtract(fn, sel, 1, comm), comm)
1717 b.stmtList(fn, clause.Body)
1718 fn.targets = fn.targets.tail
1719 emitJump(fn, done, s)
1722 fn.currentBlock = entry
1724 for _, body := range bodies {
1725 addEdge(entry, body)
1727 fn.currentBlock = done
1731 // forStmt emits to fn code for the for statement s, optionally
1732 // labelled by label.
1734 func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
// CFG shape (blocks elided/merged when Cond or Post are absent):
1738 // if cond goto body else done
1742 // post: (target of continue)
1745 // done: (target of break)
1749 body := fn.newBasicBlock("for.body")
1750 done := fn.newBasicBlock("for.done") // target of 'break'
1751 loop := body // target of back-edge
// A separate loop header exists only when there is a condition to re-test.
1753 loop = fn.newBasicBlock("for.loop")
1755 cont := loop // target of 'continue'
// A separate post block exists only when there is a post statement.
1757 cont = fn.newBasicBlock("for.post")
1761 label._continue = cont
1763 emitJump(fn, loop, s)
1764 fn.currentBlock = loop
1766 b.cond(fn, s.Cond, body, done)
1767 fn.currentBlock = body
1769 fn.targets = &targets{
1775 fn.targets = fn.targets.tail
1776 emitJump(fn, cont, s)
1779 fn.currentBlock = cont
1781 emitJump(fn, loop, s) // back-edge
1783 fn.currentBlock = done
1786 // rangeIndexed emits to fn the header for an integer-indexed loop
1787 // over array, *array or slice value x.
1788 // The v result is defined only if tv is non-nil.
1789 // forPos is the position of the "for" token.
// NOTE(review): the parameter is named source, not forPos — the comment above
// looks stale relative to the signature; confirm and update upstream.
1791 func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) {
// CFG shape:
1795 // loop: (target of continue)
1797 // if index < length goto body else done
1803 // done: (target of break)
1805 // Determine number of iterations.
1807 if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok {
1808 // For array or *array, the number of iterations is
1809 // known statically thanks to the type. We avoid a
1810 // data dependence upon x, permitting later dead-code
1811 // elimination if x is pure, static unrolling, etc.
1812 // Ranging over a nil *array may have >0 iterations.
1813 // We still generate code for x, in case it has effects.
1814 length = emitConst(fn, intConst(arr.Len()))
// Slice: emit a call to the built-in len(x).
1818 c.Call.Value = makeLen(x.Type())
1819 c.Call.Args = []Value{x}
1821 length = fn.emit(&c, source)
// index starts at -1 and is pre-incremented at the top of each iteration.
1824 index := fn.addLocal(tInt, source)
1825 emitStore(fn, index, emitConst(fn, intConst(-1)), source)
1827 loop = fn.newBasicBlock("rangeindex.loop")
1828 emitJump(fn, loop, source)
1829 fn.currentBlock = loop
1833 X: emitLoad(fn, index, source),
1834 Y: emitConst(fn, intConst(1)),
1837 emitStore(fn, index, fn.emit(incr, source), source)
1839 body := fn.newBasicBlock("rangeindex.body")
1840 done = fn.newBasicBlock("rangeindex.done")
1841 emitIf(fn, emitCompare(fn, token.LSS, incr, length, source), body, done, source)
1842 fn.currentBlock = body
1844 k = emitLoad(fn, index, source)
// Element access differs per container kind.
1846 switch t := x.Type().Underlying().(type) {
1852 instr.setType(t.Elem())
1853 v = fn.emit(instr, source)
1855 case *types.Pointer: // *array
1856 instr := &IndexAddr{
1860 instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()))
1861 v = emitLoad(fn, fn.emit(instr, source), source)
1864 instr := &IndexAddr{
1868 instr.setType(types.NewPointer(t.Elem()))
1869 v = emitLoad(fn, fn.emit(instr, source), source)
1872 panic("rangeIndexed x:" + t.String())
1878 // rangeIter emits to fn the header for a loop using
1879 // Range/Next/Extract to iterate over map or string value x.
1880 // tk and tv are the types of the key/value results k and v, or nil
1881 // if the respective component is not wanted.
1883 func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) {
// CFG shape:
1886 // loop: (target of continue)
1887 // okv = next it (ok, key, value)
1888 // ok = extract okv #0
1889 // if ok goto body else done
1891 // k = extract okv #1
1892 // v = extract okv #2
1895 // done: (target of break)
// The Range instruction produces an opaque iterator (tRangeIter).
1906 rng.setType(tRangeIter)
1907 it := fn.emit(rng, source)
1909 loop = fn.newBasicBlock("rangeiter.loop")
1910 emitJump(fn, loop, source)
1911 fn.currentBlock = loop
// Strings are the only *types.Basic that reaches here (map vs string
// distinguishes the Next instruction's semantics).
1913 _, isString := x.Type().Underlying().(*types.Basic)
1919 okv.setType(types.NewTuple(
1924 fn.emit(okv, source)
1926 body := fn.newBasicBlock("rangeiter.body")
1927 done = fn.newBasicBlock("rangeiter.done")
1928 emitIf(fn, emitExtract(fn, okv, 0, source), body, done, source)
1929 fn.currentBlock = body
1932 k = emitExtract(fn, okv, 1, source)
1935 v = emitExtract(fn, okv, 2, source)
1940 // rangeChan emits to fn the header for a loop that receives from
1941 // channel x until it fails.
1942 // tk is the channel's element type, or nil if the k result is
1944 // pos is the position of the '=' or ':=' token.
// NOTE(review): the parameter is named source, not pos — the comment above
// looks stale relative to the signature; confirm and update upstream.
1946 func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, source ast.Node) (k Value, loop, done *BasicBlock) {
// CFG shape:
1948 // loop: (target of continue)
1949 // ko = <-x (key, ok)
1950 // ok = extract ko #1
1951 // if ok goto body else done
1953 // k = extract ko #0
1956 // done: (target of break)
1958 loop = fn.newBasicBlock("rangechan.loop")
1959 emitJump(fn, loop, source)
1960 fn.currentBlock = loop
// Comma-ok receive: tuple is (element, ok).
1961 retv := emitRecv(fn, x, true, types.NewTuple(newVar("k", x.Type().Underlying().(*types.Chan).Elem()), varOk), source)
1962 body := fn.newBasicBlock("rangechan.body")
1963 done = fn.newBasicBlock("rangechan.done")
1964 emitIf(fn, emitExtract(fn, retv, 1, source), body, done, source)
1965 fn.currentBlock = body
1967 k = emitExtract(fn, retv, 0, source)
1972 // rangeStmt emits to fn code for the range statement s, optionally
1973 // labelled by label.
// It dispatches on the underlying type of the ranged operand to one of the
// rangeIndexed / rangeChan / rangeIter headers above, then emits the loop
// body with break/continue targets installed.
1975 func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock, source ast.Node) {
// tk/tv are nil when the corresponding variable is absent or blank.
1976 var tk, tv types.Type
1977 if s.Key != nil && !isBlankIdent(s.Key) {
1978 tk = fn.Pkg.typeOf(s.Key)
1980 if s.Value != nil && !isBlankIdent(s.Value) {
1981 tv = fn.Pkg.typeOf(s.Value)
1984 // If iteration variables are defined (:=), this
1985 // occurs once outside the loop.
1987 // Unlike a short variable declaration, a RangeStmt
1988 // using := never redeclares an existing variable; it
1989 // always creates a new one.
1990 if s.Tok == token.DEFINE {
1992 fn.addLocalForIdent(s.Key.(*ast.Ident))
1995 fn.addLocalForIdent(s.Value.(*ast.Ident))
1999 x := b.expr(fn, s.X)
2002 var loop, done *BasicBlock
2003 switch rt := x.Type().Underlying().(type) {
2004 case *types.Slice, *types.Array, *types.Pointer: // *array
2005 k, v, loop, done = b.rangeIndexed(fn, x, tv, source)
2008 k, loop, done = b.rangeChan(fn, x, tk, source)
2010 case *types.Map, *types.Basic: // string
2011 k, v, loop, done = b.rangeIter(fn, x, tk, tv, source)
2014 panic("Cannot range over: " + rt.String())
2017 // Evaluate both LHS expressions before we update either.
2020 kl = b.addr(fn, s.Key, false) // non-escaping
2023 vl = b.addr(fn, s.Value, false) // non-escaping
2034 label._continue = loop
2037 fn.targets = &targets{
2043 fn.targets = fn.targets.tail
2044 emitJump(fn, loop, source) // back-edge
2045 fn.currentBlock = done
2048 // stmt lowers statement s to IR form, emitting code to fn.
2049 func (b *builder) stmt(fn *Function, _s ast.Stmt) {
2050 // The label of the current statement. If non-nil, its _goto
2051 // target is always set; its _break and _continue are set only
2052 // within the body of switch/typeswitch/select/for/range.
2053 // It is effectively an additional default-nil parameter of stmt().
2056 switch s := _s.(type) {
2057 case *ast.EmptyStmt:
2058 // ignore. (Usually removed by gofmt.)
2060 case *ast.DeclStmt: // Con, Var or Typ
// Only var declarations generate code; consts and types are no-ops here.
2061 d := s.Decl.(*ast.GenDecl)
2062 if d.Tok == token.VAR {
2063 for _, spec := range d.Specs {
2064 if vs, ok := spec.(*ast.ValueSpec); ok {
2065 b.localValueSpec(fn, vs)
2070 case *ast.LabeledStmt:
2071 label = fn.labelledBlock(s.Label)
2072 emitJump(fn, label._goto, s)
2073 fn.currentBlock = label._goto
2075 goto start // effectively: tailcall stmt(fn, s.Stmt, label)
// Send statement: convert the value to the channel's element type.
2082 Chan: b.expr(fn, s.Chan),
2083 X: emitConv(fn, b.expr(fn, s.Value),
2084 fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem(), s),
2088 case *ast.IncDecStmt:
// x++ / x-- lower to x += 1 / x -= 1.
2090 if s.Tok == token.DEC {
2093 loc := b.addr(fn, s.X, false)
2094 b.assignOp(fn, loc, emitConst(fn, NewConst(constant.MakeInt64(1), loc.typ())), op, s)
2096 case *ast.AssignStmt:
2098 case token.ASSIGN, token.DEFINE:
2099 b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE, _s)
2101 default: // +=, etc.
// Map ADD_ASSIGN..=>ADD.. etc. by exploiting the token ordering.
2102 op := s.Tok + token.ADD - token.ADD_ASSIGN
2103 b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s)
2107 // The "intrinsics" new/make/len/cap are forbidden here.
2108 // panic is treated like an ordinary function call.
2110 b.setCall(fn, s.Call, &v.Call)
2113 case *ast.DeferStmt:
2114 // The "intrinsics" new/make/len/cap are forbidden here.
2115 // panic is treated like an ordinary function call.
2117 b.setCall(fn, s.Call, &v.Call)
2121 case *ast.ReturnStmt:
2122 // TODO(dh): we could emit tigher position information by
2123 // using the ith returned expression
2126 if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 {
2127 // Return of one expression in a multi-valued function.
2128 tuple := b.exprN(fn, s.Results[0])
2129 ttuple := tuple.Type().(*types.Tuple)
2130 for i, n := 0, ttuple.Len(); i < n; i++ {
2131 results = append(results,
2132 emitConv(fn, emitExtract(fn, tuple, i, s),
2133 fn.Signature.Results().At(i).Type(), s))
2136 // 1:1 return, or no-arg return in non-void function.
2137 for i, r := range s.Results {
2138 v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type(), s)
2139 results = append(results, v)
// Store results into the named-result slots, then jump to the shared Exit.
2144 for i, r := range results {
2145 emitStore(fn, ret[i], r, s)
2148 emitJump(fn, fn.Exit, s)
2149 fn.currentBlock = fn.newBasicBlock("unreachable")
2151 case *ast.BranchStmt:
2152 var block *BasicBlock
// Labelled branches resolve via labelledBlock; unlabelled ones walk the
// enclosing targets stack to the innermost applicable target.
2156 block = fn.labelledBlock(s.Label)._break
2158 for t := fn.targets; t != nil && block == nil; t = t.tail {
2163 case token.CONTINUE:
2165 block = fn.labelledBlock(s.Label)._continue
2167 for t := fn.targets; t != nil && block == nil; t = t.tail {
2172 case token.FALLTHROUGH:
2173 for t := fn.targets; t != nil && block == nil; t = t.tail {
2174 block = t._fallthrough
2178 block = fn.labelledBlock(s.Label)._goto
2180 j := emitJump(fn, block, s)
2181 j.Comment = s.Tok.String()
2182 fn.currentBlock = fn.newBasicBlock("unreachable")
2184 case *ast.BlockStmt:
2185 b.stmtList(fn, s.List)
// If statement: then/else arms converge on a common done block.
2191 then := fn.newBasicBlock("if.then")
2192 done := fn.newBasicBlock("if.done")
2195 els = fn.newBasicBlock("if.else")
2197 instr := b.cond(fn, s.Cond, then, els)
2199 fn.currentBlock = then
2201 emitJump(fn, done, s)
2204 fn.currentBlock = els
2206 emitJump(fn, done, s)
2209 fn.currentBlock = done
2211 case *ast.SwitchStmt:
2212 b.switchStmt(fn, s, label)
2214 case *ast.TypeSwitchStmt:
2215 b.typeSwitchStmt(fn, s, label)
2217 case *ast.SelectStmt:
2218 if b.selectStmt(fn, s, label) {
2219 // the select has no cases, it blocks forever
2220 fn.currentBlock = fn.newBasicBlock("unreachable")
2224 b.forStmt(fn, s, label)
2226 case *ast.RangeStmt:
2227 b.rangeStmt(fn, s, label, s)
2230 panic(fmt.Sprintf("unexpected statement kind: %T", s))
2234 // buildFunction builds IR code for the body of function fn. Idempotent.
2235 func (b *builder) buildFunction(fn *Function) {
2236 if fn.Blocks != nil {
2237 return // building already started
2240 var recvField *ast.FieldList
2241 var body *ast.BlockStmt
2242 var functype *ast.FuncType
2243 switch n := fn.source.(type) {
2245 return // not a Go source function. (Synthetic, or from object file.)
// Special-case: syscall.Exit has no analyzable body but terminates the
// process; model it explicitly.
2257 if fn.Package().Pkg.Path() == "syscall" && fn.Name() == "Exit" {
2258 // syscall.Exit is a stub and the way os.Exit terminates the
2259 // process. Note that there are other functions in the runtime
2260 // that also terminate or unwind that we cannot analyze.
2261 // However, they aren't stubs, so buildExits ends up getting
2262 // called on them, so that's where we handle those special
2268 // External function.
2269 if fn.Params == nil {
2270 // This condition ensures we add a non-empty
2271 // params list once only, but we may attempt
2272 // the degenerate empty case repeatedly.
2273 // TODO(adonovan): opt: don't do that.
2275 // We set Function.Params even though there is no body
2276 // code to reference them. This simplifies clients.
2277 if recv := fn.Signature.Recv(); recv != nil {
2278 // XXX synthesize an ast.Node
2279 fn.addParamObj(recv, nil)
2281 params := fn.Signature.Params()
2282 for i, n := 0, params.Len(); i < n; i++ {
2283 // XXX synthesize an ast.Node
2284 fn.addParamObj(params.At(i), nil)
2289 if fn.Prog.mode&LogSource != 0 {
2290 defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.Pos()))()
// Reuse the builder's scratch block-sets across functions to cut allocations.
2292 fn.blocksets = b.blocksets
2294 fn.createSyntacticParams(recvField, functype)
2297 if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb.Preds != nil) {
2298 // Control fell off the end of the function's body block.
2300 // Block optimizations eliminate the current block, if
2301 // unreachable. It is a builder invariant that
2302 // if this no-arg return is ill-typed for
2303 // fn.Signature.Results, this block must be
2304 // unreachable. The sanity checker checks this.
2305 // fn.emit(new(RunDefers))
2306 // fn.emit(new(Return))
2307 emitJump(fn, fn.Exit, nil)
2312 b.addUnreachables(fn)
// Hand the (possibly grown) scratch sets back to the builder.
2314 b.blocksets = fn.blocksets
2315 fn.functionBody = nil
2318 // buildFuncDecl builds IR code for the function or method declared
2319 // by decl in package pkg.
2321 func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
// Blank-named functions (func _() {...}) are skipped here.
2323 if isBlankIdent(id) {
2326 fn := pkg.values[pkg.info.Defs[id]].(*Function)
// Each user-defined init function is invoked from the package's
// synthesized init().
2327 if decl.Recv == nil && id.Name == "init" {
2330 v.setType(types.NewTuple())
2331 pkg.init.emit(&v, decl)
2337 // Build calls Package.Build for each package in prog.
2339 // Build is intended for whole-program analysis; a typical compiler
2340 // need only build a single package.
2342 // Build is idempotent and thread-safe.
2344 func (prog *Program) Build() {
2345 for _, p := range prog.packages {
2350 // Build builds IR code for all functions and vars in package p.
2352 // Precondition: CreatePackage must have been called for all of p's
2353 // direct imports (and hence its direct imports must have been
2356 // Build is idempotent and thread-safe.
// sync.Once provides both the idempotence and the thread-safety.
2358 func (p *Package) Build() { p.buildOnce.Do(p.build) }
// build does the real work of Package.Build: it emits the synthesized init()
// function (guard, imported-package inits, package-level var initializers in
// go/types.Info.InitOrder) and then builds every declared function/method.
// Called exactly once, via p.buildOnce.
2360 func (p *Package) build() {
2362 return // synthetic package, e.g. "testmain"
2365 // Ensure we have runtime type info for all exported members.
2366 // TODO(adonovan): ideally belongs in memberFromObject, but
2367 // that would require package creation in topological order.
2368 for name, mem := range p.Members {
2369 if ast.IsExported(name) {
2370 p.Prog.needMethodsOf(mem.Type())
2373 if p.Prog.mode&LogSource != 0 {
2374 defer logStack("build %s", p)()
2380 var done *BasicBlock
2382 // Make init() skip if package is already initialized.
2383 initguard := p.Var("init$guard")
2384 doinit := init.newBasicBlock("init.start")
2386 emitIf(init, emitLoad(init, initguard, nil), done, doinit, nil)
2387 init.currentBlock = doinit
2388 emitStore(init, initguard, emitConst(init, NewConst(constant.MakeBool(true), tBool)), nil)
2390 // Call the init() function of each package we import.
2391 for _, pkg := range p.Pkg.Imports() {
2392 prereq := p.Prog.packages[pkg]
2394 panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path()))
2397 v.Call.Value = prereq.init
2398 v.setType(types.NewTuple())
2403 printFunc: p.printFunc,
2406 // Initialize package-level vars in correct order.
2407 for _, varinit := range p.info.InitOrder {
2408 if init.Prog.mode&LogSource != 0 {
2409 fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n",
2410 varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos()))
2412 if len(varinit.Lhs) == 1 {
2413 // 1:1 initialization: var x, y = a(), b()
// Blank LHS ("_") still evaluates the RHS for effects; lval stays nil/blank.
2415 if v := varinit.Lhs[0]; v.Name() != "_" {
2416 lval = &address{addr: p.values[v].(*Global)}
2420 // TODO(dh): do emit position information
2421 b.assign(init, lval, varinit.Rhs, true, nil, nil)
2423 // n:1 initialization: var x, y := f()
2424 tuple := b.exprN(init, varinit.Rhs)
2425 for i, v := range varinit.Lhs {
2426 if v.Name() == "_" {
2429 emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i, nil), nil)
2434 // Build all package-level functions, init functions
2435 // and methods, including unreachable/blank ones.
2436 // We build them in source order, but it's not significant.
2437 for _, file := range p.files {
2438 for _, decl := range file.Decls {
2439 if decl, ok := decl.(*ast.FuncDecl); ok {
2440 b.buildFuncDecl(p, decl)
2445 // Finish up init().
2446 emitJump(init, done, nil)
// Per the package doc: BUILD-phase indices stay constant, so the type
// information can be dropped once the package is built.
2449 p.info = nil // We no longer need ASTs or go/types deductions.
2451 if p.Prog.mode&SanityCheckFunctions != 0 {
2452 sanityCheckPackage(p)
2456 // Like ObjectOf, but panics instead of returning nil.
2457 // Only valid during p's create and build phases.
// (p.info is set to nil at the end of build; calling this afterwards panics
// on the nil Info rather than via the explicit panic below.)
2458 func (p *Package) objectOf(id *ast.Ident) types.Object {
2459 if o := p.info.ObjectOf(id); o != nil {
2462 panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s",
2463 id.Name, p.Prog.Fset.Position(id.Pos())))
2466 // Like TypeOf, but panics instead of returning nil.
2467 // Only valid during p's create and build phases.
// (See objectOf: p.info is nil after build completes.)
2468 func (p *Package) typeOf(e ast.Expr) types.Type {
2469 if T := p.info.TypeOf(e); T != nil {
2472 panic(fmt.Sprintf("no type for %T @ %s",
2473 e, p.Prog.Fset.Position(e.Pos())))