1 // Copyright 2013 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
7 // This file implements the BUILD phase of SSA construction.
9 // SSA construction has two phases, CREATE and BUILD. In the CREATE phase
10 // (create.go), all packages are constructed and type-checked and
11 // definitions of all package members are created, method-sets are
12 // computed, and wrapper methods are synthesized.
13 // ssa.Packages are created in arbitrary order.
15 // In the BUILD phase (builder.go), the builder traverses the AST of
16 // each Go source function and generates SSA instructions for the
17 // function body. Initializer expressions for package-level variables
18 // are emitted to the package's init() function in the order specified
19 // by go/types.Info.InitOrder, then code for each function in the
20 // package is generated in lexical order.
21 // The BUILD phases for distinct packages are independent and are
22 // executed in parallel.
24 // TODO(adonovan): indeed, building functions is now embarrassingly parallel.
25 // Audit for concurrency then benchmark using more goroutines.
27 // The builder's and Program's indices (maps) are populated and
28 // mutated during the CREATE phase, but during the BUILD phase they
29 // remain constant. The sole exception is Prog.methodSets and its
30 // related maps, which are protected by a dedicated mutex.
// opaqueType represents an SSA-internal type (e.g. the type of "range"
// iterators) that has no corresponding Go type.
// NOTE(review): this listing omits interior lines (original 43-46);
// String() reads t.name, so a `name` field exists, but other fields
// (if any) are not visible here.
42 type opaqueType struct {
// String implements fmt.Stringer by returning the opaque type's name.
47 func (t *opaqueType) String() string { return t.name }
// Shared variables and type singletons used throughout SSA construction.
// NOTE(review): the enclosing `var (` declarations are missing from this
// listing (source line numbers jump); only individual initializers are
// visible below.
// varOk/varIndex: canonical result variables for "comma-ok" forms.
50 varOk = newVar("ok", tBool)
51 varIndex = newVar("index", tInt)
// Convenient aliases for frequently used basic types.
54 tBool = types.Typ[types.Bool]
55 tByte = types.Typ[types.Byte]
56 tInt = types.Typ[types.Int]
57 tInvalid = types.Typ[types.Invalid]
58 tString = types.Typ[types.String]
59 tUntypedNil = types.Typ[types.UntypedNil]
60 tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators
// tEface is the empty interface type, interface{}.
61 tEface = types.NewInterface(nil, nil).Complete()
63 // SSA Value constants.
66 vTrue = NewConst(constant.MakeBool(true), tBool)
69 // builder holds state associated with the package currently being built.
70 // Its methods contain all the logic for AST-to-SSA conversion.
73 // cond emits to fn code to evaluate boolean condition e and jump
74 // to t or f depending on its value, performing various simplifications.
76 // Postcondition: fn.currentBlock is nil.
// cond emits code to evaluate boolean condition e, branching to block t
// if true and f if false, recursing through &&, || and ! to produce
// short-circuit control flow.
// NOTE(review): listing omits interior lines (source numbering jumps);
// the switch's case labels (original lines 80-107) are absent. The
// fragments below appear to be the &&, ||, and NOT cases followed by
// the default emitIf path — confirm against the full source.
78 func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) {
79 switch e := e.(type) {
// Presumably the LAND case: evaluate e.X; only if true, fall through
// to evaluate the right operand in a fresh block.
87 ltrue := fn.newBasicBlock("cond.true")
88 b.cond(fn, e.X, ltrue, f)
89 fn.currentBlock = ltrue
// Presumably the LOR case (mirror image of &&).
94 lfalse := fn.newBasicBlock("cond.false")
95 b.cond(fn, e.X, t, lfalse)
96 fn.currentBlock = lfalse
// Unary NOT: swap the true/false targets.
102 if e.Op == token.NOT {
103 b.cond(fn, e.X, f, t)
108 // A traditional compiler would simplify "if false" (etc) here
109 // but we do not, for better fidelity to the source code.
111 // The value of a constant condition may be platform-specific,
112 // and may cause blocks that are reachable in some configuration
113 // to be hidden from subsequent analyses such as bug-finding tools.
// Default: materialize the condition value and branch on it.
114 emitIf(fn, b.expr(fn, e), t, f)
117 // logicalBinop emits code to fn to evaluate e, a &&- or
118 // ||-expression whose reified boolean value is wanted.
119 // The value is returned.
// logicalBinop emits code for e (an && or || expression) whose boolean
// value is wanted as a materialized Value, built as a phi of the
// short-circuit constant and the right operand.
// NOTE(review): listing omits interior lines (source numbering jumps),
// including the switch on e.Op that selects between the two b.cond
// arms below.
121 func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
122 rhs := fn.newBasicBlock("binop.rhs")
123 done := fn.newBasicBlock("binop.done")
125 // T(e) = T(e.X) = T(e.Y) after untyped constants have been
127 // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool.
128 t := fn.Pkg.typeOf(e)
130 var short Value // value of the short-circuit path
// && arm: jump to done (carrying false) when e.X is false.
133 b.cond(fn, e.X, rhs, done)
134 short = NewConst(constant.MakeBool(false), t)
// || arm: jump to done (carrying true) when e.X is true.
137 b.cond(fn, e.X, done, rhs)
138 short = NewConst(constant.MakeBool(true), t)
141 // Is rhs unreachable?
142 if rhs.Preds == nil {
143 // Simplify false&&y to false, true||y to true.
144 fn.currentBlock = done
148 // Is done unreachable?
149 if done.Preds == nil {
150 // Simplify true&&y (or false||y) to y.
151 fn.currentBlock = rhs
152 return b.expr(fn, e.Y)
155 // All edges from e.X to done carry the short-circuit value.
157 for range done.Preds {
158 edges = append(edges, short)
161 // The edge from e.Y to done carries the value of e.Y.
162 fn.currentBlock = rhs
163 edges = append(edges, b.expr(fn, e.Y))
165 fn.currentBlock = done
// Merge the short-circuit and computed values with a phi node.
167 phi := &Phi{Edges: edges, Comment: e.Op.String()}
170 return done.emit(phi)
173 // exprN lowers a multi-result expression e to SSA form, emitting code
174 // to fn and returning a single Value whose type is a *types.Tuple.
175 // The caller must access the components via Extract.
177 // Multi-result expressions include CallExprs in a multi-value
178 // assignment or return statement, and "value,ok" uses of
179 // TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op
// exprN lowers multi-result expression e (call, v,ok map index, v,ok
// type assert, or v,ok receive) to a single tuple-typed Value.
// NOTE(review): listing omits interior lines (source numbering jumps),
// including several case labels and struct-literal openers.
182 func (b *builder) exprN(fn *Function, e ast.Expr) Value {
183 typ := fn.Pkg.typeOf(e).(*types.Tuple)
184 switch e := e.(type) {
// Presumably *ast.ParenExpr: unwrap and recurse.
186 return b.exprN(fn, e.X)
189 // Currently, no built-in function nor type conversion
190 // has multiple results, so we can avoid some of the
191 // cases for single-valued CallExpr.
193 b.setCall(fn, e, &c.Call)
// Map index in comma-ok position: build a Lookup instruction.
198 mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
201 Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
205 lookup.setPos(e.Lbrack)
206 return fn.emit(lookup)
208 case *ast.TypeAssertExpr:
209 return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen)
211 case *ast.UnaryExpr: // must be receive <-
// Unhandled expression kinds are a builder invariant violation.
221 panic(fmt.Sprintf("exprN(%T) in %s", e, fn))
224 // builtin emits to fn SSA instructions to implement a call to the
225 // built-in function obj with the specified arguments
226 // and return type. It returns the value defined by the result.
228 // The result is nil if no special handling was required; in this case
229 // the caller should treat this like an ordinary library function
// builtin emits instructions for a call to built-in obj (make, new,
// len, cap, panic, ...), returning the result Value, or nil if the call
// needs no special handling and should be compiled as an ordinary call.
// NOTE(review): listing omits interior lines (source numbering jumps);
// the outer switch on obj.Name() and several case labels are absent,
// so the groupings below are inferred — confirm against full source.
232 func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value {
// make: dispatch on the kind of type being made.
235 switch typ.Underlying().(type) {
237 n := b.expr(fn, args[1])
240 m = b.expr(fn, args[2])
242 if m, ok := m.(*Const); ok {
243 // treat make([]T, n, m) as new([m]T)[:n]
245 at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap)
246 alloc := emitNew(fn, at, pos)
247 alloc.Comment = "makeslice"
// make(map) with optional capacity reservation.
267 res = b.expr(fn, args[1])
269 v := &MakeMap{Reserve: res}
// make(chan) with optional buffer size.
277 sz = b.expr(fn, args[1])
279 v := &MakeChan{Size: sz}
// new(T): heap allocation.
286 alloc := emitNew(fn, deref(typ), pos)
287 alloc.Comment = "new"
291 // Special case: len or cap of an array or *array is
292 // based on the type, not the value which may be nil.
293 // We must still evaluate the value, though. (If it
294 // was side-effect free, the whole call would have
295 // been constant-folded.)
296 t := deref(fn.Pkg.typeOf(args[0])).Underlying()
297 if at, ok := t.(*types.Array); ok {
298 b.expr(fn, args[0]) // for effects only
299 return intConst(at.Len())
301 // Otherwise treat as normal.
// panic: the argument is converted to interface{}; control does not
// return, so subsequent code goes in a fresh unreachable block.
305 X: emitConv(fn, b.expr(fn, args[0]), tEface),
308 fn.currentBlock = fn.newBasicBlock("unreachable")
309 return vTrue // any non-nil Value will do
311 return nil // treat all others as a regular function call
314 // addr lowers a single-result addressable expression e to SSA form,
315 // emitting code to fn and returning the location (an lvalue) defined
316 // by the expression.
318 // If escaping is true, addr marks the base variable of the
319 // addressable expression e as being a potentially escaping pointer
320 // value. For example, in this code:
327 // the application of & causes a.b[0].c to have its address taken,
328 // which means that ultimately the local variable a must be
329 // heap-allocated. This is a simple but very conservative escape
332 // Operations forming potentially escaping pointers include:
333 // - &x, including when implicit in method call or composite literals.
334 // - a[:] iff a is an array (not *array)
335 // - references to variables in lexically enclosing functions.
// addr lowers addressable expression e to an lvalue; escaping marks the
// base variable as potentially escaping (forcing heap allocation).
// NOTE(review): listing omits interior lines (source numbering jumps);
// several case labels (*ast.Ident, *ast.ParenExpr, *ast.IndexExpr,
// *ast.StarExpr, per original source structure) are absent.
337 func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
338 switch e := e.(type) {
// Identifier: package-level value, else function-local lookup.
343 obj := fn.Pkg.objectOf(e)
344 v := fn.Prog.packageLevelValue(obj) // var (address)
346 v = fn.lookup(obj, escaping)
348 return &address{addr: v, pos: e.Pos(), expr: e}
350 case *ast.CompositeLit:
351 t := deref(fn.Pkg.typeOf(e))
// Escaping composite literals are heap-allocated; otherwise local.
354 v = emitNew(fn, t, e.Lbrace)
356 v = fn.addLocal(t, e.Lbrace)
358 v.Comment = "complit"
360 b.compLit(fn, v, e, true, &sb)
362 return &address{addr: v, pos: e.Lbrace, expr: e}
// Presumably *ast.ParenExpr: unwrap and recurse.
365 return b.addr(fn, e.X, escaping)
367 case *ast.SelectorExpr:
368 sel, ok := fn.Pkg.info.Selections[e]
370 // qualified identifier
371 return b.addr(fn, e.Sel, escaping)
373 if sel.Kind() != types.FieldVal {
// Field selection: address the receiver then the field.
377 v := b.receiver(fn, e.X, wantAddr, escaping, sel)
378 last := len(sel.Index()) - 1
380 addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel),
// Index expression: dispatch on container type.
388 switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
390 x = b.addr(fn, e.X, escaping).address(fn)
391 et = types.NewPointer(t.Elem())
392 case *types.Pointer: // *array
394 et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())
397 et = types.NewPointer(t.Elem())
// Map element: element lvalue (no address; maps aren't addressable).
401 k: emitConv(fn, b.expr(fn, e.Index), t.Key()),
406 panic("unexpected container type in IndexExpr: " + t.String())
410 Index: emitConv(fn, b.expr(fn, e.Index), tInt),
414 return &address{addr: fn.emit(v), pos: e.Lbrack, expr: e}
// Presumably *ast.StarExpr: *p — the operand is already the address.
417 return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e}
420 panic(fmt.Sprintf("unexpected address expression: %T", e))
// storebuf accumulates deferred (lvalue, rvalue) store pairs so that
// all reads in a parallel assignment can occur before all writes.
428 type storebuf struct{ stores []store }
// store appends a deferred store of rhs into lhs.
430 func (sb *storebuf) store(lhs lvalue, rhs Value) {
431 sb.stores = append(sb.stores, store{lhs, rhs})
// emit performs all buffered stores, in the order they were added.
434 func (sb *storebuf) emit(fn *Function) {
435 for _, s := range sb.stores {
436 s.lhs.store(fn, s.rhs)
440 // assign emits to fn code to initialize the lvalue loc with the value
441 // of expression e. If isZero is true, assign assumes that loc holds
442 // the zero value for its type.
444 // This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate
445 // better code in some cases, e.g., for composite literals in an
446 // addressable location.
448 // If sb is not nil, assign generates code to evaluate expression e, but
449 // not to update loc. Instead, the necessary stores are appended to the
450 // storebuf sb so that they can be executed later. This allows correct
451 // in-place update of existing variables when the RHS is a composite
452 // literal that may reference parts of the LHS.
// assign initializes lvalue loc with the value of expression e,
// preferring in-place initialization for composite literals; when sb is
// non-nil the final stores are buffered rather than emitted.
// NOTE(review): listing omits interior lines (source numbering jumps),
// including the branch structure between the fragments below.
454 func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) {
455 // Can we initialize it in place?
456 if e, ok := unparen(e).(*ast.CompositeLit); ok {
457 // A CompositeLit never evaluates to a pointer,
458 // so if the type of the location is a pointer,
459 // an &-operation is implied.
460 if _, ok := loc.(blank); !ok { // avoid calling blank.typ()
461 if isPointer(loc.typ()) {
462 ptr := b.addr(fn, e, true).address(fn)
473 if _, ok := loc.(*address); ok {
474 if isInterface(loc.typ()) {
475 // e.g. var x interface{} = T{...}
476 // Can't in-place initialize an interface value.
477 // Fall back to copying.
479 // x = T{...} or x := T{...}
480 addr := loc.address(fn)
// In-place initialization: forward the caller's storebuf when present,
// otherwise use a fresh local one (per the two calls below).
482 b.compLit(fn, addr, e, isZero, sb)
485 b.compLit(fn, addr, e, isZero, &sb)
489 // Subtle: emit debug ref for aggregate types only;
490 // slice and map are handled by store ops in compLit.
491 switch loc.typ().Underlying().(type) {
492 case *types.Struct, *types.Array:
493 emitDebugRef(fn, e, addr, true)
501 // simple case: just copy
510 // expr lowers a single-result expression e to SSA form, emitting code
511 // to fn and returning the Value defined by the expression.
// expr lowers single-result expression e to SSA, returning its Value.
// Constants are returned directly; addressable expressions are lowered
// via addr+load; everything else goes through expr0.
// NOTE(review): listing omits interior lines (source numbering jumps).
513 func (b *builder) expr(fn *Function, e ast.Expr) Value {
516 tv := fn.Pkg.info.Types[e]
518 // Is expression a constant?
520 return NewConst(tv.Value, tv.Type)
524 if tv.Addressable() {
525 // Prefer pointer arithmetic ({Index,Field}Addr) followed
526 // by Load over subelement extraction (e.g. Index, Field),
527 // to avoid large copies.
528 v = b.addr(fn, e, false).load(fn)
530 v = b.expr0(fn, e, tv)
// Record a debug reference tying the value back to the AST node.
533 emitDebugRef(fn, e, v, false)
// expr0 is expr's main dispatch: it lowers each kind of non-constant,
// non-addressable expression (func literals, conversions, calls, unary
// and binary ops, slices, identifiers, selectors, index expressions).
// NOTE(review): listing omits many interior lines (source numbering
// jumps); several case labels and struct-literal openers are absent,
// so groupings below are partly inferred — confirm against full source.
538 func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
539 switch e := e.(type) {
541 panic("non-constant BasicLit") // unreachable
// Function literal: build the anonymous function, then close over
// its free variables with MakeClosure if it has any.
545 name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)),
546 Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature),
553 fn.AnonFuncs = append(fn.AnonFuncs, fn2)
555 if fn2.FreeVars == nil {
558 v := &MakeClosure{Fn: fn2}
560 for _, fv := range fn2.FreeVars {
561 v.Bindings = append(v.Bindings, fv.outer)
566 case *ast.TypeAssertExpr: // single-result form only
567 return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e.Lparen)
// Call expression: conversion, built-in, or regular call.
570 if fn.Pkg.info.Types[e.Fun].IsType() {
571 // Explicit type conversion, e.g. string(x) or big.Int(x)
572 x := b.expr(fn, e.Args[0])
573 y := emitConv(fn, x, tv.Type)
575 switch y := y.(type) {
586 // Call to "intrinsic" built-ins, e.g. new, make, panic.
587 if id, ok := unparen(e.Fun).(*ast.Ident); ok {
588 if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok {
589 if v := b.builtin(fn, obj, e.Args, tv.Type, e.Lparen); v != nil {
594 // Regular function call.
596 b.setCall(fn, e, &v.Call)
// Unary expression.
602 case token.AND: // &X --- potentially escaping.
603 addr := b.addr(fn, e.X, true)
604 if _, ok := unparen(e.X).(*ast.StarExpr); ok {
605 // &*p must panic if p is nil (http://golang.org/s/go12nil).
606 // For simplicity, we'll just (suboptimally) rely
607 // on the side effects of a load.
608 // TODO(adonovan): emit dedicated nilcheck.
611 return addr.address(fn)
613 return b.expr(fn, e.X)
614 case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^
626 case *ast.BinaryExpr:
628 case token.LAND, token.LOR:
629 return b.logicalBinop(fn, e)
630 case token.SHL, token.SHR:
632 case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
633 return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e.OpPos)
635 case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ:
636 cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos)
637 // The type of x==y may be UntypedBool.
638 return emitConv(fn, cmp, types.Default(tv.Type))
640 panic("illegal op in BinaryExpr: " + e.Op.String())
// Slice expression: take the operand's address for arrays.
644 var low, high, max Value
646 switch fn.Pkg.typeOf(e.X).Underlying().(type) {
648 // Potentially escaping.
649 x = b.addr(fn, e.X, true).address(fn)
650 case *types.Basic, *types.Slice, *types.Pointer: // *array
656 high = b.expr(fn, e.High)
659 low = b.expr(fn, e.Low)
662 max = b.expr(fn, e.Max)
// Identifier: built-in, nil, package-level, or local.
675 obj := fn.Pkg.info.Uses[e]
676 // Universal built-in or nil?
677 switch obj := obj.(type) {
679 return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)}
681 return nilConst(tv.Type)
683 // Package-level func or var?
684 if v := fn.Prog.packageLevelValue(obj); v != nil {
685 if _, ok := obj.(*types.Var); ok {
686 return emitLoad(fn, v) // var (address)
691 return emitLoad(fn, fn.lookup(obj, false)) // var (address)
693 case *ast.SelectorExpr:
694 sel, ok := fn.Pkg.info.Selections[e]
696 // qualified identifier
697 return b.expr(fn, e.Sel)
700 case types.MethodExpr:
701 // (*T).f or T.f, the method f from the method-set of type T.
702 // The result is a "thunk".
703 return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type)
705 case types.MethodVal:
706 // e.f where e is an expression and f is a method.
707 // The result is a "bound".
708 obj := sel.Obj().(*types.Func)
710 wantAddr := isPointer(rt)
712 v := b.receiver(fn, e.X, wantAddr, escaping, sel)
714 // If v has interface type I,
715 // we must emit a check that v is non-nil.
716 // We use: typeassert v.(I).
717 emitTypeAssert(fn, v, rt, token.NoPos)
720 Fn: makeBound(fn.Prog, obj),
721 Bindings: []Value{v},
723 c.setPos(e.Sel.Pos())
// Presumably FieldVal: implicit selections then the final field.
728 indices := sel.Index()
729 last := len(indices) - 1
731 v = emitImplicitSelections(fn, v, indices[:last])
732 v = emitFieldSelection(fn, v, indices[last], false, e.Sel)
736 panic("unexpected expression-relative selector")
// Index expression in rvalue position.
739 switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
741 // Non-addressable array (in a register).
744 Index: emitConv(fn, b.expr(fn, e.Index), tInt),
751 // Maps are not addressable.
752 mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
755 Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
758 v.setType(mapt.Elem())
761 case *types.Basic: // => string
762 // Strings are not addressable.
765 Index: b.expr(fn, e.Index),
771 case *types.Slice, *types.Pointer: // *array
772 // Addressable slice/array; use IndexAddr and Load.
773 return b.addr(fn, e, false).load(fn)
776 panic("unexpected container type in IndexExpr: " + t.String())
779 case *ast.CompositeLit, *ast.StarExpr:
780 // Addressable types (lvalues)
781 return b.addr(fn, e, false).load(fn)
784 panic(fmt.Sprintf("unexpected expr: %T", e))
787 // stmtList emits to fn code for all statements in list.
788 func (b *builder) stmtList(fn *Function, list []ast.Stmt) {
// NOTE(review): the loop body (original line ~790, presumably a call
// to b.stmt) and closing braces are missing from this listing.
789 for _, s := range list {
794 // receiver emits to fn code for expression e in the "receiver"
795 // position of selection e.f (where f may be a field or a method) and
796 // returns the effective receiver after applying the implicit field
797 // selections of sel.
799 // wantAddr requests that the result is an address. If
800 // !sel.Indirect(), this may require that e be built in addr() mode; it
801 // must thus be addressable.
803 // escaping is defined as per builder.addr().
// receiver lowers the receiver expression e of selection e.f, applying
// sel's implicit field selections and taking/dereferencing an address
// as needed to match wantAddr.
// NOTE(review): listing omits interior lines (source numbering jumps),
// including the else-branch and the final dereference/return.
805 func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value {
// Need an address but e is a non-pointer value: build in addr mode.
807 if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) {
808 v = b.addr(fn, e, escaping).address(fn)
// Apply all but the last implicit selection.
813 last := len(sel.Index()) - 1
814 v = emitImplicitSelections(fn, v, sel.Index()[:last])
815 if !wantAddr && isPointer(v.Type()) {
821 // setCallFunc populates the function parts of a CallCommon structure
822 // (Func, Method, Recv, Args[0]) based on the kind of invocation
// setCallFunc populates the function parts of CallCommon c (Func,
// Method, Recv, Args[0]) for call expression e, distinguishing method
// calls ("call" vs "invoke" mode) from ordinary function calls.
// NOTE(review): listing omits interior lines (source numbering jumps),
// including the interface (invoke-mode) branch around line 837.
825 func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
828 // Is this a method call?
829 if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok {
830 sel, ok := fn.Pkg.info.Selections[selector]
831 if ok && sel.Kind() == types.MethodVal {
832 obj := sel.Obj().(*types.Func)
833 recv := recvType(obj)
834 wantAddr := isPointer(recv)
836 v := b.receiver(fn, selector.X, wantAddr, escaping, sel)
837 if isInterface(recv) {
// "Call" mode: statically dispatched; receiver becomes Args[0].
843 c.Value = fn.Prog.declaredFunc(obj)
844 c.Args = append(c.Args, v)
849 // sel.Kind()==MethodExpr indicates T.f() or (*T).f():
850 // a statically dispatched call to the method f in the
851 // method-set of T or *T. T may be an interface.
853 // e.Fun would evaluate to a concrete method, interface
854 // wrapper function, or promotion wrapper.
856 // For now, we evaluate it in the usual way.
858 // TODO(adonovan): opt: inline expr() here, to make the
859 // call static and to avoid generation of wrappers.
860 // It's somewhat tricky as it may consume the first
861 // actual parameter if the call is "invoke" mode.
864 // type T struct{}; func (T) f() {} // "call" mode
865 // type T interface { f() } // "invoke" mode
867 // type S struct{ T }
873 // Suggested approach:
874 // - consume the first actual parameter expression
875 // and build it with b.expr().
876 // - apply implicit field selections.
877 // - use MethodVal logic to populate fields of c.
880 // Evaluate the function operand in the usual way.
881 c.Value = b.expr(fn, e.Fun)
884 // emitCallArgs emits to f code for the actual parameters of call e to
885 // a (possibly built-in) function of effective type sig.
886 // The argument values are appended to args, which is then returned.
// emitCallArgs evaluates the actual parameters of call e against
// signature sig, flattening multi-return chained calls, applying
// actual->formal conversions, and packaging variadic arguments into a
// slice. The converted values are appended to args and returned.
// NOTE(review): listing omits interior lines (source numbering jumps),
// including the Ellipsis check guarding the pass-through loop and the
// final Slice construction around line 950.
888 func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value {
889 // f(x, y, z...): pass slice z straight through.
891 for i, arg := range e.Args {
892 v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type())
893 args = append(args, v)
898 offset := len(args) // 1 if call has receiver, 0 otherwise
900 // Evaluate actual parameter expressions.
902 // If this is a chained call of the form f(g()) where g has
903 // multiple return values (MRV), they are flattened out into
904 // args; a suffix of them may end up in a varargs slice.
905 for _, arg := range e.Args {
907 if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain
908 for i, n := 0, ttuple.Len(); i < n; i++ {
909 args = append(args, emitExtract(fn, v, i))
912 args = append(args, v)
916 // Actual->formal assignability conversions for normal parameters.
917 np := sig.Params().Len() // number of normal parameters
921 for i := 0; i < np; i++ {
922 args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type())
925 // Actual->formal assignability conversions for variadic parameter,
926 // and construction of slice.
928 varargs := args[offset+np:]
929 st := sig.Params().At(np).Type().(*types.Slice)
931 if len(varargs) == 0 {
932 args = append(args, nilConst(st))
934 // Replace a suffix of args with a slice containing it.
935 at := types.NewArray(vt, int64(len(varargs)))
936 a := emitNew(fn, at, token.NoPos)
938 a.Comment = "varargs"
939 for i, arg := range varargs {
942 Index: intConst(int64(i)),
944 iaddr.setType(types.NewPointer(vt))
946 emitStore(fn, iaddr, arg, arg.Pos())
950 args[offset+np] = fn.emit(s)
951 args = args[:offset+np+1]
957 // setCall emits to fn code to evaluate all the parameters of a function
958 // call e, and populates *c with those values.
// setCall populates *c with the callee and the evaluated actual
// parameters of call expression e.
960 func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
961 // First deal with the f(...) part and optional receiver.
962 b.setCallFunc(fn, e, c)
964 // Then append the other actual parameters.
965 sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature)
// A nil signature is a builder invariant violation.
967 panic(fmt.Sprintf("no signature for call of %s", e.Fun))
969 c.Args = b.emitCallArgs(fn, sig, e, c.Args)
972 // assignOp emits to fn code to perform loc <op>= val.
// assignOp emits code for the augmented assignment loc <op>= val,
// converting val to the loaded value's type before the arithmetic.
// NOTE(review): the load of oldv (original line ~974) is missing from
// this listing.
973 func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) {
975 loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type()), loc.typ(), pos))
978 // localValueSpec emits to fn code to define all of the vars in the
979 // function-local ValueSpec, spec.
// localValueSpec emits code defining all vars of a function-local
// ValueSpec: 1:1 initializers, no initializers (zero values), or a
// single multi-result initializer.
// NOTE(review): listing omits interior lines (source numbering jumps),
// including the enclosing switch keyword and the default case label.
981 func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
983 case len(spec.Values) == len(spec.Names):
984 // e.g. var x, y = 0, 1
986 for i, id := range spec.Names {
987 if !isBlankIdent(id) {
988 fn.addLocalForIdent(id)
990 lval := b.addr(fn, id, false) // non-escaping
991 b.assign(fn, lval, spec.Values[i], true, nil)
994 case len(spec.Values) == 0:
996 // Locals are implicitly zero-initialized.
997 for _, id := range spec.Names {
998 if !isBlankIdent(id) {
999 lhs := fn.addLocalForIdent(id)
1001 emitDebugRef(fn, id, lhs, true)
1007 // e.g. var x, y = pos()
1008 tuple := b.exprN(fn, spec.Values[0])
1009 for i, id := range spec.Names {
1010 if !isBlankIdent(id) {
1011 fn.addLocalForIdent(id)
1012 lhs := b.addr(fn, id, false) // non-escaping
1013 lhs.store(fn, emitExtract(fn, tuple, i))
1019 // assignStmt emits code to fn for a parallel assignment of rhss to lhss.
1020 // isDef is true if this is a short variable declaration (:=).
1022 // Note the similarity with localValueSpec.
// assignStmt emits code for a parallel assignment of rhss to lhss;
// isDef marks a short variable declaration (:=). LHS lvalues are
// computed first, then RHS values, buffered via a storebuf so reads
// precede writes.
// NOTE(review): listing omits interior lines (source numbering jumps),
// including the storebuf declaration and sb.emit call.
1024 func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) {
1025 // Side effects of all LHSs and RHSs must occur in left-to-right order.
1026 lvals := make([]lvalue, len(lhss))
1027 isZero := make([]bool, len(lhss))
1028 for i, lhs := range lhss {
1029 var lval lvalue = blank{}
1030 if !isBlankIdent(lhs) {
// := defines a new local for each newly declared identifier.
1032 if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil {
1033 fn.addNamedLocal(obj)
1037 lval = b.addr(fn, lhs, false) // non-escaping
1041 if len(lhss) == len(rhss) {
1042 // Simple assignment: x = f() (!isDef)
1043 // Parallel assignment: x, y = f(), g() (!isDef)
1044 // or short var decl: x, y := f(), g() (isDef)
1046 // In all cases, the RHSs may refer to the LHSs,
1047 // so we need a storebuf.
1049 for i := range rhss {
1050 b.assign(fn, lvals[i], rhss[i], isZero[i], &sb)
1054 // e.g. x, y = pos()
1055 tuple := b.exprN(fn, rhss[0])
1056 emitDebugRef(fn, rhss[0], tuple, false)
1057 for i, lval := range lvals {
1058 lval.store(fn, emitExtract(fn, tuple, i))
1063 // arrayLen returns the length of the array whose composite literal elements are elts.
// arrayLen returns the length of the array whose composite-literal
// elements are elts, accounting for explicit keyed indices.
// NOTE(review): listing omits interior lines (source numbering jumps),
// including the accumulator declarations and the return.
1064 func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 {
1067 for _, e := range elts {
// Keyed element: the key must be a constant index expression.
1068 if kv, ok := e.(*ast.KeyValueExpr); ok {
1069 i = b.expr(fn, kv.Key).(*Const).Int64()
1080 // compLit emits to fn code to initialize a composite literal e at
1081 // address addr with type typ.
1083 // Nested composite literals are recursively initialized in place
1084 // where possible. If isZero is true, compLit assumes that addr
1085 // holds the zero value for typ.
1087 // Because the elements of a composite literal may refer to the
1088 // variables being updated, as in the second line below,
1091 // all the reads must occur before all the writes. Thus all stores to
1092 // loc are emitted to the storebuf sb for later execution.
1094 // A CompositeLit may have pointer type only in the recursive (nested)
1095 // case when the type name is implicit. e.g. in []*T{{}}, the inner
1096 // literal has type *T and behaves like &T{}.
1097 // In that case, addr must hold a T, not a *T.
// compLit emits code initializing composite literal e at address addr;
// all stores to the destination go through storebuf sb so that reads
// of the updated variable precede writes (see the function's doc
// comment above). Handles struct, array/slice, and map literals.
// NOTE(review): listing omits many interior lines (source numbering
// jumps); case labels, some declarations, and closing braces are
// absent, so groupings below are partly inferred.
1099 func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) {
1100 typ := deref(fn.Pkg.typeOf(e))
1101 switch t := typ.Underlying().(type) {
// Struct literal: zero the whole struct first if partially specified.
1103 if !isZero && len(e.Elts) != t.NumFields() {
1105 sb.store(&address{addr, e.Lbrace, nil},
1106 zeroValue(fn, deref(addr.Type())))
1109 for i, e := range e.Elts {
// Keyed field: resolve the field index by name.
1112 if kv, ok := e.(*ast.KeyValueExpr); ok {
1113 fname := kv.Key.(*ast.Ident).Name
1114 for i, n := 0, t.NumFields(); i < n; i++ {
1116 if sf.Name() == fname {
1124 sf := t.Field(fieldIndex)
1125 faddr := &FieldAddr{
1129 faddr.setType(types.NewPointer(sf.Type()))
1131 b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb)
1134 case *types.Array, *types.Slice:
1137 switch t := t.(type) {
// Slice literal: allocate an unaliased backing array.
1139 at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts))
1140 alloc := emitNew(fn, at, e.Lbrace)
1141 alloc.Comment = "slicelit"
1147 if !isZero && int64(len(e.Elts)) != at.Len() {
1149 sb.store(&address{array, e.Lbrace, nil},
1150 zeroValue(fn, deref(array.Type())))
1155 for _, e := range e.Elts {
// Keyed element: constant index; otherwise next sequential index.
1157 if kv, ok := e.(*ast.KeyValueExpr); ok {
1158 idx = b.expr(fn, kv.Key).(*Const)
1164 idxval = idx.Int64() + 1
1166 idx = intConst(idxval)
1168 iaddr := &IndexAddr{
1172 iaddr.setType(types.NewPointer(at.Elem()))
1174 if t != at { // slice
1175 // backing array is unaliased => storebuf not needed.
1176 b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil)
1178 b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb)
1182 if t != at { // slice
// Wrap the fresh backing array in a Slice value.
1183 s := &Slice{X: array}
1186 sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s))
// Map literal: allocate with capacity hint, then insert each entry.
1190 m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))}
1194 for _, e := range e.Elts {
1195 e := e.(*ast.KeyValueExpr)
1197 // If a key expression in a map literal is itself a
1198 // composite literal, the type may be omitted.
1200 // map[*struct{}]bool{{}: true}
1201 // An &-operation may be implied:
1202 // map[*struct{}]bool{&struct{}{}: true}
1204 if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) {
1205 // A CompositeLit never evaluates to a pointer,
1206 // so if the type of the location is a pointer,
1207 // an &-operation is implied.
1208 key = b.addr(fn, e.Key, true).address(fn)
1210 key = b.expr(fn, e.Key)
1215 k: emitConv(fn, key, t.Key()),
1220 // We call assign() only because it takes care
1221 // of any &-operation required in the recursive
1223 // map[int]*struct{}{0: {}} implies &struct{}{}.
1224 // In-place update is of course impossible,
1225 // and no storebuf is needed.
1226 b.assign(fn, &loc, e.Value, true, nil)
1228 sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m)
1231 panic("unexpected CompositeLit type: " + t.String())
1235 // switchStmt emits to fn code for the switch statement s, optionally
1236 // labelled by label.
// switchStmt emits code for switch statement s (optionally labelled) as
// a sequential if-else chain, with the default clause deferred to the
// end and per-case fallthrough targets preallocated.
// NOTE(review): listing omits interior lines (source numbering jumps),
// including the default-clause capture and several emitJump calls.
1238 func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
1239 // We treat SwitchStmt like a sequential if-else chain.
1240 // Multiway dispatch can be recovered later by ssautil.Switches()
1241 // to those cases that are free of side effects.
// Tagless switch is equivalent to "switch true".
1245 var tag Value = vTrue
1247 tag = b.expr(fn, s.Tag)
1249 done := fn.newBasicBlock("switch.done")
1253 // We pull the default case (if present) down to the end.
1254 // But each fallthrough label must point to the next
1255 // body block in source order, so we preallocate a
1256 // body block (fallthru) for the next case.
1257 // Unfortunately this makes for a confusing block order.
1258 var dfltBody *[]ast.Stmt
1259 var dfltFallthrough *BasicBlock
1260 var fallthru, dfltBlock *BasicBlock
1261 ncases := len(s.Body.List)
1262 for i, clause := range s.Body.List {
1265 body = fn.newBasicBlock("switch.body") // first case only
1268 // Preallocate body block for the next case.
1271 fallthru = fn.newBasicBlock("switch.body")
1274 cc := clause.(*ast.CaseClause)
1278 dfltFallthrough = fallthru
// Emit one comparison + branch per case value.
1283 var nextCond *BasicBlock
1284 for _, cond := range cc.List {
1285 nextCond = fn.newBasicBlock("switch.next")
1286 // TODO(adonovan): opt: when tag==vTrue, we'd
1287 // get better code if we use b.cond(cond)
1288 // instead of BinOp(EQL, tag, b.expr(cond))
1289 // followed by If. Don't forget conversions
1291 cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos)
1292 emitIf(fn, cond, body, nextCond)
1293 fn.currentBlock = nextCond
// Build the case body with break/fallthrough targets in scope.
1295 fn.currentBlock = body
1296 fn.targets = &targets{
1299 _fallthrough: fallthru,
1301 b.stmtList(fn, cc.Body)
1302 fn.targets = fn.targets.tail
1304 fn.currentBlock = nextCond
// Finally emit the deferred default clause, if any.
1306 if dfltBlock != nil {
1307 emitJump(fn, dfltBlock)
1308 fn.currentBlock = dfltBlock
1309 fn.targets = &targets{
1312 _fallthrough: dfltFallthrough,
1314 b.stmtList(fn, *dfltBody)
1315 fn.targets = fn.targets.tail
1318 fn.currentBlock = done
1321 // typeSwitchStmt emits to fn code for the type switch statement s, optionally
1322 // labelled by label.
// typeSwitchStmt emits code for type switch statement s (optionally
// labelled) as a sequential chain of typeassert,ok tests; the default
// clause is deferred to the end (see the lowering sketch below).
// NOTE(review): listing omits interior lines (source numbering jumps),
// including the default-clause capture and break-target setup.
1324 func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) {
1325 // We treat TypeSwitchStmt like a sequential if-else chain.
1326 // Multiway dispatch can be recovered later by ssautil.Switches().
1328 // Typeswitch lowering:
1331 // switch y := x.(type) {
1332 // case T1, T2: S1 // >1 (y := x)
1333 // case nil: SN // nil (y := x)
1334 // default: SD // 0 types (y := x)
1335 // case T3: S3 // 1 type (y := x.(T3))
1341 // t1, ok1 := typeswitch,ok x <T1>
1342 // if ok1 then goto S1 else goto .caseT2
1344 // t2, ok2 := typeswitch,ok x <T2>
1345 // if ok2 then goto S1 else goto .caseNil
1351 // if t2, ok2 := typeswitch,ok x <T2>
1352 // if x == nil then goto SN else goto .caseT3
1358 // t3, ok3 := typeswitch,ok x <T3>
1359 // if ok3 then goto S3 else goto default
// Evaluate the switch operand x once, up front.
1375 switch ass := s.Assign.(type) {
1376 case *ast.ExprStmt: // x.(type)
1377 x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X)
1378 case *ast.AssignStmt: // y := x.(type)
1379 x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X)
1382 done := fn.newBasicBlock("typeswitch.done")
1386 var default_ *ast.CaseClause
1387 for _, clause := range s.Body.List {
1388 cc := clause.(*ast.CaseClause)
1393 body := fn.newBasicBlock("typeswitch.body")
1394 var next *BasicBlock
1395 var casetype types.Type
1396 var ti Value // ti, ok := typeassert,ok x <Ti>
1397 for _, cond := range cc.List {
1398 next = fn.newBasicBlock("typeswitch.next")
1399 casetype = fn.Pkg.typeOf(cond)
// "case nil" compiles to a nil comparison, not a type test.
1401 if casetype == tUntypedNil {
1402 condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), token.NoPos)
1405 yok := emitTypeTest(fn, x, casetype, cc.Case)
1406 ti = emitExtract(fn, yok, 0)
1407 condv = emitExtract(fn, yok, 1)
1409 emitIf(fn, condv, body, next)
1410 fn.currentBlock = next
// With multiple case types, y keeps the interface operand's type.
1412 if len(cc.List) != 1 {
1415 fn.currentBlock = body
1416 b.typeCaseBody(fn, cc, ti, done)
1417 fn.currentBlock = next
1419 if default_ != nil {
1420 b.typeCaseBody(fn, default_, x, done)
1424 fn.currentBlock = done
// typeCaseBody emits to fn the body of one case clause cc of a type
// switch. x is the value to bind to the clause's implicit variable, if
// any; control continues at done after the body.
1427 func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) {
1428 if obj := fn.Pkg.info.Implicits[cc]; obj != nil {
1429 // In a switch y := x.(type), each case clause
1430 // implicitly declares a distinct object y.
1431 // In a single-type case, y has that type.
1432 // In multi-type cases, 'case nil' and default,
1433 // y has the same type as the interface operand.
1434 emitStore(fn, fn.addNamedLocal(obj), x, obj.Pos())
// Push a 'break' target for the duration of the clause body,
// then restore the previous target stack.
1436 fn.targets = &targets{
1440 b.stmtList(fn, cc.Body)
1441 fn.targets = fn.targets.tail
1445 // selectStmt emits to fn code for the select statement s, optionally
1446 // labelled by label.
1448 func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) {
1449 // A blocking select of a single case degenerates to a
1450 // simple send or receive.
1451 // TODO(adonovan): opt: is this optimization worth its weight?
1452 if len(s.Body.List) == 1 {
1453 clause := s.Body.List[0].(*ast.CommClause)
1454 if clause.Comm != nil {
1455 b.stmt(fn, clause.Comm)
1456 done := fn.newBasicBlock("select.done")
1460 fn.targets = &targets{
1464 b.stmtList(fn, clause.Body)
1465 fn.targets = fn.targets.tail
1467 fn.currentBlock = done
1472 // First evaluate all channels in all cases, and find
1473 // the directions of each state.
1474 var states []*SelectState
1476 debugInfo := fn.debugInfo()
1477 for _, clause := range s.Body.List {
1479 switch comm := clause.(*ast.CommClause).Comm.(type) {
1480 case nil: // default case
1484 case *ast.SendStmt: // ch<- i
// For a send case, both the channel and the value are
// evaluated now, in source order.
1485 ch := b.expr(fn, comm.Chan)
1487 Dir: types.SendOnly,
1489 Send: emitConv(fn, b.expr(fn, comm.Value),
1490 ch.Type().Underlying().(*types.Chan).Elem()),
1497 case *ast.AssignStmt: // x := <-ch
1498 recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr)
1500 Dir: types.RecvOnly,
1501 Chan: b.expr(fn, recv.X),
1508 case *ast.ExprStmt: // <-ch
1509 recv := unparen(comm.X).(*ast.UnaryExpr)
1511 Dir: types.RecvOnly,
1512 Chan: b.expr(fn, recv.X),
1519 states = append(states, st)
1522 // We dispatch on the (fair) result of Select using a
1523 // sequential if-else chain, in effect:
1525 // idx, recvOk, r0...r_n-1 := select(...)
1526 // if idx == 0 { // receive on channel 0 (first receive => r0)
1527 // x, ok := r0, recvOk
1529 // } else if v == 1 { // send on channel 1
// sel's result tuple is (index, recvOk, r0, r1, ...): one extra
// component per receive case, in clause order.
1538 sel.setPos(s.Select)
1539 var vars []*types.Var
1540 vars = append(vars, varIndex, varOk)
1541 for _, st := range states {
1542 if st.Dir == types.RecvOnly {
1543 tElem := st.Chan.Type().Underlying().(*types.Chan).Elem()
1544 vars = append(vars, anonVar(tElem))
1547 sel.setType(types.NewTuple(vars...))
1550 idx := emitExtract(fn, sel, 0)
1552 done := fn.newBasicBlock("select.done")
1557 var defaultBody *[]ast.Stmt
1559 r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV
1560 for _, cc := range s.Body.List {
1561 clause := cc.(*ast.CommClause)
// Remember the default clause's body; it is emitted after the
// dispatch chain, not as a numbered state.
1562 if clause.Comm == nil {
1563 defaultBody = &clause.Body
1566 body := fn.newBasicBlock("select.body")
1567 next := fn.newBasicBlock("select.next")
1568 emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next)
1569 fn.currentBlock = body
1570 fn.targets = &targets{
1574 switch comm := clause.Comm.(type) {
1575 case *ast.ExprStmt: // <-ch
1577 v := emitExtract(fn, sel, r)
1578 emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
1582 case *ast.AssignStmt: // x := <-states[state].Chan
1583 if comm.Tok == token.DEFINE {
1584 fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident))
1586 x := b.addr(fn, comm.Lhs[0], false) // non-escaping
1587 v := emitExtract(fn, sel, r)
1589 emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
// The optional second LHS receives the shared recvOk
// component (tuple index 1).
1593 if len(comm.Lhs) == 2 { // x, ok := ...
1594 if comm.Tok == token.DEFINE {
1595 fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident))
1597 ok := b.addr(fn, comm.Lhs[1], false) // non-escaping
1598 ok.store(fn, emitExtract(fn, sel, 1))
1602 b.stmtList(fn, clause.Body)
1603 fn.targets = fn.targets.tail
1605 fn.currentBlock = next
1608 if defaultBody != nil {
1609 fn.targets = &targets{
1613 b.stmtList(fn, *defaultBody)
1614 fn.targets = fn.targets.tail
1616 // A blocking select must match some case.
1617 // (This should really be a runtime.errorString, not a string.)
// With no default clause, falling through the dispatch chain is a
// runtime panic; the subsequent block is unreachable.
1619 X: emitConv(fn, stringConst("blocking select matched no case"), tEface),
1621 fn.currentBlock = fn.newBasicBlock("unreachable")
1624 fn.currentBlock = done
1627 // forStmt emits to fn code for the for statement s, optionally
1628 // labelled by label.
1630 func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
// CFG sketch (blocks are created lazily below):
1634 // if cond goto body else done
1638 // post: (target of continue)
1641 // done: (target of break)
1645 body := fn.newBasicBlock("for.body")
1646 done := fn.newBasicBlock("for.done") // target of 'break'
1647 loop := body // target of back-edge
// NOTE(review): the distinct loop-head and post blocks below are
// presumably created only when s.Cond / s.Post are non-nil
// (guards elided in this view) — otherwise loop/cont alias body/loop.
1649 loop = fn.newBasicBlock("for.loop")
1651 cont := loop // target of 'continue'
1653 cont = fn.newBasicBlock("for.post")
1657 label._continue = cont
1660 fn.currentBlock = loop
1662 b.cond(fn, s.Cond, body, done)
1663 fn.currentBlock = body
// Push break/continue targets for the loop body, then pop them.
1665 fn.targets = &targets{
1671 fn.targets = fn.targets.tail
1675 fn.currentBlock = cont
1677 emitJump(fn, loop) // back-edge
1679 fn.currentBlock = done
1682 // rangeIndexed emits to fn the header for an integer-indexed loop
1683 // over array, *array or slice value x.
1684 // The v result is defined only if tv is non-nil.
1685 // forPos is the position of the "for" token.
1687 func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
1691 // loop: (target of continue)
1693 // if index < length goto body else done
1699 // done: (target of break)
1701 // Determine number of iterations.
1703 if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok {
1704 // For array or *array, the number of iterations is
1705 // known statically thanks to the type. We avoid a
1706 // data dependence upon x, permitting later dead-code
1707 // elimination if x is pure, static unrolling, etc.
1708 // Ranging over a nil *array may have >0 iterations.
1709 // We still generate code for x, in case it has effects.
1710 length = intConst(arr.Len())
// Slices: length must be computed dynamically via len(x).
1714 c.Call.Value = makeLen(x.Type())
1715 c.Call.Args = []Value{x}
1717 length = fn.emit(&c)
// The index variable starts at -1 and is pre-incremented at the top
// of each iteration, so the first body execution sees index 0.
1720 index := fn.addLocal(tInt, token.NoPos)
1721 emitStore(fn, index, intConst(-1), pos)
1723 loop = fn.newBasicBlock("rangeindex.loop")
1725 fn.currentBlock = loop
1729 X: emitLoad(fn, index),
1733 emitStore(fn, index, fn.emit(incr), pos)
1735 body := fn.newBasicBlock("rangeindex.body")
1736 done = fn.newBasicBlock("rangeindex.done")
1737 emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done)
1738 fn.currentBlock = body
1740 k = emitLoad(fn, index)
// The element value v is loaded differently per operand kind:
// array (direct index), *array and slice (IndexAddr then load).
1742 switch t := x.Type().Underlying().(type) {
1748 instr.setType(t.Elem())
1749 instr.setPos(x.Pos())
1752 case *types.Pointer: // *array
1753 instr := &IndexAddr{
1757 instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()))
1758 instr.setPos(x.Pos())
1759 v = emitLoad(fn, fn.emit(instr))
1762 instr := &IndexAddr{
1766 instr.setType(types.NewPointer(t.Elem()))
1767 instr.setPos(x.Pos())
1768 v = emitLoad(fn, fn.emit(instr))
1771 panic("rangeIndexed x:" + t.String())
1777 // rangeIter emits to fn the header for a loop using
1778 // Range/Next/Extract to iterate over map or string value x.
1779 // tk and tv are the types of the key/value results k and v, or nil
1780 // if the respective component is not wanted.
1782 func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
1785 // loop: (target of continue)
1786 // okv = next it (ok, key, value)
1787 // ok = extract okv #0
1788 // if ok goto body else done
1790 // k = extract okv #1
1791 // v = extract okv #2
1794 // done: (target of break)
// The iterator value has the opaque tRangeIter type; it is consumed
// only by the Next instruction emitted in the loop head.
1806 rng.setType(tRangeIter)
1809 loop = fn.newBasicBlock("rangeiter.loop")
1811 fn.currentBlock = loop
// Strings and maps share this path; IsString distinguishes them
// for the Next instruction. (x is a string iff its underlying
// type is *types.Basic.)
1813 _, isString := x.Type().Underlying().(*types.Basic)
1819 okv.setType(types.NewTuple(
1826 body := fn.newBasicBlock("rangeiter.body")
1827 done = fn.newBasicBlock("rangeiter.done")
1828 emitIf(fn, emitExtract(fn, okv, 0), body, done)
1829 fn.currentBlock = body
1832 k = emitExtract(fn, okv, 1)
1835 v = emitExtract(fn, okv, 2)
1840 // rangeChan emits to fn the header for a loop that receives from
1841 // channel x until it fails.
1842 // tk is the channel's element type, or nil if the k result is
1844 // pos is the position of the '=' or ':=' token.
1846 func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) {
1848 // loop: (target of continue)
1849 // ko = <-x (key, ok)
1850 // ok = extract ko #1
1851 // if ok goto body else done
1853 // k = extract ko #0
1856 // done: (target of break)
1858 loop = fn.newBasicBlock("rangechan.loop")
1860 fn.currentBlock = loop
// The comma-ok receive yields a (element, ok) tuple; ok is false
// once the channel is closed and drained, terminating the loop.
1867 recv.setType(types.NewTuple(
1868 newVar("k", x.Type().Underlying().(*types.Chan).Elem()),
1872 body := fn.newBasicBlock("rangechan.body")
1873 done = fn.newBasicBlock("rangechan.done")
1874 emitIf(fn, emitExtract(fn, ko, 1), body, done)
1875 fn.currentBlock = body
1877 k = emitExtract(fn, ko, 0)
1882 // rangeStmt emits to fn code for the range statement s, optionally
1883 // labelled by label.
1885 func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) {
// tk/tv stay nil for absent or blank ('_') iteration variables,
// which lets the range helpers skip computing unwanted components.
1886 var tk, tv types.Type
1887 if s.Key != nil && !isBlankIdent(s.Key) {
1888 tk = fn.Pkg.typeOf(s.Key)
1890 if s.Value != nil && !isBlankIdent(s.Value) {
1891 tv = fn.Pkg.typeOf(s.Value)
1894 // If iteration variables are defined (:=), this
1895 // occurs once outside the loop.
1897 // Unlike a short variable declaration, a RangeStmt
1898 // using := never redeclares an existing variable; it
1899 // always creates a new one.
1900 if s.Tok == token.DEFINE {
1902 fn.addLocalForIdent(s.Key.(*ast.Ident))
1905 fn.addLocalForIdent(s.Value.(*ast.Ident))
1909 x := b.expr(fn, s.X)
// Dispatch on the operand's underlying type to the appropriate
// loop-header helper; each returns the loop and done blocks.
1912 var loop, done *BasicBlock
1913 switch rt := x.Type().Underlying().(type) {
1914 case *types.Slice, *types.Array, *types.Pointer: // *array
1915 k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For)
1918 k, loop, done = b.rangeChan(fn, x, tk, s.For)
1920 case *types.Map, *types.Basic: // string
1921 k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For)
1924 panic("Cannot range over: " + rt.String())
1927 // Evaluate both LHS expressions before we update either.
1930 kl = b.addr(fn, s.Key, false) // non-escaping
1933 vl = b.addr(fn, s.Value, false) // non-escaping
1944 label._continue = loop
// Push break/continue targets for the body, pop them afterwards,
// then close the loop with the back-edge.
1947 fn.targets = &targets{
1953 fn.targets = fn.targets.tail
1954 emitJump(fn, loop) // back-edge
1955 fn.currentBlock = done
1958 // stmt lowers statement s to SSA form, emitting code to fn.
1959 func (b *builder) stmt(fn *Function, _s ast.Stmt) {
1960 // The label of the current statement. If non-nil, its _goto
1961 // target is always set; its _break and _continue are set only
1962 // within the body of switch/typeswitch/select/for/range.
1963 // It is effectively an additional default-nil parameter of stmt().
1966 switch s := _s.(type) {
1967 case *ast.EmptyStmt:
1968 // ignore. (Usually removed by gofmt.)
1970 case *ast.DeclStmt: // Con, Var or Typ
// Only var declarations generate code; consts and types were
// handled during the CREATE phase.
1971 d := s.Decl.(*ast.GenDecl)
1972 if d.Tok == token.VAR {
1973 for _, spec := range d.Specs {
1974 if vs, ok := spec.(*ast.ValueSpec); ok {
1975 b.localValueSpec(fn, vs)
1980 case *ast.LabeledStmt:
// A labelled statement becomes a dedicated block so that
// goto/break/continue can target it, then we lower the
// wrapped statement with label as context.
1981 label = fn.labelledBlock(s.Label)
1982 emitJump(fn, label._goto)
1983 fn.currentBlock = label._goto
1985 goto start // effectively: tailcall stmt(fn, s.Stmt, label)
1992 Chan: b.expr(fn, s.Chan),
1993 X: emitConv(fn, b.expr(fn, s.Value),
1994 fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem()),
1998 case *ast.IncDecStmt:
// x++ / x-- lower to x += 1 / x -= 1 via assignOp.
2000 if s.Tok == token.DEC {
2003 loc := b.addr(fn, s.X, false)
2004 b.assignOp(fn, loc, NewConst(constant.MakeInt64(1), loc.typ()), op, s.Pos())
2006 case *ast.AssignStmt:
2008 case token.ASSIGN, token.DEFINE:
2009 b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE)
2011 default: // +=, etc.
// Map the compound token onto the plain binary operator by
// exploiting the contiguous token ranges in go/token.
2012 op := s.Tok + token.ADD - token.ADD_ASSIGN
2013 b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos())
2017 // The "intrinsics" new/make/len/cap are forbidden here.
2018 // panic is treated like an ordinary function call.
2020 b.setCall(fn, s.Call, &v.Call)
2023 case *ast.DeferStmt:
2024 // The "intrinsics" new/make/len/cap are forbidden here.
2025 // panic is treated like an ordinary function call.
2026 v := Defer{pos: s.Defer}
2027 b.setCall(fn, s.Call, &v.Call)
2030 // A deferred call can cause recovery from panic,
2031 // and control resumes at the Recover block.
2032 createRecoverBlock(fn)
2034 case *ast.ReturnStmt:
2036 if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 {
2037 // Return of one expression in a multi-valued function.
2038 tuple := b.exprN(fn, s.Results[0])
2039 ttuple := tuple.Type().(*types.Tuple)
2040 for i, n := 0, ttuple.Len(); i < n; i++ {
2041 results = append(results,
2042 emitConv(fn, emitExtract(fn, tuple, i),
2043 fn.Signature.Results().At(i).Type()))
2046 // 1:1 return, or no-arg return in non-void function.
2047 for i, r := range s.Results {
2048 v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type())
2049 results = append(results, v)
2052 if fn.namedResults != nil {
2053 // Function has named result parameters (NRPs).
2054 // Perform parallel assignment of return operands to NRPs.
2055 for i, r := range results {
2056 emitStore(fn, fn.namedResults[i], r, s.Return)
2059 // Run function calls deferred in this
2060 // function when explicitly returning from it.
2061 fn.emit(new(RunDefers))
2062 if fn.namedResults != nil {
2063 // Reload NRPs to form the result tuple.
2064 // (Deferred calls may have modified them after the
2065 // stores above, so the reload is required.)
2064 results = results[:0]
2065 for _, r := range fn.namedResults {
2066 results = append(results, emitLoad(fn, r))
2069 fn.emit(&Return{Results: results, pos: s.Return})
2070 fn.currentBlock = fn.newBasicBlock("unreachable")
2072 case *ast.BranchStmt:
// break/continue/fallthrough/goto: resolve the target block
// either from an explicit label or from the innermost
// enclosing targets-stack entry that supplies one.
2073 var block *BasicBlock
2077 block = fn.labelledBlock(s.Label)._break
2079 for t := fn.targets; t != nil && block == nil; t = t.tail {
2084 case token.CONTINUE:
2086 block = fn.labelledBlock(s.Label)._continue
2088 for t := fn.targets; t != nil && block == nil; t = t.tail {
2093 case token.FALLTHROUGH:
2094 for t := fn.targets; t != nil && block == nil; t = t.tail {
2095 block = t._fallthrough
2099 block = fn.labelledBlock(s.Label)._goto
// Code after an unconditional branch is unreachable; give it a
// fresh block so building can continue.
2102 fn.currentBlock = fn.newBasicBlock("unreachable")
2104 case *ast.BlockStmt:
2105 b.stmtList(fn, s.List)
2111 then := fn.newBasicBlock("if.then")
2112 done := fn.newBasicBlock("if.done")
2115 els = fn.newBasicBlock("if.else")
2117 b.cond(fn, s.Cond, then, els)
2118 fn.currentBlock = then
2123 fn.currentBlock = els
2128 fn.currentBlock = done
2130 case *ast.SwitchStmt:
2131 b.switchStmt(fn, s, label)
2133 case *ast.TypeSwitchStmt:
2134 b.typeSwitchStmt(fn, s, label)
2136 case *ast.SelectStmt:
2137 b.selectStmt(fn, s, label)
2140 b.forStmt(fn, s, label)
2142 case *ast.RangeStmt:
2143 b.rangeStmt(fn, s, label)
2146 panic(fmt.Sprintf("unexpected statement kind: %T", s))
2150 // buildFunction builds SSA code for the body of function fn. Idempotent.
2151 func (b *builder) buildFunction(fn *Function) {
2152 if fn.Blocks != nil {
2153 return // building already started
2156 var recvField *ast.FieldList
2157 var body *ast.BlockStmt
2158 var functype *ast.FuncType
2159 switch n := fn.syntax.(type) {
2161 return // not a Go source function. (Synthetic, or from object file.)
2174 // External function.
2175 if fn.Params == nil {
2176 // This condition ensures we add a non-empty
2177 // params list once only, but we may attempt
2178 // the degenerate empty case repeatedly.
2179 // TODO(adonovan): opt: don't do that.
2181 // We set Function.Params even though there is no body
2182 // code to reference them. This simplifies clients.
2183 if recv := fn.Signature.Recv(); recv != nil {
2184 fn.addParamObj(recv)
2186 params := fn.Signature.Params()
2187 for i, n := 0, params.Len(); i < n; i++ {
2188 fn.addParamObj(params.At(i))
2193 if fn.Prog.mode&LogSource != 0 {
2194 defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))()
2197 fn.createSyntacticParams(recvField, functype)
// If control may fall off the end of the body (the current block is
// the entry block, the Recover block, or has predecessors), emit an
// implicit RunDefers + no-arg Return.
2199 if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) {
2200 // Control fell off the end of the function's body block.
2202 // Block optimizations eliminate the current block, if
2203 // unreachable. It is a builder invariant that
2204 // if this no-arg return is ill-typed for
2205 // fn.Signature.Results, this block must be
2206 // unreachable. The sanity checker checks this.
2207 fn.emit(new(RunDefers))
2208 fn.emit(new(Return))
2213 // buildFuncDecl builds SSA code for the function or method declared
2214 // by decl in package pkg.
2216 func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
// Blank functions (func _()) are presumably skipped here — the body
// of this guard is elided in this view; confirm against full source.
2218 if isBlankIdent(id) {
// Look up the *Function created for this declaration during the
// CREATE phase.
2221 fn := pkg.values[pkg.info.Defs[id]].(*Function)
// Package-level init functions (no receiver, named "init") get
// special handling; the empty tuple below is a call result type.
2222 if decl.Recv == nil && id.Name == "init" {
2225 v.setType(types.NewTuple())
2231 // Build calls Package.Build for each package in prog.
2232 // Building occurs in parallel unless the BuildSerially mode flag was set.
2234 // Build is intended for whole-program analysis; a typical compiler
2235 // need only build a single package.
2237 // Build is idempotent and thread-safe.
2239 func (prog *Program) Build() {
2240 var wg sync.WaitGroup
2241 for _, p := range prog.packages {
// In serial mode each package is built inline; otherwise each
// gets its own goroutine, tracked by wg.
2242 if prog.mode&BuildSerially != 0 {
2246 go func(p *Package) {
2255 // Build builds SSA code for all functions and vars in package p.
2257 // Precondition: CreatePackage must have been called for all of p's
2258 // direct imports (and hence its direct imports must have been
2261 // Build is idempotent and thread-safe.
// The sync.Once guarantees build runs at most once even under
// concurrent calls from Program.Build's goroutines.
2263 func (p *Package) Build() { p.buildOnce.Do(p.build) }
// build does the real work of Package.Build: it emits the synthetic
// init function (guard, prerequisite inits, package-level variable
// initializers in go/types.Info.InitOrder) and then builds every
// declared function and method. Runs at most once, via p.buildOnce.
2265 func (p *Package) build() {
2267 return // synthetic package, e.g. "testmain"
2270 // Ensure we have runtime type info for all exported members.
2271 // TODO(adonovan): ideally belongs in memberFromObject, but
2272 // that would require package creation in topological order.
2273 for name, mem := range p.Members {
2274 if ast.IsExported(name) {
2275 p.Prog.needMethodsOf(mem.Type())
2278 if p.Prog.mode&LogSource != 0 {
2279 defer logStack("build %s", p)()
2284 var done *BasicBlock
2286 if p.Prog.mode&BareInits == 0 {
2287 // Make init() skip if package is already initialized.
2288 initguard := p.Var("init$guard")
2289 doinit := init.newBasicBlock("init.start")
2290 done = init.newBasicBlock("init.done")
2291 emitIf(init, emitLoad(init, initguard), done, doinit)
2292 init.currentBlock = doinit
2293 emitStore(init, initguard, vTrue, token.NoPos)
2295 // Call the init() function of each package we import.
2296 for _, pkg := range p.Pkg.Imports() {
2297 prereq := p.Prog.packages[pkg]
2299 panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path()))
2302 v.Call.Value = prereq.init
2303 v.Call.pos = init.pos
2304 v.setType(types.NewTuple())
2311 // Initialize package-level vars in correct order.
2312 for _, varinit := range p.info.InitOrder {
2313 if init.Prog.mode&LogSource != 0 {
2314 fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n",
2315 varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos()))
2317 if len(varinit.Lhs) == 1 {
2318 // 1:1 initialization: var x, y = a(), b()
// A blank LHS still evaluates the RHS for effects; only a
// named variable gets an addressable lvalue.
2320 if v := varinit.Lhs[0]; v.Name() != "_" {
2321 lval = &address{addr: p.values[v].(*Global), pos: v.Pos()}
2325 b.assign(init, lval, varinit.Rhs, true, nil)
2327 // n:1 initialization: var x, y := f()
2328 tuple := b.exprN(init, varinit.Rhs)
2329 for i, v := range varinit.Lhs {
2330 if v.Name() == "_" {
2333 emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i), v.Pos())
2338 // Build all package-level functions, init functions
2339 // and methods, including unreachable/blank ones.
2340 // We build them in source order, but it's not significant.
2341 for _, file := range p.files {
2342 for _, decl := range file.Decls {
2343 if decl, ok := decl.(*ast.FuncDecl); ok {
2344 b.buildFuncDecl(p, decl)
2349 // Finish up init().
2350 if p.Prog.mode&BareInits == 0 {
2351 emitJump(init, done)
2352 init.currentBlock = done
2354 init.emit(new(Return))
2357 p.info = nil // We no longer need ASTs or go/types deductions.
2359 if p.Prog.mode&SanityCheckFunctions != 0 {
2360 sanityCheckPackage(p)
2364 // objectOf returns the typechecker object denoted by id.
2364 // Like ObjectOf, but panics instead of returning nil.
2365 // Only valid during p's create and build phases.
2366 func (p *Package) objectOf(id *ast.Ident) types.Object {
2367 if o := p.info.ObjectOf(id); o != nil {
// Reaching here indicates a builder bug or use after build
// (p.info is cleared at the end of Package.build).
2370 panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s",
2371 id.Name, p.Prog.Fset.Position(id.Pos())))
2374 // Like TypeOf, but panics instead of returning nil.
2375 // Only valid during p's create and build phases.
2376 func (p *Package) typeOf(e ast.Expr) types.Type {
2377 if T := p.info.TypeOf(e); T != nil {
2380 panic(fmt.Sprintf("no type for %T @ %s",
2381 e, p.Prog.Fset.Position(e.Pos())))