1 // Copyright 2013 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
7 // This file defines the constraint generation phase.
9 // TODO(adonovan): move the constraint definitions and the store() etc
10 // functions which add them (and are also used by the solver) into a
11 // new file, constraints.go.
18 "golang.org/x/tools/go/callgraph"
19 "golang.org/x/tools/go/ssa"
23 tEface = types.NewInterfaceType(nil, nil).Complete()
24 tInvalid = types.Typ[types.Invalid]
25 tUnsafePtr = types.Typ[types.UnsafePointer]
28 // ---------- Node creation ----------
30 // nextNode returns the index of the next unused node.
// Node ids are dense indices into a.nodes, so the next free id is
// simply the current length of the slice.
31 func (a *analysis) nextNode() nodeid {
32 return nodeid(len(a.nodes))
35 // addNodes creates nodes for all scalar elements in type typ, and
36 // returns the id of the first one, or zero if the type was
37 // analytically uninteresting.
39 // comment explains the origin of the nodes, as a debugging aid.
41 func (a *analysis) addNodes(typ types.Type, comment string) nodeid {
// NOTE(review): id is presumably captured from a.nextNode() before the
// loop (line elided in this view), so the check below can tell whether
// any nodes were actually created -- TODO confirm.
43 for _, fi := range a.flatten(typ) {
// One node per flattened scalar field of typ.
44 a.addOneNode(fi.typ, comment, fi)
46 if id == a.nextNode() {
47 return 0 // type contained no pointers
52 // addOneNode creates a single node with type typ, and returns its id.
54 // typ should generally be scalar (except for tagged.T nodes
55 // and struct/array identity nodes). Use addNodes for non-scalar types.
57 // comment explains the origin of the nodes, as a debugging aid.
58 // subelement indicates the subelement, e.g. ".a.b[*].c".
60 func (a *analysis) addOneNode(typ types.Type, comment string, subelement *fieldInfo) nodeid {
// Each node carries a fresh solverState for the later solver phase.
62 a.nodes = append(a.nodes, &node{typ: typ, subelement: subelement, solve: new(solverState)})
// NOTE(review): this log write is presumably guarded by a.log != nil
// (guard line elided in this view) -- confirm before relying on it.
64 fmt.Fprintf(a.log, "\tcreate n%d %s for %s%s\n",
65 id, typ, comment, subelement.path())
70 // setValueNode associates node id with the value v.
71 // cgn identifies the context iff v is a local variable.
73 func (a *analysis) setValueNode(v ssa.Value, id nodeid, cgn *cgnode) {
80 fmt.Fprintf(a.log, "\tval[%s] = n%d (%T)\n", v.Name(), id, v)
83 // Due to context-sensitivity, we may encounter the same Value
84 // in many contexts. We merge them to a canonical node, since
85 // that's what all clients want.
87 // Record the (v, id) relation if the client has queried pts(v).
88 if _, ok := a.config.Queries[v]; ok {
90 ptr, ok := a.result.Queries[v]
92 // First time? Create the canonical query node.
93 ptr = Pointer{a, a.addNodes(t, "query")}
94 a.result.Queries[v] = ptr
96 a.result.Queries[v] = ptr
// Merge this context's node into the canonical query node.
97 a.copy(ptr.n, id, a.sizeof(t))
100 // Record the (*v, id) relation if the client has queried pts(*v).
101 if _, ok := a.config.IndirectQueries[v]; ok {
103 ptr, ok := a.result.IndirectQueries[v]
105 // First time? Create the canonical indirect query node.
106 ptr = Pointer{a, a.addNodes(v.Type(), "query.indirect")}
107 a.result.IndirectQueries[v] = ptr
// Load through v so the indirect query node tracks pts(*v).
109 a.genLoad(cgn, ptr.n, v, 0, a.sizeof(t))
// Extended queries evaluate an op sequence over v's type.
112 for _, query := range a.config.extendedQueries[v] {
113 t, nid := a.evalExtendedQuery(v.Type().Underlying(), id, query.ops)
115 if query.ptr.a == nil {
117 query.ptr.n = a.addNodes(t, "query.extended")
119 a.copy(query.ptr.n, nid, a.sizeof(t))
123 // endObject marks the end of a sequence of calls to addNodes denoting
124 // a single object allocation.
126 // obj is the start node of the object, from a prior call to nextNode.
127 // Its size, flags and optional data will be updated.
129 func (a *analysis) endObject(obj nodeid, cgn *cgnode, data interface{}) *object {
130 // Ensure object is non-empty by padding;
131 // the pad will be the object node.
132 size := uint32(a.nextNode() - obj)
// NOTE(review): the padding call below is presumably executed only
// when size == 0 (guard line elided in this view) -- confirm.
134 a.addOneNode(tInvalid, "padding", nil)
136 objNode := a.nodes[obj]
138 size: size, // excludes padding
147 // makeFunctionObject creates and returns a new function object
148 // (contour) for fn, and returns the id of its first node. It also
149 // enqueues fn for subsequent constraint generation.
151 // For a context-sensitive contour, callersite identifies the sole
152 // callsite; for shared contours, caller is nil.
154 func (a *analysis) makeFunctionObject(fn *ssa.Function, callersite *callsite) nodeid {
156 fmt.Fprintf(a.log, "\t---- makeFunctionObject %s\n", fn)
159 // obj is the function object (identity, params, results).
161 cgn := a.makeCGNode(fn, obj, callersite)
// Object layout: identity node, then receiver (if any), params, results.
163 a.addOneNode(sig, "func.cgnode", nil) // (scalar with Signature type)
164 if recv := sig.Recv(); recv != nil {
165 a.addNodes(recv.Type(), "func.recv")
167 a.addNodes(sig.Params(), "func.params")
168 a.addNodes(sig.Results(), "func.results")
169 a.endObject(obj, cgn, fn).flags |= otFunction
172 fmt.Fprintf(a.log, "\t----\n")
175 // Queue it up for constraint processing.
176 a.genq = append(a.genq, cgn)
181 // makeTagged creates a tagged object of type typ.
// Layout: one tag node carrying typ, followed by the payload nodes.
182 func (a *analysis) makeTagged(typ types.Type, cgn *cgnode, data interface{}) nodeid {
183 obj := a.addOneNode(typ, "tagged.T", nil) // NB: type may be non-scalar!
184 a.addNodes(typ, "tagged.v")
185 a.endObject(obj, cgn, data).flags |= otTagged
189 // makeRtype returns the canonical tagged object of type *rtype whose
190 // payload points to the sole rtype object for T.
192 // TODO(adonovan): move to reflect.go; it's part of the solver really.
194 func (a *analysis) makeRtype(T types.Type) nodeid {
// Memoized: reuse the previously created object for T, if any.
195 if v := a.rtypes.At(T); v != nil {
199 // Create the object for the reflect.rtype itself, which is
200 // ordinarily a large struct but here a single node will do.
202 a.addOneNode(T, "reflect.rtype", nil)
203 a.endObject(obj, nil, T)
205 id := a.makeTagged(a.reflectRtypePtr, nil, T)
206 a.nodes[id+1].typ = T // trick (each *rtype tagged object is a singleton)
207 a.addressOf(a.reflectRtypePtr, id+1, obj)
213 // rtypeValue returns the type of the *reflect.rtype-tagged object obj.
214 func (a *analysis) rtypeTaggedValue(obj nodeid) types.Type {
215 tDyn, t, _ := a.taggedValue(obj)
// Sanity check: the tag must be *reflect.rtype.
216 if tDyn != a.reflectRtypePtr {
217 panic(fmt.Sprintf("not a *reflect.rtype-tagged object: obj=n%d tag=%v payload=n%d", obj, tDyn, t))
// The payload node's type records T (see makeRtype).
219 return a.nodes[t].typ
222 // valueNode returns the id of the value node for v, creating it (and
223 // the association) as needed. It may return zero for uninteresting
224 // values containing no pointers.
226 func (a *analysis) valueNode(v ssa.Value) nodeid {
227 // Value nodes for locals are created en masse by genFunc.
228 if id, ok := a.localval[v]; ok {
232 // Value nodes for globals are created on demand.
233 id, ok := a.globalval[v]
// NOTE(review): the creation path below presumably runs only when
// !ok (branch line elided in this view) -- confirm.
239 id = a.addNodes(v.Type(), comment)
240 if obj := a.objectNode(nil, v); obj != 0 {
241 a.addressOf(v.Type(), id, obj)
243 a.setValueNode(v, id, nil)
248 // valueOffsetNode ascertains the node for tuple/struct value v,
249 // then returns the node for its subfield #index.
251 func (a *analysis) valueOffsetNode(v ssa.Value, index int) nodeid {
// A zero id means v has no nodes; offsetting within n0 is a bug.
254 panic(fmt.Sprintf("cannot offset within n0: %s = %s", v.Name(), v))
256 return id + nodeid(a.offsetOf(v.Type(), index))
259 // isTaggedObject reports whether object obj is a tagged object.
// Tagged objects are created by makeTagged and carry the otTagged flag.
260 func (a *analysis) isTaggedObject(obj nodeid) bool {
261 return a.nodes[obj].obj.flags&otTagged != 0
264 // taggedValue returns the dynamic type tag, the (first node of the)
265 // payload, and the indirect flag of the tagged object starting at id.
266 // Panic ensues if !isTaggedObject(id).
268 func (a *analysis) taggedValue(obj nodeid) (tDyn types.Type, v nodeid, indirect bool) {
271 if flags&otTagged == 0 {
272 panic(fmt.Sprintf("not a tagged object: n%d", obj))
// The payload begins immediately after the tag node.
274 return n.typ, obj + 1, flags&otIndirect != 0
277 // funcParams returns the first node of the params (P) block of the
278 // function whose object node (obj.flags&otFunction) is id.
280 func (a *analysis) funcParams(id nodeid) nodeid {
// Guard: id must denote a function object (see makeFunctionObject).
282 if n.obj == nil || n.obj.flags&otFunction == 0 {
283 panic(fmt.Sprintf("funcParams(n%d): not a function object block", id))
288 // funcResults returns the first node of the results (R) block of the
289 // function whose object node (obj.flags&otFunction) is id.
291 func (a *analysis) funcResults(id nodeid) nodeid {
293 if n.obj == nil || n.obj.flags&otFunction == 0 {
294 panic(fmt.Sprintf("funcResults(n%d): not a function object block", id))
296 sig := n.typ.(*types.Signature)
// Skip the identity node, the params block, and any receiver block
// to reach the start of the results block.
297 id += 1 + nodeid(a.sizeof(sig.Params()))
298 if sig.Recv() != nil {
299 id += nodeid(a.sizeof(sig.Recv().Type()))
304 // ---------- Constraint creation ----------
306 // copy creates a constraint of the form dst = src.
307 // sizeof is the width (in logical fields) of the copied type.
309 func (a *analysis) copy(dst, src nodeid, sizeof uint32) {
310 if src == dst || sizeof == 0 {
313 if src == 0 || dst == 0 {
314 panic(fmt.Sprintf("ill-typed copy dst=n%d src=n%d", dst, src))
316 for i := uint32(0); i < sizeof; i++ {
317 a.addConstraint(©Constraint{dst, src})
323 // addressOf creates a constraint of the form id = &obj.
324 // T is the type of the address.
325 func (a *analysis) addressOf(T types.Type, id, obj nodeid) {
// Both endpoints must be real nodes; n0 is the non-pointerlike sentinel.
327 panic("addressOf: zero id")
330 panic("addressOf: zero obj")
// Suppress the constraint for types the analysis doesn't track.
332 if a.shouldTrack(T) {
333 a.addConstraint(&addrConstraint{id, obj})
337 // load creates a load constraint of the form dst = src[offset].
338 // offset is the pointer offset in logical fields.
339 // sizeof is the width (in logical fields) of the loaded type.
341 func (a *analysis) load(dst, src nodeid, offset, sizeof uint32) {
343 return // load of non-pointerlike value
345 if src == 0 && dst == 0 {
346 return // non-pointerlike operation
348 if src == 0 || dst == 0 {
349 panic(fmt.Sprintf("ill-typed load dst=n%d src=n%d", dst, src))
// One load constraint per logical field of the loaded type.
351 for i := uint32(0); i < sizeof; i++ {
352 a.addConstraint(&loadConstraint{offset, dst, src})
358 // store creates a store constraint of the form dst[offset] = src.
359 // offset is the pointer offset in logical fields.
360 // sizeof is the width (in logical fields) of the stored type.
362 func (a *analysis) store(dst, src nodeid, offset uint32, sizeof uint32) {
364 return // store of non-pointerlike value
366 if src == 0 && dst == 0 {
367 return // non-pointerlike operation
369 if src == 0 || dst == 0 {
370 panic(fmt.Sprintf("ill-typed store dst=n%d src=n%d", dst, src))
// One store constraint per logical field of the stored type.
372 for i := uint32(0); i < sizeof; i++ {
373 a.addConstraint(&storeConstraint{offset, dst, src})
379 // offsetAddr creates an offsetAddr constraint of the form dst = &src.#offset.
380 // offset is the field offset in logical fields.
381 // T is the type of the address.
383 func (a *analysis) offsetAddr(T types.Type, dst, src nodeid, offset uint32) {
384 if !a.shouldTrack(T) {
388 // Simplify dst = &src->f0
390 // (NB: this optimisation is defeated by the identity
391 // field prepended to struct and array objects.)
// General case: defer the offset computation to the solver.
394 a.addConstraint(&offsetAddrConstraint{offset, dst, src})
398 // typeAssert creates a typeFilter or untag constraint of the form dst = src.(T):
399 // typeFilter for an interface, untag for a concrete type.
400 // The exact flag is specified as for untagConstraint.
402 func (a *analysis) typeAssert(T types.Type, dst, src nodeid, exact bool) {
// Interface target: filter the set; concrete target: untag the payload.
404 a.addConstraint(&typeFilterConstraint{T, dst, src})
406 a.addConstraint(&untagConstraint{T, dst, src, exact})
410 // addConstraint adds c to the constraint set.
411 func (a *analysis) addConstraint(c constraint) {
412 a.constraints = append(a.constraints, c)
// NOTE(review): this log write is presumably guarded by a.log != nil
// (guard line elided in this view).
414 fmt.Fprintf(a.log, "\t%s\n", c)
418 // copyElems generates load/store constraints for *dst = *src,
419 // where src and dst are slices or *arrays.
421 func (a *analysis) copyElems(cgn *cgnode, typ types.Type, dst, src ssa.Value) {
// tmp holds the element value in transit: tmp = src[1]; dst[1] = tmp.
422 tmp := a.addNodes(typ, "copy")
// Offset 1 skips the identity field prepended to array objects.
424 a.genLoad(cgn, tmp, src, 1, sz)
425 a.genStore(cgn, dst, tmp, 1, sz)
428 // ---------- Constraint generation ----------
430 // genConv generates constraints for the conversion operation conv.
431 func (a *analysis) genConv(conv *ssa.Convert, cgn *cgnode) {
432 res := a.valueNode(conv)
434 return // result is non-pointerlike
437 tSrc := conv.X.Type()
// Dispatch on the underlying type of the conversion source.
440 switch utSrc := tSrc.Underlying().(type) {
442 // []byte/[]rune -> string?
446 // *T -> unsafe.Pointer?
447 if tDst.Underlying() == tUnsafePtr {
448 return // we don't model unsafe aliasing (unsound)
452 switch tDst.Underlying().(type) {
454 // Treat unsafe.Pointer->*T conversions like
455 // new(T) and create an unaliased object.
456 if utSrc == tUnsafePtr {
457 obj := a.addNodes(mustDeref(tDst), "unsafe.Pointer conversion")
458 a.endObject(obj, cgn, conv)
459 a.addressOf(tDst, res, obj)
464 // string -> []byte/[]rune (or named aliases)?
465 if utSrc.Info()&types.IsString != 0 {
// The conversion allocates a fresh backing array for the new slice.
466 obj := a.addNodes(sliceToArray(tDst), "convert")
467 a.endObject(obj, cgn, conv)
468 a.addressOf(tDst, res, obj)
473 // All basic-to-basic type conversions are no-ops.
474 // This includes uintptr<->unsafe.Pointer conversions,
475 // which we (unsoundly) ignore.
480 panic(fmt.Sprintf("illegal *ssa.Convert %s -> %s: %s", tSrc, tDst, conv.Parent()))
483 // genAppend generates constraints for a call to append.
484 func (a *analysis) genAppend(instr *ssa.Call, cgn *cgnode) {
485 // Consider z = append(x, y). y is optional.
486 // This may allocate a new [1]T array; call its object w.
487 // We get the following constraints:
492 x := instr.Call.Args[0]
// A slice value is a single pointer node, hence width 1.
495 a.copy(a.valueNode(z), a.valueNode(x), 1) // z = x
497 if len(instr.Call.Args) == 1 {
498 return // no allocation for z = append(x) or _ = append(x).
501 // TODO(adonovan): test append([]byte, ...string) []byte.
503 y := instr.Call.Args[1]
504 tArray := sliceToArray(instr.Call.Args[0].Type())
// w: the backing array that append may freshly allocate.
507 a.addNodes(tArray, "append")
508 a.endObject(w, cgn, instr)
510 a.copyElems(cgn, tArray.Elem(), z, y) // *z = *y
511 a.addressOf(instr.Type(), a.valueNode(z), w) // z = &w
514 // genBuiltinCall generates constraints for a call to a built-in.
515 func (a *analysis) genBuiltinCall(instr ssa.CallInstruction, cgn *cgnode) {
516 call := instr.Common()
517 switch call.Value.(*ssa.Builtin).Name() {
519 // Safe cast: append cannot appear in a go or defer statement.
520 a.genAppend(instr.(*ssa.Call), cgn)
// copy(dst, src): element-wise transfer between slices.
523 tElem := call.Args[0].Type().Underlying().(*types.Slice).Elem()
524 a.copyElems(cgn, tElem, call.Args[0], call.Args[1])
// panic(x): the argument flows into the global panic node...
527 a.copy(a.panicNode, a.valueNode(call.Args[0]), 1)
// ...and recover() reads it back out.
530 if v := instr.Value(); v != nil {
531 a.copy(a.valueNode(v), a.panicNode, 1)
535 // In the tests, the probe might be the sole reference
536 // to its arg, so make sure we create nodes for it.
537 if len(call.Args) > 0 {
538 a.valueNode(call.Args[0])
541 case "ssa:wrapnilchk":
542 a.copy(a.valueNode(instr.Value()), a.valueNode(call.Args[0]), 1)
545 // No-ops: close len cap real imag complex print println delete.
549 // shouldUseContext defines the context-sensitivity policy. It
550 // returns true if we should analyse all static calls to fn anew.
552 // Obviously this interface rather limits how much freedom we have to
553 // choose a policy. The current policy, rather arbitrarily, is true
554 // for intrinsics and accessor methods (actually: short, single-block,
555 // call-free functions). This is just a starting point.
557 func (a *analysis) shouldUseContext(fn *ssa.Function) bool {
558 if a.findIntrinsic(fn) != nil {
559 return true // treat intrinsics context-sensitively
561 if len(fn.Blocks) != 1 {
562 return false // too expensive
565 if len(blk.Instrs) > 10 {
566 return false // too expensive
568 if fn.Synthetic != "" && (fn.Pkg == nil || fn != fn.Pkg.Func("init")) {
569 return true // treat synthetic wrappers context-sensitively
// Scan the sole block for disqualifying call instructions.
571 for _, instr := range blk.Instrs {
572 switch instr := instr.(type) {
573 case ssa.CallInstruction:
574 // Disallow function calls (except to built-ins)
575 // because of the danger of unbounded recursion.
576 if _, ok := instr.Common().Value.(*ssa.Builtin); !ok {
584 // genStaticCall generates constraints for a statically dispatched function call.
585 func (a *analysis) genStaticCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
586 fn := call.StaticCallee()
588 // Special cases for inlined intrinsics.
590 case a.runtimeSetFinalizer:
591 // Inline SetFinalizer so the call appears direct.
592 site.targets = a.addOneNode(tInvalid, "SetFinalizer.targets", nil)
593 a.addConstraint(&runtimeSetFinalizerConstraint{
594 targets: site.targets,
595 x: a.valueNode(call.Args[0]),
596 f: a.valueNode(call.Args[1]),
600 case a.reflectValueCall:
601 // Inline (reflect.Value).Call so the call appears direct.
603 ret := reflectCallImpl(a, caller, site, a.valueNode(call.Args[0]), a.valueNode(call.Args[1]), dotdotdot)
// NOTE(review): presumably guarded by result != 0 (guard elided here).
605 a.addressOf(fn.Signature.Results().At(0).Type(), result, ret)
610 // Ascertain the context (contour/cgnode) for a particular call.
612 if a.shouldUseContext(fn) {
613 obj = a.makeFunctionObject(fn, site) // new contour
615 obj = a.objectNode(nil, fn) // shared contour
617 a.callEdge(caller, site, obj)
619 sig := call.Signature()
621 // Copy receiver, if any.
622 params := a.funcParams(obj)
624 if sig.Recv() != nil {
625 sz := a.sizeof(sig.Recv().Type())
626 a.copy(params, a.valueNode(args[0]), sz)
631 // Copy actual parameters into formal params block.
632 // Must loop, since the actuals aren't contiguous.
633 for i, arg := range args {
634 sz := a.sizeof(sig.Params().At(i).Type())
635 a.copy(params, a.valueNode(arg), sz)
639 // Copy formal results block to actual result.
641 a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
645 // genDynamicCall generates constraints for a dynamic function call.
646 func (a *analysis) genDynamicCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
647 // pts(targets) will be the set of possible call targets.
648 site.targets = a.valueNode(call.Value)
650 // We add dynamic closure rules that store the arguments into
651 // the P-block and load the results from the R-block of each
652 // function discovered in pts(targets).
654 sig := call.Signature()
655 var offset uint32 = 1 // P/R block starts at offset 1
656 for i, arg := range call.Args {
657 sz := a.sizeof(sig.Params().At(i).Type())
658 a.genStore(caller, call.Value, a.valueNode(arg), offset, sz)
// NOTE(review): offset presumably advances by sz each iteration
// (line elided in this view), leaving it at the R-block for the load.
662 a.genLoad(caller, result, call.Value, offset, a.sizeof(sig.Results()))
666 // genInvoke generates constraints for a dynamic method invocation.
667 func (a *analysis) genInvoke(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
// reflect.Type gets a special, sole-implementation treatment.
668 if call.Value.Type() == a.reflectType {
669 a.genInvokeReflectType(caller, site, call, result)
673 sig := call.Signature()
675 // Allocate a contiguous targets/params/results block for this call.
676 block := a.nextNode()
677 // pts(targets) will be the set of possible call targets
678 site.targets = a.addOneNode(sig, "invoke.targets", nil)
679 p := a.addNodes(sig.Params(), "invoke.params")
680 r := a.addNodes(sig.Results(), "invoke.results")
682 // Copy the actual parameters into the call's params block.
683 for i, n := 0, sig.Params().Len(); i < n; i++ {
684 sz := a.sizeof(sig.Params().At(i).Type())
685 a.copy(p, a.valueNode(call.Args[i]), sz)
688 // Copy the call's results block to the actual results.
690 a.copy(result, r, a.sizeof(sig.Results()))
693 // We add a dynamic invoke constraint that will connect the
694 // caller's and the callee's P/R blocks for each discovered
696 a.addConstraint(&invokeConstraint{call.Method, a.valueNode(call.Value), block})
699 // genInvokeReflectType is a specialization of genInvoke where the
700 // receiver type is a reflect.Type, under the assumption that there
701 // can be at most one implementation of this interface, *reflect.rtype.
703 // (Though this may appear to be an instance of a pattern---method
704 // calls on interfaces known to have exactly one implementation---in
705 // practice it occurs rarely, so we special case for reflect.Type.)
707 // In effect we treat this:
708 // var rt reflect.Type = ...
711 // rt.(*reflect.rtype).F()
713 func (a *analysis) genInvokeReflectType(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
714 // Unpack receiver into rtype
715 rtype := a.addOneNode(a.reflectRtypePtr, "rtype.recv", nil)
716 recv := a.valueNode(call.Value)
717 a.typeAssert(a.reflectRtypePtr, rtype, recv, true)
719 // Look up the concrete method.
720 fn := a.prog.LookupMethod(a.reflectRtypePtr, call.Method.Pkg(), call.Method.Name())
722 obj := a.makeFunctionObject(fn, site) // new contour for this call
723 a.callEdge(caller, site, obj)
725 // From now on, it's essentially a static call, but little is
726 // gained by factoring together the code for both cases.
728 sig := fn.Signature // concrete method
729 targets := a.addOneNode(sig, "call.targets", nil)
730 a.addressOf(sig, targets, obj) // (a singleton)
// The unpacked *rtype receiver becomes the first formal parameter.
733 params := a.funcParams(obj)
734 a.copy(params, rtype, 1)
737 // Copy actual parameters into formal P-block.
738 // Must loop, since the actuals aren't contiguous.
739 for i, arg := range call.Args {
740 sz := a.sizeof(sig.Params().At(i).Type())
741 a.copy(params, a.valueNode(arg), sz)
745 // Copy formal R-block to actual R-block.
747 a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
751 // genCall generates constraints for call instruction instr.
752 func (a *analysis) genCall(caller *cgnode, instr ssa.CallInstruction) {
753 call := instr.Common()
755 // Intrinsic implementations of built-in functions.
756 if _, ok := call.Value.(*ssa.Builtin); ok {
757 a.genBuiltinCall(instr, caller)
// result is zero when the call's value is unused (e.g. go/defer).
762 if v := instr.Value(); v != nil {
763 result = a.valueNode(v)
// Dispatch: static call, interface invoke, or dynamic function call.
766 site := &callsite{instr: instr}
767 if call.StaticCallee() != nil {
768 a.genStaticCall(caller, site, call, result)
769 } else if call.IsInvoke() {
770 a.genInvoke(caller, site, call, result)
772 a.genDynamicCall(caller, site, call, result)
775 caller.sites = append(caller.sites, site)
778 // TODO(adonovan): debug: improve log message.
779 fmt.Fprintf(a.log, "\t%s to targets %s from %s\n", site, site.targets, caller)
783 // objectNode returns the object to which v points, if known.
784 // In other words, if the points-to set of v is a singleton, it
785 // returns the sole label, zero otherwise.
787 // We exploit this information to make the generated constraints less
788 // dynamic. For example, a complex load constraint can be replaced by
789 // a simple copy constraint when the sole destination is known a priori.
791 // Some SSA instructions always have singletons points-to sets:
792 // Alloc, Function, Global, MakeChan, MakeClosure, MakeInterface, MakeMap, MakeSlice.
793 // Others may be singletons depending on their operands:
794 // FreeVar, Const, Convert, FieldAddr, IndexAddr, Slice.
796 // Idempotent. Objects are created as needed, possibly via recursion
797 // down the SSA value graph, e.g IndexAddr(FieldAddr(Alloc))).
799 func (a *analysis) objectNode(cgn *cgnode, v ssa.Value) nodeid {
// Global-scope values are memoized in a.globalobj.
801 case *ssa.Global, *ssa.Function, *ssa.Const, *ssa.FreeVar:
803 obj, ok := a.globalobj[v]
805 switch v := v.(type) {
808 a.addNodes(mustDeref(v.Type()), "global")
809 a.endObject(obj, nil, v)
// Functions get a shared (context-insensitive) contour.
812 obj = a.makeFunctionObject(v, nil)
822 fmt.Fprintf(a.log, "\tglobalobj[%s] = n%d\n", v, obj)
// Local-scope values are memoized per-function in a.localobj.
830 obj, ok := a.localobj[v]
832 switch v := v.(type) {
835 a.addNodes(mustDeref(v.Type()), "alloc")
836 a.endObject(obj, cgn, v)
840 a.addNodes(sliceToArray(v.Type()), "makeslice")
841 a.endObject(obj, cgn, v)
845 a.addNodes(v.Type().Underlying().(*types.Chan).Elem(), "makechan")
846 a.endObject(obj, cgn, v)
850 tmap := v.Type().Underlying().(*types.Map)
851 a.addNodes(tmap.Key(), "makemap.key")
852 elem := a.addNodes(tmap.Elem(), "makemap.value")
854 // To update the value field, MapUpdate
855 // generates store-with-offset constraints which
856 // the presolver can't model, so we must mark
857 // those nodes indirect.
858 for id, end := elem, elem+nodeid(a.sizeof(tmap.Elem())); id < end; id++ {
859 a.mapValues = append(a.mapValues, id)
861 a.endObject(obj, cgn, v)
863 case *ssa.MakeInterface:
865 obj = a.makeTagged(tConc, cgn, v)
867 // Copy the value into it, if nontrivial.
868 if x := a.valueNode(v.X); x != 0 {
869 a.copy(obj+1, x, a.sizeof(tConc))
// FieldAddr of a known object: offset into the same object.
873 if xobj := a.objectNode(cgn, v.X); xobj != 0 {
874 obj = xobj + nodeid(a.offsetOf(mustDeref(v.X.Type()), v.Field))
878 if xobj := a.objectNode(cgn, v.X); xobj != 0 {
// Slice of a known object points to the same object.
883 obj = a.objectNode(cgn, v.X)
886 // TODO(adonovan): opt: handle these cases too:
887 // - unsafe.Pointer->*T conversion acts like Alloc
888 // - string->[]byte/[]rune conversion acts like MakeSlice
892 fmt.Fprintf(a.log, "\tlocalobj[%s] = n%d\n", v.Name(), obj)
899 // genLoad generates constraints for result = *(ptr + val).
900 func (a *analysis) genLoad(cgn *cgnode, result nodeid, ptr ssa.Value, offset, sizeof uint32) {
// If ptr's points-to set is a known singleton, bypass the solver
// with a direct copy; otherwise emit a full load constraint.
901 if obj := a.objectNode(cgn, ptr); obj != 0 {
902 // Pre-apply loadConstraint.solve().
903 a.copy(result, obj+nodeid(offset), sizeof)
905 a.load(result, a.valueNode(ptr), offset, sizeof)
909 // genOffsetAddr generates constraints for a 'v=ptr.field' (FieldAddr)
910 // or 'v=ptr[*]' (IndexAddr) instruction v.
911 func (a *analysis) genOffsetAddr(cgn *cgnode, v ssa.Value, ptr nodeid, offset uint32) {
912 dst := a.valueNode(v)
// Singleton object known: take its address directly;
// otherwise emit a full offsetAddr constraint.
913 if obj := a.objectNode(cgn, v); obj != 0 {
914 // Pre-apply offsetAddrConstraint.solve().
915 a.addressOf(v.Type(), dst, obj)
917 a.offsetAddr(v.Type(), dst, ptr, offset)
921 // genStore generates constraints for *(ptr + offset) = val.
922 func (a *analysis) genStore(cgn *cgnode, ptr ssa.Value, val nodeid, offset, sizeof uint32) {
// If ptr's points-to set is a known singleton, bypass the solver
// with a direct copy; otherwise emit a full store constraint.
923 if obj := a.objectNode(cgn, ptr); obj != 0 {
924 // Pre-apply storeConstraint.solve().
925 a.copy(obj+nodeid(offset), val, sizeof)
927 a.store(a.valueNode(ptr), val, offset, sizeof)
931 // genInstr generates constraints for instruction instr in context cgn.
932 func (a *analysis) genInstr(cgn *cgnode, instr ssa.Instruction) {
935 if val, ok := instr.(ssa.Value); ok {
936 prefix = val.Name() + " = "
938 fmt.Fprintf(a.log, "; %s%s\n", prefix, instr)
// One case per SSA instruction kind.
941 switch instr := instr.(type) {
947 case token.ARROW: // <-x
948 // We can ignore instr.CommaOk because the node we're
949 // altering is always at zero offset relative to instr
950 tElem := instr.X.Type().Underlying().(*types.Chan).Elem()
951 a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(tElem))
953 case token.MUL: // *x
954 a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(instr.Type()))
957 // NOT, SUB, XOR: no-op.
963 case ssa.CallInstruction: // *ssa.Call, *ssa.Go, *ssa.Defer
964 a.genCall(cgn, instr)
966 case *ssa.ChangeType:
967 a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)
970 a.genConv(instr, cgn)
// Extract: copy one component out of a tuple value.
973 a.copy(a.valueNode(instr),
974 a.valueOffsetNode(instr.Tuple, instr.Index),
975 a.sizeof(instr.Type()))
978 a.genOffsetAddr(cgn, instr, a.valueNode(instr.X),
979 a.offsetOf(mustDeref(instr.X.Type()), instr.Field))
// IndexAddr: offset 1 skips the array object's identity field.
982 a.genOffsetAddr(cgn, instr, a.valueNode(instr.X), 1)
985 a.copy(a.valueNode(instr),
986 a.valueOffsetNode(instr.X, instr.Field),
987 a.sizeof(instr.Type()))
990 a.copy(a.valueNode(instr), 1+a.valueNode(instr.X), a.sizeof(instr.Type()))
// Select: each case loads from or stores to its channel.
993 recv := a.valueOffsetNode(instr, 2) // instr : (index, recvOk, recv0, ... recv_n-1)
994 for _, st := range instr.States {
995 elemSize := a.sizeof(st.Chan.Type().Underlying().(*types.Chan).Elem())
998 a.genLoad(cgn, recv, st.Chan, 0, elemSize)
999 recv += nodeid(elemSize)
1001 case types.SendOnly:
1002 a.genStore(cgn, st.Chan, a.valueNode(st.Send), 0, elemSize)
// Return: copy each result into the function's R-block.
1007 results := a.funcResults(cgn.obj)
1008 for _, r := range instr.Results {
1009 sz := a.sizeof(r.Type())
1010 a.copy(results, a.valueNode(r), sz)
1011 results += nodeid(sz)
1015 a.genStore(cgn, instr.Chan, a.valueNode(instr.X), 0, a.sizeof(instr.X.Type()))
1018 a.genStore(cgn, instr.Addr, a.valueNode(instr.Val), 0, a.sizeof(instr.Val.Type()))
1020 case *ssa.Alloc, *ssa.MakeSlice, *ssa.MakeChan, *ssa.MakeMap, *ssa.MakeInterface:
1021 v := instr.(ssa.Value)
1022 a.addressOf(v.Type(), a.valueNode(v), a.objectNode(cgn, v))
1024 case *ssa.ChangeInterface:
1025 a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)
1027 case *ssa.TypeAssert:
1028 a.typeAssert(instr.AssertedType, a.valueNode(instr), a.valueNode(instr.X), true)
1031 a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)
1033 case *ssa.If, *ssa.Jump:
// Phi: every incoming edge value flows into the result.
1037 sz := a.sizeof(instr.Type())
1038 for _, e := range instr.Edges {
1039 a.copy(a.valueNode(instr), a.valueNode(e), sz)
1042 case *ssa.MakeClosure:
1043 fn := instr.Fn.(*ssa.Function)
1044 a.copy(a.valueNode(instr), a.valueNode(fn), 1)
1045 // Free variables are treated like global variables.
1046 for i, b := range instr.Bindings {
1047 a.copy(a.valueNode(fn.FreeVars[i]), a.valueNode(b), a.sizeof(b.Type()))
1050 case *ssa.RunDefers:
1051 // The analysis is flow insensitive, so we just "call"
1052 // defers as we encounter them.
1055 // Do nothing. Next{Iter: *ssa.Range} handles this case.
1058 if !instr.IsString { // map
1059 // Assumes that Next is always directly applied to a Range result.
1060 theMap := instr.Iter.(*ssa.Range).X
1061 tMap := theMap.Type().Underlying().(*types.Map)
1063 ksize := a.sizeof(tMap.Key())
1064 vsize := a.sizeof(tMap.Elem())
1066 // The k/v components of the Next tuple may each be invalid.
1067 tTuple := instr.Type().(*types.Tuple)
1069 // Load from the map's (k,v) into the tuple's (ok, k, v).
1070 osrc := uint32(0) // offset within map object
1071 odst := uint32(1) // offset within tuple (initially just after 'ok bool')
1072 sz := uint32(0) // amount to copy
// Only copy the k and/or v components that are valid.
1075 if tTuple.At(1).Type() != tInvalid {
1083 if tTuple.At(2).Type() != tInvalid {
1087 a.genLoad(cgn, a.valueNode(instr)+nodeid(odst), theMap, osrc, sz)
// Lookup: m[k] loads the value block of the map object.
1091 if tMap, ok := instr.X.Type().Underlying().(*types.Map); ok {
1092 // CommaOk can be ignored: field 0 is a no-op.
1093 ksize := a.sizeof(tMap.Key())
1094 vsize := a.sizeof(tMap.Elem())
1095 a.genLoad(cgn, a.valueNode(instr), instr.X, ksize, vsize)
1098 case *ssa.MapUpdate:
1099 tmap := instr.Map.Type().Underlying().(*types.Map)
1100 ksize := a.sizeof(tmap.Key())
1101 vsize := a.sizeof(tmap.Elem())
1102 a.genStore(cgn, instr.Map, a.valueNode(instr.Key), 0, ksize)
1103 a.genStore(cgn, instr.Map, a.valueNode(instr.Value), ksize, vsize)
// Panic: the operand flows into the global panic node.
1106 a.copy(a.panicNode, a.valueNode(instr.X), 1)
1109 panic(fmt.Sprintf("unimplemented: %T", instr))
// makeCGNode returns a new call-graph node for (fn, obj, callersite)
// and registers it in a.cgnodes.
1113 func (a *analysis) makeCGNode(fn *ssa.Function, obj nodeid, callersite *callsite) *cgnode {
1114 cgn := &cgnode{fn: fn, obj: obj, callersite: callersite}
1115 a.cgnodes = append(a.cgnodes, cgn)
1119 // genRootCalls generates the synthetic root of the callgraph and the
1120 // initial calls from it to the analysis scope, such as main, a test
1123 func (a *analysis) genRootCalls() *cgnode {
1124 r := a.prog.NewFunction("<root>", new(types.Signature), "root of callgraph")
1125 root := a.makeCGNode(r, 0, nil)
1127 // TODO(adonovan): make an ssa utility to construct an actual
1128 // root function so we don't need to special-case site-less
1131 // For each main package, call main.init(), main.main().
1132 for _, mainPkg := range a.config.Mains {
1133 main := mainPkg.Func("main")
1135 panic(fmt.Sprintf("%s has no main function", mainPkg))
1138 targets := a.addOneNode(main.Signature, "root.targets", nil)
1139 site := &callsite{targets: targets}
1140 root.sites = append(root.sites, site)
1141 for _, fn := range [2]*ssa.Function{mainPkg.Func("init"), main} {
1143 fmt.Fprintf(a.log, "\troot call to %s:\n", fn)
// Each entry point flows into the synthetic site's targets node.
1145 a.copy(targets, a.valueNode(fn), 1)
1152 // genFunc generates constraints for function fn.
1153 func (a *analysis) genFunc(cgn *cgnode) {
1156 impl := a.findIntrinsic(fn)
1159 fmt.Fprintf(a.log, "\n\n==== Generating constraints for %s, %s\n", cgn, cgn.contour())
1161 // Hack: don't display body if intrinsic.
1163 fn2 := *cgn.fn // copy
1168 cgn.fn.WriteTo(a.log)
1177 if fn.Blocks == nil {
1178 // External function with no intrinsic treatment.
1179 // We'll warn about calls to such functions at the end.
1184 fmt.Fprintln(a.log, "; Creating nodes for local values")
// Fresh per-function maps; locals are context-specific.
1187 a.localval = make(map[ssa.Value]nodeid)
1188 a.localobj = make(map[ssa.Value]nodeid)
1190 // The value nodes for the params are in the func object block.
1191 params := a.funcParams(cgn.obj)
1192 for _, p := range fn.Params {
1193 a.setValueNode(p, params, cgn)
1194 params += nodeid(a.sizeof(p.Type()))
1197 // Free variables have global cardinality:
1198 // the outer function sets them with MakeClosure;
1199 // the inner function accesses them with FreeVar.
1201 // TODO(adonovan): treat free vars context-sensitively.
1203 // Create value nodes for all value instructions
1204 // since SSA may contain forward references.
1205 var space [10]*ssa.Value
1206 for _, b := range fn.Blocks {
1207 for _, instr := range b.Instrs {
1208 switch instr := instr.(type) {
1210 // do nothing: it has a funky type,
1211 // and *ssa.Next does all the work.
1216 comment = instr.Name()
1218 id := a.addNodes(instr.Type(), comment)
1219 a.setValueNode(instr, id, cgn)
1222 // Record all address-taken functions (for presolver).
1223 rands := instr.Operands(space[:0])
1224 if call, ok := instr.(ssa.CallInstruction); ok && !call.Common().IsInvoke() {
1225 // Skip CallCommon.Value in "call" mode.
1226 // TODO(adonovan): fix: relies on unspecified ordering. Specify it.
1229 for _, rand := range rands {
1230 if atf, ok := (*rand).(*ssa.Function); ok {
1231 a.atFuncs[atf] = true
1237 // Generate constraints for instructions.
1238 for _, b := range fn.Blocks {
1239 for _, instr := range b.Instrs {
1240 a.genInstr(cgn, instr)
1248 // genMethodsOf generates nodes and constraints for all methods of type T.
1249 func (a *analysis) genMethodsOf(T types.Type) {
1250 itf := isInterface(T)
1252 // TODO(adonovan): can we skip this entirely if itf is true?
1253 // I think so, but the answer may depend on reflection.
1254 mset := a.prog.MethodSets.MethodSet(T)
1255 for i, n := 0, mset.Len(); i < n; i++ {
// MethodValue materializes the concrete method function for each
// selection in the method set.
1256 m := a.prog.MethodValue(mset.At(i))
1260 // Methods of concrete types are address-taken functions.
1266 // generate generates offline constraints for the entire program.
1267 func (a *analysis) generate() {
1268 start("Constraint generation")
1270 fmt.Fprintln(a.log, "==== Generating constraints")
1273 // Create a dummy node since we use the nodeid 0 for
1274 // non-pointerlike variables.
1275 a.addNodes(tInvalid, "(zero)")
1277 // Create the global node for panic values.
1278 a.panicNode = a.addNodes(tEface, "panic")
1280 // Create nodes and constraints for all methods of reflect.rtype.
1281 // (Shared contours are used by dynamic calls to reflect.Type
1282 // methods---typically just String().)
1283 if rtype := a.reflectRtypePtr; rtype != nil {
1284 a.genMethodsOf(rtype)
1287 root := a.genRootCalls()
1289 if a.config.BuildCallGraph {
1290 a.result.CallGraph = callgraph.New(root.fn)
1293 // Create nodes and constraints for all methods of all types
1294 // that are dynamically accessible via reflection or interfaces.
1295 for _, T := range a.prog.RuntimeTypes() {
1299 // Generate constraints for functions as they become reachable
1300 // from the roots. (No constraints are generated for functions
1301 // that are dead in this analysis scope.)
// NOTE(review): the loop body is elided in this view; presumably it
// dequeues a cgnode from a.genq and calls genFunc on it -- confirm.
1302 for len(a.genq) > 0 {
1308 // The runtime magically allocates os.Args; so should we.
1309 if os := a.prog.ImportedPackage("os"); os != nil {
1310 // In effect: os.Args = new([1]string)[:]
1311 T := types.NewSlice(types.Typ[types.String])
1312 obj := a.addNodes(sliceToArray(T), "<command-line args>")
1313 a.endObject(obj, nil, "<command-line args>")
1314 a.addressOf(T, a.objectNode(nil, os.Var("Args")), obj)
1317 // Discard generation state, to avoid confusion after node renumbering.
1323 stop("Constraint generation")