1 // Copyright 2019 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
18 "golang.org/x/tools/internal/event"
19 "golang.org/x/tools/internal/lsp/debug/tag"
20 "golang.org/x/tools/internal/lsp/diff"
21 "golang.org/x/tools/internal/lsp/diff/myers"
22 "golang.org/x/tools/internal/lsp/protocol"
23 "golang.org/x/tools/internal/lsp/source"
24 "golang.org/x/tools/internal/memoize"
25 "golang.org/x/tools/internal/span"
26 errors "golang.org/x/xerrors"
// parseKey uniquely identifies a parsed Go file.
type parseKey struct {
	// file identifies the exact content of the file that was parsed.
	file source.FileIdentity
// astCacheKey is similar to parseKey, but is a distinct type because
// it is used to key a different value within the same map.
// The underlying fields are identical to parseKey's; only the type
// identity differs, which keeps the two entries from colliding.
type astCacheKey parseKey
// parseGoHandle pairs the memoized parse of a single Go file with the
// memoized AST position caches derived from that parse.
type parseGoHandle struct {
	// handle produces a *parseGoData when forced.
	handle *memoize.Handle
	file   source.FileHandle
	// astCacheHandle produces an *astCacheData derived from handle's result.
	astCacheHandle *memoize.Handle
// parseGoData is the result type stored in a parseGoHandle's handle.
type parseGoData struct {
	parsed *source.ParsedGoFile

	// If true, we adjusted the AST to make it type check better, and
	// it may not match the source code.

	err error // any other errors
// parseGoHandle returns a handle for parsing fh in the given mode,
// reusing an existing handle for this (file, mode) when one is cached.
func (s *snapshot) parseGoHandle(ctx context.Context, fh source.FileHandle, mode source.ParseMode) *parseGoHandle {
	file: fh.FileIdentity(),
	// Fast path: a handle for this key already exists on the snapshot.
	if pgh := s.getGoFile(key); pgh != nil {
	// Bind the parse itself; the bound function runs lazily when forced.
	parseHandle := s.generation.Bind(key, func(ctx context.Context, arg memoize.Arg) interface{} {
		snapshot := arg.(*snapshot)
		return parseGo(ctx, snapshot.view.session.cache.fset, fh, mode)
	// Bind the derived AST position caches under a distinct key type so
	// they don't collide with the parse result in the same map.
	astHandle := s.generation.Bind(astCacheKey(key), func(ctx context.Context, arg memoize.Arg) interface{} {
		snapshot := arg.(*snapshot)
		return buildASTCache(ctx, snapshot, parseHandle)
	pgh := &parseGoHandle{
		astCacheHandle: astHandle,
	// addGoFile registers the handle and returns the canonical one
	// (which may be a concurrently-added duplicate).
	return s.addGoFile(key, pgh)
// String implements fmt.Stringer, identifying the handle by file path.
func (pgh *parseGoHandle) String() string {
	return pgh.File().URI().Filename()
// File returns the file handle being parsed.
func (pgh *parseGoHandle) File() source.FileHandle {
// Mode returns the parse mode this handle was created with.
func (pgh *parseGoHandle) Mode() source.ParseMode {
// ParseGo parses fh in the given mode, memoizing the result on the
// snapshot's generation.
func (s *snapshot) ParseGo(ctx context.Context, fh source.FileHandle, mode source.ParseMode) (*source.ParsedGoFile, error) {
	pgh := s.parseGoHandle(ctx, fh, mode)
	// The "fixed" flag is intentionally discarded; callers of ParseGo
	// don't need to know whether the AST was adjusted.
	pgf, _, err := s.parseGo(ctx, pgh)
// parseGo forces the handle's parse and unpacks the result. The bool
// reports whether the AST was "fixed" (adjusted to type check better)
// and therefore may not match the source code.
func (s *snapshot) parseGo(ctx context.Context, pgh *parseGoHandle) (*source.ParsedGoFile, bool, error) {
	d, err := pgh.handle.Get(ctx, s.generation, s)
	return nil, false, err
	data := d.(*parseGoData)
	return data.parsed, data.fixed, data.err
// PosToDecl returns a map from positions to their innermost enclosing
// ast.Decl, computed lazily via the file's AST cache handle.
func (s *snapshot) PosToDecl(ctx context.Context, pgf *source.ParsedGoFile) (map[token.Pos]ast.Decl, error) {
	fh, err := s.GetFile(ctx, pgf.URI)
	// Re-derive the handle for pgf's file and mode, then force the
	// memoized AST cache.
	pgh := s.parseGoHandle(ctx, fh, pgf.Mode)
	d, err := pgh.astCacheHandle.Get(ctx, s.generation, s)
	data := d.(*astCacheData)
	return data.posToDecl, data.err
// PosToField returns a map from positions to their enclosing
// *ast.Field, computed lazily via the file's AST cache handle.
func (s *snapshot) PosToField(ctx context.Context, pgf *source.ParsedGoFile) (map[token.Pos]*ast.Field, error) {
	fh, err := s.GetFile(ctx, pgf.URI)
	pgh := s.parseGoHandle(ctx, fh, pgf.Mode)
	d, err := pgh.astCacheHandle.Get(ctx, s.generation, s)
	// A nil d can occur as well as an error; treat both as failure.
	if err != nil || d == nil {
	data := d.(*astCacheData)
	return data.posToField, data.err
// astCacheData holds position-indexed lookup tables mapping from the
// typed world back into the syntax tree. It is the result type of
// buildASTCache.
type astCacheData struct {
	posToDecl  map[token.Pos]ast.Decl
	posToField map[token.Pos]*ast.Field
// buildASTCache builds caches to aid in quickly going from the typed
// world to the syntactic world.
func buildASTCache(ctx context.Context, snapshot *snapshot, parseHandle *memoize.Handle) *astCacheData {
	// path contains all ancestors, including n.
	// decls contains all ancestors that are decls.
	v, err := parseHandle.Get(ctx, snapshot.generation, snapshot)
	return &astCacheData{err: err}
	file := v.(*parseGoData).parsed.File
	return &astCacheData{err: fmt.Errorf("nil file")}
	data := &astCacheData{
		posToDecl:  make(map[token.Pos]ast.Decl),
		posToField: make(map[token.Pos]*ast.Field),
	ast.Inspect(file, func(n ast.Node) bool {
		// A nil n means we are popping back up the tree: unwind the
		// ancestor stacks (path always, decls only if the popped node
		// was itself a decl).
		lastP := path[len(path)-1]
		path = path[:len(path)-1]
		if len(decls) > 0 && decls[len(decls)-1] == lastP {
			decls = decls[:len(decls)-1]
		path = append(path, n)
		switch n := n.(type) {
			// addField records f's position as belonging to the field n
			// and to the innermost enclosing decl.
			addField := func(f ast.Node) {
				if f.Pos().IsValid() {
					data.posToField[f.Pos()] = n
					data.posToDecl[f.Pos()] = decls[len(decls)-1]
			// Add mapping for *ast.Field itself. This handles embedded
			// fields which have no associated *ast.Ident name.
			// Add mapping for each field name since you can have
			// multiple names for the same type expression.
			for _, name := range n.Names {
			// Also map "X" in "...X" to the containing *ast.Field. This
			// makes it easy to format variadic signature params
			if elips, ok := n.Type.(*ast.Ellipsis); ok && elips.Elt != nil {
		decls = append(decls, n)
		if n.Name != nil && n.Name.Pos().IsValid() {
			data.posToDecl[n.Name.Pos()] = n
		decls = append(decls, n)
		// Map each spec name in a GenDecl (type/var/const) back to the
		// declaration itself.
		for _, spec := range n.Specs {
			switch spec := spec.(type) {
				if spec.Name != nil && spec.Name.Pos().IsValid() {
					data.posToDecl[spec.Name.Pos()] = n
				for _, id := range spec.Names {
					if id != nil && id.Pos().IsValid() {
						data.posToDecl[id.Pos()] = n
// parseGo parses the file content from fh in the requested mode,
// attempting to repair ("fix") unparseable source so the result is more
// useful for type checking and completion.
func parseGo(ctx context.Context, fset *token.FileSet, fh source.FileHandle, mode source.ParseMode) *parseGoData {
	ctx, done := event.Start(ctx, "cache.parseGo", tag.File.Of(fh.URI().Filename()))
	if fh.Kind() != source.Go {
		return &parseGoData{err: errors.Errorf("cannot parse non-Go file %s", fh.URI())}
	src, err := fh.Read()
	return &parseGoData{err: err}
	parserMode := parser.AllErrors | parser.ParseComments
	if mode == source.ParseHeader {
		// Header mode only needs the package clause and imports.
		parserMode = parser.ImportsOnly | parser.ParseComments
	file, err := parser.ParseFile(fset, fh.URI().Filename(), src, parserMode)
	var parseErr scanner.ErrorList
	// We passed a byte slice, so the only possible error is a parse error.
	parseErr = err.(scanner.ErrorList)
	tok := fset.File(file.Pos())
	// file.Pos is the location of the package declaration. If there was
	// none, we can't find the token.File that ParseFile created, and we
	// have no choice but to recreate it.
	tok = fset.AddFile(fh.URI().Filename(), -1, len(src))
	tok.SetLinesForContent(src)
	// If there were parse errors, attempt to fix them up.
	// Fix any badly parsed parts of the AST.
	fixed = fixAST(ctx, file, tok, src)
	// fixSrc edits may themselves enable further fixes, so iterate,
	// bounded to avoid an infinite repair loop.
	for i := 0; i < 10; i++ {
		// Fix certain syntax errors that render the file unparseable.
		newSrc := fixSrc(file, tok, src)
		// If we thought there was something to fix 10 times in a row,
		// it is likely we got stuck in a loop somehow. Log out a diff
		// of the last changes we made to aid in debugging.
		edits, err := myers.ComputeEdits(fh.URI(), string(src), string(newSrc))
		event.Error(ctx, "error generating fixSrc diff", err, tag.File.Of(tok.Name()))
		unified := diff.ToUnified("before", "after", string(src), edits)
		event.Log(ctx, fmt.Sprintf("fixSrc loop - last diff:\n%v", unified), tag.File.Of(tok.Name()))
		// Re-parse the doctored source; its error is deliberately
		// ignored so the original parseErr is preserved.
		newFile, _ := parser.ParseFile(fset, fh.URI().Filename(), newSrc, parserMode)
		// Maintain the original parseError so we don't try formatting the doctored file.
		tok = fset.File(file.Pos())
		fixed = fixAST(ctx, file, tok, src)
	if mode == source.ParseExported {
	parsed: &source.ParsedGoFile{
		Mapper: &protocol.ColumnMapper{
			Converter: span.NewTokenConverter(fset, tok),
// trimAST clears any part of the AST not relevant to type checking
// expressions at pos.
func trimAST(file *ast.File) {
	ast.Inspect(file, func(n ast.Node) bool {
		switch n := n.(type) {
		case *ast.CaseClause:
		case *ast.CommClause:
		case *ast.CompositeLit:
			// types.Info.Types for long slice/array literals are particularly
			// expensive. Try to clear them out.
			at, ok := n.Type.(*ast.ArrayType)
			// Removing the elements from an ellipsis array changes its type.
			// Try to set the length explicitly so we can continue.
			if _, ok := at.Len.(*ast.Ellipsis); ok {
				length, ok := arrayLength(n)
				// Replace "..." with a synthetic integer literal holding
				// the computed length, anchored at the old position.
				at.Len = &ast.BasicLit{
					Value:    fmt.Sprint(length),
					ValuePos: at.Len.Pos(),
// arrayLength returns the length of some simple forms of ellipsis array literal.
// Notably, it handles the tables in golang.org/x/text.
func arrayLength(array *ast.CompositeLit) (int, bool) {
	// litVal extracts an integer value from a basic literal, if possible.
	litVal := func(expr ast.Expr) (int, bool) {
		lit, ok := expr.(*ast.BasicLit)
		val, err := strconv.ParseInt(lit.Value, 10, 64)
		return int(val), true
	// Track the largest keyed index seen; the array length is one more.
	for _, elt := range array.Elts {
		kve, ok := elt.(*ast.KeyValueExpr)
		switch key := kve.Key.(type) {
			if val, ok := litVal(key); ok && largestKey < val {
		case *ast.BinaryExpr:
			// golang.org/x/text uses subtraction (and only subtraction) in its indices.
			if key.Op != token.SUB {
			x, ok := litVal(key.X)
			y, ok := litVal(key.Y)
			if val := x - y; largestKey < val {
	if largestKey != -1 {
		return largestKey + 1, true
	// No keyed elements: the length is simply the element count.
	return len(array.Elts), true
// fixAST inspects the AST and potentially modifies any *ast.BadStmts so that it can be
// type-checked more effectively.
func fixAST(ctx context.Context, n ast.Node, tok *token.File, src []byte) (fixed bool) {
	walkASTWithParent(n, func(n, parent ast.Node) bool {
		switch n := n.(type) {
			if fixed = fixDeferOrGoStmt(n, parent, tok, src); fixed {
				// Recursively fix in our fixed node.
				_ = fixAST(ctx, parent, tok, src)
			err = errors.Errorf("unable to parse defer or go from *ast.BadStmt: %v", err)
			if fixed = fixArrayType(n, parent, tok, src); fixed {
				// Recursively fix in our fixed node.
				_ = fixAST(ctx, parent, tok, src)
			// Fix cases where parser interprets if/for/switch "init"
			// statement as "cond" expression, e.g.:
			//
			// // "i := foo" is init statement, not condition.
			fixInitStmt(n, parent, tok, src)
		case *ast.SelectorExpr:
			// Fix cases where a keyword prefix results in a phantom "_" selector, e.g.:
			//
			// foo.var<> // want to complete to "foo.variance"
			fixPhantomSelector(n, tok, src)
			switch parent.(type) {
			case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt:
				// Adjust closing curly brace of empty switch/select
				// statements so we can complete inside them.
				fixEmptySwitch(n, tok, src)
// walkASTWithParent walks the AST rooted at n. The semantics are
// similar to ast.Inspect except it does not call f(nil).
func walkASTWithParent(n ast.Node, f func(n ast.Node, parent ast.Node) bool) {
	// ancestors is a stack: pushed on descent, popped on the nil
	// callback ast.Inspect uses to signal ascent.
	var ancestors []ast.Node
	ast.Inspect(n, func(n ast.Node) (recurse bool) {
		ancestors = append(ancestors, n)
		ancestors = ancestors[:len(ancestors)-1]
		// The current node's parent is the top of the stack, if any.
		if len(ancestors) > 0 {
			parent = ancestors[len(ancestors)-1]
// fixSrc attempts to modify the file's source code to fix certain
// syntax errors that leave the rest of the file unparsed.
// It returns the modified source, or nil if no fix applied.
func fixSrc(f *ast.File, tok *token.File, src []byte) (newSrc []byte) {
	walkASTWithParent(f, func(n, parent ast.Node) bool {
		switch n := n.(type) {
			newSrc = fixMissingCurlies(f, n, parent, tok, src)
		case *ast.SelectorExpr:
			newSrc = fixDanglingSelector(n, tok, src)
// fixMissingCurlies adds in curly braces for block statements that
// are missing curly braces. For example:
func fixMissingCurlies(f *ast.File, b *ast.BlockStmt, parent ast.Node, tok *token.File, src []byte) []byte {
	// If the "{" is already in the source code, there isn't anything to
	// fix since we aren't missing curlies.
	if b.Lbrace.IsValid() {
		braceOffset := tok.Offset(b.Lbrace)
		if braceOffset < len(src) && src[braceOffset] == '{' {
	parentLine := tok.Line(parent.Pos())
	if parentLine >= tok.LineCount() {
		// If we are the last line in the file, no need to fix anything.
	// Insert curlies at the end of parent's starting line. The parent
	// is the statement that contains the block, e.g. *ast.IfStmt. The
	// block's Pos()/End() can't be relied upon because they are based
	// on the (missing) curly braces. We assume the statement is a
	// single line for now and try sticking the curly braces at the end.
	insertPos := tok.LineStart(parentLine+1) - 1
	// Scootch position backwards until it's not in a comment. For example:
	//
	// if foo<> // some amazing comment |
	//
	// insertPos will be located at "|", so we back it out of the comment.
	for _, c := range f.Comments {
		if c.Pos() < insertPos && insertPos <= c.End() {
	// Bail out if line doesn't end in an ident or ".". This is to avoid
	// cases like below where we end up making things worse by adding
	switch precedingToken(insertPos, tok, src) {
	case token.IDENT, token.PERIOD:
	buf.Grow(len(src) + 3)
	buf.Write(src[:tok.Offset(insertPos)])
	// Detect if we need to insert a semicolon to fix "for" loop situations like:
	//
	// for i := foo(); foo<>
	//
	// Just adding curlies is not sufficient to make things parse well.
	if fs, ok := parent.(*ast.ForStmt); ok {
		if _, ok := fs.Cond.(*ast.BadExpr); !ok {
			if xs, ok := fs.Post.(*ast.ExprStmt); ok {
				if _, ok := xs.X.(*ast.BadExpr); ok {
	// Insert "{}" at insertPos.
	buf.Write(src[tok.Offset(insertPos):])
// fixEmptySwitch moves empty switch/select statements' closing curly
// brace down one line. This allows us to properly detect incomplete
// "case" and "default" keywords as inside the switch statement. For
// Later we manually pull out the "def" token, but we need to detect
// that our "<>" position is inside the switch block. To do that we
// move the curly brace so it looks like:
func fixEmptySwitch(body *ast.BlockStmt, tok *token.File, src []byte) {
	// We only care about empty switch statements.
	if len(body.List) > 0 || !body.Rbrace.IsValid() {
	// If the right brace is actually in the source code at the
	// specified position, don't mess with it.
	braceOffset := tok.Offset(body.Rbrace)
	if braceOffset < len(src) && src[braceOffset] == '}' {
	braceLine := tok.Line(body.Rbrace)
	if braceLine >= tok.LineCount() {
		// If we are the last line in the file, no need to fix anything.
	// Move the right brace down one line.
	body.Rbrace = tok.LineStart(braceLine + 1)
// fixDanglingSelector inserts real "_" selector expressions in place
// of phantom "_" selectors. For example:
//
// var x struct { i int }
//
// To fix completion at "<>", we insert a real "_" after the "." so the
// following declaration of "x" can be parsed and type checked
// It returns the modified source, or nil if no fix was needed.
func fixDanglingSelector(s *ast.SelectorExpr, tok *token.File, src []byte) []byte {
	if !isPhantomUnderscore(s.Sel, tok, src) {
	if !s.X.End().IsValid() {
	// Insert directly after the selector's ".".
	insertOffset := tok.Offset(s.X.End()) + 1
	// Sanity check: the byte before the insertion point must be the ".".
	if src[insertOffset-1] != '.' {
	buf.Grow(len(src) + 1)
	buf.Write(src[:insertOffset])
	buf.Write(src[insertOffset:])
// fixPhantomSelector tries to fix selector expressions with phantom
// "_" selectors. In particular, we check if the selector is a
// keyword, and if so we swap in an *ast.Ident with the keyword text. For example:
//
// yields a "_" selector instead of "var" since "var" is a keyword.
func fixPhantomSelector(sel *ast.SelectorExpr, tok *token.File, src []byte) {
	if !isPhantomUnderscore(sel.Sel, tok, src) {
	// Only consider selectors directly abutting the selector ".". This
	// avoids false positives in cases like:
	//
	// foo. // don't think "var" is our selector
	if sel.Sel.Pos() != sel.X.End()+1 {
	maybeKeyword := readKeyword(sel.Sel.Pos(), tok, src)
	if maybeKeyword == "" {
	// Swap the phantom "_" for an ident carrying the keyword text,
	// preserving the original position.
	replaceNode(sel, sel.Sel, &ast.Ident{
		NamePos: sel.Sel.Pos(),
726 // isPhantomUnderscore reports whether the given ident is a phantom
727 // underscore. The parser sometimes inserts phantom underscores when
728 // it encounters otherwise unparseable situations.
729 func isPhantomUnderscore(id *ast.Ident, tok *token.File, src []byte) bool {
730 if id == nil || id.Name != "_" {
734 // Phantom underscore means the underscore is not actually in the
736 offset := tok.Offset(id.Pos())
737 return len(src) <= offset || src[offset] != '_'
// fixInitStmt fixes cases where the parser misinterprets an
// if/for/switch "init" statement as the "cond" conditional. In cases
// like "if i := 0" the user hasn't typed the semicolon yet so the
// parser is looking for the conditional expression. However, "i := 0"
// are not valid expressions, so we get a BadExpr.
func fixInitStmt(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) {
	if !bad.Pos().IsValid() || !bad.End().IsValid() {
	// Try to extract a statement from the BadExpr.
	// The -1/+1 dance avoids calling tok.Offset on a position one past
	// the last character.
	stmtBytes := src[tok.Offset(bad.Pos()) : tok.Offset(bad.End()-1)+1]
	stmt, err := parseStmt(bad.Pos(), stmtBytes)
	// If the parent statement doesn't already have an "init" statement,
	// move the extracted statement into the "init" field and insert a
	// dummy expression into the required "cond" field.
	switch p := parent.(type) {
	case *ast.SwitchStmt:
// readKeyword reads the keyword starting at pos, if any.
// It returns "" when the text at pos is not a Go keyword.
func readKeyword(pos token.Pos, tok *token.File, src []byte) string {
	for i := tok.Offset(pos); i < len(src); i++ {
		// Use a simplified identifier check since keywords are always lowercase ASCII.
		if src[i] < 'a' || src[i] > 'z' {
		kwBytes = append(kwBytes, src[i])
		// Stop search at arbitrarily chosen too-long-for-a-keyword length.
		if len(kwBytes) > 15 {
	// Only return the text if it is actually a Go keyword.
	if kw := string(kwBytes); token.Lookup(kw).IsKeyword() {
// fixArrayType tries to parse an *ast.BadExpr into an *ast.ArrayType.
// go/parser often turns lone array types like "[]int" into BadExprs
// if it isn't expecting a type.
// It reports whether the replacement succeeded.
func fixArrayType(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) bool {
	// Our expected input is a bad expression that looks like "[]someExpr".
	if !from.IsValid() || !to.IsValid() {
	exprBytes := make([]byte, 0, int(to-from)+3)
	// Avoid doing tok.Offset(to) since that panics if badExpr ends at EOF.
	exprBytes = append(exprBytes, src[tok.Offset(from):tok.Offset(to-1)+1]...)
	exprBytes = bytes.TrimSpace(exprBytes)
	// If our expression ends in "]" (e.g. "[]"), add a phantom selector
	// so we can complete directly after the "[]".
	if len(exprBytes) > 0 && exprBytes[len(exprBytes)-1] == ']' {
		exprBytes = append(exprBytes, '_')
	// Add "{}" to turn our ArrayType into a CompositeLit. This is to
	// handle the case of "[...]int" where we must make it a composite
	// literal to be parseable.
	exprBytes = append(exprBytes, '{', '}')
	expr, err := parseExpr(from, exprBytes)
	// Unwrap the composite literal we synthesized to get back the
	// array type itself.
	cl, _ := expr.(*ast.CompositeLit)
	at, _ := cl.Type.(*ast.ArrayType)
	return replaceNode(parent, bad, at)
// precedingToken scans src to find the token preceding pos.
func precedingToken(pos token.Pos, tok *token.File, src []byte) token.Token {
	s := &scanner.Scanner{}
	// Scan with no error handler and default mode; errors are irrelevant
	// here since we only want token positions.
	s.Init(tok, src, nil, 0)
	var lastTok token.Token
	// Stop once we reach pos (or EOF); lastTok then holds the answer.
	if t == token.EOF || p >= pos {
// fixDeferOrGoStmt tries to parse an *ast.BadStmt into a defer or a go statement.
//
// go/parser packages a statement of the form "defer x." as an *ast.BadStmt because
// it does not include a call expression. This means that go/types skips type-checking
// this statement entirely, and we can't use the type information when completing.
// Here, we try to generate a fake *ast.DeferStmt or *ast.GoStmt to put into the AST,
// instead of the *ast.BadStmt.
func fixDeferOrGoStmt(bad *ast.BadStmt, parent ast.Node, tok *token.File, src []byte) bool {
	// Check if we have a bad statement containing either a "go" or "defer".
	s := &scanner.Scanner{}
	s.Init(tok, src, nil, 0)
	if tkn == token.EOF {
	pos, tkn, _ = s.Scan()
	stmt = &ast.DeferStmt{
	// State for scanning out the expression that follows "defer"/"go":
	// from/to bound the expression, last/lastToken track the previous
	// token, and phantomSelectors records where to inject "_" for
	// dangling periods.
	from, to, last token.Pos
	lastToken token.Token
	phantomSelectors []token.Pos
	to, tkn, _ = s.Scan()
	if from == token.NoPos {
	case token.SEMICOLON:
		// If we aren't in nested braces, end of statement means
		// end of expression.
		// This handles the common dangling selector case. For example in
		//
		// we notice the dangling period and end our expression.
		//
		// If the previous token was a "." and we are looking at a "}",
		// the period is likely a dangling selector and needs a phantom
		// "_". Likewise if the current token is on a different line than
		// the period, the period is likely a dangling selector.
		if lastToken == token.PERIOD && (tkn == token.RBRACE || tok.Line(to) > tok.Line(last)) {
			// Insert phantom "_" selector after the dangling ".".
			phantomSelectors = append(phantomSelectors, last+1)
			// If we aren't in a block then end the expression after the ".".
	// +1 to include the "}" itself.
	if !from.IsValid() || tok.Offset(from) >= len(src) {
	if !to.IsValid() || tok.Offset(to) >= len(src) {
	// Insert any phantom selectors needed to prevent dangling "." from messing
	exprBytes := make([]byte, 0, int(to-from)+len(phantomSelectors))
	for i, b := range src[tok.Offset(from):tok.Offset(to)] {
		if len(phantomSelectors) > 0 && from+token.Pos(i) == phantomSelectors[0] {
			exprBytes = append(exprBytes, '_')
			phantomSelectors = phantomSelectors[1:]
		exprBytes = append(exprBytes, b)
	// A phantom selector may remain if the "." was the last thing scanned.
	if len(phantomSelectors) > 0 {
		exprBytes = append(exprBytes, '_')
	expr, err := parseExpr(from, exprBytes)
	// Package the expression into a fake *ast.CallExpr and re-insert
	// into the function.
	call := &ast.CallExpr{
	switch stmt := stmt.(type) {
	case *ast.DeferStmt:
	return replaceNode(parent, bad, stmt)
// parseStmt parses the statement in src and updates its position to
// start at pos.
func parseStmt(pos token.Pos, src []byte) (ast.Stmt, error) {
	// Wrap our expression to make it a valid Go file we can pass to ParseFile.
	fileSrc := bytes.Join([][]byte{
		[]byte("package fake;func _(){"),
	// Use ParseFile instead of ParseExpr because ParseFile has
	// best-effort behavior, whereas ParseExpr fails hard on any error.
	fakeFile, err := parser.ParseFile(token.NewFileSet(), "", fileSrc, 0)
	if fakeFile == nil {
		return nil, errors.Errorf("error reading fake file source: %v", err)
	// Extract our expression node from inside the fake file.
	if len(fakeFile.Decls) == 0 {
		return nil, errors.Errorf("error parsing fake file: %v", err)
	// The wrapper guarantees the first decl is our fake function; its
	// first body statement is the parsed input.
	fakeDecl, _ := fakeFile.Decls[0].(*ast.FuncDecl)
	if fakeDecl == nil || len(fakeDecl.Body.List) == 0 {
		return nil, errors.Errorf("no statement in %s: %v", src, err)
	stmt := fakeDecl.Body.List[0]
	// parser.ParseFile returns undefined positions.
	// Adjust them for the current file.
	offsetPositions(stmt, pos-1-(stmt.Pos()-1))
// parseExpr parses the expression in src and updates its position to
// start at pos.
func parseExpr(pos token.Pos, src []byte) (ast.Expr, error) {
	// Reuse parseStmt, then unwrap the expression statement.
	stmt, err := parseStmt(pos, src)
	exprStmt, ok := stmt.(*ast.ExprStmt)
	return nil, errors.Errorf("no expr in %s: %v", src, err)
	return exprStmt.X, nil
// tokenPosType caches the reflect.Type of token.Pos for field matching.
var tokenPosType = reflect.TypeOf(token.NoPos)

// offsetPositions applies an offset to the positions in an ast.Node.
// It walks every struct field of type token.Pos via reflection and
// shifts each by offset.
func offsetPositions(n ast.Node, offset token.Pos) {
	ast.Inspect(n, func(n ast.Node) bool {
		v := reflect.ValueOf(n).Elem()
		case reflect.Struct:
			for i := 0; i < v.NumField(); i++ {
				// Only token.Pos fields carry positions; skip the rest.
				if f.Type() != tokenPosType {
				f.SetInt(f.Int() + int64(offset))
// replaceNode updates parent's child oldChild to be newChild. It
// returns whether it replaced successfully.
func replaceNode(parent, oldChild, newChild ast.Node) bool {
	if parent == nil || oldChild == nil || newChild == nil {
	parentVal := reflect.ValueOf(parent).Elem()
	if parentVal.Kind() != reflect.Struct {
	newChildVal := reflect.ValueOf(newChild)
	// tryReplace swaps v's value for newChild when v currently holds
	// oldChild and the types are assignable.
	tryReplace := func(v reflect.Value) bool {
		if !v.CanSet() || !v.CanInterface() {
		// If the existing value is oldChild, we found our child. Make
		// sure our newChild is assignable and then make the swap.
		if v.Interface() == oldChild && newChildVal.Type().AssignableTo(v.Type()) {
	// Loop over parent's struct fields.
	for i := 0; i < parentVal.NumField(); i++ {
		f := parentVal.Field(i)
		// Check interface and pointer fields.
		case reflect.Interface, reflect.Ptr:
		// Search through any slice fields.
		for i := 0; i < f.Len(); i++ {
			if tryReplace(f.Index(i)) {