1 // Copyright 2020 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
19 "golang.org/x/tools/internal/event"
20 "golang.org/x/tools/internal/lsp/protocol"
21 "golang.org/x/tools/internal/lsp/source"
22 errors "golang.org/x/xerrors"
// semanticTokensFull handles the textDocument/semanticTokens/full request:
// it computes tokens for the entire file (rng == nil).
// NOTE(review): the return statement is elided from this listing.
25 func (s *Server) semanticTokensFull(ctx context.Context, p *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
26 ret, err := s.computeSemanticTokens(ctx, p.TextDocument, nil)
// semanticTokensFullDelta is unimplemented; it always returns an error so
// clients fall back to full requests.
30 func (s *Server) semanticTokensFullDelta(ctx context.Context, p *protocol.SemanticTokensDeltaParams) (interface{}, error) {
31 return nil, errors.Errorf("implement SemanticTokensFullDelta")
// semanticTokensRange handles textDocument/semanticTokens/range: it computes
// tokens restricted to p.Range.
// NOTE(review): the return statement is elided from this listing.
34 func (s *Server) semanticTokensRange(ctx context.Context, p *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) {
35 ret, err := s.computeSemanticTokens(ctx, p.TextDocument, &p.Range)
// semanticTokensRefresh is unimplemented (server->client refresh).
39 func (s *Server) semanticTokensRefresh(ctx context.Context) error {
40 // in the code, but not in the protocol spec
41 return errors.Errorf("implement SemanticTokensRefresh")
// computeSemanticTokens is the shared implementation behind the full and
// range requests. rng == nil means "whole file". It type-checks the package,
// walks the parse tree via an encoded{...} (construction partially elided in
// this listing), and returns the LSP integer-encoded token data.
// NOTE(review): several error-handling and return lines are elided here.
44 func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocumentIdentifier, rng *protocol.Range) (*protocol.SemanticTokens, error) {
45 ans := protocol.SemanticTokens{
48 snapshot, _, ok, release, err := s.beginFileRequest(ctx, td.URI, source.Go)
// Deliberately error rather than returning empty data when the option is off,
// so the client does not cache a wrong (empty) answer.
54 if !vv.Options().SemanticTokens {
55 // return an error, so if the option changes
56 // the client won't remember the wrong answer
57 return nil, errors.Errorf("semantictokens are disabled")
59 pkg, err := snapshot.PackageForFile(ctx, td.URI.SpanURI(), source.TypecheckFull, source.WidestPackage)
63 info := pkg.GetTypesInfo()
64 pgf, err := pkg.File(td.URI.SpanURI())
// Refuse to produce tokens for files that failed to parse.
68 if pgf.ParseErr != nil {
69 return nil, pgf.ParseErr
76 fset: snapshot.FileSet(),
78 if err := e.init(); err != nil {
82 ans.Data, err = e.Data()
84 // this is an internal error, likely caused by a typo
85 // for a token or modifier
// ResultID is required by the protocol but unused by the client today;
// a timestamp guarantees uniqueness.
88 // for small cache, some day. for now, the client ignores this
89 ans.ResultID = fmt.Sprintf("%v", time.Now())
// semantics emits tokens for the package clause, then inspects only the
// top-level declarations that overlap the requested range [e.start, e.end).
93 func (e *encoded) semantics() {
95 e.token(f.Package, len("package"), tokKeyword, nil)
96 e.token(f.Name.NamePos, len(f.Name.Name), tokNamespace, nil)
97 inspect := func(n ast.Node) bool {
100 for _, d := range f.Decls {
101 // only look at the decls that overlap the range
102 start, end := d.Pos(), d.End()
// Skip declarations entirely outside the range; body elided in this listing.
103 if end <= e.start || start >= e.end {
106 ast.Inspect(d, inspect)
// tokenType names an LSP semantic token type; values must match the strings
// the client registered (see rememberToks / semanticTypes below).
110 type tokenType string
113 tokNamespace tokenType = "namespace"
114 tokType tokenType = "type"
115 tokInterface tokenType = "interface"
116 tokParameter tokenType = "parameter"
117 tokVariable tokenType = "variable"
118 tokMember tokenType = "member"
119 tokFunction tokenType = "function"
120 tokKeyword tokenType = "keyword"
121 tokComment tokenType = "comment"
122 tokString tokenType = "string"
123 tokNumber tokenType = "number"
124 tokOperator tokenType = "operator"
// NOTE(review): package-level mutable state; its users are not visible in
// this listing — confirm it is not shared across concurrent requests.
127 var lastPosition token.Position
// token records one semantic token of the given type/modifiers starting at
// start and spanning leng bytes, after clipping to the requested range and
// converting to LSP line/character coordinates. Tokens that fail range
// conversion or span multiple lines are logged and dropped.
129 func (e *encoded) token(start token.Pos, leng int, typ tokenType, mods []string) {
131 e.unexpected("token at token.NoPos")
// Ignore tokens entirely outside the requested [e.start, e.end) window.
133 if start >= e.end || start+token.Pos(leng) <= e.start {
136 // want a line and column from start (in LSP coordinates)
137 // [//line directives should be ignored]
138 rng := source.NewMappedRange(e.fset, e.pgf.Mapper, start, start+token.Pos(leng))
139 lspRange, err := rng.Range()
141 // possibly a //line directive. TODO(pjw): fix this somehow
142 // "column mapper is for file...instead of..."
143 // "line is beyond end of file..."
144 // see line 116 of internal/span/token.go which uses Position not PositionFor
145 event.Error(e.ctx, "failed to convert to range", err)
// Multi-line tokens cannot be encoded; log (not error) and skip.
148 if lspRange.End.Line != lspRange.Start.Line {
149 // abrupt end of file, without \n. TODO(pjw): fix?
150 pos := e.fset.PositionFor(start, false)
151 msg := fmt.Sprintf("token at %s:%d.%d overflows", pos.Filename, pos.Line, pos.Column)
152 event.Log(e.ctx, msg)
155 // token is all on one line
156 length := lspRange.End.Character - lspRange.Start.Character
157 e.add(lspRange.Start.Line, lspRange.Start.Character, length, typ, mods)
// add appends one raw (absolute-position) token; Data() later sorts the
// items and delta-encodes them.
160 func (e *encoded) add(line, start float64, len float64, tok tokenType, mod []string) {
161 x := semItem{line, start, len, tok, mod}
162 e.items = append(e.items, x)
// NOTE(review): the struct fields are elided from this listing; usage in
// add/Data shows line, start, len, typeStr, and mods fields.
165 // semItem represents a token found walking the parse tree
166 type semItem struct {
// encoded carries all state for one semantic-token computation over a single
// parsed file. NOTE(review): most fields are elided from this listing.
173 type encoded struct {
174 // the generated data
178 pgf *source.ParsedGoFile
182 // allowed starting and ending token.Pos, set by init
183 // used to avoid looking at declarations not in range
185 // path from the root of the parse tree, used for debugging
// strStack renders e.stack as "[Type Type ... (line:col)]" for debug output;
// the [5:] slice strips the "*ast." prefix from each %T string, and the
// trailing position is that of the innermost node.
189 // convert the stack to a string, for debugging
190 func (e *encoded) strStack() string {
192 for _, s := range e.stack {
193 msg = append(msg, fmt.Sprintf("%T", s)[5:])
195 if len(e.stack) > 0 {
196 loc := e.stack[len(e.stack)-1].Pos()
197 add := e.pgf.Tok.PositionFor(loc, false)
198 msg = append(msg, fmt.Sprintf("(%d:%d)", add.Line, add.Column))
200 msg = append(msg, "]")
201 return strings.Join(msg, " ")
// inspector is the ast.Inspect callback: it maintains e.stack (pop on the
// n == nil post-visit, push otherwise) and emits keyword/operator tokens for
// each node kind. Identifiers, literals, etc. are handled in the cases below.
// NOTE(review): many case labels and bodies are elided from this listing —
// several e.token calls appear here without their visible case label.
204 func (e *encoded) inspector(n ast.Node) bool {
206 e.stack = e.stack[:len(e.stack)-1]
212 e.stack = append(e.stack, n)
213 switch x := n.(type) {
215 case *ast.AssignStmt:
216 e.token(x.TokPos, len(x.Tok.String()), tokOperator, nil)
218 // if it extends across a line, skip it
219 // better would be to mark each line as string TODO(pjw)
220 if strings.Contains(x.Value, "\n") {
225 if x.Kind == token.STRING {
// stack[len-2] is the parent of the literal: a Field means a struct tag.
227 if _, ok := e.stack[len(e.stack)-2].(*ast.Field); ok {
228 // struct tags (this is probably pointless, as the
229 // TextMate grammar will treat all the other comments the same)
233 e.token(x.Pos(), ln, what, nil)
234 case *ast.BinaryExpr:
235 e.token(x.OpPos, len(x.Op.String()), tokOperator, nil)
237 case *ast.BranchStmt:
238 e.token(x.TokPos, len(x.Tok.String()), tokKeyword, nil)
239 // There's no semantic encoding for labels
241 if x.Ellipsis != token.NoPos {
242 e.token(x.Ellipsis, len("..."), tokOperator, nil)
244 case *ast.CaseClause:
249 e.token(x.Case, len(iam), tokKeyword, nil)
251 // chan | chan <- | <- chan
// ChanType: "chan" is at Begin unless the arrow precedes it (<-chan),
// in which case it must be searched for after the arrow.
252 if x.Arrow == token.NoPos || x.Arrow != x.Begin {
253 e.token(x.Begin, len("chan"), tokKeyword, nil)
256 pos := e.findKeyword("chan", x.Begin+2, x.Value.Pos())
257 e.token(pos, len("chan"), tokKeyword, nil)
258 case *ast.CommClause:
263 e.token(x.Case, iam, tokKeyword, nil)
264 case *ast.CompositeLit:
267 e.token(x.Defer, len("defer"), tokKeyword, nil)
269 e.token(x.Ellipsis, len("..."), tokOperator, nil)
275 e.token(x.For, len("for"), tokKeyword, nil)
279 if x.Func != token.NoPos {
280 e.token(x.Func, len("func"), tokKeyword, nil)
283 e.token(x.TokPos, len(x.Tok.String()), tokKeyword, nil)
285 e.token(x.Go, len("go"), tokKeyword, nil)
289 e.token(x.If, len("if"), tokKeyword, nil)
// "else" has no recorded position in the AST; search the source between
// the if-body's end and the else-branch's start.
291 // x.Body.End() or x.Body.End()+1, not that it matters
292 pos := e.findKeyword("else", x.Body.End(), x.Else.Pos())
293 e.token(pos, len("else"), tokKeyword, nil)
295 case *ast.ImportSpec:
299 case *ast.IncDecStmt:
300 e.token(x.TokPos, len(x.Tok.String()), tokOperator, nil)
302 case *ast.InterfaceType:
303 e.token(x.Interface, len("interface"), tokKeyword, nil)
304 case *ast.KeyValueExpr:
305 case *ast.LabeledStmt:
307 e.token(x.Map, len("map"), tokKeyword, nil)
310 e.token(x.For, len("for"), tokKeyword, nil)
311 // x.TokPos == token.NoPos is legal (for range foo {})
313 if offset == token.NoPos {
316 pos := e.findKeyword("range", offset, x.X.Pos())
317 e.token(pos, len("range"), tokKeyword, nil)
318 case *ast.ReturnStmt:
319 e.token(x.Return, len("return"), tokKeyword, nil)
320 case *ast.SelectStmt:
321 e.token(x.Select, len("select"), tokKeyword, nil)
322 case *ast.SelectorExpr:
324 e.token(x.Arrow, len("<-"), tokOperator, nil)
327 e.token(x.Star, len("*"), tokOperator, nil)
328 case *ast.StructType:
329 e.token(x.Struct, len("struct"), tokKeyword, nil)
330 case *ast.SwitchStmt:
331 e.token(x.Switch, len("switch"), tokKeyword, nil)
332 case *ast.TypeAssertExpr:
// x.(type) in a type switch: find the "type" keyword between the parens.
334 pos := e.findKeyword("type", x.Lparen, x.Rparen)
335 e.token(pos, len("type"), tokKeyword, nil)
338 case *ast.TypeSwitchStmt:
339 e.token(x.Switch, len("switch"), tokKeyword, nil)
341 e.token(x.OpPos, len(x.Op.String()), tokOperator, nil)
343 // things we won't see
344 case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt,
345 *ast.File, *ast.Package:
346 log.Printf("implement %T %s", x, e.pgf.Tok.PositionFor(x.Pos(), false))
347 // things we knowingly ignore
348 case *ast.Comment, *ast.CommentGroup:
// An unknown node kind is a maintenance error, not a user error.
351 default: // just to be super safe.
352 e.unexpected(fmt.Sprintf("failed to implement %T", x))
// ident classifies a single identifier by looking it up in the type info
// (Defs/Uses — the lookup lines are elided from this listing) and emits the
// matching token: Var/Func/Const/PkgName/TypeName etc., with modifiers such
// as "definition", "readonly", and "defaultLibrary".
// NOTE(review): the case labels for several branches below are elided.
356 func (e *encoded) ident(x *ast.Ident) {
// Definitions are classified by their syntactic context (definitionFor).
359 what, mods := e.definitionFor(x)
361 e.token(x.Pos(), len(x.String()), what, mods)
366 switch y := use.(type) {
368 e.token(x.NamePos, len(x.Name), tokVariable, []string{"definition"})
370 e.token(x.NamePos, len(x.Name), tokFunction, []string{"defaultLibrary"})
// Constants are always tagged readonly; the token type then depends on
// the constant's basic-type info (numeric/string/boolean).
372 mods := []string{"readonly"}
374 if ttx, ok := tt.(*types.Basic); ok {
375 switch bi := ttx.Info(); {
376 case bi&types.IsNumeric != 0:
// "iota" is special-cased (handling elided from this listing).
378 if x.String() == "iota" {
382 e.token(x.Pos(), len(x.String()), me, mods)
383 case bi&types.IsString != 0:
384 e.token(x.Pos(), len(x.String()), tokString, mods)
385 case bi&types.IsBoolean != 0:
386 e.token(x.Pos(), len(x.Name), tokKeyword, nil)
388 e.token(x.Pos(), len(x.String()), tokVariable, mods)
390 msg := fmt.Sprintf("unexpected %x at %s", bi, e.pgf.Tok.PositionFor(x.Pos(), false))
// Named constants whose underlying type is basic are shown as variables.
395 if ttx, ok := tt.(*types.Named); ok {
396 if x.String() == "iota" {
397 e.unexpected(fmt.Sprintf("iota:%T", ttx))
399 if _, ok := ttx.Underlying().(*types.Basic); ok {
400 e.token(x.Pos(), len(x.String()), tokVariable, mods)
403 e.unexpected(fmt.Sprintf("%q/%T", x.String(), tt))
405 // can this happen? Don't think so
406 e.unexpected(fmt.Sprintf("%s %T %#v", x.String(), tt, tt))
408 e.token(x.Pos(), len(x.Name), tokFunction, nil)
410 // nothing to map it to
412 // nil is a predeclared identifier
413 e.token(x.Pos(), len("nil"), tokKeyword, []string{"readonly"})
415 e.token(x.Pos(), len(x.Name), tokNamespace, nil)
416 case *types.TypeName:
417 e.token(x.Pos(), len(x.String()), tokType, nil)
419 e.token(x.Pos(), len(x.Name), tokVariable, nil)
421 // replace with panic after extensive testing
423 msg := fmt.Sprintf("%#v/%#v %#v %#v", x, x.Obj, e.ti.Defs[x], e.ti.Uses[x])
426 if use.Type() != nil {
427 e.unexpected(fmt.Sprintf("%s %T/%T,%#v", x.String(), use, use.Type(), use))
429 e.unexpected(fmt.Sprintf("%s %T", x.String(), use))
// definitionFor classifies a defining identifier by walking e.stack from the
// innermost enclosing node outward: assignments/ranges and GenDecls yield
// variables (readonly for const), FuncDecls distinguish function vs method
// vs receiver vs parameter by depth, and InterfaceType yields a member.
// Returns the token type and its modifiers (always including "definition").
// NOTE(review): some case labels/conditions are elided from this listing.
434 func (e *encoded) definitionFor(x *ast.Ident) (tokenType, []string) {
435 mods := []string{"definition"}
436 for i := len(e.stack) - 1; i >= 0; i-- {
438 switch y := s.(type) {
439 case *ast.AssignStmt, *ast.RangeStmt:
441 return "", nil // not really a variable
443 return "variable", mods
445 if y.Tok == token.CONST {
446 mods = append(mods, "readonly")
448 return tokVariable, mods
450 // If x is immediately under a FuncDecl, it is a function or method
451 if i == len(e.stack)-2 {
453 return tokMember, mods
455 return tokFunction, mods
457 // if x < ... < FieldList < FuncDecl, this is the receiver, a variable
458 if _, ok := e.stack[i+1].(*ast.FieldList); ok {
459 return tokVariable, nil
461 // if x < ... < FieldList < FuncType < FuncDecl, this is a param
462 return tokParameter, mods
463 case *ast.InterfaceType:
464 return tokMember, mods
469 // panic after extensive testing
470 msg := fmt.Sprintf("failed to find the decl for %s", e.pgf.Tok.PositionFor(x.Pos(), false))
472 return "", []string{""}
// findKeyword scans the file's source bytes between start and end for the
// literal keyword text and returns its token.Pos. Used for keywords whose
// position the AST does not record ("else", "range", inner "chan", "type").
// Calls e.unexpected when not found.
// NOTE(review): the acquisition of buf is elided from this listing.
475 // findKeyword finds a keyword rather than guessing its location
476 func (e *encoded) findKeyword(keyword string, start, end token.Pos) token.Pos {
// Convert token.Pos to byte offsets within this file's buffer.
477 offset := int(start) - e.pgf.Tok.Base()
478 last := int(end) - e.pgf.Tok.Base()
480 idx := bytes.Index(buf[offset:last], []byte(keyword))
482 return start + token.Pos(idx)
485 e.unexpected(fmt.Sprintf("not found:%s %v", keyword, e.fset.PositionFor(start, false)))
// init computes the allowed [e.start, e.end) token.Pos window: the whole
// file by default, narrowed to e.rng when a range request was made.
489 func (e *encoded) init() error {
490 e.start = token.Pos(e.pgf.Tok.Base())
491 e.end = e.start + token.Pos(e.pgf.Tok.Size())
495 span, err := e.pgf.Mapper.RangeSpan(*e.rng)
497 return errors.Errorf("range span error for %s", e.pgf.File.Name)
// End must be computed before start is advanced, since both offsets are
// relative to the original e.start (file base).
499 e.end = e.start + token.Pos(span.End().Offset())
500 e.start += token.Pos(span.Start().Offset())
// Data sorts the collected tokens by (line, start) and produces the LSP
// integer encoding: five values per token — delta line, delta start (only
// when on the same line as the previous token), length, type index, and
// modifier bitmask — looked up through SemanticMemo.
504 func (e *encoded) Data() ([]float64, error) {
505 // binary operators, at least, will be out of order
506 sort.Slice(e.items, func(i, j int) bool {
507 if e.items[i].line != e.items[j].line {
508 return e.items[i].line < e.items[j].line
510 return e.items[i].start < e.items[j].start
512 // each semantic token needs five values
513 // (see Integer Encoding for Tokens in the LSP spec)
514 x := make([]float64, 5*len(e.items))
515 for i := 0; i < len(e.items); i++ {
// First token keeps its absolute line; the rest are line deltas.
518 x[0] = e.items[0].line
520 x[j] = e.items[i].line - e.items[i-1].line
522 x[j+1] = e.items[i].start
// start becomes a delta only when still on the previous token's line.
523 if i > 0 && e.items[i].line == e.items[i-1].line {
524 x[j+1] = e.items[i].start - e.items[i-1].start
526 x[j+2] = e.items[i].len
527 x[j+3] = float64(SemanticMemo.TypeMap[e.items[i].typeStr])
529 for _, s := range e.items[i].mods {
530 mask |= SemanticMemo.ModMap[s]
532 x[j+4] = float64(mask)
// importSpec emits a namespace token for an import: the local name when one
// is given (except "." and "_"), otherwise the last path component of the
// quoted import path.
537 func (e *encoded) importSpec(d *ast.ImportSpec) {
538 // a local package name or the last component of the Path
540 nm := d.Name.String()
541 // import . x => x is not a namespace
542 // import _ x => x is a namespace
543 if nm != "_" && nm != "." {
544 e.token(d.Name.Pos(), len(nm), tokNamespace, nil)
550 // and fall through for _
// Strip the surrounding quotes from the path literal, then keep only the
// text after the final "/".
552 nm := d.Path.Value[1 : len(d.Path.Value)-1] // trailing "
553 v := strings.LastIndex(nm, "/")
// Position of the last component: back up from End() over it and the
// closing quote.
557 start := d.Path.End() - token.Pos(1+len(nm))
558 e.token(start, len(nm), tokNamespace, nil)
// unexpected reports an internal inconsistency, dumping the node stack for
// context. NOTE(review): the handling of msg itself is elided from this
// listing; the header comment says it panics.
561 // panic on unexpected state
562 func (e *encoded) unexpected(msg string) {
564 log.Print(e.strStack())
// SemMemo maps between the client's registered token-type/modifier strings
// (by index / bit position) and the tokenType names used internally.
568 // SemMemo supports semantic token translations between numbers and strings
569 type SemMemo struct {
570 tokTypes, tokMods []string
571 // these exported fields are used in the 'gopls semtok' command
572 TypeMap map[tokenType]int
573 ModMap map[string]int
// SemanticMemo is the active translation table, set by rememberToks.
576 var SemanticMemo *SemMemo
// Type returns the registered name for token-type index n; out-of-range
// indices produce a diagnostic placeholder string rather than panicking.
// NOTE(review): the in-range return line is elided from this listing.
578 // Type returns a string equivalent of the type, for gopls semtok
579 func (m *SemMemo) Type(n int) string {
580 if n >= 0 && n < len(m.tokTypes) {
583 return fmt.Sprintf("?%d[%d,%d]?", n, len(m.tokTypes), len(m.tokMods))
// Mods decodes a modifier bitmask n into the registered modifier names:
// bit i set means m.tokMods[i] applies.
586 // Mods returns the []string equivalent of the mods, for gopls semtok.
587 func (m *SemMemo) Mods(n int) []string {
589 for i := 0; i < len(m.tokMods); i++ {
590 if (n & (1 << uint(i))) != 0 {
591 mods = append(mods, m.tokMods[i])
// rememberToks installs the client's token-type and modifier lists into
// SemanticMemo: types map to their index, modifiers to a one-bit mask.
// NOTE(review): package-level assignment with no visible synchronization.
597 // save what the client sent
598 func rememberToks(toks []string, mods []string) {
599 SemanticMemo = &SemMemo{
602 TypeMap: make(map[tokenType]int),
603 ModMap: make(map[string]int),
605 for i, t := range toks {
606 SemanticMemo.TypeMap[tokenType(t)] = i
608 for i, m := range mods {
609 SemanticMemo.ModMap[m] = 1 << uint(i)
611 // we could have pruned or rearranged them.
612 // But then change the list in cmd.go too
// SemanticTypes returns the default token-type list (a copy via the full
// slice of the array) for use without a client.
615 // SemanticTypes to use in case there is no client, as in the command line, or tests
616 func SemanticTypes() []string {
617 return semanticTypes[:]
// SemanticModifiers returns the default modifier list for use without a client.
620 // SemanticModifiers to use in case there is no client.
621 func SemanticModifiers() []string {
622 return semanticModifiers[:]
// Default LSP token-type and modifier name lists; order matters, since
// indices (types) and bit positions (modifiers) are derived from it.
626 semanticTypes = [...]string{
627 "namespace", "type", "class", "enum", "interface",
628 "struct", "typeParameter", "parameter", "variable", "property", "enumMember",
629 "event", "function", "member", "macro", "keyword", "modifier", "comment",
630 "string", "number", "regexp", "operator"}
631 semanticModifiers = [...]string{
632 "declaration", "definition", "readonly", "static",
633 "deprecated", "abstract", "async", "modification", "documentation", "defaultLibrary"}