1 // Copyright 2019 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
5 // Package tests exports functionality to be used across a variety of gopls tests.
26 "golang.org/x/tools/go/expect"
27 "golang.org/x/tools/go/packages"
28 "golang.org/x/tools/go/packages/packagestest"
29 "golang.org/x/tools/internal/lsp/protocol"
30 "golang.org/x/tools/internal/lsp/source"
31 "golang.org/x/tools/internal/lsp/source/completion"
32 "golang.org/x/tools/internal/span"
33 "golang.org/x/tools/internal/testenv"
34 "golang.org/x/tools/txtar"
38 overlayFileSuffix = ".overlay"
39 goldenFileSuffix = ".golden"
41 summaryFile = "summary.txt"
42 testModule = "golang.org/x/tools/internal/lsp"
// UpdateGolden, when set via the -golden flag, causes tests to rewrite
// their golden (expected-output) files with the observed results
// instead of comparing against them.
45 var UpdateGolden = flag.Bool("golden", false, "Update golden files")
// The collection types below map test-data markers to expected results.
// Keys are file positions/ranges (span.Span), whole files (span.URI),
// or plain query strings; values are the results the corresponding LSP
// feature is expected to produce. They are populated by the collect*
// methods and consumed by Run.
47 type CallHierarchy map[span.Span]*CallHierarchyResult
48 type CodeLens map[span.URI][]protocol.CodeLens
49 type Diagnostics map[span.URI][]*source.Diagnostic
50 type CompletionItems map[token.Pos]*completion.CompletionItem

// The completion test variants (default, unimported, deep, fuzzy,
// case-sensitive, rank) all share the Completion result shape; they
// differ only in which test mode collects and runs them.
51 type Completions map[span.Span][]Completion
52 type CompletionSnippets map[span.Span][]CompletionSnippet
53 type UnimportedCompletions map[span.Span][]Completion
54 type DeepCompletions map[span.Span][]Completion
55 type FuzzyCompletions map[span.Span][]Completion
56 type CaseSensitiveCompletions map[span.Span][]Completion
57 type RankCompletions map[span.Span][]Completion

// The span-list types record only the positions at which a feature is
// exercised; the expected output lives in golden files.
58 type FoldingRanges []span.Span
59 type Formats []span.Span
60 type Imports []span.Span
61 type SemanticTokens []span.Span

62 type SuggestedFixes map[span.Span][]string // expected code-action kinds per position
63 type FunctionExtractions map[span.Span]span.Span // extraction start position -> end position
64 type Definitions map[span.Span]Definition
65 type Implementations map[span.Span][]span.Span
66 type Highlights map[span.Span][]span.Span
67 type References map[span.Span][]span.Span
68 type Renames map[span.Span]string // position -> expected replacement text
69 type PrepareRenames map[span.Span]*source.PrepareItem
70 type Symbols map[span.URI][]protocol.DocumentSymbol
71 type SymbolsChildren map[string][]protocol.DocumentSymbol // parent symbol name -> child symbols
72 type SymbolInformation map[span.Span]protocol.SymbolInformation
73 type WorkspaceSymbols map[string][]protocol.SymbolInformation // query string -> expected symbols
74 type Signatures map[span.Span]*protocol.SignatureHelp
75 type Links map[span.URI][]Link
78 Config packages.Config
79 Exported *packagestest.Exported
80 CallHierarchy CallHierarchy
82 Diagnostics Diagnostics
83 CompletionItems CompletionItems
84 Completions Completions
85 CompletionSnippets CompletionSnippets
86 UnimportedCompletions UnimportedCompletions
87 DeepCompletions DeepCompletions
88 FuzzyCompletions FuzzyCompletions
89 CaseSensitiveCompletions CaseSensitiveCompletions
90 RankCompletions RankCompletions
91 FoldingRanges FoldingRanges
94 SemanticTokens SemanticTokens
95 SuggestedFixes SuggestedFixes
96 FunctionExtractions FunctionExtractions
97 Definitions Definitions
98 Implementations Implementations
100 References References
102 PrepareRenames PrepareRenames
104 symbolsChildren SymbolsChildren
105 symbolInformation SymbolInformation
106 WorkspaceSymbols WorkspaceSymbols
107 FuzzyWorkspaceSymbols WorkspaceSymbols
108 CaseSensitiveWorkspaceSymbols WorkspaceSymbols
109 Signatures Signatures
113 fragments map[string]string
115 golden map[string]*Golden
118 ModfileFlagAvailable bool
121 mappers map[span.URI]*protocol.ColumnMapper
124 type Tests interface {
125 CallHierarchy(*testing.T, span.Span, *CallHierarchyResult)
126 CodeLens(*testing.T, span.URI, []protocol.CodeLens)
127 Diagnostics(*testing.T, span.URI, []*source.Diagnostic)
128 Completion(*testing.T, span.Span, Completion, CompletionItems)
129 CompletionSnippet(*testing.T, span.Span, CompletionSnippet, bool, CompletionItems)
130 UnimportedCompletion(*testing.T, span.Span, Completion, CompletionItems)
131 DeepCompletion(*testing.T, span.Span, Completion, CompletionItems)
132 FuzzyCompletion(*testing.T, span.Span, Completion, CompletionItems)
133 CaseSensitiveCompletion(*testing.T, span.Span, Completion, CompletionItems)
134 RankCompletion(*testing.T, span.Span, Completion, CompletionItems)
135 FoldingRanges(*testing.T, span.Span)
136 Format(*testing.T, span.Span)
137 Import(*testing.T, span.Span)
138 SemanticTokens(*testing.T, span.Span)
139 SuggestedFix(*testing.T, span.Span, []string)
140 FunctionExtraction(*testing.T, span.Span, span.Span)
141 Definition(*testing.T, span.Span, Definition)
142 Implementation(*testing.T, span.Span, []span.Span)
143 Highlight(*testing.T, span.Span, []span.Span)
144 References(*testing.T, span.Span, []span.Span)
145 Rename(*testing.T, span.Span, string)
146 PrepareRename(*testing.T, span.Span, *source.PrepareItem)
147 Symbols(*testing.T, span.URI, []protocol.DocumentSymbol)
148 WorkspaceSymbols(*testing.T, string, []protocol.SymbolInformation, map[string]struct{})
149 FuzzyWorkspaceSymbols(*testing.T, string, []protocol.SymbolInformation, map[string]struct{})
150 CaseSensitiveWorkspaceSymbols(*testing.T, string, []protocol.SymbolInformation, map[string]struct{})
151 SignatureHelp(*testing.T, span.Span, *protocol.SignatureHelp)
152 Link(*testing.T, span.URI, []Link)
155 type Definition struct {
// CompletionTestType distinguishes the completion test variants
// (default, unimported, deep, fuzzy, case-sensitive, rank); see the
// Completion* constants below and collectCompletions, which dispatches
// on this value.
162 type CompletionTestType int
165 // Default runs the standard completion tests.
166 CompletionDefault = CompletionTestType(iota)
168 // Unimported tests the autocompletion of unimported packages.
171 // Deep tests deep completion.
174 // Fuzzy tests deep completion and fuzzy matching.
177 // CaseSensitive tests case sensitive completion.
178 CompletionCaseSensitive
180 // CompletionRank candidates in test must be valid and in the right relative order.
// WorkspaceSymbolsTestType distinguishes the workspace-symbol test
// variants (default, fuzzy, case-sensitive); see the WorkspaceSymbols*
// constants below and collectWorkspaceSymbols, which dispatches on
// this value.
184 type WorkspaceSymbolsTestType int
187 // Default runs the standard workspace symbols tests.
188 WorkspaceSymbolsDefault = WorkspaceSymbolsTestType(iota)
190 // Fuzzy tests workspace symbols with fuzzy matching.
191 WorkspaceSymbolsFuzzy
193 // CaseSensitive tests workspace symbols with case sensitive.
194 WorkspaceSymbolsCaseSensitive
197 type Completion struct {
198 CompletionItems []token.Pos
201 type CompletionSnippet struct {
202 CompletionItem token.Pos
204 PlaceholderSnippet string
207 type CallHierarchyResult struct {
208 IncomingCalls, OutgoingCalls []protocol.CallHierarchyItem
214 NotePosition token.Position
219 Archive *txtar.Archive
223 func Context(t testing.TB) context.Context {
224 return context.Background()
227 func DefaultOptions(o *source.Options) {
228 o.SupportedCodeActions = map[source.FileKind]map[protocol.CodeActionKind]bool{
230 protocol.SourceOrganizeImports: true,
231 protocol.QuickFix: true,
232 protocol.RefactorRewrite: true,
233 protocol.RefactorExtract: true,
234 protocol.SourceFixAll: true,
237 protocol.SourceOrganizeImports: true,
241 o.ExperimentalOptions.Codelens[source.CommandTest.Name] = true
242 o.HoverKind = source.SynopsisDocumentation
243 o.InsertTextFormat = protocol.SnippetTextFormat
244 o.CompletionBudget = time.Minute
245 o.HierarchicalDocumentSymbolSupport = true
246 o.ExperimentalWorkspaceModule = true
247 o.SemanticTokens = true
250 func RunTests(t *testing.T, dataDir string, includeMultiModule bool, f func(*testing.T, *Data)) {
252 modes := []string{"Modules", "GOPATH"}
253 if includeMultiModule {
254 modes = append(modes, "MultiModule")
256 for _, mode := range modes {
257 t.Run(mode, func(t *testing.T) {
259 if mode == "MultiModule" {
260 // Some bug in 1.12 breaks reading markers, and it's not worth figuring out.
261 testenv.NeedsGo1Point(t, 13)
263 datum := load(t, mode, dataDir)
269 func load(t testing.TB, mode string, dir string) *Data {
273 CallHierarchy: make(CallHierarchy),
274 CodeLens: make(CodeLens),
275 Diagnostics: make(Diagnostics),
276 CompletionItems: make(CompletionItems),
277 Completions: make(Completions),
278 CompletionSnippets: make(CompletionSnippets),
279 UnimportedCompletions: make(UnimportedCompletions),
280 DeepCompletions: make(DeepCompletions),
281 FuzzyCompletions: make(FuzzyCompletions),
282 RankCompletions: make(RankCompletions),
283 CaseSensitiveCompletions: make(CaseSensitiveCompletions),
284 Definitions: make(Definitions),
285 Implementations: make(Implementations),
286 Highlights: make(Highlights),
287 References: make(References),
288 Renames: make(Renames),
289 PrepareRenames: make(PrepareRenames),
290 SuggestedFixes: make(SuggestedFixes),
291 FunctionExtractions: make(FunctionExtractions),
292 Symbols: make(Symbols),
293 symbolsChildren: make(SymbolsChildren),
294 symbolInformation: make(SymbolInformation),
295 WorkspaceSymbols: make(WorkspaceSymbols),
296 FuzzyWorkspaceSymbols: make(WorkspaceSymbols),
297 CaseSensitiveWorkspaceSymbols: make(WorkspaceSymbols),
298 Signatures: make(Signatures),
303 fragments: map[string]string{},
304 golden: map[string]*Golden{},
306 mappers: map[span.URI]*protocol.ColumnMapper{},
310 summary := filepath.Join(filepath.FromSlash(dir), summaryFile+goldenFileSuffix)
311 if _, err := os.Stat(summary); os.IsNotExist(err) {
312 t.Fatalf("could not find golden file summary.txt in %#v", dir)
314 archive, err := txtar.ParseFile(summary)
316 t.Fatalf("could not read golden file %v/%v: %v", dir, summary, err)
318 datum.golden[summaryFile] = &Golden{
324 files := packagestest.MustCopyFileTree(dir)
325 overlays := map[string][]byte{}
326 for fragment, operation := range files {
327 if trimmed := strings.TrimSuffix(fragment, goldenFileSuffix); trimmed != fragment {
328 delete(files, fragment)
329 goldFile := filepath.Join(dir, fragment)
330 archive, err := txtar.ParseFile(goldFile)
332 t.Fatalf("could not read golden file %v: %v", fragment, err)
334 datum.golden[trimmed] = &Golden{
338 } else if trimmed := strings.TrimSuffix(fragment, inFileSuffix); trimmed != fragment {
339 delete(files, fragment)
340 files[trimmed] = operation
341 } else if index := strings.Index(fragment, overlayFileSuffix); index >= 0 {
342 delete(files, fragment)
343 partial := fragment[:index] + fragment[index+len(overlayFileSuffix):]
344 contents, err := ioutil.ReadFile(filepath.Join(dir, fragment))
348 overlays[partial] = contents
352 modules := []packagestest.Module{
361 datum.Exported = packagestest.Export(t, packagestest.Modules, modules)
363 datum.Exported = packagestest.Export(t, packagestest.GOPATH, modules)
365 files := map[string]interface{}{}
366 for k, v := range modules[0].Files {
367 files[filepath.Join("testmodule", k)] = v
369 modules[0].Files = files
371 overlays := map[string][]byte{}
372 for k, v := range modules[0].Overlay {
373 overlays[filepath.Join("testmodule", k)] = v
375 modules[0].Overlay = overlays
377 golden := map[string]*Golden{}
378 for k, v := range datum.golden {
379 if k == summaryFile {
382 golden[filepath.Join("testmodule", k)] = v
385 datum.golden = golden
387 datum.Exported = packagestest.Export(t, packagestest.Modules, modules)
389 panic("unknown mode " + mode)
392 for _, m := range modules {
393 for fragment := range m.Files {
394 filename := datum.Exported.File(m.Name, fragment)
395 datum.fragments[filename] = fragment
399 // Turn off go/packages debug logging.
400 datum.Exported.Config.Logf = nil
401 datum.Config.Logf = nil
403 // Merge the exported.Config with the view.Config.
404 datum.Config = *datum.Exported.Config
405 datum.Config.Fset = token.NewFileSet()
406 datum.Config.Context = Context(nil)
407 datum.Config.ParseFile = func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) {
408 panic("ParseFile should not be called")
411 // Do a first pass to collect special markers for completion and workspace symbols.
412 if err := datum.Exported.Expect(map[string]interface{}{
413 "item": func(name string, r packagestest.Range, _ []string) {
414 datum.Exported.Mark(name, r)
416 "symbol": func(name string, r packagestest.Range, _ []string) {
417 datum.Exported.Mark(name, r)
423 // Collect any data that needs to be used by subsequent tests.
424 if err := datum.Exported.Expect(map[string]interface{}{
425 "codelens": datum.collectCodeLens,
426 "diag": datum.collectDiagnostics,
427 "item": datum.collectCompletionItems,
428 "complete": datum.collectCompletions(CompletionDefault),
429 "unimported": datum.collectCompletions(CompletionUnimported),
430 "deep": datum.collectCompletions(CompletionDeep),
431 "fuzzy": datum.collectCompletions(CompletionFuzzy),
432 "casesensitive": datum.collectCompletions(CompletionCaseSensitive),
433 "rank": datum.collectCompletions(CompletionRank),
434 "snippet": datum.collectCompletionSnippets,
435 "fold": datum.collectFoldingRanges,
436 "format": datum.collectFormats,
437 "import": datum.collectImports,
438 "semantic": datum.collectSemanticTokens,
439 "godef": datum.collectDefinitions,
440 "implementations": datum.collectImplementations,
441 "typdef": datum.collectTypeDefinitions,
442 "hover": datum.collectHoverDefinitions,
443 "highlight": datum.collectHighlights,
444 "refs": datum.collectReferences,
445 "rename": datum.collectRenames,
446 "prepare": datum.collectPrepareRenames,
447 "symbol": datum.collectSymbols,
448 "signature": datum.collectSignatures,
449 "link": datum.collectLinks,
450 "suggestedfix": datum.collectSuggestedFixes,
451 "extractfunc": datum.collectFunctionExtractions,
452 "incomingcalls": datum.collectIncomingCalls,
453 "outgoingcalls": datum.collectOutgoingCalls,
457 for _, symbols := range datum.Symbols {
458 for i := range symbols {
459 children := datum.symbolsChildren[symbols[i].Name]
460 symbols[i].Children = children
463 // Collect names for the entries that require golden files.
464 if err := datum.Exported.Expect(map[string]interface{}{
465 "godef": datum.collectDefinitionNames,
466 "hover": datum.collectDefinitionNames,
467 "workspacesymbol": datum.collectWorkspaceSymbols(WorkspaceSymbolsDefault),
468 "workspacesymbolfuzzy": datum.collectWorkspaceSymbols(WorkspaceSymbolsFuzzy),
469 "workspacesymbolcasesensitive": datum.collectWorkspaceSymbols(WorkspaceSymbolsCaseSensitive),
473 if mode == "MultiModule" {
474 if err := os.Rename(filepath.Join(datum.Config.Dir, "go.mod"), filepath.Join(datum.Config.Dir, "testmodule/go.mod")); err != nil {
482 func Run(t *testing.T, tests Tests, data *Data) {
486 eachCompletion := func(t *testing.T, cases map[span.Span][]Completion, test func(*testing.T, span.Span, Completion, CompletionItems)) {
489 for src, exp := range cases {
490 for i, e := range exp {
491 t.Run(SpanName(src)+"_"+strconv.Itoa(i), func(t *testing.T) {
493 if strings.Contains(t.Name(), "complit") || strings.Contains(t.Name(), "UnimportedCompletion") {
494 if data.mode == "MultiModule" {
495 t.Skip("Unimported completions are broken in multi-module mode")
498 if strings.Contains(t.Name(), "cgo") {
499 testenv.NeedsTool(t, "cgo")
501 if strings.Contains(t.Name(), "declarecgo") {
502 testenv.NeedsGo1Point(t, 15)
504 test(t, src, e, data.CompletionItems)
511 eachWorkspaceSymbols := func(t *testing.T, cases map[string][]protocol.SymbolInformation, test func(*testing.T, string, []protocol.SymbolInformation, map[string]struct{})) {
514 for query, expectedSymbols := range cases {
519 t.Run(name, func(t *testing.T) {
521 dirs := make(map[string]struct{})
522 for _, si := range expectedSymbols {
523 d := filepath.Dir(si.Location.URI.SpanURI().Filename())
524 if _, ok := dirs[d]; !ok {
528 test(t, query, expectedSymbols, dirs)
533 t.Run("CallHierarchy", func(t *testing.T) {
535 for spn, callHierarchyResult := range data.CallHierarchy {
536 t.Run(SpanName(spn), func(t *testing.T) {
538 tests.CallHierarchy(t, spn, callHierarchyResult)
543 t.Run("Completion", func(t *testing.T) {
545 eachCompletion(t, data.Completions, tests.Completion)
548 t.Run("CompletionSnippets", func(t *testing.T) {
550 for _, placeholders := range []bool{true, false} {
551 for src, expecteds := range data.CompletionSnippets {
552 for i, expected := range expecteds {
553 name := SpanName(src) + "_" + strconv.Itoa(i+1)
555 name += "_placeholders"
558 t.Run(name, func(t *testing.T) {
560 tests.CompletionSnippet(t, src, expected, placeholders, data.CompletionItems)
567 t.Run("UnimportedCompletion", func(t *testing.T) {
569 eachCompletion(t, data.UnimportedCompletions, tests.UnimportedCompletion)
572 t.Run("DeepCompletion", func(t *testing.T) {
574 eachCompletion(t, data.DeepCompletions, tests.DeepCompletion)
577 t.Run("FuzzyCompletion", func(t *testing.T) {
579 eachCompletion(t, data.FuzzyCompletions, tests.FuzzyCompletion)
582 t.Run("CaseSensitiveCompletion", func(t *testing.T) {
584 eachCompletion(t, data.CaseSensitiveCompletions, tests.CaseSensitiveCompletion)
587 t.Run("RankCompletions", func(t *testing.T) {
589 eachCompletion(t, data.RankCompletions, tests.RankCompletion)
592 t.Run("CodeLens", func(t *testing.T) {
594 for uri, want := range data.CodeLens {
595 // Check if we should skip this URI if the -modfile flag is not available.
596 if shouldSkip(data, uri) {
599 t.Run(uriName(uri), func(t *testing.T) {
601 tests.CodeLens(t, uri, want)
606 t.Run("Diagnostics", func(t *testing.T) {
608 for uri, want := range data.Diagnostics {
609 // Check if we should skip this URI if the -modfile flag is not available.
610 if shouldSkip(data, uri) {
613 t.Run(uriName(uri), func(t *testing.T) {
615 tests.Diagnostics(t, uri, want)
620 t.Run("FoldingRange", func(t *testing.T) {
622 for _, spn := range data.FoldingRanges {
623 t.Run(uriName(spn.URI()), func(t *testing.T) {
625 tests.FoldingRanges(t, spn)
630 t.Run("Format", func(t *testing.T) {
632 for _, spn := range data.Formats {
633 t.Run(uriName(spn.URI()), func(t *testing.T) {
640 t.Run("Import", func(t *testing.T) {
642 for _, spn := range data.Imports {
643 t.Run(uriName(spn.URI()), func(t *testing.T) {
650 t.Run("SemanticTokens", func(t *testing.T) {
652 for _, spn := range data.SemanticTokens {
653 t.Run(uriName(spn.URI()), func(t *testing.T) {
655 tests.SemanticTokens(t, spn)
660 t.Run("SuggestedFix", func(t *testing.T) {
662 for spn, actionKinds := range data.SuggestedFixes {
663 // Check if we should skip this spn if the -modfile flag is not available.
664 if shouldSkip(data, spn.URI()) {
667 t.Run(SpanName(spn), func(t *testing.T) {
669 tests.SuggestedFix(t, spn, actionKinds)
674 t.Run("FunctionExtraction", func(t *testing.T) {
676 for start, end := range data.FunctionExtractions {
677 // Check if we should skip this spn if the -modfile flag is not available.
678 if shouldSkip(data, start.URI()) {
681 t.Run(SpanName(start), func(t *testing.T) {
683 tests.FunctionExtraction(t, start, end)
688 t.Run("Definition", func(t *testing.T) {
690 for spn, d := range data.Definitions {
691 t.Run(SpanName(spn), func(t *testing.T) {
693 if strings.Contains(t.Name(), "cgo") {
694 testenv.NeedsTool(t, "cgo")
696 if strings.Contains(t.Name(), "declarecgo") {
697 testenv.NeedsGo1Point(t, 15)
699 tests.Definition(t, spn, d)
704 t.Run("Implementation", func(t *testing.T) {
706 for spn, m := range data.Implementations {
707 t.Run(SpanName(spn), func(t *testing.T) {
709 tests.Implementation(t, spn, m)
714 t.Run("Highlight", func(t *testing.T) {
716 for pos, locations := range data.Highlights {
717 t.Run(SpanName(pos), func(t *testing.T) {
719 tests.Highlight(t, pos, locations)
724 t.Run("References", func(t *testing.T) {
726 for src, itemList := range data.References {
727 t.Run(SpanName(src), func(t *testing.T) {
729 tests.References(t, src, itemList)
734 t.Run("Renames", func(t *testing.T) {
736 for spn, newText := range data.Renames {
737 t.Run(uriName(spn.URI())+"_"+newText, func(t *testing.T) {
739 tests.Rename(t, spn, newText)
744 t.Run("PrepareRenames", func(t *testing.T) {
746 for src, want := range data.PrepareRenames {
747 t.Run(SpanName(src), func(t *testing.T) {
749 tests.PrepareRename(t, src, want)
754 t.Run("Symbols", func(t *testing.T) {
756 for uri, expectedSymbols := range data.Symbols {
757 t.Run(uriName(uri), func(t *testing.T) {
759 tests.Symbols(t, uri, expectedSymbols)
764 t.Run("WorkspaceSymbols", func(t *testing.T) {
766 eachWorkspaceSymbols(t, data.WorkspaceSymbols, tests.WorkspaceSymbols)
769 t.Run("FuzzyWorkspaceSymbols", func(t *testing.T) {
771 eachWorkspaceSymbols(t, data.FuzzyWorkspaceSymbols, tests.FuzzyWorkspaceSymbols)
774 t.Run("CaseSensitiveWorkspaceSymbols", func(t *testing.T) {
776 eachWorkspaceSymbols(t, data.CaseSensitiveWorkspaceSymbols, tests.CaseSensitiveWorkspaceSymbols)
779 t.Run("SignatureHelp", func(t *testing.T) {
781 for spn, expectedSignature := range data.Signatures {
782 t.Run(SpanName(spn), func(t *testing.T) {
784 tests.SignatureHelp(t, spn, expectedSignature)
789 t.Run("Link", func(t *testing.T) {
791 for uri, wantLinks := range data.Links {
792 // If we are testing GOPATH, then we do not want links with
793 // the versions attached (pkg.go.dev/repoa/moda@v1.1.0/pkg),
794 // unless the file is a go.mod, in which case we can skip it altogether.
795 if data.Exported.Exporter == packagestest.GOPATH {
796 if strings.HasSuffix(uri.Filename(), ".mod") {
799 re := regexp.MustCompile(`@v\d+\.\d+\.[\w-]+`)
800 for i, link := range wantLinks {
801 wantLinks[i].Target = re.ReplaceAllString(link.Target, "")
804 t.Run(uriName(uri), func(t *testing.T) {
806 tests.Link(t, uri, wantLinks)
812 for _, golden := range data.golden {
813 if !golden.Modified {
816 sort.Slice(golden.Archive.Files, func(i, j int) bool {
817 return golden.Archive.Files[i].Name < golden.Archive.Files[j].Name
819 if err := ioutil.WriteFile(golden.Filename, txtar.Format(golden.Archive), 0666); err != nil {
826 func checkData(t *testing.T, data *Data) {
827 buf := &bytes.Buffer{}
828 diagnosticsCount := 0
829 for _, want := range data.Diagnostics {
830 diagnosticsCount += len(want)
833 for _, want := range data.Links {
834 linksCount += len(want)
837 typeDefinitionCount := 0
838 for _, d := range data.Definitions {
840 typeDefinitionCount++
847 for _, want := range data.CompletionSnippets {
848 snippetCount += len(want)
851 countCompletions := func(c map[span.Span][]Completion) (count int) {
852 for _, want := range c {
858 countCodeLens := func(c map[span.URI][]protocol.CodeLens) (count int) {
859 for _, want := range c {
865 fmt.Fprintf(buf, "CallHierarchyCount = %v\n", len(data.CallHierarchy))
866 fmt.Fprintf(buf, "CodeLensCount = %v\n", countCodeLens(data.CodeLens))
867 fmt.Fprintf(buf, "CompletionsCount = %v\n", countCompletions(data.Completions))
868 fmt.Fprintf(buf, "CompletionSnippetCount = %v\n", snippetCount)
869 fmt.Fprintf(buf, "UnimportedCompletionsCount = %v\n", countCompletions(data.UnimportedCompletions))
870 fmt.Fprintf(buf, "DeepCompletionsCount = %v\n", countCompletions(data.DeepCompletions))
871 fmt.Fprintf(buf, "FuzzyCompletionsCount = %v\n", countCompletions(data.FuzzyCompletions))
872 fmt.Fprintf(buf, "RankedCompletionsCount = %v\n", countCompletions(data.RankCompletions))
873 fmt.Fprintf(buf, "CaseSensitiveCompletionsCount = %v\n", countCompletions(data.CaseSensitiveCompletions))
874 fmt.Fprintf(buf, "DiagnosticsCount = %v\n", diagnosticsCount)
875 fmt.Fprintf(buf, "FoldingRangesCount = %v\n", len(data.FoldingRanges))
876 fmt.Fprintf(buf, "FormatCount = %v\n", len(data.Formats))
877 fmt.Fprintf(buf, "ImportCount = %v\n", len(data.Imports))
878 fmt.Fprintf(buf, "SemanticTokenCount = %v\n", len(data.SemanticTokens))
879 fmt.Fprintf(buf, "SuggestedFixCount = %v\n", len(data.SuggestedFixes))
880 fmt.Fprintf(buf, "FunctionExtractionCount = %v\n", len(data.FunctionExtractions))
881 fmt.Fprintf(buf, "DefinitionsCount = %v\n", definitionCount)
882 fmt.Fprintf(buf, "TypeDefinitionsCount = %v\n", typeDefinitionCount)
883 fmt.Fprintf(buf, "HighlightsCount = %v\n", len(data.Highlights))
884 fmt.Fprintf(buf, "ReferencesCount = %v\n", len(data.References))
885 fmt.Fprintf(buf, "RenamesCount = %v\n", len(data.Renames))
886 fmt.Fprintf(buf, "PrepareRenamesCount = %v\n", len(data.PrepareRenames))
887 fmt.Fprintf(buf, "SymbolsCount = %v\n", len(data.Symbols))
888 fmt.Fprintf(buf, "WorkspaceSymbolsCount = %v\n", len(data.WorkspaceSymbols))
889 fmt.Fprintf(buf, "FuzzyWorkspaceSymbolsCount = %v\n", len(data.FuzzyWorkspaceSymbols))
890 fmt.Fprintf(buf, "CaseSensitiveWorkspaceSymbolsCount = %v\n", len(data.CaseSensitiveWorkspaceSymbols))
891 fmt.Fprintf(buf, "SignaturesCount = %v\n", len(data.Signatures))
892 fmt.Fprintf(buf, "LinksCount = %v\n", linksCount)
893 fmt.Fprintf(buf, "ImplementationsCount = %v\n", len(data.Implementations))
895 want := string(data.Golden("summary", summaryFile, func() ([]byte, error) {
896 return buf.Bytes(), nil
900 t.Errorf("test summary does not match:\n%s", Diff(want, got))
904 func (data *Data) Mapper(uri span.URI) (*protocol.ColumnMapper, error) {
905 data.mappersMu.Lock()
906 defer data.mappersMu.Unlock()
908 if _, ok := data.mappers[uri]; !ok {
909 content, err := data.Exported.FileContents(uri.Filename())
913 converter := span.NewContentConverter(uri.Filename(), content)
914 data.mappers[uri] = &protocol.ColumnMapper{
916 Converter: converter,
920 return data.mappers[uri], nil
923 func (data *Data) Golden(tag string, target string, update func() ([]byte, error)) []byte {
925 fragment, found := data.fragments[target]
927 if filepath.IsAbs(target) {
928 data.t.Fatalf("invalid golden file fragment %v", target)
932 golden := data.golden[fragment]
935 data.t.Fatalf("could not find golden file %v: %v", fragment, tag)
938 if fragment != summaryFile {
939 subdir = "primarymod"
942 Filename: filepath.Join(data.dir, subdir, fragment+goldenFileSuffix),
943 Archive: &txtar.Archive{},
946 data.golden[fragment] = golden
949 for i := range golden.Archive.Files {
950 f := &golden.Archive.Files[i]
958 golden.Archive.Files = append(golden.Archive.Files, txtar.File{
961 file = &golden.Archive.Files[len(golden.Archive.Files)-1]
963 contents, err := update()
965 data.t.Fatalf("could not update golden file %v: %v", fragment, err)
967 file.Data = append(contents, '\n') // add trailing \n for txtar
968 golden.Modified = true
972 data.t.Fatalf("could not find golden contents %v: %v", fragment, tag)
974 if len(file.Data) == 0 {
977 return file.Data[:len(file.Data)-1] // drop the trailing \n
980 func (data *Data) collectCodeLens(spn span.Span, title, cmd string) {
981 if _, ok := data.CodeLens[spn.URI()]; !ok {
982 data.CodeLens[spn.URI()] = []protocol.CodeLens{}
984 m, err := data.Mapper(spn.URI())
988 rng, err := m.Range(spn)
992 data.CodeLens[spn.URI()] = append(data.CodeLens[spn.URI()], protocol.CodeLens{
994 Command: protocol.Command{
1001 func (data *Data) collectDiagnostics(spn span.Span, msgSource, msg, msgSeverity string) {
1002 if _, ok := data.Diagnostics[spn.URI()]; !ok {
1003 data.Diagnostics[spn.URI()] = []*source.Diagnostic{}
1005 m, err := data.Mapper(spn.URI())
1009 rng, err := m.Range(spn)
1013 severity := protocol.SeverityError
1014 switch msgSeverity {
1016 severity = protocol.SeverityError
1018 severity = protocol.SeverityWarning
1020 severity = protocol.SeverityHint
1022 severity = protocol.SeverityInformation
1024 // This is not the correct way to do this, but it seems excessive to do the full conversion here.
1025 want := &source.Diagnostic{
1031 data.Diagnostics[spn.URI()] = append(data.Diagnostics[spn.URI()], want)
1034 func (data *Data) collectCompletions(typ CompletionTestType) func(span.Span, []token.Pos) {
1035 result := func(m map[span.Span][]Completion, src span.Span, expected []token.Pos) {
1036 m[src] = append(m[src], Completion{
1037 CompletionItems: expected,
1041 case CompletionDeep:
1042 return func(src span.Span, expected []token.Pos) {
1043 result(data.DeepCompletions, src, expected)
1045 case CompletionUnimported:
1046 return func(src span.Span, expected []token.Pos) {
1047 result(data.UnimportedCompletions, src, expected)
1049 case CompletionFuzzy:
1050 return func(src span.Span, expected []token.Pos) {
1051 result(data.FuzzyCompletions, src, expected)
1053 case CompletionRank:
1054 return func(src span.Span, expected []token.Pos) {
1055 result(data.RankCompletions, src, expected)
1057 case CompletionCaseSensitive:
1058 return func(src span.Span, expected []token.Pos) {
1059 result(data.CaseSensitiveCompletions, src, expected)
1062 return func(src span.Span, expected []token.Pos) {
1063 result(data.Completions, src, expected)
1068 func (data *Data) collectCompletionItems(pos token.Pos, args []string) {
1070 loc := data.Exported.ExpectFileSet.Position(pos)
1071 data.t.Fatalf("%s:%d: @item expects at least 3 args, got %d",
1072 loc.Filename, loc.Line, len(args))
1074 label, detail, kind := args[0], args[1], args[2]
1075 var documentation string
1077 documentation = args[3]
1079 data.CompletionItems[pos] = &completion.CompletionItem{
1082 Kind: protocol.ParseCompletionItemKind(kind),
1083 Documentation: documentation,
1087 func (data *Data) collectFoldingRanges(spn span.Span) {
1088 data.FoldingRanges = append(data.FoldingRanges, spn)
1091 func (data *Data) collectFormats(spn span.Span) {
1092 data.Formats = append(data.Formats, spn)
1095 func (data *Data) collectImports(spn span.Span) {
1096 data.Imports = append(data.Imports, spn)
1099 func (data *Data) collectSemanticTokens(spn span.Span) {
1100 data.SemanticTokens = append(data.SemanticTokens, spn)
1103 func (data *Data) collectSuggestedFixes(spn span.Span, actionKind string) {
1104 if _, ok := data.SuggestedFixes[spn]; !ok {
1105 data.SuggestedFixes[spn] = []string{}
1107 data.SuggestedFixes[spn] = append(data.SuggestedFixes[spn], actionKind)
1110 func (data *Data) collectFunctionExtractions(start span.Span, end span.Span) {
1111 if _, ok := data.FunctionExtractions[start]; !ok {
1112 data.FunctionExtractions[start] = end
1116 func (data *Data) collectDefinitions(src, target span.Span) {
1117 data.Definitions[src] = Definition{
1123 func (data *Data) collectImplementations(src span.Span, targets []span.Span) {
1124 data.Implementations[src] = targets
1127 func (data *Data) collectIncomingCalls(src span.Span, calls []span.Span) {
1128 for _, call := range calls {
1129 m, err := data.Mapper(call.URI())
1133 rng, err := m.Range(call)
1137 // we're only comparing protocol.range
1138 if data.CallHierarchy[src] != nil {
1139 data.CallHierarchy[src].IncomingCalls = append(data.CallHierarchy[src].IncomingCalls,
1140 protocol.CallHierarchyItem{
1141 URI: protocol.DocumentURI(call.URI()),
1145 data.CallHierarchy[src] = &CallHierarchyResult{
1146 IncomingCalls: []protocol.CallHierarchyItem{
1147 {URI: protocol.DocumentURI(call.URI()), Range: rng},
1154 func (data *Data) collectOutgoingCalls(src span.Span, calls []span.Span) {
1155 for _, call := range calls {
1156 m, err := data.Mapper(call.URI())
1160 rng, err := m.Range(call)
1164 // we're only comparing protocol.range
1165 if data.CallHierarchy[src] != nil {
1166 data.CallHierarchy[src].OutgoingCalls = append(data.CallHierarchy[src].OutgoingCalls,
1167 protocol.CallHierarchyItem{
1168 URI: protocol.DocumentURI(call.URI()),
1172 data.CallHierarchy[src] = &CallHierarchyResult{
1173 OutgoingCalls: []protocol.CallHierarchyItem{
1174 {URI: protocol.DocumentURI(call.URI()), Range: rng},
1181 func (data *Data) collectHoverDefinitions(src, target span.Span) {
1182 data.Definitions[src] = Definition{
1189 func (data *Data) collectTypeDefinitions(src, target span.Span) {
1190 data.Definitions[src] = Definition{
1197 func (data *Data) collectDefinitionNames(src span.Span, name string) {
1198 d := data.Definitions[src]
1200 data.Definitions[src] = d
1203 func (data *Data) collectHighlights(src span.Span, expected []span.Span) {
1204 // Declaring a highlight in a test file: @highlight(src, expected1, expected2)
1205 data.Highlights[src] = append(data.Highlights[src], expected...)
1208 func (data *Data) collectReferences(src span.Span, expected []span.Span) {
1209 data.References[src] = expected
1212 func (data *Data) collectRenames(src span.Span, newText string) {
1213 data.Renames[src] = newText
1216 func (data *Data) collectPrepareRenames(src span.Span, rng span.Range, placeholder string) {
1217 m, err := data.Mapper(src.URI())
1221 // Convert range to span and then to protocol.Range.
1222 spn, err := rng.Span()
1226 prng, err := m.Range(spn)
1230 data.PrepareRenames[src] = &source.PrepareItem{
1236 // collectSymbols is responsible for collecting @symbol annotations.
// A symbol with an empty parentName is recorded as a top-level
// DocumentSymbol for its file; otherwise it is queued under
// symbolsChildren[parentName] (presumably assembled into a tree later —
// confirm against the rest of the file). Every symbol is also converted
// to a protocol.SymbolInformation keyed by its span, for reuse by the
// workspace-symbol tests (see collectWorkspaceSymbols).
1237 func (data *Data) collectSymbols(name string, spn span.Span, kind string, parentName string, siName string) {
1238 m, err := data.Mapper(spn.URI())
1242 rng, err := m.Range(spn)
1246 sym := protocol.DocumentSymbol{
1248 Kind: protocol.ParseSymbolKind(kind),
1249 SelectionRange: rng,
1251 if parentName == "" {
1252 data.Symbols[spn.URI()] = append(data.Symbols[spn.URI()], sym)
1254 data.symbolsChildren[parentName] = append(data.symbolsChildren[parentName], sym)
1257 // Reuse @symbol in the workspace symbols tests.
1258 si := protocol.SymbolInformation{
1261 Location: protocol.Location{
1262 URI: protocol.URIFromSpanURI(spn.URI()),
1263 Range: sym.SelectionRange,
1266 data.symbolInformation[spn] = si
// collectWorkspaceSymbols returns a collector for workspace-symbol
// annotations using the given matcher type. The returned closure maps a
// query string to the SymbolInformation previously recorded by
// collectSymbols for each target span, storing the result in the map that
// corresponds to typ (fuzzy, case-sensitive, or the default).
1269 func (data *Data) collectWorkspaceSymbols(typ WorkspaceSymbolsTestType) func(string, []span.Span) {
1271 case WorkspaceSymbolsFuzzy:
1272 return func(query string, targets []span.Span) {
1273 data.FuzzyWorkspaceSymbols[query] = make([]protocol.SymbolInformation, 0, len(targets))
1274 for _, target := range targets {
1275 data.FuzzyWorkspaceSymbols[query] = append(data.FuzzyWorkspaceSymbols[query], data.symbolInformation[target])
1278 case WorkspaceSymbolsCaseSensitive:
1279 return func(query string, targets []span.Span) {
1280 data.CaseSensitiveWorkspaceSymbols[query] = make([]protocol.SymbolInformation, 0, len(targets))
1281 for _, target := range targets {
1282 data.CaseSensitiveWorkspaceSymbols[query] = append(data.CaseSensitiveWorkspaceSymbols[query], data.symbolInformation[target])
// Default case: record into the plain WorkspaceSymbols map.
1286 return func(query string, targets []span.Span) {
1287 data.WorkspaceSymbols[query] = make([]protocol.SymbolInformation, 0, len(targets))
1288 for _, target := range targets {
1289 data.WorkspaceSymbols[query] = append(data.WorkspaceSymbols[query], data.symbolInformation[target])
// collectSignatures records a signature-help annotation at spn: a single
// expected signature with the given active parameter index. An empty
// signature with activeParam 0 is the sentinel meaning "no signature help
// expected here", stored as a nil entry.
1295 func (data *Data) collectSignatures(spn span.Span, signature string, activeParam int64) {
1296 data.Signatures[spn] = &protocol.SignatureHelp{
1297 Signatures: []protocol.SignatureInformation{
1302 ActiveParameter: float64(activeParam),
1304 // Hardcode special case to test the lack of a signature.
1305 if signature == "" && activeParam == 0 {
1306 data.Signatures[spn] = nil
// collectCompletionSnippets records a snippet annotation: completing the
// item identified by its token.Pos at spn is expected to produce the given
// plain and placeholder snippet texts. Multiple snippets per span
// accumulate via append.
1310 func (data *Data) collectCompletionSnippets(spn span.Span, item token.Pos, plain, placeholder string) {
1311 data.CompletionSnippets[spn] = append(data.CompletionSnippets[spn], CompletionSnippet{
1312 CompletionItem: item,
1313 PlainSnippet: plain,
1314 PlaceholderSnippet: placeholder,
// collectLinks records a link annotation: the document containing spn is
// expected to contain a document link with the given target. The note's
// position in fset is retained so failures can report where the
// expectation was declared.
// NOTE(review): the derivation of uri is on an elided line — presumably
// spn.URI(); confirm against the full file.
1318 func (data *Data) collectLinks(spn span.Span, link string, note *expect.Note, fset *token.FileSet) {
1319 position := fset.Position(note.Pos)
1321 data.Links[uri] = append(data.Links[uri], Link{
1324 NotePosition: position,
// uriName returns a short name for uri: the base of the file path with any
// ".go" suffix removed (e.g. "a/b/foo.go" yields "foo").
1328 func uriName(uri span.URI) string {
1329 return filepath.Base(strings.TrimSuffix(uri.Filename(), ".go"))
// SpanName returns a subtest name for spn, combining its file name (via
// uriName) with the span's start line and column, joined by underscores.
1332 func SpanName(spn span.Span) string {
1333 return fmt.Sprintf("%v_%v_%v", uriName(spn.URI()), spn.Start().Line(), spn.Start().Column())
// CopyFolderToTempDir copies every regular file in folder (flat, no
// recursion) into a fresh temp directory with prefix "modfile_test" and
// returns the new directory's path. Any non-regular entry (including a
// subdirectory) produces an error. The caller is responsible for removing
// the returned directory; note that on error the partially-populated temp
// directory is not cleaned up here.
// NOTE(review): the error-return bodies after each check are on elided
// lines in this excerpt.
1336 func CopyFolderToTempDir(folder string) (string, error) {
1337 if _, err := os.Stat(folder); err != nil {
1340 dst, err := ioutil.TempDir("", "modfile_test")
1344 fds, err := ioutil.ReadDir(folder)
1348 for _, fd := range fds {
1349 srcfp := filepath.Join(folder, fd.Name())
1350 stat, err := os.Stat(srcfp)
1354 if !stat.Mode().IsRegular() {
// Only flat directories of regular files are supported.
1355 return "", fmt.Errorf("cannot copy non regular file %s", srcfp)
1357 contents, err := ioutil.ReadFile(srcfp)
// Preserve the source file's permission bits on the copy.
1361 if err := ioutil.WriteFile(filepath.Join(dst, fd.Name()), contents, stat.Mode()); err != nil {
// shouldSkip reports whether tests for uri must be skipped because the go
// command's -modfile flag is unavailable in the current toolchain: go.mod
// files themselves, and any file whose content mentions the "go mod tidy"
// code lens, depend on that flag.
// NOTE(review): the bodies of the early-return branches are on elided
// lines in this excerpt — presumably `return false` / `return true`.
1368 func shouldSkip(data *Data, uri span.URI) bool {
1369 if data.ModfileFlagAvailable {
1372 // If the -modfile flag is not available, then we do not want to run
1373 // any tests on the go.mod file.
1374 if strings.HasSuffix(uri.Filename(), ".mod") {
1377 // If the -modfile flag is not available, then we do not want to test any
1378 // uri that contains "go mod tidy".
1379 m, err := data.Mapper(uri)
1380 return err == nil && strings.Contains(string(m.Content), ", \"go mod tidy\",")