diff --git a/benchmark/parse/parse_test.go b/benchmark/parse/parse_test.go index ca3dfa658f6..219d1dbb32d 100644 --- a/benchmark/parse/parse_test.go +++ b/benchmark/parse/parse_test.go @@ -59,11 +59,11 @@ func TestParseLine(t *testing.T) { // error handling cases { line: "BenchPress 100 19.6 ns/op", // non-benchmark - err: true, + err: true, }, { line: "BenchmarkEncrypt lots 19.6 ns/op", // non-int iterations - err: true, + err: true, }, { line: "BenchmarkBridge 100000000 19.6 smoots", // unknown unit diff --git a/cmd/auth/authtest/authtest.go b/cmd/auth/authtest/authtest.go index 0489b931786..263eed828c4 100644 --- a/cmd/auth/authtest/authtest.go +++ b/cmd/auth/authtest/authtest.go @@ -18,13 +18,13 @@ import ( "bytes" "flag" "fmt" - exec "golang.org/x/sys/execabs" "io" "log" "net/http" "net/textproto" "net/url" "os" + "os/exec" "path/filepath" "strings" ) diff --git a/cmd/auth/gitauth/gitauth.go b/cmd/auth/gitauth/gitauth.go index 6128889f056..f61a020b7c8 100644 --- a/cmd/auth/gitauth/gitauth.go +++ b/cmd/auth/gitauth/gitauth.go @@ -17,11 +17,11 @@ package main import ( "bytes" "fmt" - exec "golang.org/x/sys/execabs" "log" "net/http" "net/url" "os" + "os/exec" "path/filepath" "strings" ) diff --git a/cmd/bisect/main_test.go b/cmd/bisect/main_test.go index 7c10ff0fb4b..bff1bf23c0c 100644 --- a/cmd/bisect/main_test.go +++ b/cmd/bisect/main_test.go @@ -17,7 +17,6 @@ import ( "testing" "golang.org/x/tools/internal/bisect" - "golang.org/x/tools/internal/compat" "golang.org/x/tools/internal/diffp" "golang.org/x/tools/txtar" ) @@ -82,7 +81,7 @@ func Test(t *testing.T) { have[color] = true } if m.ShouldReport(uint64(i)) { - out = compat.Appendf(out, "%s %s\n", color, bisect.Marker(uint64(i))) + out = fmt.Appendf(out, "%s %s\n", color, bisect.Marker(uint64(i))) } } err = nil diff --git a/cmd/callgraph/main.go b/cmd/callgraph/main.go index 33f7dfa8098..7853826b8fc 100644 --- a/cmd/callgraph/main.go +++ b/cmd/callgraph/main.go @@ -109,9 +109,20 @@ Flags: Caller and Callee 
are *ssa.Function values, which print as "(*sync/atomic.Mutex).Lock", but other attributes may be - derived from them, e.g. Caller.Pkg.Pkg.Path yields the - import path of the enclosing package. Consult the go/ssa - API documentation for details. + derived from them. For example: + + - {{.Caller.Pkg.Pkg.Path}} yields the import path of the + enclosing package; and + + - {{(.Caller.Prog.Fset.Position .Caller.Pos).Filename}} + yields the name of the file that declares the caller. + + - The 'posn' template function returns the token.Position + of an ssa.Function, so the previous example can be + reduced to {{(posn .Caller).Filename}}. + + Consult the documentation for go/token, text/template, and + golang.org/x/tools/go/ssa for more detail. Examples: @@ -238,7 +249,12 @@ func doCallgraph(dir, gopath, algo, format string, tests bool, args []string) er format = ` {{printf "%q" .Caller}} -> {{printf "%q" .Callee}}` } - tmpl, err := template.New("-format").Parse(format) + funcMap := template.FuncMap{ + "posn": func(f *ssa.Function) token.Position { + return f.Prog.Fset.Position(f.Pos()) + }, + } + tmpl, err := template.New("-format").Funcs(funcMap).Parse(format) if err != nil { return fmt.Errorf("invalid -format template: %v", err) } diff --git a/cmd/callgraph/main_test.go b/cmd/callgraph/main_test.go index afcb7a967df..ce634139e68 100644 --- a/cmd/callgraph/main_test.go +++ b/cmd/callgraph/main_test.go @@ -15,7 +15,6 @@ import ( "log" "os" "path/filepath" - "runtime" "strings" "testing" @@ -35,10 +34,6 @@ func init() { } func TestCallgraph(t *testing.T) { - if runtime.GOOS == "windows" && runtime.GOARCH == "arm64" { - t.Skipf("skipping due to suspected file corruption bug on windows/arm64 (https://go.dev/issue/50706)") - } - testenv.NeedsTool(t, "go") gopath, err := filepath.Abs("testdata") diff --git a/cmd/compilebench/main.go b/cmd/compilebench/main.go index 15323c2ee7e..a1805fda391 100644 --- a/cmd/compilebench/main.go +++ b/cmd/compilebench/main.go @@ -83,14 +83,13 
@@ import ( "fmt" "log" "os" + "os/exec" "path/filepath" "regexp" "runtime" "strconv" "strings" "time" - - exec "golang.org/x/sys/execabs" ) var ( diff --git a/internal/cmd/deadcode/deadcode.go b/cmd/deadcode/deadcode.go similarity index 79% rename from internal/cmd/deadcode/deadcode.go rename to cmd/deadcode/deadcode.go index 58b42c25180..e0fce428d08 100644 --- a/internal/cmd/deadcode/deadcode.go +++ b/cmd/deadcode/deadcode.go @@ -18,6 +18,7 @@ import ( "io" "log" "os" + "path/filepath" "regexp" "runtime" "runtime/pprof" @@ -25,11 +26,13 @@ import ( "strings" "text/template" + "golang.org/x/telemetry" "golang.org/x/tools/go/callgraph" "golang.org/x/tools/go/callgraph/rta" "golang.org/x/tools/go/packages" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/ssa/ssautil" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -61,6 +64,8 @@ Flags: } func main() { + telemetry.Start(telemetry.Config{ReportCrashes: true}) + log.SetPrefix("deadcode: ") log.SetFlags(0) // no time prefix @@ -125,16 +130,6 @@ func main() { log.Fatalf("packages contain errors") } - // Gather names of generated files. - generated := make(map[string]bool) - packages.Visit(initial, nil, func(p *packages.Package) { - for _, file := range p.Syntax { - if isGenerated(file) { - generated[p.Fset.File(file.Pos()).Name()] = true - } - } - }) - // If -filter is unset, use first module (if available). if *filterFlag == "" { if mod := initial[0].Module; mod != nil && mod.Path != "" { @@ -162,6 +157,32 @@ func main() { roots = append(roots, main.Func("init"), main.Func("main")) } + // Gather all source-level functions, + // as the user interface is expressed in terms of them. + // + // We ignore synthetic wrappers, and nested functions. Literal + // functions passed as arguments to other functions are of + // course address-taken and there exists a dynamic call of + // that signature, so when they are unreachable, it is + // invariably because the parent is unreachable. 
+ var sourceFuncs []*ssa.Function + generated := make(map[string]bool) + packages.Visit(initial, nil, func(p *packages.Package) { + for _, file := range p.Syntax { + for _, decl := range file.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok { + obj := p.TypesInfo.Defs[decl.Name].(*types.Func) + fn := prog.FuncValue(obj) + sourceFuncs = append(sourceFuncs, fn) + } + } + + if isGenerated(file) { + generated[p.Fset.File(file.Pos()).Name()] = true + } + } + }) + // Compute the reachabilty from main. // (Build a call graph only for -whylive.) res := rta.Analyze(roots, *whyLiveFlag != "") @@ -188,8 +209,8 @@ func main() { // is not dead, by showing a path to it from some root. if *whyLiveFlag != "" { targets := make(map[*ssa.Function]bool) - for fn := range ssautil.AllFunctions(prog) { - if fn.String() == *whyLiveFlag { + for _, fn := range sourceFuncs { + if prettyName(fn, true) == *whyLiveFlag { targets[fn] = true } } @@ -220,6 +241,7 @@ func main() { log.Fatalf("function %s is dead code", *whyLiveFlag) } + res.CallGraph.DeleteSyntheticNodes() // inline synthetic wrappers (except inits) root, path := pathSearch(roots, res, targets) if root == nil { // RTA doesn't add callgraph edges for reflective calls. 
@@ -236,13 +258,13 @@ func main() { var edges []any for _, edge := range path { edges = append(edges, jsonEdge{ - Initial: cond(len(edges) == 0, edge.Caller.Func.String(), ""), - Kind: cond(isStaticCall(edge), "static", "dynamic"), - Posn: toJSONPosition(prog.Fset.Position(edge.Site.Pos())), - Callee: edge.Callee.Func.String(), + Initial: cond(len(edges) == 0, prettyName(edge.Caller.Func, true), ""), + Kind: cond(isStaticCall(edge), "static", "dynamic"), + Position: toJSONPosition(prog.Fset.Position(edge.Site.Pos())), + Callee: prettyName(edge.Callee.Func, true), }) } - format := `{{if .Initial}}{{printf "%19s%s\n" "" .Initial}}{{end}}{{printf "%8s@L%.4d --> %s" .Kind .Posn.Line .Callee}}` + format := `{{if .Initial}}{{printf "%19s%s\n" "" .Initial}}{{end}}{{printf "%8s@L%.4d --> %s" .Kind .Position.Line .Callee}}` if *formatFlag != "" { format = *formatFlag } @@ -252,26 +274,7 @@ func main() { // Group unreachable functions by package path. byPkgPath := make(map[string]map[*ssa.Function]bool) - for fn := range ssautil.AllFunctions(prog) { - if fn.Synthetic != "" { - continue // ignore synthetic wrappers etc - } - - // Use generic, as instantiations may not have a Pkg. - if orig := fn.Origin(); orig != nil { - fn = orig - } - - // Ignore unreachable nested functions. - // Literal functions passed as arguments to other - // functions are of course address-taken and there - // exists a dynamic call of that signature, so when - // they are unreachable, it is invariably because the - // parent is unreachable. 
- if fn.Parent() != nil { - continue - } - + for _, fn := range sourceFuncs { posn := prog.Fset.Position(fn.Pos()) if !reachablePosn[posn] { @@ -325,26 +328,68 @@ func main() { } functions = append(functions, jsonFunction{ - Name: fn.String(), - RelName: fn.RelString(fn.Pkg.Pkg), - Posn: toJSONPosition(posn), + Name: prettyName(fn, false), + Position: toJSONPosition(posn), Generated: gen, }) } - packages = append(packages, jsonPackage{ - Path: pkgpath, - Funcs: functions, - }) + if len(functions) > 0 { + packages = append(packages, jsonPackage{ + Name: fns[0].Pkg.Pkg.Name(), + Path: pkgpath, + Funcs: functions, + }) + } } - // Default format: functions grouped by package. - format := `{{println .Path}}{{range .Funcs}}{{printf "\t%s\n" .RelName}}{{end}}{{println}}` + // Default line-oriented format: "a/b/c.go:1:2: unreachable func: T.f" + format := `{{range .Funcs}}{{printf "%s: unreachable func: %s\n" .Position .Name}}{{end}}` if *formatFlag != "" { format = *formatFlag } printObjects(format, packages) } +// prettyName is a fork of Function.String designed to reduce +// go/ssa's fussy punctuation symbols, e.g. "(*pkg.T).F" -> "pkg.T.F". +// +// It only works for functions that remain after +// callgraph.Graph.DeleteSyntheticNodes: source-level named functions +// and methods, their anonymous functions, and synthetic package +// initializers. +func prettyName(fn *ssa.Function, qualified bool) string { + var buf strings.Builder + + // optional package qualifier + if qualified && fn.Pkg != nil { + fmt.Fprintf(&buf, "%s.", fn.Pkg.Pkg.Path()) + } + + var format func(*ssa.Function) + format = func(fn *ssa.Function) { + // anonymous? + if fn.Parent() != nil { + format(fn.Parent()) + i := index(fn.Parent().AnonFuncs, fn) + fmt.Fprintf(&buf, "$%d", i+1) + return + } + + // method receiver? 
+ if recv := fn.Signature.Recv(); recv != nil { + _, named := typesinternal.ReceiverNamed(recv) + buf.WriteString(named.Obj().Name()) + buf.WriteByte('.') + } + + // function/method name + buf.WriteString(fn.Name()) + } + format(fn) + + return buf.String() +} + // printObjects formats an array of objects, either as JSON or using a // template, following the manner of 'go list (-json|-f=template)'. func printObjects(format string, objects []any) { @@ -478,6 +523,11 @@ func pathSearch(roots []*ssa.Function, res *rta.Result, targets map[*ssa.Functio } for _, rootFn := range roots { root := res.CallGraph.Nodes[rootFn] + if root == nil { + // Missing call graph node for root. + // TODO(adonovan): seems like a bug in rta. + continue + } if path := bfs(root); path != nil { return root, path } @@ -500,8 +550,16 @@ func isStaticCall(edge *callgraph.Edge) bool { return edge.Site != nil && edge.Site.Common().StaticCallee() != nil } +var cwd, _ = os.Getwd() + func toJSONPosition(posn token.Position) jsonPosition { - return jsonPosition{posn.Filename, posn.Line, posn.Column} + // Use cwd-relative filename if possible. + filename := posn.Filename + if rel, err := filepath.Rel(cwd, filename); err == nil && !strings.HasPrefix(rel, "..") { + filename = rel + } + + return jsonPosition{filename, posn.Line, posn.Column} } func cond[T any](cond bool, t, f T) T { @@ -517,26 +575,27 @@ func cond[T any](cond bool, t, f T) T { // Keep in sync with doc comment! 
type jsonFunction struct { - Name string // name (with package qualifier) - RelName string // name (sans package qualifier) - Posn jsonPosition // file/line/column of declaration + Name string // name (sans package qualifier) + Position jsonPosition // file/line/column of declaration Generated bool // function is declared in a generated .go file } func (f jsonFunction) String() string { return f.Name } type jsonPackage struct { - Path string - Funcs []jsonFunction + Name string // declared name + Path string // full import path + Funcs []jsonFunction // non-empty list of package's dead functions } func (p jsonPackage) String() string { return p.Path } +// The Initial and Callee names are package-qualified. type jsonEdge struct { - Initial string `json:",omitempty"` // initial entrypoint (main or init); first edge only - Kind string // = static | dynamic - Posn jsonPosition - Callee string + Initial string `json:",omitempty"` // initial entrypoint (main or init); first edge only + Kind string // = static | dynamic + Position jsonPosition + Callee string } type jsonPosition struct { @@ -565,6 +624,15 @@ func indexFunc[S ~[]E, E any](s S, f func(E) bool) int { return -1 } +func index[S ~[]E, E comparable](s S, v E) int { + for i := range s { + if v == s[i] { + return i + } + } + return -1 +} + func reverse[S ~[]E, E any](s S) { for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 { s[i], s[j] = s[j], s[i] diff --git a/internal/cmd/deadcode/deadcode_test.go b/cmd/deadcode/deadcode_test.go similarity index 100% rename from internal/cmd/deadcode/deadcode_test.go rename to cmd/deadcode/deadcode_test.go diff --git a/cmd/deadcode/doc.go b/cmd/deadcode/doc.go new file mode 100644 index 00000000000..66a150dd19d --- /dev/null +++ b/cmd/deadcode/doc.go @@ -0,0 +1,138 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +/* +The deadcode command reports unreachable functions in Go programs. + +Usage: deadcode [flags] package... + +The deadcode command loads a Go program from source then uses Rapid +Type Analysis (RTA) to build a call graph of all the functions +reachable from the program's main function. Any functions that are not +reachable are reported as dead code, grouped by package. + +Packages are expressed in the notation of 'go list' (or other +underlying build system if you are using an alternative +golang.org/x/go/packages driver). Only executable (main) packages are +considered starting points for the analysis. + +The -test flag causes it to analyze test executables too. Tests +sometimes make use of functions that would otherwise appear to be dead +code, and public API functions reported as dead with -test indicate +possible gaps in your test coverage. Bear in mind that an Example test +function without an "Output:" comment is merely documentation: +it is dead code, and does not contribute coverage. + +The -filter flag restricts results to packages that match the provided +regular expression; its default value is the module name of the first +package. Use -filter= to display all results. + +Example: show all dead code within the gopls module: + + $ deadcode -test golang.org/x/tools/gopls/... + +The analysis can soundly analyze dynamic calls though func values, +interface methods, and reflection. However, it does not currently +understand the aliasing created by //go:linkname directives, so it +will fail to recognize that calls to a linkname-annotated function +with no body in fact dispatch to the function named in the annotation. +This may result in the latter function being spuriously reported as dead. + +By default, the tool does not report dead functions in generated files, +as determined by the special comment described in +https://go.dev/s/generatedcode. Use the -generated flag to include them. 
+ +In any case, just because a function is reported as dead does not mean +it is unconditionally safe to delete it. For example, a dead function +may be referenced by another dead function, and a dead method may be +required to satisfy an interface that is never called. +Some judgement is required. + +The analysis is valid only for a single GOOS/GOARCH/-tags configuration, +so a function reported as dead may be live in a different configuration. +Consider running the tool once for each configuration of interest. +Consider using a line-oriented output format (see below) to make it +easier to compute the intersection of results across all runs. + +# Output + +The command supports three output formats. + +With no flags, the command prints the name and location of each dead +function in the form of a typical compiler diagnostic, for example: + + $ deadcode -f='{{range .Funcs}}{{println .Position}}{{end}}' -test ./gopls/... + gopls/internal/protocol/command.go:1206:6: unreachable func: openClientEditor + gopls/internal/template/parse.go:414:18: unreachable func: Parsed.WriteNode + gopls/internal/template/parse.go:419:18: unreachable func: wrNode.writeNode + +With the -json flag, the command prints an array of Package +objects, as defined by the JSON schema (see below). + +With the -f=template flag, the command executes the specified template +on each Package record. So, this template shows dead functions grouped +by package: + + $ deadcode -f='{{println .Path}}{{range .Funcs}}{{printf "\t%s\n" .Name}}{{end}}{{println}}' -test ./gopls/... + golang.org/x/tools/gopls/internal/lsp + openClientEditor + + golang.org/x/tools/gopls/internal/template + Parsed.WriteNode + wrNode.writeNode + +# Why is a function not dead? + +The -whylive=function flag explain why the named function is not dead +by showing an arbitrary shortest path to it from one of the main functions. 
+(To enumerate the functions in a program, or for more sophisticated +call graph queries, use golang.org/x/tools/cmd/callgraph.) + +Fully static call paths are preferred over paths involving dynamic +edges, even if longer. Paths starting from a non-test package are +preferred over those from tests. Paths from main functions are +preferred over paths from init functions. + +The result is a list of Edge objects (see JSON schema below). +Again, the -json and -f=template flags may be used to control +the formatting of the list of Edge objects. +The default format shows, for each edge in the path, whether the call +is static or dynamic, and its source line number. For example: + + $ deadcode -whylive=bytes.Buffer.String -test ./cmd/deadcode/... + golang.org/x/tools/cmd/deadcode.main + static@L0117 --> golang.org/x/tools/go/packages.Load + static@L0262 --> golang.org/x/tools/go/packages.defaultDriver + static@L0305 --> golang.org/x/tools/go/packages.goListDriver + static@L0153 --> golang.org/x/tools/go/packages.goListDriver$1 + static@L0154 --> golang.org/x/tools/go/internal/packagesdriver.GetSizesForArgsGolist + static@L0044 --> bytes.Buffer.String + +# JSON schema + + type Package struct { + Name string // declared name + Path string // full import path + Funcs []Function // list of dead functions within it + } + + type Function struct { + Name string // name (sans package qualifier) + Position Position // file/line/column of function declaration + Generated bool // function is declared in a generated .go file + } + + type Edge struct { + Initial string // initial entrypoint (main or init); first edge only + Kind string // = static | dynamic + Position Position // file/line/column of call site + Callee string // target of the call + } + + type Position struct { + File string // name of file + Line, Col int // line and byte index, both 1-based + } +*/ +package main diff --git a/cmd/deadcode/testdata/basic.txtar b/cmd/deadcode/testdata/basic.txtar new file mode 100644 
index 00000000000..70cc79807cf --- /dev/null +++ b/cmd/deadcode/testdata/basic.txtar @@ -0,0 +1,38 @@ +# Test of basic functionality. + + deadcode -filter= example.com + + want "T.Goodbye" + want "T.Goodbye2" + want "T.Goodbye3" +!want "T.Hello" + want "unreferenced" + + want "Scanf" + want "Printf" +!want "Println" + +-- go.mod -- +module example.com +go 1.18 + +-- main.go -- +package main + +import "fmt" + +type T int + +func main() { + var x T + x.Hello() +} + +func (T) Hello() { fmt.Println("hello") } +func (T) Goodbye() { fmt.Println("goodbye") } +func (*T) Goodbye2() { fmt.Println("goodbye2") } +func (*A) Goodbye3() { fmt.Println("goodbye3") } + +type A = T + +func unreferenced() {} \ No newline at end of file diff --git a/internal/cmd/deadcode/testdata/filterflag.txtar b/cmd/deadcode/testdata/filterflag.txtar similarity index 100% rename from internal/cmd/deadcode/testdata/filterflag.txtar rename to cmd/deadcode/testdata/filterflag.txtar diff --git a/cmd/deadcode/testdata/generated.txtar b/cmd/deadcode/testdata/generated.txtar new file mode 100644 index 00000000000..a2a29497cbe --- /dev/null +++ b/cmd/deadcode/testdata/generated.txtar @@ -0,0 +1,28 @@ +# Test of -generated flag output. + + deadcode "-f={{range .Funcs}}{{$.Name}}.{{.Name}}{{end}}" example.com +!want "main.main" + want "main.Dead1" +!want "main.Dead2" + + deadcode "-f={{range .Funcs}}{{$.Name}}.{{.Name}}{{end}}" -generated example.com +!want "main.main" + want "main.Dead1" + want "main.Dead2" + +-- go.mod -- +module example.com +go 1.18 + +-- main.go -- +package main + +func main() {} +func Dead1() {} + +-- gen.go -- +// Code generated by hand. DO NOT EDIT. 
+ +package main + +func Dead2() {} \ No newline at end of file diff --git a/cmd/deadcode/testdata/issue65915.txtar b/cmd/deadcode/testdata/issue65915.txtar new file mode 100644 index 00000000000..a7c15630bdd --- /dev/null +++ b/cmd/deadcode/testdata/issue65915.txtar @@ -0,0 +1,44 @@ +# Regression test for issue 65915: the enumeration of source-level +# functions used the flawed ssautil.AllFunctions, causing it to +# miss some unexported ones. + + deadcode -filter= example.com + + want "unreachable func: example.UnUsed" + want "unreachable func: example.unUsed" + want "unreachable func: PublicExample.UnUsed" + want "unreachable func: PublicExample.unUsed" + +-- go.mod -- +module example.com +go 1.18 + +-- main.go -- +package main + +type example struct{} + +func (e example) UnUsed() {} + +func (e example) Used() {} + +func (e example) unUsed() {} + +func (e example) used() {} + +type PublicExample struct{} + +func (p PublicExample) UnUsed() {} + +func (p PublicExample) Used() {} + +func (p PublicExample) unUsed() {} + +func (p PublicExample) used() {} + +func main() { + example{}.Used() + example{}.used() + PublicExample{}.Used() + PublicExample{}.used() +} diff --git a/cmd/deadcode/testdata/jsonflag.txtar b/cmd/deadcode/testdata/jsonflag.txtar new file mode 100644 index 00000000000..608657b6580 --- /dev/null +++ b/cmd/deadcode/testdata/jsonflag.txtar @@ -0,0 +1,23 @@ +# Very minimal test of -json flag. 
+ +deadcode -json example.com/p + + want `"Path": "example.com/p",` + want `"Name": "DeadFunc",` + want `"Generated": false` + want `"Line": 5,` + want `"Col": 6` + +-- go.mod -- +module example.com +go 1.18 + +-- p/p.go -- +package main + +func main() {} + +func DeadFunc() {} + +type T int +func (*T) DeadMethod() {} \ No newline at end of file diff --git a/cmd/deadcode/testdata/lineflag.txtar b/cmd/deadcode/testdata/lineflag.txtar new file mode 100644 index 00000000000..6ba006d6aa6 --- /dev/null +++ b/cmd/deadcode/testdata/lineflag.txtar @@ -0,0 +1,32 @@ +# Test of line-oriented output. + + deadcode `-f={{range .Funcs}}{{printf "%s: %s.%s\n" .Position $.Path .Name}}{{end}}` -filter= example.com + + want "main.go:13:10: example.com.T.Goodbye" +!want "example.com.T.Hello" + want "main.go:15:6: example.com.unreferenced" + + want "fmt.Scanf" + want "fmt.Printf" +!want "fmt.Println" + +-- go.mod -- +module example.com +go 1.18 + +-- main.go -- +package main + +import "fmt" + +type T int + +func main() { + var x T + x.Hello() +} + +func (T) Hello() { fmt.Println("hello") } +func (T) Goodbye() { fmt.Println("goodbye") } + +func unreferenced() {} \ No newline at end of file diff --git a/internal/cmd/deadcode/testdata/testflag.txtar b/cmd/deadcode/testdata/testflag.txtar similarity index 100% rename from internal/cmd/deadcode/testdata/testflag.txtar rename to cmd/deadcode/testdata/testflag.txtar diff --git a/cmd/deadcode/testdata/whylive.txtar b/cmd/deadcode/testdata/whylive.txtar new file mode 100644 index 00000000000..4185876779b --- /dev/null +++ b/cmd/deadcode/testdata/whylive.txtar @@ -0,0 +1,133 @@ +# Test of -whylive flag. + +# The -whylive argument must be live. + +!deadcode -whylive=example.com.d example.com + want "function example.com.d is dead code" + +# A fully static path is preferred, even if longer. 
+ + deadcode -whylive=example.com.c example.com + want " example.com.main" + want " static@L0004 --> example.com.a" + want " static@L0009 --> example.com.b" + want " static@L0012 --> example.com.c" + +# Dynamic edges are followed if necessary. +# (Note that main is preferred over init.) + + deadcode -whylive=example.com.f example.com + want " example.com.main" + want "dynamic@L0006 --> example.com.e" + want " static@L0017 --> example.com.f" + +# Degenerate case where target is itself a root. + +!deadcode -whylive=example.com.main example.com + want "example.com.main is a root" + +# Test of path through (*T).m method wrapper. + + deadcode -whylive=example.com/p.live example.com/p + want " example.com/p.main" + want "static@L0006 --> example.com/p.E.Error" + want "static@L0010 --> example.com/p.live" + +# Test of path through (I).m interface method wrapper (thunk). + + deadcode -whylive=example.com/q.live example.com/q + want " example.com/q.main" + want "static@L0006 --> example.com/q.E.Error" + want "static@L0010 --> example.com/q.live" + +# Test of path through synthetic package initializer, +# a declared package initializer, and its anonymous function. + + deadcode -whylive=example.com/q.live2 example.com/q + want " example.com/q.init" + want "static@L0000 --> example.com/q.init#1" + want "static@L0016 --> example.com/q.init#1$1" + want "static@L0015 --> example.com/q.live2" + +# Test of path through synthetic package initializer, +# and a global var initializer. 
+ + deadcode -whylive=example.com/r.live example.com/r + want " example.com/r.init" + want "static@L0007 --> example.com/r.init$1" + want "static@L0006 --> example.com/r.live" + +-- go.mod -- +module example.com +go 1.18 + +-- main.go -- +package main + +func main() { + a() + println(c, e) // c, e are address-taken + (func ())(nil)() // potential dynamic call to c, e +} +func a() { + b() +} +func b() { + c() +} +func c() +func d() +func e() { + f() +} +func f() + +func init() { + (func ())(nil)() // potential dynamic call to c, e +} + +-- p/p.go -- +package main + +func main() { + f := (*E).Error + var e E + f(&e) +} + +type E int +func (E) Error() string { return live() } + +func live() string + +-- q/q.go -- +package main + +func main() { + f := error.Error + var e E + f(e) +} + +type E int +func (E) Error() string { return live() } + +func live() string + +func init() { + f := func() { live2() } + f() +} + +func live2() + +-- r/r.go -- +package main + +func main() {} + +var x = func() int { + return live() +}() + +func live() int diff --git a/cmd/eg/eg.go b/cmd/eg/eg.go index 5d21138a49e..108b9e3009f 100644 --- a/cmd/eg/eg.go +++ b/cmd/eg/eg.go @@ -16,10 +16,10 @@ import ( "go/token" "go/types" "os" + "os/exec" "path/filepath" "strings" - exec "golang.org/x/sys/execabs" "golang.org/x/tools/go/packages" "golang.org/x/tools/refactor/eg" ) diff --git a/cmd/fiximports/main.go b/cmd/fiximports/main.go index 0893b068756..a5284029ab4 100644 --- a/cmd/fiximports/main.go +++ b/cmd/fiximports/main.go @@ -78,13 +78,12 @@ import ( "io" "log" "os" + "os/exec" "path" "path/filepath" "sort" "strconv" "strings" - - exec "golang.org/x/sys/execabs" ) // flags diff --git a/cmd/getgo/.dockerignore b/cmd/getgo/.dockerignore deleted file mode 100644 index 2b87ad9cd76..00000000000 --- a/cmd/getgo/.dockerignore +++ /dev/null @@ -1,5 +0,0 @@ -.git -.dockerignore -LICENSE -README.md -.gitignore diff --git a/cmd/getgo/.gitignore b/cmd/getgo/.gitignore deleted file mode 100644 index 
47fe98419a0..00000000000 --- a/cmd/getgo/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -build -getgo diff --git a/cmd/getgo/Dockerfile b/cmd/getgo/Dockerfile deleted file mode 100644 index 78fd9566799..00000000000 --- a/cmd/getgo/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -FROM golang:latest - -ENV SHELL /bin/bash -ENV HOME /root -WORKDIR $HOME - -COPY . /go/src/golang.org/x/tools/cmd/getgo - -RUN ( \ - cd /go/src/golang.org/x/tools/cmd/getgo \ - && go build \ - && mv getgo /usr/local/bin/getgo \ - ) - -# undo the adding of GOPATH to env for testing -ENV PATH /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin -ENV GOPATH "" - -# delete /go and /usr/local/go for testing -RUN rm -rf /go /usr/local/go diff --git a/cmd/getgo/LICENSE b/cmd/getgo/LICENSE deleted file mode 100644 index 32017f8fa1d..00000000000 --- a/cmd/getgo/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2017 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/cmd/getgo/README.md b/cmd/getgo/README.md deleted file mode 100644 index e62a6c2b64e..00000000000 --- a/cmd/getgo/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# getgo - -A proof-of-concept command-line installer for Go. - -This installer is designed to both install Go as well as do the initial configuration -of setting up the right environment variables and paths. - -It will install the Go distribution (tools & stdlib) to "/.go" inside your home directory by default. - -It will setup "$HOME/go" as your GOPATH. -This is where third party libraries and apps will be installed as well as where you will write your Go code. - -If Go is already installed via this installer it will upgrade it to the latest version of Go. - -Currently the installer supports Windows, \*nix and macOS on x86 & x64. -It supports Bash and Zsh on all of these platforms as well as powershell & cmd.exe on Windows. - -## Usage - -Windows Powershell/cmd.exe: - -`(New-Object System.Net.WebClient).DownloadFile('https://get.golang.org/installer.exe', 'installer.exe'); Start-Process -Wait -NonewWindow installer.exe; Remove-Item installer.exe` - -Shell (Linux/macOS/Windows): - -`curl -LO https://get.golang.org/$(uname)/go_installer && chmod +x go_installer && ./go_installer && rm go_installer` - -## To Do - -* Check if Go is already installed (via a different method) and update it in place or at least notify the user -* Lots of testing. It's only had limited testing so far. 
-* Add support for additional shells. - -## Development instructions - -### Testing - -There are integration tests in [`main_test.go`](main_test.go). Please add more -tests there. - -#### On unix/linux with the Dockerfile - -The Dockerfile automatically builds the binary, moves it to -`/usr/local/bin/getgo` and then unsets `$GOPATH` and removes all `$GOPATH` from -`$PATH`. - -```bash -$ docker build --rm --force-rm -t getgo . -... -$ docker run --rm -it getgo bash -root@78425260fad0:~# getgo -v -Welcome to the Go installer! -Downloading Go version go1.8.3 to /usr/local/go -This may take a bit of time... -Adding "export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/go/bin" to /root/.bashrc -Downloaded! -Setting up GOPATH -Adding "export GOPATH=/root/go" to /root/.bashrc -Adding "export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/go/bin:/root/go/bin" to /root/.bashrc -GOPATH has been setup! -root@78425260fad0:~# which go -/usr/local/go/bin/go -root@78425260fad0:~# echo $GOPATH -/root/go -root@78425260fad0:~# echo $PATH -/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/go/bin:/root/go/bin -``` - -## Release instructions - -To upload a new release of getgo, run `./make.bash && ./upload.bash`. diff --git a/cmd/getgo/download.go b/cmd/getgo/download.go deleted file mode 100644 index 18e1aec2eef..00000000000 --- a/cmd/getgo/download.go +++ /dev/null @@ -1,190 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build !plan9 -// +build !plan9 - -package main - -import ( - "archive/tar" - "archive/zip" - "compress/gzip" - "crypto/sha256" - "encoding/json" - "fmt" - "io" - "net/http" - "os" - "path/filepath" - "strings" -) - -const ( - downloadURLPrefix = "https://dl.google.com/go" -) - -// downloadGoVersion downloads and upacks the specific go version to dest/go. -func downloadGoVersion(version, ops, arch, dest string) error { - suffix := "tar.gz" - if ops == "windows" { - suffix = "zip" - } - uri := fmt.Sprintf("%s/%s.%s-%s.%s", downloadURLPrefix, version, ops, arch, suffix) - - verbosef("Downloading %s", uri) - - req, err := http.NewRequest("GET", uri, nil) - if err != nil { - return err - } - req.Header.Add("User-Agent", fmt.Sprintf("golang.org-getgo/%s", version)) - - resp, err := http.DefaultClient.Do(req) - if err != nil { - return fmt.Errorf("Downloading Go from %s failed: %v", uri, err) - } - if resp.StatusCode > 299 { - return fmt.Errorf("Downloading Go from %s failed with HTTP status %s", uri, resp.Status) - } - defer resp.Body.Close() - - tmpf, err := os.CreateTemp("", "go") - if err != nil { - return err - } - defer os.Remove(tmpf.Name()) - - h := sha256.New() - - w := io.MultiWriter(tmpf, h) - if _, err := io.Copy(w, resp.Body); err != nil { - return err - } - - verbosef("Downloading SHA %s.sha256", uri) - - sresp, err := http.Get(uri + ".sha256") - if err != nil { - return fmt.Errorf("Downloading Go sha256 from %s.sha256 failed: %v", uri, err) - } - defer sresp.Body.Close() - if sresp.StatusCode > 299 { - return fmt.Errorf("Downloading Go sha256 from %s.sha256 failed with HTTP status %s", uri, sresp.Status) - } - - shasum, err := io.ReadAll(sresp.Body) - if err != nil { - return err - } - - // Check the shasum. - sum := fmt.Sprintf("%x", h.Sum(nil)) - if sum != string(shasum) { - return fmt.Errorf("Shasum mismatch %s vs. 
%s", sum, string(shasum)) - } - - unpackFunc := unpackTar - if ops == "windows" { - unpackFunc = unpackZip - } - if err := unpackFunc(tmpf.Name(), dest); err != nil { - return fmt.Errorf("Unpacking Go to %s failed: %v", dest, err) - } - return nil -} - -func unpack(dest, name string, fi os.FileInfo, r io.Reader) error { - if strings.HasPrefix(name, "go/") { - name = name[len("go/"):] - } - - path := filepath.Join(dest, name) - if fi.IsDir() { - return os.MkdirAll(path, fi.Mode()) - } - - f, err := os.OpenFile(path, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, fi.Mode()) - if err != nil { - return err - } - defer f.Close() - - _, err = io.Copy(f, r) - return err -} - -func unpackTar(src, dest string) error { - r, err := os.Open(src) - if err != nil { - return err - } - defer r.Close() - - archive, err := gzip.NewReader(r) - if err != nil { - return err - } - defer archive.Close() - - tarReader := tar.NewReader(archive) - - for { - header, err := tarReader.Next() - if err == io.EOF { - break - } else if err != nil { - return err - } - - if err := unpack(dest, header.Name, header.FileInfo(), tarReader); err != nil { - return err - } - } - - return nil -} - -func unpackZip(src, dest string) error { - zr, err := zip.OpenReader(src) - if err != nil { - return err - } - - for _, f := range zr.File { - fr, err := f.Open() - if err != nil { - return err - } - if err := unpack(dest, f.Name, f.FileInfo(), fr); err != nil { - return err - } - fr.Close() - } - - return nil -} - -func getLatestGoVersion() (string, error) { - resp, err := http.Get("https://golang.org/dl/?mode=json") - if err != nil { - return "", fmt.Errorf("Getting current Go version failed: %v", err) - } - defer resp.Body.Close() - if resp.StatusCode != http.StatusOK { - b, _ := io.ReadAll(io.LimitReader(resp.Body, 1024)) - return "", fmt.Errorf("Could not get current Go release: HTTP %d: %q", resp.StatusCode, b) - } - var releases []struct { - Version string - } - err = 
json.NewDecoder(resp.Body).Decode(&releases) - if err != nil { - return "", err - } - if len(releases) < 1 { - return "", fmt.Errorf("Could not get at least one Go release") - } - return releases[0].Version, nil -} diff --git a/cmd/getgo/download_test.go b/cmd/getgo/download_test.go deleted file mode 100644 index b4f2059d14e..00000000000 --- a/cmd/getgo/download_test.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !plan9 -// +build !plan9 - -package main - -import ( - "os" - "path/filepath" - "testing" -) - -func TestDownloadGoVersion(t *testing.T) { - if testing.Short() { - t.Skipf("Skipping download in short mode") - } - - tmpd, err := os.MkdirTemp("", "go") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(tmpd) - - if err := downloadGoVersion("go1.8.1", "linux", "amd64", filepath.Join(tmpd, "go")); err != nil { - t.Fatal(err) - } - - // Ensure the VERSION file exists. - vf := filepath.Join(tmpd, "go", "VERSION") - if _, err := os.Stat(vf); os.IsNotExist(err) { - t.Fatalf("file %s does not exist and should", vf) - } -} diff --git a/cmd/getgo/main.go b/cmd/getgo/main.go deleted file mode 100644 index 441fd89cd95..00000000000 --- a/cmd/getgo/main.go +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !plan9 -// +build !plan9 - -// The getgo command installs Go to the user's system. 
-package main - -import ( - "bufio" - "context" - "errors" - "flag" - "fmt" - exec "golang.org/x/sys/execabs" - "os" - "strings" -) - -var ( - interactive = flag.Bool("i", false, "Interactive mode, prompt for inputs.") - verbose = flag.Bool("v", false, "Verbose.") - setupOnly = flag.Bool("skip-dl", false, "Don't download - only set up environment variables") - goVersion = flag.String("version", "", `Version of Go to install (e.g. "1.8.3"). If empty, uses the latest version.`) - - version = "devel" -) - -var errExitCleanly error = errors.New("exit cleanly sentinel value") - -func main() { - flag.Parse() - if *goVersion != "" && !strings.HasPrefix(*goVersion, "go") { - *goVersion = "go" + *goVersion - } - - ctx := context.Background() - - verbosef("version " + version) - - runStep := func(s step) { - err := s(ctx) - if err == errExitCleanly { - os.Exit(0) - } - if err != nil { - fmt.Fprintln(os.Stderr, err) - os.Exit(2) - } - } - - if !*setupOnly { - runStep(welcome) - runStep(checkOthers) - runStep(chooseVersion) - runStep(downloadGo) - } - - runStep(setupGOPATH) -} - -func verbosef(format string, v ...interface{}) { - if !*verbose { - return - } - - fmt.Printf(format+"\n", v...) -} - -func prompt(ctx context.Context, query, defaultAnswer string) (string, error) { - if !*interactive { - return defaultAnswer, nil - } - - fmt.Printf("%s [%s]: ", query, defaultAnswer) - - type result struct { - answer string - err error - } - ch := make(chan result, 1) - go func() { - s := bufio.NewScanner(os.Stdin) - if !s.Scan() { - ch <- result{"", s.Err()} - return - } - answer := s.Text() - if answer == "" { - answer = defaultAnswer - } - ch <- result{answer, nil} - }() - - select { - case r := <-ch: - return r.answer, r.err - case <-ctx.Done(): - return "", ctx.Err() - } -} - -func runCommand(ctx context.Context, prog string, args ...string) ([]byte, error) { - verbosef("Running command: %s %v", prog, args) - - cmd := exec.CommandContext(ctx, prog, args...) 
- out, err := cmd.CombinedOutput() - if err != nil { - return nil, fmt.Errorf("running cmd '%s %s' failed: %s err: %v", prog, strings.Join(args, " "), string(out), err) - } - if out != nil && err == nil && len(out) != 0 { - verbosef("%s", out) - } - - return out, nil -} diff --git a/cmd/getgo/main_test.go b/cmd/getgo/main_test.go deleted file mode 100644 index 878137dd3f4..00000000000 --- a/cmd/getgo/main_test.go +++ /dev/null @@ -1,156 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !plan9 -// +build !plan9 - -package main - -import ( - "bytes" - "fmt" - "os" - "os/exec" - "testing" -) - -func TestMain(m *testing.M) { - if os.Getenv("GO_GETGO_TEST_IS_GETGO") != "" { - main() - os.Exit(0) - } - - if os.Getenv("GOGET_INTEGRATION") == "" { - fmt.Fprintln(os.Stderr, "main_test: Skipping integration tests with GOGET_INTEGRATION unset") - return - } - - // Don't let these environment variables confuse the test. - os.Unsetenv("GOBIN") - os.Unsetenv("GOPATH") - os.Unsetenv("GIT_ALLOW_PROTOCOL") - os.Unsetenv("PATH") - - os.Exit(m.Run()) -} - -func createTmpHome(t *testing.T) string { - tmpd, err := os.MkdirTemp("", "testgetgo") - if err != nil { - t.Fatalf("creating test tempdir failed: %v", err) - } - - os.Setenv("HOME", tmpd) - return tmpd -} - -// doRun runs the test getgo command, recording stdout and stderr and -// returning exit status. -func doRun(t *testing.T, args ...string) error { - exe, err := os.Executable() - if err != nil { - t.Fatal(err) - } - t.Helper() - - t.Logf("running getgo %v", args) - var stdout, stderr bytes.Buffer - cmd := exec.Command(exe, args...) 
- cmd.Stdout = &stdout - cmd.Stderr = &stderr - cmd.Env = append(os.Environ(), "GO_GETGO_TEST_IS_GETGO=1") - status := cmd.Run() - if stdout.Len() > 0 { - t.Log("standard output:") - t.Log(stdout.String()) - } - if stderr.Len() > 0 { - t.Log("standard error:") - t.Log(stderr.String()) - } - return status -} - -func TestCommandVerbose(t *testing.T) { - tmpd := createTmpHome(t) - defer os.RemoveAll(tmpd) - - err := doRun(t, "-v") - if err != nil { - t.Fatal(err) - } - // make sure things are in path - shellConfig, err := shellConfigFile() - if err != nil { - t.Fatal(err) - } - b, err := os.ReadFile(shellConfig) - if err != nil { - t.Fatal(err) - } - home, err := getHomeDir() - if err != nil { - t.Fatal(err) - } - - expected := fmt.Sprintf(` -export PATH=$PATH:%s/.go/bin - -export GOPATH=%s/go - -export PATH=$PATH:%s/go/bin -`, home, home, home) - - if string(b) != expected { - t.Fatalf("%s expected %q, got %q", shellConfig, expected, string(b)) - } -} - -func TestCommandPathExists(t *testing.T) { - tmpd := createTmpHome(t) - defer os.RemoveAll(tmpd) - - // run once - err := doRun(t, "-skip-dl") - if err != nil { - t.Fatal(err) - } - // make sure things are in path - shellConfig, err := shellConfigFile() - if err != nil { - t.Fatal(err) - } - b, err := os.ReadFile(shellConfig) - if err != nil { - t.Fatal(err) - } - home, err := getHomeDir() - if err != nil { - t.Fatal(err) - } - - expected := fmt.Sprintf(` -export GOPATH=%s/go - -export PATH=$PATH:%s/go/bin -`, home, home) - - if string(b) != expected { - t.Fatalf("%s expected %q, got %q", shellConfig, expected, string(b)) - } - - // run twice - if err := doRun(t, "-skip-dl"); err != nil { - t.Fatal(err) - } - - b, err = os.ReadFile(shellConfig) - if err != nil { - t.Fatal(err) - } - - if string(b) != expected { - t.Fatalf("%s expected %q, got %q", shellConfig, expected, string(b)) - } -} diff --git a/cmd/getgo/make.bash b/cmd/getgo/make.bash deleted file mode 100755 index cbc36857e86..00000000000 --- 
a/cmd/getgo/make.bash +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -# Copyright 2017 The Go Authors. All rights reserved. -# Use of this source code is governed by a BSD-style -# license that can be found in the LICENSE file. - -set -e -o -x - -LDFLAGS="-X main.version=$(git describe --always --dirty='*')" - -GOOS=windows GOARCH=386 go build -o build/installer.exe -ldflags="$LDFLAGS" -GOOS=linux GOARCH=386 go build -o build/installer_linux -ldflags="$LDFLAGS" -GOOS=darwin GOARCH=386 go build -o build/installer_darwin -ldflags="$LDFLAGS" diff --git a/cmd/getgo/path.go b/cmd/getgo/path.go deleted file mode 100644 index f1799a85f4e..00000000000 --- a/cmd/getgo/path.go +++ /dev/null @@ -1,156 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !plan9 -// +build !plan9 - -package main - -import ( - "bufio" - "context" - "fmt" - "os" - "os/user" - "path/filepath" - "runtime" - "strings" -) - -const ( - bashConfig = ".bash_profile" - zshConfig = ".zshrc" -) - -// appendToPATH adds the given path to the PATH environment variable and -// persists it for future sessions. 
-func appendToPATH(value string) error { - if isInPATH(value) { - return nil - } - return persistEnvVar("PATH", pathVar+envSeparator+value) -} - -func isInPATH(dir string) bool { - p := os.Getenv("PATH") - - paths := strings.Split(p, envSeparator) - for _, d := range paths { - if d == dir { - return true - } - } - - return false -} - -func getHomeDir() (string, error) { - home := os.Getenv(homeKey) - if home != "" { - return home, nil - } - - u, err := user.Current() - if err != nil { - return "", err - } - return u.HomeDir, nil -} - -func checkStringExistsFile(filename, value string) (bool, error) { - file, err := os.OpenFile(filename, os.O_RDONLY, 0600) - if err != nil { - if os.IsNotExist(err) { - return false, nil - } - return false, err - } - defer file.Close() - - scanner := bufio.NewScanner(file) - for scanner.Scan() { - line := scanner.Text() - if line == value { - return true, nil - } - } - - return false, scanner.Err() -} - -func appendToFile(filename, value string) error { - verbosef("Adding %q to %s", value, filename) - - ok, err := checkStringExistsFile(filename, value) - if err != nil { - return err - } - if ok { - // Nothing to do. - return nil - } - - f, err := os.OpenFile(filename, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0600) - if err != nil { - return err - } - defer f.Close() - - _, err = f.WriteString(lineEnding + value + lineEnding) - return err -} - -func isShell(name string) bool { - return strings.Contains(currentShell(), name) -} - -// persistEnvVarWindows sets an environment variable in the Windows -// registry. 
-func persistEnvVarWindows(name, value string) error { - _, err := runCommand(context.Background(), "powershell", "-command", - fmt.Sprintf(`[Environment]::SetEnvironmentVariable("%s", "%s", "User")`, name, value)) - return err -} - -func persistEnvVar(name, value string) error { - if runtime.GOOS == "windows" { - if err := persistEnvVarWindows(name, value); err != nil { - return err - } - - if isShell("cmd.exe") || isShell("powershell.exe") { - return os.Setenv(strings.ToUpper(name), value) - } - // User is in bash, zsh, etc. - // Also set the environment variable in their shell config. - } - - rc, err := shellConfigFile() - if err != nil { - return err - } - - line := fmt.Sprintf("export %s=%s", strings.ToUpper(name), value) - if err := appendToFile(rc, line); err != nil { - return err - } - - return os.Setenv(strings.ToUpper(name), value) -} - -func shellConfigFile() (string, error) { - home, err := getHomeDir() - if err != nil { - return "", err - } - - switch { - case isShell("bash"): - return filepath.Join(home, bashConfig), nil - case isShell("zsh"): - return filepath.Join(home, zshConfig), nil - default: - return "", fmt.Errorf("%q is not a supported shell", currentShell()) - } -} diff --git a/cmd/getgo/path_test.go b/cmd/getgo/path_test.go deleted file mode 100644 index 8195f2e68d5..00000000000 --- a/cmd/getgo/path_test.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build !plan9 -// +build !plan9 - -package main - -import ( - "os" - "path/filepath" - "strings" - "testing" -) - -func TestAppendPath(t *testing.T) { - tmpd, err := os.MkdirTemp("", "go") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(tmpd) - - if err := os.Setenv("HOME", tmpd); err != nil { - t.Fatal(err) - } - - GOPATH := os.Getenv("GOPATH") - if err := appendToPATH(filepath.Join(GOPATH, "bin")); err != nil { - t.Fatal(err) - } - - shellConfig, err := shellConfigFile() - if err != nil { - t.Fatal(err) - } - b, err := os.ReadFile(shellConfig) - if err != nil { - t.Fatal(err) - } - - expected := "export PATH=" + pathVar + envSeparator + filepath.Join(GOPATH, "bin") - if strings.TrimSpace(string(b)) != expected { - t.Fatalf("expected: %q, got %q", expected, strings.TrimSpace(string(b))) - } - - // Check that appendToPATH is idempotent. - if err := appendToPATH(filepath.Join(GOPATH, "bin")); err != nil { - t.Fatal(err) - } - b, err = os.ReadFile(shellConfig) - if err != nil { - t.Fatal(err) - } - if strings.TrimSpace(string(b)) != expected { - t.Fatalf("expected: %q, got %q", expected, strings.TrimSpace(string(b))) - } -} diff --git a/cmd/getgo/server/.gcloudignore b/cmd/getgo/server/.gcloudignore deleted file mode 100644 index 199e6d9f2f9..00000000000 --- a/cmd/getgo/server/.gcloudignore +++ /dev/null @@ -1,25 +0,0 @@ -# This file specifies files that are *not* uploaded to Google Cloud Platform -# using gcloud. It follows the same syntax as .gitignore, with the addition of -# "#!include" directives (which insert the entries of the given .gitignore-style -# file at that point). 
-# -# For more information, run: -# $ gcloud topic gcloudignore -# -.gcloudignore -# If you would like to upload your .git directory, .gitignore file or files -# from your .gitignore file, remove the corresponding line -# below: -.git -.gitignore - -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib -# Test binary, build with `go test -c` -*.test -# Output of the go coverage tool, specifically when used with LiteIDE -*.out \ No newline at end of file diff --git a/cmd/getgo/server/README.md b/cmd/getgo/server/README.md deleted file mode 100644 index 0cf629d6e6e..00000000000 --- a/cmd/getgo/server/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# getgo server - -## Deployment - -``` -gcloud app deploy --promote --project golang-org -``` diff --git a/cmd/getgo/server/app.yaml b/cmd/getgo/server/app.yaml deleted file mode 100644 index 5c47312ef1d..00000000000 --- a/cmd/getgo/server/app.yaml +++ /dev/null @@ -1,2 +0,0 @@ -runtime: go112 -service: get diff --git a/cmd/getgo/server/main.go b/cmd/getgo/server/main.go deleted file mode 100644 index bdb0f70cf49..00000000000 --- a/cmd/getgo/server/main.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Command server serves get.golang.org, redirecting users to the appropriate -// getgo installer based on the request path. -package main - -import ( - "fmt" - "net/http" - "os" - "strings" - "time" -) - -const ( - base = "https://dl.google.com/go/getgo/" - windowsInstaller = base + "installer.exe" - linuxInstaller = base + "installer_linux" - macInstaller = base + "installer_darwin" -) - -// substring-based redirects. 
-var stringMatch = map[string]string{ - // via uname, from bash - "MINGW": windowsInstaller, // Reported as MINGW64_NT-10.0 in git bash - "Linux": linuxInstaller, - "Darwin": macInstaller, -} - -func main() { - http.HandleFunc("/", handler) - - port := os.Getenv("PORT") - if port == "" { - port = "8080" - fmt.Printf("Defaulting to port %s", port) - } - - fmt.Printf("Listening on port %s", port) - if err := http.ListenAndServe(fmt.Sprintf(":%s", port), nil); err != nil { - fmt.Fprintf(os.Stderr, "http.ListenAndServe: %v", err) - } -} - -func handler(w http.ResponseWriter, r *http.Request) { - if containsIgnoreCase(r.URL.Path, "installer.exe") { - // cache bust - http.Redirect(w, r, windowsInstaller+cacheBust(), http.StatusFound) - return - } - - for match, redirect := range stringMatch { - if containsIgnoreCase(r.URL.Path, match) { - http.Redirect(w, r, redirect, http.StatusFound) - return - } - } - - http.NotFound(w, r) -} - -func containsIgnoreCase(s, substr string) bool { - return strings.Contains( - strings.ToLower(s), - strings.ToLower(substr), - ) -} - -func cacheBust() string { - return fmt.Sprintf("?%d", time.Now().Nanosecond()) -} diff --git a/cmd/getgo/steps.go b/cmd/getgo/steps.go deleted file mode 100644 index fe69aa63aaf..00000000000 --- a/cmd/getgo/steps.go +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !plan9 -// +build !plan9 - -package main - -import ( - "context" - "fmt" - "os" - "path/filepath" - "runtime" - "strings" -) - -type step func(context.Context) error - -func welcome(ctx context.Context) error { - fmt.Println("Welcome to the Go installer!") - answer, err := prompt(ctx, "Would you like to install Go? 
Y/n", "Y") - if err != nil { - return err - } - if strings.ToLower(answer) != "y" { - fmt.Println("Exiting install.") - return errExitCleanly - } - - return nil -} - -func checkOthers(ctx context.Context) error { - // TODO: if go is currently installed install new version over that - path, err := whichGo(ctx) - if err != nil { - fmt.Printf("Cannot check if Go is already installed:\n%v\n", err) - } - if path == "" { - return nil - } - if path != installPath { - fmt.Printf("Go is already installed at %v; remove it from your PATH.\n", path) - } - return nil -} - -func chooseVersion(ctx context.Context) error { - if *goVersion != "" { - return nil - } - - var err error - *goVersion, err = getLatestGoVersion() - if err != nil { - return err - } - - answer, err := prompt(ctx, fmt.Sprintf("The latest Go version is %s, install that? Y/n", *goVersion), "Y") - if err != nil { - return err - } - - if strings.ToLower(answer) != "y" { - // TODO: handle passing a version - fmt.Println("Aborting install.") - return errExitCleanly - } - - return nil -} - -func downloadGo(ctx context.Context) error { - answer, err := prompt(ctx, fmt.Sprintf("Download Go version %s to %s? Y/n", *goVersion, installPath), "Y") - if err != nil { - return err - } - - if strings.ToLower(answer) != "y" { - fmt.Println("Aborting install.") - return errExitCleanly - } - - fmt.Printf("Downloading Go version %s to %s\n", *goVersion, installPath) - fmt.Println("This may take a bit of time...") - - if err := downloadGoVersion(*goVersion, runtime.GOOS, arch, installPath); err != nil { - return err - } - - if err := appendToPATH(filepath.Join(installPath, "bin")); err != nil { - return err - } - - fmt.Println("Downloaded!") - return nil -} - -func setupGOPATH(ctx context.Context) error { - answer, err := prompt(ctx, "Would you like us to setup your GOPATH? 
Y/n", "Y") - if err != nil { - return err - } - - if strings.ToLower(answer) != "y" { - fmt.Println("Exiting and not setting up GOPATH.") - return errExitCleanly - } - - fmt.Println("Setting up GOPATH") - home, err := getHomeDir() - if err != nil { - return err - } - - gopath := os.Getenv("GOPATH") - if gopath == "" { - // set $GOPATH - gopath = filepath.Join(home, "go") - if err := persistEnvVar("GOPATH", gopath); err != nil { - return err - } - fmt.Println("GOPATH has been set up!") - } else { - verbosef("GOPATH is already set to %s", gopath) - } - - if err := appendToPATH(filepath.Join(gopath, "bin")); err != nil { - return err - } - return persistEnvChangesForSession() -} diff --git a/cmd/getgo/system.go b/cmd/getgo/system.go deleted file mode 100644 index 3449c9c64f9..00000000000 --- a/cmd/getgo/system.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !plan9 -// +build !plan9 - -package main - -import ( - "bytes" - "context" - exec "golang.org/x/sys/execabs" - "runtime" - "strings" -) - -// arch contains either amd64 or 386. -var arch = func() string { - cmd := exec.Command("uname", "-m") // "x86_64" - if runtime.GOOS == "windows" { - cmd = exec.Command("powershell", "-command", "(Get-WmiObject -Class Win32_ComputerSystem).SystemType") // "x64-based PC" - } - - out, err := cmd.Output() - if err != nil { - // a sensible default? 
- return "amd64" - } - if bytes.Contains(out, []byte("64")) { - return "amd64" - } - return "386" -}() - -func findGo(ctx context.Context, cmd string) (string, error) { - out, err := exec.CommandContext(ctx, cmd, "go").CombinedOutput() - return strings.TrimSpace(string(out)), err -} diff --git a/cmd/getgo/system_unix.go b/cmd/getgo/system_unix.go deleted file mode 100644 index 0b511dbeb4b..00000000000 --- a/cmd/getgo/system_unix.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !plan9 && !windows - -package main - -import ( - "context" - "fmt" - "os" - "path/filepath" -) - -const ( - envSeparator = ":" - homeKey = "HOME" - lineEnding = "\n" - pathVar = "$PATH" -) - -var installPath = func() string { - home, err := getHomeDir() - if err != nil { - return "/usr/local/go" - } - - return filepath.Join(home, ".go") -}() - -func whichGo(ctx context.Context) (string, error) { - return findGo(ctx, "which") -} - -func isWindowsXP() bool { - return false -} - -func currentShell() string { - return os.Getenv("SHELL") -} - -func persistEnvChangesForSession() error { - shellConfig, err := shellConfigFile() - if err != nil { - return err - } - fmt.Println() - fmt.Printf("One more thing! Run `source %s` to persist the\n", shellConfig) - fmt.Println("new environment variables to your current session, or open a") - fmt.Println("new shell prompt.") - - return nil -} diff --git a/cmd/getgo/system_windows.go b/cmd/getgo/system_windows.go deleted file mode 100644 index 5b1e2471300..00000000000 --- a/cmd/getgo/system_windows.go +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build windows -// +build windows - -package main - -import ( - "context" - "log" - "os" - "syscall" - "unsafe" -) - -const ( - envSeparator = ";" - homeKey = "USERPROFILE" - lineEnding = "/r/n" - pathVar = "$env:Path" -) - -var installPath = `c:\go` - -func isWindowsXP() bool { - v, err := syscall.GetVersion() - if err != nil { - log.Fatalf("GetVersion failed: %v", err) - } - major := byte(v) - return major < 6 -} - -func whichGo(ctx context.Context) (string, error) { - return findGo(ctx, "where") -} - -// currentShell reports the current shell. -// It might be "powershell.exe", "cmd.exe" or any of the *nix shells. -// -// Returns empty string if the shell is unknown. -func currentShell() string { - shell := os.Getenv("SHELL") - if shell != "" { - return shell - } - - pid := os.Getppid() - pe, err := getProcessEntry(pid) - if err != nil { - verbosef("getting shell from process entry failed: %v", err) - return "" - } - - return syscall.UTF16ToString(pe.ExeFile[:]) -} - -func getProcessEntry(pid int) (*syscall.ProcessEntry32, error) { - // From https://go.googlesource.com/go/+/go1.8.3/src/syscall/syscall_windows.go#941 - snapshot, err := syscall.CreateToolhelp32Snapshot(syscall.TH32CS_SNAPPROCESS, 0) - if err != nil { - return nil, err - } - defer syscall.CloseHandle(snapshot) - - var procEntry syscall.ProcessEntry32 - procEntry.Size = uint32(unsafe.Sizeof(procEntry)) - if err = syscall.Process32First(snapshot, &procEntry); err != nil { - return nil, err - } - - for { - if procEntry.ProcessID == uint32(pid) { - return &procEntry, nil - } - - if err := syscall.Process32Next(snapshot, &procEntry); err != nil { - return nil, err - } - } -} - -func persistEnvChangesForSession() error { - return nil -} diff --git a/cmd/getgo/upload.bash b/cmd/getgo/upload.bash deleted file mode 100755 index f52bb23c93c..00000000000 --- a/cmd/getgo/upload.bash +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash - -# Copyright 2017 The Go Authors. All rights reserved. 
-# Use of this source code is governed by a BSD-style -# license that can be found in the LICENSE file. - -if ! command -v gsutil 2>&1 > /dev/null; then - echo "Install gsutil:" - echo - echo " https://cloud.google.com/storage/docs/gsutil_install#sdk-install" -fi - -if [ ! -d build ]; then - echo "Run make.bash first" -fi - -set -e -o -x - -gsutil -m cp -a public-read build/* gs://golang/getgo diff --git a/cmd/go-contrib-init/contrib.go b/cmd/go-contrib-init/contrib.go index 9b4d265025c..9254b86388f 100644 --- a/cmd/go-contrib-init/contrib.go +++ b/cmd/go-contrib-init/contrib.go @@ -13,9 +13,9 @@ import ( "flag" "fmt" "go/build" - exec "golang.org/x/sys/execabs" "log" "os" + "os/exec" "path/filepath" "regexp" "runtime" diff --git a/cmd/godex/print.go b/cmd/godex/print.go index 1bb5214edfd..da3b2f04e0b 100644 --- a/cmd/godex/print.go +++ b/cmd/godex/print.go @@ -12,6 +12,8 @@ import ( "go/types" "io" "math/big" + + "golang.org/x/tools/internal/aliases" ) // TODO(gri) use tabwriter for alignment? @@ -56,7 +58,7 @@ func (p *printer) printf(format string, args ...interface{}) { // denoted by obj is not an interface and has methods. Otherwise it returns // the zero value. func methodsFor(obj *types.TypeName) (*types.Named, []*types.Selection) { - named, _ := obj.Type().(*types.Named) + named, _ := aliases.Unalias(obj.Type()).(*types.Named) if named == nil { // A type name's type can also be the // exported basic type unsafe.Pointer. 
diff --git a/cmd/godex/writetype.go b/cmd/godex/writetype.go index 5cbe1b12c84..6ae365d13a3 100644 --- a/cmd/godex/writetype.go +++ b/cmd/godex/writetype.go @@ -12,7 +12,11 @@ package main -import "go/types" +import ( + "go/types" + + "golang.org/x/tools/internal/aliases" +) func (p *printer) writeType(this *types.Package, typ types.Type) { p.writeTypeInternal(this, typ, make([]types.Type, 8)) @@ -173,6 +177,10 @@ func (p *printer) writeTypeInternal(this *types.Package, typ types.Type, visited p.print(")") } + case *aliases.Alias: + // TODO(adonovan): display something aliasy. + p.writeTypeInternal(this, aliases.Unalias(t), visited) + case *types.Named: s := "" if obj := t.Obj(); obj != nil { diff --git a/cmd/godoc/goroot.go b/cmd/godoc/goroot.go index c284ca89109..755069d949b 100644 --- a/cmd/godoc/goroot.go +++ b/cmd/godoc/goroot.go @@ -5,8 +5,8 @@ package main import ( - exec "golang.org/x/sys/execabs" "os" + "os/exec" "path/filepath" "runtime" "strings" diff --git a/cmd/godoc/main.go b/cmd/godoc/main.go index a4ca1c4c175..a665be0769d 100644 --- a/cmd/godoc/main.go +++ b/cmd/godoc/main.go @@ -32,14 +32,13 @@ import ( _ "net/http/pprof" // to serve /debug/pprof/* "net/url" "os" + "os/exec" "path" "path/filepath" "regexp" "runtime" "strings" - exec "golang.org/x/sys/execabs" - "golang.org/x/tools/godoc" "golang.org/x/tools/godoc/static" "golang.org/x/tools/godoc/vfs" diff --git a/cmd/goimports/goimports.go b/cmd/goimports/goimports.go index 3b6bd72503e..dcb5023a2e7 100644 --- a/cmd/goimports/goimports.go +++ b/cmd/goimports/goimports.go @@ -11,10 +11,10 @@ import ( "flag" "fmt" "go/scanner" - exec "golang.org/x/sys/execabs" "io" "log" "os" + "os/exec" "path/filepath" "runtime" "runtime/pprof" diff --git a/cmd/goimports/goimports_gc.go b/cmd/goimports/goimports_gc.go index 190a56535ca..3326646d035 100644 --- a/cmd/goimports/goimports_gc.go +++ b/cmd/goimports/goimports_gc.go @@ -19,8 +19,8 @@ func doTrace() func() { bw, flush := bufferedFileWriter(*traceProfile) 
trace.Start(bw) return func() { - flush() trace.Stop() + flush() } } return func() {} diff --git a/cmd/guru/TODO b/cmd/guru/TODO deleted file mode 100644 index 61bf1519e84..00000000000 --- a/cmd/guru/TODO +++ /dev/null @@ -1,11 +0,0 @@ --*- text -*- - -Guru to-do list -=========================== - -Generics: -- decide on whether to support generics in guru -- decide on whether to instantiate generics in ssa (go.dev/issue/52503) - -MISC: -- test support for *ssa.SliceToArrayPointer instructions (go.dev/issue/47326) \ No newline at end of file diff --git a/cmd/guru/definition.go b/cmd/guru/definition.go deleted file mode 100644 index 46d48060b16..00000000000 --- a/cmd/guru/definition.go +++ /dev/null @@ -1,205 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package main - -import ( - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/token" - pathpkg "path" - "path/filepath" - "strconv" - - "golang.org/x/tools/cmd/guru/serial" - "golang.org/x/tools/go/buildutil" - "golang.org/x/tools/go/loader" -) - -// definition reports the location of the definition of an identifier. -func definition(q *Query) error { - // First try the simple resolution done by parser. - // It only works for intra-file references but it is very fast. - // (Extending this approach to all the files of the package, - // resolved using ast.NewPackage, was not worth the effort.) - { - qpos, err := fastQueryPos(q.Build, q.Pos) - if err != nil { - return err - } - - id, _ := qpos.path[0].(*ast.Ident) - if id == nil { - return fmt.Errorf("no identifier here") - } - - // Did the parser resolve it to a local object? - if obj := id.Obj; obj != nil && obj.Pos().IsValid() { - q.Output(qpos.fset, &definitionResult{ - pos: obj.Pos(), - descr: fmt.Sprintf("%s %s", obj.Kind, obj.Name), - }) - return nil // success - } - - // Qualified identifier? 
- if pkg := packageForQualIdent(qpos.path, id); pkg != "" { - srcdir := filepath.Dir(qpos.fset.File(qpos.start).Name()) - tok, pos, err := findPackageMember(q.Build, qpos.fset, srcdir, pkg, id.Name) - if err != nil { - return err - } - q.Output(qpos.fset, &definitionResult{ - pos: pos, - descr: fmt.Sprintf("%s %s.%s", tok, pkg, id.Name), - }) - return nil // success - } - - // Fall back on the type checker. - } - - // Run the type checker. - lconf := loader.Config{Build: q.Build} - allowErrors(&lconf) - - if _, err := importQueryPackage(q.Pos, &lconf); err != nil { - return err - } - - // Load/parse/type-check the program. - lprog, err := lconf.Load() - if err != nil { - return err - } - - qpos, err := parseQueryPos(lprog, q.Pos, false) - if err != nil { - return err - } - - id, _ := qpos.path[0].(*ast.Ident) - if id == nil { - return fmt.Errorf("no identifier here") - } - - // Look up the declaration of this identifier. - // If id is an anonymous field declaration, - // it is both a use of a type and a def of a field; - // prefer the use in that case. - obj := qpos.info.Uses[id] - if obj == nil { - obj = qpos.info.Defs[id] - if obj == nil { - // Happens for y in "switch y := x.(type)", - // and the package declaration, - // but I think that's all. - return fmt.Errorf("no object for identifier") - } - } - - if !obj.Pos().IsValid() { - return fmt.Errorf("%s is built in", obj.Name()) - } - - q.Output(lprog.Fset, &definitionResult{ - pos: obj.Pos(), - descr: qpos.objectString(obj), - }) - return nil -} - -// packageForQualIdent returns the package p if id is X in a qualified -// identifier p.X; it returns "" otherwise. -// -// Precondition: id is path[0], and the parser did not resolve id to a -// local object. For speed, packageForQualIdent assumes that p is a -// package iff it is the basename of an import path (and not, say, a -// package-level decl in another file or a predeclared identifier). 
-func packageForQualIdent(path []ast.Node, id *ast.Ident) string { - if sel, ok := path[1].(*ast.SelectorExpr); ok && sel.Sel == id && ast.IsExported(id.Name) { - if pkgid, ok := sel.X.(*ast.Ident); ok && pkgid.Obj == nil { - f := path[len(path)-1].(*ast.File) - for _, imp := range f.Imports { - path, _ := strconv.Unquote(imp.Path.Value) - if imp.Name != nil { - if imp.Name.Name == pkgid.Name { - return path // renaming import - } - } else if pathpkg.Base(path) == pkgid.Name { - return path // ordinary import - } - } - } - } - return "" -} - -// findPackageMember returns the type and position of the declaration of -// pkg.member by loading and parsing the files of that package. -// srcdir is the directory in which the import appears. -func findPackageMember(ctxt *build.Context, fset *token.FileSet, srcdir, pkg, member string) (token.Token, token.Pos, error) { - bp, err := ctxt.Import(pkg, srcdir, 0) - if err != nil { - return 0, token.NoPos, err // no files for package - } - - // TODO(adonovan): opt: parallelize. - for _, fname := range bp.GoFiles { - filename := filepath.Join(bp.Dir, fname) - - // Parse the file, opening it the file via the build.Context - // so that we observe the effects of the -modified flag. - f, _ := buildutil.ParseFile(fset, ctxt, nil, ".", filename, parser.Mode(0)) - if f == nil { - continue - } - - // Find a package-level decl called 'member'. 
- for _, decl := range f.Decls { - switch decl := decl.(type) { - case *ast.GenDecl: - for _, spec := range decl.Specs { - switch spec := spec.(type) { - case *ast.ValueSpec: - // const or var - for _, id := range spec.Names { - if id.Name == member { - return decl.Tok, id.Pos(), nil - } - } - case *ast.TypeSpec: - if spec.Name.Name == member { - return token.TYPE, spec.Name.Pos(), nil - } - } - } - case *ast.FuncDecl: - if decl.Recv == nil && decl.Name.Name == member { - return token.FUNC, decl.Name.Pos(), nil - } - } - } - } - - return 0, token.NoPos, fmt.Errorf("couldn't find declaration of %s in %q", member, pkg) -} - -type definitionResult struct { - pos token.Pos // (nonzero) location of definition - descr string // description of object it denotes -} - -func (r *definitionResult) PrintPlain(printf printfFunc) { - printf(r.pos, "defined here as %s", r.descr) -} - -func (r *definitionResult) JSON(fset *token.FileSet) []byte { - return toJSON(&serial.Definition{ - Desc: r.descr, - ObjPos: fset.Position(r.pos).String(), - }) -} diff --git a/cmd/guru/describe.go b/cmd/guru/describe.go deleted file mode 100644 index 0e4964428d5..00000000000 --- a/cmd/guru/describe.go +++ /dev/null @@ -1,962 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package main - -import ( - "bytes" - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - "os" - "strings" - "unicode/utf8" - - "golang.org/x/tools/cmd/guru/serial" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/loader" - "golang.org/x/tools/go/types/typeutil" -) - -// describe describes the syntax node denoted by the query position, -// including: -// - its syntactic category -// - the definition of its referent (for identifiers) [now redundant] -// - its type, fields, and methods (for an expression or type expression) -func describe(q *Query) error { - lconf := loader.Config{Build: q.Build} - allowErrors(&lconf) - - if _, err := importQueryPackage(q.Pos, &lconf); err != nil { - return err - } - - // Load/parse/type-check the program. - lprog, err := lconf.Load() - if err != nil { - return err - } - - qpos, err := parseQueryPos(lprog, q.Pos, true) // (need exact pos) - if err != nil { - return err - } - - if false { // debugging - fprintf(os.Stderr, lprog.Fset, qpos.path[0], "you selected: %s %s", - astutil.NodeDescription(qpos.path[0]), pathToString(qpos.path)) - } - - var qr QueryResult - path, action := findInterestingNode(qpos.info, qpos.path) - switch action { - case actionExpr: - qr, err = describeValue(qpos, path) - - case actionType: - qr, err = describeType(qpos, path) - - case actionPackage: - qr, err = describePackage(qpos, path) - - case actionStmt: - qr, err = describeStmt(qpos, path) - - case actionUnknown: - qr = &describeUnknownResult{path[0]} - - default: - panic(action) // unreachable - } - if err != nil { - return err - } - q.Output(lprog.Fset, qr) - return nil -} - -type describeUnknownResult struct { - node ast.Node -} - -func (r *describeUnknownResult) PrintPlain(printf printfFunc) { - // Nothing much to say about misc syntax. 
- printf(r.node, "%s", astutil.NodeDescription(r.node)) -} - -func (r *describeUnknownResult) JSON(fset *token.FileSet) []byte { - return toJSON(&serial.Describe{ - Desc: astutil.NodeDescription(r.node), - Pos: fset.Position(r.node.Pos()).String(), - }) -} - -type action int - -const ( - actionUnknown action = iota // None of the below - actionExpr // FuncDecl, true Expr or Ident(types.{Const,Var}) - actionType // type Expr or Ident(types.TypeName). - actionStmt // Stmt or Ident(types.Label) - actionPackage // Ident(types.Package) or ImportSpec -) - -// findInterestingNode classifies the syntax node denoted by path as one of: -// - an expression, part of an expression or a reference to a constant -// or variable; -// - a type, part of a type, or a reference to a named type; -// - a statement, part of a statement, or a label referring to a statement; -// - part of a package declaration or import spec. -// - none of the above. -// -// and returns the most "interesting" associated node, which may be -// the same node, an ancestor or a descendent. -func findInterestingNode(pkginfo *loader.PackageInfo, path []ast.Node) ([]ast.Node, action) { - // TODO(adonovan): integrate with go/types/stdlib_test.go and - // apply this to every AST node we can find to make sure it - // doesn't crash. - - // TODO(adonovan): audit for ParenExpr safety, esp. since we - // traverse up and down. - - // TODO(adonovan): if the users selects the "." in - // "fmt.Fprintf()", they'll get an ambiguous selection error; - // we won't even reach here. Can we do better? - - // TODO(adonovan): describing a field within 'type T struct {...}' - // describes the (anonymous) struct type and concludes "no methods". - // We should ascend to the enclosing type decl, if any. - - for len(path) > 0 { - switch n := path[0].(type) { - case *ast.GenDecl: - if len(n.Specs) == 1 { - // Descend to sole {Import,Type,Value}Spec child. - path = append([]ast.Node{n.Specs[0]}, path...) 
- continue - } - return path, actionUnknown // uninteresting - - case *ast.FuncDecl: - // Descend to function name. - path = append([]ast.Node{n.Name}, path...) - continue - - case *ast.ImportSpec: - return path, actionPackage - - case *ast.ValueSpec: - if len(n.Names) == 1 { - // Descend to sole Ident child. - path = append([]ast.Node{n.Names[0]}, path...) - continue - } - return path, actionUnknown // uninteresting - - case *ast.TypeSpec: - // Descend to type name. - path = append([]ast.Node{n.Name}, path...) - continue - - case *ast.Comment, *ast.CommentGroup, *ast.File, *ast.KeyValueExpr, *ast.CommClause: - return path, actionUnknown // uninteresting - - case ast.Stmt: - return path, actionStmt - - case *ast.ArrayType, - *ast.StructType, - *ast.FuncType, - *ast.InterfaceType, - *ast.MapType, - *ast.ChanType: - return path, actionType - - case *ast.Ellipsis: - // Continue to enclosing node. - // e.g. [...]T in ArrayType - // f(x...) in CallExpr - // f(x...T) in FuncType - - case *ast.Field: - // TODO(adonovan): this needs more thought, - // since fields can be so many things. - if len(n.Names) == 1 { - // Descend to sole Ident child. - path = append([]ast.Node{n.Names[0]}, path...) - continue - } - // Zero names (e.g. anon field in struct) - // or multiple field or param names: - // continue to enclosing field list. - - case *ast.FieldList: - // Continue to enclosing node: - // {Struct,Func,Interface}Type or FuncDecl. - - case *ast.BasicLit: - if _, ok := path[1].(*ast.ImportSpec); ok { - return path[1:], actionPackage - } - return path, actionExpr - - case *ast.SelectorExpr: - // TODO(adonovan): use Selections info directly. - if pkginfo.Uses[n.Sel] == nil { - // TODO(adonovan): is this reachable? - return path, actionUnknown - } - // Descend to .Sel child. - path = append([]ast.Node{n.Sel}, path...) 
- continue - - case *ast.Ident: - switch pkginfo.ObjectOf(n).(type) { - case *types.PkgName: - return path, actionPackage - - case *types.Const: - return path, actionExpr - - case *types.Label: - return path, actionStmt - - case *types.TypeName: - return path, actionType - - case *types.Var: - // For x in 'struct {x T}', return struct type, for now. - if _, ok := path[1].(*ast.Field); ok { - _ = path[2].(*ast.FieldList) // assertion - if _, ok := path[3].(*ast.StructType); ok { - return path[3:], actionType - } - } - return path, actionExpr - - case *types.Func: - return path, actionExpr - - case *types.Builtin: - // For reference to built-in function, return enclosing call. - path = path[1:] // ascend to enclosing function call - continue - - case *types.Nil: - return path, actionExpr - } - - // No object. - switch path[1].(type) { - case *ast.SelectorExpr: - // Return enclosing selector expression. - return path[1:], actionExpr - - case *ast.Field: - // TODO(adonovan): test this. - // e.g. all f in: - // struct { f, g int } - // interface { f() } - // func (f T) method(f, g int) (f, g bool) - // - // switch path[3].(type) { - // case *ast.FuncDecl: - // case *ast.StructType: - // case *ast.InterfaceType: - // } - // - // return path[1:], actionExpr - // - // Unclear what to do with these. - // Struct.Fields -- field - // Interface.Methods -- field - // FuncType.{Params.Results} -- actionExpr - // FuncDecl.Recv -- actionExpr - - case *ast.File: - // 'package foo' - return path, actionPackage - - case *ast.ImportSpec: - return path[1:], actionPackage - - default: - // e.g. blank identifier - // or y in "switch y := x.(type)" - // or code in a _test.go file that's not part of the package. - return path, actionUnknown - } - - case *ast.StarExpr: - if pkginfo.Types[n].IsType() { - return path, actionType - } - return path, actionExpr - - case ast.Expr: - // All Expr but {BasicLit,Ident,StarExpr} are - // "true" expressions that evaluate to a value. 
- return path, actionExpr - } - - // Ascend to parent. - path = path[1:] - } - - return nil, actionUnknown // unreachable -} - -func describeValue(qpos *queryPos, path []ast.Node) (*describeValueResult, error) { - var expr ast.Expr - var obj types.Object - switch n := path[0].(type) { - case *ast.ValueSpec: - // ambiguous ValueSpec containing multiple names - return nil, fmt.Errorf("multiple value specification") - case *ast.Ident: - obj = qpos.info.ObjectOf(n) - expr = n - case ast.Expr: - expr = n - default: - // TODO(adonovan): is this reachable? - return nil, fmt.Errorf("unexpected AST for expr: %T", n) - } - - typ := qpos.info.TypeOf(expr) - if typ == nil { - typ = types.Typ[types.Invalid] - } - constVal := qpos.info.Types[expr].Value - if c, ok := obj.(*types.Const); ok { - constVal = c.Val() - } - - return &describeValueResult{ - qpos: qpos, - expr: expr, - typ: typ, - names: appendNames(nil, typ), - constVal: constVal, - obj: obj, - methods: accessibleMethods(typ, qpos.info.Pkg), - fields: accessibleFields(typ, qpos.info.Pkg), - }, nil -} - -// appendNames returns named types found within the Type by -// removing map, pointer, channel, slice, and array constructors. -// It does not descend into structs or interfaces. 
-func appendNames(names []*types.Named, typ types.Type) []*types.Named { - // elemType specifies type that has some element in it - // such as array, slice, chan, pointer - type elemType interface { - Elem() types.Type - } - - switch t := typ.(type) { - case *types.Named: - names = append(names, t) - case *types.Map: - names = appendNames(names, t.Key()) - names = appendNames(names, t.Elem()) - case elemType: - names = appendNames(names, t.Elem()) - } - - return names -} - -type describeValueResult struct { - qpos *queryPos - expr ast.Expr // query node - typ types.Type // type of expression - names []*types.Named // named types within typ - constVal constant.Value // value of expression, if constant - obj types.Object // var/func/const object, if expr was Ident - methods []*types.Selection - fields []describeField -} - -func (r *describeValueResult) PrintPlain(printf printfFunc) { - var prefix, suffix string - if r.constVal != nil { - suffix = fmt.Sprintf(" of value %s", r.constVal) - } - switch obj := r.obj.(type) { - case *types.Func: - if recv := obj.Type().(*types.Signature).Recv(); recv != nil { - if _, ok := recv.Type().Underlying().(*types.Interface); ok { - prefix = "interface method " - } else { - prefix = "method " - } - } - } - - // Describe the expression. 
- if r.obj != nil { - if r.obj.Pos() == r.expr.Pos() { - // defining ident - printf(r.expr, "definition of %s%s%s", prefix, r.qpos.objectString(r.obj), suffix) - } else { - // referring ident - printf(r.expr, "reference to %s%s%s", prefix, r.qpos.objectString(r.obj), suffix) - if def := r.obj.Pos(); def != token.NoPos { - printf(def, "defined here") - } - } - } else { - desc := astutil.NodeDescription(r.expr) - if suffix != "" { - // constant expression - printf(r.expr, "%s%s", desc, suffix) - } else { - // non-constant expression - printf(r.expr, "%s of type %s", desc, r.qpos.typeString(r.typ)) - } - } - - printMethods(printf, r.expr, r.methods) - printFields(printf, r.expr, r.fields) - printNamedTypes(printf, r.expr, r.names) -} - -func (r *describeValueResult) JSON(fset *token.FileSet) []byte { - var value, objpos string - if r.constVal != nil { - value = r.constVal.String() - } - if r.obj != nil { - objpos = fset.Position(r.obj.Pos()).String() - } - - typesPos := make([]serial.Definition, len(r.names)) - for i, t := range r.names { - typesPos[i] = serial.Definition{ - ObjPos: fset.Position(t.Obj().Pos()).String(), - Desc: r.qpos.typeString(t), - } - } - - return toJSON(&serial.Describe{ - Desc: astutil.NodeDescription(r.expr), - Pos: fset.Position(r.expr.Pos()).String(), - Detail: "value", - Value: &serial.DescribeValue{ - Type: r.qpos.typeString(r.typ), - TypesPos: typesPos, - Value: value, - ObjPos: objpos, - }, - }) -} - -// ---- TYPE ------------------------------------------------------------ - -func describeType(qpos *queryPos, path []ast.Node) (*describeTypeResult, error) { - var description string - var typ types.Type - switch n := path[0].(type) { - case *ast.Ident: - obj := qpos.info.ObjectOf(n).(*types.TypeName) - typ = obj.Type() - if isAlias(obj) { - description = "alias of " - } else if obj.Pos() == n.Pos() { - description = "definition of " // (Named type) - } else if _, ok := typ.(*types.Basic); ok { - description = "reference to built-in " - } 
else { - description = "reference to " // (Named type) - } - - case ast.Expr: - typ = qpos.info.TypeOf(n) - - default: - // Unreachable? - return nil, fmt.Errorf("unexpected AST for type: %T", n) - } - - description = description + "type " + qpos.typeString(typ) - - // Show sizes for structs and named types (it's fairly obvious for others). - switch typ.(type) { - case *types.Named, *types.Struct: - szs := types.StdSizes{WordSize: 8, MaxAlign: 8} // assume amd64 - description = fmt.Sprintf("%s (size %d, align %d)", description, - szs.Sizeof(typ), szs.Alignof(typ)) - } - - return &describeTypeResult{ - qpos: qpos, - node: path[0], - description: description, - typ: typ, - methods: accessibleMethods(typ, qpos.info.Pkg), - fields: accessibleFields(typ, qpos.info.Pkg), - }, nil -} - -type describeTypeResult struct { - qpos *queryPos - node ast.Node - description string - typ types.Type - methods []*types.Selection - fields []describeField -} - -type describeField struct { - implicits []*types.Named - field *types.Var -} - -func printMethods(printf printfFunc, node ast.Node, methods []*types.Selection) { - if len(methods) > 0 { - printf(node, "Methods:") - } - for _, meth := range methods { - // Print the method type relative to the package - // in which it was defined, not the query package, - printf(meth.Obj(), "\t%s", - types.SelectionString(meth, types.RelativeTo(meth.Obj().Pkg()))) - } -} - -func printFields(printf printfFunc, node ast.Node, fields []describeField) { - if len(fields) > 0 { - printf(node, "Fields:") - } - - // Align the names and the types (requires two passes). 
- var width int - var names []string - for _, f := range fields { - var buf bytes.Buffer - for _, fld := range f.implicits { - buf.WriteString(fld.Obj().Name()) - buf.WriteByte('.') - } - buf.WriteString(f.field.Name()) - name := buf.String() - if n := utf8.RuneCountInString(name); n > width { - width = n - } - names = append(names, name) - } - - for i, f := range fields { - // Print the field type relative to the package - // in which it was defined, not the query package, - printf(f.field, "\t%*s %s", -width, names[i], - types.TypeString(f.field.Type(), types.RelativeTo(f.field.Pkg()))) - } -} - -func printNamedTypes(printf printfFunc, node ast.Node, names []*types.Named) { - if len(names) > 0 { - printf(node, "Named types:") - } - - for _, t := range names { - // Print the type relative to the package - // in which it was defined, not the query package, - printf(t.Obj(), "\ttype %s defined here", - types.TypeString(t.Obj().Type(), types.RelativeTo(t.Obj().Pkg()))) - } -} - -func (r *describeTypeResult) PrintPlain(printf printfFunc) { - printf(r.node, "%s", r.description) - - // Show the underlying type for a reference to a named type. - if nt, ok := r.typ.(*types.Named); ok && r.node.Pos() != nt.Obj().Pos() { - // TODO(adonovan): improve display of complex struct/interface types. - printf(nt.Obj(), "defined as %s", r.qpos.typeString(nt.Underlying())) - } - - printMethods(printf, r.node, r.methods) - if len(r.methods) == 0 { - // Only report null result for type kinds - // capable of bearing methods. 
- switch r.typ.(type) { - case *types.Interface, *types.Struct, *types.Named: - printf(r.node, "No methods.") - } - } - - printFields(printf, r.node, r.fields) -} - -func (r *describeTypeResult) JSON(fset *token.FileSet) []byte { - var namePos, nameDef string - if nt, ok := r.typ.(*types.Named); ok { - namePos = fset.Position(nt.Obj().Pos()).String() - nameDef = nt.Underlying().String() - } - return toJSON(&serial.Describe{ - Desc: r.description, - Pos: fset.Position(r.node.Pos()).String(), - Detail: "type", - Type: &serial.DescribeType{ - Type: r.qpos.typeString(r.typ), - NamePos: namePos, - NameDef: nameDef, - Methods: methodsToSerial(r.qpos.info.Pkg, r.methods, fset), - }, - }) -} - -// ---- PACKAGE ------------------------------------------------------------ - -func describePackage(qpos *queryPos, path []ast.Node) (*describePackageResult, error) { - var description string - var pkg *types.Package - switch n := path[0].(type) { - case *ast.ImportSpec: - var obj types.Object - if n.Name != nil { - obj = qpos.info.Defs[n.Name] - } else { - obj = qpos.info.Implicits[n] - } - pkgname, _ := obj.(*types.PkgName) - if pkgname == nil { - return nil, fmt.Errorf("can't import package %s", n.Path.Value) - } - pkg = pkgname.Imported() - description = fmt.Sprintf("import of package %q", pkg.Path()) - - case *ast.Ident: - if _, isDef := path[1].(*ast.File); isDef { - // e.g. package id - pkg = qpos.info.Pkg - description = fmt.Sprintf("definition of package %q", pkg.Path()) - } else { - // e.g. import id "..." - // or id.F() - pkg = qpos.info.ObjectOf(n).(*types.PkgName).Imported() - description = fmt.Sprintf("reference to package %q", pkg.Path()) - } - - default: - // Unreachable? - return nil, fmt.Errorf("unexpected AST for package: %T", n) - } - - var members []*describeMember - // NB: "unsafe" has no types.Package - if pkg != nil { - // Enumerate the accessible package members - // in lexicographic order. 
- for _, name := range pkg.Scope().Names() { - if pkg == qpos.info.Pkg || ast.IsExported(name) { - mem := pkg.Scope().Lookup(name) - var methods []*types.Selection - if mem, ok := mem.(*types.TypeName); ok { - methods = accessibleMethods(mem.Type(), qpos.info.Pkg) - } - members = append(members, &describeMember{ - mem, - methods, - }) - - } - } - } - - return &describePackageResult{qpos.fset, path[0], description, pkg, members}, nil -} - -type describePackageResult struct { - fset *token.FileSet - node ast.Node - description string - pkg *types.Package - members []*describeMember // in lexicographic name order -} - -type describeMember struct { - obj types.Object - methods []*types.Selection // in types.MethodSet order -} - -func (r *describePackageResult) PrintPlain(printf printfFunc) { - printf(r.node, "%s", r.description) - - // Compute max width of name "column". - maxname := 0 - for _, mem := range r.members { - if l := len(mem.obj.Name()); l > maxname { - maxname = l - } - } - - for _, mem := range r.members { - printf(mem.obj, "\t%s", formatMember(mem.obj, maxname)) - for _, meth := range mem.methods { - printf(meth.Obj(), "\t\t%s", types.SelectionString(meth, types.RelativeTo(r.pkg))) - } - } -} - -func formatMember(obj types.Object, maxname int) string { - qualifier := types.RelativeTo(obj.Pkg()) - var buf bytes.Buffer - fmt.Fprintf(&buf, "%-5s %-*s", tokenOf(obj), maxname, obj.Name()) - switch obj := obj.(type) { - case *types.Const: - fmt.Fprintf(&buf, " %s = %s", types.TypeString(obj.Type(), qualifier), obj.Val()) - - case *types.Func: - fmt.Fprintf(&buf, " %s", types.TypeString(obj.Type(), qualifier)) - - case *types.TypeName: - typ := obj.Type() - if isAlias(obj) { - buf.WriteString(" = ") - } else { - buf.WriteByte(' ') - typ = typ.Underlying() - } - var typestr string - // Abbreviate long aggregate type names. 
- switch typ := typ.(type) { - case *types.Interface: - if typ.NumMethods() > 1 { - typestr = "interface{...}" - } - case *types.Struct: - if typ.NumFields() > 1 { - typestr = "struct{...}" - } - } - if typestr == "" { - // The fix for #44515 changed the printing of unsafe.Pointer - // such that it uses a qualifier if one is provided. Using - // the types.RelativeTo qualifier provided here, the output - // is just "Pointer" rather than "unsafe.Pointer". This is - // consistent with the printing of non-type objects but it - // breaks an existing test which needs to work with older - // versions of Go. Re-establish the original output by not - // using a qualifier at all if we're printing a type from - // package unsafe - there's only unsafe.Pointer (#44596). - // NOTE: This correction can be removed (and the test's - // golden file adjusted) once we only run against go1.17 - // or bigger. - qualifier := qualifier - if obj.Pkg() == types.Unsafe { - qualifier = nil - } - typestr = types.TypeString(typ, qualifier) - } - buf.WriteString(typestr) - - case *types.Var: - fmt.Fprintf(&buf, " %s", types.TypeString(obj.Type(), qualifier)) - } - return buf.String() -} - -func (r *describePackageResult) JSON(fset *token.FileSet) []byte { - var members []*serial.DescribeMember - for _, mem := range r.members { - obj := mem.obj - typ := obj.Type() - var val string - var alias string - switch obj := obj.(type) { - case *types.Const: - val = obj.Val().String() - case *types.TypeName: - if isAlias(obj) { - alias = "= " // kludgy - } else { - typ = typ.Underlying() - } - } - members = append(members, &serial.DescribeMember{ - Name: obj.Name(), - Type: alias + typ.String(), - Value: val, - Pos: fset.Position(obj.Pos()).String(), - Kind: tokenOf(obj), - Methods: methodsToSerial(r.pkg, mem.methods, fset), - }) - } - return toJSON(&serial.Describe{ - Desc: r.description, - Pos: fset.Position(r.node.Pos()).String(), - Detail: "package", - Package: &serial.DescribePackage{ - Path: 
r.pkg.Path(), - Members: members, - }, - }) -} - -func tokenOf(o types.Object) string { - switch o.(type) { - case *types.Func: - return "func" - case *types.Var: - return "var" - case *types.TypeName: - return "type" - case *types.Const: - return "const" - case *types.PkgName: - return "package" - case *types.Builtin: - return "builtin" // e.g. when describing package "unsafe" - case *types.Nil: - return "nil" - case *types.Label: - return "label" - } - panic(o) -} - -// ---- STATEMENT ------------------------------------------------------------ - -func describeStmt(qpos *queryPos, path []ast.Node) (*describeStmtResult, error) { - var description string - switch n := path[0].(type) { - case *ast.Ident: - if qpos.info.Defs[n] != nil { - description = "labelled statement" - } else { - description = "reference to labelled statement" - } - - default: - // Nothing much to say about statements. - description = astutil.NodeDescription(n) - } - return &describeStmtResult{qpos.fset, path[0], description}, nil -} - -type describeStmtResult struct { - fset *token.FileSet - node ast.Node - description string -} - -func (r *describeStmtResult) PrintPlain(printf printfFunc) { - printf(r.node, "%s", r.description) -} - -func (r *describeStmtResult) JSON(fset *token.FileSet) []byte { - return toJSON(&serial.Describe{ - Desc: r.description, - Pos: fset.Position(r.node.Pos()).String(), - Detail: "unknown", - }) -} - -// ------------------- Utilities ------------------- - -// pathToString returns a string containing the concrete types of the -// nodes in path. 
-func pathToString(path []ast.Node) string { - var buf bytes.Buffer - fmt.Fprint(&buf, "[") - for i, n := range path { - if i > 0 { - fmt.Fprint(&buf, " ") - } - fmt.Fprint(&buf, strings.TrimPrefix(fmt.Sprintf("%T", n), "*ast.")) - } - fmt.Fprint(&buf, "]") - return buf.String() -} - -func accessibleMethods(t types.Type, from *types.Package) []*types.Selection { - var methods []*types.Selection - for _, meth := range typeutil.IntuitiveMethodSet(t, nil) { - if isAccessibleFrom(meth.Obj(), from) { - methods = append(methods, meth) - } - } - return methods -} - -// accessibleFields returns the set of accessible -// field selections on a value of type recv. -func accessibleFields(recv types.Type, from *types.Package) []describeField { - wantField := func(f *types.Var) bool { - if !isAccessibleFrom(f, from) { - return false - } - // Check that the field is not shadowed. - obj, _, _ := types.LookupFieldOrMethod(recv, true, f.Pkg(), f.Name()) - return obj == f - } - - var fields []describeField - var visit func(t types.Type, stack []*types.Named) - visit = func(t types.Type, stack []*types.Named) { - tStruct, ok := deref(t).Underlying().(*types.Struct) - if !ok { - return - } - fieldloop: - for i := 0; i < tStruct.NumFields(); i++ { - f := tStruct.Field(i) - - // Handle recursion through anonymous fields. - if f.Anonymous() { - tf := f.Type() - if ptr, ok := tf.(*types.Pointer); ok { - tf = ptr.Elem() - } - if named, ok := tf.(*types.Named); ok { // (be defensive) - // If we've already visited this named type - // on this path, break the cycle. - for _, x := range stack { - if x == named { - continue fieldloop - } - } - visit(f.Type(), append(stack, named)) - } - } - - // Save accessible fields. 
- if wantField(f) { - fields = append(fields, describeField{ - implicits: append([]*types.Named(nil), stack...), - field: f, - }) - } - } - } - visit(recv, nil) - - return fields -} - -func isAccessibleFrom(obj types.Object, pkg *types.Package) bool { - return ast.IsExported(obj.Name()) || obj.Pkg() == pkg -} - -func methodsToSerial(this *types.Package, methods []*types.Selection, fset *token.FileSet) []serial.DescribeMethod { - qualifier := types.RelativeTo(this) - var jmethods []serial.DescribeMethod - for _, meth := range methods { - var ser serial.DescribeMethod - if meth != nil { // may contain nils when called by implements (on a method) - ser = serial.DescribeMethod{ - Name: types.SelectionString(meth, qualifier), - Pos: fset.Position(meth.Obj().Pos()).String(), - } - } - jmethods = append(jmethods, ser) - } - return jmethods -} diff --git a/cmd/guru/freevars.go b/cmd/guru/freevars.go deleted file mode 100644 index b079a3ef4a5..00000000000 --- a/cmd/guru/freevars.go +++ /dev/null @@ -1,222 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package main - -import ( - "bytes" - "go/ast" - "go/printer" - "go/token" - "go/types" - "sort" - - "golang.org/x/tools/cmd/guru/serial" - "golang.org/x/tools/go/loader" -) - -// freevars displays the lexical (not package-level) free variables of -// the selection. -// -// It treats A.B.C as a separate variable from A to reveal the parts -// of an aggregate type that are actually needed. -// This aids refactoring. -// -// TODO(adonovan): optionally display the free references to -// file/package scope objects, and to objects from other packages. -// Depending on where the resulting function abstraction will go, -// these might be interesting. Perhaps group the results into three -// bands. 
-func freevars(q *Query) error { - lconf := loader.Config{Build: q.Build} - allowErrors(&lconf) - - if _, err := importQueryPackage(q.Pos, &lconf); err != nil { - return err - } - - // Load/parse/type-check the program. - lprog, err := lconf.Load() - if err != nil { - return err - } - - qpos, err := parseQueryPos(lprog, q.Pos, false) - if err != nil { - return err - } - - file := qpos.path[len(qpos.path)-1] // the enclosing file - fileScope := qpos.info.Scopes[file] - pkgScope := fileScope.Parent() - - // The id and sel functions return non-nil if they denote an - // object o or selection o.x.y that is referenced by the - // selection but defined neither within the selection nor at - // file scope, i.e. it is in the lexical environment. - var id func(n *ast.Ident) types.Object - var sel func(n *ast.SelectorExpr) types.Object - - sel = func(n *ast.SelectorExpr) types.Object { - switch x := unparen(n.X).(type) { - case *ast.SelectorExpr: - return sel(x) - case *ast.Ident: - return id(x) - } - return nil - } - - id = func(n *ast.Ident) types.Object { - obj := qpos.info.Uses[n] - if obj == nil { - return nil // not a reference - } - if _, ok := obj.(*types.PkgName); ok { - return nil // imported package - } - if !(file.Pos() <= obj.Pos() && obj.Pos() <= file.End()) { - return nil // not defined in this file - } - scope := obj.Parent() - if scope == nil { - return nil // e.g. interface method, struct field - } - if scope == fileScope || scope == pkgScope { - return nil // defined at file or package scope - } - if qpos.start <= obj.Pos() && obj.Pos() <= qpos.end { - return nil // defined within selection => not free - } - return obj - } - - // Maps each reference that is free in the selection - // to the object it refers to. - // The map de-duplicates repeated references. - refsMap := make(map[string]freevarsRef) - - // Visit all the identifiers in the selected ASTs. 
- ast.Inspect(qpos.path[0], func(n ast.Node) bool { - if n == nil { - return true // popping DFS stack - } - - // Is this node contained within the selection? - // (freevars permits inexact selections, - // like two stmts in a block.) - if qpos.start <= n.Pos() && n.End() <= qpos.end { - var obj types.Object - var prune bool - switch n := n.(type) { - case *ast.Ident: - obj = id(n) - - case *ast.SelectorExpr: - obj = sel(n) - prune = true - } - - if obj != nil { - var kind string - switch obj.(type) { - case *types.Var: - kind = "var" - case *types.Func: - kind = "func" - case *types.TypeName: - kind = "type" - case *types.Const: - kind = "const" - case *types.Label: - kind = "label" - default: - panic(obj) - } - - typ := qpos.info.TypeOf(n.(ast.Expr)) - ref := freevarsRef{kind, printNode(lprog.Fset, n), typ, obj} - refsMap[ref.ref] = ref - - if prune { - return false // don't descend - } - } - } - - return true // descend - }) - - refs := make([]freevarsRef, 0, len(refsMap)) - for _, ref := range refsMap { - refs = append(refs, ref) - } - sort.Sort(byRef(refs)) - - q.Output(lprog.Fset, &freevarsResult{ - qpos: qpos, - refs: refs, - }) - return nil -} - -type freevarsResult struct { - qpos *queryPos - refs []freevarsRef -} - -type freevarsRef struct { - kind string - ref string - typ types.Type - obj types.Object -} - -func (r *freevarsResult) PrintPlain(printf printfFunc) { - if len(r.refs) == 0 { - printf(r.qpos, "No free identifiers.") - } else { - printf(r.qpos, "Free identifiers:") - qualifier := types.RelativeTo(r.qpos.info.Pkg) - for _, ref := range r.refs { - // Avoid printing "type T T". 
- var typstr string - if ref.kind != "type" && ref.kind != "label" { - typstr = " " + types.TypeString(ref.typ, qualifier) - } - printf(ref.obj, "%s %s%s", ref.kind, ref.ref, typstr) - } - } -} - -func (r *freevarsResult) JSON(fset *token.FileSet) []byte { - var buf bytes.Buffer - for i, ref := range r.refs { - if i > 0 { - buf.WriteByte('\n') - } - buf.Write(toJSON(serial.FreeVar{ - Pos: fset.Position(ref.obj.Pos()).String(), - Kind: ref.kind, - Ref: ref.ref, - Type: ref.typ.String(), - })) - } - return buf.Bytes() -} - -// -------- utils -------- - -type byRef []freevarsRef - -func (p byRef) Len() int { return len(p) } -func (p byRef) Less(i, j int) bool { return p[i].ref < p[j].ref } -func (p byRef) Swap(i, j int) { p[i], p[j] = p[j], p[i] } - -// printNode returns the pretty-printed syntax of n. -func printNode(fset *token.FileSet, n ast.Node) string { - var buf bytes.Buffer - printer.Fprint(&buf, fset, n) - return buf.String() -} diff --git a/cmd/guru/guru.go b/cmd/guru/guru.go deleted file mode 100644 index 575136cf3d8..00000000000 --- a/cmd/guru/guru.go +++ /dev/null @@ -1,334 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package main - -// TODO(adonovan): new queries -// - show all statements that may update the selected lvalue -// (local, global, field, etc). -// - show all places where an object of type T is created -// (&T{}, var t T, new(T), new(struct{array [3]T}), etc. - -import ( - "encoding/json" - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/token" - "go/types" - "io" - "log" - "path/filepath" - "strings" - - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/loader" -) - -type printfFunc func(pos interface{}, format string, args ...interface{}) - -// A QueryResult is an item of output. Each query produces a stream of -// query results, calling Query.Output for each one. 
-type QueryResult interface { - // JSON returns the QueryResult in JSON form. - JSON(fset *token.FileSet) []byte - - // PrintPlain prints the QueryResult in plain text form. - // The implementation calls printfFunc to print each line of output. - PrintPlain(printf printfFunc) -} - -// A QueryPos represents the position provided as input to a query: -// a textual extent in the program's source code, the AST node it -// corresponds to, and the package to which it belongs. -// Instances are created by parseQueryPos. -type queryPos struct { - fset *token.FileSet - start, end token.Pos // source extent of query - path []ast.Node // AST path from query node to root of ast.File - exact bool // 2nd result of PathEnclosingInterval - info *loader.PackageInfo // type info for the queried package (nil for fastQueryPos) -} - -// typeString prints type T relative to the query position. -func (qpos *queryPos) typeString(T types.Type) string { - return types.TypeString(T, types.RelativeTo(qpos.info.Pkg)) -} - -// objectString prints object obj relative to the query position. -func (qpos *queryPos) objectString(obj types.Object) string { - return types.ObjectString(obj, types.RelativeTo(qpos.info.Pkg)) -} - -// A Query specifies a single guru query. -type Query struct { - Pos string // query position - Build *build.Context // package loading configuration - - // result-printing function, safe for concurrent use - Output func(*token.FileSet, QueryResult) -} - -// Run runs an guru query and populates its Fset and Result. 
-func Run(mode string, q *Query) error { - switch mode { - case "definition": - return definition(q) - case "describe": - return describe(q) - case "freevars": - return freevars(q) - case "implements": - return implements(q) - case "referrers": - return referrers(q) - case "what": - return what(q) - case "callees", "callers", "pointsto", "whicherrs", "callstack", "peers": - return fmt.Errorf("mode %q is no longer supported (see Go issue #59676)", mode) - default: - return fmt.Errorf("invalid mode: %q", mode) - } -} - -// importQueryPackage finds the package P containing the -// query position and tells conf to import it. -// It returns the package's path. -func importQueryPackage(pos string, conf *loader.Config) (string, error) { - fqpos, err := fastQueryPos(conf.Build, pos) - if err != nil { - return "", err // bad query - } - filename := fqpos.fset.File(fqpos.start).Name() - - _, importPath, err := guessImportPath(filename, conf.Build) - if err != nil { - // Can't find GOPATH dir. - // Treat the query file as its own package. - importPath = "command-line-arguments" - conf.CreateFromFilenames(importPath, filename) - } else { - // Check that it's possible to load the queried package. - // (e.g. guru tests contain different 'package' decls in same dir.) - // Keep consistent with logic in loader/util.go! - cfg2 := *conf.Build - cfg2.CgoEnabled = false - bp, err := cfg2.Import(importPath, "", 0) - if err != nil { - return "", err // no files for package - } - - switch pkgContainsFile(bp, filename) { - case 'T': - conf.ImportWithTests(importPath) - case 'X': - conf.ImportWithTests(importPath) - importPath += "_test" // for TypeCheckFuncBodies - case 'G': - conf.Import(importPath) - default: - // This happens for ad-hoc packages like - // $GOROOT/src/net/http/triv.go. 
- return "", fmt.Errorf("package %q doesn't contain file %s", - importPath, filename) - } - } - - conf.TypeCheckFuncBodies = func(p string) bool { return p == importPath } - - return importPath, nil -} - -// pkgContainsFile reports whether file was among the packages Go -// files, Test files, eXternal test files, or not found. -func pkgContainsFile(bp *build.Package, filename string) byte { - for i, files := range [][]string{bp.GoFiles, bp.TestGoFiles, bp.XTestGoFiles} { - for _, file := range files { - if sameFile(filepath.Join(bp.Dir, file), filename) { - return "GTX"[i] - } - } - } - return 0 // not found -} - -// parseQueryPos parses the source query position pos and returns the -// AST node of the loaded program lprog that it identifies. -// If needExact, it must identify a single AST subtree; -// this is appropriate for queries that allow fairly arbitrary syntax, -// e.g. "describe". -func parseQueryPos(lprog *loader.Program, pos string, needExact bool) (*queryPos, error) { - filename, startOffset, endOffset, err := parsePos(pos) - if err != nil { - return nil, err - } - - // Find the named file among those in the loaded program. - var file *token.File - lprog.Fset.Iterate(func(f *token.File) bool { - if sameFile(filename, f.Name()) { - file = f - return false // done - } - return true // continue - }) - if file == nil { - return nil, fmt.Errorf("file %s not found in loaded program", filename) - } - - start, end, err := fileOffsetToPos(file, startOffset, endOffset) - if err != nil { - return nil, err - } - info, path, exact := lprog.PathEnclosingInterval(start, end) - if path == nil { - return nil, fmt.Errorf("no syntax here") - } - if needExact && !exact { - return nil, fmt.Errorf("ambiguous selection within %s", astutil.NodeDescription(path[0])) - } - return &queryPos{lprog.Fset, start, end, path, exact, info}, nil -} - -// ---------- Utilities ---------- - -// loadWithSoftErrors calls lconf.Load, suppressing "soft" errors. (See Go issue 16530.) 
-// TODO(adonovan): Once the loader has an option to allow soft errors, -// replace calls to loadWithSoftErrors with loader calls with that parameter. -func loadWithSoftErrors(lconf *loader.Config) (*loader.Program, error) { - lconf.AllowErrors = true - - // Ideally we would just return conf.Load() here, but go/types - // reports certain "soft" errors that gc does not (Go issue 14596). - // As a workaround, we set AllowErrors=true and then duplicate - // the loader's error checking but allow soft errors. - // It would be nice if the loader API permitted "AllowErrors: soft". - prog, err := lconf.Load() - if err != nil { - return nil, err - } - var errpkgs []string - // Report hard errors in indirectly imported packages. - for _, info := range prog.AllPackages { - if containsHardErrors(info.Errors) { - errpkgs = append(errpkgs, info.Pkg.Path()) - } else { - // Enable SSA construction for packages containing only soft errors. - info.TransitivelyErrorFree = true - } - } - if errpkgs != nil { - var more string - if len(errpkgs) > 3 { - more = fmt.Sprintf(" and %d more", len(errpkgs)-3) - errpkgs = errpkgs[:3] - } - return nil, fmt.Errorf("couldn't load packages due to errors: %s%s", - strings.Join(errpkgs, ", "), more) - } - return prog, err -} - -func containsHardErrors(errors []error) bool { - for _, err := range errors { - if err, ok := err.(types.Error); ok && err.Soft { - continue - } - return true - } - return false -} - -// allowErrors causes type errors to be silently ignored. -// (Not suitable if SSA construction follows.) -func allowErrors(lconf *loader.Config) { - ctxt := *lconf.Build // copy - ctxt.CgoEnabled = false - lconf.Build = &ctxt - lconf.AllowErrors = true - // AllErrors makes the parser always return an AST instead of - // bailing out after 10 errors and returning an empty ast.File. 
- lconf.ParserMode = parser.AllErrors - lconf.TypeChecker.Error = func(err error) {} -} - -func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } - -// deref returns a pointer's element type; otherwise it returns typ. -func deref(typ types.Type) types.Type { - if p, ok := typ.Underlying().(*types.Pointer); ok { - return p.Elem() - } - return typ -} - -// fprintf prints to w a message of the form "location: message\n" -// where location is derived from pos. -// -// pos must be one of: -// - a token.Pos, denoting a position -// - an ast.Node, denoting an interval -// - anything with a Pos() method: -// ssa.Member, ssa.Value, ssa.Instruction, types.Object, etc. -// - a QueryPos, denoting the extent of the user's query. -// - nil, meaning no position at all. -// -// The output format is compatible with the 'gnu' -// compilation-error-regexp in Emacs' compilation mode. -func fprintf(w io.Writer, fset *token.FileSet, pos interface{}, format string, args ...interface{}) { - var start, end token.Pos - switch pos := pos.(type) { - case ast.Node: - start = pos.Pos() - end = pos.End() - case token.Pos: - start = pos - end = start - case *types.PkgName: - // The Pos of most PkgName objects does not coincide with an identifier, - // so we suppress the usual start+len(name) heuristic for types.Objects. - start = pos.Pos() - end = start - case types.Object: - start = pos.Pos() - end = start + token.Pos(len(pos.Name())) // heuristic - case interface { - Pos() token.Pos - }: - start = pos.Pos() - end = start - case *queryPos: - start = pos.start - end = pos.end - case nil: - // no-op - default: - panic(fmt.Sprintf("invalid pos: %T", pos)) - } - - if sp := fset.Position(start); start == end { - // (prints "-: " for token.NoPos) - fmt.Fprintf(w, "%s: ", sp) - } else { - ep := fset.Position(end) - // The -1 below is a concession to Emacs's broken use of - // inclusive (not half-open) intervals. - // Other editors may not want it. 
- // TODO(adonovan): add an -editor=vim|emacs|acme|auto - // flag; auto uses EMACS=t / VIM=... / etc env vars. - fmt.Fprintf(w, "%s:%d.%d-%d.%d: ", - sp.Filename, sp.Line, sp.Column, ep.Line, ep.Column-1) - } - fmt.Fprintf(w, format, args...) - io.WriteString(w, "\n") -} - -func toJSON(x interface{}) []byte { - b, err := json.MarshalIndent(x, "", "\t") - if err != nil { - log.Fatalf("JSON error: %v", err) - } - return b -} diff --git a/cmd/guru/guru_test.go b/cmd/guru/guru_test.go deleted file mode 100644 index d3c38e0a472..00000000000 --- a/cmd/guru/guru_test.go +++ /dev/null @@ -1,325 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package main_test - -// This file defines a test framework for guru queries. -// -// The files beneath testdata/src contain Go programs containing -// query annotations of the form: -// -// @verb id "select" -// -// where verb is the query mode (e.g. "callers"), id is a unique name -// for this query, and "select" is a regular expression matching the -// substring of the current line that is the query's input selection. -// -// The expected output for each query is provided in the accompanying -// .golden file. -// -// (Location information is not included because it's too fragile to -// display as text. TODO(adonovan): think about how we can test its -// correctness, since it is critical information.) -// -// Run this test with: -// % go test golang.org/x/tools/cmd/guru -update -// to update the golden files. - -import ( - "bytes" - "flag" - "fmt" - "go/build" - "go/parser" - "go/token" - "io" - "log" - "os" - "os/exec" - "path/filepath" - "regexp" - "runtime" - "sort" - "strconv" - "strings" - "sync" - "testing" - - guru "golang.org/x/tools/cmd/guru" - "golang.org/x/tools/internal/testenv" -) - -func init() { - // This test currently requires GOPATH mode. 
- // Explicitly disabling module mode should suffix, but - // we'll also turn off GOPROXY just for good measure. - if err := os.Setenv("GO111MODULE", "off"); err != nil { - log.Fatal(err) - } - if err := os.Setenv("GOPROXY", "off"); err != nil { - log.Fatal(err) - } -} - -var updateFlag = flag.Bool("update", false, "Update the golden files.") - -type query struct { - id string // unique id - verb string // query mode, e.g. "callees" - posn token.Position // query position - filename string - queryPos string // query position in command-line syntax -} - -func parseRegexp(text string) (*regexp.Regexp, error) { - pattern, err := strconv.Unquote(text) - if err != nil { - return nil, fmt.Errorf("can't unquote %s", text) - } - return regexp.Compile(pattern) -} - -// parseQueries parses and returns the queries in the named file. -func parseQueries(t *testing.T, filename string) []*query { - filedata, err := os.ReadFile(filename) - if err != nil { - t.Fatal(err) - } - - // Parse the file once to discover the test queries. 
- fset := token.NewFileSet() - f, err := parser.ParseFile(fset, filename, filedata, parser.ParseComments) - if err != nil { - t.Fatal(err) - } - - lines := bytes.Split(filedata, []byte("\n")) - - var queries []*query - queriesById := make(map[string]*query) - - // Find all annotations of these forms: - expectRe := regexp.MustCompile(`@([a-z]+)\s+(\S+)\s+(\".*)$`) // @verb id "regexp" - for _, c := range f.Comments { - text := strings.TrimSpace(c.Text()) - if text == "" || text[0] != '@' { - continue - } - posn := fset.Position(c.Pos()) - - // @verb id "regexp" - match := expectRe.FindStringSubmatch(text) - if match == nil { - t.Errorf("%s: ill-formed query: %s", posn, text) - continue - } - - id := match[2] - if prev, ok := queriesById[id]; ok { - t.Errorf("%s: duplicate id %s", posn, id) - t.Errorf("%s: previously used here", prev.posn) - continue - } - - q := &query{ - id: id, - verb: match[1], - filename: filename, - posn: posn, - } - - if match[3] != `"nopos"` { - selectRe, err := parseRegexp(match[3]) - if err != nil { - t.Errorf("%s: %s", posn, err) - continue - } - - // Find text of the current line, sans query. - // (Queries must be // not /**/ comments.) - line := lines[posn.Line-1][:posn.Column-1] - - // Apply regexp to current line to find input selection. - loc := selectRe.FindIndex(line) - if loc == nil { - t.Errorf("%s: selection pattern %s doesn't match line %q", - posn, match[3], string(line)) - continue - } - - // Assumes ASCII. TODO(adonovan): test on UTF-8. - linestart := posn.Offset - (posn.Column - 1) - - // Compute the file offsets. - q.queryPos = fmt.Sprintf("%s:#%d,#%d", - filename, linestart+loc[0], linestart+loc[1]) - } - - queries = append(queries, q) - queriesById[id] = q - } - - // Return the slice, not map, for deterministic iteration. - return queries -} - -// doQuery poses query q to the guru and writes its response and -// error (if any) to out. 
-func doQuery(out io.Writer, q *query, json bool) { - fmt.Fprintf(out, "-------- @%s %s --------\n", q.verb, q.id) - - var buildContext = build.Default - buildContext.GOPATH = "testdata" - - gopathAbs, _ := filepath.Abs(buildContext.GOPATH) - - var outputMu sync.Mutex // guards outputs - var outputs []string // JSON objects or lines of text - outputFn := func(fset *token.FileSet, qr guru.QueryResult) { - outputMu.Lock() - defer outputMu.Unlock() - if json { - jsonstr := string(qr.JSON(fset)) - // Sanitize any absolute filenames that creep in. - jsonstr = strings.Replace(jsonstr, gopathAbs, "$GOPATH", -1) - outputs = append(outputs, jsonstr) - } else { - // suppress position information - qr.PrintPlain(func(_ interface{}, format string, args ...interface{}) { - outputs = append(outputs, fmt.Sprintf(format, args...)) - }) - } - } - - query := guru.Query{ - Pos: q.queryPos, - Build: &buildContext, - Output: outputFn, - } - - if err := guru.Run(q.verb, &query); err != nil { - fmt.Fprintf(out, "\nError: %s\n", err) - return - } - - // In a "referrers" query, references are sorted within each - // package but packages are visited in arbitrary order, - // so for determinism we sort them. Line 0 is a caption. - if q.verb == "referrers" { - sort.Strings(outputs[1:]) - } - - for _, output := range outputs { - // Replace occurrences of interface{} with any, for consistent output - // across go 1.18 and earlier. - output = strings.ReplaceAll(output, "interface{}", "any") - fmt.Fprintf(out, "%s\n", output) - } - - if !json { - io.WriteString(out, "\n") - } -} - -func TestGuru(t *testing.T) { - if testing.Short() { - // These tests are super slow. - // TODO: make a lighter version of the tests for short mode? 
- t.Skipf("skipping in short mode") - } - - diffCmd := "/usr/bin/diff" - if runtime.GOOS == "plan9" { - diffCmd = "/bin/diff" - } - if _, err := exec.LookPath(diffCmd); err != nil { - t.Skipf("skipping test: %v", err) - } - - for _, filename := range []string{ - "testdata/src/alias/alias.go", - "testdata/src/describe/main.go", - "testdata/src/freevars/main.go", - "testdata/src/implements/main.go", - "testdata/src/implements-methods/main.go", - "testdata/src/imports/main.go", - "testdata/src/referrers/main.go", - "testdata/src/what/main.go", - "testdata/src/definition-json/main.go", - "testdata/src/describe-json/main.go", - "testdata/src/implements-json/main.go", - "testdata/src/implements-methods-json/main.go", - "testdata/src/referrers-json/main.go", - "testdata/src/what-json/main.go", - } { - filename := filename - name := strings.Split(filename, "/")[2] - t.Run(name, func(t *testing.T) { - t.Parallel() - if filename == "testdata/src/referrers/main.go" && runtime.GOOS == "plan9" { - // Disable this test on plan9 since it expects a particular - // wording for a "no such file or directory" error. - t.Skip() - } - json := strings.Contains(filename, "-json/") - queries := parseQueries(t, filename) - golden := filename + "lden" - gotfh, err := os.CreateTemp("", filepath.Base(filename)+"t") - if err != nil { - t.Fatal(err) - } - got := gotfh.Name() - defer func() { - gotfh.Close() - os.Remove(got) - }() - - // Run the guru on each query, redirecting its output - // and error (if any) to the foo.got file. - for _, q := range queries { - doQuery(gotfh, q, json) - } - - // Compare foo.got with foo.golden. 
- var cmd *exec.Cmd - switch runtime.GOOS { - case "plan9": - cmd = exec.Command(diffCmd, "-c", golden, got) - default: - cmd = exec.Command(diffCmd, "-u", golden, got) - } - testenv.NeedsTool(t, cmd.Path) - buf := new(bytes.Buffer) - cmd.Stdout = buf - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - t.Errorf("Guru tests for %s failed: %s.\n%s\n", - filename, err, buf) - - if *updateFlag { - t.Logf("Updating %s...", golden) - if err := exec.Command("/bin/cp", got, golden).Run(); err != nil { - t.Errorf("Update failed: %s", err) - } - } - } - }) - } -} - -func TestIssue14684(t *testing.T) { - var buildContext = build.Default - buildContext.GOPATH = "testdata" - query := guru.Query{ - Pos: "testdata/src/README.txt:#1", - Build: &buildContext, - } - err := guru.Run("freevars", &query) - if err == nil { - t.Fatal("guru query succeeded unexpectedly") - } - if got, want := err.Error(), "testdata/src/README.txt is not a Go source file"; got != want { - t.Errorf("query error was %q, want %q", got, want) - } -} diff --git a/cmd/guru/implements.go b/cmd/guru/implements.go deleted file mode 100644 index 9e4d0dba6ee..00000000000 --- a/cmd/guru/implements.go +++ /dev/null @@ -1,358 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package main - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "reflect" - "sort" - "strings" - - "golang.org/x/tools/cmd/guru/serial" - "golang.org/x/tools/go/loader" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/refactor/importgraph" -) - -// The implements function displays the "implements" relation as it pertains to the -// selected type. -// If the selection is a method, 'implements' displays -// the corresponding methods of the types that would have been reported -// by an implements query on the receiver type. 
-func implements(q *Query) error { - lconf := loader.Config{Build: q.Build} - allowErrors(&lconf) - - qpkg, err := importQueryPackage(q.Pos, &lconf) - if err != nil { - return err - } - - // Set the packages to search. - { - // Otherwise inspect the forward and reverse - // transitive closure of the selected package. - // (In theory even this is incomplete.) - _, rev, _ := importgraph.Build(q.Build) - for path := range rev.Search(qpkg) { - lconf.ImportWithTests(path) - } - - // TODO(adonovan): for completeness, we should also - // type-check and inspect function bodies in all - // imported packages. This would be expensive, but we - // could optimize by skipping functions that do not - // contain type declarations. This would require - // changing the loader's TypeCheckFuncBodies hook to - // provide the []*ast.File. - } - - // Load/parse/type-check the program. - lprog, err := lconf.Load() - if err != nil { - return err - } - - qpos, err := parseQueryPos(lprog, q.Pos, false) - if err != nil { - return err - } - - // Find the selected type. - path, action := findInterestingNode(qpos.info, qpos.path) - - var method *types.Func - var T types.Type // selected type (receiver if method != nil) - - switch action { - case actionExpr: - // method? - if id, ok := path[0].(*ast.Ident); ok { - if obj, ok := qpos.info.ObjectOf(id).(*types.Func); ok { - recv := obj.Type().(*types.Signature).Recv() - if recv == nil { - return fmt.Errorf("this function is not a method") - } - method = obj - T = recv.Type() - } - } - - // If not a method, use the expression's type. - if T == nil { - T = qpos.info.TypeOf(path[0].(ast.Expr)) - } - - case actionType: - T = qpos.info.TypeOf(path[0].(ast.Expr)) - } - if T == nil { - return fmt.Errorf("not a type, method, or value") - } - - // Find all named types, even local types (which can have - // methods due to promotion) and the built-in "error". - // We ignore aliases 'type M = N' to avoid duplicate - // reporting of the Named type N. 
- var allNamed []*types.Named - for _, info := range lprog.AllPackages { - for _, obj := range info.Defs { - if obj, ok := obj.(*types.TypeName); ok && !isAlias(obj) { - if named, ok := obj.Type().(*types.Named); ok { - allNamed = append(allNamed, named) - } - } - } - } - allNamed = append(allNamed, types.Universe.Lookup("error").Type().(*types.Named)) - - var msets typeutil.MethodSetCache - - // Test each named type. - var to, from, fromPtr []types.Type - for _, U := range allNamed { - if isInterface(T) { - if msets.MethodSet(T).Len() == 0 { - continue // empty interface - } - if isInterface(U) { - if msets.MethodSet(U).Len() == 0 { - continue // empty interface - } - - // T interface, U interface - if !types.Identical(T, U) { - if types.AssignableTo(U, T) { - to = append(to, U) - } - if types.AssignableTo(T, U) { - from = append(from, U) - } - } - } else { - // T interface, U concrete - if types.AssignableTo(U, T) { - to = append(to, U) - } else if pU := types.NewPointer(U); types.AssignableTo(pU, T) { - to = append(to, pU) - } - } - } else if isInterface(U) { - if msets.MethodSet(U).Len() == 0 { - continue // empty interface - } - - // T concrete, U interface - if types.AssignableTo(T, U) { - from = append(from, U) - } else if pT := types.NewPointer(T); types.AssignableTo(pT, U) { - fromPtr = append(fromPtr, U) - } - } - } - - var pos interface{} = qpos - if nt, ok := deref(T).(*types.Named); ok { - pos = nt.Obj() - } - - // Sort types (arbitrarily) to ensure test determinism. 
- sort.Sort(typesByString(to)) - sort.Sort(typesByString(from)) - sort.Sort(typesByString(fromPtr)) - - var toMethod, fromMethod, fromPtrMethod []*types.Selection // contain nils - if method != nil { - for _, t := range to { - toMethod = append(toMethod, - types.NewMethodSet(t).Lookup(method.Pkg(), method.Name())) - } - for _, t := range from { - fromMethod = append(fromMethod, - types.NewMethodSet(t).Lookup(method.Pkg(), method.Name())) - } - for _, t := range fromPtr { - fromPtrMethod = append(fromPtrMethod, - types.NewMethodSet(t).Lookup(method.Pkg(), method.Name())) - } - } - - q.Output(lprog.Fset, &implementsResult{ - qpos, T, pos, to, from, fromPtr, method, toMethod, fromMethod, fromPtrMethod, - }) - return nil -} - -type implementsResult struct { - qpos *queryPos - - t types.Type // queried type (not necessarily named) - pos interface{} // pos of t (*types.Name or *QueryPos) - to []types.Type // named or ptr-to-named types assignable to interface T - from []types.Type // named interfaces assignable from T - fromPtr []types.Type // named interfaces assignable only from *T - - // if a method was queried: - method *types.Func // queried method - toMethod []*types.Selection // method of type to[i], if any - fromMethod []*types.Selection // method of type from[i], if any - fromPtrMethod []*types.Selection // method of type fromPtrMethod[i], if any -} - -func (r *implementsResult) PrintPlain(printf printfFunc) { - relation := "is implemented by" - - meth := func(sel *types.Selection) { - if sel != nil { - printf(sel.Obj(), "\t%s method (%s).%s", - relation, r.qpos.typeString(sel.Recv()), sel.Obj().Name()) - } - } - - if isInterface(r.t) { - if types.NewMethodSet(r.t).Len() == 0 { // TODO(adonovan): cache mset - printf(r.pos, "empty interface type %s", r.qpos.typeString(r.t)) - return - } - - if r.method == nil { - printf(r.pos, "interface type %s", r.qpos.typeString(r.t)) - } else { - printf(r.method, "abstract method %s", r.qpos.objectString(r.method)) - } - - // 
Show concrete types (or methods) first; use two passes. - for i, sub := range r.to { - if !isInterface(sub) { - if r.method == nil { - printf(deref(sub).(*types.Named).Obj(), "\t%s %s type %s", - relation, typeKind(sub), r.qpos.typeString(sub)) - } else { - meth(r.toMethod[i]) - } - } - } - for i, sub := range r.to { - if isInterface(sub) { - if r.method == nil { - printf(sub.(*types.Named).Obj(), "\t%s %s type %s", - relation, typeKind(sub), r.qpos.typeString(sub)) - } else { - meth(r.toMethod[i]) - } - } - } - - relation = "implements" - for i, super := range r.from { - if r.method == nil { - printf(super.(*types.Named).Obj(), "\t%s %s", - relation, r.qpos.typeString(super)) - } else { - meth(r.fromMethod[i]) - } - } - } else { - relation = "implements" - - if r.from != nil { - if r.method == nil { - printf(r.pos, "%s type %s", - typeKind(r.t), r.qpos.typeString(r.t)) - } else { - printf(r.method, "concrete method %s", - r.qpos.objectString(r.method)) - } - for i, super := range r.from { - if r.method == nil { - printf(super.(*types.Named).Obj(), "\t%s %s", - relation, r.qpos.typeString(super)) - } else { - meth(r.fromMethod[i]) - } - } - } - if r.fromPtr != nil { - if r.method == nil { - printf(r.pos, "pointer type *%s", r.qpos.typeString(r.t)) - } else { - // TODO(adonovan): de-dup (C).f and (*C).f implementing (I).f. 
- printf(r.method, "concrete method %s", - r.qpos.objectString(r.method)) - } - - for i, psuper := range r.fromPtr { - if r.method == nil { - printf(psuper.(*types.Named).Obj(), "\t%s %s", - relation, r.qpos.typeString(psuper)) - } else { - meth(r.fromPtrMethod[i]) - } - } - } else if r.from == nil { - printf(r.pos, "%s type %s implements only interface{}", - typeKind(r.t), r.qpos.typeString(r.t)) - } - } -} - -func (r *implementsResult) JSON(fset *token.FileSet) []byte { - var method *serial.DescribeMethod - if r.method != nil { - method = &serial.DescribeMethod{ - Name: r.qpos.objectString(r.method), - Pos: fset.Position(r.method.Pos()).String(), - } - } - return toJSON(&serial.Implements{ - T: makeImplementsType(r.t, fset), - AssignableTo: makeImplementsTypes(r.to, fset), - AssignableFrom: makeImplementsTypes(r.from, fset), - AssignableFromPtr: makeImplementsTypes(r.fromPtr, fset), - AssignableToMethod: methodsToSerial(r.qpos.info.Pkg, r.toMethod, fset), - AssignableFromMethod: methodsToSerial(r.qpos.info.Pkg, r.fromMethod, fset), - AssignableFromPtrMethod: methodsToSerial(r.qpos.info.Pkg, r.fromPtrMethod, fset), - Method: method, - }) - -} - -func makeImplementsTypes(tt []types.Type, fset *token.FileSet) []serial.ImplementsType { - var r []serial.ImplementsType - for _, t := range tt { - r = append(r, makeImplementsType(t, fset)) - } - return r -} - -func makeImplementsType(T types.Type, fset *token.FileSet) serial.ImplementsType { - var pos token.Pos - if nt, ok := deref(T).(*types.Named); ok { // implementsResult.t may be non-named - pos = nt.Obj().Pos() - } - return serial.ImplementsType{ - Name: T.String(), - Pos: fset.Position(pos).String(), - Kind: typeKind(T), - } -} - -// typeKind returns a string describing the underlying kind of type, -// e.g. "slice", "array", "struct". 
-func typeKind(T types.Type) string { - s := reflect.TypeOf(T.Underlying()).String() - return strings.ToLower(strings.TrimPrefix(s, "*types.")) -} - -func isInterface(T types.Type) bool { return types.IsInterface(T) } - -type typesByString []types.Type - -func (p typesByString) Len() int { return len(p) } -func (p typesByString) Less(i, j int) bool { return p[i].String() < p[j].String() } -func (p typesByString) Swap(i, j int) { p[i], p[j] = p[j], p[i] } diff --git a/cmd/guru/isAlias18.go b/cmd/guru/isAlias18.go deleted file mode 100644 index 6d9101735d1..00000000000 --- a/cmd/guru/isAlias18.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.9 -// +build !go1.9 - -package main - -import "go/types" - -func isAlias(obj *types.TypeName) bool { - return false // there are no type aliases before Go 1.9 -} - -const HasAlias = false diff --git a/cmd/guru/isAlias19.go b/cmd/guru/isAlias19.go deleted file mode 100644 index 4d6367996b1..00000000000 --- a/cmd/guru/isAlias19.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.9 -// +build go1.9 - -package main - -import "go/types" - -func isAlias(obj *types.TypeName) bool { - return obj.IsAlias() -} - -const HasAlias = true diff --git a/cmd/guru/main.go b/cmd/guru/main.go deleted file mode 100644 index 283b1db7a64..00000000000 --- a/cmd/guru/main.go +++ /dev/null @@ -1,191 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// guru: a tool for answering questions about Go source code. 
-// -// http://golang.org/s/using-guru -// -// Run with -help flag or help subcommand for usage information. -package main // import "golang.org/x/tools/cmd/guru" - -import ( - "flag" - "fmt" - "go/build" - "go/token" - "log" - "os" - "path/filepath" - "runtime" - "runtime/pprof" - "sync" - - "golang.org/x/tools/go/buildutil" -) - -// flags -var ( - modifiedFlag = flag.Bool("modified", false, "read archive of modified files from standard input") - scopeFlag = flag.String("scope", "", "comma-separated list of `packages` the analysis should be limited to") - ptalogFlag = flag.String("ptalog", "", "write points-to analysis log to `file`") - jsonFlag = flag.Bool("json", false, "emit output in JSON format") - reflectFlag = flag.Bool("reflect", false, "analyze reflection soundly (slow)") - cpuprofileFlag = flag.String("cpuprofile", "", "write CPU profile to `file`") -) - -func init() { - flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc) - - // gccgo does not provide a GOROOT with standard library sources. - // If we have one in the environment, force gc mode. - if build.Default.Compiler == "gccgo" { - if _, err := os.Stat(filepath.Join(runtime.GOROOT(), "src", "runtime", "runtime.go")); err == nil { - build.Default.Compiler = "gc" - } - } -} - -const useHelp = "Run 'guru -help' for more information.\n" - -const helpMessage = `Go source code guru. 
-Usage: guru [flags] - -The mode argument determines the query to perform: - - callees show possible targets of selected function call - callers show possible callers of selected function - callstack show path from callgraph root to selected function - definition show declaration of selected identifier - describe describe selected syntax: definition, methods, etc - freevars show free variables of selection - implements show 'implements' relation for selected type or method - peers show send/receive corresponding to selected channel op - referrers show all refs to entity denoted by selected identifier - what show basic information about the selected syntax node - -The position argument specifies the filename and byte offset (or range) -of the syntax element to query. For example: - - foo.go:#123,#128 - bar.go:#123 - -The -json flag causes guru to emit output in JSON format; - golang.org/x/tools/cmd/guru/serial defines its schema. - Otherwise, the output is in an editor-friendly format in which - every line has the form "pos: text", where pos is "-" if unknown. - -The -modified flag causes guru to read an archive from standard input. - Files in this archive will be used in preference to those in - the file system. In this way, a text editor may supply guru - with the contents of its unsaved buffers. Each archive entry - consists of the file name, a newline, the decimal file size, - another newline, and the contents of the file. - -The -scope flag restricts analysis to the specified packages. - Its value is a comma-separated list of patterns of these forms: - golang.org/x/tools/cmd/guru # a single package - golang.org/x/tools/... # all packages beneath dir - ... # the entire workspace. - A pattern preceded by '-' is negative, so the scope - encoding/...,-encoding/xml - matches all encoding packages except encoding/xml. 
- -User manual: http://golang.org/s/using-guru - -Example: describe syntax at offset 530 in this file (an import spec): - - $ guru describe src/golang.org/x/tools/cmd/guru/main.go:#530 -` - -func printHelp() { - fmt.Fprint(os.Stderr, helpMessage) - fmt.Fprintln(os.Stderr, "\nFlags:") - flag.PrintDefaults() -} - -func main() { - log.SetPrefix("guru: ") - log.SetFlags(0) - - // Don't print full help unless -help was requested. - // Just gently remind users that it's there. - flag.Usage = func() { fmt.Fprint(os.Stderr, useHelp) } - flag.CommandLine.Init(os.Args[0], flag.ContinueOnError) // hack - if err := flag.CommandLine.Parse(os.Args[1:]); err != nil { - // (err has already been printed) - if err == flag.ErrHelp { - printHelp() - } - os.Exit(2) - } - - args := flag.Args() - if len(args) != 2 { - flag.Usage() - os.Exit(2) - } - mode, posn := args[0], args[1] - - if mode == "help" { - printHelp() - os.Exit(2) - } - - // Profiling support. - if *cpuprofileFlag != "" { - f, err := os.Create(*cpuprofileFlag) - if err != nil { - log.Fatal(err) - } - pprof.StartCPUProfile(f) - defer pprof.StopCPUProfile() - } - - ctxt := &build.Default - - // If there were modified files, - // read them from the standard input and - // overlay them on the build context. - if *modifiedFlag { - modified, err := buildutil.ParseOverlayArchive(os.Stdin) - if err != nil { - log.Fatal(err) - } - - // All I/O done by guru needs to consult the modified map. - // The ReadFile done by referrers does, - // but the loader's cgo preprocessing currently does not. - - if len(modified) > 0 { - ctxt = buildutil.OverlayContext(ctxt, modified) - } - } - - var outputMu sync.Mutex - output := func(fset *token.FileSet, qr QueryResult) { - outputMu.Lock() - defer outputMu.Unlock() - if *jsonFlag { - // JSON output - fmt.Printf("%s\n", qr.JSON(fset)) - } else { - // plain output - printf := func(pos interface{}, format string, args ...interface{}) { - fprintf(os.Stdout, fset, pos, format, args...) 
- } - qr.PrintPlain(printf) - } - } - - // Ask the guru. - query := Query{ - Pos: posn, - Build: ctxt, - Output: output, - } - - if err := Run(mode, &query); err != nil { - log.Fatal(err) - } -} diff --git a/cmd/guru/pos.go b/cmd/guru/pos.go deleted file mode 100644 index 9ae4d16b63d..00000000000 --- a/cmd/guru/pos.go +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package main - -// This file defines utilities for working with file positions. - -import ( - "fmt" - "go/build" - "go/parser" - "go/token" - "os" - "path/filepath" - "strconv" - "strings" - - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/buildutil" -) - -// parseOctothorpDecimal returns the numeric value if s matches "#%d", -// otherwise -1. -func parseOctothorpDecimal(s string) int { - if s != "" && s[0] == '#' { - if s, err := strconv.ParseInt(s[1:], 10, 32); err == nil { - return int(s) - } - } - return -1 -} - -// parsePos parses a string of the form "file:pos" or -// file:start,end" where pos, start, end match #%d and represent byte -// offsets, and returns its components. -// -// (Numbers without a '#' prefix are reserved for future use, -// e.g. to indicate line/column positions.) -func parsePos(pos string) (filename string, startOffset, endOffset int, err error) { - if pos == "" { - err = fmt.Errorf("no source position specified") - return - } - - colon := strings.LastIndex(pos, ":") - if colon < 0 { - err = fmt.Errorf("bad position syntax %q", pos) - return - } - filename, offset := pos[:colon], pos[colon+1:] - startOffset = -1 - endOffset = -1 - if comma := strings.Index(offset, ","); comma < 0 { - // e.g. "foo.go:#123" - startOffset = parseOctothorpDecimal(offset) - endOffset = startOffset - } else { - // e.g. 
"foo.go:#123,#456" - startOffset = parseOctothorpDecimal(offset[:comma]) - endOffset = parseOctothorpDecimal(offset[comma+1:]) - } - if startOffset < 0 || endOffset < 0 { - err = fmt.Errorf("invalid offset %q in query position", offset) - return - } - return -} - -// fileOffsetToPos translates the specified file-relative byte offsets -// into token.Pos form. It returns an error if the file was not found -// or the offsets were out of bounds. -func fileOffsetToPos(file *token.File, startOffset, endOffset int) (start, end token.Pos, err error) { - // Range check [start..end], inclusive of both end-points. - - if 0 <= startOffset && startOffset <= file.Size() { - start = file.Pos(int(startOffset)) - } else { - err = fmt.Errorf("start position is beyond end of file") - return - } - - if 0 <= endOffset && endOffset <= file.Size() { - end = file.Pos(int(endOffset)) - } else { - err = fmt.Errorf("end position is beyond end of file") - return - } - - return -} - -// sameFile returns true if x and y have the same basename and denote -// the same file. -func sameFile(x, y string) bool { - if filepath.Base(x) == filepath.Base(y) { // (optimisation) - if xi, err := os.Stat(x); err == nil { - if yi, err := os.Stat(y); err == nil { - return os.SameFile(xi, yi) - } - } - } - return false -} - -// fastQueryPos parses the position string and returns a queryPos. -// It parses only a single file and does not run the type checker. -func fastQueryPos(ctxt *build.Context, pos string) (*queryPos, error) { - filename, startOffset, endOffset, err := parsePos(pos) - if err != nil { - return nil, err - } - - // Parse the file, opening it the file via the build.Context - // so that we observe the effects of the -modified flag. - fset := token.NewFileSet() - cwd, _ := os.Getwd() - f, err := buildutil.ParseFile(fset, ctxt, nil, cwd, filename, parser.Mode(0)) - // ParseFile usually returns a partial file along with an error. - // Only fail if there is no file. 
- if f == nil { - return nil, err - } - if !f.Pos().IsValid() { - return nil, fmt.Errorf("%s is not a Go source file", filename) - } - - start, end, err := fileOffsetToPos(fset.File(f.Pos()), startOffset, endOffset) - if err != nil { - return nil, err - } - - path, exact := astutil.PathEnclosingInterval(f, start, end) - if path == nil { - return nil, fmt.Errorf("no syntax here") - } - - return &queryPos{fset, start, end, path, exact, nil}, nil -} diff --git a/cmd/guru/referrers.go b/cmd/guru/referrers.go deleted file mode 100644 index 70db3d1841a..00000000000 --- a/cmd/guru/referrers.go +++ /dev/null @@ -1,801 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package main - -import ( - "bytes" - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/token" - "go/types" - "io" - "log" - "os" - "sort" - "strconv" - "strings" - "sync" - - "golang.org/x/tools/cmd/guru/serial" - "golang.org/x/tools/go/buildutil" - "golang.org/x/tools/go/loader" - "golang.org/x/tools/imports" - "golang.org/x/tools/refactor/importgraph" -) - -// The referrers function reports all identifiers that resolve to the same object -// as the queried identifier, within any package in the workspace. -func referrers(q *Query) error { - fset := token.NewFileSet() - lconf := loader.Config{Fset: fset, Build: q.Build} - allowErrors(&lconf) - - if _, err := importQueryPackage(q.Pos, &lconf); err != nil { - return err - } - - // Load tests of the query package - // even if the query location is not in the tests. - for path := range lconf.ImportPkgs { - lconf.ImportPkgs[path] = true - } - - // Load/parse/type-check the query package. 
- lprog, err := lconf.Load() - if err != nil { - return err - } - - qpos, err := parseQueryPos(lprog, q.Pos, false) - if err != nil { - return err - } - - id, _ := qpos.path[0].(*ast.Ident) - if id == nil { - return fmt.Errorf("no identifier here") - } - - obj := qpos.info.ObjectOf(id) - if obj == nil { - // Happens for y in "switch y := x.(type)", - // the package declaration, - // and unresolved identifiers. - if _, ok := qpos.path[1].(*ast.File); ok { // package decl? - return packageReferrers(q, qpos.info.Pkg.Path()) - } - return fmt.Errorf("no object for identifier: %T", qpos.path[1]) - } - - // Imported package name? - if pkgname, ok := obj.(*types.PkgName); ok { - return packageReferrers(q, pkgname.Imported().Path()) - } - - if obj.Pkg() == nil { - return fmt.Errorf("references to predeclared %q are everywhere!", obj.Name()) - } - - q.Output(fset, &referrersInitialResult{ - qinfo: qpos.info, - obj: obj, - }) - - // For a globally accessible object defined in package P, we - // must load packages that depend on P. Specifically, for a - // package-level object, we need load only direct importers - // of P, but for a field or method, we must load - // any package that transitively imports P. - - if global, pkglevel := classify(obj); global { - if pkglevel { - return globalReferrersPkgLevel(q, obj, fset) - } - // We'll use the object's position to identify it in the larger program. - objposn := fset.Position(obj.Pos()) - defpkg := obj.Pkg().Path() // defining package - return globalReferrers(q, qpos.info.Pkg.Path(), defpkg, objposn) - } - - outputUses(q, fset, usesOf(obj, qpos.info), obj.Pkg()) - - return nil // success -} - -// classify classifies objects by how far -// we have to look to find references to them. 
-func classify(obj types.Object) (global, pkglevel bool) { - if obj.Exported() { - if obj.Parent() == nil { - // selectable object (field or method) - return true, false - } - if obj.Parent() == obj.Pkg().Scope() { - // lexical object (package-level var/const/func/type) - return true, true - } - } - // object with unexported named or defined in local scope - return false, false -} - -// packageReferrers reports all references to the specified package -// throughout the workspace. -func packageReferrers(q *Query, path string) error { - // Scan the workspace and build the import graph. - // Ignore broken packages. - _, rev, _ := importgraph.Build(q.Build) - - // Find the set of packages that directly import the query package. - // Only those packages need typechecking of function bodies. - users := rev[path] - - // Load the larger program. - fset := token.NewFileSet() - lconf := loader.Config{ - Fset: fset, - Build: q.Build, - TypeCheckFuncBodies: func(p string) bool { - return users[strings.TrimSuffix(p, "_test")] - }, - } - allowErrors(&lconf) - - // The importgraph doesn't treat external test packages - // as separate nodes, so we must use ImportWithTests. - for path := range users { - lconf.ImportWithTests(path) - } - - // Subtle! AfterTypeCheck needs no mutex for qpkg because the - // topological import order gives us the necessary happens-before edges. - // TODO(adonovan): what about import cycles? - var qpkg *types.Package - - // For efficiency, we scan each package for references - // just after it has been type-checked. The loader calls - // AfterTypeCheck (concurrently), providing us with a stream of - // packages. - lconf.AfterTypeCheck = func(info *loader.PackageInfo, files []*ast.File) { - // AfterTypeCheck may be called twice for the same package due to augmentation. - - if info.Pkg.Path() == path && qpkg == nil { - // Found the package of interest. 
- qpkg = info.Pkg - fakepkgname := types.NewPkgName(token.NoPos, qpkg, qpkg.Name(), qpkg) - q.Output(fset, &referrersInitialResult{ - qinfo: info, - obj: fakepkgname, // bogus - }) - } - - // Only inspect packages that directly import the - // declaring package (and thus were type-checked). - if lconf.TypeCheckFuncBodies(info.Pkg.Path()) { - // Find PkgNames that refer to qpkg. - // TODO(adonovan): perhaps more useful would be to show imports - // of the package instead of qualified identifiers. - var refs []*ast.Ident - for id, obj := range info.Uses { - if obj, ok := obj.(*types.PkgName); ok && obj.Imported() == qpkg { - refs = append(refs, id) - } - } - outputUses(q, fset, refs, info.Pkg) - } - - clearInfoFields(info) // save memory - } - - lconf.Load() // ignore error - - if qpkg == nil { - log.Fatalf("query package %q not found during reloading", path) - } - - return nil -} - -func usesOf(queryObj types.Object, info *loader.PackageInfo) []*ast.Ident { - var refs []*ast.Ident - for id, obj := range info.Uses { - if sameObj(queryObj, obj) { - refs = append(refs, id) - } - } - return refs -} - -// outputUses outputs a result describing refs, which appear in the package denoted by info. -func outputUses(q *Query, fset *token.FileSet, refs []*ast.Ident, pkg *types.Package) { - if len(refs) > 0 { - sort.Sort(byNamePos{fset, refs}) - q.Output(fset, &referrersPackageResult{ - pkg: pkg, - build: q.Build, - fset: fset, - refs: refs, - }) - } -} - -// globalReferrers reports references throughout the entire workspace to the -// object (a field or method) at the specified source position. -// Its defining package is defpkg, and the query package is qpkg. -func globalReferrers(q *Query, qpkg, defpkg string, objposn token.Position) error { - // Scan the workspace and build the import graph. - // Ignore broken packages. - _, rev, _ := importgraph.Build(q.Build) - - // Find the set of packages that depend on defpkg. 
- // Only function bodies in those packages need type-checking. - users := rev.Search(defpkg) // transitive importers - - // Prepare to load the larger program. - fset := token.NewFileSet() - lconf := loader.Config{ - Fset: fset, - Build: q.Build, - TypeCheckFuncBodies: func(p string) bool { - return users[strings.TrimSuffix(p, "_test")] - }, - } - allowErrors(&lconf) - - // The importgraph doesn't treat external test packages - // as separate nodes, so we must use ImportWithTests. - for path := range users { - lconf.ImportWithTests(path) - } - - // The remainder of this function is somewhat tricky because it - // operates on the concurrent stream of packages observed by the - // loader's AfterTypeCheck hook. Most of guru's helper - // functions assume the entire program has already been loaded, - // so we can't use them here. - // TODO(adonovan): smooth things out once the other changes have landed. - - // Results are reported concurrently from within the - // AfterTypeCheck hook. The program may provide a useful stream - // of information even if the user doesn't let the program run - // to completion. - - var ( - mu sync.Mutex - qobj types.Object - ) - - // For efficiency, we scan each package for references - // just after it has been type-checked. The loader calls - // AfterTypeCheck (concurrently), providing us with a stream of - // packages. - lconf.AfterTypeCheck = func(info *loader.PackageInfo, files []*ast.File) { - // AfterTypeCheck may be called twice for the same package due to augmentation. - - // Only inspect packages that depend on the declaring package - // (and thus were type-checked). - if lconf.TypeCheckFuncBodies(info.Pkg.Path()) { - // Record the query object and its package when we see it. - mu.Lock() - if qobj == nil && info.Pkg.Path() == defpkg { - // Find the object by its position (slightly ugly). - qobj = findObject(fset, &info.Info, objposn) - if qobj == nil { - // It really ought to be there; - // we found it once already. 
- log.Fatalf("object at %s not found in package %s", - objposn, defpkg) - } - } - obj := qobj - mu.Unlock() - - // Look for references to the query object. - if obj != nil { - outputUses(q, fset, usesOf(obj, info), info.Pkg) - } - } - - clearInfoFields(info) // save memory - } - - lconf.Load() // ignore error - - if qobj == nil { - log.Fatal("query object not found during reloading") - } - - return nil // success -} - -// globalReferrersPkgLevel reports references throughout the entire workspace to the package-level object obj. -// It assumes that the query object itself has already been reported. -func globalReferrersPkgLevel(q *Query, obj types.Object, fset *token.FileSet) error { - // globalReferrersPkgLevel uses go/ast and friends instead of go/types. - // This affords a considerable performance benefit. - // It comes at the cost of some code complexity. - // - // Here's a high level summary. - // - // The goal is to find references to the query object p.Q. - // There are several possible scenarios, each handled differently. - // - // 1. We are looking in a package other than p, and p is not dot-imported. - // This is the simplest case. Q must be referred to as n.Q, - // where n is the name under which p is imported. - // We look at all imports of p to gather all names under which it is imported. - // (In the typical case, it is imported only once, under its default name.) - // Then we look at all selector expressions and report any matches. - // - // 2. We are looking in a package other than p, and p is dot-imported. - // In this case, Q will be referred to just as Q. - // Furthermore, go/ast's object resolution will not be able to resolve - // Q to any other object, unlike any local (file- or function- or block-scoped) object. - // So we look at all matching identifiers and report all unresolvable ones. - // - // 3. We are looking in package p. 
- // (Care must be taken to separate p and p_test (an xtest package), - // and make sure that they are treated as separate packages.) - // In this case, we give go/ast the entire package for object resolution, - // instead of going file by file. - // We then iterate over all identifiers that resolve to the query object. - // (The query object itself has already been reported, so we don't re-report it.) - // - // We always skip all files that don't contain the string Q, as they cannot be - // relevant to finding references to Q. - // - // We parse all files leniently. In the presence of parsing errors, results are best-effort. - - // Scan the workspace and build the import graph. - // Ignore broken packages. - _, rev, _ := importgraph.Build(q.Build) - - // Find the set of packages that directly import defpkg. - defpkg := obj.Pkg().Path() - defpkg = strings.TrimSuffix(defpkg, "_test") // package x_test actually has package name x - defpkg = imports.VendorlessPath(defpkg) // remove vendor goop - - users := rev[defpkg] - if len(users) == 0 { - users = make(map[string]bool) - } - // We also need to check defpkg itself, and its xtests. - // For the reverse graph packages, we process xtests with the main package. - // defpkg gets special handling; we must distinguish between in-package vs out-of-package. - // To make the control flow below simpler, add defpkg and defpkg xtest placeholders. - // Use "!test" instead of "_test" because "!" is not a valid character in an import path. - // (More precisely, it is not guaranteed to be a valid character in an import path, - // so it is unlikely that it will be in use. See https://golang.org/ref/spec#Import_declarations.) 
- users[defpkg] = true - users[defpkg+"!test"] = true - - cwd, err := os.Getwd() - if err != nil { - return err - } - - defname := obj.Pkg().Name() // name of defining package, used for imports using import path only - isxtest := strings.HasSuffix(defname, "_test") // indicates whether the query object is defined in an xtest package - - name := obj.Name() - namebytes := []byte(name) // byte slice version of query object name, for early filtering - objpos := fset.Position(obj.Pos()) // position of query object, used to prevent re-emitting original decl - - sema := make(chan struct{}, 20) // counting semaphore to limit I/O concurrency - var wg sync.WaitGroup - - for u := range users { - u := u - wg.Add(1) - go func() { - defer wg.Done() - - uIsXTest := strings.HasSuffix(u, "!test") // indicates whether this package is the special defpkg xtest package - u = strings.TrimSuffix(u, "!test") - - // Resolve package. - sema <- struct{}{} // acquire token - pkg, err := q.Build.Import(u, cwd, build.IgnoreVendor) - <-sema // release token - if err != nil { - return - } - - // If we're not in the query package, - // the object is in another package regardless, - // so we want to process all files. - // If we are in the query package, - // we want to only process the files that are - // part of that query package; - // that set depends on whether the query package itself is an xtest. - inQueryPkg := u == defpkg && isxtest == uIsXTest - var files []string - if !inQueryPkg || !isxtest { - files = append(files, pkg.GoFiles...) - files = append(files, pkg.TestGoFiles...) - files = append(files, pkg.CgoFiles...) // use raw cgo files, as we're only parsing - } - if !inQueryPkg || isxtest { - files = append(files, pkg.XTestGoFiles...) 
- } - - if len(files) == 0 { - return - } - - var deffiles map[string]*ast.File - if inQueryPkg { - deffiles = make(map[string]*ast.File) - } - - buf := new(bytes.Buffer) // reusable buffer for reading files - - for _, file := range files { - if !buildutil.IsAbsPath(q.Build, file) { - file = buildutil.JoinPath(q.Build, pkg.Dir, file) - } - buf.Reset() - sema <- struct{}{} // acquire token - src, err := readFile(q.Build, file, buf) - <-sema // release token - if err != nil { - continue - } - - // Fast path: If the object's name isn't present anywhere in the source, ignore the file. - if !bytes.Contains(src, namebytes) { - continue - } - - if inQueryPkg { - // If we're in the query package, we defer final processing until we have - // parsed all of the candidate files in the package. - // Best effort; allow errors and use what we can from what remains. - f, _ := parser.ParseFile(fset, file, src, parser.AllErrors) - if f != nil { - deffiles[file] = f - } - continue - } - - // We aren't in the query package. Go file by file. - - // Parse out only the imports, to check whether the defining package - // was imported, and if so, under what names. - // Best effort; allow errors and use what we can from what remains. - f, _ := parser.ParseFile(fset, file, src, parser.ImportsOnly|parser.AllErrors) - if f == nil { - continue - } - - // pkgnames is the set of names by which defpkg is imported in this file. - // (Multiple imports in the same file are legal but vanishingly rare.) - pkgnames := make([]string, 0, 1) - var isdotimport bool - for _, imp := range f.Imports { - path, err := strconv.Unquote(imp.Path.Value) - if err != nil || path != defpkg { - continue - } - switch { - case imp.Name == nil: - pkgnames = append(pkgnames, defname) - case imp.Name.Name == ".": - isdotimport = true - default: - pkgnames = append(pkgnames, imp.Name.Name) - } - } - if len(pkgnames) == 0 && !isdotimport { - // Defining package not imported, bail. 
- continue - } - - // Re-parse the entire file. - // Parse errors are ok; we'll do the best we can with a partial AST, if we have one. - f, _ = parser.ParseFile(fset, file, src, parser.AllErrors) - if f == nil { - continue - } - - // Walk the AST looking for references. - var refs []*ast.Ident - ast.Inspect(f, func(n ast.Node) bool { - // Check selector expressions. - // If the selector matches the target name, - // and the expression is one of the names - // that the defining package was imported under, - // then we have a match. - if sel, ok := n.(*ast.SelectorExpr); ok && sel.Sel.Name == name { - if id, ok := sel.X.(*ast.Ident); ok { - for _, n := range pkgnames { - if n == id.Name { - refs = append(refs, sel.Sel) - // Don't recurse further, to avoid duplicate entries - // from the dot import check below. - return false - } - } - } - } - // Dot imports are special. - // Objects imported from the defining package are placed in the package scope. - // go/ast does not resolve them to an object. - // At all other scopes (file, local), go/ast can do the resolution. - // So we're looking for object-free idents with the right name. - // The only other way to get something with the right name at the package scope - // is to *be* the defining package. We handle that case separately (inQueryPkg). - if isdotimport { - if id, ok := n.(*ast.Ident); ok && id.Obj == nil && id.Name == name { - refs = append(refs, id) - return false - } - } - return true - }) - - // Emit any references we found. - if len(refs) > 0 { - q.Output(fset, &referrersPackageResult{ - pkg: types.NewPackage(pkg.ImportPath, pkg.Name), - build: q.Build, - fset: fset, - refs: refs, - }) - } - } - - // If we're in the query package, we've now collected all the files in the package. - // (Or at least the ones that might contain references to the object.) - // Find and emit refs. - if inQueryPkg { - // Bundle the files together into a package. - // This does package-level object resolution. 
- qpkg, _ := ast.NewPackage(fset, deffiles, nil, nil) - // Look up the query object; we know that it is defined in the package scope. - pkgobj := qpkg.Scope.Objects[name] - if pkgobj == nil { - panic("missing defpkg object for " + defpkg + "." + name) - } - // Find all references to the query object. - var refs []*ast.Ident - ast.Inspect(qpkg, func(n ast.Node) bool { - if id, ok := n.(*ast.Ident); ok { - // Check both that this is a reference to the query object - // and that it is not the query object itself; - // the query object itself was already emitted. - if id.Obj == pkgobj && objpos != fset.Position(id.Pos()) { - refs = append(refs, id) - return false - } - } - return true - }) - if len(refs) > 0 { - q.Output(fset, &referrersPackageResult{ - pkg: types.NewPackage(pkg.ImportPath, pkg.Name), - build: q.Build, - fset: fset, - refs: refs, - }) - } - deffiles = nil // allow GC - } - }() - } - - wg.Wait() - - return nil -} - -// findObject returns the object defined at the specified position. -func findObject(fset *token.FileSet, info *types.Info, objposn token.Position) types.Object { - good := func(obj types.Object) bool { - if obj == nil { - return false - } - posn := fset.Position(obj.Pos()) - return posn.Filename == objposn.Filename && posn.Offset == objposn.Offset - } - for _, obj := range info.Defs { - if good(obj) { - return obj - } - } - for _, obj := range info.Implicits { - if good(obj) { - return obj - } - } - return nil -} - -// same reports whether x and y are identical, or both are PkgNames -// that import the same Package. -func sameObj(x, y types.Object) bool { - if x == y { - return true - } - if x, ok := x.(*types.PkgName); ok { - if y, ok := y.(*types.PkgName); ok { - return x.Imported() == y.Imported() - } - } - return false -} - -func clearInfoFields(info *loader.PackageInfo) { - // TODO(adonovan): opt: save memory by eliminating unneeded scopes/objects. - // (Requires go/types change for Go 1.7.) 
- // info.Pkg.Scope().ClearChildren() - - // Discard the file ASTs and their accumulated type - // information to save memory. - info.Files = nil - info.Defs = make(map[*ast.Ident]types.Object) - info.Uses = make(map[*ast.Ident]types.Object) - info.Implicits = make(map[ast.Node]types.Object) - - // Also, disable future collection of wholly unneeded - // type information for the package in case there is - // more type-checking to do (augmentation). - info.Types = nil - info.Scopes = nil - info.Selections = nil -} - -// -------- utils -------- - -// An deterministic ordering for token.Pos that doesn't -// depend on the order in which packages were loaded. -func lessPos(fset *token.FileSet, x, y token.Pos) bool { - fx := fset.File(x) - fy := fset.File(y) - if fx != fy { - return fx.Name() < fy.Name() - } - return x < y -} - -type byNamePos struct { - fset *token.FileSet - ids []*ast.Ident -} - -func (p byNamePos) Len() int { return len(p.ids) } -func (p byNamePos) Swap(i, j int) { p.ids[i], p.ids[j] = p.ids[j], p.ids[i] } -func (p byNamePos) Less(i, j int) bool { - return lessPos(p.fset, p.ids[i].NamePos, p.ids[j].NamePos) -} - -// referrersInitialResult is the initial result of a "referrers" query. -type referrersInitialResult struct { - qinfo *loader.PackageInfo - obj types.Object // object it denotes -} - -func (r *referrersInitialResult) PrintPlain(printf printfFunc) { - printf(r.obj, "references to %s", - types.ObjectString(r.obj, types.RelativeTo(r.qinfo.Pkg))) -} - -func (r *referrersInitialResult) JSON(fset *token.FileSet) []byte { - var objpos string - if pos := r.obj.Pos(); pos.IsValid() { - objpos = fset.Position(pos).String() - } - return toJSON(&serial.ReferrersInitial{ - Desc: r.obj.String(), - ObjPos: objpos, - }) -} - -// referrersPackageResult is the streaming result for one package of a "referrers" query. 
-type referrersPackageResult struct { - pkg *types.Package - build *build.Context - fset *token.FileSet - refs []*ast.Ident // set of all other references to it -} - -// foreachRef calls f(id, text) for id in r.refs, in order. -// Text is the text of the line on which id appears. -func (r *referrersPackageResult) foreachRef(f func(id *ast.Ident, text string)) { - // Show referring lines, like grep. - type fileinfo struct { - refs []*ast.Ident - linenums []int // line number of refs[i] - data chan interface{} // file contents or error - } - var fileinfos []*fileinfo - fileinfosByName := make(map[string]*fileinfo) - - // First pass: start the file reads concurrently. - sema := make(chan struct{}, 20) // counting semaphore to limit I/O concurrency - for _, ref := range r.refs { - posn := r.fset.Position(ref.Pos()) - fi := fileinfosByName[posn.Filename] - if fi == nil { - fi = &fileinfo{data: make(chan interface{})} - fileinfosByName[posn.Filename] = fi - fileinfos = append(fileinfos, fi) - - // First request for this file: - // start asynchronous read. - go func() { - sema <- struct{}{} // acquire token - content, err := readFile(r.build, posn.Filename, nil) - <-sema // release token - if err != nil { - fi.data <- err - } else { - fi.data <- content - } - }() - } - fi.refs = append(fi.refs, ref) - fi.linenums = append(fi.linenums, posn.Line) - } - - // Second pass: print refs in original order. - // One line may have several refs at different columns. - for _, fi := range fileinfos { - v := <-fi.data // wait for I/O completion - - // Print one item for all refs in a file that could not - // be loaded (perhaps due to //line directives). 
- if err, ok := v.(error); ok { - var suffix string - if more := len(fi.refs) - 1; more > 0 { - suffix = fmt.Sprintf(" (+ %d more refs in this file)", more) - } - f(fi.refs[0], err.Error()+suffix) - continue - } - - lines := bytes.Split(v.([]byte), []byte("\n")) - for i, ref := range fi.refs { - f(ref, string(lines[fi.linenums[i]-1])) - } - } -} - -// readFile is like os.ReadFile, but -// it goes through the virtualized build.Context. -// If non-nil, buf must have been reset. -func readFile(ctxt *build.Context, filename string, buf *bytes.Buffer) ([]byte, error) { - rc, err := buildutil.OpenFile(ctxt, filename) - if err != nil { - return nil, err - } - defer rc.Close() - if buf == nil { - buf = new(bytes.Buffer) - } - if _, err := io.Copy(buf, rc); err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -func (r *referrersPackageResult) PrintPlain(printf printfFunc) { - r.foreachRef(func(id *ast.Ident, text string) { - printf(id, "%s", text) - }) -} - -func (r *referrersPackageResult) JSON(fset *token.FileSet) []byte { - refs := serial.ReferrersPackage{Package: r.pkg.Path()} - r.foreachRef(func(id *ast.Ident, text string) { - refs.Refs = append(refs.Refs, serial.Ref{ - Pos: fset.Position(id.NamePos).String(), - Text: text, - }) - }) - return toJSON(refs) -} diff --git a/cmd/guru/serial/serial.go b/cmd/guru/serial/serial.go deleted file mode 100644 index 3af7f4731f7..00000000000 --- a/cmd/guru/serial/serial.go +++ /dev/null @@ -1,251 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package serial defines the guru's schema for -json output. -// -// The output of a guru query is a stream of one or more JSON objects. -// This table shows the types of objects in the result stream for each -// query type. -// -// Query Result stream -// ----- ------------- -// definition Definition -// describe Describe -// freevars FreeVar ... 
-// implements Implements -// referrers ReferrersInitial ReferrersPackage ... -// what What -// -// All 'pos' strings in the output are of the form "file:line:col", -// where line is the 1-based line number and col is the 1-based byte index. -package serial - -// A Peers is the result of a 'peers' query. -// If Allocs is empty, the selected channel can't point to anything. -type Peers struct { - Pos string `json:"pos"` // location of the selected channel op (<-) - Type string `json:"type"` // type of the selected channel - Allocs []string `json:"allocs,omitempty"` // locations of aliased make(chan) ops - Sends []string `json:"sends,omitempty"` // locations of aliased ch<-x ops - Receives []string `json:"receives,omitempty"` // locations of aliased <-ch ops - Closes []string `json:"closes,omitempty"` // locations of aliased close(ch) ops -} - -// A "referrers" query emits a ReferrersInitial object followed by zero or -// more ReferrersPackage objects, one per package that contains a reference. -type ( - ReferrersInitial struct { - ObjPos string `json:"objpos,omitempty"` // location of the definition - Desc string `json:"desc"` // description of the denoted object - } - ReferrersPackage struct { - Package string `json:"package"` - Refs []Ref `json:"refs"` // non-empty list of references within this package - } - Ref struct { - Pos string `json:"pos"` // location of all references - Text string `json:"text"` // text of the referring line - } -) - -// A Definition is the result of a 'definition' query. -type Definition struct { - ObjPos string `json:"objpos,omitempty"` // location of the definition - Desc string `json:"desc"` // description of the denoted object -} - -// A Callees is the result of a 'callees' query. -// -// Callees is nonempty unless the call was a dynamic call on a -// provably nil func or interface value. 
-type ( - Callees struct { - Pos string `json:"pos"` // location of selected call site - Desc string `json:"desc"` // description of call site - Callees []*Callee `json:"callees"` - } - Callee struct { - Name string `json:"name"` // full name of called function - Pos string `json:"pos"` // location of called function - } -) - -// A Caller is one element of the slice returned by a 'callers' query. -// (Callstack also contains a similar slice.) -// -// The root of the callgraph has an unspecified "Caller" string. -type Caller struct { - Pos string `json:"pos,omitempty"` // location of the calling function - Desc string `json:"desc"` // description of call site - Caller string `json:"caller"` // full name of calling function -} - -// A CallStack is the result of a 'callstack' query. -// It indicates an arbitrary path from the root of the callgraph to -// the query function. -// -// If the Callers slice is empty, the function was unreachable in this -// analysis scope. -type CallStack struct { - Pos string `json:"pos"` // location of the selected function - Target string `json:"target"` // the selected function - Callers []Caller `json:"callers"` // enclosing calls, innermost first. -} - -// A FreeVar is one element of the slice returned by a 'freevars' -// query. Each one identifies an expression referencing a local -// identifier defined outside the selected region. -type FreeVar struct { - Pos string `json:"pos"` // location of the identifier's definition - Kind string `json:"kind"` // one of {var,func,type,const,label} - Ref string `json:"ref"` // referring expression (e.g. "x" or "x.y.z") - Type string `json:"type"` // type of the expression -} - -// An Implements contains the result of an 'implements' query. -// It describes the queried type, the set of named non-empty interface -// types to which it is assignable, and the set of named/*named types -// (concrete or non-empty interface) which may be assigned to it. 
-type Implements struct { - T ImplementsType `json:"type,omitempty"` // the queried type - AssignableTo []ImplementsType `json:"to,omitempty"` // types assignable to T - AssignableFrom []ImplementsType `json:"from,omitempty"` // interface types assignable from T - AssignableFromPtr []ImplementsType `json:"fromptr,omitempty"` // interface types assignable only from *T - - // The following fields are set only if the query was a method. - // Assignable{To,From,FromPtr}Method[i] is the corresponding - // method of type Assignable{To,From,FromPtr}[i], or blank - // {"",""} if that type lacks the method. - Method *DescribeMethod `json:"method,omitempty"` // the queried method - AssignableToMethod []DescribeMethod `json:"to_method,omitempty"` - AssignableFromMethod []DescribeMethod `json:"from_method,omitempty"` - AssignableFromPtrMethod []DescribeMethod `json:"fromptr_method,omitempty"` -} - -// An ImplementsType describes a single type as part of an 'implements' query. -type ImplementsType struct { - Name string `json:"name"` // full name of the type - Pos string `json:"pos"` // location of its definition - Kind string `json:"kind"` // "basic", "array", etc -} - -// A SyntaxNode is one element of a stack of enclosing syntax nodes in -// a "what" query. -type SyntaxNode struct { - Description string `json:"desc"` // description of syntax tree - Start int `json:"start"` // start byte offset, 0-based - End int `json:"end"` // end byte offset -} - -// A What is the result of the "what" query, which quickly identifies -// the selection, parsing only a single file. It is intended for use -// in low-latency GUIs. -type What struct { - Enclosing []SyntaxNode `json:"enclosing"` // enclosing nodes of syntax tree - Modes []string `json:"modes"` // query modes enabled for this selection. 
- SrcDir string `json:"srcdir,omitempty"` // $GOROOT src directory containing queried package - ImportPath string `json:"importpath,omitempty"` // import path of queried package - Object string `json:"object,omitempty"` // name of identified object, if any - SameIDs []string `json:"sameids,omitempty"` // locations of references to same object -} - -// A PointsToLabel describes a pointer analysis label. -// -// A "label" is an object that may be pointed to by a pointer, map, -// channel, 'func', slice or interface. Labels include: -// - functions -// - globals -// - arrays created by literals (e.g. []byte("foo")) and conversions ([]byte(s)) -// - stack- and heap-allocated variables (including composite literals) -// - arrays allocated by append() -// - channels, maps and arrays created by make() -// - and their subelements, e.g. "alloc.y[*].z" -type PointsToLabel struct { - Pos string `json:"pos"` // location of syntax that allocated the object - Desc string `json:"desc"` // description of the label -} - -// A PointsTo is one element of the result of a 'pointsto' query on an -// expression. It describes a single pointer: its type and the set of -// "labels" it points to. -// -// If the pointer is of interface type, it will have one PTS entry -// describing each concrete type that it may contain. For each -// concrete type that is a pointer, the PTS entry describes the labels -// it may point to. The same is true for reflect.Values, except the -// dynamic types needn't be concrete. -type PointsTo struct { - Type string `json:"type"` // (concrete) type of the pointer - NamePos string `json:"namepos,omitempty"` // location of type defn, if Named - Labels []PointsToLabel `json:"labels,omitempty"` // pointed-to objects -} - -// A DescribeValue is the additional result of a 'describe' query -// if the selection indicates a value or expression. 
-type DescribeValue struct { - Type string `json:"type"` // type of the expression - Value string `json:"value,omitempty"` // value of the expression, if constant - ObjPos string `json:"objpos,omitempty"` // location of the definition, if an Ident - TypesPos []Definition `json:"typespos,omitempty"` // location of the named types, that type consist of -} - -type DescribeMethod struct { - Name string `json:"name"` // method name, as defined by types.Selection.String() - Pos string `json:"pos"` // location of the method's definition -} - -// A DescribeType is the additional result of a 'describe' query -// if the selection indicates a type. -type DescribeType struct { - Type string `json:"type"` // the string form of the type - NamePos string `json:"namepos,omitempty"` // location of definition of type, if named - NameDef string `json:"namedef,omitempty"` // underlying definition of type, if named - Methods []DescribeMethod `json:"methods,omitempty"` // methods of the type -} - -type DescribeMember struct { - Name string `json:"name"` // name of member - Type string `json:"type,omitempty"` // type of member (underlying, if 'type') - Value string `json:"value,omitempty"` // value of member (if 'const') - Pos string `json:"pos"` // location of definition of member - Kind string `json:"kind"` // one of {var,const,func,type} - Methods []DescribeMethod `json:"methods,omitempty"` // methods (if member is a type) -} - -// A DescribePackage is the additional result of a 'describe' if -// the selection indicates a package. -type DescribePackage struct { - Path string `json:"path"` // import path of the package - Members []*DescribeMember `json:"members,omitempty"` // accessible members of the package -} - -// A Describe is the result of a 'describe' query. -// It may contain an element describing the selected semantic entity -// in detail. 
-type Describe struct { - Desc string `json:"desc"` // description of the selected syntax node - Pos string `json:"pos"` // location of the selected syntax node - Detail string `json:"detail,omitempty"` // one of {package, type, value}, or "". - - // At most one of the following fields is populated: - // the one specified by 'detail'. - Package *DescribePackage `json:"package,omitempty"` - Type *DescribeType `json:"type,omitempty"` - Value *DescribeValue `json:"value,omitempty"` -} - -// A WhichErrs is the result of a 'whicherrs' query. -// It contains the position of the queried error and the possible globals, -// constants, and types it may point to. -type WhichErrs struct { - ErrPos string `json:"errpos,omitempty"` // location of queried error - Globals []string `json:"globals,omitempty"` // locations of globals - Constants []string `json:"constants,omitempty"` // locations of constants - Types []WhichErrsType `json:"types,omitempty"` // Types -} - -type WhichErrsType struct { - Type string `json:"type,omitempty"` - Position string `json:"position,omitempty"` -} diff --git a/cmd/guru/testdata/src/README.txt b/cmd/guru/testdata/src/README.txt deleted file mode 100644 index 34fc41ae4e3..00000000000 --- a/cmd/guru/testdata/src/README.txt +++ /dev/null @@ -1,2 +0,0 @@ -This is not a Go source file. -Used by TestIssue14684. diff --git a/cmd/guru/testdata/src/alias/alias.go b/cmd/guru/testdata/src/alias/alias.go deleted file mode 100644 index 42e1d297f12..00000000000 --- a/cmd/guru/testdata/src/alias/alias.go +++ /dev/null @@ -1,23 +0,0 @@ -// Tests of Go 1.9 type aliases. -// See go.tools/guru/guru_test.go for explanation. -// See alias.golden for expected query results. 
- -package alias // @describe describe-pkg "alias" - -type I interface { // @implements implements-I "I" - f() -} - -type N int - -func (N) f() {} - -type M = N // @describe describe-def-M "M" -var m M // @describe describe-ref-M "M" - -type O N // @describe describe-O "O" - -type P = struct{ N } // @describe describe-P "N" - -type U = undefined // @describe describe-U "U" -type _ = undefined // @describe describe-undefined "undefined" diff --git a/cmd/guru/testdata/src/alias/alias.golden b/cmd/guru/testdata/src/alias/alias.golden deleted file mode 100644 index b5ba46e542b..00000000000 --- a/cmd/guru/testdata/src/alias/alias.golden +++ /dev/null @@ -1,47 +0,0 @@ --------- @describe describe-pkg -------- -definition of package "alias" - type I interface{f()} - method (I) f() - type M = N - method (N) f() - type N int - method (N) f() - type O int - type P = struct{N} - method (struct{N}) f() - type U = invalid type - var m N - --------- @implements implements-I -------- -interface type I - is implemented by basic type N - --------- @describe describe-def-M -------- -alias of type N (size 8, align 8) -defined as int -Methods: - method (N) f() - --------- @describe describe-ref-M -------- -alias of type N (size 8, align 8) -defined as int -Methods: - method (N) f() - --------- @describe describe-O -------- -definition of type O (size 8, align 8) -No methods. - --------- @describe describe-P -------- -type struct{N} (size 8, align 8) -Methods: - method (struct{N}) f() -Fields: - N N - --------- @describe describe-U -------- -alias of type invalid type - --------- @describe describe-undefined -------- -identifier - diff --git a/cmd/guru/testdata/src/definition-json/main.go b/cmd/guru/testdata/src/definition-json/main.go deleted file mode 100644 index 16745192914..00000000000 --- a/cmd/guru/testdata/src/definition-json/main.go +++ /dev/null @@ -1,68 +0,0 @@ -package definition - -// Tests of 'definition' query, -json output. 
-// See golang.org/x/tools/cmd/guru/guru_test.go for explanation. -// See main.golden for expected query results. - -// TODO(adonovan): test: selection of member of same package defined in another file. - -import ( - "lib" - lib2 "lib" - "nosuchpkg" -) - -func main() { - var _ int // @definition builtin "int" - - var _ undef // @definition lexical-undef "undef" - var x lib.T // @definition lexical-pkgname "lib" - f() // @definition lexical-func "f" - print(x) // @definition lexical-var "x" - if x := ""; x == "" { // @definition lexical-shadowing "x" - } - - var _ lib.Type // @definition qualified-type "Type" - var _ lib.Func // @definition qualified-func "Func" - var _ lib.Var // @definition qualified-var "Var" - var _ lib.Const // @definition qualified-const "Const" - var _ lib2.Type // @definition qualified-type-renaming "Type" - var _ lib.Nonesuch // @definition qualified-nomember "Nonesuch" - var _ nosuchpkg.T // @definition qualified-nopkg "nosuchpkg" - - var u U - print(u.field) // @definition select-field "field" - u.method() // @definition select-method "method" -} - -func f() - -type T struct{ field int } - -func (T) method() - -type U struct{ T } - -type V1 struct { - W // @definition embedded-other-file "W" -} - -type V2 struct { - *W // @definition embedded-other-file-pointer "W" -} - -type V3 struct { - int // @definition embedded-basic "int" -} - -type V4 struct { - *int // @definition embedded-basic-pointer "int" -} - -type V5 struct { - lib.Type // @definition embedded-other-pkg "Type" -} - -type V6 struct { - T // @definition embedded-same-file "T" -} diff --git a/cmd/guru/testdata/src/definition-json/main.golden b/cmd/guru/testdata/src/definition-json/main.golden deleted file mode 100644 index dee878d0073..00000000000 --- a/cmd/guru/testdata/src/definition-json/main.golden +++ /dev/null @@ -1,95 +0,0 @@ --------- @definition builtin -------- - -Error: int is built in --------- @definition lexical-undef -------- - -Error: no object for identifier 
--------- @definition lexical-pkgname -------- -{ - "objpos": "testdata/src/definition-json/main.go:10:2", - "desc": "package lib" -} --------- @definition lexical-func -------- -{ - "objpos": "$GOPATH/src/definition-json/main.go:38:6", - "desc": "func f" -} --------- @definition lexical-var -------- -{ - "objpos": "$GOPATH/src/definition-json/main.go:19:6", - "desc": "var x" -} --------- @definition lexical-shadowing -------- -{ - "objpos": "$GOPATH/src/definition-json/main.go:22:5", - "desc": "var x" -} --------- @definition qualified-type -------- -{ - "objpos": "testdata/src/lib/lib.go:3:6", - "desc": "type lib.Type" -} --------- @definition qualified-func -------- -{ - "objpos": "testdata/src/lib/lib.go:9:6", - "desc": "func lib.Func" -} --------- @definition qualified-var -------- -{ - "objpos": "testdata/src/lib/lib.go:14:5", - "desc": "var lib.Var" -} --------- @definition qualified-const -------- -{ - "objpos": "testdata/src/lib/lib.go:12:7", - "desc": "const lib.Const" -} --------- @definition qualified-type-renaming -------- -{ - "objpos": "testdata/src/lib/lib.go:3:6", - "desc": "type lib.Type" -} --------- @definition qualified-nomember -------- - -Error: couldn't find declaration of Nonesuch in "lib" --------- @definition qualified-nopkg -------- -{ - "objpos": "testdata/src/definition-json/main.go:12:2", - "desc": "package nosuchpkg" -} --------- @definition select-field -------- -{ - "objpos": "testdata/src/definition-json/main.go:40:16", - "desc": "field field int" -} --------- @definition select-method -------- -{ - "objpos": "testdata/src/definition-json/main.go:42:10", - "desc": "func (T).method()" -} --------- @definition embedded-other-file -------- -{ - "objpos": "testdata/src/definition-json/type.go:3:6", - "desc": "type W int" -} --------- @definition embedded-other-file-pointer -------- -{ - "objpos": "testdata/src/definition-json/type.go:3:6", - "desc": "type W int" -} --------- @definition embedded-basic -------- - -Error: int is built 
in --------- @definition embedded-basic-pointer -------- - -Error: int is built in --------- @definition embedded-other-pkg -------- -{ - "objpos": "testdata/src/lib/lib.go:3:6", - "desc": "type lib.Type" -} --------- @definition embedded-same-file -------- -{ - "objpos": "$GOPATH/src/definition-json/main.go:40:6", - "desc": "type T" -} diff --git a/cmd/guru/testdata/src/definition-json/type.go b/cmd/guru/testdata/src/definition-json/type.go deleted file mode 100644 index a574bf37fcc..00000000000 --- a/cmd/guru/testdata/src/definition-json/type.go +++ /dev/null @@ -1,3 +0,0 @@ -package definition - -type W int diff --git a/cmd/guru/testdata/src/describe-json/main.go b/cmd/guru/testdata/src/describe-json/main.go deleted file mode 100644 index 54b52c92bf8..00000000000 --- a/cmd/guru/testdata/src/describe-json/main.go +++ /dev/null @@ -1,29 +0,0 @@ -package describe // @describe pkgdecl "describe" - -// Tests of 'describe' query, -format=json. -// See go.tools/guru/guru_test.go for explanation. -// See describe-json.golden for expected query results. 
- -func main() { - var s struct{ x [3]int } - p := &s.x[0] // @describe desc-val-p "p" - _ = p - - var i I = C(0) - if i == nil { - i = new(D) - } - print(i) // @describe desc-val-i "\\bi\\b" - - go main() // @describe desc-stmt "go" -} - -type I interface { - f() -} - -type C int // @describe desc-type-C "C" -type D struct{} - -func (c C) f() {} // @describe desc-param-c "\\bc\\b" -func (d *D) f() {} // @describe desc-param-d "\\bd\\b" diff --git a/cmd/guru/testdata/src/describe-json/main.golden b/cmd/guru/testdata/src/describe-json/main.golden deleted file mode 100644 index bdb36938538..00000000000 --- a/cmd/guru/testdata/src/describe-json/main.golden +++ /dev/null @@ -1,134 +0,0 @@ --------- @describe pkgdecl -------- -{ - "desc": "definition of package \"describe-json\"", - "pos": "testdata/src/describe-json/main.go:1:9", - "detail": "package", - "package": { - "path": "describe-json", - "members": [ - { - "name": "C", - "type": "int", - "pos": "testdata/src/describe-json/main.go:25:6", - "kind": "type", - "methods": [ - { - "name": "method (C) f()", - "pos": "testdata/src/describe-json/main.go:28:12" - } - ] - }, - { - "name": "D", - "type": "struct{}", - "pos": "testdata/src/describe-json/main.go:26:6", - "kind": "type", - "methods": [ - { - "name": "method (*D) f()", - "pos": "testdata/src/describe-json/main.go:29:13" - } - ] - }, - { - "name": "I", - "type": "interface{f()}", - "pos": "testdata/src/describe-json/main.go:21:6", - "kind": "type", - "methods": [ - { - "name": "method (I) f()", - "pos": "testdata/src/describe-json/main.go:22:2" - } - ] - }, - { - "name": "main", - "type": "func()", - "pos": "testdata/src/describe-json/main.go:7:6", - "kind": "func" - } - ] - } -} --------- @describe desc-val-p -------- -{ - "desc": "identifier", - "pos": "testdata/src/describe-json/main.go:9:2", - "detail": "value", - "value": { - "type": "*int", - "objpos": "testdata/src/describe-json/main.go:9:2" - } -} --------- @describe desc-val-i -------- -{ - "desc": 
"identifier", - "pos": "testdata/src/describe-json/main.go:16:8", - "detail": "value", - "value": { - "type": "I", - "objpos": "testdata/src/describe-json/main.go:12:6", - "typespos": [ - { - "objpos": "testdata/src/describe-json/main.go:21:6", - "desc": "I" - } - ] - } -} --------- @describe desc-stmt -------- -{ - "desc": "go statement", - "pos": "testdata/src/describe-json/main.go:18:2", - "detail": "unknown" -} --------- @describe desc-type-C -------- -{ - "desc": "definition of type C (size 8, align 8)", - "pos": "testdata/src/describe-json/main.go:25:6", - "detail": "type", - "type": { - "type": "C", - "namepos": "testdata/src/describe-json/main.go:25:6", - "namedef": "int", - "methods": [ - { - "name": "method (C) f()", - "pos": "testdata/src/describe-json/main.go:28:12" - } - ] - } -} --------- @describe desc-param-c -------- -{ - "desc": "identifier", - "pos": "testdata/src/describe-json/main.go:28:7", - "detail": "value", - "value": { - "type": "C", - "objpos": "testdata/src/describe-json/main.go:28:7", - "typespos": [ - { - "objpos": "testdata/src/describe-json/main.go:25:6", - "desc": "C" - } - ] - } -} --------- @describe desc-param-d -------- -{ - "desc": "identifier", - "pos": "testdata/src/describe-json/main.go:29:7", - "detail": "value", - "value": { - "type": "*D", - "objpos": "testdata/src/describe-json/main.go:29:7", - "typespos": [ - { - "objpos": "testdata/src/describe-json/main.go:26:6", - "desc": "D" - } - ] - } -} diff --git a/cmd/guru/testdata/src/describe/main.go b/cmd/guru/testdata/src/describe/main.go deleted file mode 100644 index 2e24396fdc1..00000000000 --- a/cmd/guru/testdata/src/describe/main.go +++ /dev/null @@ -1,119 +0,0 @@ -package describe // @describe pkgdecl "describe" - -// Tests of 'describe' query. -// See go.tools/guru/guru_test.go for explanation. -// See describe.golden for expected query results. - -// TODO(adonovan): more coverage of the (extensive) logic. 
- -import ( - "lib" - "nosuchpkg" // @describe badimport1 "nosuchpkg" - nosuchpkg2 "nosuchpkg" // @describe badimport2 "nosuchpkg2" - // The unsafe package changed in Go 1.17 with the addition of - // unsafe.Add and unsafe.Slice. While we still support older versions - // of Go, the test case below cannot be enabled. - // _ "unsafe" // @describe unsafe "unsafe" -) - -var _ nosuchpkg.T -var _ nosuchpkg2.T - -type cake float64 // @describe type-ref-builtin "float64" - -const c = iota // @describe const-ref-iota "iota" - -const pi = 3.141 // @describe const-def-pi "pi" -const pie = cake(pi) // @describe const-def-pie "pie" -const _ = pi // @describe const-ref-pi "pi" - -var global = new(string) // NB: ssa.Global is indirect, i.e. **string - -func main() { // @describe func-def-main "main" - // func objects - _ = main // @describe func-ref-main "main" - _ = (*C).f // @describe func-ref-*C.f "..C..f" - _ = D.f // @describe func-ref-D.f "D.f" - _ = I.f // @describe func-ref-I.f "I.f" - var d D // @describe type-D "D" - var i I // @describe type-I "I" - _ = d.f // @describe func-ref-d.f "d.f" - _ = i.f // @describe func-ref-i.f "i.f" - var slice []D // @describe slice-of-D "slice" - - var dptr *D // @describe ptr-with-nonptr-methods "dptr" - _ = dptr - - // var objects - anon := func() { - _ = d // @describe ref-lexical-d "d" - } - _ = anon // @describe ref-anon "anon" - _ = global // @describe ref-global "global" - - // SSA affords some local flow sensitivity. 
- var a, b int - var x = &a // @describe var-def-x-1 "x" - _ = x // @describe var-ref-x-1 "x" - x = &b // @describe var-def-x-2 "x" - _ = x // @describe var-ref-x-2 "x" - - i = new(C) // @describe var-ref-i-C "i" - if i != nil { - i = D{} // @describe var-ref-i-D "i" - } - print(i) // @describe var-ref-i "\\bi\\b" - - // const objects - const localpi = 3.141 // @describe const-local-pi "localpi" - const localpie = cake(pi) // @describe const-local-pie "localpie" - const _ = localpi // @describe const-ref-localpi "localpi" - - // type objects - type T int // @describe type-def-T "T" - var three T = 3 // @describe type-ref-T "T" - _ = three - - print(1 + 2*3) // @describe const-expr " 2.3" - print(real(1+2i) - 3) // @describe const-expr2 "real.*3" - - m := map[string]*int{"a": &a} - mapval, _ := m["a"] // @describe map-lookup,ok "m..a.." - _ = mapval // @describe mapval "mapval" - _ = m // @describe m "m" - - defer main() // @describe defer-stmt "defer" - go main() // @describe go-stmt "go" - - panic(3) // @describe builtin-ref-panic "panic" - - var a2 int // @describe var-decl-stmt "var a2 int" - _ = a2 - var _ int // @describe var-decl-stmt2 "var _ int" - var _ int // @describe var-def-blank "_" - - var _ lib.Outer // @describe lib-outer "Outer" - - var mmm map[C]D // @describe var-map-of-C-D "mmm" - - d := newD().ThirdField // @describe field-access "ThirdField" - - astCopy := ast - unknown() // @describe call-unknown "\\(" -} - -type I interface { // @describe def-iface-I "I" - f() // @describe def-imethod-I.f "f" -} - -type C int -type D struct { - Field int - AnotherField string - ThirdField C -} - -func (c *C) f() {} -func (d D) f() {} - -func newD() D { return D{} } diff --git a/cmd/guru/testdata/src/describe/main.golden b/cmd/guru/testdata/src/describe/main.golden deleted file mode 100644 index 68de5279a28..00000000000 --- a/cmd/guru/testdata/src/describe/main.golden +++ /dev/null @@ -1,248 +0,0 @@ --------- @describe pkgdecl -------- -definition of package 
"describe" - type C int - method (*C) f() - type D struct{...} - method (D) f() - type I interface{f()} - method (I) f() - const c untyped int = 0 - type cake float64 - var global *string - func main func() - func newD func() D - const pi untyped float = 3.141 - const pie cake = 3.141 - --------- @describe badimport1 -------- -import of package "nosuchpkg" - --------- @describe badimport2 -------- -reference to package "nosuchpkg" - --------- @describe type-ref-builtin -------- -reference to built-in type float64 - --------- @describe const-ref-iota -------- -reference to const iota untyped int of value 0 - --------- @describe const-def-pi -------- -definition of const pi untyped float of value 3.141 - --------- @describe const-def-pie -------- -definition of const pie cake of value 3.141 -Named types: - type cake defined here - --------- @describe const-ref-pi -------- -reference to const pi untyped float of value 3.141 -defined here - --------- @describe func-def-main -------- -definition of func main() - --------- @describe func-ref-main -------- -reference to func main() -defined here - --------- @describe func-ref-*C.f -------- -reference to method func (*C).f() -defined here - --------- @describe func-ref-D.f -------- -reference to method func (D).f() -defined here - --------- @describe func-ref-I.f -------- -reference to interface method func (I).f() -defined here - --------- @describe type-D -------- -reference to type D (size 32, align 8) -defined as struct{Field int; AnotherField string; ThirdField C} -Methods: - method (D) f() -Fields: - Field int - AnotherField string - ThirdField C - --------- @describe type-I -------- -reference to type I (size 16, align 8) -defined as interface{f()} -Methods: - method (I) f() - --------- @describe func-ref-d.f -------- -reference to method func (D).f() -defined here - --------- @describe func-ref-i.f -------- -reference to interface method func (I).f() -defined here - --------- @describe slice-of-D -------- 
-definition of var slice []D -Named types: - type D defined here - --------- @describe ptr-with-nonptr-methods -------- -definition of var dptr *D -Methods: - method (*D) f() -Fields: - Field int - AnotherField string - ThirdField C -Named types: - type D defined here - --------- @describe ref-lexical-d -------- -reference to var d D -defined here -Methods: - method (D) f() -Fields: - Field int - AnotherField string - ThirdField C -Named types: - type D defined here - --------- @describe ref-anon -------- -reference to var anon func() -defined here - --------- @describe ref-global -------- -reference to var global *string -defined here - --------- @describe var-def-x-1 -------- -definition of var x *int - --------- @describe var-ref-x-1 -------- -reference to var x *int -defined here - --------- @describe var-def-x-2 -------- -reference to var x *int -defined here - --------- @describe var-ref-x-2 -------- -reference to var x *int -defined here - --------- @describe var-ref-i-C -------- -reference to var i I -defined here -Methods: - method (I) f() -Named types: - type I defined here - --------- @describe var-ref-i-D -------- -reference to var i I -defined here -Methods: - method (I) f() -Named types: - type I defined here - --------- @describe var-ref-i -------- -reference to var i I -defined here -Methods: - method (I) f() -Named types: - type I defined here - --------- @describe const-local-pi -------- -definition of const localpi untyped float of value 3.141 - --------- @describe const-local-pie -------- -definition of const localpie cake of value 3.141 -Named types: - type cake defined here - --------- @describe const-ref-localpi -------- -reference to const localpi untyped float of value 3.141 -defined here - --------- @describe type-def-T -------- -definition of type T (size 8, align 8) -No methods. - --------- @describe type-ref-T -------- -reference to type T (size 8, align 8) -defined as int -No methods. 
- --------- @describe const-expr -------- -binary * operation of value 6 - --------- @describe const-expr2 -------- -binary - operation of value -2 - --------- @describe map-lookup,ok -------- -index expression of type (*int, bool) - --------- @describe mapval -------- -reference to var mapval *int -defined here - --------- @describe m -------- -reference to var m map[string]*int -defined here - --------- @describe defer-stmt -------- -defer statement - --------- @describe go-stmt -------- -go statement - --------- @describe builtin-ref-panic -------- -function call (or conversion) of type () - --------- @describe var-decl-stmt -------- -definition of var a2 int - --------- @describe var-decl-stmt2 -------- -definition of var _ int - --------- @describe var-def-blank -------- -definition of var _ int - --------- @describe lib-outer -------- -reference to type lib.Outer (size 56, align 8) -defined as struct{A int; b int; lib.inner} -No methods. -Fields: - A int - inner.C bool - inner.recursive.E bool - --------- @describe var-map-of-C-D -------- -definition of var mmm map[C]D -Named types: - type C defined here - type D defined here - --------- @describe field-access -------- -reference to field ThirdField C -defined here -Methods: - method (*C) f() -Named types: - type C defined here - --------- @describe call-unknown -------- -function call of type invalid type - --------- @describe def-iface-I -------- -definition of type I (size 16, align 8) -Methods: - method (I) f() - --------- @describe def-imethod-I.f -------- -definition of interface method func (I).f() - diff --git a/cmd/guru/testdata/src/freevars/main.go b/cmd/guru/testdata/src/freevars/main.go deleted file mode 100644 index c6aa08d2296..00000000000 --- a/cmd/guru/testdata/src/freevars/main.go +++ /dev/null @@ -1,40 +0,0 @@ -package main - -// Tests of 'freevars' query. -// See go.tools/guru/guru_test.go for explanation. -// See freevars.golden for expected query results. 
- -// TODO(adonovan): it's hard to test this query in a single line of gofmt'd code. - -type T struct { - a, b int -} - -type S struct { - x int - t T -} - -func f(int) {} - -func main() { - type C int - x := 1 - const exp = 6 - if y := 2; x+y+int(C(3)) != exp { // @freevars fv1 "if.*{" - panic("expected 6") - } - - var s S - - for x, y := range "foo" { - println(s.x + s.t.a + s.t.b + x + int(y)) // @freevars fv2 "print.*y." - } - - f(x) // @freevars fv3 "f.x." - -loop: // @freevars fv-def-label "loop:" - for { - break loop // @freevars fv-ref-label "break loop" - } -} diff --git a/cmd/guru/testdata/src/freevars/main.golden b/cmd/guru/testdata/src/freevars/main.golden deleted file mode 100644 index a3bc0c95383..00000000000 --- a/cmd/guru/testdata/src/freevars/main.golden +++ /dev/null @@ -1,25 +0,0 @@ --------- @freevars fv1 -------- -Free identifiers: -type C -const exp int -var x int - --------- @freevars fv2 -------- -Free identifiers: -var s.t.a int -var s.t.b int -var s.x int -var x int -var y rune - --------- @freevars fv3 -------- -Free identifiers: -var x int - --------- @freevars fv-def-label -------- -No free identifiers. - --------- @freevars fv-ref-label -------- -Free identifiers: -label loop - diff --git a/cmd/guru/testdata/src/implements-json/main.go b/cmd/guru/testdata/src/implements-json/main.go deleted file mode 100644 index e18a373ab76..00000000000 --- a/cmd/guru/testdata/src/implements-json/main.go +++ /dev/null @@ -1,27 +0,0 @@ -package main - -// Tests of 'implements' query, -output=json. -// See go.tools/guru/guru_test.go for explanation. -// See implements.golden for expected query results. 
- -func main() { -} - -type E interface{} // @implements E "E" - -type F interface { // @implements F "F" - f() -} - -type FG interface { // @implements FG "FG" - f() - g() []int // @implements slice "..int" -} - -type C int // @implements C "C" -type D struct{} - -func (c *C) f() {} // @implements starC ".C" -func (d D) f() {} // @implements D "D" - -func (d *D) g() []int { return nil } // @implements starD ".D" diff --git a/cmd/guru/testdata/src/implements-json/main.golden b/cmd/guru/testdata/src/implements-json/main.golden deleted file mode 100644 index ce18c1c6425..00000000000 --- a/cmd/guru/testdata/src/implements-json/main.golden +++ /dev/null @@ -1,135 +0,0 @@ --------- @implements E -------- -{ - "type": { - "name": "implements-json.E", - "pos": "testdata/src/implements-json/main.go:10:6", - "kind": "interface" - } -} --------- @implements F -------- -{ - "type": { - "name": "implements-json.F", - "pos": "testdata/src/implements-json/main.go:12:6", - "kind": "interface" - }, - "to": [ - { - "name": "*implements-json.C", - "pos": "testdata/src/implements-json/main.go:21:6", - "kind": "pointer" - }, - { - "name": "implements-json.D", - "pos": "testdata/src/implements-json/main.go:22:6", - "kind": "struct" - }, - { - "name": "implements-json.FG", - "pos": "testdata/src/implements-json/main.go:16:6", - "kind": "interface" - } - ] -} --------- @implements FG -------- -{ - "type": { - "name": "implements-json.FG", - "pos": "testdata/src/implements-json/main.go:16:6", - "kind": "interface" - }, - "to": [ - { - "name": "*implements-json.D", - "pos": "testdata/src/implements-json/main.go:22:6", - "kind": "pointer" - } - ], - "from": [ - { - "name": "implements-json.F", - "pos": "testdata/src/implements-json/main.go:12:6", - "kind": "interface" - } - ] -} --------- @implements slice -------- -{ - "type": { - "name": "[]int", - "pos": "-", - "kind": "slice" - } -} --------- @implements C -------- -{ - "type": { - "name": "implements-json.C", - "pos": 
"testdata/src/implements-json/main.go:21:6", - "kind": "basic" - }, - "fromptr": [ - { - "name": "implements-json.F", - "pos": "testdata/src/implements-json/main.go:12:6", - "kind": "interface" - } - ] -} --------- @implements starC -------- -{ - "type": { - "name": "*implements-json.C", - "pos": "testdata/src/implements-json/main.go:21:6", - "kind": "pointer" - }, - "from": [ - { - "name": "implements-json.F", - "pos": "testdata/src/implements-json/main.go:12:6", - "kind": "interface" - } - ] -} --------- @implements D -------- -{ - "type": { - "name": "implements-json.D", - "pos": "testdata/src/implements-json/main.go:22:6", - "kind": "struct" - }, - "from": [ - { - "name": "implements-json.F", - "pos": "testdata/src/implements-json/main.go:12:6", - "kind": "interface" - } - ], - "fromptr": [ - { - "name": "implements-json.FG", - "pos": "testdata/src/implements-json/main.go:16:6", - "kind": "interface" - } - ] -} --------- @implements starD -------- -{ - "type": { - "name": "*implements-json.D", - "pos": "testdata/src/implements-json/main.go:22:6", - "kind": "pointer" - }, - "from": [ - { - "name": "implements-json.F", - "pos": "testdata/src/implements-json/main.go:12:6", - "kind": "interface" - }, - { - "name": "implements-json.FG", - "pos": "testdata/src/implements-json/main.go:16:6", - "kind": "interface" - } - ] -} diff --git a/cmd/guru/testdata/src/implements-methods-json/main.go b/cmd/guru/testdata/src/implements-methods-json/main.go deleted file mode 100644 index 646276d5681..00000000000 --- a/cmd/guru/testdata/src/implements-methods-json/main.go +++ /dev/null @@ -1,37 +0,0 @@ -package main - -// Tests of 'implements' query applied to methods, -output=json. -// See go.tools/guru/guru_test.go for explanation. -// See implements-methods.golden for expected query results. 
- -import _ "lib" - -func main() { -} - -type F interface { - f() // @implements F.f "f" -} - -type FG interface { - f() // @implements FG.f "f" - g() []int // @implements FG.g "g" -} - -type C int -type D struct{} - -func (c *C) f() {} // @implements *C.f "f" -func (d D) f() {} // @implements D.f "f" - -func (d *D) g() []int { return nil } // @implements *D.g "g" - -type sorter []int - -func (sorter) Len() int { return 0 } // @implements Len "Len" -func (sorter) Less(i, j int) bool { return false } -func (sorter) Swap(i, j int) {} - -type I interface { - Method(*int) *int // @implements I.Method "Method" -} diff --git a/cmd/guru/testdata/src/implements-methods-json/main.golden b/cmd/guru/testdata/src/implements-methods-json/main.golden deleted file mode 100644 index 137261b65d6..00000000000 --- a/cmd/guru/testdata/src/implements-methods-json/main.golden +++ /dev/null @@ -1,266 +0,0 @@ --------- @implements F.f -------- -{ - "type": { - "name": "implements-methods-json.F", - "pos": "testdata/src/implements-methods-json/main.go:12:6", - "kind": "interface" - }, - "to": [ - { - "name": "*implements-methods-json.C", - "pos": "testdata/src/implements-methods-json/main.go:21:6", - "kind": "pointer" - }, - { - "name": "implements-methods-json.D", - "pos": "testdata/src/implements-methods-json/main.go:22:6", - "kind": "struct" - }, - { - "name": "implements-methods-json.FG", - "pos": "testdata/src/implements-methods-json/main.go:16:6", - "kind": "interface" - } - ], - "method": { - "name": "func (F).f()", - "pos": "testdata/src/implements-methods-json/main.go:13:2" - }, - "to_method": [ - { - "name": "method (*C) f()", - "pos": "testdata/src/implements-methods-json/main.go:24:13" - }, - { - "name": "method (D) f()", - "pos": "testdata/src/implements-methods-json/main.go:25:12" - }, - { - "name": "method (FG) f()", - "pos": "testdata/src/implements-methods-json/main.go:17:2" - } - ] -} --------- @implements FG.f -------- -{ - "type": { - "name": 
"implements-methods-json.FG", - "pos": "testdata/src/implements-methods-json/main.go:16:6", - "kind": "interface" - }, - "to": [ - { - "name": "*implements-methods-json.D", - "pos": "testdata/src/implements-methods-json/main.go:22:6", - "kind": "pointer" - } - ], - "from": [ - { - "name": "implements-methods-json.F", - "pos": "testdata/src/implements-methods-json/main.go:12:6", - "kind": "interface" - } - ], - "method": { - "name": "func (FG).f()", - "pos": "testdata/src/implements-methods-json/main.go:17:2" - }, - "to_method": [ - { - "name": "method (*D) f()", - "pos": "testdata/src/implements-methods-json/main.go:25:12" - } - ], - "from_method": [ - { - "name": "method (F) f()", - "pos": "testdata/src/implements-methods-json/main.go:13:2" - } - ] -} --------- @implements FG.g -------- -{ - "type": { - "name": "implements-methods-json.FG", - "pos": "testdata/src/implements-methods-json/main.go:16:6", - "kind": "interface" - }, - "to": [ - { - "name": "*implements-methods-json.D", - "pos": "testdata/src/implements-methods-json/main.go:22:6", - "kind": "pointer" - } - ], - "from": [ - { - "name": "implements-methods-json.F", - "pos": "testdata/src/implements-methods-json/main.go:12:6", - "kind": "interface" - } - ], - "method": { - "name": "func (FG).g() []int", - "pos": "testdata/src/implements-methods-json/main.go:18:2" - }, - "to_method": [ - { - "name": "method (*D) g() []int", - "pos": "testdata/src/implements-methods-json/main.go:27:13" - } - ], - "from_method": [ - { - "name": "", - "pos": "" - } - ] -} --------- @implements *C.f -------- -{ - "type": { - "name": "*implements-methods-json.C", - "pos": "testdata/src/implements-methods-json/main.go:21:6", - "kind": "pointer" - }, - "from": [ - { - "name": "implements-methods-json.F", - "pos": "testdata/src/implements-methods-json/main.go:12:6", - "kind": "interface" - } - ], - "method": { - "name": "func (*C).f()", - "pos": "testdata/src/implements-methods-json/main.go:24:13" - }, - "from_method": [ - { - 
"name": "method (F) f()", - "pos": "testdata/src/implements-methods-json/main.go:13:2" - } - ] -} --------- @implements D.f -------- -{ - "type": { - "name": "implements-methods-json.D", - "pos": "testdata/src/implements-methods-json/main.go:22:6", - "kind": "struct" - }, - "from": [ - { - "name": "implements-methods-json.F", - "pos": "testdata/src/implements-methods-json/main.go:12:6", - "kind": "interface" - } - ], - "fromptr": [ - { - "name": "implements-methods-json.FG", - "pos": "testdata/src/implements-methods-json/main.go:16:6", - "kind": "interface" - } - ], - "method": { - "name": "func (D).f()", - "pos": "testdata/src/implements-methods-json/main.go:25:12" - }, - "from_method": [ - { - "name": "method (F) f()", - "pos": "testdata/src/implements-methods-json/main.go:13:2" - } - ], - "fromptr_method": [ - { - "name": "method (FG) f()", - "pos": "testdata/src/implements-methods-json/main.go:17:2" - } - ] -} --------- @implements *D.g -------- -{ - "type": { - "name": "*implements-methods-json.D", - "pos": "testdata/src/implements-methods-json/main.go:22:6", - "kind": "pointer" - }, - "from": [ - { - "name": "implements-methods-json.F", - "pos": "testdata/src/implements-methods-json/main.go:12:6", - "kind": "interface" - }, - { - "name": "implements-methods-json.FG", - "pos": "testdata/src/implements-methods-json/main.go:16:6", - "kind": "interface" - } - ], - "method": { - "name": "func (*D).g() []int", - "pos": "testdata/src/implements-methods-json/main.go:27:13" - }, - "from_method": [ - { - "name": "", - "pos": "" - }, - { - "name": "method (FG) g() []int", - "pos": "testdata/src/implements-methods-json/main.go:18:2" - } - ] -} --------- @implements Len -------- -{ - "type": { - "name": "implements-methods-json.sorter", - "pos": "testdata/src/implements-methods-json/main.go:29:6", - "kind": "slice" - }, - "from": [ - { - "name": "lib.Sorter", - "pos": "testdata/src/lib/lib.go:16:6", - "kind": "interface" - } - ], - "method": { - "name": "func 
(sorter).Len() int", - "pos": "testdata/src/implements-methods-json/main.go:31:15" - }, - "from_method": [ - { - "name": "method (lib.Sorter) Len() int", - "pos": "testdata/src/lib/lib.go:17:2" - } - ] -} --------- @implements I.Method -------- -{ - "type": { - "name": "implements-methods-json.I", - "pos": "testdata/src/implements-methods-json/main.go:35:6", - "kind": "interface" - }, - "to": [ - { - "name": "lib.Type", - "pos": "testdata/src/lib/lib.go:3:6", - "kind": "basic" - } - ], - "method": { - "name": "func (I).Method(*int) *int", - "pos": "testdata/src/implements-methods-json/main.go:36:2" - }, - "to_method": [ - { - "name": "method (lib.Type) Method(x *int) *int", - "pos": "testdata/src/lib/lib.go:5:13" - } - ] -} diff --git a/cmd/guru/testdata/src/implements-methods/main.go b/cmd/guru/testdata/src/implements-methods/main.go deleted file mode 100644 index 757be44af6d..00000000000 --- a/cmd/guru/testdata/src/implements-methods/main.go +++ /dev/null @@ -1,37 +0,0 @@ -package main - -// Tests of 'implements' query applied to methods. -// See go.tools/guru/guru_test.go for explanation. -// See implements-methods.golden for expected query results. 
- -import _ "lib" - -func main() { -} - -type F interface { - f() // @implements F.f "f" -} - -type FG interface { - f() // @implements FG.f "f" - g() []int // @implements FG.g "g" -} - -type C int -type D struct{} - -func (c *C) f() {} // @implements *C.f "f" -func (d D) f() {} // @implements D.f "f" - -func (d *D) g() []int { return nil } // @implements *D.g "g" - -type sorter []int - -func (sorter) Len() int { return 0 } // @implements Len "Len" -func (sorter) Less(i, j int) bool { return false } -func (sorter) Swap(i, j int) {} - -type I interface { - Method(*int) *int // @implements I.Method "Method" -} diff --git a/cmd/guru/testdata/src/implements-methods/main.golden b/cmd/guru/testdata/src/implements-methods/main.golden deleted file mode 100644 index bd591e84760..00000000000 --- a/cmd/guru/testdata/src/implements-methods/main.golden +++ /dev/null @@ -1,37 +0,0 @@ --------- @implements F.f -------- -abstract method func (F).f() - is implemented by method (*C).f - is implemented by method (D).f - is implemented by method (FG).f - --------- @implements FG.f -------- -abstract method func (FG).f() - is implemented by method (*D).f - implements method (F).f - --------- @implements FG.g -------- -abstract method func (FG).g() []int - is implemented by method (*D).g - --------- @implements *C.f -------- -concrete method func (*C).f() - implements method (F).f - --------- @implements D.f -------- -concrete method func (D).f() - implements method (F).f -concrete method func (D).f() - implements method (FG).f - --------- @implements *D.g -------- -concrete method func (*D).g() []int - implements method (FG).g - --------- @implements Len -------- -concrete method func (sorter).Len() int - implements method (lib.Sorter).Len - --------- @implements I.Method -------- -abstract method func (I).Method(*int) *int - is implemented by method (lib.Type).Method - diff --git a/cmd/guru/testdata/src/implements/main.go b/cmd/guru/testdata/src/implements/main.go deleted file mode 
100644 index fea9006ec9e..00000000000 --- a/cmd/guru/testdata/src/implements/main.go +++ /dev/null @@ -1,44 +0,0 @@ -package main - -// Tests of 'implements' query. -// See go.tools/guru/guru_test.go for explanation. -// See implements.golden for expected query results. - -import _ "lib" - -func main() { -} - -type E interface{} // @implements E "E" - -type F interface { // @implements F "F" - f() -} - -type FG interface { // @implements FG "FG" - f() - g() []int // @implements slice "..int" -} - -type C int // @implements C "C" -type D struct{} - -func (c *C) f() {} // @implements starC ".C" -func (d D) f() {} // @implements D "D" - -func (d *D) g() []int { return nil } // @implements starD ".D" - -type sorter []int // @implements sorter "sorter" - -func (sorter) Len() int { return 0 } -func (sorter) Less(i, j int) bool { return false } -func (sorter) Swap(i, j int) {} - -type I interface { // @implements I "I" - Method(*int) *int -} - -func _() { - var d D - _ = d // @implements var_d "d" -} diff --git a/cmd/guru/testdata/src/implements/main.golden b/cmd/guru/testdata/src/implements/main.golden deleted file mode 100644 index 71d00ce7d93..00000000000 --- a/cmd/guru/testdata/src/implements/main.golden +++ /dev/null @@ -1,50 +0,0 @@ --------- @implements E -------- -empty interface type E - --------- @implements F -------- -interface type F - is implemented by pointer type *C - is implemented by struct type D - is implemented by interface type FG - --------- @implements FG -------- -interface type FG - is implemented by pointer type *D - implements F - --------- @implements slice -------- -slice type []int implements only any - --------- @implements C -------- -pointer type *C - implements F - --------- @implements starC -------- -pointer type *C - implements F - --------- @implements D -------- -struct type D - implements F -pointer type *D - implements FG - --------- @implements starD -------- -pointer type *D - implements F - implements FG - --------- @implements 
sorter -------- -slice type sorter - implements lib.Sorter - --------- @implements I -------- -interface type I - is implemented by basic type lib.Type - --------- @implements var_d -------- -struct type D - implements F -pointer type *D - implements FG - diff --git a/cmd/guru/testdata/src/imports/main.go b/cmd/guru/testdata/src/imports/main.go deleted file mode 100644 index 0fc40f22b4f..00000000000 --- a/cmd/guru/testdata/src/imports/main.go +++ /dev/null @@ -1,29 +0,0 @@ -package main - -import ( - "lib" // @describe ref-pkg-import "lib" - "lib/sublib" // @describe ref-pkg-import2 "sublib" -) - -// Tests that import another package. (To make the tests run quickly, -// we avoid using imports in all the other tests. Remember, each -// query causes parsing and typechecking of the whole program.) -// -// See go.tools/guru/guru_test.go for explanation. -// See imports.golden for expected query results. - -var a int - -func main() { - const c = lib.Const // @describe ref-const "Const" - lib.Func() // @describe ref-func "Func" - lib.Var++ // @describe ref-var "Var" - var t lib.Type // @describe ref-type "Type" - p := t.Method(&a) // @describe ref-method "Method" - - print(*p + 1) - - var _ lib.Type // @describe ref-pkg "lib" - - _ = sublib.C -} diff --git a/cmd/guru/testdata/src/imports/main.golden b/cmd/guru/testdata/src/imports/main.golden deleted file mode 100644 index 18a3e22c8cc..00000000000 --- a/cmd/guru/testdata/src/imports/main.golden +++ /dev/null @@ -1,52 +0,0 @@ --------- @describe ref-pkg-import -------- -import of package "lib" - const Const untyped int = 3 - func Func func() - type Outer struct{...} - type Sorter interface{...} - method (Sorter) Len() int - method (Sorter) Less(i int, j int) bool - method (Sorter) Swap(i int, j int) - type Type int - method (Type) Method(x *int) *int - var Var int - --------- @describe ref-pkg-import2 -------- -import of package "lib/sublib" - const C untyped int = 0 - --------- @describe ref-const -------- -reference to 
const lib.Const untyped int of value 3 -defined here - --------- @describe ref-func -------- -reference to func lib.Func() -defined here - --------- @describe ref-var -------- -reference to var lib.Var int -defined here - --------- @describe ref-type -------- -reference to type lib.Type (size 8, align 8) -defined as int -Methods: - method (Type) Method(x *int) *int - --------- @describe ref-method -------- -reference to method func (lib.Type).Method(x *int) *int -defined here - --------- @describe ref-pkg -------- -reference to package "lib" - const Const untyped int = 3 - func Func func() - type Outer struct{...} - type Sorter interface{...} - method (Sorter) Len() int - method (Sorter) Less(i int, j int) bool - method (Sorter) Swap(i int, j int) - type Type int - method (Type) Method(x *int) *int - var Var int - diff --git a/cmd/guru/testdata/src/lib/lib.go b/cmd/guru/testdata/src/lib/lib.go deleted file mode 100644 index 742cdbfaede..00000000000 --- a/cmd/guru/testdata/src/lib/lib.go +++ /dev/null @@ -1,37 +0,0 @@ -package lib - -type Type int - -func (Type) Method(x *int) *int { - return x -} - -func Func() { -} - -const Const = 3 - -var Var = 0 - -type Sorter interface { - Len() int - Less(i, j int) bool - Swap(i, j int) -} - -type Outer struct { - A int - b int - inner -} - -type inner struct { - C bool - d string - recursive -} - -type recursive struct { - E bool - *inner -} diff --git a/cmd/guru/testdata/src/lib/sublib/sublib.go b/cmd/guru/testdata/src/lib/sublib/sublib.go deleted file mode 100644 index 33c6498a345..00000000000 --- a/cmd/guru/testdata/src/lib/sublib/sublib.go +++ /dev/null @@ -1,3 +0,0 @@ -package sublib - -const C = 0 diff --git a/cmd/guru/testdata/src/main/multi.go b/cmd/guru/testdata/src/main/multi.go deleted file mode 100644 index 8c650cd2894..00000000000 --- a/cmd/guru/testdata/src/main/multi.go +++ /dev/null @@ -1,13 +0,0 @@ -package main - -func g(x int) { -} - -func f() { - x := 1 - g(x) // "g(x)" is the selection for multiple 
queries -} - -func main() { - f() -} diff --git a/cmd/guru/testdata/src/referrers-json/main.go b/cmd/guru/testdata/src/referrers-json/main.go deleted file mode 100644 index 0fd23425260..00000000000 --- a/cmd/guru/testdata/src/referrers-json/main.go +++ /dev/null @@ -1,24 +0,0 @@ -package main - -// Tests of 'referrers' query. -// See go.tools/guru/guru_test.go for explanation. -// See referrers.golden for expected query results. - -import "lib" - -type s struct { - f int -} - -func main() { - var v lib.Type = lib.Const // @referrers ref-package "lib" - _ = v.Method // @referrers ref-method "Method" - _ = v.Method - v++ //@referrers ref-local "v" - v++ - - _ = s{}.f // @referrers ref-field "f" - - var s2 s - s2.f = 1 -} diff --git a/cmd/guru/testdata/src/referrers-json/main.golden b/cmd/guru/testdata/src/referrers-json/main.golden deleted file mode 100644 index 9f8c9314364..00000000000 --- a/cmd/guru/testdata/src/referrers-json/main.golden +++ /dev/null @@ -1,234 +0,0 @@ --------- @referrers ref-package -------- -{ - "desc": "package lib" -} -{ - "package": "definition-json", - "refs": [ - { - "pos": "testdata/src/definition-json/main.go:19:8", - "text": "\tvar x lib.T // @definition lexical-pkgname \"lib\"" - }, - { - "pos": "testdata/src/definition-json/main.go:25:8", - "text": "\tvar _ lib.Type // @definition qualified-type \"Type\"" - }, - { - "pos": "testdata/src/definition-json/main.go:26:8", - "text": "\tvar _ lib.Func // @definition qualified-func \"Func\"" - }, - { - "pos": "testdata/src/definition-json/main.go:27:8", - "text": "\tvar _ lib.Var // @definition qualified-var \"Var\"" - }, - { - "pos": "testdata/src/definition-json/main.go:28:8", - "text": "\tvar _ lib.Const // @definition qualified-const \"Const\"" - }, - { - "pos": "testdata/src/definition-json/main.go:29:8", - "text": "\tvar _ lib2.Type // @definition qualified-type-renaming \"Type\"" - }, - { - "pos": "testdata/src/definition-json/main.go:30:8", - "text": "\tvar _ lib.Nonesuch // 
@definition qualified-nomember \"Nonesuch\"" - }, - { - "pos": "testdata/src/definition-json/main.go:63:2", - "text": "\tlib.Type // @definition embedded-other-pkg \"Type\"" - } - ] -} -{ - "package": "describe", - "refs": [ - { - "pos": "testdata/src/describe/main.go:95:8", - "text": "\tvar _ lib.Outer // @describe lib-outer \"Outer\"" - } - ] -} -{ - "package": "imports", - "refs": [ - { - "pos": "testdata/src/imports/main.go:18:12", - "text": "\tconst c = lib.Const // @describe ref-const \"Const\"" - }, - { - "pos": "testdata/src/imports/main.go:19:2", - "text": "\tlib.Func() // @describe ref-func \"Func\"" - }, - { - "pos": "testdata/src/imports/main.go:20:2", - "text": "\tlib.Var++ // @describe ref-var \"Var\"" - }, - { - "pos": "testdata/src/imports/main.go:21:8", - "text": "\tvar t lib.Type // @describe ref-type \"Type\"" - }, - { - "pos": "testdata/src/imports/main.go:26:8", - "text": "\tvar _ lib.Type // @describe ref-pkg \"lib\"" - } - ] -} -{ - "package": "referrers", - "refs": [ - { - "pos": "testdata/src/referrers/int_test.go:7:7", - "text": "\t_ = (lib.Type).Method // ref from internal test package" - } - ] -} -{ - "package": "referrers", - "refs": [ - { - "pos": "testdata/src/referrers/main.go:16:8", - "text": "\tvar v lib.Type = lib.Const // @referrers ref-package \"lib\"" - }, - { - "pos": "testdata/src/referrers/main.go:16:19", - "text": "\tvar v lib.Type = lib.Const // @referrers ref-package \"lib\"" - } - ] -} -{ - "package": "referrers-json", - "refs": [ - { - "pos": "testdata/src/referrers-json/main.go:14:8", - "text": "\tvar v lib.Type = lib.Const // @referrers ref-package \"lib\"" - }, - { - "pos": "testdata/src/referrers-json/main.go:14:19", - "text": "\tvar v lib.Type = lib.Const // @referrers ref-package \"lib\"" - } - ] -} -{ - "package": "referrers_test", - "refs": [ - { - "pos": "testdata/src/referrers/ext_test.go:10:7", - "text": "\t_ = (lib.Type).Method // ref from external test package" - } - ] -} -{ - "package": "what-json", - 
"refs": [ - { - "pos": "testdata/src/what-json/main.go:13:7", - "text": "var _ lib.Var // @what pkg \"lib\"" - }, - { - "pos": "testdata/src/what-json/main.go:14:8", - "text": "type _ lib.T" - } - ] -} --------- @referrers ref-method -------- -{ - "objpos": "testdata/src/lib/lib.go:5:13", - "desc": "func (lib.Type).Method(x *int) *int" -} -{ - "package": "imports", - "refs": [ - { - "pos": "testdata/src/imports/main.go:22:9", - "text": "\tp := t.Method(\u0026a) // @describe ref-method \"Method\"" - } - ] -} -{ - "package": "referrers", - "refs": [ - { - "pos": "testdata/src/referrers/int_test.go:7:17", - "text": "\t_ = (lib.Type).Method // ref from internal test package" - } - ] -} -{ - "package": "referrers", - "refs": [ - { - "pos": "testdata/src/referrers/main.go:17:8", - "text": "\t_ = v.Method // @referrers ref-method \"Method\"" - }, - { - "pos": "testdata/src/referrers/main.go:18:8", - "text": "\t_ = v.Method" - } - ] -} -{ - "package": "referrers-json", - "refs": [ - { - "pos": "testdata/src/referrers-json/main.go:15:8", - "text": "\t_ = v.Method // @referrers ref-method \"Method\"" - }, - { - "pos": "testdata/src/referrers-json/main.go:16:8", - "text": "\t_ = v.Method" - } - ] -} -{ - "package": "referrers_test", - "refs": [ - { - "pos": "testdata/src/referrers/ext_test.go:10:17", - "text": "\t_ = (lib.Type).Method // ref from external test package" - } - ] -} --------- @referrers ref-local -------- -{ - "objpos": "testdata/src/referrers-json/main.go:14:6", - "desc": "var v lib.Type" -} -{ - "package": "referrers-json", - "refs": [ - { - "pos": "testdata/src/referrers-json/main.go:15:6", - "text": "\t_ = v.Method // @referrers ref-method \"Method\"" - }, - { - "pos": "testdata/src/referrers-json/main.go:16:6", - "text": "\t_ = v.Method" - }, - { - "pos": "testdata/src/referrers-json/main.go:17:2", - "text": "\tv++ //@referrers ref-local \"v\"" - }, - { - "pos": "testdata/src/referrers-json/main.go:18:2", - "text": "\tv++" - } - ] -} --------- @referrers 
ref-field -------- -{ - "objpos": "testdata/src/referrers-json/main.go:10:2", - "desc": "field f int" -} -{ - "package": "referrers-json", - "refs": [ - { - "pos": "testdata/src/referrers-json/main.go:20:10", - "text": "\t_ = s{}.f // @referrers ref-field \"f\"" - }, - { - "pos": "testdata/src/referrers-json/main.go:23:5", - "text": "\ts2.f = 1" - } - ] -} diff --git a/cmd/guru/testdata/src/referrers/ext_test.go b/cmd/guru/testdata/src/referrers/ext_test.go deleted file mode 100644 index 35e3199ac27..00000000000 --- a/cmd/guru/testdata/src/referrers/ext_test.go +++ /dev/null @@ -1,12 +0,0 @@ -package main_test - -import ( - "lib" - renamed "referrers" // package has name "main", path "referrers", local name "renamed" -) - -func _() { - // This reference should be found by the ref-method query. - _ = (lib.Type).Method // ref from external test package - var _ renamed.T -} diff --git a/cmd/guru/testdata/src/referrers/int_test.go b/cmd/guru/testdata/src/referrers/int_test.go deleted file mode 100644 index 397842bd094..00000000000 --- a/cmd/guru/testdata/src/referrers/int_test.go +++ /dev/null @@ -1,10 +0,0 @@ -package main - -import "lib" - -func _() { - // This reference should be found by the ref-method query. - _ = (lib.Type).Method // ref from internal test package - - _ = notexported -} diff --git a/cmd/guru/testdata/src/referrers/main.go b/cmd/guru/testdata/src/referrers/main.go deleted file mode 100644 index acaae1fe961..00000000000 --- a/cmd/guru/testdata/src/referrers/main.go +++ /dev/null @@ -1,36 +0,0 @@ -package main // @referrers package-decl "main" - -// Tests of 'referrers' query. -// See go.tools/guru/guru_test.go for explanation. -// See referrers.golden for expected query results. 
- -import "lib" - -type s struct { // @referrers type " s " - f int -} - -type T int - -func main() { - var v lib.Type = lib.Const // @referrers ref-package "lib" - _ = v.Method // @referrers ref-method "Method" - _ = v.Method - v++ //@referrers ref-local "v" - v++ - - _ = s{}.f // @referrers ref-field "f" - - var s2 s - s2.f = 1 -} - -var notexported int // @referrers unexported-from-test "notexported" - -// Test //line directives: - -type U int // @referrers ref-type-U "U" - -//line nosuchfile.y:123 -var u1 U -var u2 U diff --git a/cmd/guru/testdata/src/referrers/main.golden b/cmd/guru/testdata/src/referrers/main.golden deleted file mode 100644 index 3ac8075ff94..00000000000 --- a/cmd/guru/testdata/src/referrers/main.golden +++ /dev/null @@ -1,64 +0,0 @@ --------- @referrers package-decl -------- -references to package main ("referrers") - var _ renamed.T - --------- @referrers type -------- -references to type s struct{f int} - _ = s{}.f // @referrers ref-field "f" - var s2 s - --------- @referrers ref-package -------- -references to package lib - _ = (lib.Type).Method // ref from external test package - _ = (lib.Type).Method // ref from internal test package - const c = lib.Const // @describe ref-const "Const" - lib.Func() // @describe ref-func "Func" - lib.Type // @definition embedded-other-pkg "Type" - lib.Var++ // @describe ref-var "Var" - var _ lib.Const // @definition qualified-const "Const" - var _ lib.Func // @definition qualified-func "Func" - var _ lib.Nonesuch // @definition qualified-nomember "Nonesuch" - var _ lib.Outer // @describe lib-outer "Outer" - var _ lib.Type // @definition qualified-type "Type" - var _ lib.Type // @describe ref-pkg "lib" - var _ lib.Var // @definition qualified-var "Var" - var _ lib2.Type // @definition qualified-type-renaming "Type" - var t lib.Type // @describe ref-type "Type" - var v lib.Type = lib.Const // @referrers ref-package "lib" - var v lib.Type = lib.Const // @referrers ref-package "lib" - var v lib.Type = 
lib.Const // @referrers ref-package "lib" - var v lib.Type = lib.Const // @referrers ref-package "lib" - var x lib.T // @definition lexical-pkgname "lib" -type _ lib.T -var _ lib.Var // @what pkg "lib" - --------- @referrers ref-method -------- -references to func (lib.Type).Method(x *int) *int - _ = (lib.Type).Method // ref from external test package - _ = (lib.Type).Method // ref from internal test package - _ = v.Method - _ = v.Method - _ = v.Method // @referrers ref-method "Method" - _ = v.Method // @referrers ref-method "Method" - p := t.Method(&a) // @describe ref-method "Method" - --------- @referrers ref-local -------- -references to var v lib.Type - _ = v.Method - _ = v.Method // @referrers ref-method "Method" - v++ - v++ //@referrers ref-local "v" - --------- @referrers ref-field -------- -references to field f int - _ = s{}.f // @referrers ref-field "f" - s2.f = 1 - --------- @referrers unexported-from-test -------- -references to var notexported int - _ = notexported - --------- @referrers ref-type-U -------- -references to type U int -open testdata/src/referrers/nosuchfile.y: no such file or directory (+ 1 more refs in this file) - diff --git a/cmd/guru/testdata/src/what-json/main.go b/cmd/guru/testdata/src/what-json/main.go deleted file mode 100644 index 27177ff5d46..00000000000 --- a/cmd/guru/testdata/src/what-json/main.go +++ /dev/null @@ -1,14 +0,0 @@ -package main - -import "lib" - -// Tests of 'what' queries, -format=json. -// See go.tools/guru/guru_test.go for explanation. -// See what-json.golden for expected query results. 
- -func main() { - f() // @what call "f" -} - -var _ lib.Var // @what pkg "lib" -type _ lib.T diff --git a/cmd/guru/testdata/src/what-json/main.golden b/cmd/guru/testdata/src/what-json/main.golden deleted file mode 100644 index 760f9d7e8ee..00000000000 --- a/cmd/guru/testdata/src/what-json/main.golden +++ /dev/null @@ -1,88 +0,0 @@ --------- @what call -------- -{ - "enclosing": [ - { - "desc": "identifier", - "start": 189, - "end": 190 - }, - { - "desc": "function call", - "start": 189, - "end": 192 - }, - { - "desc": "expression statement", - "start": 189, - "end": 192 - }, - { - "desc": "block", - "start": 186, - "end": 212 - }, - { - "desc": "function declaration", - "start": 174, - "end": 212 - }, - { - "desc": "source file", - "start": 0, - "end": 259 - } - ], - "modes": [ - "definition", - "describe", - "freevars", - "implements", - "referrers" - ], - "srcdir": "testdata/src", - "importpath": "what-json" -} --------- @what pkg -------- -{ - "enclosing": [ - { - "desc": "identifier", - "start": 220, - "end": 223 - }, - { - "desc": "selector", - "start": 220, - "end": 227 - }, - { - "desc": "value specification", - "start": 218, - "end": 227 - }, - { - "desc": "variable declaration", - "start": 214, - "end": 227 - }, - { - "desc": "source file", - "start": 0, - "end": 259 - } - ], - "modes": [ - "definition", - "describe", - "freevars", - "implements", - "referrers" - ], - "srcdir": "testdata/src", - "importpath": "what-json", - "object": "lib", - "sameids": [ - "$GOPATH/src/what-json/main.go:13:7", - "$GOPATH/src/what-json/main.go:14:8" - ] -} diff --git a/cmd/guru/testdata/src/what/main.go b/cmd/guru/testdata/src/what/main.go deleted file mode 100644 index 9e6a8b920a5..00000000000 --- a/cmd/guru/testdata/src/what/main.go +++ /dev/null @@ -1,11 +0,0 @@ -package main // @what pkgdecl "main" - -// Tests of 'what' queries. -// See go.tools/guru/guru_test.go for explanation. -// See what.golden for expected query results. 
- -func main() { - f() // @what call "f" - var ch chan int // @what var "var" - <-ch // @what recv "ch" -} diff --git a/cmd/guru/testdata/src/what/main.golden b/cmd/guru/testdata/src/what/main.golden deleted file mode 100644 index dbd1cc2afe2..00000000000 --- a/cmd/guru/testdata/src/what/main.golden +++ /dev/null @@ -1,41 +0,0 @@ --------- @what pkgdecl -------- -identifier -source file -modes: [definition describe freevars implements referrers] -srcdir: testdata/src -import path: what - --------- @what call -------- -identifier -function call -expression statement -block -function declaration -source file -modes: [definition describe freevars implements referrers] -srcdir: testdata/src -import path: what - --------- @what var -------- -variable declaration -variable declaration statement -block -function declaration -source file -modes: [describe freevars] -srcdir: testdata/src -import path: what - --------- @what recv -------- -identifier -unary <- operation -expression statement -block -function declaration -source file -modes: [definition describe freevars implements referrers] -srcdir: testdata/src -import path: what -ch -ch - diff --git a/cmd/guru/unit_test.go b/cmd/guru/unit_test.go deleted file mode 100644 index 7c24d714f19..00000000000 --- a/cmd/guru/unit_test.go +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package main - -import ( - "fmt" - "go/build" - "os" - "path/filepath" - "runtime" - "strings" - "testing" -) - -// Unit tests for internal guru functions - -func TestIssue17515(t *testing.T) { - // Tests handling of symlinks in function guessImportPath - // If we have Go code inside $HOME/go/src and create a symlink $HOME/src to it - // there are 4 possible cases that need to be tested: - // (1) absolute & absolute: GOPATH=$HOME/go/src file=$HOME/go/src/test/test.go - // (2) absolute & symlink: GOPATH=$HOME/go/src file=$HOME/src/test/test.go - // (3) symlink & symlink: GOPATH=$HOME/src file=$HOME/src/test/test.go - // (4) symlink & absolute: GOPATH=$HOME/src file= $HOME/go/src/test/test.go - - // Create a temporary home directory under /tmp - home, err := os.MkdirTemp(os.TempDir(), "home") - if err != nil { - t.Errorf("Unable to create a temporary directory in %s", os.TempDir()) - } - - defer os.RemoveAll(home) - - // create filepath /tmp/home/go/src/test/test.go - if err = os.MkdirAll(home+"/go/src/test", 0755); err != nil { - t.Fatal(err) - } - - var buildContext = build.Default - - // Success test cases - type SuccessTest struct { - gopath, filename, wantSrcdir string - } - - successTests := []SuccessTest{ - {home + "/go", home + "/go/src/test/test.go", filepath.FromSlash(home + "/go/src")}, - } - - // Add symlink cases if not on Windows, Plan 9 - if runtime.GOOS != "windows" && runtime.GOOS != "plan9" { - // symlink between /tmp/home/go/src and /tmp/home/src - if err := os.Symlink(home+"/go/src", home+"/src"); err != nil { - t.Fatal(err) - } - - successTests = append(successTests, []SuccessTest{ - {home + "/go", home + "/src/test/test.go", filepath.FromSlash(home + "/go/src")}, - {home, home + "/go/src/test/test.go", filepath.FromSlash(home + "/src")}, - {home, home + "/src/test/test.go", filepath.FromSlash(home + "/src")}, - }...) 
- } - - for _, test := range successTests { - buildContext.GOPATH = test.gopath - srcdir, importPath, err := guessImportPath(test.filename, &buildContext) - if srcdir != test.wantSrcdir || importPath != "test" || err != nil { - t.Errorf("guessImportPath(%q, %q) = %q, %q, %q; want %q, %q, %q", - test.filename, test.gopath, srcdir, importPath, err, test.wantSrcdir, "test", "nil") - } - } - // Function to format expected error message - errFormat := func(fpath string) string { - return fmt.Sprintf("can't evaluate symlinks of %s", fpath) - } - - // Failure test cases - type FailTest struct { - gopath, filename, wantErr string - } - - failTests := []FailTest{ - {home + "/go", home + "/go/src/fake/test.go", errFormat(filepath.FromSlash(home + "/go/src/fake"))}, - } - - if runtime.GOOS != "windows" && runtime.GOOS != "plan9" { - failTests = append(failTests, []FailTest{ - {home + "/go", home + "/src/fake/test.go", errFormat(filepath.FromSlash(home + "/src/fake"))}, - {home, home + "/src/fake/test.go", errFormat(filepath.FromSlash(home + "/src/fake"))}, - {home, home + "/go/src/fake/test.go", errFormat(filepath.FromSlash(home + "/go/src/fake"))}, - }...) - } - - for _, test := range failTests { - buildContext.GOPATH = test.gopath - srcdir, importPath, err := guessImportPath(test.filename, &buildContext) - if !strings.HasPrefix(fmt.Sprint(err), test.wantErr) { - t.Errorf("guessImportPath(%q, %q) = %q, %q, %q; want %q, %q, %q", - test.filename, test.gopath, srcdir, importPath, err, "", "", test.wantErr) - } - } -} diff --git a/cmd/guru/what.go b/cmd/guru/what.go deleted file mode 100644 index 422c6c10950..00000000000 --- a/cmd/guru/what.go +++ /dev/null @@ -1,243 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package main - -import ( - "fmt" - "go/ast" - "go/build" - "go/token" - "os" - "path" - "path/filepath" - "sort" - "strings" - - "golang.org/x/tools/cmd/guru/serial" - "golang.org/x/tools/go/ast/astutil" -) - -// what reports all the information about the query selection that can be -// obtained from parsing only its containing source file. -// It is intended to be a very low-latency query callable from GUI -// tools, e.g. to populate a menu of options of slower queries about -// the selected location. -func what(q *Query) error { - qpos, err := fastQueryPos(q.Build, q.Pos) - if err != nil { - return err - } - - // (ignore errors) - srcdir, importPath, _ := guessImportPath(qpos.fset.File(qpos.start).Name(), q.Build) - - // Determine which query modes are applicable to the selection. - enable := map[string]bool{ - "describe": true, // any syntax; always enabled - } - - if qpos.end > qpos.start { - enable["freevars"] = true // nonempty selection? - } - - for _, n := range qpos.path { - switch n.(type) { - case *ast.Ident: - enable["definition"] = true - enable["referrers"] = true - enable["implements"] = true - } - - // For implements, we approximate findInterestingNode. - if _, ok := enable["implements"]; !ok { - switch n.(type) { - case *ast.ArrayType, - *ast.StructType, - *ast.FuncType, - *ast.InterfaceType, - *ast.MapType, - *ast.ChanType: - enable["implements"] = true - } - } - } - - // If we don't have an exact selection, disable modes that need one. - if !qpos.exact { - enable["describe"] = false - } - - var modes []string - for mode := range enable { - modes = append(modes, mode) - } - sort.Strings(modes) - - // Find the object referred to by the selection (if it's an - // identifier) and report the position of each identifier - // that refers to the same object. - // - // This may return spurious matches (e.g. struct fields) because - // it uses the best-effort name resolution done by go/parser. 
- var sameids []token.Pos - var object string - if id, ok := qpos.path[0].(*ast.Ident); ok { - if id.Obj == nil { - // An unresolved identifier is potentially a package name. - // Resolve them with a simple importer (adds ~100µs). - importer := func(imports map[string]*ast.Object, path string) (*ast.Object, error) { - pkg, ok := imports[path] - if !ok { - pkg = &ast.Object{ - Kind: ast.Pkg, - Name: filepath.Base(path), // a guess - } - imports[path] = pkg - } - return pkg, nil - } - f := qpos.path[len(qpos.path)-1].(*ast.File) - ast.NewPackage(qpos.fset, map[string]*ast.File{"": f}, importer, nil) - } - - if id.Obj != nil { - object = id.Obj.Name - decl := qpos.path[len(qpos.path)-1] - ast.Inspect(decl, func(n ast.Node) bool { - if n, ok := n.(*ast.Ident); ok && n.Obj == id.Obj { - sameids = append(sameids, n.Pos()) - } - return true - }) - } - } - - q.Output(qpos.fset, &whatResult{ - path: qpos.path, - srcdir: srcdir, - importPath: importPath, - modes: modes, - object: object, - sameids: sameids, - }) - return nil -} - -// guessImportPath finds the package containing filename, and returns -// its source directory (an element of $GOPATH) and its import path -// relative to it. -// -// TODO(adonovan): what about _test.go files that are not part of the -// package? -func guessImportPath(filename string, buildContext *build.Context) (srcdir, importPath string, err error) { - absFile, err := filepath.Abs(filename) - if err != nil { - return "", "", fmt.Errorf("can't form absolute path of %s: %v", filename, err) - } - - absFileDir := filepath.Dir(absFile) - resolvedAbsFileDir, err := filepath.EvalSymlinks(absFileDir) - if err != nil { - return "", "", fmt.Errorf("can't evaluate symlinks of %s: %v", absFileDir, err) - } - - segmentedAbsFileDir := segments(resolvedAbsFileDir) - // Find the innermost directory in $GOPATH that encloses filename. 
- minD := 1024 - for _, gopathDir := range buildContext.SrcDirs() { - absDir, err := filepath.Abs(gopathDir) - if err != nil { - continue // e.g. non-existent dir on $GOPATH - } - resolvedAbsDir, err := filepath.EvalSymlinks(absDir) - if err != nil { - continue // e.g. non-existent dir on $GOPATH - } - - d := prefixLen(segments(resolvedAbsDir), segmentedAbsFileDir) - // If there are multiple matches, - // prefer the innermost enclosing directory - // (smallest d). - if d >= 0 && d < minD { - minD = d - srcdir = gopathDir - importPath = path.Join(segmentedAbsFileDir[len(segmentedAbsFileDir)-minD:]...) - } - } - if srcdir == "" { - return "", "", fmt.Errorf("directory %s is not beneath any of these GOROOT/GOPATH directories: %s", - filepath.Dir(absFile), strings.Join(buildContext.SrcDirs(), ", ")) - } - if importPath == "" { - // This happens for e.g. $GOPATH/src/a.go, but - // "" is not a valid path for (*go/build).Import. - return "", "", fmt.Errorf("cannot load package in root of source directory %s", srcdir) - } - return srcdir, importPath, nil -} - -func segments(path string) []string { - return strings.Split(path, string(os.PathSeparator)) -} - -// prefixLen returns the length of the remainder of y if x is a prefix -// of y, a negative number otherwise. 
-func prefixLen(x, y []string) int { - d := len(y) - len(x) - if d >= 0 { - for i := range x { - if y[i] != x[i] { - return -1 // not a prefix - } - } - } - return d -} - -type whatResult struct { - path []ast.Node - modes []string - srcdir string - importPath string - object string - sameids []token.Pos -} - -func (r *whatResult) PrintPlain(printf printfFunc) { - for _, n := range r.path { - printf(n, "%s", astutil.NodeDescription(n)) - } - printf(nil, "modes: %s", r.modes) - printf(nil, "srcdir: %s", r.srcdir) - printf(nil, "import path: %s", r.importPath) - for _, pos := range r.sameids { - printf(pos, "%s", r.object) - } -} - -func (r *whatResult) JSON(fset *token.FileSet) []byte { - var enclosing []serial.SyntaxNode - for _, n := range r.path { - enclosing = append(enclosing, serial.SyntaxNode{ - Description: astutil.NodeDescription(n), - Start: fset.Position(n.Pos()).Offset, - End: fset.Position(n.End()).Offset, - }) - } - - var sameids []string - for _, pos := range r.sameids { - sameids = append(sameids, fset.Position(pos).String()) - } - - return toJSON(&serial.What{ - Modes: r.modes, - SrcDir: r.srcdir, - ImportPath: r.importPath, - Enclosing: enclosing, - Object: r.object, - SameIDs: sameids, - }) -} diff --git a/cmd/stress/stress.go b/cmd/stress/stress.go index 6dc563d7a87..defe6eeddf7 100644 --- a/cmd/stress/stress.go +++ b/cmd/stress/stress.go @@ -21,13 +21,12 @@ import ( "flag" "fmt" "os" + "os/exec" "path/filepath" "regexp" "runtime" "syscall" "time" - - exec "golang.org/x/sys/execabs" ) var ( diff --git a/cmd/stringer/endtoend_test.go b/cmd/stringer/endtoend_test.go index d513c1b52ba..2b9afa370c5 100644 --- a/cmd/stringer/endtoend_test.go +++ b/cmd/stringer/endtoend_test.go @@ -13,7 +13,6 @@ import ( "bytes" "flag" "fmt" - "go/build" "io" "os" "path" @@ -23,7 +22,6 @@ import ( "testing" "golang.org/x/tools/internal/testenv" - "golang.org/x/tools/internal/typeparams" ) // This file contains a test that compiles and runs each program in testdata @@ 
-51,6 +49,8 @@ func TestMain(m *testing.M) { } func TestEndToEnd(t *testing.T) { + testenv.NeedsTool(t, "go") + stringer := stringerPath(t) // Read the testdata directory. fd, err := os.Open("testdata") @@ -62,9 +62,6 @@ func TestEndToEnd(t *testing.T) { if err != nil { t.Fatalf("Readdirnames: %s", err) } - if typeparams.Enabled { - names = append(names, moreTests(t, "testdata/typeparams", "typeparams")...) - } // Generate, compile, and run the test programs. for _, name := range names { if name == "typeparams" { @@ -80,8 +77,8 @@ func TestEndToEnd(t *testing.T) { continue } t.Run(name, func(t *testing.T) { - if name == "cgo.go" && !build.Default.CgoEnabled { - t.Skipf("cgo is not enabled for %s", name) + if name == "cgo.go" { + testenv.NeedsTool(t, "cgo") } stringerCompileAndRun(t, t.TempDir(), stringer, typeName(name), name) }) @@ -159,6 +156,8 @@ func TestTags(t *testing.T) { // TestConstValueChange verifies that if a constant value changes and // the stringer code is not regenerated, we'll get a compiler error. 
func TestConstValueChange(t *testing.T) { + testenv.NeedsTool(t, "go") + stringer := stringerPath(t) dir := t.TempDir() source := filepath.Join(dir, "day.go") diff --git a/cmd/stringer/testdata/typeparams/conv2.go b/cmd/stringer/testdata/conv2.go similarity index 100% rename from cmd/stringer/testdata/typeparams/conv2.go rename to cmd/stringer/testdata/conv2.go diff --git a/cmd/stringer/testdata/typeparams/prime2.go b/cmd/stringer/testdata/prime2.go similarity index 100% rename from cmd/stringer/testdata/typeparams/prime2.go rename to cmd/stringer/testdata/prime2.go diff --git a/cmd/toolstash/main.go b/cmd/toolstash/main.go index 7f38524dfb1..eda486cd6ec 100644 --- a/cmd/toolstash/main.go +++ b/cmd/toolstash/main.go @@ -129,12 +129,11 @@ import ( "io" "log" "os" + "os/exec" "path/filepath" "runtime" "strings" "time" - - exec "golang.org/x/sys/execabs" ) var usageMessage = `usage: toolstash [-n] [-v] [-cmp] command line diff --git a/copyright/copyright.go b/copyright/copyright.go index c084bd0cda8..556c6e4f69a 100644 --- a/copyright/copyright.go +++ b/copyright/copyright.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - // Package copyright checks that files have the correct copyright notices. package copyright @@ -94,7 +91,7 @@ func checkFile(toolsDir, filename string) (bool, error) { return shouldAddCopyright, nil } -// Copied from golang.org/x/tools/gopls/internal/lsp/source/util.go. +// Copied from golang.org/x/tools/gopls/internal/golang/util.go. // Matches cgo generated comment as well as the proposed standard: // // https://golang.org/s/generatedcode diff --git a/copyright/copyright_test.go b/copyright/copyright_test.go index 7f7892524f5..947fb10c1d1 100644 --- a/copyright/copyright_test.go +++ b/copyright/copyright_test.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build go1.18 -// +build go1.18 - package copyright import ( diff --git a/go.mod b/go.mod index 50c32a948a8..7df8141380f 100644 --- a/go.mod +++ b/go.mod @@ -1,12 +1,16 @@ module golang.org/x/tools -go 1.18 +go 1.19 require ( github.com/yuin/goldmark v1.4.13 - golang.org/x/mod v0.14.0 - golang.org/x/net v0.18.0 - golang.org/x/sys v0.14.0 + golang.org/x/mod v0.17.0 + golang.org/x/net v0.24.0 ) -require golang.org/x/sync v0.5.0 +require golang.org/x/sync v0.7.0 + +require ( + golang.org/x/sys v0.19.0 // indirect + golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2 +) diff --git a/go.sum b/go.sum index 4e1df992575..24ba3962099 100644 --- a/go.sum +++ b/go.sum @@ -1,10 +1,12 @@ github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0= -golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg= -golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= -golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= -golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= -golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w= +golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.19.0 
h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2 h1:IRJeR9r1pYWsHKTRe/IInb7lYvbBVIqOgsX/u0mbOWY= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= diff --git a/go/analysis/analysistest/analysistest.go b/go/analysis/analysistest/analysistest.go index 0c066baa33d..95db20f4be3 100644 --- a/go/analysis/analysistest/analysistest.go +++ b/go/analysis/analysistest/analysistest.go @@ -15,6 +15,7 @@ import ( "os" "path/filepath" "regexp" + "runtime" "sort" "strconv" "strings" @@ -128,6 +129,19 @@ type Testing interface { func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns ...string) []*Result { r := Run(t, dir, a, patterns...) + // If the immediate caller of RunWithSuggestedFixes is in + // x/tools, we apply stricter checks as required by gopls. + inTools := false + { + var pcs [1]uintptr + n := runtime.Callers(1, pcs[:]) + frames := runtime.CallersFrames(pcs[:n]) + fr, _ := frames.Next() + if fr.Func != nil && strings.HasPrefix(fr.Func.Name(), "golang.org/x/tools/") { + inTools = true + } + } + // Process each result (package) separately, matching up the suggested // fixes into a diff, which we will compare to the .golden file. We have // to do this per-result in case a file appears in two packages, such as in @@ -145,16 +159,26 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns // Validate edits, prepare the fileEdits map and read the file contents. for _, diag := range act.Diagnostics { - for _, sf := range diag.SuggestedFixes { - for _, edit := range sf.TextEdits { + for _, fix := range diag.SuggestedFixes { + + // Assert that lazy fixes have a Category (#65578, #65087). 
+ if inTools && len(fix.TextEdits) == 0 && diag.Category == "" { + t.Errorf("missing Diagnostic.Category for SuggestedFix without TextEdits (gopls requires the category for the name of the fix command") + } + + for _, edit := range fix.TextEdits { + start, end := edit.Pos, edit.End + if !end.IsValid() { + end = start + } // Validate the edit. - if edit.Pos > edit.End { + if start > end { t.Errorf( "diagnostic for analysis %v contains Suggested Fix with malformed edit: pos (%v) > end (%v)", - act.Pass.Analyzer.Name, edit.Pos, edit.End) + act.Pass.Analyzer.Name, start, end) continue } - file, endfile := act.Pass.Fset.File(edit.Pos), act.Pass.Fset.File(edit.End) + file, endfile := act.Pass.Fset.File(start), act.Pass.Fset.File(end) if file == nil || endfile == nil || file != endfile { t.Errorf( "diagnostic for analysis %v contains Suggested Fix with malformed spanning files %v and %v", @@ -171,9 +195,9 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns if _, ok := fileEdits[file]; !ok { fileEdits[file] = make(map[string][]diff.Edit) } - fileEdits[file][sf.Message] = append(fileEdits[file][sf.Message], diff.Edit{ - Start: file.Offset(edit.Pos), - End: file.Offset(edit.End), + fileEdits[file][fix.Message] = append(fileEdits[file][fix.Message], diff.Edit{ + Start: file.Offset(start), + End: file.Offset(end), New: string(edit.NewText), }) } diff --git a/go/analysis/analysistest/analysistest_test.go b/go/analysis/analysistest/analysistest_test.go index 0b5f5ed524c..8e4767a01d7 100644 --- a/go/analysis/analysistest/analysistest_test.go +++ b/go/analysis/analysistest/analysistest_test.go @@ -6,12 +6,14 @@ package analysistest_test import ( "fmt" + "go/token" "log" "os" "reflect" "strings" "testing" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/findcall" "golang.org/x/tools/internal/testenv" @@ -159,6 +161,51 @@ func println(...interface{}) { println_TEST_() } // want 
println:"found" "call o } } +// TestNoEnd tests that a missing SuggestedFix.End position is +// correctly interpreted as if equal to SuggestedFix.Pos (see issue #64199). +func TestNoEnd(t *testing.T) { + noend := &analysis.Analyzer{ + Name: "noend", + Doc: "inserts /*hello*/ before first decl", + Run: func(pass *analysis.Pass) (any, error) { + decl := pass.Files[0].Decls[0] + pass.Report(analysis.Diagnostic{ + Pos: decl.Pos(), + End: token.NoPos, + Message: "say hello", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "say hello", + TextEdits: []analysis.TextEdit{ + { + Pos: decl.Pos(), + End: token.NoPos, + NewText: []byte("/*hello*/"), + }, + }, + }}, + }) + return nil, nil + }, + } + + filemap := map[string]string{ + "a/a.go": `package a + +func F() {} // want "say hello"`, + "a/a.go.golden": `package a + +/*hello*/ +func F() {} // want "say hello"`, + } + dir, cleanup, err := analysistest.WriteFiles(filemap) + if err != nil { + t.Fatal(err) + } + defer cleanup() + + analysistest.RunWithSuggestedFixes(t, dir, noend, "a") +} + type errorfunc func(string) func (f errorfunc) Errorf(format string, args ...interface{}) { diff --git a/go/analysis/diagnostic.go b/go/analysis/diagnostic.go index f67c97294b5..c638f275819 100644 --- a/go/analysis/diagnostic.go +++ b/go/analysis/diagnostic.go @@ -31,14 +31,14 @@ type Diagnostic struct { // see https://pkg.go.dev/net/url#URL.ResolveReference. URL string - // SuggestedFixes contains suggested fixes for a diagnostic - // which can be used to perform edits to a file that address - // the diagnostic. - // - // Diagnostics should not contain SuggestedFixes that overlap. - SuggestedFixes []SuggestedFix // optional + // SuggestedFixes is an optional list of fixes to address the + // problem described by the diagnostic, each one representing + // an alternative strategy; at most one may be applied. 
+ SuggestedFixes []SuggestedFix - Related []RelatedInformation // optional + // Related contains optional secondary positions and messages + // related to the primary diagnostic. + Related []RelatedInformation } // RelatedInformation contains information related to a diagnostic. @@ -55,8 +55,7 @@ type RelatedInformation struct { // user can choose to apply to their code. Usually the SuggestedFix is // meant to fix the issue flagged by the diagnostic. // -// TextEdits for a SuggestedFix should not overlap, -// nor contain edits for other packages. +// The TextEdits must not overlap, nor contain edits for other packages. type SuggestedFix struct { // A description for this suggested fix to be shown to a user deciding // whether to accept it. diff --git a/go/analysis/internal/analysisflags/flags.go b/go/analysis/internal/analysisflags/flags.go index 9e3fde72bb6..ff14ff58f9c 100644 --- a/go/analysis/internal/analysisflags/flags.go +++ b/go/analysis/internal/analysisflags/flags.go @@ -362,15 +362,24 @@ type JSONSuggestedFix struct { Edits []JSONTextEdit `json:"edits"` } -// A JSONDiagnostic can be used to encode and decode analysis.Diagnostics to and -// from JSON. -// TODO(matloob): Should the JSON diagnostics contain ranges? -// If so, how should they be formatted? +// A JSONDiagnostic describes the JSON schema of an analysis.Diagnostic. +// +// TODO(matloob): include End position if present. type JSONDiagnostic struct { - Category string `json:"category,omitempty"` - Posn string `json:"posn"` - Message string `json:"message"` - SuggestedFixes []JSONSuggestedFix `json:"suggested_fixes,omitempty"` + Category string `json:"category,omitempty"` + Posn string `json:"posn"` // e.g. "file.go:line:column" + Message string `json:"message"` + SuggestedFixes []JSONSuggestedFix `json:"suggested_fixes,omitempty"` + Related []JSONRelatedInformation `json:"related,omitempty"` +} + +// A JSONRelated describes a secondary position and message related to +// a primary diagnostic. 
+// +// TODO(adonovan): include End position if present. +type JSONRelatedInformation struct { + Posn string `json:"posn"` // e.g. "file.go:line:column" + Message string `json:"message"` } // Add adds the result of analysis 'name' on package 'id'. @@ -401,11 +410,19 @@ func (tree JSONTree) Add(fset *token.FileSet, id, name string, diags []analysis. Edits: edits, }) } + var related []JSONRelatedInformation + for _, r := range f.Related { + related = append(related, JSONRelatedInformation{ + Posn: fset.Position(r.Pos).String(), + Message: r.Message, + }) + } jdiag := JSONDiagnostic{ Category: f.Category, Posn: fset.Position(f.Pos).String(), Message: f.Message, SuggestedFixes: fixes, + Related: related, } diagnostics = append(diagnostics, jdiag) } diff --git a/go/analysis/internal/checker/checker.go b/go/analysis/internal/checker/checker.go index 8efb89f6bd5..3c893500890 100644 --- a/go/analysis/internal/checker/checker.go +++ b/go/analysis/internal/checker/checker.go @@ -342,20 +342,28 @@ func applyFixes(roots []*action) error { for _, edit := range sf.TextEdits { // Validate the edit. // Any error here indicates a bug in the analyzer. 
- file := act.pkg.Fset.File(edit.Pos) + start, end := edit.Pos, edit.End + file := act.pkg.Fset.File(start) if file == nil { return fmt.Errorf("analysis %q suggests invalid fix: missing file info for pos (%v)", - act.a.Name, edit.Pos) + act.a.Name, start) } - if edit.Pos > edit.End { + if !end.IsValid() { + end = start + } + if start > end { return fmt.Errorf("analysis %q suggests invalid fix: pos (%v) > end (%v)", - act.a.Name, edit.Pos, edit.End) + act.a.Name, start, end) } - if eof := token.Pos(file.Base() + file.Size()); edit.End > eof { + if eof := token.Pos(file.Base() + file.Size()); end > eof { return fmt.Errorf("analysis %q suggests invalid fix: end (%v) past end of file (%v)", - act.a.Name, edit.End, eof) + act.a.Name, end, eof) + } + edit := diff.Edit{ + Start: file.Offset(start), + End: file.Offset(end), + New: string(edit.NewText), } - edit := diff.Edit{Start: file.Offset(edit.Pos), End: file.Offset(edit.End), New: string(edit.NewText)} editsForTokenFile[file] = append(editsForTokenFile[file], edit) } } diff --git a/go/analysis/internal/checker/checker_test.go b/go/analysis/internal/checker/checker_test.go index be1f1c03869..b383f29a985 100644 --- a/go/analysis/internal/checker/checker_test.go +++ b/go/analysis/internal/checker/checker_test.go @@ -49,7 +49,7 @@ func Foo() { } path := filepath.Join(testdata, "src/rename/test.go") checker.Fix = true - checker.Run([]string{"file=" + path}, []*analysis.Analyzer{analyzer}) + checker.Run([]string{"file=" + path}, []*analysis.Analyzer{renameAnalyzer}) contents, err := os.ReadFile(path) if err != nil { @@ -64,16 +64,18 @@ func Foo() { defer cleanup() } -var analyzer = &analysis.Analyzer{ +var renameAnalyzer = &analysis.Analyzer{ Name: "rename", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, + Doc: "renames symbols named bar to baz", } -var other = &analysis.Analyzer{ // like analyzer but with a different Name. +var otherAnalyzer = &analysis.Analyzer{ // like analyzer but with a different Name. 
Name: "other", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, + Doc: "renames symbols named bar to baz only in package 'other'", } func run(pass *analysis.Pass) (interface{}, error) { @@ -173,7 +175,7 @@ func Foo(s string) int { code int }{ // parse/type errors - {name: "skip-error", pattern: []string{"file=" + path}, analyzers: []*analysis.Analyzer{analyzer}, code: 1}, + {name: "skip-error", pattern: []string{"file=" + path}, analyzers: []*analysis.Analyzer{renameAnalyzer}, code: 1}, // RunDespiteErrors allows a driver to run an Analyzer even after parse/type errors. // // The noop analyzer doesn't use facts, so the driver loads only the root @@ -186,17 +188,17 @@ func Foo(s string) int { // type error, and runs the analyzer. {name: "despite-error-fact", pattern: []string{"file=" + path}, analyzers: []*analysis.Analyzer{noopWithFact}, code: 0}, // combination of parse/type errors and no errors - {name: "despite-error-and-no-error", pattern: []string{"file=" + path, "sort"}, analyzers: []*analysis.Analyzer{analyzer, noop}, code: 1}, + {name: "despite-error-and-no-error", pattern: []string{"file=" + path, "sort"}, analyzers: []*analysis.Analyzer{renameAnalyzer, noop}, code: 1}, // non-existing package error - {name: "no-package", pattern: []string{"xyz"}, analyzers: []*analysis.Analyzer{analyzer}, code: 1}, + {name: "no-package", pattern: []string{"xyz"}, analyzers: []*analysis.Analyzer{renameAnalyzer}, code: 1}, {name: "no-package-despite-error", pattern: []string{"abc"}, analyzers: []*analysis.Analyzer{noop}, code: 1}, {name: "no-multi-package-despite-error", pattern: []string{"xyz", "abc"}, analyzers: []*analysis.Analyzer{noop}, code: 1}, // combination of type/parsing and different errors - {name: "different-errors", pattern: []string{"file=" + path, "xyz"}, analyzers: []*analysis.Analyzer{analyzer, noop}, code: 1}, + {name: "different-errors", pattern: []string{"file=" + path, "xyz"}, analyzers: []*analysis.Analyzer{renameAnalyzer, noop}, code: 1}, 
// non existing dir error - {name: "no-match-dir", pattern: []string{"file=non/existing/dir"}, analyzers: []*analysis.Analyzer{analyzer, noop}, code: 1}, + {name: "no-match-dir", pattern: []string{"file=non/existing/dir"}, analyzers: []*analysis.Analyzer{renameAnalyzer, noop}, code: 1}, // no errors - {name: "no-errors", pattern: []string{"sort"}, analyzers: []*analysis.Analyzer{analyzer, noop}, code: 0}, + {name: "no-errors", pattern: []string{"sort"}, analyzers: []*analysis.Analyzer{renameAnalyzer, noop}, code: 0}, } { if test.name == "despite-error" && testenv.Go1Point() < 20 { // The behavior in the comment on the despite-error test only occurs for Go 1.20+. diff --git a/go/analysis/internal/checker/fix_test.go b/go/analysis/internal/checker/fix_test.go index e6ac1c1f008..45cbe2f5a49 100644 --- a/go/analysis/internal/checker/fix_test.go +++ b/go/analysis/internal/checker/fix_test.go @@ -6,48 +6,89 @@ package checker_test import ( "flag" + "fmt" + "go/token" + "log" "os" "os/exec" "path" "regexp" - "runtime" + "strings" "testing" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/go/analysis/internal/checker" + "golang.org/x/tools/go/analysis/multichecker" "golang.org/x/tools/internal/testenv" ) -func main() { - checker.Fix = true - patterns := flag.Args() - - code := checker.Run(patterns, []*analysis.Analyzer{analyzer, other}) - os.Exit(code) +// These are the analyzers available to the multichecker. +// (Tests may add more in init functions as needed.) +var candidates = map[string]*analysis.Analyzer{ + renameAnalyzer.Name: renameAnalyzer, + otherAnalyzer.Name: otherAnalyzer, } -// TestFixes ensures that checker.Run applies fixes correctly. -// This test fork/execs the main function above. 
-func TestFixes(t *testing.T) { - oses := map[string]bool{"darwin": true, "linux": true} - if !oses[runtime.GOOS] { - t.Skipf("skipping fork/exec test on this platform") +func TestMain(m *testing.M) { + // If the ANALYZERS=a,..,z environment is set, then this + // process should behave like a multichecker with the + // named analyzers. + if s, ok := os.LookupEnv("ANALYZERS"); ok { + var analyzers []*analysis.Analyzer + for _, name := range strings.Split(s, ",") { + a := candidates[name] + if a == nil { + log.Fatalf("no such analyzer: %q", name) + } + analyzers = append(analyzers, a) + } + multichecker.Main(analyzers...) + panic("unreachable") } - if os.Getenv("TESTFIXES_CHILD") == "1" { - // child process + // ordinary test + flag.Parse() + os.Exit(m.Run()) +} - // replace [progname -test.run=TestFixes -- ...] - // by [progname ...] - os.Args = os.Args[2:] - os.Args[0] = "vet" - main() - panic("unreachable") - } +const ( + exitCodeSuccess = 0 // success (no diagnostics) + exitCodeFailed = 1 // analysis failed to run + exitCodeDiagnostics = 3 // diagnostics were reported +) +// fix runs a multichecker subprocess with -fix in the specified +// directory, applying the comma-separated list of named analyzers to +// the packages matching the patterns. It returns the CombinedOutput. +func fix(t *testing.T, dir, analyzers string, wantExit int, patterns ...string) string { + testenv.NeedsExec(t) testenv.NeedsTool(t, "go") + cmd := exec.Command(os.Args[0], "-fix") + cmd.Args = append(cmd.Args, patterns...) 
+ cmd.Env = append(os.Environ(), + "ANALYZERS="+analyzers, + "GOPATH="+dir, + "GO111MODULE=off", + "GOPROXY=off") + + clean := func(s string) string { + return strings.ReplaceAll(s, os.TempDir(), "os.TempDir/") + } + outBytes, err := cmd.CombinedOutput() + out := clean(string(outBytes)) + t.Logf("$ %s\n%s", clean(fmt.Sprint(cmd)), out) + if err, ok := err.(*exec.ExitError); !ok { + t.Fatalf("failed to execute multichecker: %v", err) + } else if err.ExitCode() != wantExit { + t.Errorf("exit code was %d, want %d", err.ExitCode(), wantExit) + } + return out +} + +// TestFixes ensures that checker.Run applies fixes correctly. +// This test fork/execs the main function above. +func TestFixes(t *testing.T) { files := map[string]string{ "rename/foo.go": `package rename @@ -130,23 +171,7 @@ func Foo() { } defer cleanup() - args := []string{"-test.run=TestFixes", "--", "rename", "duplicate"} - cmd := exec.Command(os.Args[0], args...) - cmd.Env = append(os.Environ(), "TESTFIXES_CHILD=1", "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off") - - out, err := cmd.CombinedOutput() - if len(out) > 0 { - t.Logf("%s: out=<<%s>>", args, out) - } - var exitcode int - if err, ok := err.(*exec.ExitError); ok { - exitcode = err.ExitCode() // requires go1.12 - } - - const diagnosticsExitCode = 3 - if exitcode != diagnosticsExitCode { - t.Errorf("%s: exited %d, want %d", args, exitcode, diagnosticsExitCode) - } + fix(t, dir, "rename,other", exitCodeDiagnostics, "rename", "duplicate") for name, want := range fixed { path := path.Join(dir, "src", name) @@ -163,24 +188,6 @@ func Foo() { // TestConflict ensures that checker.Run detects conflicts correctly. // This test fork/execs the main function above. func TestConflict(t *testing.T) { - oses := map[string]bool{"darwin": true, "linux": true} - if !oses[runtime.GOOS] { - t.Skipf("skipping fork/exec test on this platform") - } - - if os.Getenv("TESTCONFLICT_CHILD") == "1" { - // child process - - // replace [progname -test.run=TestConflict -- ...] 
- // by [progname ...] - os.Args = os.Args[2:] - os.Args[0] = "vet" - main() - panic("unreachable") - } - - testenv.NeedsTool(t, "go") - files := map[string]string{ "conflict/foo.go": `package conflict @@ -198,26 +205,14 @@ func Foo() { } defer cleanup() - args := []string{"-test.run=TestConflict", "--", "conflict"} - cmd := exec.Command(os.Args[0], args...) - cmd.Env = append(os.Environ(), "TESTCONFLICT_CHILD=1", "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off") + out := fix(t, dir, "rename,other", exitCodeFailed, "conflict") - out, err := cmd.CombinedOutput() - var exitcode int - if err, ok := err.(*exec.ExitError); ok { - exitcode = err.ExitCode() // requires go1.12 - } - const errExitCode = 1 - if exitcode != errExitCode { - t.Errorf("%s: exited %d, want %d", args, exitcode, errExitCode) - } - - pattern := `conflicting edits from rename and rename on /.*/conflict/foo.go` - matched, err := regexp.Match(pattern, out) + pattern := `conflicting edits from rename and rename on .*foo.go` + matched, err := regexp.MatchString(pattern, out) if err != nil { t.Errorf("error matching pattern %s: %v", pattern, err) } else if !matched { - t.Errorf("%s: output was=<<%s>>. Expected it to match <<%s>>", args, out, pattern) + t.Errorf("output did not match pattern: %s", pattern) } // No files updated @@ -237,24 +232,6 @@ func Foo() { // distinct actions correctly. // This test fork/execs the main function above. func TestOther(t *testing.T) { - oses := map[string]bool{"darwin": true, "linux": true} - if !oses[runtime.GOOS] { - t.Skipf("skipping fork/exec test on this platform") - } - - if os.Getenv("TESTOTHER_CHILD") == "1" { - // child process - - // replace [progname -test.run=TestOther -- ...] - // by [progname ...] 
- os.Args = os.Args[2:] - os.Args[0] = "vet" - main() - panic("unreachable") - } - - testenv.NeedsTool(t, "go") - files := map[string]string{ "other/foo.go": `package other @@ -272,26 +249,14 @@ func Foo() { } defer cleanup() - args := []string{"-test.run=TestOther", "--", "other"} - cmd := exec.Command(os.Args[0], args...) - cmd.Env = append(os.Environ(), "TESTOTHER_CHILD=1", "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off") - - out, err := cmd.CombinedOutput() - var exitcode int - if err, ok := err.(*exec.ExitError); ok { - exitcode = err.ExitCode() // requires go1.12 - } - const errExitCode = 1 - if exitcode != errExitCode { - t.Errorf("%s: exited %d, want %d", args, exitcode, errExitCode) - } + out := fix(t, dir, "rename,other", exitCodeFailed, "other") - pattern := `conflicting edits from other and rename on /.*/other/foo.go` - matched, err := regexp.Match(pattern, out) + pattern := `.*conflicting edits from other and rename on .*foo.go` + matched, err := regexp.MatchString(pattern, out) if err != nil { t.Errorf("error matching pattern %s: %v", pattern, err) } else if !matched { - t.Errorf("%s: output was=<<%s>>. Expected it to match <<%s>>", args, out, pattern) + t.Errorf("output did not match pattern: %s", pattern) } // No files updated @@ -306,3 +271,53 @@ func Foo() { } } } + +// TestNoEnd tests that a missing SuggestedFix.End position is +// correctly interpreted as if equal to SuggestedFix.Pos (see issue #64199). 
+func TestNoEnd(t *testing.T) { + files := map[string]string{ + "a/a.go": "package a\n\nfunc F() {}", + } + dir, cleanup, err := analysistest.WriteFiles(files) + if err != nil { + t.Fatalf("Creating test files failed with %s", err) + } + defer cleanup() + + fix(t, dir, "noend", exitCodeDiagnostics, "a") + + got, err := os.ReadFile(path.Join(dir, "src/a/a.go")) + if err != nil { + t.Fatal(err) + } + const want = "package a\n\n/*hello*/\nfunc F() {}\n" + if string(got) != want { + t.Errorf("new file contents were <<%s>>, want <<%s>>", got, want) + } +} + +func init() { + candidates["noend"] = &analysis.Analyzer{ + Name: "noend", + Doc: "inserts /*hello*/ before first decl", + Run: func(pass *analysis.Pass) (any, error) { + decl := pass.Files[0].Decls[0] + pass.Report(analysis.Diagnostic{ + Pos: decl.Pos(), + End: token.NoPos, + Message: "say hello", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "say hello", + TextEdits: []analysis.TextEdit{ + { + Pos: decl.Pos(), + End: token.NoPos, + NewText: []byte("/*hello*/"), + }, + }, + }}, + }) + return nil, nil + }, + } +} diff --git a/go/analysis/passes/asmdecl/arches_go118.go b/go/analysis/passes/asmdecl/arches_go118.go deleted file mode 100644 index d8211afdc8d..00000000000 --- a/go/analysis/passes/asmdecl/arches_go118.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.19 -// +build !go1.19 - -package asmdecl - -func additionalArches() []*asmArch { - return nil -} diff --git a/go/analysis/passes/asmdecl/arches_go119.go b/go/analysis/passes/asmdecl/arches_go119.go deleted file mode 100644 index 3018383e7f2..00000000000 --- a/go/analysis/passes/asmdecl/arches_go119.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.19 -// +build go1.19 - -package asmdecl - -var asmArchLoong64 = asmArch{name: "loong64", bigEndian: false, stack: "R3", lr: true} - -func additionalArches() []*asmArch { - return []*asmArch{&asmArchLoong64} -} diff --git a/go/analysis/passes/asmdecl/asmdecl.go b/go/analysis/passes/asmdecl/asmdecl.go index e24dac9865a..f2ca95aa9eb 100644 --- a/go/analysis/passes/asmdecl/asmdecl.go +++ b/go/analysis/passes/asmdecl/asmdecl.go @@ -96,6 +96,7 @@ var ( asmArchRISCV64 = asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true, retRegs: []string{"X10", "F10"}} asmArchS390X = asmArch{name: "s390x", bigEndian: true, stack: "R15", lr: true} asmArchWasm = asmArch{name: "wasm", bigEndian: false, stack: "SP", lr: false} + asmArchLoong64 = asmArch{name: "loong64", bigEndian: false, stack: "R3", lr: true} arches = []*asmArch{ &asmArch386, @@ -111,11 +112,11 @@ var ( &asmArchRISCV64, &asmArchS390X, &asmArchWasm, + &asmArchLoong64, } ) func init() { - arches = append(arches, additionalArches()...) for _, arch := range arches { arch.sizes = types.SizesFor("gc", arch.name) if arch.sizes == nil { diff --git a/go/analysis/passes/assign/assign_test.go b/go/analysis/passes/assign/assign_test.go index 146385f9596..5ca612836ca 100644 --- a/go/analysis/passes/assign/assign_test.go +++ b/go/analysis/passes/assign/assign_test.go @@ -9,14 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/assign" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.RunWithSuggestedFixes(t, testdata, assign.Analyzer, tests...) 
+ analysistest.RunWithSuggestedFixes(t, testdata, assign.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go b/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go index 345db277933..fc80410e78a 100644 --- a/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go +++ b/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go @@ -4,8 +4,6 @@ // This file contains tests for the useless-assignment checker. -//go:build go1.18 - package testdata import "math/rand" diff --git a/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go.golden b/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go.golden index d9384ed5aab..8c8c4b61f5c 100644 --- a/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go.golden +++ b/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go.golden @@ -4,8 +4,6 @@ // This file contains tests for the useless-assignment checker. -//go:build go1.18 - package testdata import "math/rand" diff --git a/go/analysis/passes/atomic/atomic_test.go b/go/analysis/passes/atomic/atomic_test.go index c17064c6f31..755f5de920c 100644 --- a/go/analysis/passes/atomic/atomic_test.go +++ b/go/analysis/passes/atomic/atomic_test.go @@ -9,14 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/atomic" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.Run(t, testdata, atomic.Analyzer, tests...) 
+ analysistest.Run(t, testdata, atomic.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/bools/bools_test.go b/go/analysis/passes/bools/bools_test.go index ea51437e412..0297deab158 100644 --- a/go/analysis/passes/bools/bools_test.go +++ b/go/analysis/passes/bools/bools_test.go @@ -9,14 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/bools" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.Run(t, testdata, bools.Analyzer, tests...) + analysistest.Run(t, testdata, bools.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/bools/testdata/src/typeparams/typeparams.go b/go/analysis/passes/bools/testdata/src/typeparams/typeparams.go index 718462593a1..3afb56a5d0c 100644 --- a/go/analysis/passes/bools/testdata/src/typeparams/typeparams.go +++ b/go/analysis/passes/bools/testdata/src/typeparams/typeparams.go @@ -4,8 +4,6 @@ // This file contains tests for the bool checker. 
-//go:build go1.18 - package typeparams type T[P interface{ ~int }] struct { diff --git a/go/analysis/passes/buildssa/buildssa_test.go b/go/analysis/passes/buildssa/buildssa_test.go index c61a9f01709..cc895bb4f47 100644 --- a/go/analysis/passes/buildssa/buildssa_test.go +++ b/go/analysis/passes/buildssa/buildssa_test.go @@ -11,7 +11,6 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { @@ -30,9 +29,6 @@ func Test(t *testing.T) { } func TestGenericDecls(t *testing.T) { - if !typeparams.Enabled { - t.Skip("TestGenericDecls requires type parameters.") - } testdata := analysistest.TestData() result := analysistest.Run(t, testdata, buildssa.Analyzer, "b")[0].Result @@ -48,9 +44,6 @@ func TestGenericDecls(t *testing.T) { } func TestImporting(t *testing.T) { - if !typeparams.Enabled { - t.Skip("TestImporting depends on testdata/b/b/go which uses type parameters.") - } testdata := analysistest.TestData() result := analysistest.Run(t, testdata, buildssa.Analyzer, "c")[0].Result diff --git a/go/analysis/passes/cgocall/cgocall_test.go b/go/analysis/passes/cgocall/cgocall_test.go index 45ca1da8b1e..59d2649ee9b 100644 --- a/go/analysis/passes/cgocall/cgocall_test.go +++ b/go/analysis/passes/cgocall/cgocall_test.go @@ -9,15 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/cgocall" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - tests := []string{"a", "b", "c"} - if typeparams.Enabled { - // and testdata/src/typeparams/typeparams.go when possible - tests = append(tests, "typeparams") - } - analysistest.Run(t, testdata, cgocall.Analyzer, tests...) 
+ analysistest.Run(t, testdata, cgocall.Analyzer, "a", "b", "c", "typeparams") } diff --git a/go/analysis/passes/composite/composite.go b/go/analysis/passes/composite/composite.go index c7a49776fe5..8cc6c4a058b 100644 --- a/go/analysis/passes/composite/composite.go +++ b/go/analysis/passes/composite/composite.go @@ -15,6 +15,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -71,8 +72,8 @@ func run(pass *analysis.Pass) (interface{}, error) { return } var structuralTypes []types.Type - switch typ := typ.(type) { - case *typeparams.TypeParam: + switch typ := aliases.Unalias(typ).(type) { + case *types.TypeParam: terms, err := typeparams.StructuralTerms(typ) if err != nil { return // invalid type @@ -83,9 +84,9 @@ func run(pass *analysis.Pass) (interface{}, error) { default: structuralTypes = append(structuralTypes, typ) } + for _, typ := range structuralTypes { - under := deref(typ.Underlying()) - strct, ok := under.(*types.Struct) + strct, ok := typeparams.Deref(typ).Underlying().(*types.Struct) if !ok { // skip non-struct composite literals continue @@ -142,29 +143,18 @@ func run(pass *analysis.Pass) (interface{}, error) { return nil, nil } -func deref(typ types.Type) types.Type { - for { - ptr, ok := typ.(*types.Pointer) - if !ok { - break - } - typ = ptr.Elem().Underlying() - } - return typ -} - +// isLocalType reports whether typ belongs to the same package as pass. +// TODO(adonovan): local means "internal to a function"; rename to isSamePackageType. 
func isLocalType(pass *analysis.Pass, typ types.Type) bool { - switch x := typ.(type) { + switch x := aliases.Unalias(typ).(type) { case *types.Struct: // struct literals are local types return true case *types.Pointer: return isLocalType(pass, x.Elem()) - case *types.Named: + case interface{ Obj() *types.TypeName }: // *Named or *TypeParam (aliases were removed already) // names in package foo are local to foo_test too return strings.TrimSuffix(x.Obj().Pkg().Path(), "_test") == strings.TrimSuffix(pass.Pkg.Path(), "_test") - case *typeparams.TypeParam: - return strings.TrimSuffix(x.Obj().Pkg().Path(), "_test") == strings.TrimSuffix(pass.Pkg.Path(), "_test") } return false } diff --git a/go/analysis/passes/composite/composite_test.go b/go/analysis/passes/composite/composite_test.go index 7afaaa7ffd4..5764cf5c94d 100644 --- a/go/analysis/passes/composite/composite_test.go +++ b/go/analysis/passes/composite/composite_test.go @@ -9,14 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/composite" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - pkgs := []string{"a"} - if typeparams.Enabled { - pkgs = append(pkgs, "typeparams") - } - analysistest.RunWithSuggestedFixes(t, testdata, composite.Analyzer, pkgs...) + analysistest.RunWithSuggestedFixes(t, testdata, composite.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go b/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go index 20b652e88dd..00cbd70051e 100644 --- a/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go +++ b/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build go1.18 -// +build go1.18 - package a import "testing" diff --git a/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go.golden b/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go.golden index 20b652e88dd..00cbd70051e 100644 --- a/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go.golden +++ b/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go.golden @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - package a import "testing" diff --git a/go/analysis/passes/copylock/copylock.go b/go/analysis/passes/copylock/copylock.go index 2eeb0a330ac..8f39159c0f0 100644 --- a/go/analysis/passes/copylock/copylock.go +++ b/go/analysis/passes/copylock/copylock.go @@ -18,6 +18,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -255,7 +256,7 @@ func lockPath(tpkg *types.Package, typ types.Type, seen map[types.Type]bool) typ } seen[typ] = true - if tpar, ok := typ.(*typeparams.TypeParam); ok { + if tpar, ok := aliases.Unalias(typ).(*types.TypeParam); ok { terms, err := typeparams.StructuralTerms(tpar) if err != nil { return nil // invalid type diff --git a/go/analysis/passes/copylock/copylock_test.go b/go/analysis/passes/copylock/copylock_test.go index 869955b3e82..5726806dbf9 100644 --- a/go/analysis/passes/copylock/copylock_test.go +++ b/go/analysis/passes/copylock/copylock_test.go @@ -9,14 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/copylock" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - pkgs := []string{"a"} - if typeparams.Enabled { - pkgs = append(pkgs, "typeparams") - } - analysistest.Run(t, testdata, copylock.Analyzer, pkgs...) 
+ analysistest.Run(t, testdata, copylock.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/ctrlflow/ctrlflow_test.go b/go/analysis/passes/ctrlflow/ctrlflow_test.go index 1503c3376cb..5afd01cc918 100644 --- a/go/analysis/passes/ctrlflow/ctrlflow_test.go +++ b/go/analysis/passes/ctrlflow/ctrlflow_test.go @@ -10,19 +10,11 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/ctrlflow" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - - // load testdata/src/a/a.go - tests := []string{"a"} - if typeparams.Enabled { - // and testdata/src/typeparams/typeparams.go when possible - tests = append(tests, "typeparams") - } - results := analysistest.Run(t, testdata, ctrlflow.Analyzer, tests...) + results := analysistest.Run(t, testdata, ctrlflow.Analyzer, "a", "typeparams") // Perform a minimal smoke test on // the result (CFG) computed by ctrlflow. diff --git a/go/analysis/passes/deepequalerrors/deepequalerrors.go b/go/analysis/passes/deepequalerrors/deepequalerrors.go index 1a83bddbcec..95cd9a061eb 100644 --- a/go/analysis/passes/deepequalerrors/deepequalerrors.go +++ b/go/analysis/passes/deepequalerrors/deepequalerrors.go @@ -15,6 +15,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/aliases" ) const Doc = `check for calls of reflect.DeepEqual on error values @@ -101,7 +102,7 @@ func containsError(typ types.Type) bool { return true } } - case *types.Named: + case *types.Named, *aliases.Alias: return check(t.Underlying()) // We list the remaining valid type kinds for completeness. 
diff --git a/go/analysis/passes/deepequalerrors/deepequalerrors_test.go b/go/analysis/passes/deepequalerrors/deepequalerrors_test.go index 00941730d7f..0f21cd852a0 100644 --- a/go/analysis/passes/deepequalerrors/deepequalerrors_test.go +++ b/go/analysis/passes/deepequalerrors/deepequalerrors_test.go @@ -9,14 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/deepequalerrors" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.Run(t, testdata, deepequalerrors.Analyzer, tests...) + analysistest.Run(t, testdata, deepequalerrors.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/errorsas/errorsas_test.go b/go/analysis/passes/errorsas/errorsas_test.go index 7908e8955af..6689d8114a7 100644 --- a/go/analysis/passes/errorsas/errorsas_test.go +++ b/go/analysis/passes/errorsas/errorsas_test.go @@ -12,14 +12,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/errorsas" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.Run(t, testdata, errorsas.Analyzer, tests...) 
+ analysistest.Run(t, testdata, errorsas.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/httpmux/httpmux.go b/go/analysis/passes/httpmux/httpmux.go index fa99296b5ec..78748c5c12e 100644 --- a/go/analysis/passes/httpmux/httpmux.go +++ b/go/analysis/passes/httpmux/httpmux.go @@ -17,6 +17,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/typesinternal" ) const Doc = `report using Go 1.22 enhanced ServeMux patterns in older Go versions @@ -83,29 +84,29 @@ func isServeMuxRegisterCall(pass *analysis.Pass, call *ast.CallExpr) bool { if !isMethodNamed(fn, "net/http", "Handle", "HandleFunc") { return false } - t, ok := fn.Type().(*types.Signature).Recv().Type().(*types.Pointer) - if !ok { - return false - } - return analysisutil.IsNamedType(t.Elem(), "net/http", "ServeMux") + recv := fn.Type().(*types.Signature).Recv() // isMethodNamed() -> non-nil + isPtr, named := typesinternal.ReceiverNamed(recv) + return isPtr && analysisutil.IsNamedType(named, "net/http", "ServeMux") } +// isMethodNamed reports when a function f is a method, +// in a package with the path pkgPath and the name of f is in names. func isMethodNamed(f *types.Func, pkgPath string, names ...string) bool { if f == nil { return false } if f.Pkg() == nil || f.Pkg().Path() != pkgPath { - return false + return false // not at pkgPath } if f.Type().(*types.Signature).Recv() == nil { - return false + return false // not a method } for _, n := range names { if f.Name() == n { return true } } - return false + return false // not in names } // stringConstantExpr returns expression's string constant value. 
diff --git a/go/analysis/passes/httpresponse/httpresponse.go b/go/analysis/passes/httpresponse/httpresponse.go index c6b6c81b420..e1ca9b2f514 100644 --- a/go/analysis/passes/httpresponse/httpresponse.go +++ b/go/analysis/passes/httpresponse/httpresponse.go @@ -14,6 +14,8 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/typesinternal" ) const Doc = `check for mistakes using HTTP responses @@ -116,7 +118,8 @@ func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool { if res.Len() != 2 { return false // the function called does not return two values. } - if ptr, ok := res.At(0).Type().(*types.Pointer); !ok || !analysisutil.IsNamedType(ptr.Elem(), "net/http", "Response") { + isPtr, named := typesinternal.ReceiverNamed(res.At(0)) + if !isPtr || named == nil || !analysisutil.IsNamedType(named, "net/http", "Response") { return false // the first return type is not *http.Response. } @@ -134,7 +137,7 @@ func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool { if analysisutil.IsNamedType(typ, "net/http", "Client") { return true // method on http.Client. } - ptr, ok := typ.(*types.Pointer) + ptr, ok := aliases.Unalias(typ).(*types.Pointer) return ok && analysisutil.IsNamedType(ptr.Elem(), "net/http", "Client") // method on *http.Client. 
} diff --git a/go/analysis/passes/httpresponse/httpresponse_test.go b/go/analysis/passes/httpresponse/httpresponse_test.go index 34dc78ce208..e5fe225395a 100644 --- a/go/analysis/passes/httpresponse/httpresponse_test.go +++ b/go/analysis/passes/httpresponse/httpresponse_test.go @@ -9,14 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/httpresponse" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.Run(t, testdata, httpresponse.Analyzer, tests...) + analysistest.Run(t, testdata, httpresponse.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/httpresponse/testdata/src/a/a.go b/go/analysis/passes/httpresponse/testdata/src/a/a.go index de41212703e..d0988fc7b0b 100644 --- a/go/analysis/passes/httpresponse/testdata/src/a/a.go +++ b/go/analysis/passes/httpresponse/testdata/src/a/a.go @@ -110,3 +110,12 @@ func badUnwrapResp() { log.Fatal(err) } } + +type i66259 struct{} + +func (_ *i66259) Foo() (*int, int) { return nil, 1 } + +func issue66259() { + var f *i66259 + f.Foo() +} diff --git a/go/analysis/passes/httpresponse/testdata/src/typeparams/typeparams.go b/go/analysis/passes/httpresponse/testdata/src/typeparams/typeparams.go index 65dd58c7f8a..b2515c950ba 100644 --- a/go/analysis/passes/httpresponse/testdata/src/typeparams/typeparams.go +++ b/go/analysis/passes/httpresponse/testdata/src/typeparams/typeparams.go @@ -4,8 +4,6 @@ // This file contains tests for the httpresponse checker. 
-//go:build go1.18 - package typeparams import ( diff --git a/go/analysis/passes/ifaceassert/ifaceassert.go b/go/analysis/passes/ifaceassert/ifaceassert.go index cd4a477626d..5f07ed3ffde 100644 --- a/go/analysis/passes/ifaceassert/ifaceassert.go +++ b/go/analysis/passes/ifaceassert/ifaceassert.go @@ -13,6 +13,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/typeparams" ) //go:embed doc.go @@ -28,7 +29,7 @@ var Analyzer = &analysis.Analyzer{ // assertableTo checks whether interface v can be asserted into t. It returns // nil on success, or the first conflicting method on failure. -func assertableTo(v, t types.Type) *types.Func { +func assertableTo(free *typeparams.Free, v, t types.Type) *types.Func { if t == nil || v == nil { // not assertable to, but there is no missing method return nil @@ -42,7 +43,7 @@ func assertableTo(v, t types.Type) *types.Func { // Mitigations for interface comparisons and generics. // TODO(https://github.com/golang/go/issues/50658): Support more precise conclusion. 
- if isParameterized(V) || isParameterized(T) { + if free.Has(V) || free.Has(T) { return nil } if f, wrongType := types.MissingMethod(V, T, false); wrongType { @@ -57,6 +58,7 @@ func run(pass *analysis.Pass) (interface{}, error) { (*ast.TypeAssertExpr)(nil), (*ast.TypeSwitchStmt)(nil), } + var free typeparams.Free inspect.Preorder(nodeFilter, func(n ast.Node) { var ( assert *ast.TypeAssertExpr // v.(T) expression @@ -86,7 +88,7 @@ func run(pass *analysis.Pass) (interface{}, error) { V := pass.TypesInfo.TypeOf(assert.X) for _, target := range targets { T := pass.TypesInfo.TypeOf(target) - if f := assertableTo(V, T); f != nil { + if f := assertableTo(&free, V, T); f != nil { pass.Reportf( target.Pos(), "impossible type assertion: no type can implement both %v and %v (conflicting types for %v method)", diff --git a/go/analysis/passes/ifaceassert/ifaceassert_test.go b/go/analysis/passes/ifaceassert/ifaceassert_test.go index b07c276ff61..266e2ed4e28 100644 --- a/go/analysis/passes/ifaceassert/ifaceassert_test.go +++ b/go/analysis/passes/ifaceassert/ifaceassert_test.go @@ -9,14 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/ifaceassert" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - pkgs := []string{"a"} - if typeparams.Enabled { - pkgs = append(pkgs, "typeparams") - } - analysistest.Run(t, testdata, ifaceassert.Analyzer, pkgs...) + analysistest.Run(t, testdata, ifaceassert.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/ifaceassert/parameterized.go b/go/analysis/passes/ifaceassert/parameterized.go deleted file mode 100644 index b84577fcf85..00000000000 --- a/go/analysis/passes/ifaceassert/parameterized.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package ifaceassert - -import ( - "go/types" - - "golang.org/x/tools/internal/typeparams" -) - -// isParameterized reports whether typ contains any of the type parameters of tparams. -// -// NOTE: Adapted from go/types/infer.go. If that is exported in a future release remove this copy. -func isParameterized(typ types.Type) bool { - w := tpWalker{ - seen: make(map[types.Type]bool), - } - return w.isParameterized(typ) -} - -type tpWalker struct { - seen map[types.Type]bool -} - -func (w *tpWalker) isParameterized(typ types.Type) (res bool) { - // detect cycles - if x, ok := w.seen[typ]; ok { - return x - } - w.seen[typ] = false - defer func() { - w.seen[typ] = res - }() - - switch t := typ.(type) { - case nil, *types.Basic: // TODO(gri) should nil be handled here? - break - - case *types.Array: - return w.isParameterized(t.Elem()) - - case *types.Slice: - return w.isParameterized(t.Elem()) - - case *types.Struct: - for i, n := 0, t.NumFields(); i < n; i++ { - if w.isParameterized(t.Field(i).Type()) { - return true - } - } - - case *types.Pointer: - return w.isParameterized(t.Elem()) - - case *types.Tuple: - n := t.Len() - for i := 0; i < n; i++ { - if w.isParameterized(t.At(i).Type()) { - return true - } - } - - case *types.Signature: - // t.tparams may not be nil if we are looking at a signature - // of a generic function type (or an interface method) that is - // part of the type we're testing. We don't care about these type - // parameters. - // Similarly, the receiver of a method may declare (rather than - // use) type parameters, we don't care about those either. - // Thus, we only need to look at the input and result parameters. 
- return w.isParameterized(t.Params()) || w.isParameterized(t.Results()) - - case *types.Interface: - for i, n := 0, t.NumMethods(); i < n; i++ { - if w.isParameterized(t.Method(i).Type()) { - return true - } - } - terms, err := typeparams.InterfaceTermSet(t) - if err != nil { - panic(err) - } - for _, term := range terms { - if w.isParameterized(term.Type()) { - return true - } - } - - case *types.Map: - return w.isParameterized(t.Key()) || w.isParameterized(t.Elem()) - - case *types.Chan: - return w.isParameterized(t.Elem()) - - case *types.Named: - list := typeparams.NamedTypeArgs(t) - for i, n := 0, list.Len(); i < n; i++ { - if w.isParameterized(list.At(i)) { - return true - } - } - - case *typeparams.TypeParam: - return true - - default: - panic(t) // unreachable - } - - return false -} diff --git a/go/analysis/passes/internal/analysisutil/util.go b/go/analysis/passes/internal/analysisutil/util.go index c0060753f9f..89291602a5b 100644 --- a/go/analysis/passes/internal/analysisutil/util.go +++ b/go/analysis/passes/internal/analysisutil/util.go @@ -13,6 +13,9 @@ import ( "go/token" "go/types" "os" + + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/analysisinternal" ) // Format returns a string representation of the expression. @@ -113,7 +116,7 @@ func Imports(pkg *types.Package, path string) bool { // This function avoids allocating the concatenation of "pkg.Name", // which is important for the performance of syntax matching. 
func IsNamedType(t types.Type, pkgPath string, names ...string) bool { - n, ok := t.(*types.Named) + n, ok := aliases.Unalias(t).(*types.Named) if !ok { return false } @@ -150,3 +153,5 @@ func IsFunctionNamed(f *types.Func, pkgPath string, names ...string) bool { } return false } + +var MustExtractDoc = analysisinternal.MustExtractDoc diff --git a/go/analysis/passes/internal/analysisutil/util_test.go b/go/analysis/passes/internal/analysisutil/util_test.go index 2d7e94ed734..9f49252db3e 100644 --- a/go/analysis/passes/internal/analysisutil/util_test.go +++ b/go/analysis/passes/internal/analysisutil/util_test.go @@ -12,13 +12,9 @@ import ( "testing" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/internal/typeparams" ) func TestHasSideEffects(t *testing.T) { - if !typeparams.Enabled { - t.Skip("type parameters are not enabled") - } src := `package p type T int diff --git a/go/analysis/passes/loopclosure/doc.go b/go/analysis/passes/loopclosure/doc.go index dc544df1bfb..c95b1c1c98f 100644 --- a/go/analysis/passes/loopclosure/doc.go +++ b/go/analysis/passes/loopclosure/doc.go @@ -14,8 +14,12 @@ // in such a way (e.g. with go or defer) that it may outlive the loop // iteration and possibly observe the wrong value of the variable. // +// Note: An iteration variable can only outlive a loop iteration in Go versions <=1.21. +// In Go 1.22 and later, the loop variable lifetimes changed to create a new +// iteration variable per loop iteration. (See go.dev/issue/60078.) +// // In this example, all the deferred functions run after the loop has -// completed, so all observe the final value of v. +// completed, so all observe the final value of v [. or // *.. 
- rtype := recv.Type() - if ptr, ok := recv.Type().(*types.Pointer); ok { - rtype = ptr.Elem() - } - return analysisutil.IsNamedType(rtype, pkgPath, typeName) + _, named := typesinternal.ReceiverNamed(recv) + return analysisutil.IsNamedType(named, pkgPath, typeName) } diff --git a/go/analysis/passes/loopclosure/loopclosure_test.go b/go/analysis/passes/loopclosure/loopclosure_test.go index 55fb2a4a3d6..683f91e7b73 100644 --- a/go/analysis/passes/loopclosure/loopclosure_test.go +++ b/go/analysis/passes/loopclosure/loopclosure_test.go @@ -5,18 +5,60 @@ package loopclosure_test import ( + "os" + "path/filepath" "testing" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/loopclosure" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/txtar" ) func Test(t *testing.T) { + // legacy loopclosure test expectations are incorrect > 1.21. + testenv.SkipAfterGo1Point(t, 21) + testdata := analysistest.TestData() - tests := []string{"a", "golang.org/...", "subtests"} - if typeparams.Enabled { - tests = append(tests, "typeparams") + analysistest.Run(t, testdata, loopclosure.Analyzer, + "a", "golang.org/...", "subtests", "typeparams") +} + +func TestVersions22(t *testing.T) { + testenv.NeedsGo1Point(t, 22) + + testfile := filepath.Join(analysistest.TestData(), "src", "versions", "go22.txtar") + runTxtarFile(t, testfile, loopclosure.Analyzer, "golang.org/fake/versions") +} + +func TestVersions18(t *testing.T) { + testfile := filepath.Join(analysistest.TestData(), "src", "versions", "go18.txtar") + runTxtarFile(t, testfile, loopclosure.Analyzer, "golang.org/fake/versions") +} + +// runTxtarFile unpacks a txtar archive to a directory, and runs +// analyzer on the given patterns. +// +// This is compatible with a go.mod file. +// +// TODO(taking): Consider unifying with analysistest. 
+func runTxtarFile(t *testing.T, path string, analyzer *analysis.Analyzer, patterns ...string) { + ar, err := txtar.ParseFile(path) + if err != nil { + t.Fatal(err) + } + + dir := t.TempDir() + for _, file := range ar.Files { + name, content := file.Name, file.Data + + filename := filepath.Join(dir, name) + os.MkdirAll(filepath.Dir(filename), 0777) // ignore error + if err := os.WriteFile(filename, content, 0666); err != nil { + t.Fatal(err) + } } - analysistest.Run(t, testdata, loopclosure.Analyzer, tests...) + + analysistest.Run(t, dir, analyzer, patterns...) } diff --git a/go/analysis/passes/loopclosure/testdata/src/a/a.go b/go/analysis/passes/loopclosure/testdata/src/a/a.go index 7a7f05f663f..eb4d2a6cc7a 100644 --- a/go/analysis/passes/loopclosure/testdata/src/a/a.go +++ b/go/analysis/passes/loopclosure/testdata/src/a/a.go @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// This file contains tests for the loopclosure checker. +// This file contains legacy tests for the loopclosure checker. +// Legacy expectations are incorrect after go1.22. package testdata diff --git a/go/analysis/passes/loopclosure/testdata/src/subtests/subtest.go b/go/analysis/passes/loopclosure/testdata/src/subtests/subtest.go index 50283ec6152..faf98387c5d 100644 --- a/go/analysis/passes/loopclosure/testdata/src/subtests/subtest.go +++ b/go/analysis/passes/loopclosure/testdata/src/subtests/subtest.go @@ -2,8 +2,9 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// This file contains tests that the loopclosure analyzer detects leaked +// This file contains legacy tests that the loopclosure analyzer detects leaked // references via parallel subtests. +// Legacy expectations are incorrect after go1.22. 
package subtests diff --git a/go/analysis/passes/loopclosure/testdata/src/typeparams/typeparams.go b/go/analysis/passes/loopclosure/testdata/src/typeparams/typeparams.go index 55e129c0ab0..85976873b9a 100644 --- a/go/analysis/passes/loopclosure/testdata/src/typeparams/typeparams.go +++ b/go/analysis/passes/loopclosure/testdata/src/typeparams/typeparams.go @@ -2,9 +2,10 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// This file contains tests for the loopclosure checker. +// This file contains legacy tests for the loopclosure checker for GoVersion 3 { + _ = slice[2] // want "index of nil slice" + } + for i := 0; i < len(slice); i++ { + _ = slice[i] // want "index of nil slice" + } + } + + if array == nil { + // (The v var is necessary, otherwise the SSA + // code doesn't dereference the pointer.) + for _, v := range array { // want "nil dereference in array index operation" + _ = v + } + } + + if m == nil { + for range m { // want "range over nil map" + } + m["one"] = 1 // want "nil dereference in map update" + } + + if ch == nil { + for range ch { // want "receive from nil channel" + } + <-ch // want "receive from nil channel" + ch <- 0 // want "send to nil channel" + } +} diff --git a/go/analysis/passes/printf/printf.go b/go/analysis/passes/printf/printf.go index 070654f0124..32350192583 100644 --- a/go/analysis/passes/printf/printf.go +++ b/go/analysis/passes/printf/printf.go @@ -24,6 +24,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -959,6 +960,8 @@ func isStringer(sig *types.Signature) bool { // It is almost always a mistake to print a function value. 
func isFunctionValue(pass *analysis.Pass, e ast.Expr) bool { if typ := pass.TypesInfo.Types[e].Type; typ != nil { + // Don't call Underlying: a named func type with a String method is ok. + // TODO(adonovan): it would be more precise to check isStringer. _, ok := typ.(*types.Signature) return ok } @@ -1010,7 +1013,7 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) { // Skip checking functions with unknown type. return } - if sig, ok := typ.(*types.Signature); ok { + if sig, ok := typ.Underlying().(*types.Signature); ok { if !sig.Variadic() { // Skip checking non-variadic functions. return @@ -1020,7 +1023,7 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) { typ := params.At(firstArg).Type() typ = typ.(*types.Slice).Elem() - it, ok := typ.(*types.Interface) + it, ok := aliases.Unalias(typ).(*types.Interface) if !ok || !it.Empty() { // Skip variadic functions accepting non-interface{} args. return diff --git a/go/analysis/passes/printf/printf_test.go b/go/analysis/passes/printf/printf_test.go index ed857fe801c..853d8619b25 100644 --- a/go/analysis/passes/printf/printf_test.go +++ b/go/analysis/passes/printf/printf_test.go @@ -9,19 +9,11 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/printf" - "golang.org/x/tools/internal/testenv" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { - testenv.NeedsGo1Point(t, 19) // tests use fmt.Appendf - testdata := analysistest.TestData() printf.Analyzer.Flags.Set("funcs", "Warn,Warnf") - tests := []string{"a", "b", "nofmt"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.Run(t, testdata, printf.Analyzer, tests...) 
+ analysistest.Run(t, testdata, printf.Analyzer, "a", "b", "nofmt", "typeparams") } diff --git a/go/analysis/passes/printf/testdata/src/typeparams/diagnostics.go b/go/analysis/passes/printf/testdata/src/typeparams/diagnostics.go index c4d7e530d93..08bdb471dd1 100644 --- a/go/analysis/passes/printf/testdata/src/typeparams/diagnostics.go +++ b/go/analysis/passes/printf/testdata/src/typeparams/diagnostics.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - package typeparams import "fmt" diff --git a/go/analysis/passes/printf/testdata/src/typeparams/wrappers.go b/go/analysis/passes/printf/testdata/src/typeparams/wrappers.go index df8a6fa6e6a..05487ab4e60 100644 --- a/go/analysis/passes/printf/testdata/src/typeparams/wrappers.go +++ b/go/analysis/passes/printf/testdata/src/typeparams/wrappers.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - package typeparams import "fmt" diff --git a/go/analysis/passes/printf/types.go b/go/analysis/passes/printf/types.go index 7cbb0bdbf5f..017c8a247ec 100644 --- a/go/analysis/passes/printf/types.go +++ b/go/analysis/passes/printf/types.go @@ -10,6 +10,7 @@ import ( "go/types" "golang.org/x/tools/go/analysis" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -72,7 +73,7 @@ func (m *argMatcher) match(typ types.Type, topLevel bool) bool { return true } - if typ, _ := typ.(*typeparams.TypeParam); typ != nil { + if typ, _ := aliases.Unalias(typ).(*types.TypeParam); typ != nil { // Avoid infinite recursion through type parameters. 
if m.seen[typ] { return true @@ -275,7 +276,7 @@ func (m *argMatcher) match(typ types.Type, topLevel bool) bool { } func isConvertibleToString(typ types.Type) bool { - if bt, ok := typ.(*types.Basic); ok && bt.Kind() == types.UntypedNil { + if bt, ok := aliases.Unalias(typ).(*types.Basic); ok && bt.Kind() == types.UntypedNil { // We explicitly don't want untyped nil, which is // convertible to both of the interfaces below, as it // would just panic anyway. diff --git a/go/analysis/passes/shift/shift.go b/go/analysis/passes/shift/shift.go index bafb9112e17..d01eb1eebe5 100644 --- a/go/analysis/passes/shift/shift.go +++ b/go/analysis/passes/shift/shift.go @@ -21,6 +21,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -89,7 +90,8 @@ func checkLongShift(pass *analysis.Pass, node ast.Node, x, y ast.Expr) { if v == nil { return } - amt, ok := constant.Int64Val(v) + u := constant.ToInt(v) // either an Int or Unknown + amt, ok := constant.Int64Val(u) if !ok { return } @@ -98,8 +100,8 @@ func checkLongShift(pass *analysis.Pass, node ast.Node, x, y ast.Expr) { return } var structuralTypes []types.Type - switch t := t.(type) { - case *typeparams.TypeParam: + switch t := aliases.Unalias(t).(type) { + case *types.TypeParam: terms, err := typeparams.StructuralTerms(t) if err != nil { return // invalid type diff --git a/go/analysis/passes/shift/shift_test.go b/go/analysis/passes/shift/shift_test.go index e60943e37cf..cdce0cf6240 100644 --- a/go/analysis/passes/shift/shift_test.go +++ b/go/analysis/passes/shift/shift_test.go @@ -9,14 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/shift" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - pkgs := []string{"a"} - if 
typeparams.Enabled { - pkgs = append(pkgs, "typeparams") - } - analysistest.Run(t, testdata, shift.Analyzer, pkgs...) + analysistest.Run(t, testdata, shift.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/shift/testdata/src/a/a.go b/go/analysis/passes/shift/testdata/src/a/a.go index 796fcaa6ec4..558ece6bf8f 100644 --- a/go/analysis/passes/shift/testdata/src/a/a.go +++ b/go/analysis/passes/shift/testdata/src/a/a.go @@ -153,3 +153,8 @@ func ShiftDeadCode() { _ = i << 64 // want "too small for shift" } } + +func issue65939() { + a := 1 + println(a << 2.0) +} diff --git a/go/analysis/passes/slog/slog.go b/go/analysis/passes/slog/slog.go index a1323c3e666..0cade7bad7e 100644 --- a/go/analysis/passes/slog/slog.go +++ b/go/analysis/passes/slog/slog.go @@ -20,6 +20,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -48,6 +49,7 @@ const ( ) func run(pass *analysis.Pass) (any, error) { + var attrType types.Type // The type of slog.Attr inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ (*ast.CallExpr)(nil), @@ -66,6 +68,11 @@ func run(pass *analysis.Pass) (any, error) { // Not a slog function that takes key-value pairs. return } + // Here we know that fn.Pkg() is "log/slog". + if attrType == nil { + attrType = fn.Pkg().Scope().Lookup("Attr").Type() + } + if isMethodExpr(pass.TypesInfo, call) { // Call is to a method value. Skip the first argument. skipArgs++ @@ -91,8 +98,19 @@ func run(pass *analysis.Pass) (any, error) { pos = key case types.IsInterface(t): // As we do not do dataflow, we do not know what the dynamic type is. - // It could be a string or an Attr so we don't know what to expect next. - pos = unknown + // But we might be able to learn enough to make a decision. + if types.AssignableTo(stringType, t) { + // t must be an empty interface. 
So it can also be an Attr. + // We don't know enough to make an assumption. + pos = unknown + continue + } else if attrType != nil && types.AssignableTo(attrType, t) { + // Assume it is an Attr. + pos = key + continue + } + // Can't be either a string or Attr. Definitely an error. + fallthrough default: if unknownArg == nil { pass.ReportRangef(arg, "%s arg %q should be a string or a slog.Attr (possible missing key or value)", @@ -150,14 +168,10 @@ func isAttr(t types.Type) bool { func shortName(fn *types.Func) string { var r string if recv := fn.Type().(*types.Signature).Recv(); recv != nil { - t := recv.Type() - if pt, ok := t.(*types.Pointer); ok { - t = pt.Elem() - } - if nt, ok := t.(*types.Named); ok { - r = nt.Obj().Name() + if _, named := typesinternal.ReceiverNamed(recv); named != nil { + r = named.Obj().Name() } else { - r = recv.Type().String() + r = recv.Type().String() // anon struct/interface } r += "." } @@ -173,17 +187,12 @@ func kvFuncSkipArgs(fn *types.Func) (int, bool) { return 0, false } var recvName string // by default a slog package function - recv := fn.Type().(*types.Signature).Recv() - if recv != nil { - t := recv.Type() - if pt, ok := t.(*types.Pointer); ok { - t = pt.Elem() - } - if nt, ok := t.(*types.Named); !ok { - return 0, false - } else { - recvName = nt.Obj().Name() + if recv := fn.Type().(*types.Signature).Recv(); recv != nil { + _, named := typesinternal.ReceiverNamed(recv) + if named == nil { + return 0, false // anon struct/interface } + recvName = named.Obj().Name() } skip, ok := kvFuncs[recvName][fn.Name()] return skip, ok diff --git a/go/analysis/passes/slog/testdata/src/a/a.go b/go/analysis/passes/slog/testdata/src/a/a.go index 0e76da7a983..8b3778a1462 100644 --- a/go/analysis/passes/slog/testdata/src/a/a.go +++ b/go/analysis/passes/slog/testdata/src/a/a.go @@ -107,6 +107,18 @@ func F() { slog.Debug("msg", any(nil), "a", 2) slog.Debug("msg", any(nil), "a", 2, "b") // want `call to slog.Debug has a missing or misplaced 
value` slog.Debug("msg", any(nil), 2, 3, 4) // want "slog.Debug arg \\\"3\\\" should probably be a string or a slog.Attr \\(previous arg \\\"2\\\" cannot be a key\\)" + + // In these cases, an argument in key position is an interface, but we can glean useful information about it. + + // An error interface in key position is definitely invalid: it can't be a string + // or slog.Attr. + var err error + slog.Error("msg", err) // want `slog.Error arg "err" should be a string or a slog.Attr` + + // slog.Attr implements fmt.Stringer, but string does not, so assume the arg is an Attr. + var stringer fmt.Stringer + slog.Info("msg", stringer, "a", 1) + slog.Info("msg", stringer, 1) // want `slog.Info arg "1" should be a string or a slog.Attr` } func All() { diff --git a/go/analysis/passes/stdmethods/stdmethods_test.go b/go/analysis/passes/stdmethods/stdmethods_test.go index 9bfa0327feb..9df50fe1b10 100644 --- a/go/analysis/passes/stdmethods/stdmethods_test.go +++ b/go/analysis/passes/stdmethods/stdmethods_test.go @@ -9,16 +9,11 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/stdmethods" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - pkgs := []string{"a"} - if typeparams.Enabled { - pkgs = append(pkgs, "typeparams") - } - analysistest.Run(t, testdata, stdmethods.Analyzer, pkgs...) + analysistest.Run(t, testdata, stdmethods.Analyzer, "a", "typeparams") } func TestAnalyzeEncodingXML(t *testing.T) { diff --git a/go/analysis/passes/stdmethods/testdata/src/a/b.go b/go/analysis/passes/stdmethods/testdata/src/a/b.go index c0a16fb0426..9cf3994858b 100644 --- a/go/analysis/passes/stdmethods/testdata/src/a/b.go +++ b/go/analysis/passes/stdmethods/testdata/src/a/b.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build go1.18 -// +build go1.18 - package a type H int diff --git a/go/analysis/passes/stdversion/main.go b/go/analysis/passes/stdversion/main.go new file mode 100644 index 00000000000..a9efd0160eb --- /dev/null +++ b/go/analysis/passes/stdversion/main.go @@ -0,0 +1,15 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build ignore +// +build ignore + +package main + +import ( + "golang.org/x/tools/go/analysis/singlechecker" + "golang.org/x/tools/go/analysis/passes/stdversion" +) + +func main() { singlechecker.Main(stdversion.Analyzer) } diff --git a/go/analysis/passes/stdversion/stdversion.go b/go/analysis/passes/stdversion/stdversion.go new file mode 100644 index 00000000000..75d8697759e --- /dev/null +++ b/go/analysis/passes/stdversion/stdversion.go @@ -0,0 +1,159 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package stdversion reports uses of standard library symbols that are +// "too new" for the Go version in force in the referring file. +package stdversion + +import ( + "go/ast" + "go/build" + "go/types" + "regexp" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/versions" +) + +const Doc = `report uses of too-new standard library symbols + +The stdversion analyzer reports references to symbols in the standard +library that were introduced by a Go release higher than the one in +force in the referring file. (Recall that the file's Go version is +defined by the 'go' directive its module's go.mod file, or by a +"//go:build go1.X" build tag at the top of the file.) 
+ +The analyzer does not report a diagnostic for a reference to a "too +new" field or method of a type that is itself "too new", as this may +have false positives, for example if fields or methods are accessed +through a type alias that is guarded by a Go version constraint. +` + +var Analyzer = &analysis.Analyzer{ + Name: "stdversion", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stdversion", + RunDespiteErrors: true, + Run: run, +} + +func run(pass *analysis.Pass) (any, error) { + // Prior to go1.22, versions.FileVersion returns only the + // toolchain version, which is of no use to us, so + // disable this analyzer on earlier versions. + if !slicesContains(build.Default.ReleaseTags, "go1.22") { + return nil, nil + } + + // Don't report diagnostics for modules marked before go1.21, + // since at that time the go directive wasn't clearly + // specified as a toolchain requirement. + // + // TODO(adonovan): after go1.21, call GoVersion directly. + pkgVersion := any(pass.Pkg).(interface{ GoVersion() string }).GoVersion() + if !versions.AtLeast(pkgVersion, "go1.21") { + return nil, nil + } + + // disallowedSymbols returns the set of standard library symbols + // in a given package that are disallowed at the specified Go version. + type key struct { + pkg *types.Package + version string + } + memo := make(map[key]map[types.Object]string) // records symbol's minimum Go version + disallowedSymbols := func(pkg *types.Package, version string) map[types.Object]string { + k := key{pkg, version} + disallowed, ok := memo[k] + if !ok { + disallowed = typesinternal.TooNewStdSymbols(pkg, version) + memo[k] = disallowed + } + return disallowed + } + + // Scan the syntax looking for references to symbols + // that are disallowed by the version of the file. 
+ inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.File)(nil), + (*ast.Ident)(nil), + } + var fileVersion string // "" => no check + inspect.Preorder(nodeFilter, func(n ast.Node) { + switch n := n.(type) { + case *ast.File: + if isGenerated(n) { + // Suppress diagnostics in generated files (such as cgo). + fileVersion = "" + } else { + fileVersion = versions.Lang(versions.FileVersion(pass.TypesInfo, n)) + // (may be "" if unknown) + } + + case *ast.Ident: + if fileVersion != "" { + if obj, ok := pass.TypesInfo.Uses[n]; ok && obj.Pkg() != nil { + disallowed := disallowedSymbols(obj.Pkg(), fileVersion) + if minVersion, ok := disallowed[origin(obj)]; ok { + noun := "module" + if fileVersion != pkgVersion { + noun = "file" + } + pass.ReportRangef(n, "%s.%s requires %v or later (%s is %s)", + obj.Pkg().Name(), obj.Name(), minVersion, noun, fileVersion) + } + } + } + } + }) + return nil, nil +} + +// Reduced from x/tools/gopls/internal/golang/util.go. Good enough for now. +// TODO(adonovan): use ast.IsGenerated in go1.21. +func isGenerated(f *ast.File) bool { + for _, group := range f.Comments { + for _, comment := range group.List { + if matched := generatedRx.MatchString(comment.Text); matched { + return true + } + } + } + return false +} + +// Matches cgo generated comment as well as the proposed standard: +// +// https://golang.org/s/generatedcode +var generatedRx = regexp.MustCompile(`// .*DO NOT EDIT\.?`) + +// origin returns the original uninstantiated symbol for obj. +func origin(obj types.Object) types.Object { + switch obj := obj.(type) { + case *types.Var: + return obj.Origin() + case *types.Func: + return obj.Origin() + case *types.TypeName: + if named, ok := obj.Type().(*types.Named); ok { // (don't unalias) + return named.Origin().Obj() + } + } + return obj +} + +// TODO(adonovan): use go1.21 slices.Contains. 
+func slicesContains[S ~[]E, E comparable](slice S, x E) bool { + for _, elem := range slice { + if elem == x { + return true + } + } + return false +} diff --git a/go/analysis/passes/stdversion/stdversion_test.go b/go/analysis/passes/stdversion/stdversion_test.go new file mode 100644 index 00000000000..efee7babacb --- /dev/null +++ b/go/analysis/passes/stdversion/stdversion_test.go @@ -0,0 +1,56 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package stdversion_test + +import ( + "os" + "path/filepath" + "testing" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/go/analysis/passes/stdversion" + "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/txtar" +) + +func Test(t *testing.T) { + // The test relies on go1.21 std symbols, but the analyzer + // itself requires the go1.22 implementation of versions.FileVersions. + testenv.NeedsGo1Point(t, 22) + + testfile := filepath.Join(analysistest.TestData(), "test.txtar") + runTxtarFile(t, testfile, stdversion.Analyzer, + "example.com/a", + "example.com/sub", + "example.com/old") +} + +// runTxtarFile unpacks a txtar archive to a directory, and runs +// analyzer on the given patterns. +// +// This is compatible with a go.mod file. +// +// Plundered from loopclosure_test.go. +// TODO(golang/go#46136): add module support to analysistest. +func runTxtarFile(t *testing.T, path string, analyzer *analysis.Analyzer, patterns ...string) { + ar, err := txtar.ParseFile(path) + if err != nil { + t.Fatal(err) + } + + dir := t.TempDir() + for _, file := range ar.Files { + name, content := file.Name, file.Data + + filename := filepath.Join(dir, name) + os.MkdirAll(filepath.Dir(filename), 0777) // ignore error + if err := os.WriteFile(filename, content, 0666); err != nil { + t.Fatal(err) + } + } + + analysistest.Run(t, dir, analyzer, patterns...) 
+} diff --git a/go/analysis/passes/stdversion/testdata/test.txtar b/go/analysis/passes/stdversion/testdata/test.txtar new file mode 100644 index 00000000000..796e1594042 --- /dev/null +++ b/go/analysis/passes/stdversion/testdata/test.txtar @@ -0,0 +1,101 @@ +Test of "too new" diagnostics from the stdversion analyzer. + +This test references go1.21 and go1.22 symbols from std. + +It uses a txtar file due to golang/go#37054. + +See also gopls/internal/test/marker/testdata/diagnostics/stdversion.txt +which runs the same test within the gopls analysis driver, to ensure +coverage of per-file Go version support. + +-- go.mod -- +module example.com + +go 1.21 + +-- a/a.go -- +package a + +import "go/types" + +func _() { + // old package-level type + var _ types.Info // ok: defined by go1.0 + + // new field of older type + _ = new(types.Info).FileVersions // want `types.FileVersions requires go1.22 or later \(module is go1.21\)` + + // new method of older type + new(types.Info).PkgNameOf // want `types.PkgNameOf requires go1.22 or later \(module is go1.21\)` + + // new package-level type + var a types.Alias // want `types.Alias requires go1.22 or later \(module is go1.21\)` + + // new method of new type + a.Underlying() // no diagnostic +} + +-- sub/go.mod -- +module example.com/sub + +go 1.21 + +-- sub/sub.go -- +package sub + +import "go/types" + +func _() { + // old package-level type + var _ types.Info // ok: defined by go1.0 + + // new field of older type + _ = new(types.Info).FileVersions // want `types.FileVersions requires go1.22 or later \(module is go1.21\)` + + // new method of older type + new(types.Info).PkgNameOf // want `types.PkgNameOf requires go1.22 or later \(module is go1.21\)` + + // new package-level type + var a types.Alias // want `types.Alias requires go1.22 or later \(module is go1.21\)` + + // new method of new type + a.Underlying() // no diagnostic +} + +invalid syntax // exercise RunDespiteErrors + +-- sub/tagged.go -- +//go:build go1.22 + 
+package sub + +import "go/types" + +func _() { + // old package-level type + var _ types.Info + + // new field of older type + _ = new(types.Info).FileVersions + + // new method of older type + new(types.Info).PkgNameOf + + // new package-level type + var a types.Alias + + // new method of new type + a.Underlying() +} + +-- old/go.mod -- +module example.com/old + +go 1.5 + +-- old/old.go -- +package old + +import "go/types" + +var _ types.Alias // no diagnostic: go.mod is too old for us to care diff --git a/go/analysis/passes/stringintconv/string.go b/go/analysis/passes/stringintconv/string.go index bb04dae6269..16a4b3e5516 100644 --- a/go/analysis/passes/stringintconv/string.go +++ b/go/analysis/passes/stringintconv/string.go @@ -15,6 +15,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -59,10 +60,12 @@ func describe(typ, inType types.Type, inName string) string { } func typeName(typ types.Type) string { - if v, _ := typ.(interface{ Name() string }); v != nil { + typ = aliases.Unalias(typ) + // TODO(adonovan): don't discard alias type, return its name. 
+ if v, _ := typ.(*types.Basic); v != nil { return v.Name() } - if v, _ := typ.(interface{ Obj() *types.TypeName }); v != nil { + if v, _ := typ.(interface{ Obj() *types.TypeName }); v != nil { // Named, TypeParam return v.Obj().Name() } return "" @@ -194,16 +197,15 @@ func run(pass *analysis.Pass) (interface{}, error) { func structuralTypes(t types.Type) ([]types.Type, error) { var structuralTypes []types.Type - switch t := t.(type) { - case *typeparams.TypeParam: - terms, err := typeparams.StructuralTerms(t) + if tp, ok := aliases.Unalias(t).(*types.TypeParam); ok { + terms, err := typeparams.StructuralTerms(tp) if err != nil { return nil, err } for _, term := range terms { structuralTypes = append(structuralTypes, term.Type()) } - default: + } else { structuralTypes = append(structuralTypes, t) } return structuralTypes, nil diff --git a/go/analysis/passes/stringintconv/string_test.go b/go/analysis/passes/stringintconv/string_test.go index 13f2133b881..0b1f99bf862 100644 --- a/go/analysis/passes/stringintconv/string_test.go +++ b/go/analysis/passes/stringintconv/string_test.go @@ -9,14 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/stringintconv" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - pkgs := []string{"a"} - if typeparams.Enabled { - pkgs = append(pkgs, "typeparams") - } - analysistest.RunWithSuggestedFixes(t, testdata, stringintconv.Analyzer, pkgs...) + analysistest.RunWithSuggestedFixes(t, testdata, stringintconv.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/testinggoroutine/doc.go b/go/analysis/passes/testinggoroutine/doc.go index a68adb12b4c..4cd5b71e9ec 100644 --- a/go/analysis/passes/testinggoroutine/doc.go +++ b/go/analysis/passes/testinggoroutine/doc.go @@ -7,7 +7,7 @@ // // # Analyzer testinggoroutine // -// testinggoroutine: report calls to (*testing.T).Fatal from goroutines started by a test. 
+// testinggoroutine: report calls to (*testing.T).Fatal from goroutines started by a test // // Functions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and // Skip{,f,Now} methods of *testing.T, must be called from the test goroutine itself. diff --git a/go/analysis/passes/testinggoroutine/testdata/src/a/a.go b/go/analysis/passes/testinggoroutine/testdata/src/a/a.go index c8fc91bb29b..4e46a46c55f 100644 --- a/go/analysis/passes/testinggoroutine/testdata/src/a/a.go +++ b/go/analysis/passes/testinggoroutine/testdata/src/a/a.go @@ -275,6 +275,212 @@ func TestWithCustomType(t *testing.T) { } } +func helpTB(tb testing.TB) { + tb.FailNow() +} + +func TestTB(t *testing.T) { + go helpTB(t) // want "call to .+TB.+FailNow from a non-test goroutine" +} + func TestIssue48124(t *testing.T) { - go h() + go helper(t) // want "call to .+T.+Skip from a non-test goroutine" +} + +func TestEachCall(t *testing.T) { + go helper(t) // want "call to .+T.+Skip from a non-test goroutine" + go helper(t) // want "call to .+T.+Skip from a non-test goroutine" +} + +func TestWithSubtest(t *testing.T) { + t.Run("name", func(t2 *testing.T) { + t.FailNow() // want "call to .+T.+FailNow on t defined outside of the subtest" + t2.Fatal() + }) + + f := func(t3 *testing.T) { + t.FailNow() + t3.Fatal() + } + t.Run("name", f) // want "call to .+T.+FailNow on t defined outside of the subtest" + + g := func(t4 *testing.T) { + t.FailNow() + t4.Fatal() + } + g(t) + + t.Run("name", helper) + + go t.Run("name", func(t2 *testing.T) { + t.FailNow() // want "call to .+T.+FailNow on t defined outside of the subtest" + t2.Fatal() + }) +} + +func TestMultipleVariables(t *testing.T) { + { // short decl + f, g := func(t1 *testing.T) { + t1.Fatal() + }, func(t2 *testing.T) { + t2.Error() + } + + go f(t) // want "call to .+T.+Fatal from a non-test goroutine" + go g(t) + + t.Run("name", f) + t.Run("name", g) + } + + { // var decl + var f, g = func(t1 *testing.T) { + t1.Fatal() + }, func(t2 
*testing.T) { + t2.Error() + } + + go f(t) // want "call to .+T.+Fatal from a non-test goroutine" + go g(t) + + t.Run("name", f) + t.Run("name", g) + } +} + +func BadIgnoresMultipleAssignments(t *testing.T) { + { + f := func(t1 *testing.T) { + t1.Fatal() + } + go f(t) // want "call to .+T.+Fatal from a non-test goroutine" + + f = func(t2 *testing.T) { + t2.Error() + } + go f(t) // want "call to .+T.+Fatal from a non-test goroutine" + } + { + f := func(t1 *testing.T) { + t1.Error() + } + go f(t) + + f = func(t2 *testing.T) { + t2.FailNow() + } + go f(t) // false negative + } +} + +func TestGoDoesNotDescendIntoSubtest(t *testing.T) { + f := func(t2 *testing.T) { + g := func(t3 *testing.T) { + t3.Fatal() // fine + } + t2.Run("name", g) + t2.FailNow() // bad + } + go f(t) // want "call to .+T.+FailNow from a non-test goroutine" +} + +func TestFreeVariableAssignedWithinEnclosing(t *testing.T) { + f := func(t2 *testing.T) { + inner := t + inner.FailNow() + } + + go f(nil) // want "call to .+T.+FailNow from a non-test goroutine" + + t.Run("name", func(t3 *testing.T) { + go f(nil) // want "call to .+T.+FailNow from a non-test goroutine" + }) + + // Without pointer analysis we cannot tell if inner is t or t2. + // So we accept a false negatives on the following examples. 
+ t.Run("name", f) + + go func(_ *testing.T) { + t.Run("name", f) + }(nil) + + go t.Run("name", f) +} + +func TestWithUnusedSelection(t *testing.T) { + go func() { + _ = t.FailNow + }() + t.Run("name", func(t2 *testing.T) { + _ = t.FailNow + }) +} + +func TestMethodExprsAreIgnored(t *testing.T) { + go func() { + (*testing.T).FailNow(t) + }() +} + +func TestRecursive(t *testing.T) { + t.SkipNow() + + go TestRecursive(t) // want "call to .+T.+SkipNow from a non-test goroutine" + + t.Run("name", TestRecursive) +} + +func TestMethodSelection(t *testing.T) { + var h helperType + + go h.help(t) // want "call to .+T.+SkipNow from a non-test goroutine" + t.Run("name", h.help) +} + +type helperType struct{} + +func (h *helperType) help(t *testing.T) { t.SkipNow() } + +func TestIssue63799a(t *testing.T) { + done := make(chan struct{}) + go func() { + defer close(done) + t.Run("", func(t *testing.T) { + t.Fatal() // No warning. This is in a subtest. + }) + }() + <-done +} + +func TestIssue63799b(t *testing.T) { + // Simplified from go.dev/cl/538698 + + // nondet is some unspecified boolean placeholder. + var nondet func() bool + + t.Run("nohup", func(t *testing.T) { + if nondet() { + t.Skip("ignored") + } + + go t.Run("nohup-i", func(t *testing.T) { + t.Parallel() + if nondet() { + if nondet() { + t.Skip("go.dev/cl/538698 wanted to have skip here") + } + + t.Error("ignored") + } else { + t.Log("ignored") + } + }) + }) +} + +func TestIssue63849(t *testing.T) { + go func() { + helper(t) // False negative. We do not do an actual interprodecural reachability analysis. 
+ }() + go helper(t) // want "call to .+T.+Skip from a non-test goroutine" } diff --git a/go/analysis/passes/testinggoroutine/testdata/src/a/b.go b/go/analysis/passes/testinggoroutine/testdata/src/a/b.go index 5e95177f404..1169c3fa5de 100644 --- a/go/analysis/passes/testinggoroutine/testdata/src/a/b.go +++ b/go/analysis/passes/testinggoroutine/testdata/src/a/b.go @@ -4,4 +4,8 @@ package a -func h() {} +import "testing" + +func helper(t *testing.T) { + t.Skip() +} diff --git a/go/analysis/passes/testinggoroutine/testinggoroutine.go b/go/analysis/passes/testinggoroutine/testinggoroutine.go index 907b71503e0..828f95bc862 100644 --- a/go/analysis/passes/testinggoroutine/testinggoroutine.go +++ b/go/analysis/passes/testinggoroutine/testinggoroutine.go @@ -6,18 +6,29 @@ package testinggoroutine import ( _ "embed" + "fmt" "go/ast" + "go/token" + "go/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/aliases" ) //go:embed doc.go var doc string +var reportSubtest bool + +func init() { + Analyzer.Flags.BoolVar(&reportSubtest, "subtest", false, "whether to check if t.Run subtest is terminated correctly; experimental") +} + var Analyzer = &analysis.Analyzer{ Name: "testinggoroutine", Doc: analysisutil.MustExtractDoc(doc, "testinggoroutine"), @@ -26,15 +37,6 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -var forbidden = map[string]bool{ - "FailNow": true, - "Fatal": true, - "Fatalf": true, - "Skip": true, - "Skipf": true, - "SkipNow": true, -} - func run(pass *analysis.Pass) (interface{}, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) @@ -42,38 +44,90 @@ func run(pass *analysis.Pass) (interface{}, error) { return nil, nil } - // Filter out anything that isn't a 
function declaration. - onlyFuncs := []ast.Node{ - (*ast.FuncDecl)(nil), + toDecl := localFunctionDecls(pass.TypesInfo, pass.Files) + + // asyncs maps nodes whose statements will be executed concurrently + // with respect to some test function, to the call sites where they + // are invoked asynchronously. There may be multiple such call sites + // for e.g. test helpers. + asyncs := make(map[ast.Node][]*asyncCall) + var regions []ast.Node + addCall := func(c *asyncCall) { + if c != nil { + r := c.region + if asyncs[r] == nil { + regions = append(regions, r) + } + asyncs[r] = append(asyncs[r], c) + } } - inspect.Nodes(onlyFuncs, func(node ast.Node, push bool) bool { - fnDecl, ok := node.(*ast.FuncDecl) - if !ok { + // Collect all of the go callee() and t.Run(name, callee) extents. + inspect.Nodes([]ast.Node{ + (*ast.FuncDecl)(nil), + (*ast.GoStmt)(nil), + (*ast.CallExpr)(nil), + }, func(node ast.Node, push bool) bool { + if !push { return false } + switch node := node.(type) { + case *ast.FuncDecl: + return hasBenchmarkOrTestParams(node) - if !hasBenchmarkOrTestParams(fnDecl) { - return false + case *ast.GoStmt: + c := goAsyncCall(pass.TypesInfo, node, toDecl) + addCall(c) + + case *ast.CallExpr: + c := tRunAsyncCall(pass.TypesInfo, node) + addCall(c) } + return true + }) - // Now traverse the benchmark/test's body and check that none of the - // forbidden methods are invoked in the goroutines within the body. - ast.Inspect(fnDecl, func(n ast.Node) bool { - goStmt, ok := n.(*ast.GoStmt) + // Check for t.Forbidden() calls within each region r that is a + // callee in some go r() or a t.Run("name", r). + // + // Also considers a special case when r is a go t.Forbidden() call. + for _, region := range regions { + ast.Inspect(region, func(n ast.Node) bool { + if n == region { + return true // always descend into the region itself. + } else if asyncs[n] != nil { + return false // will be visited by another region. 
+ } + + call, ok := n.(*ast.CallExpr) if !ok { return true } + x, sel, fn := forbiddenMethod(pass.TypesInfo, call) + if x == nil { + return true + } - checkGoStmt(pass, goStmt) + for _, e := range asyncs[region] { + if !withinScope(e.scope, x) { + forbidden := formatMethod(sel, fn) // e.g. "(*testing.T).Forbidden - // No need to further traverse the GoStmt since right - // above we manually traversed it in the ast.Inspect(goStmt, ...) - return false + var context string + var where analysis.Range = e.async // Put the report at the go fun() or t.Run(name, fun). + if _, local := e.fun.(*ast.FuncLit); local { + where = call // Put the report at the t.Forbidden() call. + } else if id, ok := e.fun.(*ast.Ident); ok { + context = fmt.Sprintf(" (%s calls %s)", id.Name, forbidden) + } + if _, ok := e.async.(*ast.GoStmt); ok { + pass.ReportRangef(where, "call to %s from a non-test goroutine%s", forbidden, context) + } else if reportSubtest { + pass.ReportRangef(where, "call to %s on %s defined outside of the subtest%s", forbidden, x.Name(), context) + } + } + } + return true }) - - return false - }) + } return nil, nil } @@ -100,7 +154,6 @@ func typeIsTestingDotTOrB(expr ast.Expr) (string, bool) { if !ok { return "", false } - varPkg := selExpr.X.(*ast.Ident) if varPkg.Name != "testing" { return "", false @@ -111,73 +164,116 @@ func typeIsTestingDotTOrB(expr ast.Expr) (string, bool) { return varTypeName, ok } -// goStmtFunc returns the ast.Node of a call expression -// that was invoked as a go statement. Currently, only -// function literals declared in the same function, and -// static calls within the same package are supported. 
-func goStmtFun(goStmt *ast.GoStmt) ast.Node { - switch fun := goStmt.Call.Fun.(type) { - case *ast.IndexExpr, *typeparams.IndexListExpr: - x, _, _, _ := typeparams.UnpackIndexExpr(fun) - id, _ := x.(*ast.Ident) - if id == nil { - break - } - if id.Obj == nil { - break - } - if funDecl, ok := id.Obj.Decl.(ast.Node); ok { - return funDecl - } - case *ast.Ident: - // TODO(cuonglm): improve this once golang/go#48141 resolved. - if fun.Obj == nil { - break - } - if funDecl, ok := fun.Obj.Decl.(ast.Node); ok { - return funDecl - } - case *ast.FuncLit: - return goStmt.Call.Fun +// asyncCall describes a region of code that needs to be checked for +// t.Forbidden() calls as it is started asynchronously from an async +// node go fun() or t.Run(name, fun). +type asyncCall struct { + region ast.Node // region of code to check for t.Forbidden() calls. + async ast.Node // *ast.GoStmt or *ast.CallExpr (for t.Run) + scope ast.Node // Report t.Forbidden() if t is not declared within scope. + fun ast.Expr // fun in go fun() or t.Run(name, fun) +} + +// withinScope returns true if x.Pos() is in [scope.Pos(), scope.End()]. +func withinScope(scope ast.Node, x *types.Var) bool { + if scope != nil { + return x.Pos() != token.NoPos && scope.Pos() <= x.Pos() && x.Pos() <= scope.End() } - return goStmt.Call + return false } -// checkGoStmt traverses the goroutine and checks for the -// use of the forbidden *testing.(B, T) methods. -func checkGoStmt(pass *analysis.Pass, goStmt *ast.GoStmt) { - fn := goStmtFun(goStmt) - // Otherwise examine the goroutine to check for the forbidden methods. - ast.Inspect(fn, func(n ast.Node) bool { - selExpr, ok := n.(*ast.SelectorExpr) - if !ok { - return true - } +// goAsyncCall returns the extent of a call from a go fun() statement. 
+func goAsyncCall(info *types.Info, goStmt *ast.GoStmt, toDecl func(*types.Func) *ast.FuncDecl) *asyncCall { + call := goStmt.Call - _, bad := forbidden[selExpr.Sel.Name] - if !bad { - return true + fun := astutil.Unparen(call.Fun) + if id := funcIdent(fun); id != nil { + if lit := funcLitInScope(id); lit != nil { + return &asyncCall{region: lit, async: goStmt, scope: nil, fun: fun} } + } - // Now filter out false positives by the import-path/type. - ident, ok := selExpr.X.(*ast.Ident) - if !ok { - return true + if fn := typeutil.StaticCallee(info, call); fn != nil { // static call or method in the package? + if decl := toDecl(fn); decl != nil { + return &asyncCall{region: decl, async: goStmt, scope: nil, fun: fun} } - if ident.Obj == nil || ident.Obj.Decl == nil { - return true - } - field, ok := ident.Obj.Decl.(*ast.Field) - if !ok { - return true - } - if typeName, ok := typeIsTestingDotTOrB(field.Type); ok { - var fnRange analysis.Range = goStmt - if _, ok := fn.(*ast.FuncLit); ok { - fnRange = selExpr - } - pass.ReportRangef(fnRange, "call to (*%s).%s from a non-test goroutine", typeName, selExpr.Sel) + } + + // Check go statement for go t.Forbidden() or go func(){t.Forbidden()}(). + return &asyncCall{region: goStmt, async: goStmt, scope: nil, fun: fun} +} + +// tRunAsyncCall returns the extent of a call from a t.Run("name", fun) expression. +func tRunAsyncCall(info *types.Info, call *ast.CallExpr) *asyncCall { + if len(call.Args) != 2 { + return nil + } + run := typeutil.Callee(info, call) + if run, ok := run.(*types.Func); !ok || !isMethodNamed(run, "testing", "Run") { + return nil + } + + fun := astutil.Unparen(call.Args[1]) + if lit, ok := fun.(*ast.FuncLit); ok { // function lit? + return &asyncCall{region: lit, async: call, scope: lit, fun: fun} + } + + if id := funcIdent(fun); id != nil { + if lit := funcLitInScope(id); lit != nil { // function lit in variable? 
+ return &asyncCall{region: lit, async: call, scope: lit, fun: fun} } - return true - }) + } + + // Check within t.Run(name, fun) for calls to t.Forbidden, + // e.g. t.Run(name, func(t *testing.T){ t.Forbidden() }) + return &asyncCall{region: call, async: call, scope: fun, fun: fun} +} + +var forbidden = []string{ + "FailNow", + "Fatal", + "Fatalf", + "Skip", + "Skipf", + "SkipNow", +} + +// forbiddenMethod decomposes a call x.m() into (x, x.m, m) where +// x is a variable, x.m is a selection, and m is the static callee m. +// Returns (nil, nil, nil) if call is not of this form. +func forbiddenMethod(info *types.Info, call *ast.CallExpr) (*types.Var, *types.Selection, *types.Func) { + // Compare to typeutil.StaticCallee. + fun := astutil.Unparen(call.Fun) + selExpr, ok := fun.(*ast.SelectorExpr) + if !ok { + return nil, nil, nil + } + sel := info.Selections[selExpr] + if sel == nil { + return nil, nil, nil + } + + var x *types.Var + if id, ok := astutil.Unparen(selExpr.X).(*ast.Ident); ok { + x, _ = info.Uses[id].(*types.Var) + } + if x == nil { + return nil, nil, nil + } + + fn, _ := sel.Obj().(*types.Func) + if fn == nil || !isMethodNamed(fn, "testing", forbidden...) 
{ + return nil, nil, nil + } + return x, sel, fn +} + +func formatMethod(sel *types.Selection, fn *types.Func) string { + var ptr string + rtype := sel.Recv() + if p, ok := aliases.Unalias(rtype).(*types.Pointer); ok { + ptr = "*" + rtype = p.Elem() + } + return fmt.Sprintf("(%s%s).%s", ptr, rtype.String(), fn.Name()) } diff --git a/go/analysis/passes/testinggoroutine/testinggoroutine_test.go b/go/analysis/passes/testinggoroutine/testinggoroutine_test.go index 56c4385c546..b74d67ed88a 100644 --- a/go/analysis/passes/testinggoroutine/testinggoroutine_test.go +++ b/go/analysis/passes/testinggoroutine/testinggoroutine_test.go @@ -9,14 +9,14 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/testinggoroutine" - "golang.org/x/tools/internal/typeparams" ) +func init() { + testinggoroutine.Analyzer.Flags.Set("subtest", "true") +} + func Test(t *testing.T) { testdata := analysistest.TestData() - pkgs := []string{"a"} - if typeparams.Enabled { - pkgs = append(pkgs, "typeparams") - } + pkgs := []string{"a", "typeparams"} analysistest.Run(t, testdata, testinggoroutine.Analyzer, pkgs...) } diff --git a/go/analysis/passes/testinggoroutine/util.go b/go/analysis/passes/testinggoroutine/util.go new file mode 100644 index 00000000000..ad815f19010 --- /dev/null +++ b/go/analysis/passes/testinggoroutine/util.go @@ -0,0 +1,96 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testinggoroutine + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/internal/typeparams" +) + +// AST and types utilities that not specific to testinggoroutines. + +// localFunctionDecls returns a mapping from *types.Func to *ast.FuncDecl in files. 
+func localFunctionDecls(info *types.Info, files []*ast.File) func(*types.Func) *ast.FuncDecl { + var fnDecls map[*types.Func]*ast.FuncDecl // computed lazily + return func(f *types.Func) *ast.FuncDecl { + if f != nil && fnDecls == nil { + fnDecls = make(map[*types.Func]*ast.FuncDecl) + for _, file := range files { + for _, decl := range file.Decls { + if fnDecl, ok := decl.(*ast.FuncDecl); ok { + if fn, ok := info.Defs[fnDecl.Name].(*types.Func); ok { + fnDecls[fn] = fnDecl + } + } + } + } + } + // TODO: set f = f.Origin() here. + return fnDecls[f] + } +} + +// isMethodNamed returns true if f is a method defined +// in package with the path pkgPath with a name in names. +func isMethodNamed(f *types.Func, pkgPath string, names ...string) bool { + if f == nil { + return false + } + if f.Pkg() == nil || f.Pkg().Path() != pkgPath { + return false + } + if f.Type().(*types.Signature).Recv() == nil { + return false + } + for _, n := range names { + if f.Name() == n { + return true + } + } + return false +} + +func funcIdent(fun ast.Expr) *ast.Ident { + switch fun := astutil.Unparen(fun).(type) { + case *ast.IndexExpr, *ast.IndexListExpr: + x, _, _, _ := typeparams.UnpackIndexExpr(fun) // necessary? + id, _ := x.(*ast.Ident) + return id + case *ast.Ident: + return fun + default: + return nil + } +} + +// funcLitInScope returns a FuncLit that id is at least initially assigned to. +// +// TODO: This is closely tied to id.Obj which is deprecated. +func funcLitInScope(id *ast.Ident) *ast.FuncLit { + // Compare to (*ast.Object).Pos(). 
+ if id.Obj == nil { + return nil + } + var rhs ast.Expr + switch d := id.Obj.Decl.(type) { + case *ast.AssignStmt: + for i, x := range d.Lhs { + if ident, isIdent := x.(*ast.Ident); isIdent && ident.Name == id.Name && i < len(d.Rhs) { + rhs = d.Rhs[i] + } + } + case *ast.ValueSpec: + for i, n := range d.Names { + if n.Name == id.Name && i < len(d.Values) { + rhs = d.Values[i] + } + } + } + lit, _ := rhs.(*ast.FuncLit) + return lit +} diff --git a/go/analysis/passes/tests/testdata/src/a/go118_test.go b/go/analysis/passes/tests/testdata/src/a/go118_test.go index e2bc3f3a0bd..a2ed9a4496b 100644 --- a/go/analysis/passes/tests/testdata/src/a/go118_test.go +++ b/go/analysis/passes/tests/testdata/src/a/go118_test.go @@ -1,6 +1,3 @@ -//go:build go1.18 -// +build go1.18 - package a import ( diff --git a/go/analysis/passes/tests/tests.go b/go/analysis/passes/tests/tests.go index d0b0ebb1011..39d0d9e429e 100644 --- a/go/analysis/passes/tests/tests.go +++ b/go/analysis/passes/tests/tests.go @@ -17,7 +17,6 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/internal/typeparams" ) //go:embed doc.go @@ -253,6 +252,8 @@ func validateFuzzArgs(pass *analysis.Pass, params *types.Tuple, expr ast.Expr) b } func isTestingType(typ types.Type, testingType string) bool { + // No Unalias here: I doubt "go test" recognizes + // "type A = *testing.T; func Test(A) {}" as a test. 
ptr, ok := typ.(*types.Pointer) if !ok { return false @@ -391,7 +392,7 @@ func checkExampleName(pass *analysis.Pass, fn *ast.FuncDecl) { if results := fn.Type.Results; results != nil && len(results.List) != 0 { pass.Reportf(fn.Pos(), "%s should return nothing", fnName) } - if tparams := typeparams.ForFuncType(fn.Type); tparams != nil && len(tparams.List) > 0 { + if tparams := fn.Type.TypeParams; tparams != nil && len(tparams.List) > 0 { pass.Reportf(fn.Pos(), "%s should not have type params", fnName) } @@ -460,7 +461,7 @@ func checkTest(pass *analysis.Pass, fn *ast.FuncDecl, prefix string) { return } - if tparams := typeparams.ForFuncType(fn.Type); tparams != nil && len(tparams.List) > 0 { + if tparams := fn.Type.TypeParams; tparams != nil && len(tparams.List) > 0 { // Note: cmd/go/internal/load also errors about TestXXX and BenchmarkXXX functions with type parameters. // We have currently decided to also warn before compilation/package loading. This can help users in IDEs. // TODO(adonovan): use ReportRangef(tparams). diff --git a/go/analysis/passes/tests/tests_test.go b/go/analysis/passes/tests/tests_test.go index 740adc5d5e4..745423466d7 100644 --- a/go/analysis/passes/tests/tests_test.go +++ b/go/analysis/passes/tests/tests_test.go @@ -9,18 +9,14 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/tests" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - pkgs := []string{ + analysistest.Run(t, testdata, tests.Analyzer, "a", // loads "a", "a [a.test]", and "a.test" "b_x_test", // loads "b" and "b_x_test" "divergent", - } - if typeparams.Enabled { - pkgs = append(pkgs, "typeparams") - } - analysistest.Run(t, testdata, tests.Analyzer, pkgs...) 
+ "typeparams", + ) } diff --git a/go/analysis/passes/timeformat/timeformat.go b/go/analysis/passes/timeformat/timeformat.go index eb84502bd99..4a6c6b8bc6c 100644 --- a/go/analysis/passes/timeformat/timeformat.go +++ b/go/analysis/passes/timeformat/timeformat.go @@ -107,7 +107,7 @@ func badFormatAt(info *types.Info, e ast.Expr) int { return -1 } - t, ok := tv.Type.(*types.Basic) + t, ok := tv.Type.(*types.Basic) // sic, no unalias if !ok || t.Info()&types.IsString == 0 { return -1 } diff --git a/go/analysis/passes/unmarshal/unmarshal.go b/go/analysis/passes/unmarshal/unmarshal.go index 7043baa899b..a7889fa4590 100644 --- a/go/analysis/passes/unmarshal/unmarshal.go +++ b/go/analysis/passes/unmarshal/unmarshal.go @@ -14,7 +14,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -70,12 +70,8 @@ func run(pass *analysis.Pass) (interface{}, error) { // (*"encoding/json".Decoder).Decode // (* "encoding/gob".Decoder).Decode // (* "encoding/xml".Decoder).Decode - t := recv.Type() - if ptr, ok := t.(*types.Pointer); ok { - t = ptr.Elem() - } - tname := t.(*types.Named).Obj() - if tname.Name() == "Decoder" { + _, named := typesinternal.ReceiverNamed(recv) + if tname := named.Obj(); tname.Name() == "Decoder" { switch tname.Pkg().Path() { case "encoding/json", "encoding/xml", "encoding/gob": argidx = 0 // func(interface{}) @@ -92,7 +88,7 @@ func run(pass *analysis.Pass) (interface{}, error) { t := pass.TypesInfo.Types[call.Args[argidx]].Type switch t.Underlying().(type) { - case *types.Pointer, *types.Interface, *typeparams.TypeParam: + case *types.Pointer, *types.Interface, *types.TypeParam: return } diff --git a/go/analysis/passes/unmarshal/unmarshal_test.go b/go/analysis/passes/unmarshal/unmarshal_test.go index e6171f2f090..1659d8d5900 100644 --- 
a/go/analysis/passes/unmarshal/unmarshal_test.go +++ b/go/analysis/passes/unmarshal/unmarshal_test.go @@ -9,14 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/unmarshal" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.Run(t, testdata, unmarshal.Analyzer, tests...) + analysistest.Run(t, testdata, unmarshal.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/unsafeptr/unsafeptr.go b/go/analysis/passes/unsafeptr/unsafeptr.go index 32e71ef979d..14e4a6c1e4b 100644 --- a/go/analysis/passes/unsafeptr/unsafeptr.go +++ b/go/analysis/passes/unsafeptr/unsafeptr.go @@ -17,6 +17,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/aliases" ) //go:embed doc.go @@ -88,7 +89,7 @@ func isSafeUintptr(info *types.Info, x ast.Expr) bool { // by the time we get to the conversion at the end. // For now approximate by saying that *Header is okay // but Header is not. 
- pt, ok := info.Types[x.X].Type.(*types.Pointer) + pt, ok := aliases.Unalias(info.Types[x.X].Type).(*types.Pointer) if ok && isReflectHeader(pt.Elem()) { return true } diff --git a/go/analysis/passes/unsafeptr/unsafeptr_test.go b/go/analysis/passes/unsafeptr/unsafeptr_test.go index 424de1f04f5..b926f045374 100644 --- a/go/analysis/passes/unsafeptr/unsafeptr_test.go +++ b/go/analysis/passes/unsafeptr/unsafeptr_test.go @@ -9,14 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/unsafeptr" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() - pkgs := []string{"a"} - if typeparams.Enabled { - pkgs = append(pkgs, "typeparams") - } - analysistest.Run(t, testdata, unsafeptr.Analyzer, pkgs...) + analysistest.Run(t, testdata, unsafeptr.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/unusedresult/testdata/src/typeparams/typeparams.go b/go/analysis/passes/unusedresult/testdata/src/typeparams/typeparams.go index 04d0e305842..0add516ac94 100644 --- a/go/analysis/passes/unusedresult/testdata/src/typeparams/typeparams.go +++ b/go/analysis/passes/unusedresult/testdata/src/typeparams/typeparams.go @@ -1,8 +1,6 @@ // Copyright 2015 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// -//go:build go1.18 package typeparams diff --git a/go/analysis/passes/unusedresult/testdata/src/typeparams/userdefs/userdefs.go b/go/analysis/passes/unusedresult/testdata/src/typeparams/userdefs/userdefs.go index 218cc9ac75e..e31c6257469 100644 --- a/go/analysis/passes/unusedresult/testdata/src/typeparams/userdefs/userdefs.go +++ b/go/analysis/passes/unusedresult/testdata/src/typeparams/userdefs/userdefs.go @@ -1,8 +1,6 @@ // Copyright 2015 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-// -//go:build go1.18 package userdefs @@ -25,4 +23,4 @@ type MultiTypeParam[T any, U any] struct { func (_ *MultiTypeParam[T, U]) String() string { return "MultiTypeParam" -} \ No newline at end of file +} diff --git a/go/analysis/passes/unusedresult/unusedresult.go b/go/analysis/passes/unusedresult/unusedresult.go index 7f79b4a7543..76f42b052e4 100644 --- a/go/analysis/passes/unusedresult/unusedresult.go +++ b/go/analysis/passes/unusedresult/unusedresult.go @@ -59,7 +59,25 @@ func init() { // List standard library functions here. // The context.With{Cancel,Deadline,Timeout} entries are // effectively redundant wrt the lostcancel analyzer. - funcs.Set("errors.New,fmt.Errorf,fmt.Sprintf,fmt.Sprint,sort.Reverse,context.WithValue,context.WithCancel,context.WithDeadline,context.WithTimeout") + funcs = stringSetFlag{ + "context.WithCancel": true, + "context.WithDeadline": true, + "context.WithTimeout": true, + "context.WithValue": true, + "errors.New": true, + "fmt.Errorf": true, + "fmt.Sprint": true, + "fmt.Sprintf": true, + "slices.Clip": true, + "slices.Compact": true, + "slices.CompactFunc": true, + "slices.Delete": true, + "slices.DeleteFunc": true, + "slices.Grow": true, + "slices.Insert": true, + "slices.Replace": true, + "sort.Reverse": true, + } Analyzer.Flags.Var(&funcs, "funcs", "comma-separated list of functions whose results must be used") diff --git a/go/analysis/passes/unusedresult/unusedresult_test.go b/go/analysis/passes/unusedresult/unusedresult_test.go index a2b079af00e..08dcf61bd3d 100644 --- a/go/analysis/passes/unusedresult/unusedresult_test.go +++ b/go/analysis/passes/unusedresult/unusedresult_test.go @@ -9,16 +9,11 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/unusedresult" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { testdata := analysistest.TestData() funcs := "typeparams/userdefs.MustUse,errors.New,fmt.Errorf,fmt.Sprintf,fmt.Sprint" 
unusedresult.Analyzer.Flags.Set("funcs", funcs) - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.Run(t, testdata, unusedresult.Analyzer, tests...) + analysistest.Run(t, testdata, unusedresult.Analyzer, "a", "typeparams") } diff --git a/go/analysis/passes/unusedwrite/main.go b/go/analysis/passes/unusedwrite/main.go new file mode 100644 index 00000000000..5cc182b6cfc --- /dev/null +++ b/go/analysis/passes/unusedwrite/main.go @@ -0,0 +1,16 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build ignore + +// The unusedwrite command runs the unusedwrite analyzer +// on the specified packages. +package main + +import ( + "golang.org/x/tools/go/analysis/passes/unusedwrite" + "golang.org/x/tools/go/analysis/singlechecker" +) + +func main() { singlechecker.Main(unusedwrite.Analyzer) } diff --git a/go/analysis/passes/unusedwrite/unusedwrite.go b/go/analysis/passes/unusedwrite/unusedwrite.go index f5d0f116cad..a99c5483351 100644 --- a/go/analysis/passes/unusedwrite/unusedwrite.go +++ b/go/analysis/passes/unusedwrite/unusedwrite.go @@ -6,13 +6,14 @@ package unusedwrite import ( _ "embed" - "fmt" "go/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/buildssa" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ssa" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/typeparams" ) //go:embed doc.go @@ -36,9 +37,9 @@ func run(pass *analysis.Pass) (interface{}, error) { for _, store := range reports { switch addr := store.Addr.(type) { case *ssa.FieldAddr: + field := typeparams.CoreType(typeparams.MustDeref(addr.X.Type())).(*types.Struct).Field(addr.Field) pass.Reportf(store.Pos(), - "unused write to field %s", - getFieldName(addr.X.Type(), addr.Field)) + "unused write to field %s", field.Name()) case *ssa.IndexAddr: 
pass.Reportf(store.Pos(), "unused write to array index %s", addr.Index) @@ -124,10 +125,7 @@ func isDeadStore(store *ssa.Store, obj ssa.Value, addr ssa.Instruction) bool { // isStructOrArray returns whether the underlying type is struct or array. func isStructOrArray(tp types.Type) bool { - if named, ok := tp.(*types.Named); ok { - tp = named.Underlying() - } - switch tp.(type) { + switch tp.Underlying().(type) { case *types.Array: return true case *types.Struct: @@ -145,7 +143,7 @@ func hasStructOrArrayType(v ssa.Value) bool { // func (t T) f() { ...} // the receiver object is of type *T: // t0 = local T (t) *T - if tp, ok := alloc.Type().(*types.Pointer); ok { + if tp, ok := aliases.Unalias(alloc.Type()).(*types.Pointer); ok { return isStructOrArray(tp.Elem()) } return false @@ -153,20 +151,3 @@ func hasStructOrArrayType(v ssa.Value) bool { } return isStructOrArray(v.Type()) } - -// getFieldName returns the name of a field in a struct. -// It the field is not found, then it returns the string format of the index. -// -// For example, for struct T {x int, y int), getFieldName(*T, 1) returns "y". 
-func getFieldName(tp types.Type, index int) string { - if pt, ok := tp.(*types.Pointer); ok { - tp = pt.Elem() - } - if named, ok := tp.(*types.Named); ok { - tp = named.Underlying() - } - if stp, ok := tp.(*types.Struct); ok { - return stp.Field(index).Name() - } - return fmt.Sprintf("%d", index) -} diff --git a/go/analysis/passes/usesgenerics/usesgenerics_test.go b/go/analysis/passes/usesgenerics/usesgenerics_test.go index 3dcff240d44..d5fb73ed16e 100644 --- a/go/analysis/passes/usesgenerics/usesgenerics_test.go +++ b/go/analysis/passes/usesgenerics/usesgenerics_test.go @@ -9,13 +9,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/usesgenerics" - "golang.org/x/tools/internal/typeparams" ) func Test(t *testing.T) { - if !typeparams.Enabled { - t.Skip("type parameters are not enabled at this Go version") - } testdata := analysistest.TestData() analysistest.Run(t, testdata, usesgenerics.Analyzer, "a", "b", "c", "d") } diff --git a/go/analysis/unitchecker/separate_test.go b/go/analysis/unitchecker/separate_test.go index cf0143f8203..37e74e481ec 100644 --- a/go/analysis/unitchecker/separate_test.go +++ b/go/analysis/unitchecker/separate_test.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.19 - package unitchecker_test // This file illustrates separate analysis with an example. diff --git a/go/analysis/unitchecker/unitchecker.go b/go/analysis/unitchecker/unitchecker.go index 0a40652c1b5..1fa0d1f68f9 100644 --- a/go/analysis/unitchecker/unitchecker.go +++ b/go/analysis/unitchecker/unitchecker.go @@ -50,7 +50,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/internal/analysisflags" "golang.org/x/tools/internal/facts" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/versions" ) // A Config describes a compilation unit to be analyzed. 
@@ -258,10 +258,11 @@ func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]re Defs: make(map[*ast.Ident]types.Object), Uses: make(map[*ast.Ident]types.Object), Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), Scopes: make(map[ast.Node]*types.Scope), Selections: make(map[*ast.SelectorExpr]*types.Selection), } - typeparams.InitInstanceInfo(info) + versions.InitFileVersions(info) pkg, err := tc.Check(cfg.ImportPath, fset, files, info) if err != nil { diff --git a/go/analysis/unitchecker/unitchecker_test.go b/go/analysis/unitchecker/unitchecker_test.go index 9f41c71f9a3..54d8fa81851 100644 --- a/go/analysis/unitchecker/unitchecker_test.go +++ b/go/analysis/unitchecker/unitchecker_test.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.19 - package unitchecker_test import ( @@ -169,6 +167,13 @@ func _() { cmd.Env = append(exported.Config.Env, "ENTRYPOINT=minivet") cmd.Dir = exported.Config.Dir + // TODO(golang/go#65729): this is unsound: any extra + // logging by the child process (e.g. due to GODEBUG + // options) will add noise to stderr, causing the + // CombinedOutput to be unparseable as JSON. But we + // can't simply use Output here as some of the tests + // look for substrings of stderr. Rework the test to + // be specific about which output stream to match. 
out, err := cmd.CombinedOutput() exitcode := 0 if exitErr, ok := err.(*exec.ExitError); ok { diff --git a/go/analysis/unitchecker/vet_std_test.go b/go/analysis/unitchecker/vet_std_test.go index 64d4378fe57..a79224c7188 100644 --- a/go/analysis/unitchecker/vet_std_test.go +++ b/go/analysis/unitchecker/vet_std_test.go @@ -33,6 +33,7 @@ import ( "golang.org/x/tools/go/analysis/passes/shift" "golang.org/x/tools/go/analysis/passes/sigchanyzer" "golang.org/x/tools/go/analysis/passes/stdmethods" + "golang.org/x/tools/go/analysis/passes/stdversion" "golang.org/x/tools/go/analysis/passes/stringintconv" "golang.org/x/tools/go/analysis/passes/structtag" "golang.org/x/tools/go/analysis/passes/testinggoroutine" @@ -70,6 +71,7 @@ func vet() { shift.Analyzer, sigchanyzer.Analyzer, stdmethods.Analyzer, + stdversion.Analyzer, stringintconv.Analyzer, structtag.Analyzer, testinggoroutine.Analyzer, diff --git a/go/ast/astutil/enclosing.go b/go/ast/astutil/enclosing.go index 9fa5aa192c2..2c4c4e23289 100644 --- a/go/ast/astutil/enclosing.go +++ b/go/ast/astutil/enclosing.go @@ -11,8 +11,6 @@ import ( "go/ast" "go/token" "sort" - - "golang.org/x/tools/internal/typeparams" ) // PathEnclosingInterval returns the node that encloses the source @@ -322,7 +320,7 @@ func childrenOf(n ast.Node) []ast.Node { children = append(children, n.Recv) } children = append(children, n.Name) - if tparams := typeparams.ForFuncType(n.Type); tparams != nil { + if tparams := n.Type.TypeParams; tparams != nil { children = append(children, tparams) } if n.Type.Params != nil { @@ -377,7 +375,7 @@ func childrenOf(n ast.Node) []ast.Node { tok(n.Lbrack, len("[")), tok(n.Rbrack, len("]"))) - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: children = append(children, tok(n.Lbrack, len("[")), tok(n.Rbrack, len("]"))) @@ -588,7 +586,7 @@ func NodeDescription(n ast.Node) string { return "decrement statement" case *ast.IndexExpr: return "index expression" - case *typeparams.IndexListExpr: + case 
*ast.IndexListExpr: return "index list expression" case *ast.InterfaceType: return "interface type" diff --git a/go/ast/astutil/enclosing_test.go b/go/ast/astutil/enclosing_test.go index de96d44960a..1f9d06ce1f8 100644 --- a/go/ast/astutil/enclosing_test.go +++ b/go/ast/astutil/enclosing_test.go @@ -19,7 +19,6 @@ import ( "testing" "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/internal/typeparams" ) // pathToString returns a string containing the concrete types of the @@ -59,10 +58,7 @@ func findInterval(t *testing.T, fset *token.FileSet, input, substr string) (f *a } // Common input for following tests. -var input = makeInput() - -func makeInput() string { - src := ` +const input = ` // Hello. package main import "fmt" @@ -71,10 +67,7 @@ func main() { z := (x + y) // add them f() // NB: ExprStmt and its CallExpr have same Pos/End } -` - if typeparams.Enabled { - src += ` func g[A any, P interface{ctype1| ~ctype2}](a1 A, p1 P) {} type PT[T constraint] struct{ t T } @@ -83,9 +76,6 @@ var v GT[targ1] var h = g[ targ2, targ3] ` - } - return src -} func TestPathEnclosingInterval_Exact(t *testing.T) { type testCase struct { @@ -129,31 +119,27 @@ func TestPathEnclosingInterval_Exact(t *testing.T) { "func f() {}"}, {" f", "f"}, - } - if typeparams.Enabled { - tests = append(tests, []testCase{ - dup("[A any, P interface{ctype1| ~ctype2}]"), - {"[", "[A any, P interface{ctype1| ~ctype2}]"}, - dup("A"), - {" any", "any"}, - dup("ctype1"), - {"|", "ctype1| ~ctype2"}, - dup("ctype2"), - {"~", "~ctype2"}, - dup("~ctype2"), - {" ~ctype2", "~ctype2"}, - {"]", "[A any, P interface{ctype1| ~ctype2}]"}, - dup("a1"), - dup("a1 A"), - dup("(a1 A, p1 P)"), - dup("type PT[T constraint] struct{ t T }"), - dup("PT"), - dup("[T constraint]"), - dup("constraint"), - dup("targ1"), - {" targ2", "targ2"}, - dup("g[ targ2, targ3]"), - }...) 
+ dup("[A any, P interface{ctype1| ~ctype2}]"), + {"[", "[A any, P interface{ctype1| ~ctype2}]"}, + dup("A"), + {" any", "any"}, + dup("ctype1"), + {"|", "ctype1| ~ctype2"}, + dup("ctype2"), + {"~", "~ctype2"}, + dup("~ctype2"), + {" ~ctype2", "~ctype2"}, + {"]", "[A any, P interface{ctype1| ~ctype2}]"}, + dup("a1"), + dup("a1 A"), + dup("(a1 A, p1 P)"), + dup("type PT[T constraint] struct{ t T }"), + dup("PT"), + dup("[T constraint]"), + dup("constraint"), + dup("targ1"), + {" targ2", "targ2"}, + dup("g[ targ2, targ3]"), } for _, test := range tests { f, start, end := findInterval(t, new(token.FileSet), input, test.substr) @@ -218,18 +204,14 @@ func TestPathEnclosingInterval_Paths(t *testing.T) { "[Ident File],true"}, {"f() // NB", "[CallExpr ExprStmt BlockStmt FuncDecl File],true"}, - } - if typeparams.Enabled { - tests = append(tests, []testCase{ - {" any", "[Ident Field FieldList FuncDecl File],true"}, - {"|", "[BinaryExpr Field FieldList InterfaceType Field FieldList FuncDecl File],true"}, - {"ctype2", - "[Ident UnaryExpr BinaryExpr Field FieldList InterfaceType Field FieldList FuncDecl File],true"}, - {"a1", "[Ident Field FieldList FuncDecl File],true"}, - {"PT[T constraint]", "[TypeSpec GenDecl File],false"}, - {"[T constraint]", "[FieldList TypeSpec GenDecl File],true"}, - {"targ2", "[Ident IndexListExpr ValueSpec GenDecl File],true"}, - }...) 
+ {" any", "[Ident Field FieldList FuncDecl File],true"}, + {"|", "[BinaryExpr Field FieldList InterfaceType Field FieldList FuncDecl File],true"}, + {"ctype2", + "[Ident UnaryExpr BinaryExpr Field FieldList InterfaceType Field FieldList FuncDecl File],true"}, + {"a1", "[Ident Field FieldList FuncDecl File],true"}, + {"PT[T constraint]", "[TypeSpec GenDecl File],false"}, + {"[T constraint]", "[FieldList TypeSpec GenDecl File],true"}, + {"targ2", "[Ident IndexListExpr ValueSpec GenDecl File],true"}, } for _, test := range tests { f, start, end := findInterval(t, new(token.FileSet), input, test.substr) diff --git a/go/ast/astutil/rewrite.go b/go/ast/astutil/rewrite.go index f430b21b9b9..58934f76633 100644 --- a/go/ast/astutil/rewrite.go +++ b/go/ast/astutil/rewrite.go @@ -9,8 +9,6 @@ import ( "go/ast" "reflect" "sort" - - "golang.org/x/tools/internal/typeparams" ) // An ApplyFunc is invoked by Apply for each node n, even if n is nil, @@ -252,7 +250,7 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast. a.apply(n, "X", nil, n.X) a.apply(n, "Index", nil, n.Index) - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: a.apply(n, "X", nil, n.X) a.applyList(n, "Indices") @@ -293,7 +291,7 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast. a.apply(n, "Fields", nil, n.Fields) case *ast.FuncType: - if tparams := typeparams.ForFuncType(n); tparams != nil { + if tparams := n.TypeParams; tparams != nil { a.apply(n, "TypeParams", nil, tparams) } a.apply(n, "Params", nil, n.Params) @@ -408,7 +406,7 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast. 
case *ast.TypeSpec: a.apply(n, "Doc", nil, n.Doc) a.apply(n, "Name", nil, n.Name) - if tparams := typeparams.ForTypeSpec(n); tparams != nil { + if tparams := n.TypeParams; tparams != nil { a.apply(n, "TypeParams", nil, tparams) } a.apply(n, "Type", nil, n.Type) diff --git a/go/ast/astutil/rewrite_test.go b/go/ast/astutil/rewrite_test.go index 4ef6fe99de7..57136a07cab 100644 --- a/go/ast/astutil/rewrite_test.go +++ b/go/ast/astutil/rewrite_test.go @@ -13,7 +13,6 @@ import ( "testing" "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/internal/typeparams" ) type rewriteTest struct { @@ -191,13 +190,9 @@ var z int return true }, }, -} - -func init() { - if typeparams.Enabled { - rewriteTests = append(rewriteTests, rewriteTest{ - name: "replace", - orig: `package p + { + name: "replace", + orig: `package p type T[P1, P2 any] int @@ -205,9 +200,9 @@ type R T[int, string] func F[Q1 any](q Q1) {} `, - // TODO: note how the rewrite adds a trailing comma in "func F". - // Is that a bug in the test, or in astutil.Apply? - want: `package p + // TODO: note how the rewrite adds a trailing comma in "func F". + // Is that a bug in the test, or in astutil.Apply? 
+ want: `package p type S[R1, P2 any] int32 @@ -215,23 +210,22 @@ type R S[int32, string] func F[X1 any](q X1,) {} `, - post: func(c *astutil.Cursor) bool { - if ident, ok := c.Node().(*ast.Ident); ok { - switch ident.Name { - case "int": - c.Replace(ast.NewIdent("int32")) - case "T": - c.Replace(ast.NewIdent("S")) - case "P1": - c.Replace(ast.NewIdent("R1")) - case "Q1": - c.Replace(ast.NewIdent("X1")) - } + post: func(c *astutil.Cursor) bool { + if ident, ok := c.Node().(*ast.Ident); ok { + switch ident.Name { + case "int": + c.Replace(ast.NewIdent("int32")) + case "T": + c.Replace(ast.NewIdent("S")) + case "P1": + c.Replace(ast.NewIdent("R1")) + case "Q1": + c.Replace(ast.NewIdent("X1")) } - return true - }, - }) - } + } + return true + }, + }, } func valspec(name, typ string) *ast.ValueSpec { diff --git a/go/ast/inspector/inspector_test.go b/go/ast/inspector/inspector_test.go index e88d584b5c0..57a2293c0cd 100644 --- a/go/ast/inspector/inspector_test.go +++ b/go/ast/inspector/inspector_test.go @@ -17,7 +17,6 @@ import ( "testing" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/typeparams" ) var netFiles []*ast.File @@ -72,10 +71,6 @@ func TestInspectAllNodes(t *testing.T) { } func TestInspectGenericNodes(t *testing.T) { - if !typeparams.Enabled { - t.Skip("type parameters are not supported at this Go version") - } - // src is using the 16 identifiers i0, i1, ... i15 so // we can easily verify that we've found all of them. const src = `package a @@ -98,7 +93,7 @@ var _ i13[i14, i15] inspect := inspector.New([]*ast.File{f}) found := make([]bool, 16) - indexListExprs := make(map[*typeparams.IndexListExpr]bool) + indexListExprs := make(map[*ast.IndexListExpr]bool) // Verify that we reach all i* identifiers, and collect IndexListExpr nodes. 
inspect.Preorder(nil, func(n ast.Node) { @@ -111,7 +106,7 @@ var _ i13[i14, i15] } found[index] = true } - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: indexListExprs[n] = false } }) @@ -126,8 +121,8 @@ var _ i13[i14, i15] if len(indexListExprs) == 0 { t.Fatal("no index list exprs found") } - inspect.Preorder([]ast.Node{&typeparams.IndexListExpr{}}, func(n ast.Node) { - ix := n.(*typeparams.IndexListExpr) + inspect.Preorder([]ast.Node{&ast.IndexListExpr{}}, func(n ast.Node) { + ix := n.(*ast.IndexListExpr) indexListExprs[ix] = true }) for ix, v := range indexListExprs { diff --git a/go/ast/inspector/typeof.go b/go/ast/inspector/typeof.go index 703c8139544..2a872f89d47 100644 --- a/go/ast/inspector/typeof.go +++ b/go/ast/inspector/typeof.go @@ -12,8 +12,6 @@ package inspector import ( "go/ast" "math" - - "golang.org/x/tools/internal/typeparams" ) const ( @@ -171,7 +169,7 @@ func typeOf(n ast.Node) uint64 { return 1 << nIncDecStmt case *ast.IndexExpr: return 1 << nIndexExpr - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: return 1 << nIndexListExpr case *ast.InterfaceType: return 1 << nInterfaceType diff --git a/go/buildutil/tags.go b/go/buildutil/tags.go index 7cf523bca48..32c8d1424d2 100644 --- a/go/buildutil/tags.go +++ b/go/buildutil/tags.go @@ -4,17 +4,22 @@ package buildutil -// This logic was copied from stringsFlag from $GOROOT/src/cmd/go/build.go. +// This duplicated logic must be kept in sync with that from go build: +// $GOROOT/src/cmd/go/internal/work/build.go (tagsFlag.Set) +// $GOROOT/src/cmd/go/internal/base/flag.go (StringsFlag.Set) +// $GOROOT/src/cmd/internal/quoted/quoted.go (isSpaceByte, Split) -import "fmt" +import ( + "fmt" + "strings" +) const TagsFlagDoc = "a list of `build tags` to consider satisfied during the build. 
" + "For more information about build tags, see the description of " + "build constraints in the documentation for the go/build package" // TagsFlag is an implementation of the flag.Value and flag.Getter interfaces that parses -// a flag value in the same manner as go build's -tags flag and -// populates a []string slice. +// a flag value the same as go build's -tags flag and populates a []string slice. // // See $GOROOT/src/go/build/doc.go for description of build tags. // See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag. @@ -25,19 +30,32 @@ const TagsFlagDoc = "a list of `build tags` to consider satisfied during the bui type TagsFlag []string func (v *TagsFlag) Set(s string) error { - var err error - *v, err = splitQuotedFields(s) - if *v == nil { - *v = []string{} + // See $GOROOT/src/cmd/go/internal/work/build.go (tagsFlag.Set) + // For compatibility with Go 1.12 and earlier, allow "-tags='a b c'" or even just "-tags='a'". + if strings.Contains(s, " ") || strings.Contains(s, "'") { + var err error + *v, err = splitQuotedFields(s) + if *v == nil { + *v = []string{} + } + return err + } + + // Starting in Go 1.13, the -tags flag is a comma-separated list of build tags. + *v = []string{} + for _, s := range strings.Split(s, ",") { + if s != "" { + *v = append(*v, s) + } } - return err + return nil } func (v *TagsFlag) Get() interface{} { return *v } func splitQuotedFields(s string) ([]string, error) { - // Split fields allowing '' or "" around elements. - // Quotes further inside the string do not count. + // See $GOROOT/src/cmd/internal/quoted/quoted.go (Split) + // This must remain in sync with that logic. var f []string for len(s) > 0 { for len(s) > 0 && isSpaceByte(s[0]) { @@ -76,5 +94,7 @@ func (v *TagsFlag) String() string { } func isSpaceByte(c byte) bool { + // See $GOROOT/src/cmd/internal/quoted/quoted.go (isSpaceByte, Split) + // This list must remain in sync with that. 
return c == ' ' || c == '\t' || c == '\n' || c == '\r' } diff --git a/go/buildutil/tags_test.go b/go/buildutil/tags_test.go index f8234314fb3..fb3afbccab7 100644 --- a/go/buildutil/tags_test.go +++ b/go/buildutil/tags_test.go @@ -5,28 +5,124 @@ package buildutil_test import ( + "bytes" "flag" "go/build" + "os/exec" "reflect" + "strings" "testing" "golang.org/x/tools/go/buildutil" + "golang.org/x/tools/internal/testenv" ) func TestTags(t *testing.T) { - f := flag.NewFlagSet("TestTags", flag.PanicOnError) - var ctxt build.Context - f.Var((*buildutil.TagsFlag)(&ctxt.BuildTags), "tags", buildutil.TagsFlagDoc) - f.Parse([]string{"-tags", ` 'one'"two" 'three "four"'`, "rest"}) - - // BuildTags - want := []string{"one", "two", "three \"four\""} - if !reflect.DeepEqual(ctxt.BuildTags, want) { - t.Errorf("BuildTags = %q, want %q", ctxt.BuildTags, want) + + type tagTestCase struct { + tags string + want []string + wantErr bool } - // Args() - if want := []string{"rest"}; !reflect.DeepEqual(f.Args(), want) { - t.Errorf("f.Args() = %q, want %q", f.Args(), want) + for name, tc := range map[string]tagTestCase{ + // Normal valid cases + "empty": { + tags: "", + want: []string{}, + }, + "commas": { + tags: "tag1,tag_2,🐹,tag/3,tag-4", + want: []string{"tag1", "tag_2", "🐹", "tag/3", "tag-4"}, + }, + "delimiters are spaces": { + tags: "a b\tc\rd\ne", + want: []string{"a", "b", "c", "d", "e"}, + }, + "old quote and space form": { + tags: "'a' 'b' 'c'", + want: []string{"a", "b", "c"}, + }, + + // Normal error cases + "unterminated": { + tags: `"missing closing quote`, + want: []string{}, + wantErr: true, + }, + "unterminated single": { + tags: `'missing closing quote`, + want: []string{}, + wantErr: true, + }, + + // Maybe surprising difference for unterminated quotes, no spaces + "unterminated no spaces": { + tags: `"missing_closing_quote`, + want: []string{"\"missing_closing_quote"}, + }, + "unterminated no spaces single": { + tags: `'missing_closing_quote`, + want: []string{}, + 
wantErr: true, + }, + + // Permitted but not recommended + "delimiters contiguous spaces": { + tags: "a \t\r\n, b \t\r\nc,d\te\tf", + want: []string{"a", ",", "b", "c,d", "e", "f"}, + }, + "quotes and spaces": { + tags: ` 'one'"two" 'three "four"'`, + want: []string{"one", "two", "three \"four\""}, + }, + "quotes single no spaces": { + tags: `'t1','t2',"t3"`, + want: []string{"t1", ",'t2',\"t3\""}, + }, + "quotes double no spaces": { + tags: `"t1","t2","t3"`, + want: []string{`"t1"`, `"t2"`, `"t3"`}, + }, + } { + t.Run(name, func(t *testing.T) { + f := flag.NewFlagSet("TestTags", flag.ContinueOnError) + var ctxt build.Context + f.Var((*buildutil.TagsFlag)(&ctxt.BuildTags), "tags", buildutil.TagsFlagDoc) + + // Normal case valid parsed tags + f.Parse([]string{"-tags", tc.tags, "rest"}) + + // BuildTags + if !reflect.DeepEqual(ctxt.BuildTags, tc.want) { + t.Errorf("Case = %s, BuildTags = %q, want %q", name, ctxt.BuildTags, tc.want) + } + + // Args() + if want := []string{"rest"}; !reflect.DeepEqual(f.Args(), want) { + t.Errorf("Case = %s, f.Args() = %q, want %q", name, f.Args(), want) + } + + // Regression check against base go tooling + cmd := testenv.Command(t, "go", "list", "-f", "{{context.BuildTags}}", "-tags", tc.tags, ".") + var out bytes.Buffer + cmd.Stdout = &out + if err := cmd.Run(); err != nil { + if ee, ok := err.(*exec.ExitError); ok && len(ee.Stderr) > 0 { + t.Logf("stderr:\n%s", ee.Stderr) + } + if !tc.wantErr { + t.Errorf("%v: %v", cmd, err) + } + } else if tc.wantErr { + t.Errorf("Expected failure for %v", cmd) + } else { + wantDescription := strings.Join(tc.want, " ") + output := strings.Trim(strings.TrimSuffix(out.String(), "\n"), "[]") + if output != wantDescription { + t.Errorf("Output = %s, want %s", output, wantDescription) + } + } + }) } } diff --git a/go/callgraph/cha/cha_test.go b/go/callgraph/cha/cha_test.go index 0737a981481..f99357b4eab 100644 --- a/go/callgraph/cha/cha_test.go +++ b/go/callgraph/cha/cha_test.go @@ -26,7 +26,6 @@ import 
( "golang.org/x/tools/go/loader" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/ssa/ssautil" - "golang.org/x/tools/internal/typeparams" ) var inputs = []string{ @@ -74,10 +73,6 @@ func TestCHA(t *testing.T) { // TestCHAGenerics is TestCHA tailored for testing generics, func TestCHAGenerics(t *testing.T) { - if !typeparams.Enabled { - t.Skip("TestCHAGenerics requires type parameters") - } - filename := "testdata/generics.go" prog, f, mainPkg, err := loadProgInfo(filename, ssa.InstantiateGenerics) if err != nil { diff --git a/go/callgraph/rta/rta.go b/go/callgraph/rta/rta.go index d0ae0fccf57..cd3afa0be74 100644 --- a/go/callgraph/rta/rta.go +++ b/go/callgraph/rta/rta.go @@ -45,7 +45,7 @@ import ( "golang.org/x/tools/go/callgraph" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/compat" + "golang.org/x/tools/internal/aliases" ) // A Result holds the results of Rapid Type Analysis, which includes the @@ -374,7 +374,7 @@ func (r *rta) interfaces(C types.Type) []*types.Interface { // and update the 'implements' relation. r.interfaceTypes.Iterate(func(I types.Type, v interface{}) { iinfo := v.(*interfaceTypeInfo) - if I := I.(*types.Interface); implements(cinfo, iinfo) { + if I := aliases.Unalias(I).(*types.Interface); implements(cinfo, iinfo) { iinfo.implementations = append(iinfo.implementations, C) cinfo.implements = append(cinfo.implements, I) } @@ -416,6 +416,9 @@ func (r *rta) implementations(I *types.Interface) []types.Type { // dynamic type of some interface or reflect.Value. // Adapted from needMethods in go/ssa/builder.go func (r *rta) addRuntimeType(T types.Type, skip bool) { + // Never record aliases. + T = aliases.Unalias(T) + if prev, ok := r.result.RuntimeTypes.At(T).(bool); ok { if skip && !prev { r.result.RuntimeTypes.Set(T, skip) @@ -453,11 +456,11 @@ func (r *rta) addRuntimeType(T types.Type, skip bool) { // Each package maintains its own set of types it has visited. 
var n *types.Named - switch T := T.(type) { + switch T := aliases.Unalias(T).(type) { case *types.Named: n = T case *types.Pointer: - n, _ = T.Elem().(*types.Named) + n, _ = aliases.Unalias(T.Elem()).(*types.Named) } if n != nil { owner := n.Obj().Pkg() @@ -476,6 +479,9 @@ func (r *rta) addRuntimeType(T types.Type, skip bool) { } switch t := T.(type) { + case *aliases.Alias: + panic("unreachable") + case *types.Basic: // nop @@ -539,7 +545,7 @@ func fingerprint(mset *types.MethodSet) uint64 { for i := 0; i < mset.Len(); i++ { method := mset.At(i).Obj() sig := method.Type().(*types.Signature) - sum := crc32.ChecksumIEEE(compat.Appendf(space[:], "%s/%d/%d", + sum := crc32.ChecksumIEEE(fmt.Appendf(space[:], "%s/%d/%d", method.Id(), sig.Params().Len(), sig.Results().Len())) diff --git a/go/callgraph/rta/rta_test.go b/go/callgraph/rta/rta_test.go index d96483b27f3..8552dc7b13c 100644 --- a/go/callgraph/rta/rta_test.go +++ b/go/callgraph/rta/rta_test.go @@ -23,7 +23,7 @@ import ( "golang.org/x/tools/go/loader" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/ssa/ssautil" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/aliases" ) // TestRTA runs RTA on each testdata/*.go file and compares the @@ -38,10 +38,6 @@ func TestRTA(t *testing.T) { } for _, filename := range filenames { t.Run(filename, func(t *testing.T) { - if !typeparams.Enabled && strings.HasSuffix(filename, "generics.go") { - t.Skip("TestRTAGenerics requires type parameters") - } - // Load main program and build SSA. // TODO(adonovan): use go/packages instead. 
conf := loader.Config{ParserMode: parser.ParseComments} @@ -205,7 +201,7 @@ func check(t *testing.T, f *ast.File, pkg *ssa.Package, res *rta.Result) { got := make(stringset) res.RuntimeTypes.Iterate(func(key types.Type, value interface{}) { if !value.(bool) { // accessible to reflection - typ := types.TypeString(key, types.RelativeTo(pkg.Pkg)) + typ := types.TypeString(aliases.Unalias(key), types.RelativeTo(pkg.Pkg)) got[typ] = true } }) diff --git a/go/callgraph/static/static_test.go b/go/callgraph/static/static_test.go index 0a108d3d2b1..4b61dbffa27 100644 --- a/go/callgraph/static/static_test.go +++ b/go/callgraph/static/static_test.go @@ -16,7 +16,6 @@ import ( "golang.org/x/tools/go/loader" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/ssa/ssautil" - "golang.org/x/tools/internal/typeparams" ) const input = `package P @@ -99,12 +98,6 @@ func TestStatic(t *testing.T) { "instantiated[P.B] -> (B).F", }, true}, } { - if e.typeparams && !typeparams.Enabled { - // Skip tests with type parameters when the build - // environment is not supporting any. - continue - } - conf := loader.Config{ParserMode: parser.ParseComments} f, err := conf.ParseFile("P.go", e.input) if err != nil { diff --git a/go/callgraph/util.go b/go/callgraph/util.go index 1ab039029db..54993204742 100644 --- a/go/callgraph/util.go +++ b/go/callgraph/util.go @@ -76,9 +76,12 @@ func PathSearch(start *Node, isEnd func(*Node) bool) []*Edge { } // DeleteSyntheticNodes removes from call graph g all nodes for -// synthetic functions (except g.Root and package initializers), -// preserving the topology. In effect, calls to synthetic wrappers -// are "inlined". +// functions that do not correspond to source syntax. For historical +// reasons, nodes for g.Root and package initializers are always +// kept. +// +// As nodes are removed, edges are created to preserve the +// reachability relation of the remaining nodes. 
func (g *Graph) DeleteSyntheticNodes() { // Measurements on the standard library and go.tools show that // resulting graph has ~15% fewer nodes and 4-8% fewer edges @@ -99,7 +102,7 @@ func (g *Graph) DeleteSyntheticNodes() { } } for fn, cgn := range g.Nodes { - if cgn == g.Root || fn.Synthetic == "" || isInit(cgn.Func) { + if cgn == g.Root || isInit(cgn.Func) || fn.Syntax() != nil { continue // keep } for _, eIn := range cgn.In { diff --git a/go/callgraph/vta/graph.go b/go/callgraph/vta/graph.go index 4d1d5254c6e..1f56a747f92 100644 --- a/go/callgraph/vta/graph.go +++ b/go/callgraph/vta/graph.go @@ -12,6 +12,7 @@ import ( "golang.org/x/tools/go/callgraph" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -389,7 +390,7 @@ func (b *builder) unop(u *ssa.UnOp) { // Multiplication operator * is used here as a dereference operator. b.addInFlowAliasEdges(b.nodeFromVal(u), b.nodeFromVal(u.X)) case token.ARROW: - t := u.X.Type().Underlying().(*types.Chan).Elem() + t := typeparams.CoreType(u.X.Type()).(*types.Chan).Elem() b.addInFlowAliasEdges(b.nodeFromVal(u), channelElem{typ: t}) default: // There is no interesting type flow otherwise. @@ -410,7 +411,7 @@ func (b *builder) tassert(a *ssa.TypeAssert) { // The case where a is register so there // is a flow from a.X to a[0]. Here, a[0] is represented as an // indexedLocal: an entry into local tuple register a at index 0. - tup := a.Type().Underlying().(*types.Tuple) + tup := a.Type().(*types.Tuple) t := tup.At(0).Type() local := indexedLocal{val: a, typ: t, index: 0} @@ -421,7 +422,7 @@ func (b *builder) tassert(a *ssa.TypeAssert) { // and t1 where the source is indexed local representing a value // from tuple register t2 at index i and the target is t1. 
func (b *builder) extract(e *ssa.Extract) { - tup := e.Tuple.Type().Underlying().(*types.Tuple) + tup := e.Tuple.Type().(*types.Tuple) t := tup.At(e.Index).Type() local := indexedLocal{val: e.Tuple, typ: t, index: e.Index} @@ -443,7 +444,7 @@ func (b *builder) fieldAddr(f *ssa.FieldAddr) { } func (b *builder) send(s *ssa.Send) { - t := s.Chan.Type().Underlying().(*types.Chan).Elem() + t := typeparams.CoreType(s.Chan.Type()).(*types.Chan).Elem() b.addInFlowAliasEdges(channelElem{typ: t}, b.nodeFromVal(s.X)) } @@ -457,7 +458,7 @@ func (b *builder) send(s *ssa.Send) { func (b *builder) selekt(s *ssa.Select) { recvIndex := 0 for _, state := range s.States { - t := state.Chan.Type().Underlying().(*types.Chan).Elem() + t := typeparams.CoreType(state.Chan.Type()).(*types.Chan).Elem() if state.Dir == types.SendOnly { b.addInFlowAliasEdges(channelElem{typ: t}, b.nodeFromVal(state.Send)) @@ -497,7 +498,13 @@ func (b *builder) lookup(l *ssa.Lookup) { // No interesting flows for string lookups. return } - b.addInFlowAliasEdges(b.nodeFromVal(l), mapValue{typ: t.Elem()}) + + if !l.CommaOk { + b.addInFlowAliasEdges(b.nodeFromVal(l), mapValue{typ: t.Elem()}) + } else { + i := indexedLocal{val: l, typ: t.Elem(), index: 0} + b.addInFlowAliasEdges(i, mapValue{typ: t.Elem()}) + } } // mapUpdate handles map update commands m[b] = a where m is of type @@ -521,7 +528,7 @@ func (b *builder) next(n *ssa.Next) { if n.IsString { return } - tup := n.Type().Underlying().(*types.Tuple) + tup := n.Type().(*types.Tuple) kt := tup.At(1).Type() vt := tup.At(2).Type() @@ -651,7 +658,7 @@ func addReturnFlows(b *builder, r *ssa.Return, site ssa.Value) { return } - tup := site.Type().Underlying().(*types.Tuple) + tup := site.Type().(*types.Tuple) for i, r := range results { local := indexedLocal{val: site, typ: tup.At(i).Type(), index: i} b.addInFlowEdge(b.nodeFromVal(r), local) @@ -661,14 +668,14 @@ func addReturnFlows(b *builder, r *ssa.Return, site ssa.Value) { func (b *builder) multiconvert(c 
*ssa.MultiConvert) { // TODO(zpavlinovic): decide what to do on MultiConvert long term. // TODO(zpavlinovic): add unit tests. - typeSetOf := func(typ types.Type) []*typeparams.Term { + typeSetOf := func(typ types.Type) []*types.Term { // This is a adaptation of x/exp/typeparams.NormalTerms which x/tools cannot depend on. - var terms []*typeparams.Term + var terms []*types.Term var err error - switch typ := typ.(type) { - case *typeparams.TypeParam: + switch typ := aliases.Unalias(typ).(type) { + case *types.TypeParam: terms, err = typeparams.StructuralTerms(typ) - case *typeparams.Union: + case *types.Union: terms, err = typeparams.UnionTermSet(typ) case *types.Interface: terms, err = typeparams.InterfaceTermSet(typ) @@ -676,7 +683,7 @@ func (b *builder) multiconvert(c *ssa.MultiConvert) { // Common case. // Specializing the len=1 case to avoid a slice // had no measurable space/time benefit. - terms = []*typeparams.Term{typeparams.NewTerm(false, typ)} + terms = []*types.Term{types.NewTerm(false, typ)} } if err != nil { @@ -686,7 +693,7 @@ func (b *builder) multiconvert(c *ssa.MultiConvert) { } // isValuePreserving returns true if a conversion from ut_src to // ut_dst is value-preserving, i.e. just a change of type. - // Precondition: neither argument is a named type. + // Precondition: neither argument is a named or alias type. isValuePreserving := func(ut_src, ut_dst types.Type) bool { // Identical underlying types? if types.IdenticalIgnoreTags(ut_dst, ut_src) { @@ -734,7 +741,7 @@ func (b *builder) addInFlowEdge(s, d node) { // Creates const, pointer, global, func, and local nodes based on register instructions. 
func (b *builder) nodeFromVal(val ssa.Value) node { - if p, ok := val.Type().(*types.Pointer); ok && !types.IsInterface(p.Elem()) && !isFunction(p.Elem()) { + if p, ok := aliases.Unalias(val.Type()).(*types.Pointer); ok && !types.IsInterface(p.Elem()) && !isFunction(p.Elem()) { // Nested pointer to interfaces are modeled as a special // nestedPtrInterface node. if i := interfaceUnderPtr(p.Elem()); i != nil { diff --git a/go/callgraph/vta/graph_test.go b/go/callgraph/vta/graph_test.go index da574d71b53..42fdea7f107 100644 --- a/go/callgraph/vta/graph_test.go +++ b/go/callgraph/vta/graph_test.go @@ -15,6 +15,7 @@ import ( "golang.org/x/tools/go/callgraph/cha" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/ssa/ssautil" + "golang.org/x/tools/internal/aliases" ) func TestNodeInterface(t *testing.T) { @@ -35,7 +36,7 @@ func TestNodeInterface(t *testing.T) { reg := firstRegInstr(main) // t0 := *gl X := pkg.Type("X").Type() gl := pkg.Var("gl") - glPtrType, ok := gl.Type().(*types.Pointer) + glPtrType, ok := aliases.Unalias(gl.Type()).(*types.Pointer) if !ok { t.Fatalf("could not cast gl variable to pointer type") } @@ -180,6 +181,7 @@ func TestVTAGraphConstruction(t *testing.T) { "testdata/src/store_load_alias.go", "testdata/src/phi_alias.go", "testdata/src/channels.go", + "testdata/src/generic_channels.go", "testdata/src/select.go", "testdata/src/stores_arrays.go", "testdata/src/maps.go", diff --git a/go/callgraph/vta/testdata/src/callgraph_comma_maps.go b/go/callgraph/vta/testdata/src/callgraph_comma_maps.go new file mode 100644 index 00000000000..47546d8de3e --- /dev/null +++ b/go/callgraph/vta/testdata/src/callgraph_comma_maps.go @@ -0,0 +1,84 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// go:build ignore + +package testdata + +type I interface { + Name() string + Foo() +} + +var is = make(map[string]I) + +func init() { + register(A{}) + register(B{}) +} + +func register(i I) { + is[i.Name()] = i +} + +type A struct{} + +func (a A) Foo() {} +func (a A) Name() string { return "a" } + +type B struct{} + +func (b B) Foo() {} +func (b B) Name() string { return "b" } + +func Do(n string) { + i, ok := is[n] + if !ok { + return + } + i.Foo() +} + +func Go(n string) { + if i, ok := is[n]; !ok { + return + } else { + i.Foo() + } +} + +func To(n string) { + var i I + var ok bool + + if i, ok = is[n]; !ok { + return + } + i.Foo() +} + +func Ro(n string) { + i := is[n] + i.Foo() +} + +// Relevant SSA: +// func Do(n string): +// t0 = *is +// t1 = t0[n],ok +// t2 = extract t1 #0 +// t3 = extract t1 #1 +// if t3 goto 2 else 1 +// 1: +// return +// 2: +// t4 = invoke t2.Foo() +// return + +// WANT: +// register: invoke i.Name() -> A.Name, B.Name +// Do: invoke t2.Foo() -> A.Foo, B.Foo +// Go: invoke t2.Foo() -> A.Foo, B.Foo +// To: invoke t2.Foo() -> A.Foo, B.Foo +// Ro: invoke t1.Foo() -> A.Foo, B.Foo diff --git a/go/callgraph/vta/testdata/src/generic_channels.go b/go/callgraph/vta/testdata/src/generic_channels.go new file mode 100644 index 00000000000..390f07a4392 --- /dev/null +++ b/go/callgraph/vta/testdata/src/generic_channels.go @@ -0,0 +1,33 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// go:build ignore + +package testdata + +type I1 interface{} +type I2 interface{} +type I3 interface{} + +func Foo[C interface{ ~chan I1 | ~chan<- I1 }](c C, j int) { + c <- j +} + +func Bar[C interface{ ~chan I2 | ~<-chan I2 }](c C) { + x := <-c + print(x) +} + +func Baz[C interface{ ~chan I3 | ~<-chan I3 }](c C) { + select { + case x := <-c: + print(x) + default: + } +} + +// WANT: +// Local(t0) -> Channel(chan testdata.I1) +// Channel(chan testdata.I2) -> Local(t0) +// Channel(chan testdata.I3) -> Local(t0[2]) diff --git a/go/callgraph/vta/utils.go b/go/callgraph/vta/utils.go index 3471aae3a10..ed248d73e0b 100644 --- a/go/callgraph/vta/utils.go +++ b/go/callgraph/vta/utils.go @@ -9,6 +9,7 @@ import ( "golang.org/x/tools/go/callgraph" "golang.org/x/tools/go/ssa" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -24,7 +25,7 @@ func isReferenceNode(n node) bool { return true } - if _, ok := n.Type().(*types.Pointer); ok { + if _, ok := aliases.Unalias(n.Type()).(*types.Pointer); ok { return true } @@ -166,6 +167,7 @@ func siteCallees(c ssa.CallInstruction, callgraph *callgraph.Graph) []*ssa.Funct } func canHaveMethods(t types.Type) bool { + t = aliases.Unalias(t) if _, ok := t.(*types.Named); ok { return true } diff --git a/go/callgraph/vta/vta_go117_test.go b/go/callgraph/vta/vta_go117_test.go deleted file mode 100644 index 6a5af2ced44..00000000000 --- a/go/callgraph/vta/vta_go117_test.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build go1.17 -// +build go1.17 - -package vta - -import ( - "testing" - - "golang.org/x/tools/go/callgraph/cha" - "golang.org/x/tools/go/ssa" - "golang.org/x/tools/go/ssa/ssautil" -) - -func TestVTACallGraphGo117(t *testing.T) { - file := "testdata/src/go117.go" - prog, want, err := testProg(file, ssa.BuilderMode(0)) - if err != nil { - t.Fatalf("couldn't load test file '%s': %s", file, err) - } - if len(want) == 0 { - t.Fatalf("couldn't find want in `%s`", file) - } - - g, _ := typePropGraph(ssautil.AllFunctions(prog), cha.CallGraph(prog)) - got := vtaGraphStr(g) - if diff := setdiff(want, got); len(diff) != 0 { - t.Errorf("`%s`: want superset of %v;\n got %v", file, want, got) - } -} diff --git a/go/callgraph/vta/vta_test.go b/go/callgraph/vta/vta_test.go index 69f218172a1..76bd85e6fb7 100644 --- a/go/callgraph/vta/vta_test.go +++ b/go/callgraph/vta/vta_test.go @@ -13,7 +13,6 @@ import ( "golang.org/x/tools/go/callgraph/cha" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/ssa/ssautil" - "golang.org/x/tools/internal/typeparams" ) func TestVTACallGraph(t *testing.T) { @@ -27,6 +26,7 @@ func TestVTACallGraph(t *testing.T) { "testdata/src/callgraph_field_funcs.go", "testdata/src/callgraph_recursive_types.go", "testdata/src/callgraph_issue_57756.go", + "testdata/src/callgraph_comma_maps.go", } { t.Run(file, func(t *testing.T) { prog, want, err := testProg(file, ssa.BuilderMode(0)) @@ -119,10 +119,6 @@ func TestVTAPanicMissingDefinitions(t *testing.T) { } func TestVTACallGraphGenerics(t *testing.T) { - if !typeparams.Enabled { - t.Skip("TestVTACallGraphGenerics requires type parameters") - } - // TODO(zpavlinovic): add more tests files := []string{ "testdata/src/arrays_generics.go", @@ -148,3 +144,20 @@ func TestVTACallGraphGenerics(t *testing.T) { }) } } + +func TestVTACallGraphGo117(t *testing.T) { + file := "testdata/src/go117.go" + prog, want, err := testProg(file, ssa.BuilderMode(0)) + if err != nil { + t.Fatalf("couldn't load test file '%s': %s", file, 
err) + } + if len(want) == 0 { + t.Fatalf("couldn't find want in `%s`", file) + } + + g, _ := typePropGraph(ssautil.AllFunctions(prog), cha.CallGraph(prog)) + got := vtaGraphStr(g) + if diff := setdiff(want, got); len(diff) != 0 { + t.Errorf("`%s`: want superset of %v;\n got %v", file, want, got) + } +} diff --git a/go/cfg/builder.go b/go/cfg/builder.go index dad6a444d82..ac4d63c4003 100644 --- a/go/cfg/builder.go +++ b/go/cfg/builder.go @@ -16,8 +16,8 @@ type builder struct { cfg *CFG mayReturn func(*ast.CallExpr) bool current *Block - lblocks map[*ast.Object]*lblock // labeled blocks - targets *targets // linked stack of branch targets + lblocks map[string]*lblock // labeled blocks + targets *targets // linked stack of branch targets } func (b *builder) stmt(_s ast.Stmt) { @@ -42,7 +42,7 @@ start: b.add(s) if call, ok := s.X.(*ast.CallExpr); ok && !b.mayReturn(call) { // Calls to panic, os.Exit, etc, never return. - b.current = b.newBlock("unreachable.call") + b.current = b.newBlock(KindUnreachable, s) } case *ast.DeclStmt: @@ -57,7 +57,7 @@ start: } case *ast.LabeledStmt: - label = b.labeledBlock(s.Label) + label = b.labeledBlock(s.Label, s) b.jump(label._goto) b.current = label._goto _s = s.Stmt @@ -65,7 +65,7 @@ start: case *ast.ReturnStmt: b.add(s) - b.current = b.newBlock("unreachable.return") + b.current = b.newBlock(KindUnreachable, s) case *ast.BranchStmt: b.branchStmt(s) @@ -77,11 +77,11 @@ start: if s.Init != nil { b.stmt(s.Init) } - then := b.newBlock("if.then") - done := b.newBlock("if.done") + then := b.newBlock(KindIfThen, s) + done := b.newBlock(KindIfDone, s) _else := done if s.Else != nil { - _else = b.newBlock("if.else") + _else = b.newBlock(KindIfElse, s) } b.add(s.Cond) b.ifelse(then, _else) @@ -128,7 +128,7 @@ func (b *builder) branchStmt(s *ast.BranchStmt) { switch s.Tok { case token.BREAK: if s.Label != nil { - if lb := b.labeledBlock(s.Label); lb != nil { + if lb := b.labeledBlock(s.Label, nil); lb != nil { block = lb._break } } else { @@ 
-139,7 +139,7 @@ func (b *builder) branchStmt(s *ast.BranchStmt) { case token.CONTINUE: if s.Label != nil { - if lb := b.labeledBlock(s.Label); lb != nil { + if lb := b.labeledBlock(s.Label, nil); lb != nil { block = lb._continue } } else { @@ -155,14 +155,14 @@ func (b *builder) branchStmt(s *ast.BranchStmt) { case token.GOTO: if s.Label != nil { - block = b.labeledBlock(s.Label)._goto + block = b.labeledBlock(s.Label, nil)._goto } } - if block == nil { - block = b.newBlock("undefined.branch") + if block == nil { // ill-typed (e.g. undefined label) + block = b.newBlock(KindUnreachable, s) } b.jump(block) - b.current = b.newBlock("unreachable.branch") + b.current = b.newBlock(KindUnreachable, s) } func (b *builder) switchStmt(s *ast.SwitchStmt, label *lblock) { @@ -172,7 +172,7 @@ func (b *builder) switchStmt(s *ast.SwitchStmt, label *lblock) { if s.Tag != nil { b.add(s.Tag) } - done := b.newBlock("switch.done") + done := b.newBlock(KindSwitchDone, s) if label != nil { label._break = done } @@ -188,13 +188,13 @@ func (b *builder) switchStmt(s *ast.SwitchStmt, label *lblock) { for i, clause := range s.Body.List { body := fallthru if body == nil { - body = b.newBlock("switch.body") // first case only + body = b.newBlock(KindSwitchCaseBody, clause) // first case only } // Preallocate body block for the next case. 
fallthru = done if i+1 < ncases { - fallthru = b.newBlock("switch.body") + fallthru = b.newBlock(KindSwitchCaseBody, s.Body.List[i+1]) } cc := clause.(*ast.CaseClause) @@ -208,7 +208,7 @@ func (b *builder) switchStmt(s *ast.SwitchStmt, label *lblock) { var nextCond *Block for _, cond := range cc.List { - nextCond = b.newBlock("switch.next") + nextCond = b.newBlock(KindSwitchNextCase, cc) b.add(cond) // one half of the tag==cond condition b.ifelse(body, nextCond) b.current = nextCond @@ -247,7 +247,7 @@ func (b *builder) typeSwitchStmt(s *ast.TypeSwitchStmt, label *lblock) { b.add(s.Assign) } - done := b.newBlock("typeswitch.done") + done := b.newBlock(KindSwitchDone, s) if label != nil { label._break = done } @@ -258,10 +258,10 @@ func (b *builder) typeSwitchStmt(s *ast.TypeSwitchStmt, label *lblock) { default_ = cc continue } - body := b.newBlock("typeswitch.body") + body := b.newBlock(KindSwitchCaseBody, cc) var next *Block for _, casetype := range cc.List { - next = b.newBlock("typeswitch.next") + next = b.newBlock(KindSwitchNextCase, cc) // casetype is a type, so don't call b.add(casetype). // This block logically contains a type assertion, // x.(casetype), but it's unclear how to represent x. 
@@ -300,7 +300,7 @@ func (b *builder) selectStmt(s *ast.SelectStmt, label *lblock) { } } - done := b.newBlock("select.done") + done := b.newBlock(KindSelectDone, s) if label != nil { label._break = done } @@ -312,8 +312,8 @@ func (b *builder) selectStmt(s *ast.SelectStmt, label *lblock) { defaultBody = &clause.Body continue } - body := b.newBlock("select.body") - next := b.newBlock("select.next") + body := b.newBlock(KindSelectCaseBody, clause) + next := b.newBlock(KindSelectAfterCase, clause) b.ifelse(body, next) b.current = body b.targets = &targets{ @@ -358,15 +358,15 @@ func (b *builder) forStmt(s *ast.ForStmt, label *lblock) { if s.Init != nil { b.stmt(s.Init) } - body := b.newBlock("for.body") - done := b.newBlock("for.done") // target of 'break' - loop := body // target of back-edge + body := b.newBlock(KindForBody, s) + done := b.newBlock(KindForDone, s) // target of 'break' + loop := body // target of back-edge if s.Cond != nil { - loop = b.newBlock("for.loop") + loop = b.newBlock(KindForLoop, s) } cont := loop // target of 'continue' if s.Post != nil { - cont = b.newBlock("for.post") + cont = b.newBlock(KindForPost, s) } if label != nil { label._break = done @@ -414,12 +414,12 @@ func (b *builder) rangeStmt(s *ast.RangeStmt, label *lblock) { // jump loop // done: (target of break) - loop := b.newBlock("range.loop") + loop := b.newBlock(KindRangeLoop, s) b.jump(loop) b.current = loop - body := b.newBlock("range.body") - done := b.newBlock("range.done") + body := b.newBlock(KindRangeBody, s) + done := b.newBlock(KindRangeDone, s) b.ifelse(body, done) b.current = body @@ -461,14 +461,19 @@ type lblock struct { // labeledBlock returns the branch target associated with the // specified label, creating it if needed. 
-func (b *builder) labeledBlock(label *ast.Ident) *lblock { - lb := b.lblocks[label.Obj] +func (b *builder) labeledBlock(label *ast.Ident, stmt *ast.LabeledStmt) *lblock { + lb := b.lblocks[label.Name] if lb == nil { - lb = &lblock{_goto: b.newBlock(label.Name)} + lb = &lblock{_goto: b.newBlock(KindLabel, nil)} if b.lblocks == nil { - b.lblocks = make(map[*ast.Object]*lblock) + b.lblocks = make(map[string]*lblock) } - b.lblocks[label.Obj] = lb + b.lblocks[label.Name] = lb + } + // Fill in the label later (in case of forward goto). + // Stmt may be set already if labels are duplicated (ill-typed). + if stmt != nil && lb._goto.Stmt == nil { + lb._goto.Stmt = stmt } return lb } @@ -477,11 +482,12 @@ func (b *builder) labeledBlock(label *ast.Ident) *lblock { // slice and returns it. // It does not automatically become the current block. // comment is an optional string for more readable debugging output. -func (b *builder) newBlock(comment string) *Block { +func (b *builder) newBlock(kind BlockKind, stmt ast.Stmt) *Block { g := b.cfg block := &Block{ - Index: int32(len(g.Blocks)), - comment: comment, + Index: int32(len(g.Blocks)), + Kind: kind, + Stmt: stmt, } block.Succs = block.succs2[:0] g.Blocks = append(g.Blocks, block) diff --git a/go/cfg/cfg.go b/go/cfg/cfg.go index 37d799f4bc3..fad4530ff3c 100644 --- a/go/cfg/cfg.go +++ b/go/cfg/cfg.go @@ -9,7 +9,10 @@ // // The blocks of the CFG contain all the function's non-control // statements. The CFG does not contain control statements such as If, -// Switch, Select, and Branch, but does contain their subexpressions. +// Switch, Select, and Branch, but does contain their subexpressions; +// also, each block records the control statement (Block.Stmt) that +// gave rise to it and its relationship (Block.Kind) to that statement. 
+// // For example, this source code: // // if x := f(); x != nil { @@ -20,14 +23,14 @@ // // produces this CFG: // -// 1: x := f() +// 1: x := f() Body // x != nil // succs: 2, 3 -// 2: T() +// 2: T() IfThen // succs: 4 -// 3: F() +// 3: F() IfElse // succs: 4 -// 4: +// 4: IfDone // // The CFG does contain Return statements; even implicit returns are // materialized (at the position of the function's closing brace). @@ -50,6 +53,7 @@ import ( // // The entry point is Blocks[0]; there may be multiple return blocks. type CFG struct { + fset *token.FileSet Blocks []*Block // block[0] is entry; order otherwise undefined } @@ -64,9 +68,63 @@ type Block struct { Succs []*Block // successor nodes in the graph Index int32 // index within CFG.Blocks Live bool // block is reachable from entry + Kind BlockKind // block kind + Stmt ast.Stmt // statement that gave rise to this block (see BlockKind for details) - comment string // for debugging - succs2 [2]*Block // underlying array for Succs + succs2 [2]*Block // underlying array for Succs +} + +// A BlockKind identifies the purpose of a block. +// It also determines the possible types of its Stmt field. 
+type BlockKind uint8 + +const ( + KindInvalid BlockKind = iota // Stmt=nil + + KindUnreachable // unreachable block after {Branch,Return}Stmt / no-return call ExprStmt + KindBody // function body BlockStmt + KindForBody // body of ForStmt + KindForDone // block after ForStmt + KindForLoop // head of ForStmt + KindForPost // post condition of ForStmt + KindIfDone // block after IfStmt + KindIfElse // else block of IfStmt + KindIfThen // then block of IfStmt + KindLabel // labeled block of BranchStmt (Stmt may be nil for dangling label) + KindRangeBody // body of RangeStmt + KindRangeDone // block after RangeStmt + KindRangeLoop // head of RangeStmt + KindSelectCaseBody // body of SelectStmt + KindSelectDone // block after SelectStmt + KindSelectAfterCase // block after a CommClause + KindSwitchCaseBody // body of CaseClause + KindSwitchDone // block after {Type.}SwitchStmt + KindSwitchNextCase // secondary expression of a multi-expression CaseClause +) + +func (kind BlockKind) String() string { + return [...]string{ + KindInvalid: "Invalid", + KindUnreachable: "Unreachable", + KindBody: "Body", + KindForBody: "ForBody", + KindForDone: "ForDone", + KindForLoop: "ForLoop", + KindForPost: "ForPost", + KindIfDone: "IfDone", + KindIfElse: "IfElse", + KindIfThen: "IfThen", + KindLabel: "Label", + KindRangeBody: "RangeBody", + KindRangeDone: "RangeDone", + KindRangeLoop: "RangeLoop", + KindSelectCaseBody: "SelectCaseBody", + KindSelectDone: "SelectDone", + KindSelectAfterCase: "SelectAfterCase", + KindSwitchCaseBody: "SwitchCaseBody", + KindSwitchDone: "SwitchDone", + KindSwitchNextCase: "SwitchNextCase", + }[kind] } // New returns a new control-flow graph for the specified function body, @@ -82,7 +140,7 @@ func New(body *ast.BlockStmt, mayReturn func(*ast.CallExpr) bool) *CFG { mayReturn: mayReturn, cfg: new(CFG), } - b.current = b.newBlock("entry") + b.current = b.newBlock(KindBody, body) b.stmt(body) // Compute liveness (reachability from entry point), breadth-first. 
@@ -110,10 +168,22 @@ func New(body *ast.BlockStmt, mayReturn func(*ast.CallExpr) bool) *CFG { } func (b *Block) String() string { - return fmt.Sprintf("block %d (%s)", b.Index, b.comment) + return fmt.Sprintf("block %d (%s)", b.Index, b.comment(nil)) } -// Return returns the return statement at the end of this block if present, nil otherwise. +func (b *Block) comment(fset *token.FileSet) string { + s := b.Kind.String() + if fset != nil && b.Stmt != nil { + s = fmt.Sprintf("%s@L%d", s, fset.Position(b.Stmt.Pos()).Line) + } + return s +} + +// Return returns the return statement at the end of this block if present, nil +// otherwise. +// +// When control falls off the end of the function, the ReturnStmt is synthetic +// and its [ast.Node.End] position may be beyond the end of the file. func (b *Block) Return() (ret *ast.ReturnStmt) { if len(b.Nodes) > 0 { ret, _ = b.Nodes[len(b.Nodes)-1].(*ast.ReturnStmt) @@ -125,7 +195,7 @@ func (b *Block) Return() (ret *ast.ReturnStmt) { func (g *CFG) Format(fset *token.FileSet) string { var buf bytes.Buffer for _, b := range g.Blocks { - fmt.Fprintf(&buf, ".%d: # %s\n", b.Index, b.comment) + fmt.Fprintf(&buf, ".%d: # %s\n", b.Index, b.comment(fset)) for _, n := range b.Nodes { fmt.Fprintf(&buf, "\t%s\n", formatNode(fset, n)) } @@ -141,6 +211,34 @@ func (g *CFG) Format(fset *token.FileSet) string { return buf.String() } +// Dot returns the control-flow graph in the [Dot graph description language]. +// Use a command such as 'dot -Tsvg' to render it in a form viewable in a browser. +// This method is provided as a debugging aid; the details of the +// output are unspecified and may change. 
+// +// [Dot graph description language]: ​​https://en.wikipedia.org/wiki/DOT_(graph_description_language) +func (g *CFG) Dot(fset *token.FileSet) string { + var buf bytes.Buffer + buf.WriteString("digraph CFG {\n") + buf.WriteString(" node [shape=box];\n") + for _, b := range g.Blocks { + // node label + var text bytes.Buffer + text.WriteString(b.comment(fset)) + for _, n := range b.Nodes { + fmt.Fprintf(&text, "\n%s", formatNode(fset, n)) + } + + // node and edges + fmt.Fprintf(&buf, " n%d [label=%q];\n", b.Index, &text) + for _, succ := range b.Succs { + fmt.Fprintf(&buf, " n%d -> n%d;\n", b.Index, succ.Index) + } + } + buf.WriteString("}\n") + return buf.String() +} + func formatNode(fset *token.FileSet, n ast.Node) string { var buf bytes.Buffer format.Node(&buf, fset, n) diff --git a/go/cfg/cfg_test.go b/go/cfg/cfg_test.go index f22bda34113..536d2fe5df7 100644 --- a/go/cfg/cfg_test.go +++ b/go/cfg/cfg_test.go @@ -2,15 +2,20 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package cfg +package cfg_test import ( "bytes" "fmt" "go/ast" + "go/format" "go/parser" "go/token" "testing" + + "golang.org/x/tools/go/cfg" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/internal/testenv" ) const src = `package main @@ -140,7 +145,7 @@ func TestDeadCode(t *testing.T) { } for _, decl := range f.Decls { if decl, ok := decl.(*ast.FuncDecl); ok { - g := New(decl.Body, mayReturn) + g := cfg.New(decl.Body, mayReturn) // Print statements in unreachable blocks // (in order determined by builder). @@ -165,6 +170,57 @@ func TestDeadCode(t *testing.T) { } } +// TestSmoke runs the CFG builder on every FuncDecl in the standard +// library and x/tools. (This is all well-typed code, but it gives +// some coverage.) +func TestSmoke(t *testing.T) { + if testing.Short() { + t.Skip("skipping in short mode") + } + testenv.NeedsTool(t, "go") + + // The Mode API is just hateful. 
+ // https://github.com/golang/go/issues/48226#issuecomment-1948792315 + mode := packages.NeedDeps | packages.NeedImports | packages.NeedSyntax | packages.NeedTypes + pkgs, err := packages.Load(&packages.Config{Mode: mode}, "std", "golang.org/x/tools/...") + if err != nil { + t.Fatal(err) + } + + for _, pkg := range pkgs { + for _, file := range pkg.Syntax { + for _, decl := range file.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok && decl.Body != nil { + g := cfg.New(decl.Body, mayReturn) + + // Run a few quick sanity checks. + failed := false + for i, b := range g.Blocks { + errorf := func(format string, args ...any) { + if !failed { + t.Errorf("%s\n%s", pkg.Fset.Position(decl.Pos()), g.Format(pkg.Fset)) + failed = true + } + msg := fmt.Sprintf(format, args...) + t.Errorf("block %d: %s", i, msg) + } + + if b.Kind == cfg.KindInvalid { + errorf("invalid Block.Kind %v", b.Kind) + } + if b.Stmt == nil && b.Kind != cfg.KindLabel { + errorf("nil Block.Stmt (Kind=%v)", b.Kind) + } + if i != int(b.Index) { + errorf("invalid Block.Index") + } + } + } + } + } + } +} + // A trivial mayReturn predicate that looks only at syntax, not types. func mayReturn(call *ast.CallExpr) bool { switch fun := call.Fun.(type) { @@ -175,3 +231,10 @@ func mayReturn(call *ast.CallExpr) bool { } return true } + +func formatNode(fset *token.FileSet, n ast.Node) string { + var buf bytes.Buffer + format.Node(&buf, fset, n) + // Indent secondary lines by a tab. + return string(bytes.Replace(buf.Bytes(), []byte("\n"), []byte("\n\t"), -1)) +} diff --git a/go/cfg/main.go b/go/cfg/main.go new file mode 100644 index 00000000000..3f1b3611665 --- /dev/null +++ b/go/cfg/main.go @@ -0,0 +1,67 @@ +//go:build ignore + +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// The cfg command prints the control-flow graph of the first function +// or method whose name matches 'funcname' in the specified package. +// +// Usage: cfg package funcname +// +// Example: +// +// $ go build -o cfg ./go/cfg/main.go +// $ cfg ./go/cfg stmt | dot -Tsvg > cfg.svg && open cfg.svg +package main + +import ( + "flag" + "fmt" + "go/ast" + "log" + "os" + + "golang.org/x/tools/go/cfg" + "golang.org/x/tools/go/packages" +) + +func main() { + flag.Parse() + if len(flag.Args()) != 2 { + log.Fatal("Usage: package funcname") + } + pattern, funcname := flag.Args()[0], flag.Args()[1] + pkgs, err := packages.Load(&packages.Config{Mode: packages.LoadSyntax}, pattern) + if err != nil { + log.Fatal(err) + } + if packages.PrintErrors(pkgs) > 0 { + os.Exit(1) + } + for _, pkg := range pkgs { + for _, f := range pkg.Syntax { + for _, decl := range f.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok { + if decl.Name.Name == funcname { + g := cfg.New(decl.Body, mayReturn) + fmt.Println(g.Dot(pkg.Fset)) + os.Exit(0) + } + } + } + } + } + log.Fatalf("no function %q found in %s", funcname, pattern) +} + +// A trivial mayReturn predicate that looks only at syntax, not types. 
+func mayReturn(call *ast.CallExpr) bool { + switch fun := call.Fun.(type) { + case *ast.Ident: + return fun.Name != "panic" + case *ast.SelectorExpr: + return fun.Sel.Name != "Fatal" + } + return true +} diff --git a/go/gcexportdata/gcexportdata.go b/go/gcexportdata/gcexportdata.go index 03543bd4bb8..137cc8df1d8 100644 --- a/go/gcexportdata/gcexportdata.go +++ b/go/gcexportdata/gcexportdata.go @@ -47,7 +47,7 @@ import ( func Find(importPath, srcDir string) (filename, path string) { cmd := exec.Command("go", "list", "-json", "-export", "--", importPath) cmd.Dir = srcDir - out, err := cmd.CombinedOutput() + out, err := cmd.Output() if err != nil { return "", "" } diff --git a/go/internal/cgo/cgo.go b/go/internal/cgo/cgo.go index 38d5c6c7cd3..697974bb9b2 100644 --- a/go/internal/cgo/cgo.go +++ b/go/internal/cgo/cgo.go @@ -59,11 +59,10 @@ import ( "go/token" "log" "os" + "os/exec" "path/filepath" "regexp" "strings" - - exec "golang.org/x/sys/execabs" ) // ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses diff --git a/go/internal/cgo/cgo_pkgconfig.go b/go/internal/cgo/cgo_pkgconfig.go index 7d94bbc1e5f..2455be54f6e 100644 --- a/go/internal/cgo/cgo_pkgconfig.go +++ b/go/internal/cgo/cgo_pkgconfig.go @@ -8,19 +8,22 @@ import ( "errors" "fmt" "go/build" - exec "golang.org/x/sys/execabs" + "os/exec" "strings" ) // pkgConfig runs pkg-config with the specified arguments and returns the flags it prints. func pkgConfig(mode string, pkgs []string) (flags []string, err error) { cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...) 
- out, err := cmd.CombinedOutput() + out, err := cmd.Output() if err != nil { s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err) if len(out) > 0 { s = fmt.Sprintf("%s: %s", s, out) } + if err, ok := err.(*exec.ExitError); ok && len(err.Stderr) > 0 { + s = fmt.Sprintf("%s\nstderr:\n%s", s, err.Stderr) + } return nil, errors.New(s) } if len(out) > 0 { diff --git a/go/internal/gccgoimporter/gccgoinstallation.go b/go/internal/gccgoimporter/gccgoinstallation.go index 365521e2350..fac41005c61 100644 --- a/go/internal/gccgoimporter/gccgoinstallation.go +++ b/go/internal/gccgoimporter/gccgoinstallation.go @@ -10,8 +10,8 @@ package gccgoimporter import ( "bufio" "go/types" - exec "golang.org/x/sys/execabs" "os" + "os/exec" "path/filepath" "strings" ) diff --git a/go/internal/gccgoimporter/importer_test.go b/go/internal/gccgoimporter/importer_test.go index 7adffd0df80..d8c6e42f6ad 100644 --- a/go/internal/gccgoimporter/importer_test.go +++ b/go/internal/gccgoimporter/importer_test.go @@ -140,9 +140,12 @@ func TestObjImporter(t *testing.T) { t.Skip("no support yet for debug/xcoff") } - verout, err := exec.Command(gpath, "--version").CombinedOutput() + verout, err := exec.Command(gpath, "--version").Output() if err != nil { t.Logf("%s", verout) + if exit, ok := err.(*exec.ExitError); ok && len(exit.Stderr) > 0 { + t.Logf("stderr:\n%s", exit.Stderr) + } t.Fatal(err) } vers := regexp.MustCompile(`([0-9]+)\.([0-9]+)`).FindSubmatch(verout) @@ -182,8 +185,7 @@ func TestObjImporter(t *testing.T) { afile := filepath.Join(artmpdir, "lib"+test.pkgpath+".a") cmd := exec.Command(gpath, "-fgo-pkgpath="+test.pkgpath, "-c", "-o", ofile, gofile) - out, err := cmd.CombinedOutput() - if err != nil { + if out, err := cmd.CombinedOutput(); err != nil { t.Logf("%s", out) t.Fatalf("gccgo %s failed: %s", gofile, err) } @@ -191,8 +193,7 @@ func TestObjImporter(t *testing.T) { runImporterTest(t, imp, initmap, &test) cmd = exec.Command("ar", "cr", afile, ofile) - out, err = 
cmd.CombinedOutput() - if err != nil { + if out, err := cmd.CombinedOutput(); err != nil { t.Logf("%s", out) t.Fatalf("ar cr %s %s failed: %s", afile, ofile, err) } diff --git a/go/internal/gccgoimporter/parser.go b/go/internal/gccgoimporter/parser.go index 9fdb6f8b059..b0eb1ddf867 100644 --- a/go/internal/gccgoimporter/parser.go +++ b/go/internal/gccgoimporter/parser.go @@ -20,6 +20,9 @@ import ( "strings" "text/scanner" "unicode/utf8" + + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/typesinternal" ) type parser struct { @@ -240,13 +243,6 @@ func (p *parser) parseName() string { return name } -func deref(typ types.Type) types.Type { - if p, _ := typ.(*types.Pointer); p != nil { - typ = p.Elem() - } - return typ -} - // parseField parses a Field: // // Field = Name Type [string] . @@ -260,7 +256,7 @@ func (p *parser) parseField(pkg *types.Package) (field *types.Var, tag string) { if aname, ok := p.aliases[n]; ok { name = aname } else { - switch typ := deref(typ).(type) { + switch typ := aliases.Unalias(typesinternal.Unpointer(typ)).(type) { case *types.Basic: name = typ.Name() case *types.Named: @@ -579,7 +575,7 @@ func (p *parser) parseNamedType(nlist []interface{}) types.Type { t := obj.Type() p.update(t, nlist) - nt, ok := t.(*types.Named) + nt, ok := aliases.Unalias(t).(*types.Named) if !ok { // This can happen for unsafe.Pointer, which is a TypeName holding a Basic type. 
pt := p.parseType(pkg) @@ -950,6 +946,7 @@ const ( gccgoBuiltinERROR = 19 gccgoBuiltinBYTE = 20 gccgoBuiltinRUNE = 21 + gccgoBuiltinANY = 22 ) func lookupBuiltinType(typ int) types.Type { @@ -974,6 +971,7 @@ func lookupBuiltinType(typ int) types.Type { gccgoBuiltinERROR: types.Universe.Lookup("error").Type(), gccgoBuiltinBYTE: types.Universe.Lookup("byte").Type(), gccgoBuiltinRUNE: types.Universe.Lookup("rune").Type(), + gccgoBuiltinANY: types.Universe.Lookup("any").Type(), }[typ] } @@ -1332,7 +1330,7 @@ func (p *parser) parsePackage() *types.Package { } p.fixups = nil for _, typ := range p.typeList { - if it, ok := typ.(*types.Interface); ok { + if it, ok := aliases.Unalias(typ).(*types.Interface); ok { it.Complete() } } diff --git a/go/loader/loader.go b/go/loader/loader.go index edf62c2cc03..013c0f505bb 100644 --- a/go/loader/loader.go +++ b/go/loader/loader.go @@ -23,7 +23,7 @@ import ( "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/internal/cgo" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/versions" ) var ignoreVendor build.ImportMode @@ -1033,13 +1033,14 @@ func (imp *importer) newPackageInfo(path, dir string) *PackageInfo { Defs: make(map[*ast.Ident]types.Object), Uses: make(map[*ast.Ident]types.Object), Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), Scopes: make(map[ast.Node]*types.Scope), Selections: make(map[*ast.SelectorExpr]*types.Selection), }, errorFunc: imp.conf.TypeChecker.Error, dir: dir, } - typeparams.InitInstanceInfo(&info.Info) + versions.InitFileVersions(&info.Info) // Copy the types.Config so we can vary it across PackageInfos. 
tc := imp.conf.TypeChecker diff --git a/go/loader/loader_test.go b/go/loader/loader_test.go index cab2217c3e2..1e0b16e7fc3 100644 --- a/go/loader/loader_test.go +++ b/go/loader/loader_test.go @@ -837,6 +837,7 @@ func loadIO(t *testing.T) { func TestCgoCwdIssue46877(t *testing.T) { testenv.NeedsTool(t, "go") + testenv.NeedsTool(t, "cgo") var conf loader.Config conf.Import("golang.org/x/tools/go/loader/testdata/issue46877") if _, err := conf.Load(); err != nil { diff --git a/go/loader/stdlib_test.go b/go/loader/stdlib_test.go index 83d70dabdca..ef51325e9c8 100644 --- a/go/loader/stdlib_test.go +++ b/go/loader/stdlib_test.go @@ -130,13 +130,11 @@ func TestCgoOption(t *testing.T) { case "darwin": t.Skipf("golang/go#58493: file locations in this test are stale on darwin") } + testenv.NeedsTool(t, "go") // In nocgo builds (e.g. linux-amd64-nocgo), // there is no "runtime/cgo" package, // so cgo-generated Go files will have a failing import. - if !build.Default.CgoEnabled { - return - } - testenv.NeedsTool(t, "go") + testenv.NeedsTool(t, "cgo") // Test that we can load cgo-using packages with // CGO_ENABLED=[01], which causes go/build to select pure diff --git a/go/packages/doc.go b/go/packages/doc.go index a7a8f73e3d1..a8d7b06ac09 100644 --- a/go/packages/doc.go +++ b/go/packages/doc.go @@ -5,12 +5,20 @@ /* Package packages loads Go packages for inspection and analysis. -The Load function takes as input a list of patterns and return a list of Package -structs describing individual packages matched by those patterns. -The LoadMode controls the amount of detail in the loaded packages. - -Load passes most patterns directly to the underlying build tool, -but all patterns with the prefix "query=", where query is a +The [Load] function takes as input a list of patterns and returns a +list of [Package] values describing individual packages matched by those +patterns. 
+A [Config] specifies configuration options, the most important of which is +the [LoadMode], which controls the amount of detail in the loaded packages. + +Load passes most patterns directly to the underlying build tool. +The default build tool is the go command. +Its supported patterns are described at +https://pkg.go.dev/cmd/go#hdr-Package_lists_and_patterns. +Other build systems may be supported by providing a "driver"; +see [The driver protocol]. + +All patterns with the prefix "query=", where query is a non-empty string of letters from [a-z], are reserved and may be interpreted as query operators. @@ -64,9 +72,31 @@ reported about the loaded packages. See the documentation for type LoadMode for details. Most tools should pass their command-line arguments (after any flags) -uninterpreted to the loader, so that the loader can interpret them +uninterpreted to [Load], so that it can interpret them according to the conventions of the underlying build system. + See the Example function for typical usage. + +# The driver protocol + +[Load] may be used to load Go packages even in Go projects that use +alternative build systems, by installing an appropriate "driver" +program for the build system and specifying its location in the +GOPACKAGESDRIVER environment variable. +For example, +https://github.com/bazelbuild/rules_go/wiki/Editor-and-tool-integration +explains how to use the driver for Bazel. + +The driver program is responsible for interpreting patterns in its +preferred notation and reporting information about the packages that +those patterns identify. Drivers must also support the special "file=" +and "pattern=" patterns described above. + +The patterns are provided as positional command-line arguments. A +JSON-encoded [DriverRequest] message providing additional information +is written to the driver's standard input. The driver must write a +JSON-encoded [DriverResponse] message to its standard output. 
(This +message differs from the JSON schema produced by 'go list'.) */ package packages // import "golang.org/x/tools/go/packages" diff --git a/go/packages/external.go b/go/packages/external.go index 7242a0a7d2b..4335c1eb14c 100644 --- a/go/packages/external.go +++ b/go/packages/external.go @@ -2,46 +2,85 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// This file enables an external tool to intercept package requests. -// If the tool is present then its results are used in preference to -// the go list command. - package packages +// This file defines the protocol that enables an external "driver" +// tool to supply package metadata in place of 'go list'. + import ( "bytes" "encoding/json" "fmt" - exec "golang.org/x/sys/execabs" "os" + "os/exec" "strings" ) -// The Driver Protocol +// DriverRequest defines the schema of a request for package metadata +// from an external driver program. The JSON-encoded DriverRequest +// message is provided to the driver program's standard input. The +// query patterns are provided as command-line arguments. // -// The driver, given the inputs to a call to Load, returns metadata about the packages specified. -// This allows for different build systems to support go/packages by telling go/packages how the -// packages' source is organized. -// The driver is a binary, either specified by the GOPACKAGESDRIVER environment variable or in -// the path as gopackagesdriver. It's given the inputs to load in its argv. See the package -// documentation in doc.go for the full description of the patterns that need to be supported. -// A driver receives as a JSON-serialized driverRequest struct in standard input and will -// produce a JSON-serialized driverResponse (see definition in packages.go) in its standard output. - -// driverRequest is used to provide the portion of Load's Config that is needed by a driver. 
-type driverRequest struct { +// See the package documentation for an overview. +type DriverRequest struct { Mode LoadMode `json:"mode"` + // Env specifies the environment the underlying build system should be run in. Env []string `json:"env"` + // BuildFlags are flags that should be passed to the underlying build system. BuildFlags []string `json:"build_flags"` + // Tests specifies whether the patterns should also return test packages. Tests bool `json:"tests"` + // Overlay maps file paths (relative to the driver's working directory) to the byte contents // of overlay files. Overlay map[string][]byte `json:"overlay"` } +// DriverResponse defines the schema of a response from an external +// driver program, providing the results of a query for package +// metadata. The driver program must write a JSON-encoded +// DriverResponse message to its standard output. +// +// See the package documentation for an overview. +type DriverResponse struct { + // NotHandled is returned if the request can't be handled by the current + // driver. If an external driver returns a response with NotHandled, the + // rest of the DriverResponse is ignored, and go/packages will fallback + // to the next driver. If go/packages is extended in the future to support + // lists of multiple drivers, go/packages will fall back to the next driver. + NotHandled bool + + // Compiler and Arch are the arguments pass of types.SizesFor + // to get a types.Sizes to use when type checking. + Compiler string + Arch string + + // Roots is the set of package IDs that make up the root packages. + // We have to encode this separately because when we encode a single package + // we cannot know if it is one of the roots as that requires knowledge of the + // graph it is part of. + Roots []string `json:",omitempty"` + + // Packages is the full set of packages in the graph. + // The packages are not connected into a graph. + // The Imports if populated will be stubs that only have their ID set. 
+ // Imports will be connected and then type and syntax information added in a + // later pass (see refine). + Packages []*Package + + // GoVersion is the minor version number used by the driver + // (e.g. the go command on the PATH) when selecting .go files. + // Zero means unknown. + GoVersion int +} + +// driver is the type for functions that query the build system for the +// packages named by the patterns. +type driver func(cfg *Config, patterns ...string) (*DriverResponse, error) + // findExternalDriver returns the file path of a tool that supplies // the build system package structure, or "" if not found." // If GOPACKAGESDRIVER is set in the environment findExternalTool returns its @@ -64,8 +103,8 @@ func findExternalDriver(cfg *Config) driver { return nil } } - return func(cfg *Config, words ...string) (*driverResponse, error) { - req, err := json.Marshal(driverRequest{ + return func(cfg *Config, words ...string) (*DriverResponse, error) { + req, err := json.Marshal(DriverRequest{ Mode: cfg.Mode, Env: cfg.Env, BuildFlags: cfg.BuildFlags, @@ -92,7 +131,7 @@ func findExternalDriver(cfg *Config) driver { fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(cmd), stderr) } - var response driverResponse + var response DriverResponse if err := json.Unmarshal(buf.Bytes(), &response); err != nil { return nil, err } diff --git a/go/packages/golist.go b/go/packages/golist.go index c1292b30f3e..22305d9c90a 100644 --- a/go/packages/golist.go +++ b/go/packages/golist.go @@ -11,6 +11,7 @@ import ( "fmt" "log" "os" + "os/exec" "path" "path/filepath" "reflect" @@ -20,7 +21,6 @@ import ( "sync" "unicode" - exec "golang.org/x/sys/execabs" "golang.org/x/tools/go/internal/packagesdriver" "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/packagesinternal" @@ -35,23 +35,23 @@ type goTooOldError struct { error } -// responseDeduper wraps a driverResponse, deduplicating its contents. +// responseDeduper wraps a DriverResponse, deduplicating its contents. 
type responseDeduper struct { seenRoots map[string]bool seenPackages map[string]*Package - dr *driverResponse + dr *DriverResponse } func newDeduper() *responseDeduper { return &responseDeduper{ - dr: &driverResponse{}, + dr: &DriverResponse{}, seenRoots: map[string]bool{}, seenPackages: map[string]*Package{}, } } -// addAll fills in r with a driverResponse. -func (r *responseDeduper) addAll(dr *driverResponse) { +// addAll fills in r with a DriverResponse. +func (r *responseDeduper) addAll(dr *DriverResponse) { for _, pkg := range dr.Packages { r.addPackage(pkg) } @@ -128,7 +128,7 @@ func (state *golistState) mustGetEnv() map[string]string { // goListDriver uses the go list command to interpret the patterns and produce // the build system package structure. // See driver for more details. -func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) { +func goListDriver(cfg *Config, patterns ...string) (_ *DriverResponse, err error) { // Make sure that any asynchronous go commands are killed when we return. parentCtx := cfg.Context if parentCtx == nil { @@ -146,16 +146,18 @@ func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) { } // Fill in response.Sizes asynchronously if necessary. - var sizeserr error - var sizeswg sync.WaitGroup if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 { - sizeswg.Add(1) + errCh := make(chan error) go func() { compiler, arch, err := packagesdriver.GetSizesForArgsGolist(ctx, state.cfgInvocation(), cfg.gocmdRunner) - sizeserr = err response.dr.Compiler = compiler response.dr.Arch = arch - sizeswg.Done() + errCh <- err + }() + defer func() { + if sizesErr := <-errCh; sizesErr != nil { + err = sizesErr + } }() } @@ -208,10 +210,7 @@ extractQueries: } } - sizeswg.Wait() - if sizeserr != nil { - return nil, sizeserr - } + // (We may yet return an error due to defer.) 
return response.dr, nil } @@ -266,7 +265,7 @@ func (state *golistState) runContainsQueries(response *responseDeduper, queries // adhocPackage attempts to load or construct an ad-hoc package for a given // query, if the original call to the driver produced inadequate results. -func (state *golistState) adhocPackage(pattern, query string) (*driverResponse, error) { +func (state *golistState) adhocPackage(pattern, query string) (*DriverResponse, error) { response, err := state.createDriverResponse(query) if err != nil { return nil, err @@ -357,7 +356,7 @@ func otherFiles(p *jsonPackage) [][]string { // createDriverResponse uses the "go list" command to expand the pattern // words and return a response for the specified packages. -func (state *golistState) createDriverResponse(words ...string) (*driverResponse, error) { +func (state *golistState) createDriverResponse(words ...string) (*DriverResponse, error) { // go list uses the following identifiers in ImportPath and Imports: // // "p" -- importable package or main (command) @@ -384,7 +383,7 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse pkgs := make(map[string]*Package) additionalErrors := make(map[string][]Error) // Decode the JSON and convert it to Package form. - response := &driverResponse{ + response := &DriverResponse{ GoVersion: goVersion, } for dec := json.NewDecoder(buf); dec.More(); { diff --git a/go/packages/gopackages/main.go b/go/packages/gopackages/main.go index bf0b5043fc6..706f13a99a0 100644 --- a/go/packages/gopackages/main.go +++ b/go/packages/gopackages/main.go @@ -104,6 +104,7 @@ func (app *application) Run(ctx context.Context, args ...string) error { default: return tool.CommandLineErrorf("invalid mode: %s", app.Mode) } + cfg.Mode |= packages.NeedModule lpkgs, err := packages.Load(cfg, args...) 
if err != nil { @@ -162,6 +163,9 @@ func (app *application) print(lpkg *packages.Package) { kind += "package" } fmt.Printf("Go %s %q:\n", kind, lpkg.ID) // unique ID + if mod := lpkg.Module; mod != nil { + fmt.Printf("\tmodule %s@%s\n", mod.Path, mod.Version) + } fmt.Printf("\tpackage %s\n", lpkg.Name) // characterize type info diff --git a/go/packages/packages.go b/go/packages/packages.go index 6cbd3de83ec..865d90597a9 100644 --- a/go/packages/packages.go +++ b/go/packages/packages.go @@ -9,6 +9,7 @@ package packages import ( "context" "encoding/json" + "errors" "fmt" "go/ast" "go/parser" @@ -24,11 +25,13 @@ import ( "sync" "time" + "golang.org/x/sync/errgroup" + "golang.org/x/tools/go/gcexportdata" "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/packagesinternal" - "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/versions" ) // A LoadMode controls the amount of detail to return when loading. @@ -206,43 +209,6 @@ type Config struct { Overlay map[string][]byte } -// driver is the type for functions that query the build system for the -// packages named by the patterns. -type driver func(cfg *Config, patterns ...string) (*driverResponse, error) - -// driverResponse contains the results for a driver query. -type driverResponse struct { - // NotHandled is returned if the request can't be handled by the current - // driver. If an external driver returns a response with NotHandled, the - // rest of the driverResponse is ignored, and go/packages will fallback - // to the next driver. If go/packages is extended in the future to support - // lists of multiple drivers, go/packages will fall back to the next driver. - NotHandled bool - - // Compiler and Arch are the arguments pass of types.SizesFor - // to get a types.Sizes to use when type checking. - Compiler string - Arch string - - // Roots is the set of package IDs that make up the root packages. 
- // We have to encode this separately because when we encode a single package - // we cannot know if it is one of the roots as that requires knowledge of the - // graph it is part of. - Roots []string `json:",omitempty"` - - // Packages is the full set of packages in the graph. - // The packages are not connected into a graph. - // The Imports if populated will be stubs that only have their ID set. - // Imports will be connected and then type and syntax information added in a - // later pass (see refine). - Packages []*Package - - // GoVersion is the minor version number used by the driver - // (e.g. the go command on the PATH) when selecting .go files. - // Zero means unknown. - GoVersion int -} - // Load loads and returns the Go packages named by the given patterns. // // Config specifies loading options; @@ -291,9 +257,28 @@ func Load(cfg *Config, patterns ...string) ([]*Package, error) { // no external driver, or the driver returns a response with NotHandled set, // defaultDriver will fall back to the go list driver. // The boolean result indicates that an external driver handled the request. -func defaultDriver(cfg *Config, patterns ...string) (*driverResponse, bool, error) { +func defaultDriver(cfg *Config, patterns ...string) (*DriverResponse, bool, error) { + const ( + // windowsArgMax specifies the maximum command line length for + // the Windows' CreateProcess function. + windowsArgMax = 32767 + // maxEnvSize is a very rough estimation of the maximum environment + // size of a user. + maxEnvSize = 16384 + // safeArgMax specifies the maximum safe command line length to use + // by the underlying driver excl. the environment. We choose the Windows' + // ARG_MAX as the starting point because it's one of the lowest ARG_MAX + // constants out of the different supported platforms, + // e.g., https://www.in-ulm.de/~mascheck/various/argmax/#results. 
+ safeArgMax = windowsArgMax - maxEnvSize + ) + chunks, err := splitIntoChunks(patterns, safeArgMax) + if err != nil { + return nil, false, err + } + if driver := findExternalDriver(cfg); driver != nil { - response, err := driver(cfg, patterns...) + response, err := callDriverOnChunks(driver, cfg, chunks) if err != nil { return nil, false, err } else if !response.NotHandled { @@ -302,10 +287,84 @@ func defaultDriver(cfg *Config, patterns ...string) (*driverResponse, bool, erro // (fall through) } - response, err := goListDriver(cfg, patterns...) + response, err := callDriverOnChunks(goListDriver, cfg, chunks) + if err != nil { + return nil, false, err + } return response, false, err } +// splitIntoChunks chunks the slice so that the total number of characters +// in a chunk is no longer than argMax. +func splitIntoChunks(patterns []string, argMax int) ([][]string, error) { + if argMax <= 0 { + return nil, errors.New("failed to split patterns into chunks, negative safe argMax value") + } + var chunks [][]string + charsInChunk := 0 + nextChunkStart := 0 + for i, v := range patterns { + vChars := len(v) + if vChars > argMax { + // a single pattern is longer than the maximum safe ARG_MAX, hardly should happen + return nil, errors.New("failed to split patterns into chunks, a pattern is too long") + } + charsInChunk += vChars + 1 // +1 is for a whitespace between patterns that has to be counted too + if charsInChunk > argMax { + chunks = append(chunks, patterns[nextChunkStart:i]) + nextChunkStart = i + charsInChunk = vChars + } + } + // add the last chunk + if nextChunkStart < len(patterns) { + chunks = append(chunks, patterns[nextChunkStart:]) + } + return chunks, nil +} + +func callDriverOnChunks(driver driver, cfg *Config, chunks [][]string) (*DriverResponse, error) { + if len(chunks) == 0 { + return driver(cfg) + } + responses := make([]*DriverResponse, len(chunks)) + errNotHandled := errors.New("driver returned NotHandled") + var g errgroup.Group + for i, chunk := 
range chunks { + i := i + chunk := chunk + g.Go(func() (err error) { + responses[i], err = driver(cfg, chunk...) + if responses[i] != nil && responses[i].NotHandled { + err = errNotHandled + } + return err + }) + } + if err := g.Wait(); err != nil { + if errors.Is(err, errNotHandled) { + return &DriverResponse{NotHandled: true}, nil + } + return nil, err + } + return mergeResponses(responses...), nil +} + +func mergeResponses(responses ...*DriverResponse) *DriverResponse { + if len(responses) == 0 { + return nil + } + response := newDeduper() + response.dr.NotHandled = false + response.dr.Compiler = responses[0].Compiler + response.dr.Arch = responses[0].Arch + response.dr.GoVersion = responses[0].GoVersion + for _, v := range responses { + response.addAll(v) + } + return response.dr +} + // A Package describes a loaded Go package. type Package struct { // ID is a unique identifier for a package, @@ -432,12 +491,6 @@ func init() { packagesinternal.GetDepsErrors = func(p interface{}) []*packagesinternal.PackageError { return p.(*Package).depsErrors } - packagesinternal.GetGoCmdRunner = func(config interface{}) *gocommand.Runner { - return config.(*Config).gocmdRunner - } - packagesinternal.SetGoCmdRunner = func(config interface{}, runner *gocommand.Runner) { - config.(*Config).gocmdRunner = runner - } packagesinternal.SetModFile = func(config interface{}, value string) { config.(*Config).modFile = value } @@ -654,7 +707,7 @@ func newLoader(cfg *Config) *loader { // refine connects the supplied packages into a graph and then adds type // and syntax information as requested by the LoadMode. 
-func (ld *loader) refine(response *driverResponse) ([]*Package, error) { +func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { roots := response.Roots rootMap := make(map[string]int, len(roots)) for i, root := range roots { @@ -1020,10 +1073,11 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) { Defs: make(map[*ast.Ident]types.Object), Uses: make(map[*ast.Ident]types.Object), Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), Scopes: make(map[ast.Node]*types.Scope), Selections: make(map[*ast.SelectorExpr]*types.Selection), } - typeparams.InitInstanceInfo(lpkg.TypesInfo) + versions.InitFileVersions(lpkg.TypesInfo) lpkg.TypesSizes = ld.sizes importer := importerFunc(func(path string) (*types.Package, error) { @@ -1064,7 +1118,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) { Sizes: ld.sizes, // may be nil } if lpkg.Module != nil && lpkg.Module.GoVersion != "" { - typesinternal.SetGoVersion(tc, "go"+lpkg.Module.GoVersion) + tc.GoVersion = "go" + lpkg.Module.GoVersion } if (ld.Mode & typecheckCgo) != 0 { if !typesinternal.SetUsesCgo(tc) { @@ -1075,10 +1129,24 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) { return } } - types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax) + typErr := types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax) lpkg.importErrors = nil // no longer needed + // In go/types go1.21 and go1.22, Checker.Files failed fast with a + // a "too new" error, without calling tc.Error and without + // proceeding to type-check the package (#66525). + // We rely on the runtimeVersion error to give the suggested remedy. 
+ if typErr != nil && len(lpkg.Errors) == 0 && len(lpkg.Syntax) > 0 { + if msg := typErr.Error(); strings.HasPrefix(msg, "package requires newer Go version") { + appendError(types.Error{ + Fset: ld.Fset, + Pos: lpkg.Syntax[0].Package, + Msg: msg, + }) + } + } + // If !Cgo, the type-checker uses FakeImportC mode, so // it doesn't invoke the importer for import "C", // nor report an error for the import, @@ -1100,6 +1168,12 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) { } } + // If types.Checker.Files had an error that was unreported, + // make sure to report the unknown error so the package is illTyped. + if typErr != nil && len(lpkg.Errors) == 0 { + appendError(typErr) + } + // Record accumulated errors. illTyped := len(lpkg.Errors) > 0 if !illTyped { diff --git a/go/packages/packages_test.go b/go/packages/packages_test.go index 6e461c8acad..97f34d4527d 100644 --- a/go/packages/packages_test.go +++ b/go/packages/packages_test.go @@ -367,6 +367,32 @@ func TestLoadAbsolutePath(t *testing.T) { } } +func TestLoadArgumentListIsNotTooLong(t *testing.T) { + // NOTE: this test adds about 2s to the test suite running time + + t.Parallel() + + // using the real ARG_MAX for some platforms increases the running time of this test by a lot, + // 1_000_000 seems like enough to break Windows and macOS if Load doesn't split provided patterns + argMax := 1_000_000 + exported := packagestest.Export(t, packagestest.GOPATH, []packagestest.Module{{ + Name: "golang.org/mod", + Files: map[string]interface{}{ + "main.go": `package main"`, + }}}) + defer exported.Cleanup() + numOfPatterns := argMax/16 + 1 // the pattern below is approx. 16 chars + patterns := make([]string, numOfPatterns) + for i := 0; i < numOfPatterns; i++ { + patterns[i] = fmt.Sprintf("golang.org/mod/p%d", i) + } // patterns have more than argMax number of chars combined with whitespaces b/w patterns + + _, err := packages.Load(exported.Config, patterns...) 
+ if err != nil { + t.Fatalf("failed to load: %v", err) + } +} + func TestVendorImports(t *testing.T) { t.Parallel() @@ -1308,7 +1334,7 @@ func testNoPatterns(t *testing.T, exporter packagestest.Exporter) { func TestJSON(t *testing.T) { testAllOrModulesParallel(t, testJSON) } func testJSON(t *testing.T, exporter packagestest.Exporter) { - //TODO: add in some errors + // TODO: add in some errors exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", Files: map[string]interface{}{ @@ -2444,9 +2470,6 @@ func testIssue37098(t *testing.T, exporter packagestest.Exporter) { // causes C++ sources to be inadvertently included in // (*Package).CompiledGoFiles. - // This is fixed in Go 1.17, but not earlier. - testenv.NeedsGo1Point(t, 17) - if _, err := exec.LookPath("swig"); err != nil { t.Skip("skipping test: swig not available") } @@ -2953,3 +2976,43 @@ func TestExportFile(t *testing.T) { cfg.Mode = packages.NeedTypes packages.Load(cfg, "fmt") } + +// TestLoadEitherSucceedsOrFails is an attempt to reproduce a sporadic +// failure observed on the Android emu builders in which Load would +// return an empty list of packages but no error. We don't expect +// packages.Load to succeed on that platform, and testenv.NeedsGoBuild +// would ordinarily suppress the attempt if called early. But +// regardless of whether the 'go' command is functional, Load should +// never return an empty set of packages but no error. +func TestLoadEitherSucceedsOrFails(t *testing.T) { + const src = `package p` + dir := t.TempDir() + cfg := &packages.Config{ + Dir: dir, + Mode: packages.LoadSyntax, + Overlay: map[string][]byte{ + filepath.Join(dir, "p.go"): []byte(src), + }, + } + initial, err := packages.Load(cfg, "./p.go") + if err != nil { + // If Load failed because it needed 'go' and the + // platform doesn't have it, silently skip the test. + testenv.NeedsGoBuild(t) + + // Otherwise, it's a real failure. 
+ t.Fatal(err) + } + + // If Load returned without error, + // it had better give us error-free packages. + if packages.PrintErrors(initial) > 0 { + t.Errorf("packages contain errors") + } + + // If Load returned without error, + // it had better give us the correct number packages. + if len(initial) != 1 { + t.Errorf("Load returned %d packages (want 1) and no error", len(initial)) + } +} diff --git a/go/packages/packagestest/expect.go b/go/packages/packagestest/expect.go index 00a30f713e2..14a6446138f 100644 --- a/go/packages/packagestest/expect.go +++ b/go/packages/packagestest/expect.go @@ -128,21 +128,6 @@ type Range struct { Start, End token.Pos // both valid and within range of TokFile } -// A rangeSetter abstracts a variable that can be set from a Range value. -// -// The parameter conversion machinery will automatically construct a -// variable of type T and call the SetRange method on its address if -// *T implements rangeSetter. This allows alternative notations of -// source ranges to interoperate transparently with this package. -// -// This type intentionally does not mention Range itself, to avoid a -// dependency from the application's range type upon this package. -// -// Currently this is a secret back door for use only by gopls. -type rangeSetter interface { - SetRange(file *token.File, start, end token.Pos) -} - // Mark adds a new marker to the known set. 
func (e *Exported) Mark(name string, r Range) { if e.markers == nil { @@ -243,15 +228,14 @@ func (e *Exported) getMarkers() error { } var ( - noteType = reflect.TypeOf((*expect.Note)(nil)) - identifierType = reflect.TypeOf(expect.Identifier("")) - posType = reflect.TypeOf(token.Pos(0)) - positionType = reflect.TypeOf(token.Position{}) - rangeType = reflect.TypeOf(Range{}) - rangeSetterType = reflect.TypeOf((*rangeSetter)(nil)).Elem() - fsetType = reflect.TypeOf((*token.FileSet)(nil)) - regexType = reflect.TypeOf((*regexp.Regexp)(nil)) - exportedType = reflect.TypeOf((*Exported)(nil)) + noteType = reflect.TypeOf((*expect.Note)(nil)) + identifierType = reflect.TypeOf(expect.Identifier("")) + posType = reflect.TypeOf(token.Pos(0)) + positionType = reflect.TypeOf(token.Position{}) + rangeType = reflect.TypeOf(Range{}) + fsetType = reflect.TypeOf((*token.FileSet)(nil)) + regexType = reflect.TypeOf((*regexp.Regexp)(nil)) + exportedType = reflect.TypeOf((*Exported)(nil)) ) // converter converts from a marker's argument parsed from the comment to @@ -310,17 +294,6 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { } return reflect.ValueOf(r), remains, nil }, nil - case reflect.PtrTo(pt).AssignableTo(rangeSetterType): - // (*pt).SetRange method exists: call it. 
- return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { - r, remains, err := e.rangeConverter(n, args) - if err != nil { - return reflect.Value{}, nil, err - } - v := reflect.New(pt) - v.Interface().(rangeSetter).SetRange(r.TokFile, r.Start, r.End) - return v.Elem(), remains, nil - }, nil case pt == identifierType: return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { if len(args) < 1 { diff --git a/go/packages/packagestest/modules.go b/go/packages/packagestest/modules.go index 1299c6c3c73..7eff9432086 100644 --- a/go/packages/packagestest/modules.go +++ b/go/packages/packagestest/modules.go @@ -14,7 +14,6 @@ import ( "strings" "golang.org/x/tools/internal/gocommand" - "golang.org/x/tools/internal/packagesinternal" "golang.org/x/tools/internal/proxydir" ) @@ -171,8 +170,6 @@ func (modules) Finalize(exported *Exported) error { "GOPROXY="+proxydir.ToURL(modProxyDir), "GOSUMDB=off", ) - gocmdRunner := &gocommand.Runner{} - packagesinternal.SetGoCmdRunner(exported.Config, gocmdRunner) // Run go mod download to recreate the mod cache dir with all the extra // stuff in cache. All the files created by Export should be recreated. 
@@ -183,10 +180,8 @@ func (modules) Finalize(exported *Exported) error { BuildFlags: exported.Config.BuildFlags, WorkingDir: exported.Config.Dir, } - if _, err := gocmdRunner.Run(context.Background(), inv); err != nil { - return err - } - return nil + _, err := new(gocommand.Runner).Run(context.Background(), inv) + return err } func writeModuleFiles(rootDir, module, ver string, filePaths map[string]string) error { diff --git a/go/ssa/builder.go b/go/ssa/builder.go index b64c50b4349..1f7f364eef0 100644 --- a/go/ssa/builder.go +++ b/go/ssa/builder.go @@ -79,17 +79,18 @@ import ( "go/token" "go/types" "os" + "runtime" "sync" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/versions" ) -type opaqueType struct { - types.Type - name string -} +type opaqueType struct{ name string } -func (t *opaqueType) String() string { return t.name } +func (t *opaqueType) String() string { return t.name } +func (t *opaqueType) Underlying() types.Type { return t } var ( varOk = newVar("ok", tBool) @@ -102,7 +103,7 @@ var ( tInvalid = types.Typ[types.Invalid] tString = types.Typ[types.String] tUntypedNil = types.Typ[types.UntypedNil] - tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators + tRangeIter = &opaqueType{"iter"} // the type of all "range" iterators tEface = types.NewInterfaceType(nil, nil).Complete() // SSA Value constants. @@ -327,7 +328,7 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ } case "new": - return emitNew(fn, mustDeref(typ), pos, "new") + return emitNew(fn, typeparams.MustDeref(typ), pos, "new") case "len", "cap": // Special case: len or cap of an array or *array is @@ -335,7 +336,7 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ // We must still evaluate the value, though. (If it // was side-effect free, the whole call would have // been constant-folded.) 
- t, _ := deref(fn.typeOf(args[0])) + t := typeparams.Deref(fn.typeOf(args[0])) if at, ok := typeparams.CoreType(t).(*types.Array); ok { b.expr(fn, args[0]) // for effects only return intConst(at.Len()) @@ -391,7 +392,7 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { return &address{addr: v, pos: e.Pos(), expr: e} case *ast.CompositeLit: - typ, _ := deref(fn.typeOf(e)) + typ := typeparams.Deref(fn.typeOf(e)) var v *Alloc if escaping { v = emitNew(fn, typ, e.Lbrace, "complit") @@ -418,7 +419,7 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { wantAddr := true v := b.receiver(fn, e.X, wantAddr, escaping, sel) index := sel.index[len(sel.index)-1] - fld := fieldOf(mustDeref(v.Type()), index) // v is an addr. + fld := fieldOf(typeparams.MustDeref(v.Type()), index) // v is an addr. // Due to the two phases of resolving AssignStmt, a panic from x.f = p() // when x is nil is required to come after the side-effects of @@ -467,7 +468,7 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { v.setType(et) return fn.emit(v) } - return &lazyAddress{addr: emit, t: mustDeref(et), pos: e.Lbrack, expr: e} + return &lazyAddress{addr: emit, t: typeparams.MustDeref(et), pos: e.Lbrack, expr: e} case *ast.StarExpr: return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e} @@ -512,17 +513,15 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb * // A CompositeLit never evaluates to a pointer, // so if the type of the location is a pointer, // an &-operation is implied. 
- if _, ok := loc.(blank); !ok { // avoid calling blank.typ() - if _, ok := deref(loc.typ()); ok { - ptr := b.addr(fn, e, true).address(fn) - // copy address - if sb != nil { - sb.store(loc, ptr) - } else { - loc.store(fn, ptr) - } - return + if !is[blank](loc) && isPointerCore(loc.typ()) { // avoid calling blank.typ() + ptr := b.addr(fn, e, true).address(fn) + // copy address + if sb != nil { + sb.store(loc, ptr) + } else { + loc.store(fn, ptr) } + return } if _, ok := loc.(*address); ok { @@ -794,14 +793,14 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { // The result is a "bound". obj := sel.obj.(*types.Func) rt := fn.typ(recvType(obj)) - _, wantAddr := deref(rt) + wantAddr := isPointer(rt) escaping := true v := b.receiver(fn, e.X, wantAddr, escaping, sel) if types.IsInterface(rt) { // If v may be an interface type I (after instantiating), // we must emit a check that v is non-nil. - if recv, ok := sel.recv.(*typeparams.TypeParam); ok { + if recv, ok := aliases.Unalias(sel.recv).(*types.TypeParam); ok { // Emit a nil check if any possible instantiation of the // type parameter is an interface type. if typeSetOf(recv).Len() > 0 { @@ -847,7 +846,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { panic("unexpected expression-relative selector") - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: // f[X, Y] must be a generic function if !instance(fn.info, e.X) { panic("unexpected expression-could not match index list to instantiation") @@ -922,7 +921,7 @@ func (b *builder) stmtList(fn *Function, list []ast.Stmt) { // escaping is defined as per builder.addr(). 
func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *selection) Value { var v Value - if _, eptr := deref(fn.typeOf(e)); wantAddr && !sel.indirect && !eptr { + if wantAddr && !sel.indirect && !isPointerCore(fn.typeOf(e)) { v = b.addr(fn, e, escaping).address(fn) } else { v = b.expr(fn, e) @@ -931,7 +930,10 @@ func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, se last := len(sel.index) - 1 // The position of implicit selection is the position of the inducing receiver expression. v = emitImplicitSelections(fn, v, sel.index[:last], e.Pos()) - if _, vptr := deref(v.Type()); !wantAddr && vptr { + if types.IsInterface(v.Type()) { + // When v is an interface, sel.Kind()==MethodValue and v.f is invoked. + // So v is not loaded, even if v has a pointer core type. + } else if !wantAddr && isPointerCore(v.Type()) { v = emitLoad(fn, v) } return v @@ -950,7 +952,7 @@ func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { obj := sel.obj.(*types.Func) recv := recvType(obj) - _, wantAddr := deref(recv) + wantAddr := isPointer(recv) escaping := true v := b.receiver(fn, selector.X, wantAddr, escaping, sel) if types.IsInterface(recv) { @@ -1211,12 +1213,12 @@ func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { // literal has type *T behaves like &T{}. // In that case, addr must hold a T, not a *T. 
func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { - typ, _ := deref(fn.typeOf(e)) // type with name [may be type param] + typ := typeparams.Deref(fn.typeOf(e)) // retain the named/alias/param type, if any switch t := typeparams.CoreType(typ).(type) { case *types.Struct: if !isZero && len(e.Elts) != t.NumFields() { // memclear - zt, _ := deref(addr.Type()) + zt := typeparams.MustDeref(addr.Type()) sb.store(&address{addr, e.Lbrace, nil}, zeroConst(zt)) isZero = true } @@ -1259,7 +1261,7 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero if !isZero && int64(len(e.Elts)) != at.Len() { // memclear - zt, _ := deref(array.Type()) + zt := typeparams.MustDeref(array.Type()) sb.store(&address{array, e.Lbrace, nil}, zeroConst(zt)) } } @@ -1315,7 +1317,7 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero // map[*struct{}]bool{&struct{}{}: true} wantAddr := false if _, ok := unparen(e.Key).(*ast.CompositeLit); ok { - _, wantAddr = deref(t.Key()) + wantAddr = isPointerCore(t.Key()) } var key Value @@ -1744,9 +1746,7 @@ func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { // Use forStmtGo122 instead if it applies. 
if s.Init != nil { if assign, ok := s.Init.(*ast.AssignStmt); ok && assign.Tok == token.DEFINE { - major, minor := parseGoVersion(fn.goversion) - afterGo122 := major >= 1 && minor >= 22 - if afterGo122 { + if versions.AtLeast(fn.goversion, versions.Go1_22) { b.forStmtGo122(fn, s, label) return } @@ -1830,6 +1830,7 @@ func (b *builder) forStmtGo122(fn *Function, s *ast.ForStmt, label *lblock) { // done: init := s.Init.(*ast.AssignStmt) + startingBlocks := len(fn.Blocks) pre := fn.currentBlock // current block before starting loop := fn.newBasicBlock("for.loop") // target of back-edge @@ -1837,15 +1838,19 @@ func (b *builder) forStmtGo122(fn *Function, s *ast.ForStmt, label *lblock) { post := fn.newBasicBlock("for.post") // target of 'continue' done := fn.newBasicBlock("for.done") // target of 'break' - // For each of the n loop variables, we create three SSA values, - // outer[i], phi[i], and next[i] in pre, loop, and post. + // For each of the n loop variables, we create five SSA values, + // outer, phi, next, load, and store in pre, loop, and post. // There is no limit on n. - lhss := init.Lhs - vars := make([]*types.Var, len(lhss)) - outers := make([]Value, len(vars)) - phis := make([]Value, len(vars)) - nexts := make([]Value, len(vars)) - for i, lhs := range lhss { + type loopVar struct { + obj *types.Var + outer *Alloc + phi *Phi + load *UnOp + next *Alloc + store *Store + } + vars := make([]loopVar, len(init.Lhs)) + for i, lhs := range init.Lhs { v := identVar(fn, lhs.(*ast.Ident)) typ := fn.typ(v.Type()) @@ -1859,31 +1864,24 @@ func (b *builder) forStmtGo122(fn *Function, s *ast.ForStmt, label *lblock) { fn.emit(phi) fn.currentBlock = post - // If next is is local, it reuses the address and zeroes the old value. - // Load before the Alloc. + // If next is local, it reuses the address and zeroes the old value so + // load before allocating next. 
load := emitLoad(fn, phi) next := emitLocal(fn, typ, v.Pos(), v.Name()) - emitStore(fn, next, load, token.NoPos) + store := emitStore(fn, next, load, token.NoPos) phi.Edges = []Value{outer, next} // pre edge is emitted before post edge. - vars[i] = v - outers[i] = outer - phis[i] = phi - nexts[i] = next - } - - varsCurrentlyReferTo := func(vals []Value) { - for i, v := range vars { - fn.vars[v] = vals[i] - } + vars[i] = loopVar{v, outer, phi, load, next, store} } // ...init... under fn.objects[v] = i_outer fn.currentBlock = pre - varsCurrentlyReferTo(outers) + for _, v := range vars { + fn.vars[v.obj] = v.outer + } const isDef = false // assign to already-allocated outers - b.assignStmt(fn, lhss, init.Rhs, isDef) + b.assignStmt(fn, init.Lhs, init.Rhs, isDef) if label != nil { label._break = done label._continue = post @@ -1892,7 +1890,9 @@ func (b *builder) forStmtGo122(fn *Function, s *ast.ForStmt, label *lblock) { // ...cond... under fn.objects[v] = i fn.currentBlock = loop - varsCurrentlyReferTo(phis) + for _, v := range vars { + fn.vars[v.obj] = v.phi + } if s.Cond != nil { b.cond(fn, s.Cond, body, done) } else { @@ -1911,7 +1911,9 @@ func (b *builder) forStmtGo122(fn *Function, s *ast.ForStmt, label *lblock) { emitJump(fn, post) // ...post... under fn.objects[v] = i_next - varsCurrentlyReferTo(nexts) + for _, v := range vars { + fn.vars[v.obj] = v.next + } fn.currentBlock = post if s.Post != nil { b.stmt(fn, s.Post) @@ -1919,9 +1921,53 @@ func (b *builder) forStmtGo122(fn *Function, s *ast.ForStmt, label *lblock) { emitJump(fn, loop) // back-edge fn.currentBlock = done - // TODO(taking): Optimizations for when local variables can be fused. - // Principled approach is: hoist i_next, fuse i_outer and i_next, eliminate redundant phi, and ssa-lifting. - // Unclear if we want to do any of this in general or only for range/for-loops with new lifetimes. 
+ // For each loop variable that does not escape, + // (the common case), fuse its next cells into its + // (local) outer cell as they have disjoint live ranges. + // + // It is sufficient to test whether i_next escapes, + // because its Heap flag will be marked true if either + // the cond or post expression causes i to escape + // (because escape distributes over phi). + var nlocals int + for _, v := range vars { + if !v.next.Heap { + nlocals++ + } + } + if nlocals > 0 { + replace := make(map[Value]Value, 2*nlocals) + dead := make(map[Instruction]bool, 4*nlocals) + for _, v := range vars { + if !v.next.Heap { + replace[v.next] = v.outer + replace[v.phi] = v.outer + dead[v.phi], dead[v.next], dead[v.load], dead[v.store] = true, true, true, true + } + } + + // Replace all uses of i_next and phi with i_outer. + // Referrers have not been built for fn yet so only update Instruction operands. + // We need only look within the blocks added by the loop. + var operands []*Value // recycle storage + for _, b := range fn.Blocks[startingBlocks:] { + for _, instr := range b.Instrs { + operands = instr.Operands(operands[:0]) + for _, ptr := range operands { + k := *ptr + if v := replace[k]; v != nil { + *ptr = v + } + } + } + } + + // Remove instructions for phi, load, and store. + // lift() will remove the unused i_next *Alloc. + isDead := func(i Instruction) bool { return dead[i] } + loop.Instrs = removeInstrsIf(loop.Instrs, isDead) + post.Instrs = removeInstrsIf(post.Instrs, isDead) + } } // rangeIndexed emits to fn the header for an integer-indexed loop @@ -1944,7 +1990,7 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.P // Determine number of iterations. var length Value - dt, _ := deref(x.Type()) + dt := typeparams.Deref(x.Type()) if arr, ok := typeparams.CoreType(dt).(*types.Array); ok { // For array or *array, the number of iterations is // known statically thanks to the type. 
We avoid a @@ -2195,9 +2241,7 @@ func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) { } } - major, minor := parseGoVersion(fn.goversion) - afterGo122 := major >= 1 && minor >= 22 - + afterGo122 := versions.AtLeast(fn.goversion, versions.Go1_22) if s.Tok == token.DEFINE && !afterGo122 { // pre-go1.22: If iteration variables are defined (:=), this // occurs once outside the loop. @@ -2293,6 +2337,12 @@ start: } case *ast.LabeledStmt: + if s.Label.Name == "_" { + // Blank labels can't be the target of a goto, break, + // or continue statement, so we don't need a new block. + _s = s.Stmt + goto start + } label = fn.labelledBlock(s.Label) emitJump(fn, label._goto) fn.currentBlock = label._goto @@ -2571,15 +2621,20 @@ func (prog *Program) Build() { p.Build() } else { wg.Add(1) + cpuLimit <- struct{}{} // acquire a token go func(p *Package) { p.Build() wg.Done() + <-cpuLimit // release a token }(p) } } wg.Wait() } +// cpuLimit is a counting semaphore to limit CPU parallelism. +var cpuLimit = make(chan struct{}, runtime.GOMAXPROCS(0)) + // Build builds SSA code for all functions and vars in package p. 
// // CreatePackage must have been called for all of p's direct imports diff --git a/go/ssa/builder_generic_test.go b/go/ssa/builder_generic_test.go index 7c43b24c6c9..85c599443b7 100644 --- a/go/ssa/builder_generic_test.go +++ b/go/ssa/builder_generic_test.go @@ -483,6 +483,38 @@ func TestGenericBodies(t *testing.T) { } } `, + ` + package issue64324 + + type bar[T any] interface { + Bar(int) T + } + type foo[T any] interface { + bar[[]T] + *T + } + func Foo[T any, F foo[T]](d int) { + m := new(T) + f := F(m) + print(f.Bar(d)) /*@ types("[]T")*/ + } + `, ` + package issue64324b + + type bar[T any] interface { + Bar(int) T + } + type baz[T any] interface { + bar[*int] + *int + } + + func Baz[I baz[string]](d int) { + m := new(int) + f := I(m) + print(f.Bar(d)) /*@ types("*int")*/ + } + `, } { contents := contents pkgname := packageName(t, contents) diff --git a/go/ssa/builder_test.go b/go/ssa/builder_test.go index 2186d2578a9..607e64f6e4f 100644 --- a/go/ssa/builder_test.go +++ b/go/ssa/builder_test.go @@ -25,8 +25,8 @@ import ( "golang.org/x/tools/go/packages" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/ssa/ssautil" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/testenv" - "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/txtar" ) @@ -689,10 +689,10 @@ func LoadPointer(addr *unsafe.Pointer) (val unsafe.Pointer) p := prog.Package(lprog.Package("p").Pkg) p.Build() - if load := p.Func("Load"); typeparams.ForSignature(load.Signature).Len() != 1 { + if load := p.Func("Load"); load.Signature.TypeParams().Len() != 1 { t.Errorf("expected a single type param T for Load got %q", load.Signature) } - if ptr := p.Type("Pointer"); typeparams.ForNamed(ptr.Type().(*types.Named)).Len() != 1 { + if ptr := p.Type("Pointer"); ptr.Type().(*types.Named).TypeParams().Len() != 1 { t.Errorf("expected a single type param T for Pointer got %q", ptr.Type()) } } @@ -822,8 +822,6 @@ var indirect = R[int].M // TestTypeparamTest builds SSA over compilable 
examples in $GOROOT/test/typeparam/*.go. func TestTypeparamTest(t *testing.T) { - testenv.NeedsGo1Point(t, 19) // fails with infinite recursion at 1.18 -- not investigated - // Tests use a fake goroot to stub out standard libraries with delcarations in // testdata/src. Decreases runtime from ~80s to ~1s. @@ -1000,7 +998,6 @@ func TestGenericFunctionSelector(t *testing.T) { func TestIssue58491(t *testing.T) { // Test that a local type reaches type param in instantiation. - testenv.NeedsGo1Point(t, 18) src := ` package p @@ -1058,7 +1055,6 @@ func TestIssue58491(t *testing.T) { func TestIssue58491Rec(t *testing.T) { // Roughly the same as TestIssue58491 but with a recursive type. - testenv.NeedsGo1Point(t, 18) src := ` package p @@ -1092,7 +1088,7 @@ func TestIssue58491Rec(t *testing.T) { // Find the local type result instantiated with int. var found bool for _, rt := range p.Prog.RuntimeTypes() { - if n, ok := rt.(*types.Named); ok { + if n, ok := aliases.Unalias(rt).(*types.Named); ok { if u, ok := n.Underlying().(*types.Struct); ok { found = true if got, want := n.String(), "p.result"; got != want { @@ -1214,3 +1210,30 @@ func TestGo117Builtins(t *testing.T) { }) } } + +// TestLabels just tests that anonymous labels are handled. 
+func TestLabels(t *testing.T) { + tests := []string{ + `package main + func main() { _:println(1) }`, + `package main + func main() { _:println(1); _:println(2)}`, + } + for _, test := range tests { + conf := loader.Config{Fset: token.NewFileSet()} + f, err := parser.ParseFile(conf.Fset, "", test, 0) + if err != nil { + t.Errorf("parse error: %s", err) + return + } + conf.CreateFromFiles("main", f) + iprog, err := conf.Load() + if err != nil { + t.Error(err) + continue + } + prog := ssautil.CreateProgram(iprog, ssa.BuilderMode(0)) + pkg := prog.Package(iprog.Created[0].Pkg) + pkg.Build() + } +} diff --git a/go/ssa/const.go b/go/ssa/const.go index 4a51a2cb4bb..2a4e0dde28a 100644 --- a/go/ssa/const.go +++ b/go/ssa/const.go @@ -14,6 +14,7 @@ import ( "strconv" "strings" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -45,9 +46,9 @@ func soleTypeKind(typ types.Type) types.BasicInfo { // Candidates (perhaps all) are eliminated during the type-set // iteration, which executes at least once. 
state := types.IsBoolean | types.IsInteger | types.IsString - underIs(typeSetOf(typ), func(t types.Type) bool { + underIs(typeSetOf(typ), func(ut types.Type) bool { var c types.BasicInfo - if t, ok := t.(*types.Basic); ok { + if t, ok := ut.(*types.Basic); ok { c = t.Info() } if c&types.IsNumeric != 0 { // int/float/complex @@ -113,7 +114,7 @@ func zeroString(t types.Type, from *types.Package) string { } case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature: return "nil" - case *types.Named: + case *types.Named, *aliases.Alias: return zeroString(t.Underlying(), from) case *types.Array, *types.Struct: return relType(t, from) + "{}" @@ -125,7 +126,7 @@ func zeroString(t types.Type, from *types.Package) string { components[i] = zeroString(t.At(i).Type(), from) } return "(" + strings.Join(components, ", ") + ")" - case *typeparams.TypeParam: + case *types.TypeParam: return "*new(" + relType(t, from) + ")" } panic(fmt.Sprint("zeroString: unexpected ", t)) diff --git a/go/ssa/const_test.go b/go/ssa/const_test.go index d8e0c8a593a..c8ecadf7f0f 100644 --- a/go/ssa/const_test.go +++ b/go/ssa/const_test.go @@ -15,7 +15,6 @@ import ( "testing" "golang.org/x/tools/go/ssa" - "golang.org/x/tools/internal/typeparams" ) func TestConstString(t *testing.T) { @@ -93,7 +92,7 @@ func TestConstString(t *testing.T) { // Test type-param gen := pkg.Scope().Lookup("gen") - tp := typeparams.ForSignature(gen.Type().(*types.Signature)).At(0) + tp := gen.Type().(*types.Signature).TypeParams().At(0) if got, want := ssa.NewConst(nil, tp).String(), "0:T"; got != want { t.Errorf("ssa.NewConst(%v, %s).String() = %v, want %v", nil, tup, got, want) } diff --git a/go/ssa/coretype.go b/go/ssa/coretype.go index 128d61e4267..8c218f919fa 100644 --- a/go/ssa/coretype.go +++ b/go/ssa/coretype.go @@ -7,6 +7,7 @@ package ssa import ( "go/types" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -40,19 +41,20 @@ func isBytestring(T 
types.Type) bool { } // termList is a list of types. -type termList []*typeparams.Term // type terms of the type set +type termList []*types.Term // type terms of the type set func (s termList) Len() int { return len(s) } func (s termList) At(i int) types.Type { return s[i].Type() } // typeSetOf returns the type set of typ. Returns an empty typeset on an error. func typeSetOf(typ types.Type) termList { // This is a adaptation of x/exp/typeparams.NormalTerms which x/tools cannot depend on. - var terms []*typeparams.Term + var terms []*types.Term var err error - switch typ := typ.(type) { - case *typeparams.TypeParam: + // typeSetOf(t) == typeSetOf(Unalias(t)) + switch typ := aliases.Unalias(typ).(type) { + case *types.TypeParam: terms, err = typeparams.StructuralTerms(typ) - case *typeparams.Union: + case *types.Union: terms, err = typeparams.UnionTermSet(typ) case *types.Interface: terms, err = typeparams.InterfaceTermSet(typ) @@ -60,7 +62,7 @@ func typeSetOf(typ types.Type) termList { // Common case. // Specializing the len=1 case to avoid a slice // had no measurable space/time benefit. - terms = []*typeparams.Term{typeparams.NewTerm(false, typ)} + terms = []*types.Term{types.NewTerm(false, typ)} } if err != nil { diff --git a/go/ssa/create.go b/go/ssa/create.go index eaaf4695e85..f4dab2decdd 100644 --- a/go/ssa/create.go +++ b/go/ssa/create.go @@ -15,7 +15,7 @@ import ( "os" "sync" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/versions" ) // NewProgram returns a new SSA Program. @@ -34,13 +34,12 @@ import ( // See the Example tests for simple examples. 
func NewProgram(fset *token.FileSet, mode BuilderMode) *Program { return &Program{ - Fset: fset, - imported: make(map[string]*Package), - packages: make(map[*types.Package]*Package), - mode: mode, - canon: newCanonizer(), - ctxt: typeparams.NewContext(), - parameterized: tpWalker{seen: make(map[types.Type]bool)}, + Fset: fset, + imported: make(map[string]*Package), + packages: make(map[*types.Package]*Package), + mode: mode, + canon: newCanonizer(), + ctxt: types.NewContext(), } } @@ -116,10 +115,10 @@ func createFunction(prog *Program, obj *types.Func, name string, syntax ast.Node sig := obj.Type().(*types.Signature) // Collect type parameters. - var tparams *typeparams.TypeParamList - if rtparams := typeparams.RecvTypeParams(sig); rtparams.Len() > 0 { + var tparams *types.TypeParamList + if rtparams := sig.RecvTypeParams(); rtparams.Len() > 0 { tparams = rtparams // method of generic type - } else if sigparams := typeparams.ForSignature(sig); sigparams.Len() > 0 { + } else if sigparams := sig.TypeParams(); sigparams.Len() > 0 { tparams = sigparams // generic function } @@ -245,7 +244,7 @@ func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info * if len(files) > 0 { // Go source package. for _, file := range files { - goversion := goversionOf(p, file) + goversion := versions.Lang(versions.FileVersion(p.info, file)) for _, decl := range file.Decls { membersFromDecl(p, decl, goversion) } @@ -259,6 +258,7 @@ func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info * obj := scope.Lookup(name) memberFromObject(p, obj, nil, "") if obj, ok := obj.(*types.TypeName); ok { + // No Unalias: aliases should not duplicate methods. 
if named, ok := obj.Type().(*types.Named); ok { for i, n := 0, named.NumMethods(); i < n; i++ { memberFromObject(p, named.Method(i), nil, "") @@ -300,7 +300,7 @@ func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info * var printMu sync.Mutex // AllPackages returns a new slice containing all packages created by -// prog.CreatePackage in in unspecified order. +// prog.CreatePackage in unspecified order. func (prog *Program) AllPackages() []*Package { pkgs := make([]*Package, 0, len(prog.packages)) for _, pkg := range prog.packages { diff --git a/go/ssa/doc.go b/go/ssa/doc.go index 56bc2fbc165..3310b5509b2 100644 --- a/go/ssa/doc.go +++ b/go/ssa/doc.go @@ -7,8 +7,6 @@ // static single-assignment (SSA) form intermediate representation // (IR) for the bodies of functions. // -// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE. -// // For an introduction to SSA form, see // http://en.wikipedia.org/wiki/Static_single_assignment_form. // This page provides a broader reading list: @@ -21,15 +19,15 @@ // All looping, branching and switching constructs are replaced with // unstructured control flow. Higher-level control flow constructs // such as multi-way branch can be reconstructed as needed; see -// ssautil.Switches() for an example. +// [golang.org/x/tools/go/ssa/ssautil.Switches] for an example. // // The simplest way to create the SSA representation of a package is -// to load typed syntax trees using golang.org/x/tools/go/packages, then -// invoke the ssautil.Packages helper function. See Example_loadPackages -// and Example_loadWholeProgram for examples. -// The resulting ssa.Program contains all the packages and their +// to load typed syntax trees using [golang.org/x/tools/go/packages], then +// invoke the [golang.org/x/tools/go/ssa/ssautil.Packages] helper function. +// (See the package-level Examples named LoadPackages and LoadWholeProgram.) 
+// The resulting [ssa.Program] contains all the packages and their // members, but SSA code is not created for function bodies until a -// subsequent call to (*Package).Build or (*Program).Build. +// subsequent call to [Package.Build] or [Program.Build]. // // The builder initially builds a naive SSA form in which all local // variables are addresses of stack locations with explicit loads and @@ -41,13 +39,13 @@ // // The primary interfaces of this package are: // -// - Member: a named member of a Go package. -// - Value: an expression that yields a value. -// - Instruction: a statement that consumes values and performs computation. -// - Node: a Value or Instruction (emphasizing its membership in the SSA value graph) +// - [Member]: a named member of a Go package. +// - [Value]: an expression that yields a value. +// - [Instruction]: a statement that consumes values and performs computation. +// - [Node]: a [Value] or [Instruction] (emphasizing its membership in the SSA value graph) // -// A computation that yields a result implements both the Value and -// Instruction interfaces. The following table shows for each +// A computation that yields a result implements both the [Value] and +// [Instruction] interfaces. The following table shows for each // concrete type which of these interfaces it implements. // // Value? Instruction? Member? @@ -97,15 +95,15 @@ // *TypeAssert ✔ ✔ // *UnOp ✔ ✔ // -// Other key types in this package include: Program, Package, Function -// and BasicBlock. +// Other key types in this package include: [Program], [Package], [Function] +// and [BasicBlock]. // // The program representation constructed by this package is fully // resolved internally, i.e. it does not rely on the names of Values, // Packages, Functions, Types or BasicBlocks for the correct // interpretation of the program. Only the identities of objects and // the topology of the SSA and type graphs are semantically -// significant. 
(There is one exception: Ids, used to identify field +// significant. (There is one exception: [types.Id] values, which identify field // and method names, contain strings.) Avoidance of name-based // operations simplifies the implementation of subsequent passes and // can make them very efficient. Many objects are nonetheless named @@ -113,8 +111,9 @@ // either accurate or unambiguous. The public API exposes a number of // name-based maps for client convenience. // -// The ssa/ssautil package provides various utilities that depend only -// on the public API of this package. +// The [golang.org/x/tools/go/ssa/ssautil] package provides various +// helper functions, for example to simplify loading a Go program into +// SSA form. // // TODO(adonovan): write a how-to document for all the various cases // of trying to determine corresponding elements across the four diff --git a/go/ssa/dom.go b/go/ssa/dom.go index 66a2f5e6ed3..02c1ae83ae3 100644 --- a/go/ssa/dom.go +++ b/go/ssa/dom.go @@ -40,20 +40,25 @@ func (b *BasicBlock) Dominates(c *BasicBlock) bool { return b.dom.pre <= c.dom.pre && c.dom.post <= b.dom.post } -type byDomPreorder []*BasicBlock - -func (a byDomPreorder) Len() int { return len(a) } -func (a byDomPreorder) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre } - -// DomPreorder returns a new slice containing the blocks of f in -// dominator tree preorder. +// DomPreorder returns a new slice containing the blocks of f +// in a preorder traversal of the dominator tree. func (f *Function) DomPreorder() []*BasicBlock { - n := len(f.Blocks) - order := make(byDomPreorder, n) - copy(order, f.Blocks) - sort.Sort(order) - return order + slice := append([]*BasicBlock(nil), f.Blocks...) 
+ sort.Slice(slice, func(i, j int) bool { + return slice[i].dom.pre < slice[j].dom.pre + }) + return slice +} + +// DomPostorder returns a new slice containing the blocks of f +// in a postorder traversal of the dominator tree. +// (This is not the same as a postdominance order.) +func (f *Function) DomPostorder() []*BasicBlock { + slice := append([]*BasicBlock(nil), f.Blocks...) + sort.Slice(slice, func(i, j int) bool { + return slice[i].dom.post < slice[j].dom.post + }) + return slice } // domInfo contains a BasicBlock's dominance information. diff --git a/go/ssa/dom_test.go b/go/ssa/dom_test.go new file mode 100644 index 00000000000..f78c7a6909a --- /dev/null +++ b/go/ssa/dom_test.go @@ -0,0 +1,59 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa_test + +import ( + "fmt" + "path/filepath" + "testing" + + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/ssa/ssautil" + "golang.org/x/tools/internal/testenv" +) + +func TestDominatorOrder(t *testing.T) { + testenv.NeedsGoBuild(t) // for go/packages + + const src = `package p + +func f(cond bool) { + // (Print operands match BasicBlock IDs.) 
+ print(0) + if cond { + print(1) + } else { + print(2) + } + print(3) +} +` + dir := t.TempDir() + cfg := &packages.Config{ + Dir: dir, + Mode: packages.LoadSyntax, + Overlay: map[string][]byte{ + filepath.Join(dir, "p.go"): []byte(src), + }, + } + initial, err := packages.Load(cfg, "./p.go") + if err != nil { + t.Fatal(err) + } + if packages.PrintErrors(initial) > 0 { + t.Fatal("packages contain errors") + } + _, pkgs := ssautil.Packages(initial, 0) + p := pkgs[0] + p.Build() + f := p.Func("f") + + if got, want := fmt.Sprint(f.DomPreorder()), "[0 1 2 3]"; got != want { + t.Errorf("DomPreorder: got %v, want %s", got, want) + } + if got, want := fmt.Sprint(f.DomPostorder()), "[1 2 3 0]"; got != want { + t.Errorf("DomPostorder: got %v, want %s", got, want) + } +} diff --git a/go/ssa/emit.go b/go/ssa/emit.go index d77b4407a80..716299ffe68 100644 --- a/go/ssa/emit.go +++ b/go/ssa/emit.go @@ -11,6 +11,8 @@ import ( "go/ast" "go/token" "go/types" + + "golang.org/x/tools/internal/typeparams" ) // emitAlloc emits to f a new Alloc instruction allocating a variable @@ -64,7 +66,7 @@ func emitLocalVar(f *Function, v *types.Var) *Alloc { // new temporary, and returns the value so defined. func emitLoad(f *Function, addr Value) *UnOp { v := &UnOp{Op: token.MUL, X: addr} - v.setType(mustDeref(addr.Type())) + v.setType(typeparams.MustDeref(addr.Type())) f.emit(v) return v } @@ -182,7 +184,7 @@ func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value { // isValuePreserving returns true if a conversion from ut_src to // ut_dst is value-preserving, i.e. just a change of type. -// Precondition: neither argument is a named type. +// Precondition: neither argument is a named or alias type. func isValuePreserving(ut_src, ut_dst types.Type) bool { // Identical underlying types? 
if types.IdenticalIgnoreTags(ut_dst, ut_src) { @@ -246,7 +248,7 @@ func emitConv(f *Function, val Value, typ types.Type) Value { // Record the types of operands to MakeInterface, if // non-parameterized, as they are the set of runtime types. t := val.Type() - if f.typeparams.Len() == 0 || !f.Prog.parameterized.isParameterized(t) { + if f.typeparams.Len() == 0 || !f.Prog.isParameterized(t) { addRuntimeType(f.Prog, t) } @@ -274,18 +276,20 @@ func emitConv(f *Function, val Value, typ types.Type) Value { sliceTo0ArrayPtr convert ) - classify := func(s, d types.Type) conversionCase { + // classify the conversion case of a source type us to a destination type ud. + // us and ud are underlying types (not *Named or *Alias) + classify := func(us, ud types.Type) conversionCase { // Just a change of type, but not value or representation? - if isValuePreserving(s, d) { + if isValuePreserving(us, ud) { return changeType } // Conversion from slice to array or slice to array pointer? - if slice, ok := s.(*types.Slice); ok { + if slice, ok := us.(*types.Slice); ok { var arr *types.Array var ptr bool // Conversion from slice to array pointer? - switch d := d.(type) { + switch d := ud.(type) { case *types.Array: arr = d case *types.Pointer: @@ -310,8 +314,8 @@ func emitConv(f *Function, val Value, typ types.Type) Value { // The only remaining case in well-typed code is a representation- // changing conversion of basic types (possibly with []byte/[]rune). 
- if !isBasic(s) && !isBasic(d) { - panic(fmt.Sprintf("in %s: cannot convert term %s (%s [within %s]) to type %s [within %s]", f, val, val.Type(), s, typ, d)) + if !isBasic(us) && !isBasic(ud) { + panic(fmt.Sprintf("in %s: cannot convert term %s (%s [within %s]) to type %s [within %s]", f, val, val.Type(), us, typ, ud)) } return convert } @@ -414,7 +418,7 @@ func emitTypeCoercion(f *Function, v Value, typ types.Type) Value { // emitStore emits to f an instruction to store value val at location // addr, applying implicit conversions as required by assignability rules. func emitStore(f *Function, addr, val Value, pos token.Pos) *Store { - typ := mustDeref(addr.Type()) + typ := typeparams.MustDeref(addr.Type()) s := &Store{ Addr: addr, Val: emitConv(f, val, typ), @@ -520,8 +524,8 @@ func emitTailCall(f *Function, call *Call) { // value of a field. func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos) Value { for _, index := range indices { - if st, vptr := deref(v.Type()); vptr { - fld := fieldOf(st, index) + if isPointerCore(v.Type()) { + fld := fieldOf(typeparams.MustDeref(v.Type()), index) instr := &FieldAddr{ X: v, Field: index, @@ -530,7 +534,7 @@ func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos) instr.setType(types.NewPointer(fld.Type())) v = f.emit(instr) // Load the field's value iff indirectly embedded. - if _, fldptr := deref(fld.Type()); fldptr { + if isPointerCore(fld.Type()) { v = emitLoad(f, v) } } else { @@ -554,8 +558,8 @@ func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos) // field's value. // Ident id is used for position and debug info. 
func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value { - if st, vptr := deref(v.Type()); vptr { - fld := fieldOf(st, index) + if isPointerCore(v.Type()) { + fld := fieldOf(typeparams.MustDeref(v.Type()), index) instr := &FieldAddr{ X: v, Field: index, diff --git a/go/ssa/example_test.go b/go/ssa/example_test.go index 99e513cb822..cab0b84903b 100644 --- a/go/ssa/example_test.go +++ b/go/ssa/example_test.go @@ -37,9 +37,10 @@ func main() { ` // This program demonstrates how to run the SSA builder on a single -// package of one or more already-parsed files. Its dependencies are -// loaded from compiler export data. This is what you'd typically use -// for a compiler; it does not depend on golang.org/x/tools/go/loader. +// package of one or more already-parsed files. Its dependencies are +// loaded from compiler export data. This is what you'd typically use +// for a compiler; it does not depend on the obsolete +// [golang.org/x/tools/go/loader]. // // It shows the printed representation of packages, functions, and // instructions. Within the function listing, the name of each @@ -52,11 +53,11 @@ func main() { // // Build and run the ssadump.go program if you want a standalone tool // with similar functionality. It is located at -// golang.org/x/tools/cmd/ssadump. +// [golang.org/x/tools/cmd/ssadump]. // // Use ssautil.BuildPackage only if you have parsed--but not // type-checked--syntax trees. Typically, clients already have typed -// syntax, perhaps obtained from from golang.org/x/tools/go/packages. +// syntax, perhaps obtained from golang.org/x/tools/go/packages. // In that case, see the other examples for simpler approaches. func Example_buildPackage() { // Replace interface{} with any for this test. @@ -127,7 +128,7 @@ func Example_buildPackage() { } // This example builds SSA code for a set of packages using the -// x/tools/go/packages API. This is what you would typically use for a +// [golang.org/x/tools/go/packages] API. 
This is what you would typically use for a // analysis capable of operating on a single package. func Example_loadPackages() { // Load, parse, and type-check the initial packages. @@ -157,7 +158,7 @@ func Example_loadPackages() { } // This example builds SSA code for a set of packages plus all their dependencies, -// using the x/tools/go/packages API. +// using the [golang.org/x/tools/go/packages] API. // This is what you'd typically use for a whole-program analysis. func Example_loadWholeProgram() { // Load, parse, and type-check the whole program. diff --git a/go/ssa/func.go b/go/ssa/func.go index 65ed491bab6..f645fa1d8b0 100644 --- a/go/ssa/func.go +++ b/go/ssa/func.go @@ -37,7 +37,8 @@ func (f *Function) typeOf(e ast.Expr) types.Type { panic(fmt.Sprintf("no type for %T @ %s", e, f.Prog.Fset.Position(e.Pos()))) } -// typ is the locally instantiated type of T. T==typ(T) if f is not an instantiation. +// typ is the locally instantiated type of T. +// If f is not an instantiation, then f.typ(T)==T. func (f *Function) typ(T types.Type) types.Type { return f.subst.typ(T) } @@ -45,7 +46,7 @@ func (f *Function) typ(T types.Type) types.Type { // If id is an Instance, returns info.Instances[id].Type. // Otherwise returns f.typeOf(id). func (f *Function) instanceType(id *ast.Ident) types.Type { - if t, ok := typeparams.GetInstances(f.info)[id]; ok { + if t, ok := f.info.Instances[id]; ok { return t.Type } return f.typeOf(id) @@ -106,6 +107,7 @@ type lblock struct { // labelledBlock returns the branch target associated with the // specified label, creating it if needed. +// label should be a non-blank identifier (label.Name != "_"). 
func (f *Function) labelledBlock(label *ast.Ident) *lblock { obj := f.objectOf(label).(*types.Label) lb := f.lblocks[obj] @@ -533,7 +535,7 @@ func WriteFunction(buf *bytes.Buffer, f *Function) { if len(f.Locals) > 0 { buf.WriteString("# Locals:\n") for i, l := range f.Locals { - fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(mustDeref(l.Type()), from)) + fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(typeparams.MustDeref(l.Type()), from)) } } writeSignature(buf, from, f.Name(), f.Signature) @@ -588,6 +590,12 @@ func WriteFunction(buf *bytes.Buffer, f *Function) { default: buf.WriteString(instr.String()) } + // -mode=S: show line numbers + if f.Prog.mode&LogSource != 0 { + if pos := instr.Pos(); pos.IsValid() { + fmt.Fprintf(buf, " L%d", f.Prog.Fset.Position(pos).Line) + } + } buf.WriteString("\n") } } diff --git a/go/ssa/instantiate.go b/go/ssa/instantiate.go index 370284ab72a..e5e7162a8a2 100644 --- a/go/ssa/instantiate.go +++ b/go/ssa/instantiate.go @@ -8,8 +8,6 @@ import ( "fmt" "go/types" "sync" - - "golang.org/x/tools/internal/typeparams" ) // A generic records information about a generic origin function, @@ -59,7 +57,7 @@ func createInstance(fn *Function, targs []types.Type, cr *creator) *Function { sig = obj.Type().(*types.Signature) } else { // function - instSig, err := typeparams.Instantiate(prog.ctxt, fn.Signature, targs, false) + instSig, err := types.Instantiate(prog.ctxt, fn.Signature, targs, false) if err != nil { panic(err) } @@ -77,10 +75,10 @@ func createInstance(fn *Function, targs []types.Type, cr *creator) *Function { subst *subster build buildFunc ) - if prog.mode&InstantiateGenerics != 0 && !prog.parameterized.anyParameterized(targs) { + if prog.mode&InstantiateGenerics != 0 && !prog.isParameterized(targs...) 
{ synthetic = fmt.Sprintf("instance of %s", fn.Name()) if fn.syntax != nil { - scope := typeparams.OriginMethod(obj).Scope() + scope := obj.Origin().Scope() subst = makeSubster(prog.ctxt, scope, fn.typeparams, targs, false) build = (*builder).buildFromSyntax } else { @@ -112,3 +110,21 @@ func createInstance(fn *Function, targs []types.Type, cr *creator) *Function { cr.Add(instance) return instance } + +// isParameterized reports whether any of the specified types contains +// a free type parameter. It is safe to call concurrently. +func (prog *Program) isParameterized(ts ...types.Type) bool { + prog.hasParamsMu.Lock() + defer prog.hasParamsMu.Unlock() + + // TODO(adonovan): profile. If this operation is expensive, + // handle the most common but shallow cases such as T, pkg.T, + // *T without consulting the cache under the lock. + + for _, t := range ts { + if prog.hasParams.Has(t) { + return true + } + } + return false +} diff --git a/go/ssa/interp/interp.go b/go/ssa/interp/interp.go index 79363f57362..f677ba2b638 100644 --- a/go/ssa/interp/interp.go +++ b/go/ssa/interp/interp.go @@ -54,6 +54,7 @@ import ( "sync/atomic" "golang.org/x/tools/go/ssa" + "golang.org/x/tools/internal/typeparams" ) type continuation int @@ -245,7 +246,7 @@ func visitInstr(fr *frame, instr ssa.Instruction) continuation { fr.get(instr.Chan).(chan value) <- fr.get(instr.X) case *ssa.Store: - store(deref(instr.Addr.Type()), fr.get(instr.Addr).(*value), fr.get(instr.Val)) + store(typeparams.MustDeref(instr.Addr.Type()), fr.get(instr.Addr).(*value), fr.get(instr.Val)) case *ssa.If: succ := 1 @@ -289,7 +290,7 @@ func visitInstr(fr *frame, instr ssa.Instruction) continuation { // local addr = fr.env[instr].(*value) } - *addr = zero(deref(instr.Type())) + *addr = zero(typeparams.MustDeref(instr.Type())) case *ssa.MakeSlice: slice := make([]value, asInt64(fr.get(instr.Cap))) @@ -528,7 +529,7 @@ func callSSA(i *interpreter, caller *frame, callpos token.Pos, fn *ssa.Function, fr.block = fn.Blocks[0] 
fr.locals = make([]value, len(fn.Locals)) for i, l := range fn.Locals { - fr.locals[i] = zero(deref(l.Type())) + fr.locals[i] = zero(typeparams.MustDeref(l.Type())) fr.env[l] = &fr.locals[i] } for i, p := range fn.Params { @@ -673,7 +674,7 @@ func Interpret(mainpkg *ssa.Package, mode Mode, sizes types.Sizes, filename stri for _, m := range pkg.Members { switch v := m.(type) { case *ssa.Global: - cell := zero(deref(v.Type())) + cell := zero(typeparams.MustDeref(v.Type())) i.globals[v] = &cell } } @@ -717,12 +718,3 @@ func Interpret(mainpkg *ssa.Package, mode Mode, sizes types.Sizes, filename stri } return } - -// deref returns a pointer's element type; otherwise it returns typ. -// TODO(adonovan): Import from ssa? -func deref(typ types.Type) types.Type { - if p, ok := typ.Underlying().(*types.Pointer); ok { - return p.Elem() - } - return typ -} diff --git a/go/ssa/interp/interp_test.go b/go/ssa/interp/interp_test.go index 7e12dd84131..2cd7ee98502 100644 --- a/go/ssa/interp/interp_test.go +++ b/go/ssa/interp/interp_test.go @@ -2,10 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// This test fails at Go 1.18 due to infinite recursion in go/types. - -//go:build go1.19 - package interp_test // This test runs the SSA interpreter over sample Go programs. 
@@ -37,7 +33,7 @@ import ( "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/ssa/interp" "golang.org/x/tools/go/ssa/ssautil" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/testenv" ) // Each line contains a space-separated list of $GOROOT/test/ @@ -134,19 +130,15 @@ var testdataTests = []string{ "static.go", "width32.go", "rangevarlifetime_old.go", - "fixedbugs/issue52342.go", "fixedbugs/issue55115.go", + "fixedbugs/issue52835.go", + "fixedbugs/issue55086.go", + "typeassert.go", + "zeros.go", } func init() { - if typeparams.Enabled { - testdataTests = append(testdataTests, "fixedbugs/issue52835.go") - testdataTests = append(testdataTests, "fixedbugs/issue55086.go") - testdataTests = append(testdataTests, "typeassert.go") - testdataTests = append(testdataTests, "zeros.go") - } - // GOROOT/test used to assume that GOOS and GOARCH were explicitly set in the // environment, so do that here for TestGorootTest. os.Setenv("GOOS", runtime.GOOS) @@ -172,7 +164,12 @@ func run(t *testing.T, input string, goroot string) { t.Skipf("skipping: width32.go checks behavior for a 32-bit int") } - conf := loader.Config{Build: &ctx} + gover := "" + if p := testenv.Go1Point(); p > 0 { + gover = fmt.Sprintf("go1.%d", p) + } + + conf := loader.Config{Build: &ctx, TypeChecker: types.Config{GoVersion: gover}} if _, err := conf.FromArgs([]string{input}, true); err != nil { t.Fatalf("FromArgs(%s) failed: %s", input, err) } @@ -309,9 +306,6 @@ func TestGorootTest(t *testing.T) { // in $GOROOT/test/typeparam/*.go. func TestTypeparamTest(t *testing.T) { - if !typeparams.Enabled { - return - } goroot := makeGoroot(t) // Skip known failures for the given reason. 
diff --git a/go/ssa/interp/ops.go b/go/ssa/interp/ops.go index 65d6452b783..99eab86e1f6 100644 --- a/go/ssa/interp/ops.go +++ b/go/ssa/interp/ops.go @@ -17,6 +17,8 @@ import ( "unsafe" "golang.org/x/tools/go/ssa" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/typeparams" ) // If the target program panics, the interpreter panics with this type. @@ -234,6 +236,8 @@ func zero(t types.Type) value { return a case *types.Named: return zero(t.Underlying()) + case *aliases.Alias: + return zero(aliases.Unalias(t)) case *types.Interface: return iface{} // nil type, methodset and value case *types.Slice: @@ -881,7 +885,7 @@ func unop(instr *ssa.UnOp, x value) value { return -x } case token.MUL: - return load(deref(instr.X.Type()), x.(*value)) + return load(typeparams.MustDeref(instr.X.Type()), x.(*value)) case token.NOT: return !x.(bool) case token.XOR: diff --git a/go/ssa/interp/reflect.go b/go/ssa/interp/reflect.go index 9f2f9e1e457..d7132562290 100644 --- a/go/ssa/interp/reflect.go +++ b/go/ssa/interp/reflect.go @@ -18,6 +18,7 @@ import ( "unsafe" "golang.org/x/tools/go/ssa" + "golang.org/x/tools/internal/aliases" ) type opaqueType struct { @@ -119,7 +120,7 @@ func ext۰reflect۰rtype۰NumField(fr *frame, args []value) value { func ext۰reflect۰rtype۰NumIn(fr *frame, args []value) value { // Signature: func (t reflect.rtype) int - return args[0].(rtype).t.(*types.Signature).Params().Len() + return args[0].(rtype).t.Underlying().(*types.Signature).Params().Len() } func ext۰reflect۰rtype۰NumMethod(fr *frame, args []value) value { @@ -129,13 +130,13 @@ func ext۰reflect۰rtype۰NumMethod(fr *frame, args []value) value { func ext۰reflect۰rtype۰NumOut(fr *frame, args []value) value { // Signature: func (t reflect.rtype) int - return args[0].(rtype).t.(*types.Signature).Results().Len() + return args[0].(rtype).t.Underlying().(*types.Signature).Results().Len() } func ext۰reflect۰rtype۰Out(fr *frame, args []value) value { // Signature: func (t reflect.rtype, i 
int) int i := args[1].(int) - return makeReflectType(rtype{args[0].(rtype).t.(*types.Signature).Results().At(i).Type()}) + return makeReflectType(rtype{args[0].(rtype).t.Underlying().(*types.Signature).Results().At(i).Type()}) } func ext۰reflect۰rtype۰Size(fr *frame, args []value) value { @@ -179,7 +180,7 @@ func ext۰reflect۰Zero(fr *frame, args []value) value { func reflectKind(t types.Type) reflect.Kind { switch t := t.(type) { - case *types.Named: + case *types.Named, *aliases.Alias: return reflectKind(t.Underlying()) case *types.Basic: switch t.Kind() { diff --git a/go/ssa/interp/value.go b/go/ssa/interp/value.go index 94018b550fc..d35da990ed1 100644 --- a/go/ssa/interp/value.go +++ b/go/ssa/interp/value.go @@ -45,6 +45,7 @@ import ( "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/aliases" ) type value interface{} @@ -117,7 +118,7 @@ func usesBuiltinMap(t types.Type) bool { switch t := t.(type) { case *types.Basic, *types.Chan, *types.Pointer: return true - case *types.Named: + case *types.Named, *aliases.Alias: return usesBuiltinMap(t.Underlying()) case *types.Interface, *types.Array, *types.Struct: return false diff --git a/go/ssa/lift.go b/go/ssa/lift.go index dbd8790c6f5..8bb1949449f 100644 --- a/go/ssa/lift.go +++ b/go/ssa/lift.go @@ -43,6 +43,8 @@ import ( "go/token" "math/big" "os" + + "golang.org/x/tools/internal/typeparams" ) // If true, show diagnostic information at each step of lifting. @@ -103,9 +105,14 @@ func buildDomFrontier(fn *Function) domFrontier { } func removeInstr(refs []Instruction, instr Instruction) []Instruction { + return removeInstrsIf(refs, func(i Instruction) bool { return i == instr }) +} + +func removeInstrsIf(refs []Instruction, p func(Instruction) bool) []Instruction { + // TODO(taking): replace with go1.22 slices.DeleteFunc. 
i := 0 for _, ref := range refs { - if ref == instr { + if p(ref) { continue } refs[i] = ref @@ -460,7 +467,7 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool *fresh++ phi.pos = alloc.Pos() - phi.setType(mustDeref(alloc.Type())) + phi.setType(typeparams.MustDeref(alloc.Type())) phi.block = v if debugLifting { fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v) @@ -505,7 +512,7 @@ func replaceAll(x, y Value) { func renamed(renaming []Value, alloc *Alloc) Value { v := renaming[alloc.index] if v == nil { - v = zeroConst(mustDeref(alloc.Type())) + v = zeroConst(typeparams.MustDeref(alloc.Type())) renaming[alloc.index] = v } return v diff --git a/go/ssa/lvalue.go b/go/ssa/lvalue.go index 186cfcae704..eede307eabd 100644 --- a/go/ssa/lvalue.go +++ b/go/ssa/lvalue.go @@ -11,6 +11,8 @@ import ( "go/ast" "go/token" "go/types" + + "golang.org/x/tools/internal/typeparams" ) // An lvalue represents an assignable location that may appear on the @@ -52,7 +54,7 @@ func (a *address) address(fn *Function) Value { } func (a *address) typ() types.Type { - return mustDeref(a.addr.Type()) + return typeparams.MustDeref(a.addr.Type()) } // An element is an lvalue represented by m[k], the location of an diff --git a/go/ssa/methods.go b/go/ssa/methods.go index 03ef62521d9..58bd45b8146 100644 --- a/go/ssa/methods.go +++ b/go/ssa/methods.go @@ -11,7 +11,7 @@ import ( "go/types" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/aliases" ) // MethodValue returns the Function implementing method sel, building @@ -32,7 +32,7 @@ func (prog *Program) MethodValue(sel *types.Selection) *Function { return nil // interface method or type parameter } - if prog.parameterized.isParameterized(T) { + if prog.isParameterized(T) { return nil // generic method } @@ -58,10 +58,8 @@ func (prog *Program) MethodValue(sel *types.Selection) *Function { fn, ok := mset.mapping[id] if !ok { obj := 
sel.Obj().(*types.Func) - _, ptrObj := deptr(recvType(obj)) - _, ptrRecv := deptr(T) needsPromotion := len(sel.Index()) > 1 - needsIndirection := !ptrObj && ptrRecv + needsIndirection := !isPointer(recvType(obj)) && isPointer(T) if needsPromotion || needsIndirection { fn = createWrapper(prog, toSelection(sel), &cr) } else { @@ -102,7 +100,7 @@ func (prog *Program) objectMethod(obj *types.Func, cr *creator) *Function { } // Instantiation of generic? - if originObj := typeparams.OriginMethod(obj); originObj != obj { + if originObj := obj.Origin(); originObj != obj { origin := prog.objectMethod(originObj, cr) assert(origin.typeparams.Len() > 0, "origin is not generic") targs := receiverTypeArgs(obj) @@ -209,6 +207,9 @@ func forEachReachable(msets *typeutil.MethodSetCache, T types.Type, f func(types } switch T := T.(type) { + case *aliases.Alias: + visit(aliases.Unalias(T), skip) // emulates the pre-Alias behavior + case *types.Basic: // nop @@ -261,7 +262,7 @@ func forEachReachable(msets *typeutil.MethodSetCache, T types.Type, f func(types visit(T.At(i).Type(), false) } - case *typeparams.TypeParam, *typeparams.Union: + case *types.TypeParam, *types.Union: // forEachReachable must not be called on parameterized types. panic(T) diff --git a/go/ssa/parameterized.go b/go/ssa/parameterized.go deleted file mode 100644 index 656417ac8e1..00000000000 --- a/go/ssa/parameterized.go +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -import ( - "go/types" - "sync" - - "golang.org/x/tools/internal/typeparams" -) - -// tpWalker walks over types looking for parameterized types. -// -// NOTE: Adapted from go/types/infer.go. If that is exported in a future release remove this copy. 
-type tpWalker struct { - mu sync.Mutex - seen map[types.Type]bool -} - -// isParameterized reports whether t recursively contains a type parameter. -// Thread-safe. -func (w *tpWalker) isParameterized(t types.Type) bool { - // TODO(adonovan): profile. If this operation is expensive, - // handle the most common but shallow cases such as T, pkg.T, - // *T without consulting the cache under the lock. - - w.mu.Lock() - defer w.mu.Unlock() - return w.isParameterizedLocked(t) -} - -// Requires w.mu. -func (w *tpWalker) isParameterizedLocked(typ types.Type) (res bool) { - // NOTE: Adapted from go/types/infer.go. Try to keep in sync. - - // detect cycles - if x, ok := w.seen[typ]; ok { - return x - } - w.seen[typ] = false - defer func() { - w.seen[typ] = res - }() - - switch t := typ.(type) { - case nil, *types.Basic: // TODO(gri) should nil be handled here? - break - - case *types.Array: - return w.isParameterizedLocked(t.Elem()) - - case *types.Slice: - return w.isParameterizedLocked(t.Elem()) - - case *types.Struct: - for i, n := 0, t.NumFields(); i < n; i++ { - if w.isParameterizedLocked(t.Field(i).Type()) { - return true - } - } - - case *types.Pointer: - return w.isParameterizedLocked(t.Elem()) - - case *types.Tuple: - n := t.Len() - for i := 0; i < n; i++ { - if w.isParameterizedLocked(t.At(i).Type()) { - return true - } - } - - case *types.Signature: - // t.tparams may not be nil if we are looking at a signature - // of a generic function type (or an interface method) that is - // part of the type we're testing. We don't care about these type - // parameters. - // Similarly, the receiver of a method may declare (rather than - // use) type parameters, we don't care about those either. - // Thus, we only need to look at the input and result parameters. 
- return w.isParameterizedLocked(t.Params()) || w.isParameterizedLocked(t.Results()) - - case *types.Interface: - for i, n := 0, t.NumMethods(); i < n; i++ { - if w.isParameterizedLocked(t.Method(i).Type()) { - return true - } - } - terms, err := typeparams.InterfaceTermSet(t) - if err != nil { - panic(err) - } - for _, term := range terms { - if w.isParameterizedLocked(term.Type()) { - return true - } - } - - case *types.Map: - return w.isParameterizedLocked(t.Key()) || w.isParameterizedLocked(t.Elem()) - - case *types.Chan: - return w.isParameterizedLocked(t.Elem()) - - case *types.Named: - args := typeparams.NamedTypeArgs(t) - // TODO(taking): this does not match go/types/infer.go. Check with rfindley. - if params := typeparams.ForNamed(t); params.Len() > args.Len() { - return true - } - for i, n := 0, args.Len(); i < n; i++ { - if w.isParameterizedLocked(args.At(i)) { - return true - } - } - return w.isParameterizedLocked(t.Underlying()) // recurse for types local to parameterized functions - - case *typeparams.TypeParam: - return true - - default: - panic(t) // unreachable - } - - return false -} - -// anyParameterized reports whether any element of ts is parameterized. -// Thread-safe. -func (w *tpWalker) anyParameterized(ts []types.Type) bool { - w.mu.Lock() - defer w.mu.Unlock() - for _, t := range ts { - if w.isParameterizedLocked(t) { - return true - } - } - return false -} diff --git a/go/ssa/parameterized_test.go b/go/ssa/parameterized_test.go deleted file mode 100644 index 7970a013c0e..00000000000 --- a/go/ssa/parameterized_test.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package ssa - -import ( - "go/ast" - "go/parser" - "go/token" - "go/types" - "testing" -) - -func TestIsParameterized(t *testing.T) { - const source = ` -package P -type A int -func (A) f() -func (*A) g() - -type fer interface { f() } - -func Apply[T fer](x T) T { - x.f() - return x -} - -type V[T any] []T -func (v *V[T]) Push(x T) { *v = append(*v, x) } -` - - fset := token.NewFileSet() - f, err := parser.ParseFile(fset, "hello.go", source, 0) - if err != nil { - t.Fatal(err) - } - - var conf types.Config - pkg, err := conf.Check("P", fset, []*ast.File{f}, nil) - if err != nil { - t.Fatal(err) - } - - for _, test := range []struct { - expr string // type expression - want bool // expected isParameterized value - }{ - {"A", false}, - {"*A", false}, - {"error", false}, - {"*error", false}, - {"struct{A}", false}, - {"*struct{A}", false}, - {"fer", false}, - {"Apply", true}, - {"Apply[A]", false}, - {"V", true}, - {"V[A]", false}, - {"*V[A]", false}, - {"(*V[A]).Push", false}, - } { - tv, err := types.Eval(fset, pkg, 0, test.expr) - if err != nil { - t.Errorf("Eval(%s) failed: %v", test.expr, err) - } - - param := tpWalker{seen: make(map[types.Type]bool)} - if got := param.isParameterized(tv.Type); got != test.want { - t.Logf("Eval(%s) returned the type %s", test.expr, tv.Type) - t.Errorf("isParameterized(%s) = %v, want %v", test.expr, got, test.want) - } - } -} diff --git a/go/ssa/print.go b/go/ssa/print.go index 7f34a7b58b7..38d8404fdc4 100644 --- a/go/ssa/print.go +++ b/go/ssa/print.go @@ -51,7 +51,7 @@ func relType(t types.Type, from *types.Package) string { return s } -func relTerm(term *typeparams.Term, from *types.Package) string { +func relTerm(term *types.Term, from *types.Package) string { s := relType(term.Type(), from) if term.Tilde() { return "~" + s @@ -95,7 +95,7 @@ func (v *Alloc) String() string { op = "new" } from := v.Parent().relPkg() - return fmt.Sprintf("%s %s (%s)", op, relType(mustDeref(v.Type()), from), v.Comment) + return fmt.Sprintf("%s 
%s (%s)", op, relType(typeparams.MustDeref(v.Type()), from), v.Comment) } func (v *Phi) String() string { @@ -261,7 +261,7 @@ func (v *MakeChan) String() string { func (v *FieldAddr) String() string { // Be robust against a bad index. name := "?" - if fld := fieldOf(mustDeref(v.X.Type()), v.Field); fld != nil { + if fld := fieldOf(typeparams.MustDeref(v.X.Type()), v.Field); fld != nil { name = fld.Name() } return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field) @@ -450,7 +450,7 @@ func WritePackage(buf *bytes.Buffer, p *Package) { case *Global: fmt.Fprintf(buf, " var %-*s %s\n", - maxname, name, relType(mustDeref(mem.Type()), from)) + maxname, name, relType(typeparams.MustDeref(mem.Type()), from)) } } diff --git a/go/ssa/sanity.go b/go/ssa/sanity.go index 28ec131f8c4..13bd39fe862 100644 --- a/go/ssa/sanity.go +++ b/go/ssa/sanity.go @@ -132,6 +132,11 @@ func (s *sanity) checkInstr(idx int, instr Instruction) { case *BinOp: case *Call: + if common := instr.Call; common.IsInvoke() { + if !types.IsInterface(common.Value.Type()) { + s.errorf("invoke on %s (%s) which is not an interface type (or type param)", common.Value, common.Value.Type()) + } + } case *ChangeInterface: case *ChangeType: case *SliceToArrayPointer: @@ -344,7 +349,7 @@ func (s *sanity) checkBlock(b *BasicBlock, index int) { // Check that "untyped" types only appear on constant operands. if _, ok := (*op).(*Const); !ok { - if basic, ok := (*op).Type().(*types.Basic); ok { + if basic, ok := (*op).Type().Underlying().(*types.Basic); ok { if basic.Info()&types.IsUntyped != 0 { s.errorf("operand #%d of %s is untyped: %s", i, instr, basic) } diff --git a/go/ssa/source.go b/go/ssa/source.go index 7b1eb8527f9..7b71c88d120 100644 --- a/go/ssa/source.go +++ b/go/ssa/source.go @@ -11,12 +11,9 @@ package ssa // the originating syntax, as specified. 
import ( - "fmt" "go/ast" "go/token" "go/types" - - "golang.org/x/tools/internal/typeparams" ) // EnclosingFunction returns the function that contains the syntax @@ -123,7 +120,7 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function { obj := mset.At(i).Obj().(*types.Func) if obj.Pos() == pos { // obj from MethodSet may not be the origin type. - m := typeparams.OriginMethod(obj) + m := obj.Origin() return pkg.objects[m].(*Function) } } @@ -132,31 +129,6 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function { return nil } -// goversionOf returns the goversion of a node in the package -// where the node is either a function declaration or the initial -// value of a package level variable declaration. -func goversionOf(p *Package, file *ast.File) string { - if p.info == nil { - return "" - } - - // TODO(taking): Update to the following when internal/versions available: - // return versions.Lang(versions.FileVersions(p.info, file)) - return fileVersions(file) -} - -// TODO(taking): Remove when internal/versions is available. -var fileVersions = func(file *ast.File) string { return "" } - -// parses a goXX.YY version or returns a negative version on an error. -// TODO(taking): Switch to a permanent solution when internal/versions is submitted. -func parseGoVersion(x string) (major, minor int) { - if _, err := fmt.Sscanf(x, "go%d.%d", &major, &minor); err != nil || major < 0 || minor < 0 { - return -1, -1 - } - return -} - // ValueForExpr returns the SSA Value that corresponds to non-constant // expression e. // diff --git a/go/ssa/source_test.go b/go/ssa/source_test.go index 9cdad2bca70..112581bb55b 100644 --- a/go/ssa/source_test.go +++ b/go/ssa/source_test.go @@ -48,7 +48,7 @@ func TestObjValueLookup(t *testing.T) { // Each note of the form @ssa(x, "BinOp") in testdata/objlookup.go // specifies an expectation that an object named x declared on the - // same line is associated with an an ssa.Value of type *ssa.BinOp. 
+ // same line is associated with an ssa.Value of type *ssa.BinOp. notes, err := expect.ExtractGo(conf.Fset, f) if err != nil { t.Fatal(err) diff --git a/go/ssa/ssa.go b/go/ssa/ssa.go index 58a641a1fdb..5ff12d2f572 100644 --- a/go/ssa/ssa.go +++ b/go/ssa/ssa.go @@ -27,13 +27,15 @@ type Program struct { mode BuilderMode // set of mode bits for SSA construction MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets - canon *canonizer // type canonicalization map - ctxt *typeparams.Context // cache for type checking instantiations + canon *canonizer // type canonicalization map + ctxt *types.Context // cache for type checking instantiations methodsMu sync.Mutex methodSets typeutil.Map // maps type to its concrete *methodSet - parameterized tpWalker // memoization of whether a type refers to type parameters + // memoization of whether a type refers to type parameters + hasParamsMu sync.Mutex + hasParams typeparams.Free runtimeTypesMu sync.Mutex runtimeTypes typeutil.Map // set of runtime types (from MakeInterface) @@ -339,10 +341,10 @@ type Function struct { referrers []Instruction // referring instructions (iff Parent() != nil) anonIdx int32 // position of a nested function in parent's AnonFuncs. fn.Parent()!=nil => fn.Parent().AnonFunc[fn.anonIdx] == fn. - typeparams *typeparams.TypeParamList // type parameters of this function. typeparams.Len() > 0 => generic or instance of generic function - typeargs []types.Type // type arguments that instantiated typeparams. len(typeargs) > 0 => instance of generic function - topLevelOrigin *Function // the origin function if this is an instance of a source function. nil if Parent()!=nil. - generic *generic // instances of this function, if generic + typeparams *types.TypeParamList // type parameters of this function. typeparams.Len() > 0 => generic or instance of generic function + typeargs []types.Type // type arguments that instantiated typeparams. 
len(typeargs) > 0 => instance of generic function + topLevelOrigin *Function // the origin function if this is an instance of a source function. nil if Parent()!=nil. + generic *generic // instances of this function, if generic // The following fields are cleared after building. currentBlock *BasicBlock // where to emit code @@ -690,8 +692,8 @@ type Convert struct { type MultiConvert struct { register X Value - from []*typeparams.Term - to []*typeparams.Term + from []*types.Term + to []*types.Term } // ChangeInterface constructs a value of one interface type from a @@ -1539,10 +1541,7 @@ func (v *Function) Referrers() *[]Instruction { // TypeParams are the function's type parameters if generic or the // type parameters that were instantiated if fn is an instantiation. -// -// TODO(taking): declare result type as *types.TypeParamList -// after we drop support for go1.17. -func (fn *Function) TypeParams() *typeparams.TypeParamList { +func (fn *Function) TypeParams() *types.TypeParamList { return fn.typeparams } diff --git a/go/ssa/ssautil/load.go b/go/ssa/ssautil/load.go index 67e75cb261a..3daa67a07e4 100644 --- a/go/ssa/ssautil/load.go +++ b/go/ssa/ssautil/load.go @@ -14,14 +14,14 @@ import ( "golang.org/x/tools/go/loader" "golang.org/x/tools/go/packages" "golang.org/x/tools/go/ssa" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/versions" ) // Packages creates an SSA program for a set of packages. // // The packages must have been loaded from source syntax using the -// golang.org/x/tools/go/packages.Load function in LoadSyntax or -// LoadAllSyntax mode. +// [packages.Load] function in [packages.LoadSyntax] or +// [packages.LoadAllSyntax] mode. // // Packages creates an SSA package for each well-typed package in the // initial list, plus all their dependencies. The resulting list of @@ -29,9 +29,9 @@ import ( // a nil if SSA code could not be constructed for the corresponding initial // package due to type errors. 
// -// Code for bodies of functions is not built until Build is called on -// the resulting Program. SSA code is constructed only for the initial -// packages with well-typed syntax trees. +// Code for bodies of functions is not built until [Program.Build] is +// called on the resulting Program. SSA code is constructed only for +// the initial packages with well-typed syntax trees. // // The mode parameter controls diagnostics and checking during SSA construction. func Packages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) { @@ -60,7 +60,7 @@ func Packages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, // their dependencies. // // The packages must have been loaded from source syntax using the -// golang.org/x/tools/go/packages.Load function in LoadAllSyntax mode. +// [packages.Load] function in [packages.LoadAllSyntax] mode. // // AllPackages creates an SSA package for each well-typed package in the // initial list, plus all their dependencies. The resulting list of @@ -120,7 +120,7 @@ func doPackages(initial []*packages.Package, mode ssa.BuilderMode, deps bool) (* // // The mode parameter controls diagnostics and checking during SSA construction. // -// Deprecated: Use golang.org/x/tools/go/packages and the Packages +// Deprecated: Use [golang.org/x/tools/go/packages] and the [Packages] // function instead; see ssa.Example_loadPackages. func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program { prog := ssa.NewProgram(lprog.Fset, mode) @@ -134,16 +134,17 @@ func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program { return prog } -// BuildPackage builds an SSA program with IR for a single package. +// BuildPackage builds an SSA program with SSA intermediate +// representation (IR) for all functions of a single package. // -// It populates pkg by type-checking the specified file ASTs. All +// It populates pkg by type-checking the specified file syntax trees. 
All // dependencies are loaded using the importer specified by tc, which // typically loads compiler export data; SSA code cannot be built for -// those packages. BuildPackage then constructs an ssa.Program with all +// those packages. BuildPackage then constructs an [ssa.Program] with all // dependency packages created, and builds and returns the SSA package // corresponding to pkg. // -// The caller must have set pkg.Path() to the import path. +// The caller must have set pkg.Path to the import path. // // The operation fails if there were any type-checking or import errors. // @@ -161,11 +162,11 @@ func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, fil Defs: make(map[*ast.Ident]types.Object), Uses: make(map[*ast.Ident]types.Object), Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), Scopes: make(map[ast.Node]*types.Scope), Selections: make(map[*ast.SelectorExpr]*types.Selection), } - typeparams.InitInstanceInfo(info) - // versions.InitFileVersions(info) // TODO(taking): Enable when internal/versions is available. + versions.InitFileVersions(info) if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil { return nil, nil, err } diff --git a/go/ssa/ssautil/visit.go b/go/ssa/ssautil/visit.go index 3cdd3462271..b4feb42cb3a 100644 --- a/go/ssa/ssautil/visit.go +++ b/go/ssa/ssautil/visit.go @@ -9,7 +9,6 @@ import ( "go/types" "golang.org/x/tools/go/ssa" - "golang.org/x/tools/internal/typeparams" _ "unsafe" // for linkname hack ) @@ -105,7 +104,7 @@ func AllFunctions(prog *ssa.Program) map[*ssa.Function]bool { // Consider only named types. // (Ignore aliases and unsafe.Pointer.) 
if named, ok := t.Type().(*types.Named); ok { - if typeparams.ForNamed(named) == nil { + if named.TypeParams() == nil { methodsOf(named) // T methodsOf(types.NewPointer(named)) // *T } diff --git a/go/ssa/subst.go b/go/ssa/subst.go index 23d19ae7383..e1b8e198c03 100644 --- a/go/ssa/subst.go +++ b/go/ssa/subst.go @@ -7,7 +7,7 @@ package ssa import ( "go/types" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/aliases" ) // Type substituter for a fixed set of replacement types. @@ -18,11 +18,11 @@ import ( // // Not concurrency-safe. type subster struct { - replacements map[*typeparams.TypeParam]types.Type // values should contain no type params - cache map[types.Type]types.Type // cache of subst results - ctxt *typeparams.Context // cache for instantiation - scope *types.Scope // *types.Named declared within this scope can be substituted (optional) - debug bool // perform extra debugging checks + replacements map[*types.TypeParam]types.Type // values should contain no type params + cache map[types.Type]types.Type // cache of subst results + ctxt *types.Context // cache for instantiation + scope *types.Scope // *types.Named declared within this scope can be substituted (optional) + debug bool // perform extra debugging checks // TODO(taking): consider adding Pos // TODO(zpavlinovic): replacements can contain type params // when generating instances inside of a generic function body. @@ -31,11 +31,11 @@ type subster struct { // Returns a subster that replaces tparams[i] with targs[i]. Uses ctxt as a cache. // targs should not contain any types in tparams. // scope is the (optional) lexical block of the generic function for which we are substituting. 
-func makeSubster(ctxt *typeparams.Context, scope *types.Scope, tparams *typeparams.TypeParamList, targs []types.Type, debug bool) *subster { +func makeSubster(ctxt *types.Context, scope *types.Scope, tparams *types.TypeParamList, targs []types.Type, debug bool) *subster { assert(tparams.Len() == len(targs), "makeSubster argument count must match") subst := &subster{ - replacements: make(map[*typeparams.TypeParam]types.Type, tparams.Len()), + replacements: make(map[*types.TypeParam]types.Type, tparams.Len()), cache: make(map[types.Type]types.Type), ctxt: ctxt, scope: scope, @@ -80,9 +80,8 @@ func (subst *subster) typ(t types.Type) (res types.Type) { subst.cache[t] = res }() - // fall through if result r will be identical to t, types.Identical(r, t). switch t := t.(type) { - case *typeparams.TypeParam: + case *types.TypeParam: r := subst.replacements[t] assert(r != nil, "type param without replacement encountered") return r @@ -131,12 +130,15 @@ func (subst *subster) typ(t types.Type) (res types.Type) { case *types.Signature: return subst.signature(t) - case *typeparams.Union: + case *types.Union: return subst.union(t) case *types.Interface: return subst.interface_(t) + case *aliases.Alias: + return subst.alias(t) + case *types.Named: return subst.named(t) @@ -220,25 +222,25 @@ func (subst *subster) var_(v *types.Var) *types.Var { return v } -func (subst *subster) union(u *typeparams.Union) *typeparams.Union { - var out []*typeparams.Term // nil => no updates +func (subst *subster) union(u *types.Union) *types.Union { + var out []*types.Term // nil => no updates for i, n := 0, u.Len(); i < n; i++ { t := u.Term(i) r := subst.typ(t.Type()) if r != t.Type() && out == nil { - out = make([]*typeparams.Term, n) + out = make([]*types.Term, n) for j := 0; j < i; j++ { out[j] = u.Term(j) } } if out != nil { - out[i] = typeparams.NewTerm(t.Tilde(), r) + out[i] = types.NewTerm(t.Tilde(), r) } } if out != nil { - return typeparams.NewUnion(out) + return types.NewUnion(out) } 
return u } @@ -304,13 +306,25 @@ func (subst *subster) interface_(iface *types.Interface) *types.Interface { return types.NewInterfaceType(methods, embeds).Complete() } +func (subst *subster) alias(t *aliases.Alias) types.Type { + // TODO(go.dev/issues/46477): support TypeParameters once these are available from go/types. + u := aliases.Unalias(t) + if s := subst.typ(u); s != u { + // If there is any change, do not create a new alias. + return s + } + // If there is no change, t did not reach any type parameter. + // Keep the Alias. + return t +} + func (subst *subster) named(t *types.Named) types.Type { // A named type may be: // (1) ordinary named type (non-local scope, no type parameters, no type arguments), // (2) locally scoped type, // (3) generic (type parameters but no type arguments), or // (4) instantiated (type parameters and type arguments). - tparams := typeparams.ForNamed(t) + tparams := t.TypeParams() if tparams.Len() == 0 { if subst.scope != nil && !subst.scope.Contains(t.Obj().Pos()) { // Outside the current function scope? @@ -344,7 +358,7 @@ func (subst *subster) named(t *types.Named) types.Type { n.SetUnderlying(subst.typ(t.Underlying())) return n } - targs := typeparams.NamedTypeArgs(t) + targs := t.TypeArgs() // insts are arguments to instantiate using. insts := make([]types.Type, tparams.Len()) @@ -367,13 +381,13 @@ func (subst *subster) named(t *types.Named) types.Type { inst := subst.typ(targs.At(i)) // TODO(generic): Check with rfindley for mutual recursion insts[i] = inst } - r, err := typeparams.Instantiate(subst.ctxt, typeparams.NamedTypeOrigin(t), insts, false) + r, err := types.Instantiate(subst.ctxt, t.Origin(), insts, false) assert(err == nil, "failed to Instantiate Named type") return r } func (subst *subster) signature(t *types.Signature) types.Type { - tparams := typeparams.ForSignature(t) + tparams := t.TypeParams() // We are choosing not to support tparams.Len() > 0 until a need has been observed in practice. 
// @@ -398,7 +412,7 @@ func (subst *subster) signature(t *types.Signature) types.Type { params := subst.tuple(t.Params()) results := subst.tuple(t.Results()) if recv != t.Recv() || params != t.Params() || results != t.Results() { - return typeparams.NewSignatureType(recv, nil, nil, params, results, t.Variadic()) + return types.NewSignatureType(recv, nil, nil, params, results, t.Variadic()) } return t } @@ -422,7 +436,7 @@ func reaches(t types.Type, c map[types.Type]bool) (res bool) { }() switch t := t.(type) { - case *typeparams.TypeParam, *types.Basic: + case *types.TypeParam, *types.Basic: return false case *types.Array: return reaches(t.Elem(), c) @@ -451,7 +465,7 @@ func reaches(t types.Type, c map[types.Type]bool) (res bool) { return true } return reaches(t.Params(), c) || reaches(t.Results(), c) - case *typeparams.Union: + case *types.Union: for i := 0; i < t.Len(); i++ { if reaches(t.Term(i).Type(), c) { return true @@ -468,7 +482,7 @@ func reaches(t types.Type, c map[types.Type]bool) (res bool) { return true } } - case *types.Named: + case *types.Named, *aliases.Alias: return reaches(t.Underlying(), c) default: panic("unreachable") diff --git a/go/ssa/subst_test.go b/go/ssa/subst_test.go index e4aeaa1c312..6652b1a8e97 100644 --- a/go/ssa/subst_test.go +++ b/go/ssa/subst_test.go @@ -10,8 +10,6 @@ import ( "go/token" "go/types" "testing" - - "golang.org/x/tools/internal/typeparams" ) func TestSubst(t *testing.T) { @@ -96,7 +94,7 @@ var _ L[int] = Fn0[L[int]](nil) T := tv.Type.(*types.Named) - subst := makeSubster(typeparams.NewContext(), nil, typeparams.ForNamed(T), targs, true) + subst := makeSubster(types.NewContext(), nil, T.TypeParams(), targs, true) sub := subst.typ(T.Underlying()) if got := sub.String(); got != test.want { t.Errorf("subst{%v->%v}.typ(%s) = %v, want %v", test.expr, test.args, T.Underlying(), got, test.want) diff --git a/go/ssa/util.go b/go/ssa/util.go index 63fbbc1282a..314ca2b6f7a 100644 --- a/go/ssa/util.go +++ b/go/ssa/util.go @@ 
-17,7 +17,9 @@ import ( "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" ) //// Sanity checking utilities @@ -49,17 +51,21 @@ func isNonTypeParamInterface(t types.Type) bool { } // isBasic reports whether t is a basic type. +// t is assumed to be an Underlying type (not Named or Alias). func isBasic(t types.Type) bool { _, ok := t.(*types.Basic) return ok } // isString reports whether t is exactly a string type. +// t is assumed to be an Underlying type (not Named or Alias). func isString(t types.Type) bool { - return isBasic(t) && t.(*types.Basic).Info()&types.IsString != 0 + basic, ok := t.(*types.Basic) + return ok && basic.Info()&types.IsString != 0 } // isByteSlice reports whether t is of the form []~bytes. +// t is assumed to be an Underlying type (not Named or Alias). func isByteSlice(t types.Type) bool { if b, ok := t.(*types.Slice); ok { e, _ := b.Elem().Underlying().(*types.Basic) @@ -69,6 +75,7 @@ func isByteSlice(t types.Type) bool { } // isRuneSlice reports whether t is of the form []~runes. +// t is assumed to be an Underlying type (not Named or Alias). func isRuneSlice(t types.Type) bool { if b, ok := t.(*types.Slice); ok { e, _ := b.Elem().Underlying().(*types.Basic) @@ -94,33 +101,22 @@ func isBasicConvTypes(tset termList) bool { return all && basics >= 1 && tset.Len()-basics <= 1 } -// deptr returns a pointer's element type and true; otherwise it returns (typ, false). -// This function is oblivious to core types and is not suitable for generics. -// -// TODO: Deprecate this function once all usages have been audited. -func deptr(typ types.Type) (types.Type, bool) { - if p, ok := typ.Underlying().(*types.Pointer); ok { - return p.Elem(), true - } - return typ, false +// isPointer reports whether t's underlying type is a pointer. 
+func isPointer(t types.Type) bool { + return is[*types.Pointer](t.Underlying()) } -// deref returns the element type of a type with a pointer core type and true; -// otherwise it returns (typ, false). -func deref(typ types.Type) (types.Type, bool) { - if p, ok := typeparams.CoreType(typ).(*types.Pointer); ok { - return p.Elem(), true - } - return typ, false +// isPointerCore reports whether t's core type is a pointer. +// +// (Most pointer manipulation is related to receivers, in which case +// isPointer is appropriate. tecallers can use isPointer(t). +func isPointerCore(t types.Type) bool { + return is[*types.Pointer](typeparams.CoreType(t)) } -// mustDeref returns the element type of a type with a pointer core type. -// Panics on failure. -func mustDeref(typ types.Type) types.Type { - if et, ok := deref(typ); ok { - return et - } - panic("cannot dereference type " + typ.String()) +func is[T any](x any) bool { + _, ok := x.(T) + return ok } // recvType returns the receiver type of method obj. @@ -139,8 +135,9 @@ func fieldOf(typ types.Type, index int) *types.Var { return nil } -// isUntyped returns true for types that are untyped. +// isUntyped reports whether typ is the type of an untyped constant. func isUntyped(typ types.Type) bool { + // No Underlying/Unalias: untyped constant types cannot be Named or Alias. b, ok := typ.(*types.Basic) return ok && b.Info()&types.IsUntyped != 0 } @@ -180,19 +177,15 @@ func makeLen(T types.Type) *Builtin { } } -// receiverTypeArgs returns the type arguments to a function's receiver. -// Returns an empty list if obj does not have a receiver or its receiver does not have type arguments. -func receiverTypeArgs(obj *types.Func) []types.Type { - rtype := recvType(obj) - if rtype == nil { - return nil - } - rtype, _ = deptr(rtype) - named, ok := rtype.(*types.Named) - if !ok { - return nil +// receiverTypeArgs returns the type arguments to a method's receiver. +// Returns an empty list if the receiver does not have type arguments. 
+func receiverTypeArgs(method *types.Func) []types.Type { + recv := method.Type().(*types.Signature).Recv() + _, named := typesinternal.ReceiverNamed(recv) + if named == nil { + return nil // recv is anonymous struct/interface } - ts := typeparams.NamedTypeArgs(named) + ts := named.TypeArgs() if ts.Len() == 0 { return nil } @@ -211,7 +204,7 @@ func recvAsFirstArg(sig *types.Signature) *types.Signature { for i := 0; i < sig.Params().Len(); i++ { params = append(params, sig.Params().At(i)) } - return typeparams.NewSignatureType(nil, nil, nil, types.NewTuple(params...), sig.Results(), sig.Variadic()) + return types.NewSignatureType(nil, nil, nil, types.NewTuple(params...), sig.Results(), sig.Variadic()) } // instance returns whether an expression is a simple or qualified identifier @@ -228,13 +221,13 @@ func instance(info *types.Info, expr ast.Expr) bool { default: return false } - _, ok := typeparams.GetInstances(info)[id] + _, ok := info.Instances[id] return ok } // instanceArgs returns the Instance[id].TypeArgs as a slice. func instanceArgs(info *types.Info, id *ast.Ident) []types.Type { - targList := typeparams.GetInstances(info)[id].TypeArgs + targList := info.Instances[id].TypeArgs if targList == nil { return nil } @@ -269,13 +262,40 @@ func (c *canonizer) List(ts []types.Type) *typeList { return nil } + unaliasAll := func(ts []types.Type) []types.Type { + // Is there some top level alias? + var found bool + for _, t := range ts { + if _, ok := t.(*aliases.Alias); ok { + found = true + break + } + } + if !found { + return ts // no top level alias + } + + cp := make([]types.Type, len(ts)) // copy with top level aliases removed. + for i, t := range ts { + cp[i] = aliases.Unalias(t) + } + return cp + } + l := unaliasAll(ts) + c.mu.Lock() defer c.mu.Unlock() - return c.lists.rep(ts) + return c.lists.rep(l) } // Type returns a canonical representative of type T. +// Removes top-level aliases. 
+// +// For performance, reasons the canonical instance is order-dependent, +// and may contain deeply nested aliases. func (c *canonizer) Type(T types.Type) types.Type { + T = aliases.Unalias(T) // remove the top level alias. + c.mu.Lock() defer c.mu.Unlock() @@ -352,13 +372,13 @@ func (m *typeListMap) hash(ts []types.Type) uint32 { } // instantiateMethod instantiates m with targs and returns a canonical representative for this method. -func (canon *canonizer) instantiateMethod(m *types.Func, targs []types.Type, ctxt *typeparams.Context) *types.Func { +func (canon *canonizer) instantiateMethod(m *types.Func, targs []types.Type, ctxt *types.Context) *types.Func { recv := recvType(m) - if p, ok := recv.(*types.Pointer); ok { + if p, ok := aliases.Unalias(recv).(*types.Pointer); ok { recv = p.Elem() } - named := recv.(*types.Named) - inst, err := typeparams.Instantiate(ctxt, typeparams.NamedTypeOrigin(named), targs, false) + named := aliases.Unalias(recv).(*types.Named) + inst, err := types.Instantiate(ctxt, named.Origin(), targs, false) if err != nil { panic(err) } diff --git a/go/ssa/versions_go122.go b/go/ssa/versions_go122.go deleted file mode 100644 index b74165a8e32..00000000000 --- a/go/ssa/versions_go122.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build go1.22 -// +build go1.22 - -package ssa - -import ( - "go/ast" -) - -func init() { - fileVersions = func(file *ast.File) string { - if maj, min := parseGoVersion(file.GoVersion); maj >= 0 && min >= 0 { - return file.GoVersion - } - return "" - } -} diff --git a/go/ssa/wrappers.go b/go/ssa/wrappers.go index 7c7ee4099e3..b25c4c78979 100644 --- a/go/ssa/wrappers.go +++ b/go/ssa/wrappers.go @@ -24,6 +24,8 @@ import ( "go/token" "go/types" + + "golang.org/x/tools/internal/typeparams" ) // -- wrappers ----------------------------------------------------------- @@ -97,14 +99,12 @@ func (b *builder) buildWrapper(fn *Function) { indices := fn.method.index var v Value = fn.Locals[0] // spilled receiver - srdt, ptrRecv := deptr(fn.method.recv) - if ptrRecv { + if isPointer(fn.method.recv) { v = emitLoad(fn, v) // For simple indirection wrappers, perform an informative nil-check: // "value method (T).f called using nil *T pointer" - _, ptrObj := deptr(recvType(fn.object)) - if len(indices) == 1 && !ptrObj { + if len(indices) == 1 && !isPointer(recvType(fn.object)) { var c Call c.Call.Value = &Builtin{ name: "ssa:wrapnilchk", @@ -114,7 +114,7 @@ func (b *builder) buildWrapper(fn *Function) { } c.Call.Args = []Value{ v, - stringConst(srdt.String()), + stringConst(typeparams.MustDeref(fn.method.recv).String()), stringConst(fn.method.obj.Name()), } c.setType(v.Type()) @@ -138,7 +138,7 @@ func (b *builder) buildWrapper(fn *Function) { var c Call if r := recvType(fn.object); !types.IsInterface(r) { // concrete method - if _, ptrObj := deptr(r); !ptrObj { + if !isPointer(r) { v = emitLoad(fn, v) } c.Call.Value = fn.Prog.objectMethod(fn.object, b.created) diff --git a/go/types/internal/play/play.go b/go/types/internal/play/play.go index 7a760e2d57d..c88bba5069a 100644 --- a/go/types/internal/play/play.go +++ b/go/types/internal/play/play.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build go1.19 - // The play program is a playground for go/types: a simple web-based // text editor into which the user can enter a Go program, select a // region, and see type information about it. @@ -32,6 +30,7 @@ import ( "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/packages" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -125,12 +124,39 @@ func handleSelectJSON(w http.ResponseWriter, req *http.Request) { startOffset, endOffset, exact) var innermostExpr ast.Expr for i, n := range path { + // Show set of names defined in each scope. + scopeNames := "" + { + node := n + prefix := "" + + // A function (Func{Decl.Lit}) doesn't have a scope of its + // own, nor does its Body: only nested BlockStmts do. + // The type parameters, parameters, and locals are all + // in the scope associated with the FuncType; show it. + switch n := n.(type) { + case *ast.FuncDecl: + node = n.Type + prefix = "Type." + case *ast.FuncLit: + node = n.Type + prefix = "Type." + } + + if scope := pkg.TypesInfo.Scopes[node]; scope != nil { + scopeNames = fmt.Sprintf(" %sScope={%s}", + prefix, + strings.Join(scope.Names(), ", ")) + } + } + // TODO(adonovan): turn these into links to highlight the source. start, end := fset.Position(n.Pos()), fset.Position(n.End()) - fmt.Fprintf(out, "[%d] %T @ %d:%d-%d:%d (#%d-%d)\n", + fmt.Fprintf(out, "[%d] %T @ %d:%d-%d:%d (#%d-%d)%s\n", i, n, start.Line, start.Column, end.Line, - end.Column, start.Offset, end.Offset) + end.Column, start.Offset, end.Offset, + scopeNames) if e, ok := n.(ast.Expr); ok && innermostExpr == nil { innermostExpr = e } @@ -249,8 +275,8 @@ func formatObj(out *strings.Builder, fset *token.FileSet, ref string, obj types. 
origin = obj.Origin() case *types.Func: - if obj.Type().(*types.Signature).Recv() != nil { - kind = "method" + if recv := obj.Type().(*types.Signature).Recv(); recv != nil { + kind = fmt.Sprintf("method (with recv %v)", recv.Type()) } origin = obj.Origin() @@ -258,7 +284,7 @@ func formatObj(out *strings.Builder, fset *token.FileSet, ref string, obj types. if obj.IsAlias() { kind = "type alias" } - if named, ok := obj.Type().(*types.Named); ok { + if named, ok := aliases.Unalias(obj.Type()).(*types.Named); ok { origin = named.Obj() } } @@ -282,7 +308,12 @@ func formatObj(out *strings.Builder, fset *token.FileSet, ref string, obj types. // scope tree fmt.Fprintf(out, "Scopes:\n") for scope := obj.Parent(); scope != nil; scope = scope.Parent() { - fmt.Fprintln(out, scope) + var ( + start = fset.Position(scope.Pos()) + end = fset.Position(scope.End()) + ) + fmt.Fprintf(out, "%d:%d-%d:%d: %s\n", + start.Line, start.Column, end.Line, end.Column, scope) } } diff --git a/go/types/objectpath/objectpath.go b/go/types/objectpath/objectpath.go index e742ecc4644..a2386c347a2 100644 --- a/go/types/objectpath/objectpath.go +++ b/go/types/objectpath/objectpath.go @@ -29,9 +29,12 @@ import ( "strconv" "strings" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/typesinternal" ) +// TODO(adonovan): think about generic aliases. + // A Path is an opaque name that identifies a types.Object // relative to its package. Conceptually, the name consists of a // sequence of destructuring operations applied to the package scope @@ -223,7 +226,7 @@ func (enc *Encoder) For(obj types.Object) (Path, error) { // Reject obviously non-viable cases. switch obj := obj.(type) { case *types.TypeName: - if _, ok := obj.Type().(*typeparams.TypeParam); !ok { + if _, ok := aliases.Unalias(obj.Type()).(*types.TypeParam); !ok { // With the exception of type parameters, only package-level type names // have a path. 
return "", fmt.Errorf("no path for %v", obj) @@ -283,7 +286,7 @@ func (enc *Encoder) For(obj types.Object) (Path, error) { } } else { if named, _ := T.(*types.Named); named != nil { - if r := findTypeParam(obj, typeparams.ForNamed(named), path, nil); r != nil { + if r := findTypeParam(obj, named.TypeParams(), path, nil); r != nil { // generic named type return Path(r), nil } @@ -310,7 +313,7 @@ func (enc *Encoder) For(obj types.Object) (Path, error) { } // Inspect declared methods of defined types. - if T, ok := o.Type().(*types.Named); ok { + if T, ok := aliases.Unalias(o.Type()).(*types.Named); ok { path = append(path, opType) // The method index here is always with respect // to the underlying go/types data structures, @@ -391,17 +394,12 @@ func (enc *Encoder) concreteMethod(meth *types.Func) (Path, bool) { // of objectpath will only be giving us origin methods, anyway, as referring // to instantiated methods is usually not useful. - if typeparams.OriginMethod(meth) != meth { + if meth.Origin() != meth { return "", false } - recvT := meth.Type().(*types.Signature).Recv().Type() - if ptr, ok := recvT.(*types.Pointer); ok { - recvT = ptr.Elem() - } - - named, ok := recvT.(*types.Named) - if !ok { + _, named := typesinternal.ReceiverNamed(meth.Type().(*types.Signature).Recv()) + if named == nil { return "", false } @@ -444,6 +442,8 @@ func (enc *Encoder) concreteMethod(meth *types.Func) (Path, bool) { // nil, it will be allocated as necessary. func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName]bool) []byte { switch T := T.(type) { + case *aliases.Alias: + return find(obj, aliases.Unalias(T), path, seen) case *types.Basic, *types.Named: // Named types belonging to pkg were handled already, // so T must belong to another package. No path. 
@@ -462,7 +462,7 @@ func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName] } return find(obj, T.Elem(), append(path, opElem), seen) case *types.Signature: - if r := findTypeParam(obj, typeparams.ForSignature(T), path, seen); r != nil { + if r := findTypeParam(obj, T.TypeParams(), path, seen); r != nil { return r } if r := find(obj, T.Params(), append(path, opParams), seen); r != nil { @@ -505,7 +505,7 @@ func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName] } } return nil - case *typeparams.TypeParam: + case *types.TypeParam: name := T.Obj() if name == obj { return append(path, opObj) @@ -525,7 +525,7 @@ func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName] panic(T) } -func findTypeParam(obj types.Object, list *typeparams.TypeParamList, path []byte, seen map[*types.TypeName]bool) []byte { +func findTypeParam(obj types.Object, list *types.TypeParamList, path []byte, seen map[*types.TypeName]bool) []byte { for i := 0; i < list.Len(); i++ { tparam := list.At(i) path2 := appendOpArg(path, opTypeParam, i) @@ -562,7 +562,7 @@ func Object(pkg *types.Package, p Path) (types.Object, error) { } // abstraction of *types.{Named,Signature} type hasTypeParams interface { - TypeParams() *typeparams.TypeParamList + TypeParams() *types.TypeParamList } // abstraction of *types.{Named,TypeParam} type hasObj interface { @@ -616,6 +616,7 @@ func Object(pkg *types.Package, p Path) (types.Object, error) { // Inv: t != nil, obj == nil + t = aliases.Unalias(t) switch code { case opElem: hasElem, ok := t.(hasElem) // Pointer, Slice, Array, Chan, Map @@ -664,7 +665,7 @@ func Object(pkg *types.Package, p Path) (types.Object, error) { t = tparams.At(index) case opConstraint: - tparam, ok := t.(*typeparams.TypeParam) + tparam, ok := t.(*types.TypeParam) if !ok { return nil, fmt.Errorf("cannot apply %q to %s (got %T, want type parameter)", code, t, t) } diff --git a/go/types/objectpath/objectpath_go118_test.go 
b/go/types/objectpath/objectpath_go118_test.go index bc156e14d71..f061fd8d695 100644 --- a/go/types/objectpath/objectpath_go118_test.go +++ b/go/types/objectpath/objectpath_go118_test.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - package objectpath_test import ( diff --git a/go/types/typeutil/callee.go b/go/types/typeutil/callee.go index 90b3ab0e21c..90dc541adfe 100644 --- a/go/types/typeutil/callee.go +++ b/go/types/typeutil/callee.go @@ -22,7 +22,7 @@ func Callee(info *types.Info, call *ast.CallExpr) types.Object { // Look through type instantiation if necessary. isInstance := false switch fun.(type) { - case *ast.IndexExpr, *typeparams.IndexListExpr: + case *ast.IndexExpr, *ast.IndexListExpr: // When extracting the callee from an *IndexExpr, we need to check that // it is a *types.Func and not a *types.Var. // Example: Don't match a slice m within the expression `m[0]()`. 
diff --git a/go/types/typeutil/callee_test.go b/go/types/typeutil/callee_test.go index 345236147d8..faee0f88721 100644 --- a/go/types/typeutil/callee_test.go +++ b/go/types/typeutil/callee_test.go @@ -14,7 +14,7 @@ import ( "testing" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/versions" ) func TestStaticCallee(t *testing.T) { @@ -67,9 +67,6 @@ func TestStaticCallee(t *testing.T) { } func TestTypeParamStaticCallee(t *testing.T) { - if !typeparams.Enabled { - t.Skip("type parameters are not enabled") - } testStaticCallee(t, []string{ `package q func R[T any]() {} @@ -125,10 +122,11 @@ func testStaticCallee(t *testing.T, contents []string) { packages := make(map[string]*types.Package) cfg := &types.Config{Importer: closure(packages)} info := &types.Info{ + Instances: make(map[*ast.Ident]types.Instance), Uses: make(map[*ast.Ident]types.Object), Selections: make(map[*ast.SelectorExpr]*types.Selection), } - typeparams.InitInstanceInfo(info) + versions.InitFileVersions(info) var files []*ast.File for i, content := range contents { diff --git a/go/types/typeutil/map.go b/go/types/typeutil/map.go index 7bd2fdb38be..e154be0bd60 100644 --- a/go/types/typeutil/map.go +++ b/go/types/typeutil/map.go @@ -12,6 +12,7 @@ import ( "go/types" "reflect" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -219,7 +220,7 @@ type Hasher struct { // generic types or functions, and instantiated signatures do not have type // parameter lists, we should never encounter a second non-empty type // parameter list when hashing a generic signature. - sigTParams *typeparams.TypeParamList + sigTParams *types.TypeParamList } // MakeHasher returns a new Hasher instance. 
@@ -259,6 +260,9 @@ func (h Hasher) hashFor(t types.Type) uint32 { case *types.Basic: return uint32(t.Kind()) + case *aliases.Alias: + return h.Hash(t.Underlying()) + case *types.Array: return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem()) @@ -297,7 +301,7 @@ func (h Hasher) hashFor(t types.Type) uint32 { // We should never encounter a generic signature while hashing another // generic signature, but defensively set sigTParams only if h.mask is // unset. - tparams := typeparams.ForSignature(t) + tparams := t.TypeParams() if h.sigTParams == nil && tparams.Len() != 0 { h = Hasher{ // There may be something more efficient than discarding the existing @@ -318,7 +322,7 @@ func (h Hasher) hashFor(t types.Type) uint32 { return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results()) - case *typeparams.Union: + case *types.Union: return h.hashUnion(t) case *types.Interface: @@ -354,14 +358,14 @@ func (h Hasher) hashFor(t types.Type) uint32 { case *types.Named: hash := h.hashPtr(t.Obj()) - targs := typeparams.NamedTypeArgs(t) + targs := t.TypeArgs() for i := 0; i < targs.Len(); i++ { targ := targs.At(i) hash += 2 * h.Hash(targ) } return hash - case *typeparams.TypeParam: + case *types.TypeParam: return h.hashTypeParam(t) case *types.Tuple: @@ -381,7 +385,7 @@ func (h Hasher) hashTuple(tuple *types.Tuple) uint32 { return hash } -func (h Hasher) hashUnion(t *typeparams.Union) uint32 { +func (h Hasher) hashUnion(t *types.Union) uint32 { // Hash type restrictions. terms, err := typeparams.UnionTermSet(t) // if err != nil t has invalid type restrictions. Fall back on a non-zero @@ -392,7 +396,7 @@ func (h Hasher) hashUnion(t *typeparams.Union) uint32 { return h.hashTermSet(terms) } -func (h Hasher) hashTermSet(terms []*typeparams.Term) uint32 { +func (h Hasher) hashTermSet(terms []*types.Term) uint32 { hash := 9157 + 2*uint32(len(terms)) for _, term := range terms { // term order is not significant. 
@@ -416,7 +420,7 @@ func (h Hasher) hashTermSet(terms []*typeparams.Term) uint32 { // are not identical. // // Otherwise the hash of t depends only on t's pointer identity. -func (h Hasher) hashTypeParam(t *typeparams.TypeParam) uint32 { +func (h Hasher) hashTypeParam(t *types.TypeParam) uint32 { if h.sigTParams != nil { i := t.Index() if i >= 0 && i < h.sigTParams.Len() && t == h.sigTParams.At(i) { @@ -457,6 +461,9 @@ func (h Hasher) shallowHash(t types.Type) uint32 { // elements (mostly Slice, Pointer, Basic, Named), // so there's no need to optimize anything else. switch t := t.(type) { + case *aliases.Alias: + return h.shallowHash(t.Underlying()) + case *types.Signature: var hash uint32 = 604171 if t.Variadic() { @@ -489,7 +496,7 @@ func (h Hasher) shallowHash(t types.Type) uint32 { case *types.Pointer: return 4393139 - case *typeparams.Union: + case *types.Union: return 562448657 case *types.Interface: @@ -504,7 +511,7 @@ func (h Hasher) shallowHash(t types.Type) uint32 { case *types.Named: return h.hashPtr(t.Obj()) - case *typeparams.TypeParam: + case *types.TypeParam: return h.hashPtr(t.Obj()) } panic(fmt.Sprintf("shallowHash: %T: %v", t, t)) diff --git a/go/types/typeutil/map_test.go b/go/types/typeutil/map_test.go index ee73ff9cfd5..2cc1de786dc 100644 --- a/go/types/typeutil/map_test.go +++ b/go/types/typeutil/map_test.go @@ -17,7 +17,6 @@ import ( "testing" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/typeparams" ) var ( @@ -178,10 +177,6 @@ func TestMap(t *testing.T) { } func TestMapGenerics(t *testing.T) { - if !typeparams.Enabled { - t.Skip("type params are not enabled at this Go version") - } - const src = ` package p @@ -252,6 +247,15 @@ var Issue56048 = Issue56048_I.m type Issue56048_Ib interface{ m() chan []*interface { Issue56048_Ib } } var Issue56048b = Issue56048_Ib.m +// Non-generic alias +type NonAlias int +type Alias1 = NonAlias +type Alias2 = NonAlias + +// Generic alias (requires go1.23) +// type SetOfInt = 
map[int]bool +// type Set[T comparable] = map[T]bool +// type SetOfInt2 = Set[int] ` fset := token.NewFileSet() @@ -281,11 +285,11 @@ var Issue56048b = Issue56048_Ib.m CI = C.Underlying().(*types.Interface) I = scope.Lookup("I").Type() II = I.Underlying().(*types.Interface) - U = CI.EmbeddedType(0).(*typeparams.Union) + U = CI.EmbeddedType(0).(*types.Union) Fa1 = scope.Lookup("Fa1").Type().(*types.Signature) Fa2 = scope.Lookup("Fa2").Type().(*types.Signature) - Fa1P = typeparams.ForSignature(Fa1).At(0) - Fa2Q = typeparams.ForSignature(Fa2).At(0) + Fa1P = Fa1.TypeParams().At(0) + Fa2Q = Fa2.TypeParams().At(0) Fb1 = scope.Lookup("Fb1").Type().(*types.Signature) Fb1x = Fb1.Params().At(0).Type() Fb1y = scope.Lookup("Fb1").(*types.Func).Scope().Lookup("y").Type() @@ -312,6 +316,16 @@ var Issue56048b = Issue56048_Ib.m Quux = scope.Lookup("Quux").Type() Issue56048 = scope.Lookup("Issue56048").Type() Issue56048b = scope.Lookup("Issue56048b").Type() + + // In go1.23 these will be *types.Alias; for now they are all int. + NonAlias = scope.Lookup("NonAlias").Type() + Alias1 = scope.Lookup("Alias1").Type() + Alias2 = scope.Lookup("Alias2").Type() + + // Requires go1.23. + // SetOfInt = scope.Lookup("SetOfInt").Type() + // Set = scope.Lookup("Set").Type().(*types.Alias) + // SetOfInt2 = scope.Lookup("SetOfInt2").Type() ) tmap := new(typeutil.Map) @@ -384,6 +398,16 @@ var Issue56048b = Issue56048_Ib.m {Issue56048, "Issue56048", true}, // (not actually about generics) {Issue56048b, "Issue56048b", true}, // (not actually about generics) + + // All three types are identical. + {NonAlias, "NonAlias", true}, + {Alias1, "Alias1", false}, + {Alias2, "Alias2", false}, + + // Generic aliases: requires go1.23. 
+ // {SetOfInt, "SetOfInt", true}, + // {Set, "Set", false}, + // {SetOfInt2, "SetOfInt2", false}, } for _, step := range steps { @@ -396,7 +420,7 @@ var Issue56048b = Issue56048_Ib.m } func instantiate(t *testing.T, origin types.Type, targs ...types.Type) types.Type { - inst, err := typeparams.Instantiate(nil, origin, targs, true) + inst, err := types.Instantiate(nil, origin, targs, true) if err != nil { t.Fatal(err) } diff --git a/go/types/typeutil/methodsetcache.go b/go/types/typeutil/methodsetcache.go index a5d9310830c..bd71aafaaa1 100644 --- a/go/types/typeutil/methodsetcache.go +++ b/go/types/typeutil/methodsetcache.go @@ -9,6 +9,8 @@ package typeutil import ( "go/types" "sync" + + "golang.org/x/tools/internal/aliases" ) // A MethodSetCache records the method set of each type T for which @@ -32,12 +34,12 @@ func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet { cache.mu.Lock() defer cache.mu.Unlock() - switch T := T.(type) { + switch T := aliases.Unalias(T).(type) { case *types.Named: return cache.lookupNamed(T).value case *types.Pointer: - if N, ok := T.Elem().(*types.Named); ok { + if N, ok := aliases.Unalias(T.Elem()).(*types.Named); ok { return cache.lookupNamed(N).pointer } } diff --git a/go/types/typeutil/ui.go b/go/types/typeutil/ui.go index fa55b0a1e65..a0c1a60ac02 100644 --- a/go/types/typeutil/ui.go +++ b/go/types/typeutil/ui.go @@ -6,7 +6,11 @@ package typeutil // This file defines utilities for user interfaces that display types. -import "go/types" +import ( + "go/types" + + "golang.org/x/tools/internal/aliases" +) // IntuitiveMethodSet returns the intuitive method set of a type T, // which is the set of methods you can call on an addressable value of @@ -24,7 +28,7 @@ import "go/types" // The order of the result is as for types.MethodSet(T). 
func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection { isPointerToConcrete := func(T types.Type) bool { - ptr, ok := T.(*types.Pointer) + ptr, ok := aliases.Unalias(T).(*types.Pointer) return ok && !types.IsInterface(ptr.Elem()) } diff --git a/godoc/godoc.go b/godoc/godoc.go index dfac2111a67..a9d806f7e8b 100644 --- a/godoc/godoc.go +++ b/godoc/godoc.go @@ -346,16 +346,9 @@ func isDigit(ch rune) bool { } func comment_htmlFunc(info *PageInfo, comment string) string { - var buf bytes.Buffer // TODO(gri) Provide list of words (e.g. function parameters) // to be emphasized by ToHTML. - - // godocToHTML is: - // - buf.Write(info.PDoc.HTML(comment)) on go1.19 - // - go/doc.ToHTML(&buf, comment, nil) on other versions - godocToHTML(&buf, info.PDoc, comment) - - return buf.String() + return string(info.PDoc.HTML(comment)) } // sanitizeFunc sanitizes the argument src by replacing newlines with diff --git a/godoc/godoc_test.go b/godoc/godoc_test.go index 7f3470ed077..5e54db59f94 100644 --- a/godoc/godoc_test.go +++ b/godoc/godoc_test.go @@ -10,8 +10,6 @@ import ( "go/token" "strings" "testing" - - "golang.org/x/tools/internal/typeparams" ) func TestPkgLinkFunc(t *testing.T) { @@ -372,10 +370,6 @@ func TestFilterOutBuildAnnotations(t *testing.T) { } func TestLinkifyGenerics(t *testing.T) { - if !typeparams.Enabled { - t.Skip("type params are not enabled at this Go version") - } - got := linkifySource(t, []byte(` package foo diff --git a/godoc/linkify.go b/godoc/linkify.go index cf266d01f1d..ad773b8410b 100644 --- a/godoc/linkify.go +++ b/godoc/linkify.go @@ -17,8 +17,6 @@ import ( "go/token" "io" "strconv" - - "golang.org/x/tools/internal/typeparams" ) // LinkifyText HTML-escapes source text and writes it to w. 
@@ -116,7 +114,7 @@ func linksFor(node ast.Node) (links []link) { if ident, _ := x.Index.(*ast.Ident); ident != nil { typeParams[ident.Name] = true } - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: for _, index := range x.Indices { if ident, _ := index.(*ast.Ident); ident != nil { typeParams[ident.Name] = true diff --git a/godoc/server.go b/godoc/server.go index a6df6d74e68..afb28e2e187 100644 --- a/godoc/server.go +++ b/godoc/server.go @@ -29,7 +29,6 @@ import ( "golang.org/x/tools/godoc/analysis" "golang.org/x/tools/godoc/util" "golang.org/x/tools/godoc/vfs" - "golang.org/x/tools/internal/typeparams" ) // handlerServer is a migration from an old godoc http Handler type. @@ -471,7 +470,7 @@ func addNames(names map[string]bool, decl ast.Decl) { typeName = x.Name case *ast.IndexExpr: typeName = x.X.(*ast.Ident).Name - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: typeName = x.X.(*ast.Ident).Name } name = typeName + "_" + name diff --git a/godoc/server_test.go b/godoc/server_test.go index d6cc923c6ce..7fa02c53f4a 100644 --- a/godoc/server_test.go +++ b/godoc/server_test.go @@ -15,7 +15,6 @@ import ( "text/template" "golang.org/x/tools/godoc/vfs/mapfs" - "golang.org/x/tools/internal/typeparams" ) // TestIgnoredGoFiles tests the scenario where a folder has no .go or .c files, @@ -133,10 +132,6 @@ func TestMarkdown(t *testing.T) { } func TestGenerics(t *testing.T) { - if !typeparams.Enabled { - t.Skip("type params are not enabled at this Go version") - } - c := NewCorpus(mapfs.New(map[string]string{ "blah/blah.go": `package blah diff --git a/godoc/tohtml_go119.go b/godoc/tohtml_go119.go deleted file mode 100644 index 6dbf7212b9a..00000000000 --- a/godoc/tohtml_go119.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build go1.19 -// +build go1.19 - -package godoc - -import ( - "bytes" - "go/doc" -) - -func godocToHTML(buf *bytes.Buffer, pkg *doc.Package, comment string) { - buf.Write(pkg.HTML(comment)) -} diff --git a/godoc/tohtml_other.go b/godoc/tohtml_other.go deleted file mode 100644 index a1dcf2e195b..00000000000 --- a/godoc/tohtml_other.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.19 -// +build !go1.19 - -package godoc - -import ( - "bytes" - "go/doc" -) - -func godocToHTML(buf *bytes.Buffer, pkg *doc.Package, comment string) { - doc.ToHTML(buf, comment, nil) -} diff --git a/gopls/README.md b/gopls/README.md index 396f86c0242..5c80965c153 100644 --- a/gopls/README.md +++ b/gopls/README.md @@ -94,6 +94,7 @@ version of gopls. | Go 1.12 | [gopls@v0.7.5](https://github.com/golang/tools/releases/tag/gopls%2Fv0.7.5) | | Go 1.15 | [gopls@v0.9.5](https://github.com/golang/tools/releases/tag/gopls%2Fv0.9.5) | | Go 1.17 | [gopls@v0.11.0](https://github.com/golang/tools/releases/tag/gopls%2Fv0.11.0) | +| Go 1.18 | [gopls@v0.14.2](https://github.com/golang/tools/releases/tag/gopls%2Fv0.14.2) | Our extended support is enforced via [continuous integration with older Go versions](doc/contributing.md#ci). This legacy Go CI may not block releases: diff --git a/gopls/api-diff/api_diff.go b/gopls/api-diff/api_diff.go index 8bb54186bab..7194ced9fdf 100644 --- a/gopls/api-diff/api_diff.go +++ b/gopls/api-diff/api_diff.go @@ -2,14 +2,10 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build go1.18 -// +build go1.18 - package main import ( "bytes" - "context" "encoding/json" "flag" "fmt" @@ -18,7 +14,7 @@ import ( "os/exec" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/settings" ) const usage = `api-diff [] @@ -50,17 +46,16 @@ func main() { } func diffAPI(oldVer, newVer string) (string, error) { - ctx := context.Background() - previousAPI, err := loadAPI(ctx, oldVer) + previousAPI, err := loadAPI(oldVer) if err != nil { return "", fmt.Errorf("loading %s: %v", oldVer, err) } - var currentAPI *source.APIJSON + var currentAPI *settings.APIJSON if newVer == "" { - currentAPI = source.GeneratedAPIJSON + currentAPI = settings.GeneratedAPIJSON } else { var err error - currentAPI, err = loadAPI(ctx, newVer) + currentAPI, err = loadAPI(newVer) if err != nil { return "", fmt.Errorf("loading %s: %v", newVer, err) } @@ -69,7 +64,7 @@ func diffAPI(oldVer, newVer string) (string, error) { return cmp.Diff(previousAPI, currentAPI), nil } -func loadAPI(ctx context.Context, version string) (*source.APIJSON, error) { +func loadAPI(version string) (*settings.APIJSON, error) { ver := fmt.Sprintf("golang.org/x/tools/gopls@%s", version) cmd := exec.Command("go", "run", ver, "api-json") @@ -81,7 +76,7 @@ func loadAPI(ctx context.Context, version string) (*source.APIJSON, error) { if err := cmd.Run(); err != nil { return nil, fmt.Errorf("go run failed: %v; stderr:\n%s", err, stderr) } - apiJson := &source.APIJSON{} + apiJson := &settings.APIJSON{} if err := json.Unmarshal(stdout.Bytes(), apiJson); err != nil { return nil, fmt.Errorf("unmarshal: %v", err) } diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index ef1449013f6..09f1c6ee2f6 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -8,7 +8,7 @@ Details about how to enable/disable these analyses can be found ## **appends** -check for missing values after append +appends: check for missing values after append This 
checker reports calls to append that pass no values to be appended to the slice. @@ -19,27 +19,33 @@ no values to be appended to the slice. Such calls are always no-ops and often indicate an underlying mistake. +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/appends) + **Enabled by default.** ## **asmdecl** -report mismatches between assembly files and Go declarations +asmdecl: report mismatches between assembly files and Go declarations + +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/asmdecl) **Enabled by default.** ## **assign** -check for useless assignments +assign: check for useless assignments This checker reports assignments of the form x = x or a[i] = a[i]. These are almost always useless, and even when they aren't they are usually a mistake. +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/assign) + **Enabled by default.** ## **atomic** -check for common mistakes using the sync/atomic package +atomic: check for common mistakes using the sync/atomic package The atomic checker looks for assignment statements of the form: @@ -47,29 +53,37 @@ The atomic checker looks for assignment statements of the form: which are not atomic. 
+[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/atomic) + **Enabled by default.** ## **atomicalign** -check for non-64-bits-aligned arguments to sync/atomic functions +atomicalign: check for non-64-bits-aligned arguments to sync/atomic functions + +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/atomicalign) **Enabled by default.** ## **bools** -check for common mistakes involving boolean operators +bools: check for common mistakes involving boolean operators + +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/bools) **Enabled by default.** ## **buildtag** -check //go:build and // +build directives +buildtag: check //go:build and // +build directives + +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/buildtag) **Enabled by default.** ## **cgocall** -detect some violations of the cgo pointer passing rules +cgocall: detect some violations of the cgo pointer passing rules Check for invalid cgo pointer passing. This looks for code that uses cgo to call C code passing values @@ -78,11 +92,13 @@ sharing rules. Specifically, it warns about attempts to pass a Go chan, map, func, or slice to C, either directly, or via a pointer, array, or struct. 
+[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/cgocall) + **Enabled by default.** ## **composites** -check for unkeyed composite literals +composites: check for unkeyed composite literals This analyzer reports a diagnostic for composite literals of struct types imported from another package that do not use the field-keyed @@ -98,21 +114,25 @@ should be replaced by: err = &net.DNSConfigError{Err: err} +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/composite) + **Enabled by default.** ## **copylocks** -check for locks erroneously passed by value +copylocks: check for locks erroneously passed by value Inadvertently copying a value containing a lock, such as sync.Mutex or sync.WaitGroup, may cause both copies to malfunction. Generally such values should be referred to through a pointer. +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/copylocks) + **Enabled by default.** ## **deepequalerrors** -check for calls of reflect.DeepEqual on error values +deepequalerrors: check for calls of reflect.DeepEqual on error values The deepequalerrors checker looks for calls of the form: @@ -121,11 +141,13 @@ The deepequalerrors checker looks for calls of the form: where err1 and err2 are errors. Using reflect.DeepEqual to compare errors is discouraged. 
+[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/deepequalerrors) + **Enabled by default.** ## **defers** -report common mistakes in defer statements +defers: report common mistakes in defer statements The defers analyzer reports a diagnostic when a defer statement would result in a non-deferred call to time.Since, as experience has shown @@ -141,22 +163,27 @@ The correct code is: defer func() { recordLatency(time.Since(start)) }() +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/defers) + **Enabled by default.** ## **deprecated** -check for use of deprecated identifiers +deprecated: check for use of deprecated identifiers -The deprecated analyzer looks for deprecated symbols and package imports. +The deprecated analyzer looks for deprecated symbols and package +imports. See https://go.dev/wiki/Deprecated to learn about Go's convention for documenting and signaling deprecated identifiers. +[Full documentation](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/deprecated) + **Enabled by default.** ## **directive** -check Go toolchain directives such as //go:debug +directive: check Go toolchain directives such as //go:debug This analyzer checks for problems with known Go toolchain directives in all Go source files in a package directory, even those excluded by @@ -172,11 +199,13 @@ This analyzer does not check //go:build, which is handled by the buildtag analyzer. +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/directive) + **Enabled by default.** ## **embed** -check //go:embed directive usage +embed: check //go:embed directive usage This analyzer checks that the embed package is imported if //go:embed directives are present, providing a suggested fix to add the import if @@ -185,20 +214,24 @@ it is missing. This analyzer also checks that //go:embed directives precede the declaration of a single variable. 
+[Full documentation](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/embeddirective) + **Enabled by default.** ## **errorsas** -report passing non-pointer or non-error values to errors.As +errorsas: report passing non-pointer or non-error values to errors.As The errorsas analysis reports calls to errors.As where the type of the second argument is not a pointer to a type implementing error. +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/errorsas) + **Enabled by default.** ## **fieldalignment** -find structs that would use less memory if their fields were sorted +fieldalignment: find structs that would use less memory if their fields were sorted This analyzer find structs that can be rearranged to use less memory, and provides a suggested edit with the most compact order. @@ -226,11 +259,36 @@ to occupy the same CPU cache line, inducing a form of memory contention known as "false sharing" that slows down both goroutines. +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/fieldalignment) + **Disabled by default. Enable it by setting `"analyses": {"fieldalignment": true}`.** +## **fillreturns** + +fillreturns: suggest fixes for errors due to an incorrect number of return values + +This checker provides suggested fixes for type errors of the +type "wrong number of return values (want %d, got %d)". For example: + + func m() (int, string, *bool, error) { + return + } + +will turn into + + func m() (int, string, *bool, error) { + return 0, "", nil, nil + } + +This functionality is similar to https://github.com/sqs/goreturns. 
+ +[Full documentation](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/fillreturns) + +**Enabled by default.** + ## **httpresponse** -check for mistakes using HTTP responses +httpresponse: check for mistakes using HTTP responses A common mistake when using the net/http package is to defer a function call to close the http.Response Body before checking the error that @@ -246,11 +304,13 @@ determines whether the response is valid: This checker helps uncover latent nil dereference bugs by reporting a diagnostic for such mistakes. +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/httpresponse) + **Enabled by default.** ## **ifaceassert** -detect impossible interface-to-interface type assertions +ifaceassert: detect impossible interface-to-interface type assertions This checker flags type assertions v.(T) and corresponding type-switch cases in which the static type V of v is an interface that cannot possibly implement @@ -265,19 +325,43 @@ name but different signatures. Example: The Read method in v has a different signature than the Read method in io.Reader, so this assertion cannot succeed. 
+[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/ifaceassert) + +**Enabled by default.** + +## **infertypeargs** + +infertypeargs: check for unnecessary type arguments in call expressions + +Explicit type arguments may be omitted from call expressions if they can be +inferred from function arguments, or from other type arguments: + + func f[T any](T) {} + + func _() { + f[string]("foo") // string could be inferred + } + + +[Full documentation](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/infertypeargs) + **Enabled by default.** ## **loopclosure** -check references to loop variables from within nested functions +loopclosure: check references to loop variables from within nested functions This analyzer reports places where a function literal references the iteration variable of an enclosing loop, and the loop calls the function in such a way (e.g. with go or defer) that it may outlive the loop iteration and possibly observe the wrong value of the variable. +Note: An iteration variable can only outlive a loop iteration in Go versions <=1.21. +In Go 1.22 and later, the loop variable lifetimes changed to create a new +iteration variable per loop iteration. (See go.dev/issue/60078.) + In this example, all the deferred functions run after the loop has -completed, so all observe the final value of v. +completed, so all observe the final value of v [" + +This checker provides suggested fixes for type errors of the +type "undeclared name: <>". 
It will either insert a new statement, +such as: + + <> := + +or a new function declaration, such as: + + func <>(inferred parameters) { + panic("implement me!") + } + +[Full documentation](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/undeclaredname) + **Enabled by default.** ## **unmarshal** -report passing non-pointer or non-interface values to unmarshal +unmarshal: report passing non-pointer or non-interface values to unmarshal The unmarshal analysis reports calls to functions such as json.Unmarshal in which the argument type is not a pointer or an interface. +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unmarshal) + **Enabled by default.** ## **unreachable** -check for unreachable code +unreachable: check for unreachable code The unreachable analyzer finds statements that execution can never reach because they are preceded by an return statement, a call to panic, an infinite loop, or similar constructs. +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unreachable) + **Enabled by default.** ## **unsafeptr** -check for invalid conversions of uintptr to unsafe.Pointer +unsafeptr: check for invalid conversions of uintptr to unsafe.Pointer The unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer to convert integers to pointers. A conversion from uintptr to @@ -654,26 +933,44 @@ unsafe.Pointer is invalid if it implies that there is a uintptr-typed word in memory that holds a pointer value, because that word will be invisible to stack copying and to the garbage collector. +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unsafeptr) + **Enabled by default.** ## **unusedparams** -check for unused parameters of functions +unusedparams: check for unused parameters of functions The unusedparams analyzer checks functions to see if there are any parameters that are not being used. 
-To reduce false positives it ignores: -- methods -- parameters that do not have a name or have the name '_' (the blank identifier) -- functions in test files -- functions with empty bodies or those with just a return stmt +To ensure soundness, it ignores: + - "address-taken" functions, that is, functions that are used as + a value rather than being called directly; their signatures may + be required to conform to a func type. + - exported functions or methods, since they may be address-taken + in another package. + - unexported methods whose name matches an interface method + declared in the same package, since the method's signature + may be required to conform to the interface type. + - functions with empty bodies, or containing just a call to panic. + - parameters that are unnamed, or named "_", the blank identifier. + +The analyzer suggests a fix of replacing the parameter name by "_", +but in such cases a deeper fix can be obtained by invoking the +"Refactor: remove unused parameter" code action, which will +eliminate the parameter entirely, along with all corresponding +arguments at call sites, while taking care to preserve any side +effects in the argument expressions; see +https://github.com/golang/tools/releases/tag/gopls%2Fv0.14. + +[Full documentation](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedparams) -**Disabled by default. Enable it by setting `"analyses": {"unusedparams": true}`.** +**Enabled by default.** ## **unusedresult** -check for unused results of calls to some functions +unusedresult: check for unused results of calls to some functions Some functions like fmt.Errorf return a result and have no side effects, so it is always a mistake to discard the result. Other @@ -683,11 +980,21 @@ functions like these when the result of the call is ignored. The set of functions may be controlled using flags. 
+[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedresult) + **Enabled by default.** +## **unusedvariable** + +unusedvariable: check for unused variables and suggest fixes + +[Full documentation](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedvariable) + +**Disabled by default. Enable it by setting `"analyses": {"unusedvariable": true}`.** + ## **unusedwrite** -checks for unused writes +unusedwrite: checks for unused writes The analyzer reports instances of writes to struct fields and arrays that are never read. Specifically, when a struct object @@ -713,125 +1020,16 @@ Another example is about non-pointer receiver: t.x = i // unused write to field x } -**Disabled by default. Enable it by setting `"analyses": {"unusedwrite": true}`.** - -## **useany** - -check for constraints that could be simplified to "any" - -**Disabled by default. Enable it by setting `"analyses": {"useany": true}`.** - -## **fillreturns** - -suggest fixes for errors due to an incorrect number of return values - -This checker provides suggested fixes for type errors of the -type "wrong number of return values (want %d, got %d)". For example: - func m() (int, string, *bool, error) { - return - } -will turn into - func m() (int, string, *bool, error) { - return 0, "", nil, nil - } - -This functionality is similar to https://github.com/sqs/goreturns. - - -**Enabled by default.** - -## **nonewvars** - -suggested fixes for "no new vars on left side of :=" - -This checker provides suggested fixes for type errors of the -type "no new vars on left side of :=". For example: - z := 1 - z := 2 -will turn into - z := 1 - z = 2 - - -**Enabled by default.** - -## **noresultvalues** - -suggested fixes for unexpected return values - -This checker provides suggested fixes for type errors of the -type "no result values expected" or "too many return values". 
-For example: - func z() { return nil } -will turn into - func z() { return } - - -**Enabled by default.** - -## **undeclaredname** - -suggested fixes for "undeclared name: <>" - -This checker provides suggested fixes for type errors of the -type "undeclared name: <>". It will either insert a new statement, -such as: - -"<> := " - -or a new function declaration, such as: - -func <>(inferred parameters) { - panic("implement me!") -} - - -**Enabled by default.** - -## **unusedvariable** - -check for unused variables - -The unusedvariable analyzer suggests fixes for unused variables errors. - - -**Disabled by default. Enable it by setting `"analyses": {"unusedvariable": true}`.** - -## **fillstruct** - -note incomplete struct initializations - -This analyzer provides diagnostics for any struct literals that do not have -any fields initialized. Because the suggested fix for this analysis is -expensive to compute, callers should compute it separately, using the -SuggestedFix function below. - - -**Enabled by default.** - -## **infertypeargs** - -check for unnecessary type arguments in call expressions - -Explicit type arguments may be omitted from call expressions if they can be -inferred from function arguments, or from other type arguments: - - func f[T any](T) {} - - func _() { - f[string]("foo") // string could be inferred - } - +[Full documentation](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedwrite) **Enabled by default.** -## **stubmethods** +## **useany** -stub methods analyzer +useany: check for constraints that could be simplified to "any" -This analyzer generates method stubs for concrete types -in order to implement a target interface +[Full documentation](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/useany) -**Enabled by default.** +**Disabled by default. 
Enable it by setting `"analyses": {"useany": true}`.** diff --git a/gopls/doc/commands.md b/gopls/doc/commands.md index 3404c91c7e5..46675ba3b12 100644 --- a/gopls/doc/commands.md +++ b/gopls/doc/commands.md @@ -41,7 +41,7 @@ Args: } ``` -### **update the given telemetry counters.** +### **Update the given telemetry counters** Identifier: `gopls.add_telemetry_counters` Gopls will prepend "fwd/" to all the counters updated using this command @@ -66,7 +66,14 @@ Args: ``` { - // The fix to apply. + // The name of the fix to apply. + // + // For fixes suggested by analyzers, this is a string constant + // advertised by the analyzer that matches the Category of + // the analysis.Diagnostic with a SuggestedFix containing no edits. + // + // For fixes suggested by code actions, this is a string agreed + // upon by the code action and golang.ApplyFix. "Fix": string, // The file URI for the document to fix. "URI": string, @@ -81,10 +88,51 @@ Args: "character": uint32, }, }, + // Whether to resolve and return the edits. + "ResolveEdits": bool, } ``` -### **performs a "change signature" refactoring.** +Result: + +``` +{ + // Holds changes to existing resources. + "changes": map[golang.org/x/tools/gopls/internal/protocol.DocumentURI][]golang.org/x/tools/gopls/internal/protocol.TextEdit, + // Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes + // are either an array of `TextDocumentEdit`s to express changes to n different text documents + // where each text document edit addresses a specific version of a text document. Or it can contain + // above `TextDocumentEdit`s mixed with create, rename and delete file / folder operations. + // + // Whether a client supports versioned document edits is expressed via + // `workspace.workspaceEdit.documentChanges` client capability. 
+ // + // If a client neither supports `documentChanges` nor `workspace.workspaceEdit.resourceOperations` then + // only plain `TextEdit`s using the `changes` property are supported. + "documentChanges": []{ + "TextDocumentEdit": { + "textDocument": { ... }, + "edits": { ... }, + }, + "RenameFile": { + "kind": string, + "oldUri": string, + "newUri": string, + "options": { ... }, + "ResourceOperation": { ... }, + }, + }, + // A map of change annotations that can be referenced in `AnnotatedTextEdit`s or create, rename and + // delete file / folder operations. + // + // Whether clients honor this property depends on the client capability `workspace.changeAnnotationSupport`. + // + // @since 3.16.0 + "changeAnnotations": map[string]golang.org/x/tools/gopls/internal/protocol.ChangeAnnotation, +} +``` + +### **Perform a "change signature" refactoring** Identifier: `gopls.change_signature` This command is experimental, currently only supporting parameter removal. @@ -101,6 +149,47 @@ Args: "end": { ... }, }, }, + // Whether to resolve and return the edits. + "ResolveEdits": bool, +} +``` + +Result: + +``` +{ + // Holds changes to existing resources. + "changes": map[golang.org/x/tools/gopls/internal/protocol.DocumentURI][]golang.org/x/tools/gopls/internal/protocol.TextEdit, + // Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes + // are either an array of `TextDocumentEdit`s to express changes to n different text documents + // where each text document edit addresses a specific version of a text document. Or it can contain + // above `TextDocumentEdit`s mixed with create, rename and delete file / folder operations. + // + // Whether a client supports versioned document edits is expressed via + // `workspace.workspaceEdit.documentChanges` client capability. 
+ // + // If a client neither supports `documentChanges` nor `workspace.workspaceEdit.resourceOperations` then + // only plain `TextEdit`s using the `changes` property are supported. + "documentChanges": []{ + "TextDocumentEdit": { + "textDocument": { ... }, + "edits": { ... }, + }, + "RenameFile": { + "kind": string, + "oldUri": string, + "newUri": string, + "options": { ... }, + "ResourceOperation": { ... }, + }, + }, + // A map of change annotations that can be referenced in `AnnotatedTextEdit`s or create, rename and + // delete file / folder operations. + // + // Whether clients honor this property depends on the client capability `workspace.changeAnnotationSupport`. + // + // @since 3.16.0 + "changeAnnotations": map[string]golang.org/x/tools/gopls/internal/protocol.ChangeAnnotation, } ``` @@ -120,6 +209,43 @@ Args: } ``` +### **Cause server to publish diagnostics for the specified files.** +Identifier: `gopls.diagnose_files` + +This command is needed by the 'gopls {check,fix}' CLI subcommands. + +Args: + +``` +{ + "Files": []string, +} +``` + +### **View package documentation.** +Identifier: `gopls.doc` + +Opens the Go package documentation page for the current +package in a browser. + +Args: + +``` +{ + "uri": string, + "range": { + "start": { + "line": uint32, + "character": uint32, + }, + "end": { + "line": uint32, + "character": uint32, + }, + }, +} +``` + ### **Run go mod edit -go=version** Identifier: `gopls.edit_go_directive` @@ -153,7 +279,7 @@ Args: Result: ``` -map[golang.org/x/tools/gopls/internal/lsp/protocol.DocumentURI]*golang.org/x/tools/gopls/internal/vulncheck.Result +map[golang.org/x/tools/gopls/internal/protocol.DocumentURI]*golang.org/x/tools/gopls/internal/vulncheck.Result ``` ### **Toggle gc_details** @@ -183,7 +309,7 @@ Args: } ``` -### **go get a package** +### **'go get' a package** Identifier: `gopls.go_get_package` Runs `go get` to fetch a package. 
@@ -258,13 +384,14 @@ Result: } ``` -### **checks for the right conditions, and then prompts** +### **Prompt user to enable telemetry** Identifier: `gopls.maybe_prompt_for_telemetry` -the user to ask if they want to enable Go telemetry uploading. If the user -responds 'Yes', the telemetry mode is set to "on". +Checks for the right conditions, and then prompts the user +to ask if they want to enable Go telemetry uploading. If +the user responds 'Yes', the telemetry mode is set to "on". -### **fetch memory statistics** +### **Fetch memory statistics** Identifier: `gopls.mem_stats` Call runtime.GC multiple times and return memory statistics as reported by @@ -334,10 +461,10 @@ Args: } ``` -### **run `go work [args...]`, and apply the resulting go.work** +### **Run `go work [args...]`, and apply the resulting go.work** Identifier: `gopls.run_go_work_command` -edits to the current go.work file. +edits to the current go.work file Args: @@ -349,7 +476,7 @@ Args: } ``` -### **Run vulncheck.** +### **Run vulncheck** Identifier: `gopls.run_govulncheck` Run vulnerability check (`govulncheck`). @@ -438,7 +565,7 @@ Result: } ``` -### **start capturing a profile of gopls' execution.** +### **Start capturing a profile of gopls' execution** Identifier: `gopls.start_profile` Start a new pprof profile. Before using the resulting file, profiling must @@ -459,7 +586,7 @@ Result: struct{} ``` -### **stop an ongoing profile.** +### **Stop an ongoing profile** Identifier: `gopls.stop_profile` This command is intended for internal use only, by the gopls benchmark @@ -567,7 +694,23 @@ Args: } ``` -### **fetch workspace statistics** +### **List current Views on the server.** +Identifier: `gopls.views` + +This command is intended for use by gopls tests only. 
+ +Result: + +``` +[]{ + "Type": string, + "Root": string, + "Folder": string, + "EnvOverlay": []string, +} +``` + +### **Fetch workspace statistics** Identifier: `gopls.workspace_stats` Query statistics about workspace builds, modules, packages, and files. diff --git a/gopls/doc/contributing.md b/gopls/doc/contributing.md index 0ac56caf269..a2f987b63c9 100644 --- a/gopls/doc/contributing.md +++ b/gopls/doc/contributing.md @@ -18,8 +18,8 @@ claiming it. ## Getting started -Most of the `gopls` logic is in the `golang.org/x/tools/gopls/internal/lsp` -directory. +Most of the `gopls` logic is in the `golang.org/x/tools/gopls/internal` +directory. See [design/implementation.md] for an overview of the code organization. ## Build @@ -94,41 +94,43 @@ Users are invited to share it if they are willing. ## Testing -To run tests for just `gopls/`, run, +The normal command you should use to run the tests after a change is: ```bash -cd /path/to/tools/gopls -go test ./... -``` - -But, much of the gopls work involves `internal/lsp` too, so you will want to -run both: - -```bash -cd /path/to/tools -cd gopls && go test ./... -cd .. -go test ./internal/lsp/... +gopls$ go test -short ./... ``` -There is additional information about the `internal/lsp` tests in the -[internal/lsp/tests `README`](https://github.com/golang/tools/blob/master/internal/lsp/tests/README.md). - -### Regtests - -gopls has a suite of regression tests defined in the `./gopls/internal/regtest` -directory. Each of these tests writes files to a temporary directory, starts a -separate gopls session, and scripts interactions using an editor-like API. As a -result of this overhead they can be quite slow, particularly on systems where -file operations are costly. - -Due to the asynchronous nature of the LSP, regtests assertions are written -as 'expectations' that the editor state must achieve _eventually_. This can -make debugging the regtests difficult. 
To aid with debugging, the regtests -output their LSP logs on any failure. If your CL gets a test failure while -running the regtests, please do take a look at the description of the error and -the LSP logs, but don't hesitate to [reach out](#getting-help) to the gopls -team if you need help. +(The `-short` flag skips some slow-running ones. The trybot builders +run the complete set, on a wide range of platforms.) + +Gopls tests are a mix of two kinds. + +- [Marker tests](../internal/test/marker) express each test scenario + in a standalone text file that contains the target .go, go.mod, and + go.work files, in which special annotations embedded in comments + drive the test. These tests are generally easy to write and fast + to iterate, but have limitations on what they can express. + +- [Integration tests](../internal/test/integration) are regular Go + `func Test(*testing.T)` functions that make a series of calls to an + API for a fake LSP-enabled client editor. The API allows you to open + and edit a file, navigate to a definition, invoke other LSP + operations, and assert properties about the state. + + Due to the asynchronous nature of the LSP, integration tests make + assertions about states that the editor must achieve eventually, + even when the program goes wrong quickly, it may take a while before + the error is reported as a failure to achieve the desired state + within several minutes. We recommend that you set + `GOPLS_INTEGRATION_TEST_TIMEOUT=10s` to reduce the timeout for + integration tests when debugging. + + When they fail, the integration tests print the log of the LSP + session between client and server. Though verbose, they are very + helpful for debugging once you know how to read them. + +Don't hesitate to [reach out](#getting-help) to the gopls team if you +need help. 
### CI diff --git a/gopls/doc/design/architecture.svg b/gopls/doc/design/architecture.svg new file mode 100644 index 00000000000..6c554d5670c --- /dev/null +++ b/gopls/doc/design/architecture.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/gopls/doc/design/design.md b/gopls/doc/design/design.md index 05f449d8f52..6e6e7c3bb15 100644 --- a/gopls/doc/design/design.md +++ b/gopls/doc/design/design.md @@ -1,5 +1,47 @@ # `gopls` design documentation +## _A note from the future_ + +What follows below is the original design document for gopls, aggregated from +various sources spanning 2018 and 2019. Since then, all of the features listed +below have been implemented, along with many others. The first two goals have +been achieved: gopls is a full implementation of the LSP, and the default +backend for VS Code Go and many other editors. The third goal has only been +partially realized: while gopls has gained many features, it is not extensible +in the sense used in this document: the only way to extend gopls is to modify +gopls. The fourth goal is not achieved: while some notable companies are able +to use gopls with Bazel, the experience is subpar, and the Go command is the +only officially supported build system. + +On the other hand, two of the explicit non-goals have been reconsidered. One is +minor: syntax highlighting is now supported in the LSP by way of semantic +tokens. The other is major: as gopls gained popularity, it became apparent that +its memory footprint was a problem. The size of developer workspaces was +increasing faster than the RAM available in typical development environments +(particularly with containerized development). Gopls now uses a hybrid of +on-disk indexes and in-memory caches, described in more detail in our +[blog post on scalability](https://go.dev/blog/gopls-scalability). + +Notably, in anticipating difficulties this doc turned out to be prescient. 
+Gopls has indeed struggled against the core standard library packages upon +which it is built, and its user experience is still limited by the LSP. +Nevertheless, sticking with the standard library and LSP was the right +approach, as despite our small team these decisions have helped gopls keep up +with the evolving Go language (i.e. generics), and to integrate with many new +text editors. + +Gopls development continues, more than four years later, with a focus on +simplicity, reliability, and extensibility. The new, opt-in +[Go telemetry](https://github.com/golang/tools/releases/tag/gopls%2Fv0.14.0) +will help us attain a higher standard of stability in our releases than we've +been able to achieve through Github issues alone. Furthermore, telemetry will +allow us to focus on high-priority features, and deprecate historical +workarounds that burden the codebase. With greater velocity, we look forward +to working with the community on improved refactoring, static analysis, and +whatever else the future brings. + +- _Rob Findley (rfindley@google.com), 2023_ + ## Goals * `gopls` should **become the default editor backend** for the major editors used by Go programmers, fully supported by the Go team. diff --git a/gopls/doc/design/implementation.md b/gopls/doc/design/implementation.md index e9b915ba393..12d655c0b5e 100644 --- a/gopls/doc/design/implementation.md +++ b/gopls/doc/design/implementation.md @@ -1,48 +1,172 @@ -# gopls implementation documentation -This is not intended as a complete description of the implementation, for the most the part the package godoc, code comments and the code itself hold that. -Instead this is meant to be a guide into finding parts of the implementation, and understanding some core concepts used throughout the implementation. +# Gopls architecture -## View/Session/Cache +Last major update: Jan 16 2024 -Throughout the code there are references to these three concepts, and they build on each other. 
+This doc presents a high-level overview of the structure of gopls to +help new contributors find their way. It is not intended to be a +complete description of the implementation, nor even of any key +components; for that, the package documentation (linked below) and +other comments within the code are a better guide. -At the base is the *Cache*. This is the level at which we hold information that is global in nature, for instance information about the file system and its contents. +The diagram below shows selected components of the gopls module and +their relationship to each other according to the Go import graph. +Tests and test infrastructure are not shown, nor are utility packages, +nor packages from the [x/tools] module. For brevity, packages are +referred to by their last segment, which is usually unambiguous. -Above that is the *Session*, which holds information for a connection to an editor. This layer hold things like the edited files (referred to as overlays). +The height of each blob corresponds loosely to its technical depth. +Some blocks are wide and shallow, such as [protocol], which declares +Go types for the entire LSP protocol. Others are deep, such as [cache] +and [golang], as they contain a lot of dense logic and algorithms. -The top layer is called the *View*. This holds the configuration, and the mapping to configured packages. + +![Gopls architecture](architecture.svg) -The purpose of this layering is to allow a single editor session to have multiple views active whilst still sharing as much information as possible for efficiency. -In theory if only the View layer existed, the results would be identical, but slower and using more memory. +Starting from the bottom, we'll describe the various components. 
-## Code location +The lowest layer defines the request and response types of the +Language Server Protocol: -gopls will be developed in the [x/tools] Go repository; the core packages are in [internal/lsp], and the binary and integration tests are located in [gopls]. +- The [protocol] package defines the standard protocol; it is mostly + generated mechanically from the schema definition provided by + Microsoft. + The most important type is DocumentURI, which represents a `file:` + URL that identifies a client editor document. It also provides + `Mapper`, which maps between the different coordinate systems used + for source positions: UTF-8, UTF-16, and token.Pos. -Below is a list of the core packages of gopls, and their primary purpose: +- The [command] package defines Gopls's non-standard commands, which + are all invoked through the `workspace/executeCommand` extension + mechanism. These commands are typically returned by the server as + continuations of Code Actions or Code Lenses; most clients do not + construct calls to them directly. 
-Package | Description ---- | --- -[gopls] | the main binary, plugins and integration tests -[internal/lsp] | the core message handling package -[internal/lsp/cache] | the cache layer -[internal/lsp/cmd] | the gopls command line layer -[internal/lsp/debug] | features to aid in debugging gopls -[internal/lsp/protocol] | the types of LSP request and response messages -[internal/lsp/source] | the core feature implementations -[internal/span] | a package for dealing with source file locations -[internal/memoize] | a function invocation cache used to reduce the work done -[internal/jsonrpc2] | an implementation of the JSON RPC2 specification +The next layer defines a number of important and very widely used data structures: -[gopls]: https://github.com/golang/tools/tree/master/gopls -[internal/jsonrpc2]: https://github.com/golang/tools/tree/master/internal/jsonrpc2 -[internal/lsp]: https://github.com/golang/tools/tree/master/gopls/internal/lsp -[internal/lsp/cache]: https://github.com/golang/tools/tree/master/gopls/internal/lsp/cache -[internal/lsp/cmd]: https://github.com/golang/tools/tree/master/gopls/internal/lsp/cmd -[internal/lsp/debug]: https://github.com/golang/tools/tree/master/gopls/internal/lsp/debug -[internal/lsp/protocol]: https://github.com/golang/tools/tree/master/gopls/internal/lsp/protocol -[internal/lsp/source]: https://github.com/golang/tools/tree/master/gopls/internal/lsp/source -[internal/memoize]: https://github.com/golang/tools/tree/master/internal/memoize -[internal/span]: https://github.com/golang/tools/tree/master/gopls/internal/span -[x/tools]: https://github.com/golang/tools +- The [file] package defines the primary abstractions of a client + file: its `Identity` (URI and content hash), and its `Handle` (which + additionally provides the version and content of a particular + snapshot of the file. 
+ +- The [parsego] package defines `File`, the parsed form of a Go source + file, including its content, syntax tree, and coordinate + mappings (Mapper and token.File). The package performs various kinds of tree + repair to work around error-recovery shortcomings of the Go parser. + +- The [metadata] package defines `Package`, an abstraction of the + metadata of a Go package, similar to the output of `go list -json`. + Metadata is produced from [go/packages], which takes + care of invoking `go list`. (Users report that it works to some extent + with a GOPACKAGESDRIVER for Bazel, though we maintain no tests for this + scenario.) + + The package also provides `Graph`, the complete import graph for a + workspace; each graph node is a `Package`. + +The [settings] layer defines the data structure (effectively a large +tree) for gopls configuration options, along with its JSON encoding. + +The [cache] layer is the largest and most complex component of gopls. +It is concerned with state management, dependency analysis, and invalidation: +the `Session` of communication with the client; +the `Folder`s that the client has opened; +the `View` of a particular workspace tree with particular build +options; +the `Snapshot` of the state of all files in the workspace after a +particular edit operation; +the contents of all files, whether saved to disk (`DiskFile`) or +edited and unsaved (`Overlay`); +the `Cache` of in-memory memoized computations, +such as parsing go.mod files or building the symbol index; +and the `Package`, which holds the results of type checking a package +from Go syntax. + +The cache layer depends on various auxiliary packages, including: + +- The [filecache] package, which manages gopls' persistent, transactional, + file-based key/value store. 
+ +- The [xrefs], [methodsets], and [typerefs] packages define algorithms + for constructing indexes of information derived from type-checking, + and for encoding and decoding these serializable indexes in the file + cache. + + Together these packages enable the fast restart, reduced memory + consumption, and synergy across processes that were delivered by the + v0.12 redesign and described in ["Scaling gopls for the growing Go + ecosystem"](https://go.dev/blog/gopls-scalability). + +The cache also defines gopls's [go/analysis] driver, which runs +modular analysis (similar to `go vet`) across the workspace. +Gopls also includes a number of analysis passes that are not part of vet. + +The next layer defines four packages, each for handling files in a +particular language: +[mod] for go.mod files; +[work] for go.work files; +[template] for files in `text/template` syntax; and +[golang], for files in Go itself. +This package, by far the largest, provides the main features of gopls: +navigation, analysis, and refactoring of Go code. +As most users imagine it, this package _is_ gopls. + +The [server] package defines the LSP service implementation, with one +handler method per LSP request type. Each handler switches on the type +of the file and dispatches to one of the four language-specific +packages. + +The [lsprpc] package connects the service interface to our [JSON RPC](jsonrpc2) +server. + +Bear in mind that the diagram is a dependency graph, a "static" +viewpoint of the program's structure. A more dynamic viewpoint would +order the packages based on the sequence in which they are encountered +during processing of a particular request; in such a view, the bottom +layer would represent the "wire" (protocol and command), the next +layer up would hold the RPC-related packages (lsprpc and server), and +features (e.g. golang, mod, work, template) would be at the top. 
+ + + +The [cmd] package defines the command-line interface of the `gopls` +command, around which gopls's main package is just a trivial wrapper. +It is usually run without arguments, causing it to start a server and +listen indefinitely. +It also provides a number of subcommands that start a server, make a +single request to it, and exit, providing traditional batch-command +access to server functionality. These subcommands are primarily +provided as a debugging aid (but see +[#63693](https://github.com/golang/go/issues/63693)). + +[cache]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cache +[cmd]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cmd +[command]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/protocol/command +[debug]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/debug +[file]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/file +[filecache]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/filecache +[go/analysis]: https://pkg.go.dev/golang.org/x/tools@master/go/analysis +[go/packages]: https://pkg.go.dev/golang.org/x/tools@master/go/packages +[gopls]: https://pkg.go.dev/golang.org/x/tools/gopls@master +[jsonrpc2]: https://pkg.go.dev/golang.org/x/tools@master/internal/jsonrpc2 +[lsprpc]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/lsprpc +[memoize]: https://github.com/golang/tools/tree/master/internal/memoize +[metadata]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cache/metadata +[methodsets]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cache/methodsets +[mod]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/mod +[parsego]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cache/parsego +[protocol]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/protocol +[server]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/server +[settings]: 
https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/settings +[golang]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/golang +[template]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/template +[typerefs]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cache/typerefs +[work]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/work +[x/tools]: https://github.com/golang/tools@master +[xrefs]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cache/xrefs diff --git a/gopls/doc/design/integrating.md b/gopls/doc/design/integrating.md index ba2cc07aa71..2d8e01a76c0 100644 --- a/gopls/doc/design/integrating.md +++ b/gopls/doc/design/integrating.md @@ -19,9 +19,7 @@ Many LSP requests pass position or range information. This is described in the [ > A position inside a document (see Position definition below) is expressed as a zero-based line and character offset. The offsets are based on a UTF-16 string representation. So a string of the form a𐐀b the character offset of the character a is 0, the character offset of 𐐀 is 1 and the character offset of b is 3 since 𐐀 is represented using two code units in UTF-16. This means that integrators will need to calculate UTF-16 based column offsets. - -[`golang.org/x/tools/gopls/internal/span`] has the code to do this in go. -[#31080] tracks making `span` and other useful packages non-internal. +Use `protocol.Mapper` for all the conversions. ## Edits @@ -61,9 +59,9 @@ For instance, files that are needed to do correct type checking are modified by Monitoring files inside gopls directly has a lot of awkward problems, but the [LSP specification] has methods that allow gopls to request that the client notify it of file system changes, specifically [`workspace/didChangeWatchedFiles`]. 
This is currently being added to gopls by a community member, and tracked in [#31553] -[InitializeResult]: https://pkg.go.dev/golang.org/x/tools/gopls/internal/lsp/protocol#InitializeResult -[ServerCapabilities]: https://pkg.go.dev/golang.org/x/tools/gopls/internal/lsp/protocol#ServerCapabilities -[`golang.org/x/tools/gopls/internal/span`]: https://pkg.go.dev/golang.org/x/tools/internal/span#NewPoint +[InitializeResult]: https://pkg.go.dev/golang.org/x/tools/gopls/internal/protocol#InitializeResult +[ServerCapabilities]: https://pkg.go.dev/golang.org/x/tools/gopls/internal/protocol#ServerCapabilities +[`golang.org/x/tools/gopls/internal/protocol`]: https://pkg.go.dev/golang.org/x/tools/internal/protocol#NewPoint [LSP specification]: https://microsoft.github.io/language-server-protocol/specifications/specification-3-14/ [lsp-response]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#response-message diff --git a/gopls/doc/emacs.md b/gopls/doc/emacs.md index 486f49325cb..8a54cf19d0a 100644 --- a/gopls/doc/emacs.md +++ b/gopls/doc/emacs.md @@ -144,12 +144,14 @@ code action, which you can invoke as needed by running `M-x eglot-code-actions` (or a key of your choice bound to the `eglot-code-actions` function) and selecting `Organize Imports` at the prompt. -Eglot does not currently support a standalone function to execute a specific -code action (see -[joaotavora/eglot#411](https://github.com/joaotavora/eglot/issues/411)), nor an -option to organize imports as a `before-save-hook` (see -[joaotavora/eglot#574](https://github.com/joaotavora/eglot/issues/574)). In the -meantime, see those issues for discussion and possible workarounds. 
+To automatically organize imports before saving, add a hook: + +```elisp +(add-hook 'before-save-hook + (lambda () + (call-interactively 'eglot-code-action-organize-imports)) + nil t) +``` ## Troubleshooting diff --git a/gopls/doc/generate.go b/gopls/doc/generate.go index 34034fb4e58..595c19a2bdf 100644 --- a/gopls/doc/generate.go +++ b/gopls/doc/generate.go @@ -32,11 +32,12 @@ import ( "github.com/jba/printsrc" "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/packages" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/command/commandmeta" - "golang.org/x/tools/gopls/internal/lsp/mod" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/mod" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/protocol/command/commandmeta" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/safetoken" ) func main() { @@ -52,12 +53,12 @@ func doMain(write bool) (bool, error) { return false, err } - sourceDir, err := pkgDir("golang.org/x/tools/gopls/internal/lsp/source") + settingsDir, err := pkgDir("golang.org/x/tools/gopls/internal/settings") if err != nil { return false, err } - if ok, err := rewriteFile(filepath.Join(sourceDir, "api_json.go"), api, write, rewriteAPI); !ok || err != nil { + if ok, err := rewriteFile(filepath.Join(settingsDir, "api_json.go"), api, write, rewriteAPI); !ok || err != nil { return ok, err } @@ -95,24 +96,25 @@ func pkgDir(pkgPath string) (string, error) { return strings.TrimSpace(string(out)), nil } -func loadAPI() (*source.APIJSON, error) { +func loadAPI() (*settings.APIJSON, error) { pkgs, err := packages.Load( &packages.Config{ Mode: packages.NeedTypes | packages.NeedTypesInfo | packages.NeedSyntax | packages.NeedDeps, }, - "golang.org/x/tools/gopls/internal/lsp/source", + 
"golang.org/x/tools/gopls/internal/settings", ) if err != nil { return nil, err } pkg := pkgs[0] - api := &source.APIJSON{ - Options: map[string][]*source.OptionJSON{}, + defaults := settings.DefaultOptions() + api := &settings.APIJSON{ + Options: map[string][]*settings.OptionJSON{}, + Analyzers: loadAnalyzers(defaults.DefaultAnalyzers), // no staticcheck analyzers } - defaults := source.DefaultOptions() - api.Commands, err = loadCommands(pkg) + api.Commands, err = loadCommands() if err != nil { return nil, err } @@ -122,15 +124,7 @@ func loadAPI() (*source.APIJSON, error) { for _, c := range api.Commands { c.Command = command.ID(c.Command) } - for _, m := range []map[string]*source.Analyzer{ - defaults.DefaultAnalyzers, - defaults.TypeErrorAnalyzers, - defaults.ConvenienceAnalyzers, - // Don't yet add staticcheck analyzers. - } { - api.Analyzers = append(api.Analyzers, loadAnalyzers(m)...) - } - api.Hints = loadHints(source.AllInlayHints) + api.Hints = loadHints(golang.AllInlayHints) for _, category := range []reflect.Value{ reflect.ValueOf(defaults.UserOptions), } { @@ -152,7 +146,7 @@ func loadAPI() (*source.APIJSON, error) { switch opt.Name { case "analyses": for _, a := range api.Analyzers { - opt.EnumKeys.Keys = append(opt.EnumKeys.Keys, source.EnumKey{ + opt.EnumKeys.Keys = append(opt.EnumKeys.Keys, settings.EnumKey{ Name: fmt.Sprintf("%q", a.Name), Doc: a.Doc, Default: strconv.FormatBool(a.Default), @@ -169,7 +163,7 @@ func loadAPI() (*source.APIJSON, error) { if err != nil { return nil, err } - opt.EnumKeys.Keys = append(opt.EnumKeys.Keys, source.EnumKey{ + opt.EnumKeys.Keys = append(opt.EnumKeys.Keys, settings.EnumKey{ Name: fmt.Sprintf("%q", l.Lens), Doc: l.Doc, Default: def, @@ -177,7 +171,7 @@ func loadAPI() (*source.APIJSON, error) { } case "hints": for _, a := range api.Hints { - opt.EnumKeys.Keys = append(opt.EnumKeys.Keys, source.EnumKey{ + opt.EnumKeys.Keys = append(opt.EnumKeys.Keys, settings.EnumKey{ Name: fmt.Sprintf("%q", a.Name), Doc: a.Doc, 
Default: strconv.FormatBool(a.Default), @@ -189,7 +183,7 @@ func loadAPI() (*source.APIJSON, error) { return api, nil } -func loadOptions(category reflect.Value, optsType types.Object, pkg *packages.Package, hierarchy string) ([]*source.OptionJSON, error) { +func loadOptions(category reflect.Value, optsType types.Object, pkg *packages.Package, hierarchy string) ([]*settings.OptionJSON, error) { file, err := fileForPos(pkg, optsType.Pos()) if err != nil { return nil, err @@ -200,7 +194,7 @@ func loadOptions(category reflect.Value, optsType types.Object, pkg *packages.Pa return nil, err } - var opts []*source.OptionJSON + var opts []*settings.OptionJSON optsStruct := optsType.Type().Underlying().(*types.Struct) for i := 0; i < optsStruct.NumFields(); i++ { // The types field gives us the type. @@ -247,8 +241,8 @@ func loadOptions(category reflect.Value, optsType types.Object, pkg *packages.Pa } name := lowerFirst(typesField.Name()) - var enumKeys source.EnumKeys - if m, ok := typesField.Type().(*types.Map); ok { + var enumKeys settings.EnumKeys + if m, ok := typesField.Type().Underlying().(*types.Map); ok { e, ok := enums[m.Key()] if ok { typ = strings.Replace(typ, m.Key().String(), m.Key().Underlying().String(), 1) @@ -269,7 +263,7 @@ func loadOptions(category reflect.Value, optsType types.Object, pkg *packages.Pa } status := reflectStructField.Tag.Get("status") - opts = append(opts, &source.OptionJSON{ + opts = append(opts, &settings.OptionJSON{ Name: name, Type: typ, Doc: lowerFirst(astField.Doc.Text()), @@ -283,8 +277,8 @@ func loadOptions(category reflect.Value, optsType types.Object, pkg *packages.Pa return opts, nil } -func loadEnums(pkg *packages.Package) (map[types.Type][]source.EnumValue, error) { - enums := map[types.Type][]source.EnumValue{} +func loadEnums(pkg *packages.Package) (map[types.Type][]settings.EnumValue, error) { + enums := map[types.Type][]settings.EnumValue{} for _, name := range pkg.Types.Scope().Names() { obj := 
pkg.Types.Scope().Lookup(name) cnst, ok := obj.(*types.Const) @@ -299,7 +293,7 @@ func loadEnums(pkg *packages.Package) (map[types.Type][]source.EnumValue, error) spec := path[1].(*ast.ValueSpec) value := cnst.Val().ExactString() doc := valueDoc(cnst.Name(), value, spec.Doc.Text()) - v := source.EnumValue{ + v := settings.EnumValue{ Value: value, Doc: doc, } @@ -308,18 +302,18 @@ func loadEnums(pkg *packages.Package) (map[types.Type][]source.EnumValue, error) return enums, nil } -func collectEnumKeys(name string, m *types.Map, reflectField reflect.Value, enumValues []source.EnumValue) (*source.EnumKeys, error) { +func collectEnumKeys(name string, m *types.Map, reflectField reflect.Value, enumValues []settings.EnumValue) (*settings.EnumKeys, error) { // Make sure the value type gets set for analyses and codelenses // too. if len(enumValues) == 0 && !hardcodedEnumKeys(name) { return nil, nil } - keys := &source.EnumKeys{ + keys := &settings.EnumKeys{ ValueType: m.Elem().String(), } // We can get default values for enum -> bool maps. 
var isEnumBoolMap bool - if basic, ok := m.Elem().(*types.Basic); ok && basic.Kind() == types.Bool { + if basic, ok := m.Elem().Underlying().(*types.Basic); ok && basic.Kind() == types.Bool { isEnumBoolMap = true } for _, v := range enumValues { @@ -331,7 +325,7 @@ func collectEnumKeys(name string, m *types.Map, reflectField reflect.Value, enum return nil, err } } - keys.Keys = append(keys.Keys, source.EnumKey{ + keys.Keys = append(keys.Keys, settings.EnumKey{ Name: v.Value, Doc: v.Doc, Default: def, @@ -409,8 +403,8 @@ func valueDoc(name, value, doc string) string { return fmt.Sprintf("`%s`: %s", value, doc) } -func loadCommands(pkg *packages.Package) ([]*source.CommandJSON, error) { - var commands []*source.CommandJSON +func loadCommands() ([]*settings.CommandJSON, error) { + var commands []*settings.CommandJSON _, cmds, err := commandmeta.Load() if err != nil { @@ -418,7 +412,7 @@ func loadCommands(pkg *packages.Package) ([]*source.CommandJSON, error) { } // Parse the objects it contains. 
for _, cmd := range cmds { - cmdjson := &source.CommandJSON{ + cmdjson := &settings.CommandJSON{ Command: cmd.Name, Title: cmd.Title, Doc: cmd.Doc, @@ -485,9 +479,9 @@ func structDoc(fields []*commandmeta.Field, level int) string { return b.String() } -func loadLenses(commands []*source.CommandJSON) []*source.LensJSON { +func loadLenses(commands []*settings.CommandJSON) []*settings.LensJSON { all := map[command.Command]struct{}{} - for k := range source.LensFuncs() { + for k := range golang.LensFuncs() { all[k] = struct{}{} } for k := range mod.LensFuncs() { @@ -497,11 +491,11 @@ func loadLenses(commands []*source.CommandJSON) []*source.LensJSON { all[k] = struct{}{} } - var lenses []*source.LensJSON + var lenses []*settings.LensJSON for _, cmd := range commands { if _, ok := all[command.Command(cmd.Command)]; ok { - lenses = append(lenses, &source.LensJSON{ + lenses = append(lenses, &settings.LensJSON{ Lens: cmd.Command, Title: cmd.Title, Doc: cmd.Doc, @@ -511,16 +505,16 @@ func loadLenses(commands []*source.CommandJSON) []*source.LensJSON { return lenses } -func loadAnalyzers(m map[string]*source.Analyzer) []*source.AnalyzerJSON { +func loadAnalyzers(m map[string]*settings.Analyzer) []*settings.AnalyzerJSON { var sorted []string for _, a := range m { sorted = append(sorted, a.Analyzer.Name) } sort.Strings(sorted) - var json []*source.AnalyzerJSON + var json []*settings.AnalyzerJSON for _, name := range sorted { a := m[name] - json = append(json, &source.AnalyzerJSON{ + json = append(json, &settings.AnalyzerJSON{ Name: a.Analyzer.Name, Doc: a.Analyzer.Doc, URL: a.Analyzer.URL, @@ -530,16 +524,16 @@ func loadAnalyzers(m map[string]*source.Analyzer) []*source.AnalyzerJSON { return json } -func loadHints(m map[string]*source.Hint) []*source.HintJSON { +func loadHints(m map[string]*golang.Hint) []*settings.HintJSON { var sorted []string for _, h := range m { sorted = append(sorted, h.Name) } sort.Strings(sorted) - var json []*source.HintJSON + var json 
[]*settings.HintJSON for _, name := range sorted { h := m[name] - json = append(json, &source.HintJSON{ + json = append(json, &settings.HintJSON{ Name: h.Name, Doc: h.Doc, }) @@ -571,7 +565,7 @@ func fileForPos(pkg *packages.Package, pos token.Pos) (*ast.File, error) { return nil, fmt.Errorf("no file for pos %v", pos) } -func rewriteFile(file string, api *source.APIJSON, write bool, rewrite func([]byte, *source.APIJSON) ([]byte, error)) (bool, error) { +func rewriteFile(file string, api *settings.APIJSON, write bool, rewrite func([]byte, *settings.APIJSON) ([]byte, error)) (bool, error) { old, err := os.ReadFile(file) if err != nil { return false, err @@ -593,10 +587,10 @@ func rewriteFile(file string, api *source.APIJSON, write bool, rewrite func([]by return true, nil } -func rewriteAPI(_ []byte, api *source.APIJSON) ([]byte, error) { +func rewriteAPI(_ []byte, api *settings.APIJSON) ([]byte, error) { var buf bytes.Buffer - fmt.Fprintf(&buf, "// Code generated by \"golang.org/x/tools/gopls/doc/generate\"; DO NOT EDIT.\n\npackage source\n\nvar GeneratedAPIJSON = ") - if err := printsrc.NewPrinter("golang.org/x/tools/gopls/internal/lsp/source").Fprint(&buf, api); err != nil { + fmt.Fprintf(&buf, "// Code generated by \"golang.org/x/tools/gopls/doc/generate\"; DO NOT EDIT.\n\npackage settings\n\nvar GeneratedAPIJSON = ") + if err := printsrc.NewPrinter("golang.org/x/tools/gopls/internal/settings").Fprint(&buf, api); err != nil { return nil, err } return format.Source(buf.Bytes()) @@ -606,10 +600,10 @@ type optionsGroup struct { title string final string level int - options []*source.OptionJSON + options []*settings.OptionJSON } -func rewriteSettings(doc []byte, api *source.APIJSON) ([]byte, error) { +func rewriteSettings(doc []byte, api *settings.APIJSON) ([]byte, error) { result := doc for category, opts := range api.Options { groups := collectGroups(opts) @@ -648,8 +642,8 @@ func rewriteSettings(doc []byte, api *source.APIJSON) ([]byte, error) { return 
replaceSection(result, "Lenses", section.Bytes()) } -func collectGroups(opts []*source.OptionJSON) []optionsGroup { - optsByHierarchy := map[string][]*source.OptionJSON{} +func collectGroups(opts []*settings.OptionJSON) []optionsGroup { + optsByHierarchy := map[string][]*settings.OptionJSON{} for _, opt := range opts { optsByHierarchy[opt.Hierarchy] = append(optsByHierarchy[opt.Hierarchy], opt) } @@ -730,12 +724,12 @@ func capitalize(s string) string { func strMultiply(str string, count int) string { var result string for i := 0; i < count; i++ { - result += string(str) + result += str } return result } -func rewriteCommands(doc []byte, api *source.APIJSON) ([]byte, error) { +func rewriteCommands(doc []byte, api *settings.APIJSON) ([]byte, error) { section := bytes.NewBuffer(nil) for _, command := range api.Commands { command.Write(section) @@ -743,11 +737,14 @@ func rewriteCommands(doc []byte, api *source.APIJSON) ([]byte, error) { return replaceSection(doc, "Commands", section.Bytes()) } -func rewriteAnalyzers(doc []byte, api *source.APIJSON) ([]byte, error) { +func rewriteAnalyzers(doc []byte, api *settings.APIJSON) ([]byte, error) { section := bytes.NewBuffer(nil) for _, analyzer := range api.Analyzers { fmt.Fprintf(section, "## **%v**\n\n", analyzer.Name) - fmt.Fprintf(section, "%s\n\n", analyzer.Doc) + fmt.Fprintf(section, "%s: %s\n\n", analyzer.Name, analyzer.Doc) + if analyzer.URL != "" { + fmt.Fprintf(section, "[Full documentation](%s)\n\n", analyzer.URL) + } switch analyzer.Default { case true: fmt.Fprintf(section, "**Enabled by default.**\n\n") @@ -758,7 +755,7 @@ func rewriteAnalyzers(doc []byte, api *source.APIJSON) ([]byte, error) { return replaceSection(doc, "Analyzers", section.Bytes()) } -func rewriteInlayHints(doc []byte, api *source.APIJSON) ([]byte, error) { +func rewriteInlayHints(doc []byte, api *settings.APIJSON) ([]byte, error) { section := bytes.NewBuffer(nil) for _, hint := range api.Hints { fmt.Fprintf(section, "## **%v**\n\n", hint.Name) 
diff --git a/gopls/doc/helix.md b/gopls/doc/helix.md new file mode 100644 index 00000000000..83f923de923 --- /dev/null +++ b/gopls/doc/helix.md @@ -0,0 +1,51 @@ +# Helix + +Configuring `gopls` to work with Helix is rather straightforward. Install `gopls`, and then add it to the `PATH` variable. If it is in the `PATH` variable, Helix will be able to detect it automatically. + +The documentation explaining how to install the default language servers for Helix can be found [here](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers) + +## Installing `gopls` + +The first step is to install `gopls` on your machine. +You can follow installation instructions [here](https://github.com/golang/tools/tree/master/gopls#installation). + +## Setting your path to include `gopls` + +Set your `PATH` environment variable to point to `gopls`. +If you used `go install` to download `gopls`, it should be in `$GOPATH/bin`. +If you don't have `GOPATH` set, you can use `go env GOPATH` to find it. + +## Additional information + +You can find more information about how to set up the LSP formatter [here](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers#autoformatting). + +It is possible to use `hx --health go` to see that the language server is properly set up. + +### Configuration + +The settings for `gopls` can be configured in the `languages.toml` file. +The official Helix documentation for this can be found [here](https://docs.helix-editor.com/languages.html) + +Configuration pertaining to `gopls` should be in the table `language-server.gopls`. + +#### How to set flags + +To set flags, add them to the `args` array in the `language-server.gopls` section of the `languages.toml` file. + +#### How to set LSP configuration + +Configuration options can be set in the `language-server.gopls.config` section of the `languages.toml` file, or in the `config` key of the `language-server.gopls` section of the `languages.toml` file. 
+ +#### A minimal config example + +In the `~/.config/helix/languages.toml` file, the following snippet would set up `gopls` with a logfile located at `/tmp/gopls.log` and enable staticcheck. + +```toml +[language-server.gopls] +command = "gopls" +args = ["-logfile=/tmp/gopls.log", "serve"] +[language-server.gopls.config] +"ui.diagnostic.staticcheck" = true +``` + + diff --git a/gopls/doc/release/README b/gopls/doc/release/README new file mode 100644 index 00000000000..e489c33f183 --- /dev/null +++ b/gopls/doc/release/README @@ -0,0 +1,10 @@ +This directory contains the draft release notes for each upcoming release. + +Be sure to update the file for the forthcoming release in the same CL +that you add new features or fix noteworthy bugs. + +See https://github.com/golang/tools/releases for all past releases. + +Tip: when reviewing edits to markdown files in Gerrit, to see the +rendered form, click the "Open in Code Search" link (magnifying glass +in blue square) then click "View in > gitiles" (shortcut: `v g`). diff --git a/gopls/doc/release/v0.16.0.md b/gopls/doc/release/v0.16.0.md new file mode 100644 index 00000000000..3b3712dae9d --- /dev/null +++ b/gopls/doc/release/v0.16.0.md @@ -0,0 +1,93 @@ +gopls/v0.16.0 + +``` +go install golang.org/x/tools/gopls@v0.16.0 +``` + +## New features + +### Integrated documentation viewer + +Gopls now offers a "View package documentation" code action that opens +a local web page displaying the generated documentation for the +current Go package in a form similar to https://pkg.go.dev. +The page will be initially scrolled to the documentation for the +declaration containing the cursor. +Use this feature to preview the marked-up documentation as you prepare API +changes, or to read the documentation for locally edited packages, +even ones that have not yet been saved. Reload the page after an edit +to see updated documentation. + +TODO: demo in VS Code. 
+ +Clicking on the source-code link associated with a declaration will +cause your editor to navigate to the declaration. + +TODO: demo of source linking. + +Editor support: + +- VS Code: use the `Source action > View package documentation` menu item. + Note: source links navigate the editor but don't yet raise the window yet. + Please upvote https://github.com/microsoft/vscode/issues/208093 and + https://github.com/microsoft/vscode/issues/207634 (temporarily closed). + +- Emacs: requires eglot v1.17. You may find this `go-doc` function a + useful shortcut: + +```lisp +(eglot--code-action eglot-code-action-doc "source.doc") + +(defalias 'go-doc #'eglot-code-action-doc + "View documentation for the current Go package.") +``` + +- TODO: test in vim, neovim, sublime, helix. + +### `unusedwrite` analyzer + +The new +[unusedwrite](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedwrite) +analyzer reports assignments, often to fields of structs, that have no +effect because, for example, the struct is never used again: + +```go +func scheme(host string) string { + u := &url.URL{ + Host: host, // "unused write to field Host" (no need to construct a URL) + Scheme: "https:", + } + return u.Scheme +} +``` + +This is at best an indication that the code is unnecessarily complex +(for instance, some dead code could be removed), but often indicates a +bug, as in this example: + +```go +type S struct { x int } + +func (s S) set(x int) { + s.x = x // "unused write to field x" (s should be a *S pointer) +} +``` + + +### Hover shows size/offset info + +Hovering over the identifier that declares a type or struct field now +displays the size information for the type, and the offset information +for the field. In addition, it reports the percentage of wasted space +due to suboptimal ordering of struct fields, if this figure is 20% or +higher. This information may be helpful when making space +optimizations to your data structures, or when reading assembly code. 
+ +TODO: example hover image. + +## Bugs fixed + +## Thank you to our contributors! + +@guodongli-google for the `unusedwrite` analyzer. +TODO: they're a xoogler; is there a more current GH account? \ No newline at end of file diff --git a/gopls/doc/settings.md b/gopls/doc/settings.md index 47cd211d898..9f692cf6848 100644 --- a/gopls/doc/settings.md +++ b/gopls/doc/settings.md @@ -1,6 +1,6 @@ # Settings - + This document describes the global settings for `gopls` inside the editor. The settings block will be called `"gopls"` and contains a collection of @@ -85,34 +85,26 @@ is the part of the file name after the final dot.) Default: `[]`. -#### **memoryMode** *enum* +#### **memoryMode** *string* **This setting is experimental and may be deleted.** -memoryMode controls the tradeoff `gopls` makes between memory usage and -correctness. +obsolete, no effect -Values other than `Normal` are untested and may break in surprising ways. - -Must be one of: - -* `"DegradeClosed"`: In DegradeClosed mode, `gopls` will collect less information about -packages without open files. As a result, features like Find -References and Rename will miss results in such packages. -* `"Normal"` - -Default: `"Normal"`. +Default: `""`. #### **expandWorkspaceToModule** *bool* **This setting is experimental and may be deleted.** -expandWorkspaceToModule instructs `gopls` to adjust the scope of the -workspace to find the best available module root. `gopls` first looks for -a go.mod file in any parent directory of the workspace folder, expanding -the scope to that directory if it exists. If no viable parent directory is -found, gopls will check if there is exactly one child directory containing -a go.mod file, narrowing the scope to that directory if it exists. +expandWorkspaceToModule determines which packages are considered +"workspace packages" when the workspace is using modules. + +Workspace packages affect the scope of workspace-wide operations. 
Notably, +gopls diagnoses all packages considered to be part of the workspace after +every keystroke, so by setting "ExpandWorkspaceToModule" to false, and +opening a nested workspace directory, you can reduce the amount of work +gopls has to do to keep your workspace up to date. Default: `true`. @@ -294,7 +286,7 @@ Example Usage: ... "analyses": { "unreachable": false, // Disable the unreachable analyzer. - "unusedparams": true // Enable the unusedparams analyzer. + "unusedvariable": true // Enable the unusedvariable analyzer. } ... ``` @@ -553,7 +545,7 @@ Runs `go generate` for a given directory. Identifier: `regenerate_cgo` Regenerates cgo definitions. -### **Run vulncheck.** +### **Run vulncheck** Identifier: `run_govulncheck` diff --git a/gopls/doc/workspace.md b/gopls/doc/workspace.md index 4ff9994f939..cb26b3dcd43 100644 --- a/gopls/doc/workspace.md +++ b/gopls/doc/workspace.md @@ -1,101 +1,139 @@ # Setting up your workspace -`gopls` supports both Go module and GOPATH modes. However, it needs a defined -scope in which language features like references, rename, and implementation -should operate. - -The following options are available for configuring this scope: - -## Module mode - -### One module - -If you are working with a single module, you can open the module root (the -directory containing the `go.mod` file), a subdirectory within the module, -or a parent directory containing the module. - -**Note**: If you open a parent directory containing a module, it must **only** -contain that single module. Otherwise, you are working with multiple modules. - -### Multiple modules - -Gopls has several alternatives for working on multiple modules simultaneously, -described below. Starting with Go 1.18, Go workspaces are the preferred solution. - -#### Go workspaces (Go 1.18+) - -Starting with Go 1.18, the `go` command has native support for multi-module -workspaces, via [`go.work`](https://go.dev/ref/mod#workspaces) files. 
These -files are recognized by gopls starting with `gopls@v0.8.0`. - -The easiest way to work on multiple modules in Go 1.18 and later is therefore -to create a `go.work` file containing the modules you wish to work on, and set -your workspace root to the directory containing the `go.work` file. - -For example, suppose this repo is checked out into the `$WORK/tools` directory. -We can work on both `golang.org/x/tools` and `golang.org/x/tools/gopls` -simultaneously by creating a `go.work` file using `go work init`, followed by -`go work use MODULE_DIRECTORIES...` to add directories containing `go.mod` files to the -workspace: +In the language server protocol, a "workspace" consists of a folder along with +per-folder configuration. Some LSP clients such as VS Code allow configuring +workspaces explicitly, while others do so automatically by looking for special +files defining a workspace root (such as a `.git` directory or `go.mod` file). + +In order to function, gopls needs a defined scope in which language features +like references, rename, and implementation should operate. Put differently, +gopls needs to infer from the LSP workspace which `go build` invocations you +would use to build your workspace, including the working directory, +environment, and build flags. + +In the past, it could be tricky to set up your workspace so that gopls would +infer the correct build information. It required opening the correct directory +or using a `go.work` file to tell gopls about the modules you're working on, +and configuring the correct operating system and architecture in advance. +When this didn't work as expected, gopls would often fail in mysterious +ways--the dreaded "No packages found" error. + +Starting with gopls v0.15.0, workspace configuration is much simpler, and gopls +will typically work when you open a Go file anywhere in your workspace. If it +isn't working for you, or if you want to better understand how gopls models +your workspace, please read on. 
+ +## Workspace builds + +Starting with gopls v0.15.0, gopls will guess the builds you are working on +based on the set of open files. When you open a file in a workspace folder, +gopls checks whether the file is contained in a module, `go.work` workspace, or +GOPATH directory, and configures the build accordingly. Additionally, if you +open a file that is constrained to a different operating system or +architecture, for example opening `foo_windows.go` when working on Linux, gopls +will create a scope with `GOOS` and `GOARCH` set to a value that matches the +file. + +For example, suppose we had a repository with three modules: `moda`, `modb`, +and `modc`, and a `go.work` file using modules `moda` and `modb`. If we open +the files `moda/a.go`, `modb/b.go`, `moda/a_windows.go`, and `modc/c.go`, gopls +will automatically create three builds: + +![Zero Config gopls](zeroconfig.png) + +This allows gopls to _just work_ when you open a Go file, but it does come with +several caveats: + +- It causes gopls to do more work, since it is now tracking three builds + instead of one. However, the recent + [scalability redesign](https://go.dev/blog/gopls-scalability) + allows much of this work to be avoided through efficient caching. +- For operations invoked from a given file, such as "References" + or "Implementations", gopls executes the operation in + _the default build for that file_. For example, finding references to + a symbol `S` from `foo_linux.go` will return references from the Linux build, + and finding references to the same symbol `S` from `foo_windows.go` will + return references from the Windows build. Gopls searches the default build + for the file, but it doesn't search all the other possible builds (even + though that would be nice) because it is liable to be too expensive. + Issues [#65757](https://go.dev/issue/65757) and + [#65755](https://go.dev/issue/65755) propose improvements to this behavior. 
+- When selecting a `GOOS/GOARCH` combination to match a build-constrained file, + gopls will choose the first matching combination from + [this list](https://cs.opensource.google/go/x/tools/+/master:gopls/internal/cache/port.go;l=30;drc=f872b3d6f05822d290bc7bdd29db090fd9d89f5c). + In some cases, that may be surprising. +- When working in a `GOOS/GOARCH` constrained file that does not match your + default toolchain, `CGO_ENABLED=0` is implicitly set, since a C toolchain for + that target is unlikely to be available. This means that gopls will not + work in files including `import "C"`. Issue + [#65758](https://go.dev/issue/65758) may lead to improvements in this + behavior. +- Gopls is currently unable to guess build flags that include arbitrary + user-defined build constraints, such as a file with the build directive + `//go:build mytag`. Issue [#65089](https://go.dev/issue/65089) proposes + a heuristic by which gopls could handle this automatically. + +Please provide feedback on this behavior by upvoting or commenting the issues +mentioned above, or opening a [new issue](https://go.dev/issue/new) for other +improvements you'd like to see. + +## When to use a `go.work` file for development + +Starting with Go 1.18, the `go` command has built-in support for multi-module +workspaces specified by [`go.work`](https://go.dev/ref/mod#workspaces) files. +Gopls will recognize these files if they are present in your workspace. + +Use a `go.work` file when: + +- you want to work on multiple modules simultaneously in a single logical + build, for example if you want changes to one module to be reflected in + another. +- you want to improve gopls' memory usage or performance by reducing the number + of builds it must track. +- you want gopls to know which modules you are working on in a multi-module + workspace, without opening any files. For example, it may be convenient to use + `workspace/symbol` queries before any files are open. 
+- you are using gopls v0.14.2 or earlier, and want to work on multiple + modules. + +For example, suppose this repo is checked out into the `$WORK/tools` directory, +and [`x/mod`](https://pkg.go.dev/golang.org/x/mod) is checked out into +`$WORK/mod`, and you are working on a new `x/mod` API for editing `go.mod` +files that you want to simultaneously integrate into gopls. + +You can work on both `golang.org/x/tools/gopls` and `golang.org/x/mod` +simultaneously by creating a `go.work` file: ```sh cd $WORK go work init -go work use ./tools/ ./tools/gopls/ +go work use tools/gopls mod ``` -...followed by opening the `$WORK` directory in our editor. - -#### DEPRECATED: Experimental workspace module (Go 1.17 and earlier) - -**This feature is deprecated and will be removed in future versions of gopls. -Please see [issue #52897](https://go.dev/issue/52897) for additional -information.** - -With earlier versions of Go, `gopls` can simulate multi-module workspaces by -creating a synthetic module requiring the modules in the workspace root. -See [the design document](https://github.com/golang/proposal/blob/master/design/37720-gopls-workspaces.md) -for more information. - -This feature is experimental, and will eventually be removed once `go.work` -files are accepted by all supported Go versions. - -You can enable this feature by configuring the -[experimentalWorkspaceModule](settings.md#experimentalworkspacemodule-bool) -setting. - -#### Multiple workspace folders - -If neither of the above solutions work, and your editor allows configuring the -set of -["workspace folders"](https://microsoft.github.io/language-server-protocol/specifications/specification-3-17/#workspaceFolder) -used during your LSP session, you can still work on multiple modules by adding -a workspace folder at each module root (the locations of `go.mod` files). This -means that each module has its own scope, and features will not work across -modules. 
- -In VS Code, you can create a workspace folder by setting up a -[multi-root workspace](https://code.visualstudio.com/docs/editor/multi-root-workspaces). -View the [documentation for your editor plugin](../README.md#editor) to learn how to -configure a workspace folder in your editor. - -### GOPATH mode +then opening the `$WORK` directory in your editor. -When opening a directory within your GOPATH, the workspace scope will be just -that directory. +## When to manually configure `GOOS`, `GOARCH`, or `-tags` -### At your own risk +As described in the first section, gopls v0.15.0 and later will try to +configure a new build scope automatically when you open a file that doesn't +match the system default operating system (`GOOS`) or architecture (`GOARCH`). -Some users or companies may have projects that encompass one `$GOPATH`. If you -open your entire `$GOPATH` or `$GOPATH/src` folder, the workspace scope will be -your entire `GOPATH`. If your GOPATH is large, `gopls` to be very slow to start -because it will try to find all of the Go files in the directory you have -opened. It will then load all of the files it has found. +However, per the caveats listed in that section, this automatic behavior comes +with limitations. Customize your gopls environment by setting `GOOS` or +`GOARCH` in your +[`"build.env"`](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#env-mapstringstring) +or `-tags=...` in your" +["build.buildFlags"](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#buildflags-string) +when: -To work around this case, you can create a new `$GOPATH` that contains only the -packages you want to work on. +- You want to modify the default build environment. +- Gopls is not guessing the `GOOS/GOARCH` combination you want to use for + cross platform development. +- You need to work on a file that is constrained by a user-defined build tags, + such as the build directive `//go:build mytag`. 
---- +## GOPATH mode -If you have additional use cases that are not mentioned above, please -[file a new issue](https://github.com/golang/go/issues/new). +When opening a directory within a `GOPATH` directory, the workspace scope will +be just that directory and all directories contained within it. Note that +opening a large GOPATH directory can make gopls very slow to start. diff --git a/gopls/doc/zeroconfig.png b/gopls/doc/zeroconfig.png new file mode 100644 index 00000000000..49d4f8ead74 Binary files /dev/null and b/gopls/doc/zeroconfig.png differ diff --git a/gopls/go.mod b/gopls/go.mod index 093fc01be7c..1c693c141c5 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -1,29 +1,29 @@ module golang.org/x/tools/gopls -go 1.18 +go 1.19 require ( - github.com/google/go-cmp v0.5.9 + github.com/google/go-cmp v0.6.0 github.com/jba/printsrc v0.2.2 - github.com/jba/templatecheck v0.6.0 - github.com/sergi/go-diff v1.1.0 - golang.org/x/mod v0.14.0 - golang.org/x/sync v0.5.0 - golang.org/x/sys v0.14.0 - golang.org/x/telemetry v0.0.0-20231011160506-788d5629a052 + github.com/jba/templatecheck v0.7.0 + golang.org/x/mod v0.17.0 + golang.org/x/sync v0.7.0 + golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2 golang.org/x/text v0.14.0 - golang.org/x/tools v0.13.1-0.20230920233436-f9b8da7b22be - golang.org/x/vuln v1.0.1 + golang.org/x/tools v0.18.0 + golang.org/x/vuln v1.0.4 gopkg.in/yaml.v3 v3.0.1 - honnef.co/go/tools v0.4.5 - mvdan.cc/gofumpt v0.4.0 - mvdan.cc/xurls/v2 v2.4.0 + honnef.co/go/tools v0.4.7 + mvdan.cc/gofumpt v0.6.0 + mvdan.cc/xurls/v2 v2.5.0 ) require ( github.com/BurntSushi/toml v1.2.1 // indirect github.com/google/safehtml v0.1.0 // indirect golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338 // indirect + golang.org/x/sys v0.19.0 // indirect + gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect ) diff --git a/gopls/go.sum b/gopls/go.sum index cdae287f16f..96968c5981f 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -1,72 +1,56 @@ 
github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak= github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/safehtml v0.0.2/go.mod h1:L4KWwDsUJdECRAEpZoBn3O64bQaywRscowZjJAzjHnU= +github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/safehtml v0.1.0 h1:EwLKo8qawTKfsi0orxcQAZzu07cICaBeFMegAU9eaT8= github.com/google/safehtml v0.1.0/go.mod h1:L4KWwDsUJdECRAEpZoBn3O64bQaywRscowZjJAzjHnU= github.com/jba/printsrc v0.2.2 h1:9OHK51UT+/iMAEBlQIIXW04qvKyF3/vvLuwW/hL8tDU= github.com/jba/printsrc v0.2.2/go.mod h1:1xULjw59sL0dPdWpDoVU06TIEO/Wnfv6AHRpiElTwYM= -github.com/jba/templatecheck v0.6.0 h1:SwM8C4hlK/YNLsdcXStfnHWE2HKkuTVwy5FKQHt5ro8= -github.com/jba/templatecheck v0.6.0/go.mod h1:/1k7EajoSErFI9GLHAsiIJEaNLt3ALKNw2TV7z2SYv4= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/jba/templatecheck v0.7.0 h1:wjTb/VhGgSFeim5zjWVePBdaMo28X74bGLSABZV+zIA= +github.com/jba/templatecheck 
v0.7.0/go.mod h1:n1Etw+Rrw1mDDD8dDRsEKTwMZsJ98EkktgNJC6wLUGo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o= -github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= -github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= -github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338 h1:2O2DON6y3XMJiQRAS1UWU+54aec2uopH3x7MAiqGW6Y= golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0= -golang.org/x/mod v0.14.0/go.mod 
h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= -golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= -golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/telemetry v0.0.0-20231011160506-788d5629a052 h1:1baVNneD/IRxmu8JQdBuki78zUqBtZxq8smZXQj0X2Y= -golang.org/x/telemetry v0.0.0-20231011160506-788d5629a052/go.mod h1:6p4ScoNeC2dhpQ1nSSMmkZ7mEj5JQUSCyc0uExBp5T4= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2 h1:IRJeR9r1pYWsHKTRe/IInb7lYvbBVIqOgsX/u0mbOWY= 
+golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= -golang.org/x/term v0.14.0/go.mod h1:TySc+nGkYR6qt8km8wUhuFRTVSMIX3XPR58y2lC8vww= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/vuln v1.0.1 h1:KUas02EjQK5LTuIx1OylBQdKKZ9jeugs+HiqO5HormU= -golang.org/x/vuln v1.0.1/go.mod h1:bb2hMwln/tqxg32BNY4CcxHWtHXuYa3SbIBmtsyjxtM= +golang.org/x/vuln v1.0.4 h1:SP0mPeg2PmGCu03V+61EcQiOjmpri2XijexKdzv8Z1I= +golang.org/x/vuln v1.0.4/go.mod h1:NbJdUQhX8jY++FtuhrXs2Eyx0yePo9pF7nPlIjo9aaQ= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I= -gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -honnef.co/go/tools v0.4.5 
h1:YGD4H+SuIOOqsyoLOpZDWcieM28W47/zRO7f+9V3nvo= -honnef.co/go/tools v0.4.5/go.mod h1:GUV+uIBCLpdf0/v6UhHHG/yzI/z6qPskBeQCjcNB96k= -mvdan.cc/gofumpt v0.4.0 h1:JVf4NN1mIpHogBj7ABpgOyZc65/UUOkKQFkoURsz4MM= -mvdan.cc/gofumpt v0.4.0/go.mod h1:PljLOHDeZqgS8opHRKLzp2It2VBuSdteAgqUfzMTxlQ= -mvdan.cc/xurls/v2 v2.4.0 h1:tzxjVAj+wSBmDcF6zBB7/myTy3gX9xvi8Tyr28AuQgc= -mvdan.cc/xurls/v2 v2.4.0/go.mod h1:+GEjq9uNjqs8LQfM9nVnM8rff0OQ5Iash5rzX+N1CSg= +honnef.co/go/tools v0.4.7 h1:9MDAWxMoSnB6QoSqiVr7P5mtkT9pOc1kSxchzPCnqJs= +honnef.co/go/tools v0.4.7/go.mod h1:+rnGS1THNh8zMwnd2oVOTL9QF6vmfyG6ZXBULae2uc0= +mvdan.cc/gofumpt v0.6.0 h1:G3QvahNDmpD+Aek/bNOLrFR2XC6ZAdo62dZu65gmwGo= +mvdan.cc/gofumpt v0.6.0/go.mod h1:4L0wf+kgIPZtcCWXynNS2e6bhmj73umwnuXSZarixzA= +mvdan.cc/xurls/v2 v2.5.0 h1:lyBNOm8Wo71UknhUs4QTFUNNMyxy2JEIaKKo0RWOh+8= +mvdan.cc/xurls/v2 v2.5.0/go.mod h1:yQgaGQ1rFtJUzkmKiHYSSfuQxqfYmd//X6PxvholpeE= diff --git a/gopls/internal/lsp/analysis/deprecated/deprecated.go b/gopls/internal/analysis/deprecated/deprecated.go similarity index 93% rename from gopls/internal/lsp/analysis/deprecated/deprecated.go rename to gopls/internal/analysis/deprecated/deprecated.go index 5f7354e4fa4..1a8c4c56766 100644 --- a/gopls/internal/lsp/analysis/deprecated/deprecated.go +++ b/gopls/internal/analysis/deprecated/deprecated.go @@ -2,7 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package deprecated defines an Analyzer that marks deprecated symbols and package imports. package deprecated import ( @@ -14,27 +13,25 @@ import ( "strconv" "strings" + _ "embed" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/analysisinternal" ) -// TODO(hyangah): use analysisutil.MustExtractDoc. 
-var doc = `check for use of deprecated identifiers - -The deprecated analyzer looks for deprecated symbols and package imports. - -See https://go.dev/wiki/Deprecated to learn about Go's convention -for documenting and signaling deprecated identifiers.` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "deprecated", - Doc: doc, + Doc: analysisinternal.MustExtractDoc(doc, "deprecated"), Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: checkDeprecated, FactTypes: []analysis.Fact{(*deprecationFact)(nil)}, RunDespiteErrors: true, + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/deprecated", } // checkDeprecated is a simplified copy of staticcheck.CheckDeprecated. @@ -69,8 +66,8 @@ func checkDeprecated(pass *analysis.Pass) (interface{}, error) { } obj := pass.TypesInfo.ObjectOf(sel.Sel) - if obj_, ok := obj.(*types.Func); ok { - obj = typeparams.OriginMethod(obj_) + if fn, ok := obj.(*types.Func); ok { + obj = fn.Origin() } if obj == nil || obj.Pkg() == nil { // skip invalid sel.Sel. 
diff --git a/gopls/internal/lsp/analysis/deprecated/deprecated_test.go b/gopls/internal/analysis/deprecated/deprecated_test.go similarity index 83% rename from gopls/internal/lsp/analysis/deprecated/deprecated_test.go rename to gopls/internal/analysis/deprecated/deprecated_test.go index 0242ef1fa09..89bf3bea252 100644 --- a/gopls/internal/lsp/analysis/deprecated/deprecated_test.go +++ b/gopls/internal/analysis/deprecated/deprecated_test.go @@ -8,11 +8,9 @@ import ( "testing" "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/internal/testenv" ) func Test(t *testing.T) { - testenv.NeedsGo1Point(t, 19) testdata := analysistest.TestData() analysistest.Run(t, testdata, Analyzer, "a") } diff --git a/gopls/internal/analysis/deprecated/doc.go b/gopls/internal/analysis/deprecated/doc.go new file mode 100644 index 00000000000..0d96b86b302 --- /dev/null +++ b/gopls/internal/analysis/deprecated/doc.go @@ -0,0 +1,16 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package deprecated defines an Analyzer that marks deprecated symbols and package imports. +// +// # Analyzer deprecated +// +// deprecated: check for use of deprecated identifiers +// +// The deprecated analyzer looks for deprecated symbols and package +// imports. +// +// See https://go.dev/wiki/Deprecated to learn about Go's convention +// for documenting and signaling deprecated identifiers. 
+package deprecated diff --git a/gopls/internal/lsp/analysis/deprecated/testdata/src/a/a.go b/gopls/internal/analysis/deprecated/testdata/src/a/a.go similarity index 100% rename from gopls/internal/lsp/analysis/deprecated/testdata/src/a/a.go rename to gopls/internal/analysis/deprecated/testdata/src/a/a.go diff --git a/gopls/internal/lsp/analysis/deprecated/testdata/src/a/a_test.go b/gopls/internal/analysis/deprecated/testdata/src/a/a_test.go similarity index 100% rename from gopls/internal/lsp/analysis/deprecated/testdata/src/a/a_test.go rename to gopls/internal/analysis/deprecated/testdata/src/a/a_test.go diff --git a/gopls/internal/analysis/embeddirective/doc.go b/gopls/internal/analysis/embeddirective/doc.go new file mode 100644 index 00000000000..bfed47f14f4 --- /dev/null +++ b/gopls/internal/analysis/embeddirective/doc.go @@ -0,0 +1,18 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package embeddirective defines an Analyzer that validates //go:embed directives. +// The analyzer defers fixes to its parent golang.Analyzer. +// +// # Analyzer embed +// +// embed: check //go:embed directive usage +// +// This analyzer checks that the embed package is imported if //go:embed +// directives are present, providing a suggested fix to add the import if +// it is missing. +// +// This analyzer also checks that //go:embed directives precede the +// declaration of a single variable. +package embeddirective diff --git a/gopls/internal/analysis/embeddirective/embeddirective.go b/gopls/internal/analysis/embeddirective/embeddirective.go new file mode 100644 index 00000000000..1b0b89711c2 --- /dev/null +++ b/gopls/internal/analysis/embeddirective/embeddirective.go @@ -0,0 +1,166 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package embeddirective + +import ( + _ "embed" + "go/ast" + "go/token" + "go/types" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/analysisinternal" +) + +//go:embed doc.go +var doc string + +var Analyzer = &analysis.Analyzer{ + Name: "embed", + Doc: analysisinternal.MustExtractDoc(doc, "embed"), + Run: run, + RunDespiteErrors: true, + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/embeddirective", +} + +const FixCategory = "addembedimport" // recognized by gopls ApplyFix + +func run(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files { + comments := embedDirectiveComments(f) + if len(comments) == 0 { + continue // nothing to check + } + + hasEmbedImport := false + for _, imp := range f.Imports { + if imp.Path.Value == `"embed"` { + hasEmbedImport = true + break + } + } + + for _, c := range comments { + pos, end := c.Pos(), c.Pos()+token.Pos(len("//go:embed")) + + if !hasEmbedImport { + pass.Report(analysis.Diagnostic{ + Pos: pos, + End: end, + Message: `must import "embed" when using go:embed directives`, + Category: FixCategory, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: `Add missing "embed" import`, + // No TextEdits => computed by a gopls command. 
+ }}, + }) + } + + var msg string + spec := nextVarSpec(c, f) + switch { + case spec == nil: + msg = `go:embed directives must precede a "var" declaration` + case len(spec.Names) != 1: + msg = "declarations following go:embed directives must define a single variable" + case len(spec.Values) > 0: + msg = "declarations following go:embed directives must not specify a value" + case !embeddableType(pass.TypesInfo.Defs[spec.Names[0]]): + msg = "declarations following go:embed directives must be of type string, []byte or embed.FS" + } + if msg != "" { + pass.Report(analysis.Diagnostic{ + Pos: pos, + End: end, + Message: msg, + }) + } + } + } + return nil, nil +} + +// embedDirectiveComments returns all comments in f that contains a //go:embed directive. +func embedDirectiveComments(f *ast.File) []*ast.Comment { + comments := []*ast.Comment{} + for _, cg := range f.Comments { + for _, c := range cg.List { + if strings.HasPrefix(c.Text, "//go:embed ") { + comments = append(comments, c) + } + } + } + return comments +} + +// nextVarSpec returns the ValueSpec for the variable declaration immediately following +// the go:embed comment, or nil if the next declaration is not a variable declaration. +func nextVarSpec(com *ast.Comment, f *ast.File) *ast.ValueSpec { + // Embed directives must be followed by a declaration of one variable with no value. + // There may be comments and empty lines between the directive and the declaration. + var nextDecl ast.Decl + for _, d := range f.Decls { + if com.End() < d.End() { + nextDecl = d + break + } + } + if nextDecl == nil || nextDecl.Pos() == token.NoPos { + return nil + } + decl, ok := nextDecl.(*ast.GenDecl) + if !ok { + return nil + } + if decl.Tok != token.VAR { + return nil + } + + // var declarations can be both freestanding and blocks (with parenthesis). + // Only the first variable spec following the directive is interesting. 
+ var nextSpec ast.Spec + for _, s := range decl.Specs { + if com.End() < s.End() { + nextSpec = s + break + } + } + if nextSpec == nil { + return nil + } + spec, ok := nextSpec.(*ast.ValueSpec) + if !ok { + // Invalid AST, but keep going. + return nil + } + return spec +} + +// embeddableType in go:embed directives are string, []byte or embed.FS. +func embeddableType(o types.Object) bool { + if o == nil { + return false + } + + // For embed.FS the underlying type is an implementation detail. + // As long as the named type resolves to embed.FS, it is OK. + if named, ok := aliases.Unalias(o.Type()).(*types.Named); ok { + obj := named.Obj() + if obj.Pkg() != nil && obj.Pkg().Path() == "embed" && obj.Name() == "FS" { + return true + } + } + + switch v := o.Type().Underlying().(type) { + case *types.Basic: + return types.Identical(v, types.Typ[types.String]) + case *types.Slice: + return types.Identical(v.Elem(), types.Typ[types.Byte]) + } + + return false +} diff --git a/gopls/internal/analysis/embeddirective/embeddirective_test.go b/gopls/internal/analysis/embeddirective/embeddirective_test.go new file mode 100644 index 00000000000..22e43af78ed --- /dev/null +++ b/gopls/internal/analysis/embeddirective/embeddirective_test.go @@ -0,0 +1,16 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package embeddirective + +import ( + "testing" + + "golang.org/x/tools/go/analysis/analysistest" +) + +func Test(t *testing.T) { + testdata := analysistest.TestData() + analysistest.RunWithSuggestedFixes(t, testdata, Analyzer, "a") +} diff --git a/gopls/internal/lsp/analysis/embeddirective/testdata/src/a/embedText b/gopls/internal/analysis/embeddirective/testdata/src/a/embedText similarity index 100% rename from gopls/internal/lsp/analysis/embeddirective/testdata/src/a/embedText rename to gopls/internal/analysis/embeddirective/testdata/src/a/embedText diff --git a/gopls/internal/lsp/analysis/embeddirective/testdata/src/a/import_missing.go b/gopls/internal/analysis/embeddirective/testdata/src/a/import_missing.go similarity index 100% rename from gopls/internal/lsp/analysis/embeddirective/testdata/src/a/import_missing.go rename to gopls/internal/analysis/embeddirective/testdata/src/a/import_missing.go diff --git a/gopls/internal/lsp/analysis/embeddirective/testdata/src/a/import_present.go b/gopls/internal/analysis/embeddirective/testdata/src/a/import_present.go similarity index 100% rename from gopls/internal/lsp/analysis/embeddirective/testdata/src/a/import_present.go rename to gopls/internal/analysis/embeddirective/testdata/src/a/import_present.go diff --git a/gopls/internal/lsp/analysis/embeddirective/testdata/src/a/import_present_go120.go b/gopls/internal/analysis/embeddirective/testdata/src/a/import_present_go120.go similarity index 100% rename from gopls/internal/lsp/analysis/embeddirective/testdata/src/a/import_present_go120.go rename to gopls/internal/analysis/embeddirective/testdata/src/a/import_present_go120.go diff --git a/gopls/internal/analysis/fillreturns/doc.go b/gopls/internal/analysis/fillreturns/doc.go new file mode 100644 index 00000000000..584aec47db9 --- /dev/null +++ b/gopls/internal/analysis/fillreturns/doc.go @@ -0,0 +1,27 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package fillreturns defines an Analyzer that will attempt to +// automatically fill in a return statement that has missing +// values with zero value elements. +// +// # Analyzer fillreturns +// +// fillreturns: suggest fixes for errors due to an incorrect number of return values +// +// This checker provides suggested fixes for type errors of the +// type "wrong number of return values (want %d, got %d)". For example: +// +// func m() (int, string, *bool, error) { +// return +// } +// +// will turn into +// +// func m() (int, string, *bool, error) { +// return 0, "", nil, nil +// } +// +// This functionality is similar to https://github.com/sqs/goreturns. +package fillreturns diff --git a/gopls/internal/lsp/analysis/fillreturns/fillreturns.go b/gopls/internal/analysis/fillreturns/fillreturns.go similarity index 89% rename from gopls/internal/lsp/analysis/fillreturns/fillreturns.go rename to gopls/internal/analysis/fillreturns/fillreturns.go index c8146df2dd0..cc584a70562 100644 --- a/gopls/internal/lsp/analysis/fillreturns/fillreturns.go +++ b/gopls/internal/analysis/fillreturns/fillreturns.go @@ -2,13 +2,11 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package fillreturns defines an Analyzer that will attempt to -// automatically fill in a return statement that has missing -// values with zero value elements. package fillreturns import ( "bytes" + _ "embed" "fmt" "go/ast" "go/format" @@ -20,30 +18,17 @@ import ( "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/fuzzy" - "golang.org/x/tools/internal/typeparams" ) -const Doc = `suggest fixes for errors due to an incorrect number of return values - -This checker provides suggested fixes for type errors of the -type "wrong number of return values (want %d, got %d)". 
For example: - func m() (int, string, *bool, error) { - return - } -will turn into - func m() (int, string, *bool, error) { - return 0, "", nil, nil - } - -This functionality is similar to https://github.com/sqs/goreturns. -` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "fillreturns", - Doc: Doc, - Requires: []*analysis.Analyzer{}, + Doc: analysisinternal.MustExtractDoc(doc, "fillreturns"), Run: run, RunDespiteErrors: true, + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/fillreturns", } func run(pass *analysis.Pass) (interface{}, error) { @@ -122,7 +107,7 @@ outer: // have 0 values. // TODO(rfindley): We should be able to handle this if the return // values are all concrete types. - if tparams := typeparams.ForFuncType(enclosingFunc); tparams != nil && tparams.NumFields() > 0 { + if tparams := enclosingFunc.TypeParams; tparams != nil && tparams.NumFields() > 0 { return nil, nil } diff --git a/gopls/internal/analysis/fillreturns/fillreturns_test.go b/gopls/internal/analysis/fillreturns/fillreturns_test.go new file mode 100644 index 00000000000..e7cc3c3d486 --- /dev/null +++ b/gopls/internal/analysis/fillreturns/fillreturns_test.go @@ -0,0 +1,24 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fillreturns_test + +import ( + "os" + "strings" + "testing" + + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/gopls/internal/analysis/fillreturns" +) + +func Test(t *testing.T) { + // TODO(golang/go#65294): delete once gotypesalias=1 is the default. 
+ if strings.Contains(os.Getenv("GODEBUG"), "gotypesalias=1") { + t.Skip("skipping due to gotypesalias=1, which changes (improves) the result; reenable and update the expectations once it is the default") + } + + testdata := analysistest.TestData() + analysistest.RunWithSuggestedFixes(t, testdata, fillreturns.Analyzer, "a", "typeparams") +} diff --git a/gopls/internal/lsp/analysis/fillreturns/testdata/src/a/a.go b/gopls/internal/analysis/fillreturns/testdata/src/a/a.go similarity index 100% rename from gopls/internal/lsp/analysis/fillreturns/testdata/src/a/a.go rename to gopls/internal/analysis/fillreturns/testdata/src/a/a.go diff --git a/gopls/internal/lsp/analysis/fillreturns/testdata/src/a/a.go.golden b/gopls/internal/analysis/fillreturns/testdata/src/a/a.go.golden similarity index 100% rename from gopls/internal/lsp/analysis/fillreturns/testdata/src/a/a.go.golden rename to gopls/internal/analysis/fillreturns/testdata/src/a/a.go.golden diff --git a/gopls/internal/lsp/analysis/fillreturns/testdata/src/a/typeparams/a.go b/gopls/internal/analysis/fillreturns/testdata/src/a/typeparams/a.go similarity index 100% rename from gopls/internal/lsp/analysis/fillreturns/testdata/src/a/typeparams/a.go rename to gopls/internal/analysis/fillreturns/testdata/src/a/typeparams/a.go diff --git a/gopls/internal/lsp/analysis/fillreturns/testdata/src/a/typeparams/a.go.golden b/gopls/internal/analysis/fillreturns/testdata/src/a/typeparams/a.go.golden similarity index 100% rename from gopls/internal/lsp/analysis/fillreturns/testdata/src/a/typeparams/a.go.golden rename to gopls/internal/analysis/fillreturns/testdata/src/a/typeparams/a.go.golden diff --git a/gopls/internal/lsp/analysis/fillstruct/fillstruct.go b/gopls/internal/analysis/fillstruct/fillstruct.go similarity index 82% rename from gopls/internal/lsp/analysis/fillstruct/fillstruct.go rename to gopls/internal/analysis/fillstruct/fillstruct.go index 3b87ce5b0f9..fd8d04e4e00 100644 --- 
a/gopls/internal/lsp/analysis/fillstruct/fillstruct.go +++ b/gopls/internal/analysis/fillstruct/fillstruct.go @@ -23,49 +23,23 @@ import ( "unicode" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/gopls/internal/lsp/safetoken" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/fuzzy" "golang.org/x/tools/internal/typeparams" ) -const Doc = `note incomplete struct initializations - -This analyzer provides diagnostics for any struct literals that do not have -any fields initialized. Because the suggested fix for this analysis is -expensive to compute, callers should compute it separately, using the -SuggestedFix function below. -` - -var Analyzer = &analysis.Analyzer{ - Name: "fillstruct", - Doc: Doc, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - RunDespiteErrors: true, -} - -// TODO(rfindley): remove this thin wrapper around the fillstruct refactoring, -// and eliminate the fillstruct analyzer. +// Diagnose computes diagnostics for fillable struct literals overlapping with +// the provided start and end position. // -// Previous iterations used the analysis framework for computing refactorings, -// which proved inefficient. -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - for _, d := range DiagnoseFillableStructs(inspect, token.NoPos, token.NoPos, pass.Pkg, pass.TypesInfo) { - pass.Report(d) - } - return nil, nil -} - -// DiagnoseFillableStructs computes diagnostics for fillable struct composite -// literals overlapping with the provided start and end position. +// The diagnostic contains a lazy fix; the actual patch is computed +// (via the ApplyFix command) by a call to [SuggestedFix]. 
// -// If either start or end is invalid, it is considered an unbounded condition. -func DiagnoseFillableStructs(inspect *inspector.Inspector, start, end token.Pos, pkg *types.Package, info *types.Info) []analysis.Diagnostic { +// If either start or end is invalid, the entire package is inspected. +func Diagnose(inspect *inspector.Inspector, start, end token.Pos, pkg *types.Package, info *types.Info) []analysis.Diagnostic { var diags []analysis.Diagnostic nodeFilter := []ast.Node{(*ast.CompositeLit)(nil)} inspect.Preorder(nodeFilter, func(n ast.Node) { @@ -81,8 +55,8 @@ func DiagnoseFillableStructs(inspect *inspector.Inspector, start, end token.Pos, } // Find reference to the type declaration of the struct being initialized. - typ = deref(typ) - tStruct, ok := typ.Underlying().(*types.Struct) + typ = typeparams.Deref(typ) + tStruct, ok := typeparams.CoreType(typ).(*types.Struct) if !ok { return } @@ -130,23 +104,30 @@ func DiagnoseFillableStructs(inspect *inspector.Inspector, start, end token.Pos, if i < totalFields { fillableFields = append(fillableFields, "...") } - name = fmt.Sprintf("anonymous struct { %s }", strings.Join(fillableFields, ", ")) + name = fmt.Sprintf("anonymous struct{ %s }", strings.Join(fillableFields, ", ")) } diags = append(diags, analysis.Diagnostic{ - Message: fmt.Sprintf("Fill %s", name), - Pos: expr.Pos(), - End: expr.End(), + Message: fmt.Sprintf("%s literal has missing fields", name), + Pos: expr.Pos(), + End: expr.End(), + Category: FixCategory, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Fill %s", name), + // No TextEdits => computed later by gopls. + }}, }) }) return diags } +const FixCategory = "fillstruct" // recognized by gopls ApplyFix + // SuggestedFix computes the suggested fix for the kinds of // diagnostics produced by the Analyzer above. 
-func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) { +func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { if info == nil { - return nil, fmt.Errorf("nil types.Info") + return nil, nil, fmt.Errorf("nil types.Info") } pos := start // don't use the end @@ -155,7 +136,7 @@ func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, fil // calling PathEnclosingInterval. Switch this approach. path, _ := astutil.PathEnclosingInterval(file, pos, pos) if len(path) == 0 { - return nil, fmt.Errorf("no enclosing ast.Node") + return nil, nil, fmt.Errorf("no enclosing ast.Node") } var expr *ast.CompositeLit for _, n := range path { @@ -167,14 +148,14 @@ func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, fil typ := info.TypeOf(expr) if typ == nil { - return nil, fmt.Errorf("no composite literal") + return nil, nil, fmt.Errorf("no composite literal") } // Find reference to the type declaration of the struct being initialized. - typ = deref(typ) + typ = typeparams.Deref(typ) tStruct, ok := typ.Underlying().(*types.Struct) if !ok { - return nil, fmt.Errorf("%s is not a (pointer to) struct type", + return nil, nil, fmt.Errorf("%s is not a (pointer to) struct type", types.TypeString(typ, types.RelativeTo(pkg))) } // Inv: typ is the possibly-named struct type. @@ -240,7 +221,7 @@ func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, fil } else { names, ok := matches[fieldTyp] if !ok { - return nil, fmt.Errorf("invalid struct field type: %v", fieldTyp) + return nil, nil, fmt.Errorf("invalid struct field type: %v", fieldTyp) } // Find the name most similar to the field name. 
@@ -251,7 +232,7 @@ func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, fil } else if v := populateValue(file, pkg, fieldTyp); v != nil { kv.Value = v } else { - return nil, nil + return nil, nil, nil // no fix to suggest } } elts = append(elts, kv) @@ -260,7 +241,7 @@ func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, fil // If all of the struct's fields are unexported, we have nothing to do. if len(elts) == 0 { - return nil, fmt.Errorf("no elements to fill") + return nil, nil, fmt.Errorf("no elements to fill") } // Add the final line for the right brace. Offset is the number of @@ -292,7 +273,7 @@ func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, fil // First pass through the formatter: turn the expr into a string. var formatBuf bytes.Buffer if err := format.Node(&formatBuf, fakeFset, cl); err != nil { - return nil, fmt.Errorf("failed to run first format on:\n%s\ngot err: %v", cl.Type, err) + return nil, nil, fmt.Errorf("failed to run first format on:\n%s\ngot err: %v", cl.Type, err) } sug := indent(formatBuf.Bytes(), whitespace) @@ -304,7 +285,7 @@ func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, fil } } - return &analysis.SuggestedFix{ + return fset, &analysis.SuggestedFix{ TextEdits: []analysis.TextEdit{ { Pos: expr.Pos(), @@ -344,6 +325,8 @@ func indent(str, ind []byte) []byte { // // The reasoning here is that users will call fillstruct with the intention of // initializing the struct, in which case setting these fields to nil has no effect. +// +// populateValue returns nil if the value cannot be filled. 
func populateValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { switch u := typ.Underlying().(type) { case *types.Basic: @@ -356,6 +339,8 @@ func populateValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { return &ast.BasicLit{Kind: token.STRING, Value: `""`} case u.Kind() == types.UnsafePointer: return ast.NewIdent("nil") + case u.Kind() == types.Invalid: + return nil default: panic(fmt.Sprintf("unknown basic type %v", u)) } @@ -464,7 +449,7 @@ func populateValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { } case *types.Pointer: - switch u.Elem().(type) { + switch aliases.Unalias(u.Elem()).(type) { case *types.Basic: return &ast.CallExpr{ Fun: &ast.Ident{ @@ -477,14 +462,18 @@ func populateValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { }, } default: + x := populateValue(f, pkg, u.Elem()) + if x == nil { + return nil + } return &ast.UnaryExpr{ Op: token.AND, - X: populateValue(f, pkg, u.Elem()), + X: x, } } case *types.Interface: - if param, ok := typ.(*typeparams.TypeParam); ok { + if param, ok := aliases.Unalias(typ).(*types.TypeParam); ok { // *new(T) is the zero value of a type parameter T. // TODO(adonovan): one could give a more specific zero // value if the type has a core type that is, say, @@ -503,13 +492,3 @@ func populateValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { } return nil } - -func deref(t types.Type) types.Type { - for { - ptr, ok := t.Underlying().(*types.Pointer) - if !ok { - return t - } - t = ptr.Elem() - } -} diff --git a/gopls/internal/analysis/fillstruct/fillstruct_test.go b/gopls/internal/analysis/fillstruct/fillstruct_test.go new file mode 100644 index 00000000000..f90998fa459 --- /dev/null +++ b/gopls/internal/analysis/fillstruct/fillstruct_test.go @@ -0,0 +1,38 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package fillstruct_test + +import ( + "go/token" + "testing" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/gopls/internal/analysis/fillstruct" +) + +// analyzer allows us to test the fillstruct code action using the analysistest +// harness. (fillstruct used to be a gopls analyzer.) +var analyzer = &analysis.Analyzer{ + Name: "fillstruct", + Doc: "test only", + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: func(pass *analysis.Pass) (any, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for _, d := range fillstruct.Diagnose(inspect, token.NoPos, token.NoPos, pass.Pkg, pass.TypesInfo) { + pass.Report(d) + } + return nil, nil + }, + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/fillstruct", + RunDespiteErrors: true, +} + +func Test(t *testing.T) { + testdata := analysistest.TestData() + analysistest.Run(t, testdata, analyzer, "a", "typeparams") +} diff --git a/gopls/internal/analysis/fillstruct/testdata/src/a/a.go b/gopls/internal/analysis/fillstruct/testdata/src/a/a.go new file mode 100644 index 00000000000..79c51d209c1 --- /dev/null +++ b/gopls/internal/analysis/fillstruct/testdata/src/a/a.go @@ -0,0 +1,112 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package fillstruct + +import ( + data "b" + "go/ast" + "go/token" + "unsafe" +) + +type emptyStruct struct{} + +var _ = emptyStruct{} + +type basicStruct struct { + foo int +} + +var _ = basicStruct{} // want `basicStruct literal has missing fields` + +type twoArgStruct struct { + foo int + bar string +} + +var _ = twoArgStruct{} // want `twoArgStruct literal has missing fields` + +var _ = twoArgStruct{ // want `twoArgStruct literal has missing fields` + bar: "bar", +} + +type nestedStruct struct { + bar string + basic basicStruct +} + +var _ = nestedStruct{} // want `nestedStruct literal has missing fields` + +var _ = data.B{} // want `b.B literal has missing fields` + +type typedStruct struct { + m map[string]int + s []int + c chan int + c1 <-chan int + a [2]string +} + +var _ = typedStruct{} // want `typedStruct literal has missing fields` + +type funStruct struct { + fn func(i int) int +} + +var _ = funStruct{} // want `funStruct literal has missing fields` + +type funStructComplex struct { + fn func(i int, s string) (string, int) +} + +var _ = funStructComplex{} // want `funStructComplex literal has missing fields` + +type funStructEmpty struct { + fn func() +} + +var _ = funStructEmpty{} // want `funStructEmpty literal has missing fields` + +type Foo struct { + A int +} + +type Bar struct { + X *Foo + Y *Foo +} + +var _ = Bar{} // want `Bar literal has missing fields` + +type importedStruct struct { + m map[*ast.CompositeLit]ast.Field + s []ast.BadExpr + a [3]token.Token + c chan ast.EmptyStmt + fn func(ast_decl ast.DeclStmt) ast.Ellipsis + st ast.CompositeLit +} + +var _ = importedStruct{} // want `importedStruct literal has missing fields` + +type pointerBuiltinStruct struct { + b *bool + s *string + i *int +} + +var _ = pointerBuiltinStruct{} // want `pointerBuiltinStruct literal has missing fields` + +var _ = []ast.BasicLit{ + {}, // want `go/ast.BasicLit literal has missing fields` +} + +var _ = []ast.BasicLit{{}} // want "go/ast.BasicLit literal has 
missing fields" + +type unsafeStruct struct { + foo unsafe.Pointer +} + +var _ = unsafeStruct{} // want `unsafeStruct literal has missing fields` diff --git a/gopls/internal/lsp/analysis/fillstruct/testdata/src/b/b.go b/gopls/internal/analysis/fillstruct/testdata/src/b/b.go similarity index 100% rename from gopls/internal/lsp/analysis/fillstruct/testdata/src/b/b.go rename to gopls/internal/analysis/fillstruct/testdata/src/b/b.go diff --git a/gopls/internal/analysis/fillstruct/testdata/src/typeparams/typeparams.go b/gopls/internal/analysis/fillstruct/testdata/src/typeparams/typeparams.go new file mode 100644 index 00000000000..24e8a930dc2 --- /dev/null +++ b/gopls/internal/analysis/fillstruct/testdata/src/typeparams/typeparams.go @@ -0,0 +1,54 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fillstruct + +type emptyStruct[A any] struct{} + +var _ = emptyStruct[int]{} + +type basicStruct[T any] struct { + foo T +} + +var _ = basicStruct[int]{} // want `basicStruct\[int\] literal has missing fields` + +type twoArgStruct[F, B any] struct { + foo F + bar B +} + +var _ = twoArgStruct[string, int]{} // want `twoArgStruct\[string, int\] literal has missing fields` + +var _ = twoArgStruct[int, string]{ // want `twoArgStruct\[int, string\] literal has missing fields` + bar: "bar", +} + +type nestedStruct struct { + bar string + basic basicStruct[int] +} + +var _ = nestedStruct{} // want "nestedStruct literal has missing fields" + +func _[T any]() { + type S struct{ t T } + x := S{} // want "S" + _ = x +} + +func Test() { + var tests = []struct { + a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p string + }{ + {}, // want "anonymous struct{ a: string, b: string, c: string, ... 
} literal has missing fields" + } + for _, test := range tests { + _ = test + } +} + +func _[T twoArgStruct[int, int]]() { + _ = T{} // want "T literal has missing fields" +} diff --git a/gopls/internal/analysis/fillswitch/doc.go b/gopls/internal/analysis/fillswitch/doc.go new file mode 100644 index 00000000000..076c3a1323d --- /dev/null +++ b/gopls/internal/analysis/fillswitch/doc.go @@ -0,0 +1,66 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package fillswitch identifies switches with missing cases. +// +// It reports a diagnostic for each type switch or 'enum' switch that +// has missing cases, and suggests a fix to fill them in. +// +// The possible cases are: for a type switch, each accessible named +// type T or pointer *T that is assignable to the interface type; and +// for an 'enum' switch, each accessible named constant of the same +// type as the switch value. +// +// For an 'enum' switch, it will suggest cases for all possible values of the +// type. +// +// type Suit int8 +// const ( +// Spades Suit = iota +// Hearts +// Diamonds +// Clubs +// ) +// +// var s Suit +// switch s { +// case Spades: +// } +// +// It will report a diagnostic with a suggested fix to fill in the remaining +// cases: +// +// var s Suit +// switch s { +// case Spades: +// case Hearts: +// case Diamonds: +// case Clubs: +// default: +// panic(fmt.Sprintf("unexpected Suit: %v", s)) +// } +// +// For a type switch, it will suggest cases for all types that implement the +// interface. +// +// var stmt ast.Stmt +// switch stmt.(type) { +// case *ast.IfStmt: +// } +// +// It will report a diagnostic with a suggested fix to fill in the remaining +// cases: +// +// var stmt ast.Stmt +// switch stmt.(type) { +// case *ast.IfStmt: +// case *ast.ForStmt: +// case *ast.RangeStmt: +// case *ast.AssignStmt: +// case *ast.GoStmt: +// ... 
+// default: +// panic(fmt.Sprintf("unexpected ast.Stmt: %T", stmt)) +// } +package fillswitch diff --git a/gopls/internal/analysis/fillswitch/fillswitch.go b/gopls/internal/analysis/fillswitch/fillswitch.go new file mode 100644 index 00000000000..b93ade01065 --- /dev/null +++ b/gopls/internal/analysis/fillswitch/fillswitch.go @@ -0,0 +1,301 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fillswitch + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/inspector" +) + +// Diagnose computes diagnostics for switch statements with missing cases +// overlapping with the provided start and end position. +// +// If either start or end is invalid, the entire package is inspected. +func Diagnose(inspect *inspector.Inspector, start, end token.Pos, pkg *types.Package, info *types.Info) []analysis.Diagnostic { + var diags []analysis.Diagnostic + nodeFilter := []ast.Node{(*ast.SwitchStmt)(nil), (*ast.TypeSwitchStmt)(nil)} + inspect.Preorder(nodeFilter, func(n ast.Node) { + if start.IsValid() && n.End() < start || + end.IsValid() && n.Pos() > end { + return // non-overlapping + } + + var fix *analysis.SuggestedFix + switch n := n.(type) { + case *ast.SwitchStmt: + fix = suggestedFixSwitch(n, pkg, info) + case *ast.TypeSwitchStmt: + fix = suggestedFixTypeSwitch(n, pkg, info) + } + + if fix == nil { + return + } + + diags = append(diags, analysis.Diagnostic{ + Message: fix.Message, + Pos: n.Pos(), + End: n.Pos() + token.Pos(len("switch")), + SuggestedFixes: []analysis.SuggestedFix{*fix}, + }) + }) + + return diags +} + +func suggestedFixTypeSwitch(stmt *ast.TypeSwitchStmt, pkg *types.Package, info *types.Info) *analysis.SuggestedFix { + if hasDefaultCase(stmt.Body) { + return nil + } + + namedType := namedTypeFromTypeSwitch(stmt, info) + if namedType == nil { + return nil + } 
+ + existingCases := caseTypes(stmt.Body, info) + // Gather accessible package-level concrete types + // that implement the switch interface type. + scope := namedType.Obj().Pkg().Scope() + var buf bytes.Buffer + for _, name := range scope.Names() { + obj := scope.Lookup(name) + if tname, ok := obj.(*types.TypeName); !ok || tname.IsAlias() { + continue // not a defined type + } + + if types.IsInterface(obj.Type()) { + continue + } + + samePkg := obj.Pkg() == pkg + if !samePkg && !obj.Exported() { + continue // inaccessible + } + + var key caseType + if types.AssignableTo(obj.Type(), namedType.Obj().Type()) { + key.named = obj.Type().(*types.Named) + } else if ptr := types.NewPointer(obj.Type()); types.AssignableTo(ptr, namedType.Obj().Type()) { + key.named = obj.Type().(*types.Named) + key.ptr = true + } + + if key.named != nil { + if existingCases[key] { + continue + } + + if buf.Len() > 0 { + buf.WriteString("\t") + } + + buf.WriteString("case ") + if key.ptr { + buf.WriteByte('*') + } + + if p := key.named.Obj().Pkg(); p != pkg { + // TODO: use the correct package name when the import is renamed + buf.WriteString(p.Name()) + buf.WriteByte('.') + } + buf.WriteString(key.named.Obj().Name()) + buf.WriteString(":\n") + } + } + + if buf.Len() == 0 { + return nil + } + + switch assign := stmt.Assign.(type) { + case *ast.AssignStmt: + addDefaultCase(&buf, namedType, assign.Lhs[0]) + case *ast.ExprStmt: + if assert, ok := assign.X.(*ast.TypeAssertExpr); ok { + addDefaultCase(&buf, namedType, assert.X) + } + } + + return &analysis.SuggestedFix{ + Message: fmt.Sprintf("Add cases for %s", namedType.Obj().Name()), + TextEdits: []analysis.TextEdit{{ + Pos: stmt.End() - token.Pos(len("}")), + End: stmt.End() - token.Pos(len("}")), + NewText: buf.Bytes(), + }}, + } +} + +func suggestedFixSwitch(stmt *ast.SwitchStmt, pkg *types.Package, info *types.Info) *analysis.SuggestedFix { + if hasDefaultCase(stmt.Body) { + return nil + } + + namedType, ok := 
info.TypeOf(stmt.Tag).(*types.Named) + if !ok { + return nil + } + + existingCases := caseConsts(stmt.Body, info) + // Gather accessible named constants of the same type as the switch value. + scope := namedType.Obj().Pkg().Scope() + var buf bytes.Buffer + for _, name := range scope.Names() { + obj := scope.Lookup(name) + if c, ok := obj.(*types.Const); ok && + (obj.Pkg() == pkg || obj.Exported()) && // accessible + types.Identical(obj.Type(), namedType.Obj().Type()) && + !existingCases[c] { + + if buf.Len() > 0 { + buf.WriteString("\t") + } + + buf.WriteString("case ") + if c.Pkg() != pkg { + buf.WriteString(c.Pkg().Name()) + buf.WriteByte('.') + } + buf.WriteString(c.Name()) + buf.WriteString(":\n") + } + } + + if buf.Len() == 0 { + return nil + } + + addDefaultCase(&buf, namedType, stmt.Tag) + + return &analysis.SuggestedFix{ + Message: fmt.Sprintf("Add cases for %s", namedType.Obj().Name()), + TextEdits: []analysis.TextEdit{{ + Pos: stmt.End() - token.Pos(len("}")), + End: stmt.End() - token.Pos(len("}")), + NewText: buf.Bytes(), + }}, + } +} + +func addDefaultCase(buf *bytes.Buffer, named *types.Named, expr ast.Expr) { + var dottedBuf bytes.Buffer + // writeDotted emits a dotted path a.b.c. + var writeDotted func(e ast.Expr) bool + writeDotted = func(e ast.Expr) bool { + switch e := e.(type) { + case *ast.SelectorExpr: + if !writeDotted(e.X) { + return false + } + dottedBuf.WriteByte('.') + dottedBuf.WriteString(e.Sel.Name) + return true + case *ast.Ident: + dottedBuf.WriteString(e.Name) + return true + } + return false + } + + buf.WriteString("\tdefault:\n") + typeName := fmt.Sprintf("%s.%s", named.Obj().Pkg().Name(), named.Obj().Name()) + if writeDotted(expr) { + // Switch tag expression is a dotted path. + // It is safe to re-evaluate it in the default case. 
+ format := fmt.Sprintf("unexpected %s: %%#v", typeName) + fmt.Fprintf(buf, "\t\tpanic(fmt.Sprintf(%q, %s))\n\t", format, dottedBuf.String()) + } else { + // Emit simpler message, without re-evaluating tag expression. + fmt.Fprintf(buf, "\t\tpanic(%q)\n\t", "unexpected "+typeName) + } +} + +func namedTypeFromTypeSwitch(stmt *ast.TypeSwitchStmt, info *types.Info) *types.Named { + switch assign := stmt.Assign.(type) { + case *ast.ExprStmt: + if typ, ok := assign.X.(*ast.TypeAssertExpr); ok { + if named, ok := info.TypeOf(typ.X).(*types.Named); ok { + return named + } + } + + case *ast.AssignStmt: + if typ, ok := assign.Rhs[0].(*ast.TypeAssertExpr); ok { + if named, ok := info.TypeOf(typ.X).(*types.Named); ok { + return named + } + } + } + + return nil +} + +func hasDefaultCase(body *ast.BlockStmt) bool { + for _, clause := range body.List { + if len(clause.(*ast.CaseClause).List) == 0 { + return true + } + } + + return false +} + +func caseConsts(body *ast.BlockStmt, info *types.Info) map[*types.Const]bool { + out := map[*types.Const]bool{} + for _, stmt := range body.List { + for _, e := range stmt.(*ast.CaseClause).List { + if info.Types[e].Value == nil { + continue // not a constant + } + + if sel, ok := e.(*ast.SelectorExpr); ok { + e = sel.Sel // replace pkg.C with C + } + + if e, ok := e.(*ast.Ident); ok { + if c, ok := info.Uses[e].(*types.Const); ok { + out[c] = true + } + } + } + } + + return out +} + +type caseType struct { + named *types.Named + ptr bool +} + +func caseTypes(body *ast.BlockStmt, info *types.Info) map[caseType]bool { + out := map[caseType]bool{} + for _, stmt := range body.List { + for _, e := range stmt.(*ast.CaseClause).List { + if tv, ok := info.Types[e]; ok && tv.IsType() { + t := tv.Type + ptr := false + if p, ok := t.(*types.Pointer); ok { + t = p.Elem() + ptr = true + } + + if named, ok := t.(*types.Named); ok { + out[caseType{named, ptr}] = true + } + } + } + } + + return out +} diff --git 
a/gopls/internal/analysis/fillswitch/fillswitch_test.go b/gopls/internal/analysis/fillswitch/fillswitch_test.go new file mode 100644 index 00000000000..15d3ef1dd70 --- /dev/null +++ b/gopls/internal/analysis/fillswitch/fillswitch_test.go @@ -0,0 +1,38 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fillswitch_test + +import ( + "go/token" + "testing" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/gopls/internal/analysis/fillswitch" +) + +// analyzer allows us to test the fillswitch code action using the analysistest +// harness. +var analyzer = &analysis.Analyzer{ + Name: "fillswitch", + Doc: "test only", + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: func(pass *analysis.Pass) (any, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for _, d := range fillswitch.Diagnose(inspect, token.NoPos, token.NoPos, pass.Pkg, pass.TypesInfo) { + pass.Report(d) + } + return nil, nil + }, + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/fillswitch", + RunDespiteErrors: true, +} + +func Test(t *testing.T) { + testdata := analysistest.TestData() + analysistest.Run(t, testdata, analyzer, "a") +} diff --git a/gopls/internal/analysis/fillswitch/testdata/src/a/a.go b/gopls/internal/analysis/fillswitch/testdata/src/a/a.go new file mode 100644 index 00000000000..06d01da5f1e --- /dev/null +++ b/gopls/internal/analysis/fillswitch/testdata/src/a/a.go @@ -0,0 +1,78 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package fillswitch + +import ( + data "b" +) + +type typeA int + +const ( + typeAOne typeA = iota + typeATwo + typeAThree +) + +func doSwitch() { + var a typeA + switch a { // want `Add cases for typeA` + } + + switch a { // want `Add cases for typeA` + case typeAOne: + } + + switch a { + case typeAOne: + default: + } + + switch a { + case typeAOne: + case typeATwo: + case typeAThree: + } + + var b data.TypeB + switch b { // want `Add cases for TypeB` + case data.TypeBOne: + } +} + +type notification interface { + isNotification() +} + +type notificationOne struct{} + +func (notificationOne) isNotification() {} + +type notificationTwo struct{} + +func (notificationTwo) isNotification() {} + +func doTypeSwitch() { + var not notification + switch not.(type) { // want `Add cases for notification` + } + + switch not.(type) { // want `Add cases for notification` + case notificationOne: + } + + switch not.(type) { + case notificationOne: + case notificationTwo: + } + + switch not.(type) { + default: + } + + var t data.ExportedInterface + switch t { + } +} diff --git a/gopls/internal/analysis/fillswitch/testdata/src/b/b.go b/gopls/internal/analysis/fillswitch/testdata/src/b/b.go new file mode 100644 index 00000000000..f65f3a7e6f2 --- /dev/null +++ b/gopls/internal/analysis/fillswitch/testdata/src/b/b.go @@ -0,0 +1,21 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package fillswitch + +type TypeB int + +const ( + TypeBOne TypeB = iota + TypeBTwo + TypeBThree +) + +type ExportedInterface interface { + isExportedInterface() +} + +type notExportedType struct{} + +func (notExportedType) isExportedInterface() {} diff --git a/gopls/internal/analysis/infertypeargs/infertypeargs.go b/gopls/internal/analysis/infertypeargs/infertypeargs.go new file mode 100644 index 00000000000..9a514ad620c --- /dev/null +++ b/gopls/internal/analysis/infertypeargs/infertypeargs.go @@ -0,0 +1,149 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package infertypeargs + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/versions" +) + +const Doc = `check for unnecessary type arguments in call expressions

Explicit type arguments may be omitted from call expressions if they can be
inferred from function arguments, or from other type arguments:

	func f[T any](T) {}

	func _() {
		f[string]("foo") // string could be inferred
	}
` + +var Analyzer = &analysis.Analyzer{ + Name: "infertypeargs", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/infertypeargs", +} + +func run(pass *analysis.Pass) (any, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for _, diag := range diagnose(pass.Fset, inspect, token.NoPos, token.NoPos, pass.Pkg, pass.TypesInfo) { + pass.Report(diag) + } + return nil, nil +} + +// diagnose reports diagnostics describing simplifications to type +// arguments overlapping with the provided start and end position. 
+// +// If start or end is token.NoPos, the corresponding bound is not checked +// (i.e. if both start and end are NoPos, all call expressions are considered). +func diagnose(fset *token.FileSet, inspect *inspector.Inspector, start, end token.Pos, pkg *types.Package, info *types.Info) []analysis.Diagnostic { + var diags []analysis.Diagnostic + + nodeFilter := []ast.Node{(*ast.CallExpr)(nil)} + inspect.Preorder(nodeFilter, func(node ast.Node) { + call := node.(*ast.CallExpr) + x, lbrack, indices, rbrack := typeparams.UnpackIndexExpr(call.Fun) + ident := calledIdent(x) + if ident == nil || len(indices) == 0 { + return // no explicit args, nothing to do + } + + if (start.IsValid() && call.End() < start) || (end.IsValid() && call.Pos() > end) { + return // non-overlapping + } + + // Confirm that instantiation actually occurred at this ident. + idata, ok := info.Instances[ident] + if !ok { + return // something went wrong, but fail open + } + instance := idata.Type + + // Start removing argument expressions from the right, and check if we can + // still infer the call expression. + required := len(indices) // number of type expressions that are required + for i := len(indices) - 1; i >= 0; i-- { + var fun ast.Expr + if i == 0 { + // No longer an index expression: just use the parameterized operand. + fun = x + } else { + fun = typeparams.PackIndexExpr(x, lbrack, indices[:i], indices[i-1].End()) + } + newCall := &ast.CallExpr{ + Fun: fun, + Lparen: call.Lparen, + Args: call.Args, + Ellipsis: call.Ellipsis, + Rparen: call.Rparen, + } + info := &types.Info{ + Instances: make(map[*ast.Ident]types.Instance), + } + versions.InitFileVersions(info) + if err := types.CheckExpr(fset, pkg, call.Pos(), newCall, info); err != nil { + // Most likely inference failed. 
+ break + } + newIData := info.Instances[ident] + newInstance := newIData.Type + if !types.Identical(instance, newInstance) { + // The inferred result type does not match the original result type, so + // this simplification is not valid. + break + } + required = i + } + if required < len(indices) { + var s, e token.Pos + var edit analysis.TextEdit + if required == 0 { + s, e = lbrack, rbrack+1 // erase the entire index + edit = analysis.TextEdit{Pos: s, End: e} + } else { + s = indices[required].Pos() + e = rbrack + // erase from end of last arg to include last comma & white-spaces + edit = analysis.TextEdit{Pos: indices[required-1].End(), End: e} + } + // Recheck that our (narrower) fixes overlap with the requested range. + if (start.IsValid() && e < start) || (end.IsValid() && s > end) { + return // non-overlapping + } + diags = append(diags, analysis.Diagnostic{ + Pos: s, + End: e, + Message: "unnecessary type arguments", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Simplify type arguments", + TextEdits: []analysis.TextEdit{edit}, + }}, + }) + } + }) + + return diags +} + +func calledIdent(x ast.Expr) *ast.Ident { + switch x := x.(type) { + case *ast.Ident: + return x + case *ast.SelectorExpr: + return x.Sel + } + return nil +} diff --git a/gopls/internal/analysis/infertypeargs/infertypeargs_test.go b/gopls/internal/analysis/infertypeargs/infertypeargs_test.go new file mode 100644 index 00000000000..25c88e84f29 --- /dev/null +++ b/gopls/internal/analysis/infertypeargs/infertypeargs_test.go @@ -0,0 +1,17 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package infertypeargs_test + +import ( + "testing" + + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/gopls/internal/analysis/infertypeargs" +) + +func Test(t *testing.T) { + testdata := analysistest.TestData() + analysistest.RunWithSuggestedFixes(t, testdata, infertypeargs.Analyzer, "a") +} diff --git a/gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/basic.go b/gopls/internal/analysis/infertypeargs/testdata/src/a/basic.go similarity index 100% rename from gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/basic.go rename to gopls/internal/analysis/infertypeargs/testdata/src/a/basic.go diff --git a/gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/basic.go.golden b/gopls/internal/analysis/infertypeargs/testdata/src/a/basic.go.golden similarity index 100% rename from gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/basic.go.golden rename to gopls/internal/analysis/infertypeargs/testdata/src/a/basic.go.golden diff --git a/gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/imported.go b/gopls/internal/analysis/infertypeargs/testdata/src/a/imported.go similarity index 100% rename from gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/imported.go rename to gopls/internal/analysis/infertypeargs/testdata/src/a/imported.go diff --git a/gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/imported.go.golden b/gopls/internal/analysis/infertypeargs/testdata/src/a/imported.go.golden similarity index 100% rename from gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/imported.go.golden rename to gopls/internal/analysis/infertypeargs/testdata/src/a/imported.go.golden diff --git a/gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/imported/imported.go b/gopls/internal/analysis/infertypeargs/testdata/src/a/imported/imported.go similarity index 100% rename from gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/imported/imported.go rename to 
gopls/internal/analysis/infertypeargs/testdata/src/a/imported/imported.go diff --git a/gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/notypechange.go b/gopls/internal/analysis/infertypeargs/testdata/src/a/notypechange.go similarity index 100% rename from gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/notypechange.go rename to gopls/internal/analysis/infertypeargs/testdata/src/a/notypechange.go diff --git a/gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/notypechange.go.golden b/gopls/internal/analysis/infertypeargs/testdata/src/a/notypechange.go.golden similarity index 100% rename from gopls/internal/lsp/analysis/infertypeargs/testdata/src/a/notypechange.go.golden rename to gopls/internal/analysis/infertypeargs/testdata/src/a/notypechange.go.golden diff --git a/gopls/internal/analysis/nonewvars/doc.go b/gopls/internal/analysis/nonewvars/doc.go new file mode 100644 index 00000000000..b0bef847e32 --- /dev/null +++ b/gopls/internal/analysis/nonewvars/doc.go @@ -0,0 +1,22 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package nonewvars defines an Analyzer that applies suggested fixes +// to errors of the type "no new variables on left side of :=". +// +// # Analyzer nonewvars +// +// nonewvars: suggested fixes for "no new vars on left side of :=" +// +// This checker provides suggested fixes for type errors of the +// type "no new vars on left side of :=". 
For example: +// +// z := 1 +// z := 2 +// +// will turn into +// +// z := 1 +// z = 2 +package nonewvars diff --git a/gopls/internal/lsp/analysis/nonewvars/nonewvars.go b/gopls/internal/analysis/nonewvars/nonewvars.go similarity index 89% rename from gopls/internal/lsp/analysis/nonewvars/nonewvars.go rename to gopls/internal/analysis/nonewvars/nonewvars.go index 6937b36d1f5..b9c9b4d6f48 100644 --- a/gopls/internal/lsp/analysis/nonewvars/nonewvars.go +++ b/gopls/internal/analysis/nonewvars/nonewvars.go @@ -8,6 +8,7 @@ package nonewvars import ( "bytes" + _ "embed" "go/ast" "go/format" "go/token" @@ -18,23 +19,16 @@ import ( "golang.org/x/tools/internal/analysisinternal" ) -const Doc = `suggested fixes for "no new vars on left side of :=" - -This checker provides suggested fixes for type errors of the -type "no new vars on left side of :=". For example: - z := 1 - z := 2 -will turn into - z := 1 - z = 2 -` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "nonewvars", - Doc: Doc, + Doc: analysisinternal.MustExtractDoc(doc, "nonewvars"), Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, RunDespiteErrors: true, + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/nonewvars", } func run(pass *analysis.Pass) (interface{}, error) { diff --git a/gopls/internal/analysis/nonewvars/nonewvars_test.go b/gopls/internal/analysis/nonewvars/nonewvars_test.go new file mode 100644 index 00000000000..49e19db2f0c --- /dev/null +++ b/gopls/internal/analysis/nonewvars/nonewvars_test.go @@ -0,0 +1,17 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package nonewvars_test + +import ( + "testing" + + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/gopls/internal/analysis/nonewvars" +) + +func Test(t *testing.T) { + testdata := analysistest.TestData() + analysistest.RunWithSuggestedFixes(t, testdata, nonewvars.Analyzer, "a", "typeparams") +} diff --git a/gopls/internal/lsp/analysis/nonewvars/testdata/src/a/a.go b/gopls/internal/analysis/nonewvars/testdata/src/a/a.go similarity index 100% rename from gopls/internal/lsp/analysis/nonewvars/testdata/src/a/a.go rename to gopls/internal/analysis/nonewvars/testdata/src/a/a.go diff --git a/gopls/internal/lsp/analysis/nonewvars/testdata/src/a/a.go.golden b/gopls/internal/analysis/nonewvars/testdata/src/a/a.go.golden similarity index 100% rename from gopls/internal/lsp/analysis/nonewvars/testdata/src/a/a.go.golden rename to gopls/internal/analysis/nonewvars/testdata/src/a/a.go.golden diff --git a/gopls/internal/lsp/analysis/nonewvars/testdata/src/typeparams/a.go b/gopls/internal/analysis/nonewvars/testdata/src/typeparams/a.go similarity index 100% rename from gopls/internal/lsp/analysis/nonewvars/testdata/src/typeparams/a.go rename to gopls/internal/analysis/nonewvars/testdata/src/typeparams/a.go diff --git a/gopls/internal/lsp/analysis/nonewvars/testdata/src/typeparams/a.go.golden b/gopls/internal/analysis/nonewvars/testdata/src/typeparams/a.go.golden similarity index 100% rename from gopls/internal/lsp/analysis/nonewvars/testdata/src/typeparams/a.go.golden rename to gopls/internal/analysis/nonewvars/testdata/src/typeparams/a.go.golden diff --git a/gopls/internal/analysis/noresultvalues/doc.go b/gopls/internal/analysis/noresultvalues/doc.go new file mode 100644 index 00000000000..87df2093e8d --- /dev/null +++ b/gopls/internal/analysis/noresultvalues/doc.go @@ -0,0 +1,21 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Package noresultvalues defines an Analyzer that applies suggested fixes +// to errors of the type "no result values expected". +// +// # Analyzer noresultvalues +// +// noresultvalues: suggested fixes for unexpected return values +// +// This checker provides suggested fixes for type errors of the +// type "no result values expected" or "too many return values". +// For example: +// +// func z() { return nil } +// +// will turn into +// +// func z() { return } +package noresultvalues diff --git a/gopls/internal/analysis/noresultvalues/noresultvalues.go b/gopls/internal/analysis/noresultvalues/noresultvalues.go new file mode 100644 index 00000000000..7e2e3d4f646 --- /dev/null +++ b/gopls/internal/analysis/noresultvalues/noresultvalues.go @@ -0,0 +1,86 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package noresultvalues + +import ( + "bytes" + "go/ast" + "go/format" + "strings" + + _ "embed" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" +) + +//go:embed doc.go +var doc string + +var Analyzer = &analysis.Analyzer{ + Name: "noresultvalues", + Doc: analysisinternal.MustExtractDoc(doc, "noresultvalues"), + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, + RunDespiteErrors: true, + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/noresultvalues", +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + if len(pass.TypeErrors) == 0 { + return nil, nil + } + + nodeFilter := []ast.Node{(*ast.ReturnStmt)(nil)} + inspect.Preorder(nodeFilter, func(n ast.Node) { + retStmt, _ := n.(*ast.ReturnStmt) + + var file *ast.File + for _, f := range pass.Files { + if f.Pos() <= retStmt.Pos() && retStmt.Pos() < f.End() { +
file = f + break + } + } + if file == nil { + return + } + + for _, err := range pass.TypeErrors { + if !FixesError(err.Msg) { + continue + } + if retStmt.Pos() >= err.Pos || err.Pos >= retStmt.End() { + continue + } + var buf bytes.Buffer + if err := format.Node(&buf, pass.Fset, file); err != nil { + continue + } + pass.Report(analysis.Diagnostic{ + Pos: err.Pos, + End: analysisinternal.TypeErrorEndPos(pass.Fset, buf.Bytes(), err.Pos), + Message: err.Msg, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Delete return values", + TextEdits: []analysis.TextEdit{{ + Pos: retStmt.Pos(), + End: retStmt.End(), + NewText: []byte("return"), + }}, + }}, + }) + } + }) + return nil, nil +} + +func FixesError(msg string) bool { + return msg == "no result values expected" || + strings.HasPrefix(msg, "too many return values") && strings.Contains(msg, "want ()") +} diff --git a/gopls/internal/analysis/noresultvalues/noresultvalues_test.go b/gopls/internal/analysis/noresultvalues/noresultvalues_test.go new file mode 100644 index 00000000000..e9f1a36ab6f --- /dev/null +++ b/gopls/internal/analysis/noresultvalues/noresultvalues_test.go @@ -0,0 +1,17 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package noresultvalues_test + +import ( + "testing" + + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/gopls/internal/analysis/noresultvalues" +) + +func Test(t *testing.T) { + testdata := analysistest.TestData() + analysistest.RunWithSuggestedFixes(t, testdata, noresultvalues.Analyzer, "a", "typeparams") +} diff --git a/gopls/internal/lsp/analysis/noresultvalues/testdata/src/a/a.go b/gopls/internal/analysis/noresultvalues/testdata/src/a/a.go similarity index 100% rename from gopls/internal/lsp/analysis/noresultvalues/testdata/src/a/a.go rename to gopls/internal/analysis/noresultvalues/testdata/src/a/a.go diff --git a/gopls/internal/lsp/analysis/noresultvalues/testdata/src/a/a.go.golden b/gopls/internal/analysis/noresultvalues/testdata/src/a/a.go.golden similarity index 100% rename from gopls/internal/lsp/analysis/noresultvalues/testdata/src/a/a.go.golden rename to gopls/internal/analysis/noresultvalues/testdata/src/a/a.go.golden diff --git a/gopls/internal/lsp/analysis/noresultvalues/testdata/src/typeparams/a.go b/gopls/internal/analysis/noresultvalues/testdata/src/typeparams/a.go similarity index 100% rename from gopls/internal/lsp/analysis/noresultvalues/testdata/src/typeparams/a.go rename to gopls/internal/analysis/noresultvalues/testdata/src/typeparams/a.go diff --git a/gopls/internal/lsp/analysis/noresultvalues/testdata/src/typeparams/a.go.golden b/gopls/internal/analysis/noresultvalues/testdata/src/typeparams/a.go.golden similarity index 100% rename from gopls/internal/lsp/analysis/noresultvalues/testdata/src/typeparams/a.go.golden rename to gopls/internal/analysis/noresultvalues/testdata/src/typeparams/a.go.golden diff --git a/gopls/internal/analysis/simplifycompositelit/doc.go b/gopls/internal/analysis/simplifycompositelit/doc.go new file mode 100644 index 00000000000..fe40596746e --- /dev/null +++ b/gopls/internal/analysis/simplifycompositelit/doc.go @@ -0,0 +1,22 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package simplifycompositelit defines an Analyzer that simplifies composite literals. +// https://github.com/golang/go/blob/master/src/cmd/gofmt/simplify.go +// https://golang.org/cmd/gofmt/#hdr-The_simplify_command +// +// # Analyzer simplifycompositelit +// +// simplifycompositelit: check for composite literal simplifications +// +// An array, slice, or map composite literal of the form: +// +// []T{T{}, T{}} +// +// will be simplified to: +// +// []T{{}, {}} +// +// This is one of the simplifications that "gofmt -s" applies. +package simplifycompositelit diff --git a/gopls/internal/lsp/analysis/simplifycompositelit/simplifycompositelit.go b/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go similarity index 95% rename from gopls/internal/lsp/analysis/simplifycompositelit/simplifycompositelit.go rename to gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go index c91fc7577ab..c651206b05f 100644 --- a/gopls/internal/lsp/analysis/simplifycompositelit/simplifycompositelit.go +++ b/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go @@ -9,6 +9,7 @@ package simplifycompositelit import ( "bytes" + _ "embed" "fmt" "go/ast" "go/printer" @@ -18,22 +19,18 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" ) -const Doc = `check for composite literal simplifications - -An array, slice, or map composite literal of the form: - []T{T{}, T{}} -will be simplified to: - []T{{}, {}} - -This is one of the simplifications that "gofmt -s" applies.` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "simplifycompositelit", - Doc: Doc, + Doc: analysisinternal.MustExtractDoc(doc, "simplifycompositelit"), Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, + URL: 
"https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifycompositelit", } func run(pass *analysis.Pass) (interface{}, error) { diff --git a/gopls/internal/lsp/analysis/simplifycompositelit/simplifycompositelit_test.go b/gopls/internal/analysis/simplifycompositelit/simplifycompositelit_test.go similarity index 85% rename from gopls/internal/lsp/analysis/simplifycompositelit/simplifycompositelit_test.go rename to gopls/internal/analysis/simplifycompositelit/simplifycompositelit_test.go index b0365a6b3da..a355616e3fe 100644 --- a/gopls/internal/lsp/analysis/simplifycompositelit/simplifycompositelit_test.go +++ b/gopls/internal/analysis/simplifycompositelit/simplifycompositelit_test.go @@ -8,7 +8,7 @@ import ( "testing" "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/lsp/analysis/simplifycompositelit" + "golang.org/x/tools/gopls/internal/analysis/simplifycompositelit" ) func Test(t *testing.T) { diff --git a/gopls/internal/lsp/analysis/simplifycompositelit/testdata/src/a/a.go b/gopls/internal/analysis/simplifycompositelit/testdata/src/a/a.go similarity index 100% rename from gopls/internal/lsp/analysis/simplifycompositelit/testdata/src/a/a.go rename to gopls/internal/analysis/simplifycompositelit/testdata/src/a/a.go diff --git a/gopls/internal/lsp/analysis/simplifycompositelit/testdata/src/a/a.go.golden b/gopls/internal/analysis/simplifycompositelit/testdata/src/a/a.go.golden similarity index 100% rename from gopls/internal/lsp/analysis/simplifycompositelit/testdata/src/a/a.go.golden rename to gopls/internal/analysis/simplifycompositelit/testdata/src/a/a.go.golden diff --git a/gopls/internal/analysis/simplifyrange/doc.go b/gopls/internal/analysis/simplifyrange/doc.go new file mode 100644 index 00000000000..f55ed56b35b --- /dev/null +++ b/gopls/internal/analysis/simplifyrange/doc.go @@ -0,0 +1,30 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package simplifyrange defines an Analyzer that simplifies range statements. +// https://golang.org/cmd/gofmt/#hdr-The_simplify_command +// https://github.com/golang/go/blob/master/src/cmd/gofmt/simplify.go +// +// # Analyzer simplifyrange +// +// simplifyrange: check for range statement simplifications +// +// A range of the form: +// +// for x, _ = range v {...} +// +// will be simplified to: +// +// for x = range v {...} +// +// A range of the form: +// +// for _ = range v {...} +// +// will be simplified to: +// +// for range v {...} +// +// This is one of the simplifications that "gofmt -s" applies. +package simplifyrange diff --git a/gopls/internal/lsp/analysis/simplifyrange/simplifyrange.go b/gopls/internal/analysis/simplifyrange/simplifyrange.go similarity index 81% rename from gopls/internal/lsp/analysis/simplifyrange/simplifyrange.go rename to gopls/internal/analysis/simplifyrange/simplifyrange.go index c9cb3879863..364728d4c41 100644 --- a/gopls/internal/lsp/analysis/simplifyrange/simplifyrange.go +++ b/gopls/internal/analysis/simplifyrange/simplifyrange.go @@ -2,13 +2,11 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package simplifyrange defines an Analyzer that simplifies range statements. 
-// https://golang.org/cmd/gofmt/#hdr-The_simplify_command -// https://github.com/golang/go/blob/master/src/cmd/gofmt/simplify.go package simplifyrange import ( "bytes" + _ "embed" "go/ast" "go/printer" "go/token" @@ -16,27 +14,18 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" ) -const Doc = `check for range statement simplifications - -A range of the form: - for x, _ = range v {...} -will be simplified to: - for x = range v {...} - -A range of the form: - for _ = range v {...} -will be simplified to: - for range v {...} - -This is one of the simplifications that "gofmt -s" applies.` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "simplifyrange", - Doc: Doc, + Doc: analysisinternal.MustExtractDoc(doc, "simplifyrange"), Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifyrange", } func run(pass *analysis.Pass) (interface{}, error) { diff --git a/gopls/internal/lsp/analysis/simplifyrange/simplifyrange_test.go b/gopls/internal/analysis/simplifyrange/simplifyrange_test.go similarity index 86% rename from gopls/internal/lsp/analysis/simplifyrange/simplifyrange_test.go rename to gopls/internal/analysis/simplifyrange/simplifyrange_test.go index fbd57ec2d65..fab1bd5a202 100644 --- a/gopls/internal/lsp/analysis/simplifyrange/simplifyrange_test.go +++ b/gopls/internal/analysis/simplifyrange/simplifyrange_test.go @@ -8,7 +8,7 @@ import ( "testing" "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/lsp/analysis/simplifyrange" + "golang.org/x/tools/gopls/internal/analysis/simplifyrange" ) func Test(t *testing.T) { diff --git a/gopls/internal/lsp/analysis/simplifyrange/testdata/src/a/a.go b/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go similarity index 100% rename from 
gopls/internal/lsp/analysis/simplifyrange/testdata/src/a/a.go rename to gopls/internal/analysis/simplifyrange/testdata/src/a/a.go diff --git a/gopls/internal/lsp/analysis/simplifyrange/testdata/src/a/a.go.golden b/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go.golden similarity index 100% rename from gopls/internal/lsp/analysis/simplifyrange/testdata/src/a/a.go.golden rename to gopls/internal/analysis/simplifyrange/testdata/src/a/a.go.golden diff --git a/gopls/internal/analysis/simplifyslice/doc.go b/gopls/internal/analysis/simplifyslice/doc.go new file mode 100644 index 00000000000..2fb4c461054 --- /dev/null +++ b/gopls/internal/analysis/simplifyslice/doc.go @@ -0,0 +1,22 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package simplifyslice defines an Analyzer that simplifies slice statements. +// https://github.com/golang/go/blob/master/src/cmd/gofmt/simplify.go +// https://golang.org/cmd/gofmt/#hdr-The_simplify_command +// +// # Analyzer simplifyslice +// +// simplifyslice: check for slice simplifications +// +// A slice expression of the form: +// +// s[a:len(s)] +// +// will be simplified to: +// +// s[a:] +// +// This is one of the simplifications that "gofmt -s" applies. +package simplifyslice diff --git a/gopls/internal/lsp/analysis/simplifyslice/simplifyslice.go b/gopls/internal/analysis/simplifyslice/simplifyslice.go similarity index 83% rename from gopls/internal/lsp/analysis/simplifyslice/simplifyslice.go rename to gopls/internal/analysis/simplifyslice/simplifyslice.go index da1728e6fb2..0c7cc3ff284 100644 --- a/gopls/internal/lsp/analysis/simplifyslice/simplifyslice.go +++ b/gopls/internal/analysis/simplifyslice/simplifyslice.go @@ -1,14 +1,12 @@ -// Copyright 2020 The Go Authors. All rights reserved. +// Copyright 2023 The Go Authors. All rights reserved. 
// Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package simplifyslice defines an Analyzer that simplifies slice statements. -// https://github.com/golang/go/blob/master/src/cmd/gofmt/simplify.go -// https://golang.org/cmd/gofmt/#hdr-The_simplify_command package simplifyslice import ( "bytes" + _ "embed" "fmt" "go/ast" "go/printer" @@ -16,22 +14,18 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" ) -const Doc = `check for slice simplifications - -A slice expression of the form: - s[a:len(s)] -will be simplified to: - s[a:] - -This is one of the simplifications that "gofmt -s" applies.` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "simplifyslice", - Doc: Doc, + Doc: analysisinternal.MustExtractDoc(doc, "simplifyslice"), Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifyslice", } // Note: We could also simplify slice expressions of the form s[0:b] to s[:b] diff --git a/gopls/internal/analysis/simplifyslice/simplifyslice_test.go b/gopls/internal/analysis/simplifyslice/simplifyslice_test.go new file mode 100644 index 00000000000..969161e3c90 --- /dev/null +++ b/gopls/internal/analysis/simplifyslice/simplifyslice_test.go @@ -0,0 +1,17 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package simplifyslice_test + +import ( + "testing" + + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/gopls/internal/analysis/simplifyslice" +) + +func Test(t *testing.T) { + testdata := analysistest.TestData() + analysistest.RunWithSuggestedFixes(t, testdata, simplifyslice.Analyzer, "a", "typeparams") +} diff --git a/gopls/internal/lsp/analysis/simplifyslice/testdata/src/a/a.go b/gopls/internal/analysis/simplifyslice/testdata/src/a/a.go similarity index 100% rename from gopls/internal/lsp/analysis/simplifyslice/testdata/src/a/a.go rename to gopls/internal/analysis/simplifyslice/testdata/src/a/a.go diff --git a/gopls/internal/lsp/analysis/simplifyslice/testdata/src/a/a.go.golden b/gopls/internal/analysis/simplifyslice/testdata/src/a/a.go.golden similarity index 100% rename from gopls/internal/lsp/analysis/simplifyslice/testdata/src/a/a.go.golden rename to gopls/internal/analysis/simplifyslice/testdata/src/a/a.go.golden diff --git a/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go b/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go new file mode 100644 index 00000000000..a1a29d42deb --- /dev/null +++ b/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go @@ -0,0 +1,36 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testdata + +type List[E any] []E + +// TODO(suzmue): add a test for generic slice expressions when https://github.com/golang/go/issues/48618 is closed. 
+// type S interface{ ~[]int } + +var ( + a [10]byte + b [20]float32 + p List[int] + + _ = p[0:] + _ = p[1:10] + _ = p[2:len(p)] // want "unneeded: len\\(p\\)" + _ = p[3:(len(p))] + _ = p[len(a) : len(p)-1] + _ = p[0:len(b)] + _ = p[2:len(p):len(p)] + + _ = p[:] + _ = p[:10] + _ = p[:len(p)] // want "unneeded: len\\(p\\)" + _ = p[:(len(p))] + _ = p[:len(p)-1] + _ = p[:len(b)] + _ = p[:len(p):len(p)] +) + +func foo[E any](a List[E]) { + _ = a[0:len(a)] // want "unneeded: len\\(a\\)" +} diff --git a/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go.golden b/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go.golden new file mode 100644 index 00000000000..ce425b72276 --- /dev/null +++ b/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go.golden @@ -0,0 +1,36 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testdata + +type List[E any] []E + +// TODO(suzmue): add a test for generic slice expressions when https://github.com/golang/go/issues/48618 is closed. +// type S interface{ ~[]int } + +var ( + a [10]byte + b [20]float32 + p List[int] + + _ = p[0:] + _ = p[1:10] + _ = p[2:] // want "unneeded: len\\(p\\)" + _ = p[3:(len(p))] + _ = p[len(a) : len(p)-1] + _ = p[0:len(b)] + _ = p[2:len(p):len(p)] + + _ = p[:] + _ = p[:10] + _ = p[:] // want "unneeded: len\\(p\\)" + _ = p[:(len(p))] + _ = p[:len(p)-1] + _ = p[:len(b)] + _ = p[:len(p):len(p)] +) + +func foo[E any](a List[E]) { + _ = a[0:] // want "unneeded: len\\(a\\)" +} diff --git a/gopls/internal/analysis/stubmethods/doc.go b/gopls/internal/analysis/stubmethods/doc.go new file mode 100644 index 00000000000..e1383cfc7e7 --- /dev/null +++ b/gopls/internal/analysis/stubmethods/doc.go @@ -0,0 +1,38 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package stubmethods defines a code action for missing interface methods. +// +// # Analyzer stubmethods +// +// stubmethods: detect missing methods and fix with stub implementations +// +// This analyzer detects type-checking errors due to missing methods +// in assignments from concrete types to interface types, and offers +// a suggested fix that will create a set of stub methods so that +// the concrete type satisfies the interface. +// +// For example, this function will not compile because the value +// NegativeErr{} does not implement the "error" interface: +// +// func sqrt(x float64) (float64, error) { +// if x < 0 { +// return 0, NegativeErr{} // error: missing method +// } +// ... +// } +// +// type NegativeErr struct{} +// +// This analyzer will suggest a fix to declare this method: +// +// // Error implements error.Error. +// func (NegativeErr) Error() string { +// panic("unimplemented") +// } +// +// (At least, it appears to behave that way, but technically it +// doesn't use the SuggestedFix mechanism and the stub is created by +// logic in gopls's golang.stub function.) +package stubmethods diff --git a/gopls/internal/analysis/stubmethods/stubmethods.go b/gopls/internal/analysis/stubmethods/stubmethods.go new file mode 100644 index 00000000000..f4c30aadd7d --- /dev/null +++ b/gopls/internal/analysis/stubmethods/stubmethods.go @@ -0,0 +1,403 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package stubmethods + +import ( + "bytes" + _ "embed" + "fmt" + "go/ast" + "go/format" + "go/token" + "go/types" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/util/typesutil" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/typesinternal" +) + +//go:embed doc.go +var doc string + +var Analyzer = &analysis.Analyzer{ + Name: "stubmethods", + Doc: analysisinternal.MustExtractDoc(doc, "stubmethods"), + Run: run, + RunDespiteErrors: true, + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/stubmethods", +} + +// TODO(rfindley): remove this thin wrapper around the stubmethods refactoring, +// and eliminate the stubmethods analyzer. +// +// Previous iterations used the analysis framework for computing refactorings, +// which proved inefficient. +func run(pass *analysis.Pass) (interface{}, error) { + for _, err := range pass.TypeErrors { + var file *ast.File + for _, f := range pass.Files { + if f.Pos() <= err.Pos && err.Pos < f.End() { + file = f + break + } + } + // Get the end position of the error. + _, _, end, ok := typesinternal.ReadGo116ErrorData(err) + if !ok { + var buf bytes.Buffer + if err := format.Node(&buf, pass.Fset, file); err != nil { + continue + } + end = analysisinternal.TypeErrorEndPos(pass.Fset, buf.Bytes(), err.Pos) + } + if diag, ok := DiagnosticForError(pass.Fset, file, err.Pos, end, err.Msg, pass.TypesInfo); ok { + pass.Report(diag) + } + } + + return nil, nil +} + +// MatchesMessage reports whether msg matches the error message sought after by +// the stubmethods fix. 
+func MatchesMessage(msg string) bool { + return strings.Contains(msg, "missing method") || strings.HasPrefix(msg, "cannot convert") || strings.Contains(msg, "not implement") +} + +// DiagnosticForError computes a diagnostic suggesting to implement an +// interface to fix the type checking error defined by (start, end, msg). +// +// If no such fix is possible, the second result is false. +func DiagnosticForError(fset *token.FileSet, file *ast.File, start, end token.Pos, msg string, info *types.Info) (analysis.Diagnostic, bool) { + if !MatchesMessage(msg) { + return analysis.Diagnostic{}, false + } + + path, _ := astutil.PathEnclosingInterval(file, start, end) + si := GetStubInfo(fset, info, path, start) + if si == nil { + return analysis.Diagnostic{}, false + } + qf := typesutil.FileQualifier(file, si.Concrete.Obj().Pkg(), info) + iface := types.TypeString(si.Interface.Type(), qf) + return analysis.Diagnostic{ + Pos: start, + End: end, + Message: msg, + Category: FixCategory, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Declare missing methods of %s", iface), + // No TextEdits => computed later by gopls. + }}, + }, true +} + +const FixCategory = "stubmethods" // recognized by gopls ApplyFix + +// StubInfo represents a concrete type +// that wants to stub out an interface type +type StubInfo struct { + // Interface is the interface that the client wants to implement. + // When the interface is defined, the underlying object will be a TypeName. + // Note that we keep track of types.Object instead of types.Type in order + // to keep a reference to the declaring object's package and the ast file + // in the case where the concrete type file requires a new import that happens to be renamed + // in the interface file. + // TODO(marwan-at-work): implement interface literals. 
+ Fset *token.FileSet // the FileSet used to type-check the types below + Interface *types.TypeName + Concrete *types.Named + Pointer bool +} + +// GetStubInfo determines whether the "missing method error" +// can be used to deduce what the concrete and interface types are. +// +// TODO(adonovan): this function (and its following 5 helpers) tries +// to deduce a pair of (concrete, interface) types that are related by +// an assignment, either explicitly or through a return statement or +// function call. This is essentially what the refactor/satisfy does, +// more generally. Refactor to share logic, after auditing 'satisfy' +// for safety on ill-typed code. +func GetStubInfo(fset *token.FileSet, info *types.Info, path []ast.Node, pos token.Pos) *StubInfo { + for _, n := range path { + switch n := n.(type) { + case *ast.ValueSpec: + return fromValueSpec(fset, info, n, pos) + case *ast.ReturnStmt: + // An error here may not indicate a real error the user should know about, but it may. + // Therefore, it would be best to log it out for debugging/reporting purposes instead of ignoring + // it. However, event.Log takes a context which is not passed via the analysis package. + // TODO(marwan-at-work): properly log this error. + si, _ := fromReturnStmt(fset, info, pos, path, n) + return si + case *ast.AssignStmt: + return fromAssignStmt(fset, info, n, pos) + case *ast.CallExpr: + // Note that some call expressions don't carry the interface type + // because they don't point to a function or method declaration elsewhere. + // For example, "var Interface = (*Concrete)(nil)". In that case, continue + // this loop to encounter other possibilities such as *ast.ValueSpec or others. + si := fromCallExpr(fset, info, pos, n) + if si != nil { + return si + } + } + } + return nil +} + +// fromCallExpr tries to find an *ast.CallExpr's function declaration and +// analyzes a function call's signature against the passed in parameter to deduce +// the concrete and interface types. 
+func fromCallExpr(fset *token.FileSet, info *types.Info, pos token.Pos, call *ast.CallExpr) *StubInfo { + // Find argument containing pos. + argIdx := -1 + var arg ast.Expr + for i, callArg := range call.Args { + if callArg.Pos() <= pos && pos <= callArg.End() { + argIdx = i + arg = callArg + break + } + } + if arg == nil { + return nil + } + + concType, pointer := concreteType(arg, info) + if concType == nil || concType.Obj().Pkg() == nil { + return nil + } + tv, ok := info.Types[call.Fun] + if !ok { + return nil + } + sig, ok := aliases.Unalias(tv.Type).(*types.Signature) + if !ok { + return nil + } + var paramType types.Type + if sig.Variadic() && argIdx >= sig.Params().Len()-1 { + v := sig.Params().At(sig.Params().Len() - 1) + if s, _ := v.Type().(*types.Slice); s != nil { + paramType = s.Elem() + } + } else if argIdx < sig.Params().Len() { + paramType = sig.Params().At(argIdx).Type() + } + if paramType == nil { + return nil // A type error prevents us from determining the param type. + } + iface := ifaceObjFromType(paramType) + if iface == nil { + return nil + } + return &StubInfo{ + Fset: fset, + Concrete: concType, + Pointer: pointer, + Interface: iface, + } +} + +// fromReturnStmt analyzes a "return" statement to extract +// a concrete type that is trying to be returned as an interface type. +// +// For example, func() io.Writer { return myType{} } +// would return StubInfo with the interface being io.Writer and the concrete type being myType{}. +func fromReturnStmt(fset *token.FileSet, info *types.Info, pos token.Pos, path []ast.Node, ret *ast.ReturnStmt) (*StubInfo, error) { + // Find return operand containing pos. 
+ returnIdx := -1 + for i, r := range ret.Results { + if r.Pos() <= pos && pos <= r.End() { + returnIdx = i + break + } + } + if returnIdx == -1 { + return nil, fmt.Errorf("pos %d not within return statement bounds: [%d-%d]", pos, ret.Pos(), ret.End()) + } + + concType, pointer := concreteType(ret.Results[returnIdx], info) + if concType == nil || concType.Obj().Pkg() == nil { + return nil, nil + } + funcType := enclosingFunction(path, info) + if funcType == nil { + return nil, fmt.Errorf("could not find the enclosing function of the return statement") + } + if len(funcType.Results.List) != len(ret.Results) { + return nil, fmt.Errorf("%d-operand return statement in %d-result function", + len(ret.Results), + len(funcType.Results.List)) + } + iface := ifaceType(funcType.Results.List[returnIdx].Type, info) + if iface == nil { + return nil, nil + } + return &StubInfo{ + Fset: fset, + Concrete: concType, + Pointer: pointer, + Interface: iface, + }, nil +} + +// fromValueSpec returns *StubInfo from a variable declaration such as +// var x io.Writer = &T{} +func fromValueSpec(fset *token.FileSet, info *types.Info, spec *ast.ValueSpec, pos token.Pos) *StubInfo { + // Find RHS element containing pos. + var rhs ast.Expr + for _, r := range spec.Values { + if r.Pos() <= pos && pos <= r.End() { + rhs = r + break + } + } + if rhs == nil { + return nil // e.g. pos was on the LHS (#64545) + } + + // Possible implicit/explicit conversion to interface type? + ifaceNode := spec.Type // var _ myInterface = ... 
+ if call, ok := rhs.(*ast.CallExpr); ok && ifaceNode == nil && len(call.Args) == 1 { + // var _ = myInterface(v) + ifaceNode = call.Fun + rhs = call.Args[0] + } + concType, pointer := concreteType(rhs, info) + if concType == nil || concType.Obj().Pkg() == nil { + return nil + } + ifaceObj := ifaceType(ifaceNode, info) + if ifaceObj == nil { + return nil + } + return &StubInfo{ + Fset: fset, + Concrete: concType, + Interface: ifaceObj, + Pointer: pointer, + } +} + +// fromAssignStmt returns *StubInfo from a variable assignment such as +// var x io.Writer +// x = &T{} +func fromAssignStmt(fset *token.FileSet, info *types.Info, assign *ast.AssignStmt, pos token.Pos) *StubInfo { + // The interface conversion error in an assignment is against the RHS: + // + // var x io.Writer + // x = &T{} // error: missing method + // ^^^^ + // + // Find RHS element containing pos. + var lhs, rhs ast.Expr + for i, r := range assign.Rhs { + if r.Pos() <= pos && pos <= r.End() { + if i >= len(assign.Lhs) { + // This should never happen as we would get a + // "cannot assign N values to M variables" + // before we get an interface conversion error. + // But be defensive. + return nil + } + lhs = assign.Lhs[i] + rhs = r + break + } + } + if lhs == nil || rhs == nil { + return nil + } + + ifaceObj := ifaceType(lhs, info) + if ifaceObj == nil { + return nil + } + concType, pointer := concreteType(rhs, info) + if concType == nil || concType.Obj().Pkg() == nil { + return nil + } + return &StubInfo{ + Fset: fset, + Concrete: concType, + Interface: ifaceObj, + Pointer: pointer, + } +} + +// ifaceType returns the named interface type to which e refers, if any. 
+func ifaceType(e ast.Expr, info *types.Info) *types.TypeName { + tv, ok := info.Types[e] + if !ok { + return nil + } + return ifaceObjFromType(tv.Type) +} + +func ifaceObjFromType(t types.Type) *types.TypeName { + named, ok := aliases.Unalias(t).(*types.Named) + if !ok { + return nil + } + if !types.IsInterface(named) { + return nil + } + // Interfaces defined in the "builtin" package return a nil Pkg(). + // But they are still real interfaces that we need to make a special case for. + // Therefore, protect gopls from panicking if a new interface type was added in the future. + if named.Obj().Pkg() == nil && named.Obj().Name() != "error" { + return nil + } + return named.Obj() +} + +// concreteType tries to extract the *types.Named that defines +// the concrete type given the ast.Expr where the "missing method" +// or "conversion" errors happened. If the concrete type is something +// that cannot have methods defined on it (such as basic types), this +// method will return a nil *types.Named. The second return parameter +// is a boolean that indicates whether the concreteType was defined as a +// pointer or value. +func concreteType(e ast.Expr, info *types.Info) (*types.Named, bool) { + tv, ok := info.Types[e] + if !ok { + return nil, false + } + typ := tv.Type + ptr, isPtr := aliases.Unalias(typ).(*types.Pointer) + if isPtr { + typ = ptr.Elem() + } + named, ok := aliases.Unalias(typ).(*types.Named) + if !ok { + return nil, false + } + return named, isPtr +} + +// enclosingFunction returns the signature and type of the function +// enclosing the given position. 
// enclosingFunction returns the type (signature AST) of the innermost
// function declaration or literal on the given path, or nil if the path
// is not inside a function. (The path is expected to be ordered
// innermost-first, as produced by astutil.PathEnclosingInterval.)
func enclosingFunction(path []ast.Node, info *types.Info) *ast.FuncType {
	for _, node := range path {
		switch t := node.(type) {
		case *ast.FuncDecl:
			// Only trust declarations the type checker resolved.
			if _, ok := info.Defs[t.Name]; ok {
				return t.Type
			}
		case *ast.FuncLit:
			if _, ok := info.Types[t]; ok {
				return t.Type
			}
		}
	}
	return nil
}
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package undeclaredname defines an Analyzer that applies suggested fixes +// to errors of the type "undeclared name: %s". +// +// # Analyzer undeclaredname +// +// undeclaredname: suggested fixes for "undeclared name: <>" +// +// This checker provides suggested fixes for type errors of the +// type "undeclared name: <>". It will either insert a new statement, +// such as: +// +// <> := +// +// or a new function declaration, such as: +// +// func <>(inferred parameters) { +// panic("implement me!") +// } +package undeclaredname diff --git a/gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/a.go b/gopls/internal/analysis/undeclaredname/testdata/src/a/a.go similarity index 100% rename from gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/a.go rename to gopls/internal/analysis/undeclaredname/testdata/src/a/a.go diff --git a/gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/channels.go b/gopls/internal/analysis/undeclaredname/testdata/src/a/channels.go similarity index 100% rename from gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/channels.go rename to gopls/internal/analysis/undeclaredname/testdata/src/a/channels.go diff --git a/gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/consecutive_params.go b/gopls/internal/analysis/undeclaredname/testdata/src/a/consecutive_params.go similarity index 100% rename from gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/consecutive_params.go rename to gopls/internal/analysis/undeclaredname/testdata/src/a/consecutive_params.go diff --git a/gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/error_param.go b/gopls/internal/analysis/undeclaredname/testdata/src/a/error_param.go similarity index 100% rename from gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/error_param.go rename to gopls/internal/analysis/undeclaredname/testdata/src/a/error_param.go diff 
--git a/gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/literals.go b/gopls/internal/analysis/undeclaredname/testdata/src/a/literals.go similarity index 100% rename from gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/literals.go rename to gopls/internal/analysis/undeclaredname/testdata/src/a/literals.go diff --git a/gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/operation.go b/gopls/internal/analysis/undeclaredname/testdata/src/a/operation.go similarity index 100% rename from gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/operation.go rename to gopls/internal/analysis/undeclaredname/testdata/src/a/operation.go diff --git a/gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/selector.go b/gopls/internal/analysis/undeclaredname/testdata/src/a/selector.go similarity index 100% rename from gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/selector.go rename to gopls/internal/analysis/undeclaredname/testdata/src/a/selector.go diff --git a/gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/slice.go b/gopls/internal/analysis/undeclaredname/testdata/src/a/slice.go similarity index 100% rename from gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/slice.go rename to gopls/internal/analysis/undeclaredname/testdata/src/a/slice.go diff --git a/gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/tuple.go b/gopls/internal/analysis/undeclaredname/testdata/src/a/tuple.go similarity index 100% rename from gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/tuple.go rename to gopls/internal/analysis/undeclaredname/testdata/src/a/tuple.go diff --git a/gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/unique_params.go b/gopls/internal/analysis/undeclaredname/testdata/src/a/unique_params.go similarity index 100% rename from gopls/internal/lsp/analysis/undeclaredname/testdata/src/a/unique_params.go rename to gopls/internal/analysis/undeclaredname/testdata/src/a/unique_params.go diff --git 
//go:embed doc.go
var doc string

// Analyzer offers (lazy) suggested fixes for "undeclared name" /
// "undefined" type errors. RunDespiteErrors is required since the
// analyzer exists precisely to act on ill-typed packages.
var Analyzer = &analysis.Analyzer{
	Name:             "undeclaredname",
	Doc:              analysisinternal.MustExtractDoc(doc, "undeclaredname"),
	Requires:         []*analysis.Analyzer{},
	Run:              run,
	RunDespiteErrors: true,
	URL:              "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/undeclaredname",
}

// The prefix for this error message changed in Go 1.20.
var undeclaredNamePrefixes = []string{"undeclared name: ", "undefined: "}

func run(pass *analysis.Pass) (interface{}, error) {
	for _, err := range pass.TypeErrors {
		runForError(pass, err)
	}
	return nil, nil
}

// runForError reports a diagnostic (with a lazy, no-edit fix) for a
// single "undeclared name"/"undefined" type error, if it is one this
// analyzer can handle.
func runForError(pass *analysis.Pass, err types.Error) {
	// Extract symbol name from error.
	var name string
	for _, prefix := range undeclaredNamePrefixes {
		if !strings.HasPrefix(err.Msg, prefix) {
			continue
		}
		name = strings.TrimPrefix(err.Msg, prefix)
	}
	if name == "" {
		return
	}

	// Find file enclosing error.
	var file *ast.File
	for _, f := range pass.Files {
		if f.Pos() <= err.Pos && err.Pos < f.End() {
			file = f
			break
		}
	}
	if file == nil {
		return
	}

	// Find path to identifier in the error.
	path, _ := astutil.PathEnclosingInterval(file, err.Pos, err.Pos)
	if len(path) < 2 {
		return
	}
	ident, ok := path[0].(*ast.Ident)
	if !ok || ident.Name != name {
		return
	}

	// Skip selector expressions because it might be too complex
	// to try and provide a suggested fix for fields and methods.
	if _, ok := path[1].(*ast.SelectorExpr); ok {
		return
	}

	// Undeclared quick fixes only work in function bodies: the
	// identifier must be directly inside the BlockStmt of a FuncDecl.
	inFunc := false
	for i := range path {
		if _, inFunc = path[i].(*ast.FuncDecl); inFunc {
			if i == 0 {
				return
			}
			if _, isBody := path[i-1].(*ast.BlockStmt); !isBody {
				return
			}
			break
		}
	}
	if !inFunc {
		return
	}

	// Offer a fix.
	noun := "variable"
	if isCallPosition(path) {
		noun = "function"
	}
	pass.Report(analysis.Diagnostic{
		Pos:      err.Pos,
		End:      err.Pos + token.Pos(len(name)),
		Message:  err.Msg,
		Category: FixCategory,
		SuggestedFixes: []analysis.SuggestedFix{{
			Message: fmt.Sprintf("Create %s %q", noun, name),
			// No TextEdits => computed by a gopls command
		}},
	})
}

const FixCategory = "undeclaredname" // recognized by gopls ApplyFix

// SuggestedFix computes the edits for the lazy (no-edits) fix suggested by the analyzer.
func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) {
	pos := start // don't use the end
	path, _ := astutil.PathEnclosingInterval(file, pos, pos)
	if len(path) < 2 {
		return nil, nil, fmt.Errorf("no expression found")
	}
	ident, ok := path[0].(*ast.Ident)
	if !ok {
		return nil, nil, fmt.Errorf("no identifier found")
	}

	// Check for a possible call expression, in which case we should add a
	// new function declaration.
	if isCallPosition(path) {
		return newFunctionDeclaration(path, file, pkg, info, fset)
	}

	// Get the place to insert the new statement.
	insertBeforeStmt := analysisinternal.StmtToInsertVarBefore(path)
	if insertBeforeStmt == nil {
		return nil, nil, fmt.Errorf("could not locate insertion point")
	}

	insertBefore := safetoken.StartPosition(fset, insertBeforeStmt.Pos()).Offset

	// Get the indent to add on the line after the new statement.
	// Since this will have a parse error, we can not use format.Source().
	contentBeforeStmt, indent := content[:insertBefore], "\n"
	if nl := bytes.LastIndex(contentBeforeStmt, []byte("\n")); nl != -1 {
		indent = string(contentBeforeStmt[nl:])
	}

	// Create the new local variable statement: "name := " followed by a
	// newline+indent, so the original statement moves down one line and
	// the cursor lands after ":= " awaiting a value.
	newStmt := fmt.Sprintf("%s := %s", ident.Name, indent)
	return fset, &analysis.SuggestedFix{
		Message: fmt.Sprintf("Create variable %q", ident.Name),
		TextEdits: []analysis.TextEdit{{
			Pos:     insertBeforeStmt.Pos(),
			End:     insertBeforeStmt.Pos(),
			NewText: []byte(newStmt),
		}},
	}, nil
}

// newFunctionDeclaration synthesizes a new function declaration for the
// undeclared name in call position at path[0], inferring parameter names
// and types from the call's arguments, and returns a fix that inserts it
// after the enclosing function.
func newFunctionDeclaration(path []ast.Node, file *ast.File, pkg *types.Package, info *types.Info, fset *token.FileSet) (*token.FileSet, *analysis.SuggestedFix, error) {
	if len(path) < 3 {
		return nil, nil, fmt.Errorf("unexpected set of enclosing nodes: %v", path)
	}
	ident, ok := path[0].(*ast.Ident)
	if !ok {
		return nil, nil, fmt.Errorf("no name for function declaration %v (%T)", path[0], path[0])
	}
	call, ok := path[1].(*ast.CallExpr)
	if !ok {
		return nil, nil, fmt.Errorf("no call expression found %v (%T)", path[1], path[1])
	}

	// Find the enclosing function, so that we can add the new declaration
	// below.
	var enclosing *ast.FuncDecl
	for _, n := range path {
		if n, ok := n.(*ast.FuncDecl); ok {
			enclosing = n
			break
		}
	}
	// TODO(rstambler): Support the situation when there is no enclosing
	// function.
	if enclosing == nil {
		return nil, nil, fmt.Errorf("no enclosing function found: %v", path)
	}

	pos := enclosing.End()

	var paramNames []string
	var paramTypes []types.Type
	// keep track of all param names to later ensure uniqueness
	nameCounts := map[string]int{}
	for _, arg := range call.Args {
		typ := info.TypeOf(arg)
		if typ == nil {
			return nil, nil, fmt.Errorf("unable to determine type for %s", arg)
		}

		switch t := typ.(type) {
		// this is the case where another function call returning multiple
		// results is used as an argument
		case *types.Tuple:
			n := t.Len()
			for i := 0; i < n; i++ {
				name := typeToArgName(t.At(i).Type())
				nameCounts[name]++

				paramNames = append(paramNames, name)
				paramTypes = append(paramTypes, types.Default(t.At(i).Type()))
			}

		default:
			// does the argument have a name we can reuse?
			// only happens in case of a *ast.Ident
			var name string
			if ident, ok := arg.(*ast.Ident); ok {
				name = ident.Name
			}

			if name == "" {
				name = typeToArgName(typ)
			}

			nameCounts[name]++

			paramNames = append(paramNames, name)
			paramTypes = append(paramTypes, types.Default(typ))
		}
	}

	for n, c := range nameCounts {
		// Any names we saw more than once will need a unique suffix added
		// on. Reset the count to 1 to act as the suffix for the first
		// occurrence of that name.
		if c >= 2 {
			nameCounts[n] = 1
		} else {
			delete(nameCounts, n)
		}
	}

	params := &ast.FieldList{}

	for i, name := range paramNames {
		if suffix, repeats := nameCounts[name]; repeats {
			nameCounts[name]++
			name = fmt.Sprintf("%s%d", name, suffix)
		}

		// only worth checking after previous param in the list
		if i > 0 {
			// if type of parameter at hand is the same as the previous one,
			// add it to the previous param list of identifiers so to have:
			//  (s1, s2 string)
			// and not
			//  (s1 string, s2 string)
			if paramTypes[i] == paramTypes[i-1] {
				params.List[len(params.List)-1].Names = append(params.List[len(params.List)-1].Names, ast.NewIdent(name))
				continue
			}
		}

		params.List = append(params.List, &ast.Field{
			Names: []*ast.Ident{
				ast.NewIdent(name),
			},
			Type: analysisinternal.TypeExpr(file, pkg, paramTypes[i]),
		})
	}

	decl := &ast.FuncDecl{
		Name: ast.NewIdent(ident.Name),
		Type: &ast.FuncType{
			Params: params,
			// TODO(golang/go#47558): Also handle result
			// parameters here based on context of CallExpr.
		},
		Body: &ast.BlockStmt{
			List: []ast.Stmt{
				&ast.ExprStmt{
					X: &ast.CallExpr{
						Fun: ast.NewIdent("panic"),
						Args: []ast.Expr{
							&ast.BasicLit{
								Value: `"unimplemented"`,
							},
						},
					},
				},
			},
		},
	}

	b := bytes.NewBufferString("\n\n")
	if err := format.Node(b, fset, decl); err != nil {
		return nil, nil, err
	}
	return fset, &analysis.SuggestedFix{
		Message: fmt.Sprintf("Create function %q", ident.Name),
		TextEdits: []analysis.TextEdit{{
			Pos:     pos,
			End:     pos,
			NewText: b.Bytes(),
		}},
	}, nil
}

// typeToArgName chooses a plausible parameter name for a value of type ty.
func typeToArgName(ty types.Type) string {
	s := types.Default(ty).String()

	switch t := aliases.Unalias(ty).(type) {
	case *types.Basic:
		// use first letter in type name for basic types
		return s[0:1]
	case *types.Slice:
		// use element type to decide var name for slices
		return typeToArgName(t.Elem())
	case *types.Array:
		// use element type to decide var name for arrays
		return typeToArgName(t.Elem())
	case *types.Chan:
		return "ch"
	}

	s = strings.TrimFunc(s, func(r rune) bool {
		return !unicode.IsLetter(r)
	})

	if s == "error" {
		return "err"
	}

	// remove package (if present)
	// and make first letter lowercase
	a := []rune(s[strings.LastIndexByte(s, '.')+1:])
	a[0] = unicode.ToLower(a[0])
	return string(a)
}

// isCallPosition reports whether the path denotes the subtree in call position, f().
func isCallPosition(path []ast.Node) bool {
	return len(path) > 1 &&
		is[*ast.CallExpr](path[1]) &&
		path[1].(*ast.CallExpr).Fun == path[0]
}

// is reports whether x's dynamic type is T.
func is[T any](x any) bool {
	_, ok := x.(T)
	return ok
}
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package undeclaredname_test + +import ( + "testing" + + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/gopls/internal/analysis/undeclaredname" +) + +func Test(t *testing.T) { + testdata := analysistest.TestData() + analysistest.Run(t, testdata, undeclaredname.Analyzer, "a") +} diff --git a/gopls/internal/analysis/unusedparams/cmd/main.go b/gopls/internal/analysis/unusedparams/cmd/main.go new file mode 100644 index 00000000000..2f35fb06083 --- /dev/null +++ b/gopls/internal/analysis/unusedparams/cmd/main.go @@ -0,0 +1,13 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The unusedparams command runs the unusedparams analyzer. +package main + +import ( + "golang.org/x/tools/go/analysis/singlechecker" + "golang.org/x/tools/gopls/internal/analysis/unusedparams" +) + +func main() { singlechecker.Main(unusedparams.Analyzer) } diff --git a/gopls/internal/analysis/unusedparams/doc.go b/gopls/internal/analysis/unusedparams/doc.go new file mode 100644 index 00000000000..07e43c0d084 --- /dev/null +++ b/gopls/internal/analysis/unusedparams/doc.go @@ -0,0 +1,34 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package unusedparams defines an analyzer that checks for unused +// parameters of functions. +// +// # Analyzer unusedparams +// +// unusedparams: check for unused parameters of functions +// +// The unusedparams analyzer checks functions to see if there are +// any parameters that are not being used. 
+// +// To ensure soundness, it ignores: +// - "address-taken" functions, that is, functions that are used as +// a value rather than being called directly; their signatures may +// be required to conform to a func type. +// - exported functions or methods, since they may be address-taken +// in another package. +// - unexported methods whose name matches an interface method +// declared in the same package, since the method's signature +// may be required to conform to the interface type. +// - functions with empty bodies, or containing just a call to panic. +// - parameters that are unnamed, or named "_", the blank identifier. +// +// The analyzer suggests a fix of replacing the parameter name by "_", +// but in such cases a deeper fix can be obtained by invoking the +// "Refactor: remove unused parameter" code action, which will +// eliminate the parameter entirely, along with all corresponding +// arguments at call sites, while taking care to preserve any side +// effects in the argument expressions; see +// https://github.com/golang/tools/releases/tag/gopls%2Fv0.14. +package unusedparams diff --git a/gopls/internal/analysis/unusedparams/testdata/src/a/a.go b/gopls/internal/analysis/unusedparams/testdata/src/a/a.go new file mode 100644 index 00000000000..3661e1f3cbe --- /dev/null +++ b/gopls/internal/analysis/unusedparams/testdata/src/a/a.go @@ -0,0 +1,87 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package a + +import ( + "bytes" + "fmt" + "net/http" +) + +type parent interface { + n(f bool) +} + +type yuh struct { + a int +} + +func (y *yuh) n(f bool) { + for i := 0; i < 10; i++ { + fmt.Println(i) + } +} + +func a(i1 int, i2 int, i3 int) int { // want "unused parameter: i2" + i3 += i1 + _ = func(z int) int { // want "unused parameter: z" + _ = 1 + return 1 + } + return i3 +} + +func b(c bytes.Buffer) { // want "unused parameter: c" + _ = 1 +} + +func z(h http.ResponseWriter, _ *http.Request) { // no report: func z is address-taken + fmt.Println("Before") +} + +func l(h http.Handler) http.Handler { // want "unused parameter: h" + return http.HandlerFunc(z) +} + +func mult(a, b int) int { // want "unused parameter: b" + a += 1 + return a +} + +func y(a int) { + panic("yo") +} + +var _ = func(x int) {} // empty body: no diagnostic + +var _ = func(x int) { println() } // want "unused parameter: x" + +var ( + calledGlobal = func(x int) { println() } // want "unused parameter: x" + addressTakenGlobal = func(x int) { println() } // no report: function is address-taken +) + +func _() { + calledGlobal(1) + println(addressTakenGlobal) +} + +func Exported(unused int) {} // no finding: an exported function may be address-taken + +type T int + +func (T) m(f bool) { println() } // want "unused parameter: f" +func (T) n(f bool) { println() } // no finding: n may match the interface method parent.n + +func _() { + var fib func(x, y int) int + fib = func(x, y int) int { // want "unused parameter: y" + if x < 2 { + return x + } + return fib(x-1, 123) + fib(x-2, 456) + } + fib(10, 42) +} diff --git a/gopls/internal/analysis/unusedparams/testdata/src/a/a.go.golden b/gopls/internal/analysis/unusedparams/testdata/src/a/a.go.golden new file mode 100644 index 00000000000..dea8a6d44ae --- /dev/null +++ b/gopls/internal/analysis/unusedparams/testdata/src/a/a.go.golden @@ -0,0 +1,87 @@ +// Copyright 2022 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package a + +import ( + "bytes" + "fmt" + "net/http" +) + +type parent interface { + n(f bool) +} + +type yuh struct { + a int +} + +func (y *yuh) n(f bool) { + for i := 0; i < 10; i++ { + fmt.Println(i) + } +} + +func a(i1 int, _ int, i3 int) int { // want "unused parameter: i2" + i3 += i1 + _ = func(_ int) int { // want "unused parameter: z" + _ = 1 + return 1 + } + return i3 +} + +func b(_ bytes.Buffer) { // want "unused parameter: c" + _ = 1 +} + +func z(h http.ResponseWriter, _ *http.Request) { // no report: func z is address-taken + fmt.Println("Before") +} + +func l(_ http.Handler) http.Handler { // want "unused parameter: h" + return http.HandlerFunc(z) +} + +func mult(a, _ int) int { // want "unused parameter: b" + a += 1 + return a +} + +func y(a int) { + panic("yo") +} + +var _ = func(x int) {} // empty body: no diagnostic + +var _ = func(_ int) { println() } // want "unused parameter: x" + +var ( + calledGlobal = func(_ int) { println() } // want "unused parameter: x" + addressTakenGlobal = func(x int) { println() } // no report: function is address-taken +) + +func _() { + calledGlobal(1) + println(addressTakenGlobal) +} + +func Exported(unused int) {} // no finding: an exported function may be address-taken + +type T int + +func (T) m(_ bool) { println() } // want "unused parameter: f" +func (T) n(f bool) { println() } // no finding: n may match the interface method parent.n + +func _() { + var fib func(x, y int) int + fib = func(x, _ int) int { // want "unused parameter: y" + if x < 2 { + return x + } + return fib(x-1, 123) + fib(x-2, 456) + } + fib(10, 42) +} diff --git a/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go b/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go new file mode 100644 index 00000000000..d89926a7db5 --- /dev/null +++ 
b/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go @@ -0,0 +1,55 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeparams + +import ( + "bytes" + "fmt" + "net/http" +) + +type parent[T any] interface { + n(f T) +} + +type yuh[T any] struct { + a T +} + +func (y *yuh[int]) n(f bool) { + for i := 0; i < 10; i++ { + fmt.Println(i) + } +} + +func a[T comparable](i1 int, i2 T, i3 int) int { // want "unused parameter: i2" + i3 += i1 + _ = func(z int) int { // want "unused parameter: z" + _ = 1 + return 1 + } + return i3 +} + +func b[T any](c bytes.Buffer) { // want "unused parameter: c" + _ = 1 +} + +func z[T http.ResponseWriter](h T, _ *http.Request) { // no report: func z is address-taken + fmt.Println("Before") +} + +func l(h http.Handler) http.Handler { // want "unused parameter: h" + return http.HandlerFunc(z[http.ResponseWriter]) +} + +func mult(a, b int) int { // want "unused parameter: b" + a += 1 + return a +} + +func y[T any](a T) { + panic("yo") +} diff --git a/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go.golden b/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go.golden new file mode 100644 index 00000000000..85479bc8b50 --- /dev/null +++ b/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go.golden @@ -0,0 +1,55 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package typeparams + +import ( + "bytes" + "fmt" + "net/http" +) + +type parent[T any] interface { + n(f T) +} + +type yuh[T any] struct { + a T +} + +func (y *yuh[int]) n(f bool) { + for i := 0; i < 10; i++ { + fmt.Println(i) + } +} + +func a[T comparable](i1 int, _ T, i3 int) int { // want "unused parameter: i2" + i3 += i1 + _ = func(_ int) int { // want "unused parameter: z" + _ = 1 + return 1 + } + return i3 +} + +func b[T any](_ bytes.Buffer) { // want "unused parameter: c" + _ = 1 +} + +func z[T http.ResponseWriter](h T, _ *http.Request) { // no report: func z is address-taken + fmt.Println("Before") +} + +func l(_ http.Handler) http.Handler { // want "unused parameter: h" + return http.HandlerFunc(z[http.ResponseWriter]) +} + +func mult(a, _ int) int { // want "unused parameter: b" + a += 1 + return a +} + +func y[T any](a T) { + panic("yo") +} diff --git a/gopls/internal/analysis/unusedparams/unusedparams.go b/gopls/internal/analysis/unusedparams/unusedparams.go new file mode 100644 index 00000000000..74cd662285c --- /dev/null +++ b/gopls/internal/analysis/unusedparams/unusedparams.go @@ -0,0 +1,308 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
//go:embed doc.go
var doc string

var Analyzer = &analysis.Analyzer{
	Name:     "unusedparams",
	Doc:      analysisinternal.MustExtractDoc(doc, "unusedparams"),
	Requires: []*analysis.Analyzer{inspect.Analyzer},
	Run:      run,
	URL:      "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedparams",
}

const FixCategory = "unusedparam" // recognized by gopls ApplyFix

func run(pass *analysis.Pass) (any, error) {
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)

	// First find all "address-taken" functions.
	// We must conservatively assume that their parameters
	// are all required to conform to some signature.
	//
	// A named function is address-taken if it is somewhere
	// used not in call position:
	//
	//	f(...)		// not address-taken
	//	use(f)		// address-taken
	//
	// A literal function is address-taken if it is not
	// immediately bound to a variable, or if that variable is
	// used not in call position:
	//
	//	f := func() { ... }; f()	used only in call position
	//	var f func(); f = func() { ...f()... }; f()	ditto
	//	use(func() { ... })		address-taken
	//

	// Note: this algorithm relies on the assumption that the
	// analyzer is called only for the "widest" package for a
	// given file: that is, p_test in preference to p, if both
	// exist. Analyzing only package p may produce diagnostics
	// that would be falsified based on declarations in p_test.go
	// files. The gopls analysis driver does this, but most
	// drivers do not, so running this command in, say,
	// unitchecker or multichecker may produce incorrect results.

	// Gather global information:
	// - uses of functions not in call position
	// - unexported interface methods
	// - all referenced variables

	usesOutsideCall := make(map[types.Object][]*ast.Ident)
	unexportedIMethodNames := make(map[string]bool)
	{
		callPosn := make(map[*ast.Ident]bool) // all idents f appearing in f() calls
		filter := []ast.Node{
			(*ast.CallExpr)(nil),
			(*ast.InterfaceType)(nil),
		}
		inspect.Preorder(filter, func(n ast.Node) {
			switch n := n.(type) {
			case *ast.CallExpr:
				// Strip off any generic instantiation.
				fun := n.Fun
				switch fun_ := fun.(type) {
				case *ast.IndexExpr:
					fun = fun_.X // f[T]() (funcs[i]() is rejected below)
				case *ast.IndexListExpr:
					fun = fun_.X // f[K, V]()
				}

				// Find object:
				// record non-exported function, method, or func-typed var.
				var id *ast.Ident
				switch fun := fun.(type) {
				case *ast.Ident:
					id = fun
				case *ast.SelectorExpr:
					id = fun.Sel
				}
				if id != nil && !id.IsExported() {
					switch pass.TypesInfo.Uses[id].(type) {
					case *types.Func, *types.Var:
						callPosn[id] = true
					}
				}

			case *ast.InterfaceType:
				// Record the set of names of unexported interface methods.
				// (It would be more precise to record signatures but
				// generics makes it tricky, and this conservative
				// heuristic is close enough.)
				t := pass.TypesInfo.TypeOf(n).(*types.Interface)
				for i := 0; i < t.NumExplicitMethods(); i++ {
					m := t.ExplicitMethod(i)
					if !m.Exported() && m.Name() != "_" {
						unexportedIMethodNames[m.Name()] = true
					}
				}
			}
		})

		for id, obj := range pass.TypesInfo.Uses {
			if !callPosn[id] {
				// This includes "f = func() {...}", which we deal with below.
				usesOutsideCall[obj] = append(usesOutsideCall[obj], id)
			}
		}
	}

	// Find all vars (notably parameters) that are used.
	usedVars := make(map[*types.Var]bool)
	for _, obj := range pass.TypesInfo.Uses {
		if v, ok := obj.(*types.Var); ok {
			if v.IsField() {
				continue // no point gathering these
			}
			usedVars[v] = true
		}
	}

	// Check each non-address-taken function's parameters are all used.
	filter := []ast.Node{
		(*ast.FuncDecl)(nil),
		(*ast.FuncLit)(nil),
	}
	inspect.WithStack(filter, func(n ast.Node, push bool, stack []ast.Node) bool {
		// (We always return true so that we visit nested FuncLits.)

		if !push {
			return true
		}

		var (
			fn    types.Object // function symbol (*Func, possibly *Var for a FuncLit)
			ftype *ast.FuncType
			body  *ast.BlockStmt
		)
		switch n := n.(type) {
		case *ast.FuncDecl:
			// We can't analyze non-Go functions.
			if n.Body == nil {
				return true
			}

			// Ignore exported functions and methods: we
			// must assume they may be address-taken in
			// another package.
			if n.Name.IsExported() {
				return true
			}

			// Ignore methods that match the name of any
			// interface method declared in this package,
			// as the method's signature may need to conform
			// to the interface.
			if n.Recv != nil && unexportedIMethodNames[n.Name.Name] {
				return true
			}

			fn = pass.TypesInfo.Defs[n.Name].(*types.Func)
			ftype, body = n.Type, n.Body

		case *ast.FuncLit:
			// Find the symbol for the variable (if any)
			// to which the FuncLit is bound.
			// (We don't bother to allow ParenExprs.)
			switch parent := stack[len(stack)-2].(type) {
			case *ast.AssignStmt:
				// f  = func() {...}
				// f := func() {...}
				for i, rhs := range parent.Rhs {
					if rhs == n {
						if id, ok := parent.Lhs[i].(*ast.Ident); ok {
							fn = pass.TypesInfo.ObjectOf(id)

							// Edge case: f = func() {...}
							// should not count as a use.
							if pass.TypesInfo.Uses[id] != nil {
								usesOutsideCall[fn] = slices.Remove(usesOutsideCall[fn], id)
							}

							if fn == nil && id.Name == "_" {
								// Edge case: _ = func() {...}
								// has no var. Fake one.
								fn = types.NewVar(id.Pos(), pass.Pkg, id.Name, pass.TypesInfo.TypeOf(n))
							}
						}
						break
					}
				}

			case *ast.ValueSpec:
				// var f = func() { ... }
				// (unless f is an exported package-level var)
				for i, val := range parent.Values {
					if val == n {
						v := pass.TypesInfo.Defs[parent.Names[i]]
						if !(v.Parent() == pass.Pkg.Scope() && v.Exported()) {
							fn = v
						}
						break
					}
				}
			}

			ftype, body = n.Type, n.Body
		}

		// Ignore address-taken functions and methods: unused
		// parameters may be needed to conform to a func type.
		if fn == nil || len(usesOutsideCall[fn]) > 0 {
			return true
		}

		// If there are no parameters, there are no unused parameters.
		if ftype.Params.NumFields() == 0 {
			return true
		}

		// To reduce false positives, ignore functions with an
		// empty or panic body.
		//
		// We choose not to ignore functions whose body is a
		// single return statement (as earlier versions did)
		//	func f() { return }
		//	func f() { return g(...) }
		// as we suspect that was just heuristic to reduce
		// false positives in the earlier unsound algorithm.
		switch len(body.List) {
		case 0:
			// Empty body. Although the parameter is
			// unnecessary, it's pretty obvious to the
			// reader that that's the case, so we allow it.
			return true // func f() {}
		case 1:
			if stmt, ok := body.List[0].(*ast.ExprStmt); ok {
				// We allow a panic body, as it is often a
				// placeholder for a future implementation:
				//	func f() { panic(...) }
				if call, ok := stmt.X.(*ast.CallExpr); ok {
					if fun, ok := call.Fun.(*ast.Ident); ok && fun.Name == "panic" {
						return true
					}
				}
			}
		}

		// Report each unused parameter.
		for _, field := range ftype.Params.List {
			for _, id := range field.Names {
				if id.Name == "_" {
					continue
				}
				param := pass.TypesInfo.Defs[id].(*types.Var)
				if !usedVars[param] {
					// Highlight the whole field unless it declares
					// several names, in which case just this name.
					start, end := field.Pos(), field.End()
					if len(field.Names) > 1 {
						start, end = id.Pos(), id.End()
					}
					// This diagnostic carries both an edit-based fix to
					// rename the unused parameter, and a command-based fix
					// to remove it (see golang.RemoveUnusedParameter).
					pass.Report(analysis.Diagnostic{
						Pos:      start,
						End:      end,
						Message:  fmt.Sprintf("unused parameter: %s", id.Name),
						Category: FixCategory,
						SuggestedFixes: []analysis.SuggestedFix{
							{
								Message: `Rename parameter to "_"`,
								TextEdits: []analysis.TextEdit{{
									Pos:     id.Pos(),
									End:     id.End(),
									NewText: []byte("_"),
								}},
							},
							{
								Message: fmt.Sprintf("Remove unused parameter %q", id.Name),
								// No TextEdits => computed by gopls command
							},
						},
					})
				}
			}
		}

		return true
	})
	return nil, nil
}
+ +package unusedparams_test + +import ( + "testing" + + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/gopls/internal/analysis/unusedparams" +) + +func Test(t *testing.T) { + testdata := analysistest.TestData() + analysistest.RunWithSuggestedFixes(t, testdata, unusedparams.Analyzer, "a", "typeparams") +} diff --git a/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go b/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go new file mode 100644 index 00000000000..0eb74e98b8c --- /dev/null +++ b/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go @@ -0,0 +1,74 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package a + +import ( + "fmt" + "os" +) + +type A struct { + b int +} + +func singleAssignment() { + v := "s" // want `v.*declared (and|but) not used` + + s := []int{ // want `s.*declared (and|but) not used` + 1, + 2, + } + + a := func(s string) bool { // want `a.*declared (and|but) not used` + return false + } + + if 1 == 1 { + s := "v" // want `s.*declared (and|but) not used` + } + + panic("I should survive") +} + +func noOtherStmtsInBlock() { + v := "s" // want `v.*declared (and|but) not used` +} + +func partOfMultiAssignment() { + f, err := os.Open("file") // want `f.*declared (and|but) not used` + panic(err) +} + +func sideEffects(cBool chan bool, cInt chan int) { + b := <-c // want `b.*declared (and|but) not used` + s := fmt.Sprint("") // want `s.*declared (and|but) not used` + a := A{ // want `a.*declared (and|but) not used` + b: func() int { + return 1 + }(), + } + c := A{<-cInt} // want `c.*declared (and|but) not used` + d := fInt() + <-cInt // want `d.*declared (and|but) not used` + e := fBool() && <-cBool // want `e.*declared (and|but) not used` + f := map[int]int{ // want `f.*declared (and|but) not used` + fInt(): <-cInt, + } + g := []int{<-cInt} // want `g.*declared (and|but) 
not used` + h := func(s string) {} // want `h.*declared (and|but) not used` + i := func(s string) {}() // want `i.*declared (and|but) not used` +} + +func commentAbove() { + // v is a variable + v := "s" // want `v.*declared (and|but) not used` +} + +func fBool() bool { + return true +} + +func fInt() int { + return 1 +} diff --git a/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go.golden b/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go.golden new file mode 100644 index 00000000000..fd45e2efe98 --- /dev/null +++ b/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go.golden @@ -0,0 +1,59 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package a + +import ( + "fmt" + "os" +) + +type A struct { + b int +} + +func singleAssignment() { + if 1 == 1 { + } + + panic("I should survive") +} + +func noOtherStmtsInBlock() { +} + +func partOfMultiAssignment() { + _, err := os.Open("file") // want `f.*declared (and|but) not used` + panic(err) +} + +func sideEffects(cBool chan bool, cInt chan int) { + <-c // want `b.*declared (and|but) not used` + fmt.Sprint("") // want `s.*declared (and|but) not used` + A{ // want `a.*declared (and|but) not used` + b: func() int { + return 1 + }(), + } + A{<-cInt} // want `c.*declared (and|but) not used` + fInt() + <-cInt // want `d.*declared (and|but) not used` + fBool() && <-cBool // want `e.*declared (and|but) not used` + map[int]int{ // want `f.*declared (and|but) not used` + fInt(): <-cInt, + } + []int{<-cInt} // want `g.*declared (and|but) not used` + func(s string) {}() // want `i.*declared (and|but) not used` +} + +func commentAbove() { + // v is a variable +} + +func fBool() bool { + return true +} + +func fInt() int { + return 1 +} diff --git a/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go b/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go new 
file mode 100644 index 00000000000..57cb4b2c972 --- /dev/null +++ b/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go @@ -0,0 +1,30 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package decl + +func a() { + var b, c bool // want `b.*declared (and|but) not used` + panic(c) + + if 1 == 1 { + var s string // want `s.*declared (and|but) not used` + } +} + +func b() { + // b is a variable + var b bool // want `b.*declared (and|but) not used` +} + +func c() { + var ( + d string + + // some comment for c + c bool // want `c.*declared (and|but) not used` + ) + + panic(d) +} diff --git a/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go.golden b/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go.golden new file mode 100644 index 00000000000..3fbabed18ac --- /dev/null +++ b/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go.golden @@ -0,0 +1,24 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package decl + +func a() { + var c bool // want `b.*declared (and|but) not used` + panic(c) + + if 1 == 1 { + } +} + +func b() { + // b is a variable +} + +func c() { + var ( + d string + ) + panic(d) +} diff --git a/gopls/internal/lsp/analysis/unusedvariable/unusedvariable.go b/gopls/internal/analysis/unusedvariable/unusedvariable.go similarity index 90% rename from gopls/internal/lsp/analysis/unusedvariable/unusedvariable.go rename to gopls/internal/analysis/unusedvariable/unusedvariable.go index 904016be71e..106e856fee8 100644 --- a/gopls/internal/lsp/analysis/unusedvariable/unusedvariable.go +++ b/gopls/internal/analysis/unusedvariable/unusedvariable.go @@ -18,10 +18,7 @@ import ( "golang.org/x/tools/go/ast/astutil" ) -const Doc = `check for unused variables - -The unusedvariable analyzer suggests fixes for unused variables errors. -` +const Doc = `check for unused variables and suggest fixes` var Analyzer = &analysis.Analyzer{ Name: "unusedvariable", @@ -29,6 +26,7 @@ var Analyzer = &analysis.Analyzer{ Requires: []*analysis.Analyzer{}, Run: run, RunDespiteErrors: true, // an unusedvariable diagnostic is a compile error + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedvariable", } // The suffix for this error message changed in Go 1.20. @@ -39,6 +37,9 @@ func run(pass *analysis.Pass) (interface{}, error) { for _, suffix := range unusedVariableSuffixes { if strings.HasSuffix(typeErr.Msg, suffix) { varName := strings.TrimSuffix(typeErr.Msg, suffix) + // Beginning in Go 1.23, go/types began quoting vars as `v'. 
+ varName = strings.Trim(varName, "'`'") + err := runForError(pass, typeErr, varName) if err != nil { return nil, err @@ -106,7 +107,7 @@ func runForError(pass *analysis.Pass, err types.Error, name string) error { continue } - fixes := removeVariableFromAssignment(pass, path, stmt, ident) + fixes := removeVariableFromAssignment(path, stmt, ident) // fixes may be nil if len(fixes) > 0 { diag.SuggestedFixes = fixes @@ -157,10 +158,14 @@ func removeVariableFromSpec(pass *analysis.Pass, path []ast.Node, stmt *ast.Valu // Find parent DeclStmt and delete it for _, node := range path { if declStmt, ok := node.(*ast.DeclStmt); ok { + edits := deleteStmtFromBlock(path, declStmt) + if len(edits) == 0 { + return nil // can this happen? + } return []analysis.SuggestedFix{ { Message: suggestedFixMessage(ident.Name), - TextEdits: deleteStmtFromBlock(path, declStmt), + TextEdits: edits, }, } } @@ -187,7 +192,7 @@ func removeVariableFromSpec(pass *analysis.Pass, path []ast.Node, stmt *ast.Valu } } -func removeVariableFromAssignment(pass *analysis.Pass, path []ast.Node, stmt *ast.AssignStmt, ident *ast.Ident) []analysis.SuggestedFix { +func removeVariableFromAssignment(path []ast.Node, stmt *ast.AssignStmt, ident *ast.Ident) []analysis.SuggestedFix { // The only variable in the assignment is unused if len(stmt.Lhs) == 1 { // If LHS has only one expression to be valid it has to have 1 expression @@ -210,10 +215,14 @@ func removeVariableFromAssignment(pass *analysis.Pass, path []ast.Node, stmt *as } // RHS does not have any side effects, delete the whole statement + edits := deleteStmtFromBlock(path, stmt) + if len(edits) == 0 { + return nil // can this happen? 
+ } return []analysis.SuggestedFix{ { Message: suggestedFixMessage(ident.Name), - TextEdits: deleteStmtFromBlock(path, stmt), + TextEdits: edits, }, } } diff --git a/gopls/internal/lsp/analysis/unusedvariable/unusedvariable_test.go b/gopls/internal/analysis/unusedvariable/unusedvariable_test.go similarity index 89% rename from gopls/internal/lsp/analysis/unusedvariable/unusedvariable_test.go rename to gopls/internal/analysis/unusedvariable/unusedvariable_test.go index 08223155f6e..5dcca007a98 100644 --- a/gopls/internal/lsp/analysis/unusedvariable/unusedvariable_test.go +++ b/gopls/internal/analysis/unusedvariable/unusedvariable_test.go @@ -8,7 +8,7 @@ import ( "testing" "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/lsp/analysis/unusedvariable" + "golang.org/x/tools/gopls/internal/analysis/unusedvariable" ) func Test(t *testing.T) { diff --git a/gopls/internal/lsp/analysis/useany/testdata/src/a/a.go b/gopls/internal/analysis/useany/testdata/src/a/a.go similarity index 100% rename from gopls/internal/lsp/analysis/useany/testdata/src/a/a.go rename to gopls/internal/analysis/useany/testdata/src/a/a.go diff --git a/gopls/internal/lsp/analysis/useany/testdata/src/a/a.go.golden b/gopls/internal/analysis/useany/testdata/src/a/a.go.golden similarity index 100% rename from gopls/internal/lsp/analysis/useany/testdata/src/a/a.go.golden rename to gopls/internal/analysis/useany/testdata/src/a/a.go.golden diff --git a/gopls/internal/lsp/analysis/useany/useany.go b/gopls/internal/analysis/useany/useany.go similarity index 92% rename from gopls/internal/lsp/analysis/useany/useany.go rename to gopls/internal/analysis/useany/useany.go index 73e2f763316..ff25e5945d3 100644 --- a/gopls/internal/lsp/analysis/useany/useany.go +++ b/gopls/internal/analysis/useany/useany.go @@ -15,7 +15,6 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" - 
"golang.org/x/tools/internal/typeparams" ) const Doc = `check for constraints that could be simplified to "any"` @@ -25,16 +24,13 @@ var Analyzer = &analysis.Analyzer{ Doc: Doc, Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/useany", } func run(pass *analysis.Pass) (interface{}, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) universeAny := types.Universe.Lookup("any") - if universeAny == nil { - // Go <= 1.17. Nothing to check. - return nil, nil - } nodeFilter := []ast.Node{ (*ast.TypeSpec)(nil), @@ -45,9 +41,9 @@ func run(pass *analysis.Pass) (interface{}, error) { var tparams *ast.FieldList switch node := node.(type) { case *ast.TypeSpec: - tparams = typeparams.ForTypeSpec(node) + tparams = node.TypeParams case *ast.FuncType: - tparams = typeparams.ForFuncType(node) + tparams = node.TypeParams default: panic(fmt.Sprintf("unexpected node type %T", node)) } diff --git a/gopls/internal/analysis/useany/useany_test.go b/gopls/internal/analysis/useany/useany_test.go new file mode 100644 index 00000000000..a8cb692f359 --- /dev/null +++ b/gopls/internal/analysis/useany/useany_test.go @@ -0,0 +1,17 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package useany_test + +import ( + "testing" + + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/gopls/internal/analysis/useany" +) + +func Test(t *testing.T) { + testdata := analysistest.TestData() + analysistest.RunWithSuggestedFixes(t, testdata, useany.Analyzer, "a") +} diff --git a/gopls/internal/astutil/util.go b/gopls/internal/astutil/util.go deleted file mode 100644 index e910c02a890..00000000000 --- a/gopls/internal/astutil/util.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package astutil - -import ( - "go/ast" - - "golang.org/x/tools/internal/typeparams" -) - -// UnpackRecv unpacks a receiver type expression, reporting whether it is a -// pointer recever, along with the type name identifier and any receiver type -// parameter identifiers. -// -// Copied (with modifications) from go/types. -func UnpackRecv(rtyp ast.Expr) (ptr bool, rname *ast.Ident, tparams []*ast.Ident) { -L: // unpack receiver type - // This accepts invalid receivers such as ***T and does not - // work for other invalid receivers, but we don't care. The - // validity of receiver expressions is checked elsewhere. - for { - switch t := rtyp.(type) { - case *ast.ParenExpr: - rtyp = t.X - case *ast.StarExpr: - ptr = true - rtyp = t.X - default: - break L - } - } - - // unpack type parameters, if any - switch rtyp.(type) { - case *ast.IndexExpr, *typeparams.IndexListExpr: - var indices []ast.Expr - rtyp, _, indices, _ = typeparams.UnpackIndexExpr(rtyp) - for _, arg := range indices { - var par *ast.Ident - switch arg := arg.(type) { - case *ast.Ident: - par = arg - default: - // ignore errors - } - if par == nil { - par = &ast.Ident{NamePos: arg.Pos(), Name: "_"} - } - tparams = append(tparams, par) - } - } - - // unpack receiver name - if name, _ := rtyp.(*ast.Ident); name != nil { - rname = name - } - - return -} diff --git a/gopls/internal/lsp/cache/analysis.go b/gopls/internal/cache/analysis.go similarity index 90% rename from gopls/internal/lsp/cache/analysis.go rename to gopls/internal/cache/analysis.go index ae666ba9111..fbc84730296 100644 --- a/gopls/internal/lsp/cache/analysis.go +++ b/gopls/internal/cache/analysis.go @@ -15,6 +15,7 @@ import ( "errors" "fmt" "go/ast" + "go/parser" "go/token" "go/types" "log" @@ -31,18 +32,22 @@ import ( "golang.org/x/sync/errgroup" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/gopls/internal/bug" - 
"golang.org/x/tools/gopls/internal/lsp/filecache" - "golang.org/x/tools/gopls/internal/lsp/frob" - "golang.org/x/tools/gopls/internal/lsp/progress" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/filecache" + "golang.org/x/tools/gopls/internal/progress" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/astutil" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/frob" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/event/tag" "golang.org/x/tools/internal/facts" "golang.org/x/tools/internal/gcimporter" - "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/versions" ) /* @@ -146,14 +151,14 @@ import ( // Even if the ultimate consumer decides to ignore errors, // tests and other situations want to be assured of freedom from // errors, not just missing results. This should be recorded. -// - Split this into a subpackage, gopls/internal/lsp/cache/driver, +// - Split this into a subpackage, gopls/internal/cache/driver, // consisting of this file and three helpers from errors.go. // The (*snapshot).Analyze method would stay behind and make calls // to the driver package. // Steps: // - define a narrow driver.Snapshot interface with only these methods: -// Metadata(PackageID) source.Metadata -// ReadFile(Context, URI) (source.FileHandle, error) +// Metadata(PackageID) Metadata +// ReadFile(Context, URI) (file.Handle, error) // View() *View // for Options // - share cache.{goVersionRx,parseGoImpl} @@ -168,7 +173,7 @@ const AnalysisProgressTitle = "Analyzing Dependencies" // The analyzers list must be duplicate free; order does not matter. 
// // Notifications of progress may be sent to the optional reporter. -func (snapshot *snapshot) Analyze(ctx context.Context, pkgs map[PackageID]unit, analyzers []*source.Analyzer, reporter *progress.Tracker) ([]*source.Diagnostic, error) { +func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Package, analyzers []*settings.Analyzer, reporter *progress.Tracker) ([]*Diagnostic, error) { start := time.Now() // for progress reporting var tagStr string // sorted comma-separated list of PackageIDs @@ -187,10 +192,10 @@ func (snapshot *snapshot) Analyze(ctx context.Context, pkgs map[PackageID]unit, // Filter and sort enabled root analyzers. // A disabled analyzer may still be run if required by another. - toSrc := make(map[*analysis.Analyzer]*source.Analyzer) + toSrc := make(map[*analysis.Analyzer]*settings.Analyzer) var enabled []*analysis.Analyzer // enabled subset + transitive requirements for _, a := range analyzers { - if a.IsEnabled(snapshot.Options()) { + if a.IsEnabled(s.Options()) { toSrc[a.Analyzer] = a enabled = append(enabled, a.Analyzer) } @@ -241,8 +246,8 @@ func (snapshot *snapshot) Analyze(ctx context.Context, pkgs map[PackageID]unit, makeNode = func(from *analysisNode, id PackageID) (*analysisNode, error) { an, ok := nodes[id] if !ok { - m := snapshot.Metadata(id) - if m == nil { + mp := s.Metadata(id) + if mp == nil { return nil, bug.Errorf("no metadata for %s", id) } @@ -250,7 +255,7 @@ func (snapshot *snapshot) Analyze(ctx context.Context, pkgs map[PackageID]unit, an = &analysisNode{ fset: fset, - m: m, + mp: mp, analyzers: facty, // all nodes run at least the facty analyzers allDeps: make(map[PackagePath]*analysisNode), exportDeps: make(map[PackagePath]*analysisNode), @@ -261,8 +266,8 @@ func (snapshot *snapshot) Analyze(ctx context.Context, pkgs map[PackageID]unit, // -- recursion -- // Build subgraphs for dependencies. 
- an.succs = make(map[PackageID]*analysisNode, len(m.DepsByPkgPath)) - for _, depID := range m.DepsByPkgPath { + an.succs = make(map[PackageID]*analysisNode, len(mp.DepsByPkgPath)) + for _, depID := range mp.DepsByPkgPath { dep, err := makeNode(an, depID) if err != nil { return nil, err @@ -278,7 +283,7 @@ func (snapshot *snapshot) Analyze(ctx context.Context, pkgs map[PackageID]unit, // -- postorder -- - an.allDeps[m.PkgPath] = an // add self entry (reflexive transitive closure) + an.allDeps[mp.PkgPath] = an // add self entry (reflexive transitive closure) // Add leaf nodes (no successors) directly to queue. if len(an.succs) == 0 { @@ -288,9 +293,9 @@ func (snapshot *snapshot) Analyze(ctx context.Context, pkgs map[PackageID]unit, // Load the contents of each compiled Go file through // the snapshot's cache. (These are all cache hits as // files are pre-loaded following packages.Load) - an.files = make([]source.FileHandle, len(m.CompiledGoFiles)) - for i, uri := range m.CompiledGoFiles { - fh, err := snapshot.ReadFile(ctx, uri) + an.files = make([]file.Handle, len(mp.CompiledGoFiles)) + for i, uri := range mp.CompiledGoFiles { + fh, err := s.ReadFile(ctx, uri) if err != nil { return nil, err } @@ -320,8 +325,8 @@ func (snapshot *snapshot) Analyze(ctx context.Context, pkgs map[PackageID]unit, // Now that we have read all files, // we no longer need the snapshot. // (but options are needed for progress reporting) - options := snapshot.Options() - snapshot = nil + options := s.Options() + s = nil // Progress reporting. If supported, gopls reports progress on analysis // passes that are taking a long time. @@ -433,7 +438,7 @@ func (snapshot *snapshot) Analyze(ctx context.Context, pkgs map[PackageID]unit, // begin the analysis you asked for". // Even if current callers choose to discard the // results, we should propagate the per-action errors. 
- var results []*source.Diagnostic + var results []*Diagnostic for _, root := range roots { for _, a := range enabled { // Skip analyzers that were added only to @@ -501,8 +506,8 @@ func (an *analysisNode) decrefPreds() { // type-checking and analyzing syntax (miss). type analysisNode struct { fset *token.FileSet // file set shared by entire batch (DAG) - m *source.Metadata // metadata for this package - files []source.FileHandle // contents of CompiledGoFiles + mp *metadata.Package // metadata for this package + files []file.Handle // contents of CompiledGoFiles analyzers []*analysis.Analyzer // set of analyzers to run preds []*analysisNode // graph edges: succs map[PackageID]*analysisNode // (preds -> self -> succs) @@ -518,19 +523,19 @@ type analysisNode struct { typesErr error // an error producing type information } -func (an *analysisNode) String() string { return string(an.m.ID) } +func (an *analysisNode) String() string { return string(an.mp.ID) } // _import imports this node's types.Package from export data, if not already done. // Precondition: analysis was a success. // Postcondition: an.types and an.exportDeps are populated. func (an *analysisNode) _import() (*types.Package, error) { an.typesOnce.Do(func() { - if an.m.PkgPath == "unsafe" { + if an.mp.PkgPath == "unsafe" { an.types = types.Unsafe return } - an.types = types.NewPackage(string(an.m.PkgPath), string(an.m.Name)) + an.types = types.NewPackage(string(an.mp.PkgPath), string(an.mp.Name)) // getPackages recursively imports each dependency // referenced by the export data, in parallel. @@ -541,7 +546,7 @@ func (an *analysisNode) _import() (*types.Package, error) { dep, ok := an.allDeps[path] if !ok { // This early return bypasses Wait; that's ok. 
- return fmt.Errorf("%s: unknown dependency %q", an.m, path) + return fmt.Errorf("%s: unknown dependency %q", an.mp, path) } an.exportDeps[path] = dep // record, for later fact decoding if dep == an { @@ -563,12 +568,12 @@ func (an *analysisNode) _import() (*types.Package, error) { } return g.Wait() } - pkg, err := gcimporter.IImportShallow(an.fset, getPackages, an.summary.Export, string(an.m.PkgPath), bug.Reportf) + pkg, err := gcimporter.IImportShallow(an.fset, getPackages, an.summary.Export, string(an.mp.PkgPath), bug.Reportf) if err != nil { - an.typesErr = bug.Errorf("%s: invalid export data: %v", an.m, err) + an.typesErr = bug.Errorf("%s: invalid export data: %v", an.mp, err) an.types = nil } else if pkg != an.types { - log.Fatalf("%s: inconsistent packages", an.m) + log.Fatalf("%s: inconsistent packages", an.mp) } }) return an.types, an.typesErr @@ -577,10 +582,10 @@ func (an *analysisNode) _import() (*types.Package, error) { // analyzeSummary is a gob-serializable summary of successfully // applying a list of analyzers to a package. type analyzeSummary struct { - Export []byte // encoded types of package - DeepExportHash source.Hash // hash of reflexive transitive closure of export data - Compiles bool // transitively free of list/parse/type errors - Actions actionMap // maps analyzer stablename to analysis results (*actionSummary) + Export []byte // encoded types of package + DeepExportHash file.Hash // hash of reflexive transitive closure of export data + Compiles bool // transitively free of list/parse/type errors + Actions actionMap // maps analyzer stablename to analysis results (*actionSummary) } // actionMap defines a stable Gob encoding for a map. @@ -625,8 +630,8 @@ func (m *actionMap) GobDecode(data []byte) error { // actionSummary is a gob-serializable summary of one possibly failed analysis action. // If Err is non-empty, the other fields are undefined. 
type actionSummary struct { - Facts []byte // the encoded facts.Set - FactsHash source.Hash // hash(Facts) + Facts []byte // the encoded facts.Set + FactsHash file.Hash // hash(Facts) Diagnostics []gobDiagnostic Err string // "" => success } @@ -679,7 +684,7 @@ func (an *analysisNode) runCached(ctx context.Context) (*analyzeSummary, error) data := analyzeSummaryCodec.Encode(summary) if false { - log.Printf("Set key=%d value=%d id=%s\n", len(key), len(data), an.m.ID) + log.Printf("Set key=%d value=%d id=%s\n", len(key), len(data), an.mp.ID) } if err := filecache.Set(cacheKind, key, data); err != nil { event.Error(ctx, "internal error updating analysis shared cache", err) @@ -713,28 +718,28 @@ func (an *analysisNode) cacheKey() [sha256.Size]byte { } // package metadata - m := an.m - fmt.Fprintf(hasher, "package: %s %s %s\n", m.ID, m.Name, m.PkgPath) + mp := an.mp + fmt.Fprintf(hasher, "package: %s %s %s\n", mp.ID, mp.Name, mp.PkgPath) // We can ignore m.DepsBy{Pkg,Import}Path: although the logic // uses those fields, we account for them by hashing vdeps. 
// type sizes - wordSize := an.m.TypesSizes.Sizeof(types.Typ[types.Int]) - maxAlign := an.m.TypesSizes.Alignof(types.NewPointer(types.Typ[types.Int64])) + wordSize := an.mp.TypesSizes.Sizeof(types.Typ[types.Int]) + maxAlign := an.mp.TypesSizes.Alignof(types.NewPointer(types.Typ[types.Int64])) fmt.Fprintf(hasher, "sizes: %d %d\n", wordSize, maxAlign) // metadata errors: used for 'compiles' field - fmt.Fprintf(hasher, "errors: %d", len(m.Errors)) + fmt.Fprintf(hasher, "errors: %d", len(mp.Errors)) // module Go version - if m.Module != nil && m.Module.GoVersion != "" { - fmt.Fprintf(hasher, "go %s\n", m.Module.GoVersion) + if mp.Module != nil && mp.Module.GoVersion != "" { + fmt.Fprintf(hasher, "go %s\n", mp.Module.GoVersion) } // file names and contents fmt.Fprintf(hasher, "files: %d\n", len(an.files)) for _, fh := range an.files { - fmt.Fprintln(hasher, fh.FileIdentity()) + fmt.Fprintln(hasher, fh.Identity()) } // vdeps, in PackageID order @@ -745,7 +750,7 @@ func (an *analysisNode) cacheKey() [sha256.Size]byte { sort.Strings(depIDs) // TODO(adonovan): avoid conversions by using slices.Sort[PackageID] for _, depID := range depIDs { vdep := an.succs[PackageID(depID)] - fmt.Fprintf(hasher, "dep: %s\n", vdep.m.PkgPath) + fmt.Fprintf(hasher, "dep: %s\n", vdep.mp.PkgPath) fmt.Fprintf(hasher, "export: %s\n", vdep.summary.DeepExportHash) // action results: errors and facts @@ -781,7 +786,7 @@ func (an *analysisNode) cacheKey() [sha256.Size]byte { func (an *analysisNode) run(ctx context.Context) (*analyzeSummary, error) { // Parse only the "compiled" Go files. // Do the computation in parallel. - parsed := make([]*source.ParsedGoFile, len(an.files)) + parsed := make([]*parsego.File, len(an.files)) { var group errgroup.Group group.SetLimit(4) // not too much: run itself is already called in parallel @@ -792,7 +797,7 @@ func (an *analysisNode) run(ctx context.Context) (*analyzeSummary, error) { // as cached ASTs require the global FileSet. 
// ast.Object resolution is unfortunately an implied part of the // go/analysis contract. - pgf, err := parseGoImpl(ctx, an.fset, fh, source.ParseFull&^source.SkipObjectResolution, false) + pgf, err := parseGoImpl(ctx, an.fset, fh, parsego.Full&^parser.SkipObjectResolution, false) parsed[i] = pgf return err }) @@ -906,34 +911,35 @@ func (an *analysisNode) run(ctx context.Context) (*analyzeSummary, error) { } // Postcondition: analysisPackage.types and an.exportDeps are populated. -func (an *analysisNode) typeCheck(parsed []*source.ParsedGoFile) *analysisPackage { - m := an.m +func (an *analysisNode) typeCheck(parsed []*parsego.File) *analysisPackage { + mp := an.mp if false { // debugging - log.Println("typeCheck", m.ID) + log.Println("typeCheck", mp.ID) } pkg := &analysisPackage{ - m: m, + mp: mp, fset: an.fset, parsed: parsed, files: make([]*ast.File, len(parsed)), - compiles: len(m.Errors) == 0, // false => list error - types: types.NewPackage(string(m.PkgPath), string(m.Name)), + compiles: len(mp.Errors) == 0, // false => list error + types: types.NewPackage(string(mp.PkgPath), string(mp.Name)), typesInfo: &types.Info{ Types: make(map[ast.Expr]types.TypeAndValue), Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), + Instances: make(map[*ast.Ident]types.Instance), Implicits: make(map[ast.Node]types.Object), Selections: make(map[*ast.SelectorExpr]*types.Selection), Scopes: make(map[ast.Node]*types.Scope), + Uses: make(map[*ast.Ident]types.Object), }, - typesSizes: m.TypesSizes, + typesSizes: mp.TypesSizes, } - typeparams.InitInstanceInfo(pkg.typesInfo) + versions.InitFileVersions(pkg.typesInfo) // Unsafe has no syntax. 
- if m.PkgPath == "unsafe" { + if mp.PkgPath == "unsafe" { pkg.types = types.Unsafe return pkg } @@ -952,7 +958,7 @@ func (an *analysisNode) typeCheck(parsed []*source.ParsedGoFile) *analysisPackag } cfg := &types.Config{ - Sizes: m.TypesSizes, + Sizes: mp.TypesSizes, Error: func(e error) { pkg.compiles = false // type error @@ -960,7 +966,7 @@ func (an *analysisNode) typeCheck(parsed []*source.ParsedGoFile) *analysisPackag // as parser recovery can be quite lossy (#59888). typeError := e.(types.Error) for _, p := range parsed { - if p.ParseErr != nil && source.NodeContains(p.File, typeError.Pos) { + if p.ParseErr != nil && astutil.NodeContains(p.File, typeError.Pos) { return } } @@ -974,7 +980,7 @@ func (an *analysisNode) typeCheck(parsed []*source.ParsedGoFile) *analysisPackag // are swallowed, these packages may be confusing. // Map ImportPath to ID. - id, ok := m.DepsByImpPath[ImportPath(importPath)] + id, ok := mp.DepsByImpPath[ImportPath(importPath)] if !ok { // The import syntax is inconsistent with the metadata. // This could be because the import declaration was @@ -996,7 +1002,7 @@ func (an *analysisNode) typeCheck(parsed []*source.ParsedGoFile) *analysisPackag } // (Duplicates logic from check.go.) - if !source.IsValidImport(an.m.PkgPath, dep.m.PkgPath) { + if !metadata.IsValidImport(an.mp.PkgPath, dep.mp.PkgPath) { return nil, fmt.Errorf("invalid use of internal package %s", importPath) } @@ -1005,13 +1011,10 @@ func (an *analysisNode) typeCheck(parsed []*source.ParsedGoFile) *analysisPackag } // Set Go dialect. - if m.Module != nil && m.Module.GoVersion != "" { - goVersion := "go" + m.Module.GoVersion - // types.NewChecker panics if GoVersion is invalid. - // An unparsable mod file should probably stop us - // before we get here, but double check just in case. 
- if goVersionRx.MatchString(goVersion) { - typesinternal.SetGoVersion(cfg, goVersion) + if mp.Module != nil && mp.Module.GoVersion != "" { + goVersion := "go" + mp.Module.GoVersion + if validGoVersion(goVersion) { + cfg.GoVersion = goVersion } } @@ -1046,21 +1049,21 @@ func (an *analysisNode) typeCheck(parsed []*source.ParsedGoFile) *analysisPackag // this package and each dependency referenced by it. // Also, populate exportDeps. hash := sha256.New() - fmt.Fprintf(hash, "%s %d\n", m.PkgPath, len(export)) + fmt.Fprintf(hash, "%s %d\n", mp.PkgPath, len(export)) hash.Write(export) paths, err := readShallowManifest(export) if err != nil { log.Fatalf("internal error: bad export data: %v", err) } for _, path := range paths { - dep, ok := an.allDeps[PackagePath(path)] + dep, ok := an.allDeps[path] if !ok { log.Fatalf("%s: missing dependency: %q", an, path) } - fmt.Fprintf(hash, "%s %s\n", dep.m.PkgPath, dep.summary.DeepExportHash) - an.exportDeps[PackagePath(path)] = dep + fmt.Fprintf(hash, "%s %s\n", dep.mp.PkgPath, dep.summary.DeepExportHash) + an.exportDeps[path] = dep } - an.exportDeps[m.PkgPath] = an // self + an.exportDeps[mp.PkgPath] = an // self hash.Sum(pkg.deepExportHash[:0]) return pkg @@ -1094,15 +1097,15 @@ func readShallowManifest(export []byte) ([]PackagePath, error) { // analysisPackage contains information about a package, including // syntax trees, used transiently during its type-checking and analysis. 
type analysisPackage struct { - m *source.Metadata + mp *metadata.Package fset *token.FileSet // local to this package - parsed []*source.ParsedGoFile + parsed []*parsego.File files []*ast.File // same as parsed[i].File types *types.Package compiles bool // package is transitively free of list/parse/type errors factsDecoder *facts.Decoder - export []byte // encoding of types.Package - deepExportHash source.Hash // reflexive transitive hash of export data + export []byte // encoding of types.Package + deepExportHash file.Hash // reflexive transitive hash of export data typesInfo *types.Info typeErrors []types.Error typesSizes types.Sizes @@ -1127,7 +1130,7 @@ type action struct { } func (act *action) String() string { - return fmt.Sprintf("%s@%s", act.a.Name, act.pkg.m.ID) + return fmt.Sprintf("%s@%s", act.a.Name, act.pkg.mp.ID) } // execActions executes a set of action graph nodes in parallel. @@ -1191,7 +1194,7 @@ func (act *action) exec() (interface{}, *actionSummary, error) { // Were there list/parse/type errors that might prevent analysis? if !pkg.compiles && !analyzer.RunDespiteErrors { - return nil, nil, fmt.Errorf("skipping analysis %q because package %q does not compile", analyzer.Name, pkg.m.ID) + return nil, nil, fmt.Errorf("skipping analysis %q because package %q does not compile", analyzer.Name, pkg.mp.ID) } // Inv: package is well-formed enough to proceed with analysis. @@ -1230,7 +1233,7 @@ func (act *action) exec() (interface{}, *actionSummary, error) { return nil, nil } - id, ok := pkg.m.DepsByPkgPath[PackagePath(pkgPath)] + id, ok := pkg.mp.DepsByPkgPath[PackagePath(pkgPath)] if !ok { // This may mean imp was synthesized by the type // checker because it failed to import it for any reason @@ -1264,20 +1267,43 @@ func (act *action) exec() (interface{}, *actionSummary, error) { factFilter[reflect.TypeOf(f)] = true } + // If the package contains "fixed" files, it's not necessarily an error if we + // can't convert positions. 
+ hasFixedFiles := false + for _, p := range pkg.parsed { + if p.Fixed() { + hasFixedFiles = true + break + } + } + // posToLocation converts from token.Pos to protocol form. // TODO(adonovan): improve error messages. posToLocation := func(start, end token.Pos) (protocol.Location, error) { tokFile := pkg.fset.File(start) + for _, p := range pkg.parsed { if p.Tok == tokFile { if end == token.NoPos { end = start } + + // debugging #64547 + if start < token.Pos(tokFile.Base()) { + bug.Reportf("start < start of file") + } + if end > token.Pos(tokFile.Base()+tokFile.Size()+1) { + bug.Reportf("end > end of file + 1") + } + return p.PosLocation(start, end) } } - return protocol.Location{}, - bug.Errorf("internal error: token.Pos not within package") + errorf := bug.Errorf + if hasFixedFiles { + errorf = fmt.Errorf + } + return protocol.Location{}, errorf("token.Pos not within package") } // Now run the (pkg, analyzer) action. @@ -1294,7 +1320,9 @@ func (act *action) exec() (interface{}, *actionSummary, error) { Report: func(d analysis.Diagnostic) { diagnostic, err := toGobDiagnostic(posToLocation, analyzer, d) if err != nil { - bug.Reportf("internal error converting diagnostic from analyzer %q: %v", analyzer.Name, err) + if !hasFixedFiles { + bug.Reportf("internal error converting diagnostic from analyzer %q: %v", analyzer.Name, err) + } return } diagnostics = append(diagnostics, diagnostic) @@ -1366,7 +1394,7 @@ func (act *action) exec() (interface{}, *actionSummary, error) { return result, &actionSummary{ Diagnostics: diagnostics, Facts: factsdata, - FactsHash: source.HashOf(factsdata), + FactsHash: file.HashOf(factsdata), }, nil } @@ -1416,7 +1444,7 @@ func requiredAnalyzers(analyzers []*analysis.Analyzer) []*analysis.Analyzer { var analyzeSummaryCodec = frob.CodecFor[*analyzeSummary]() -// -- data types for serialization of analysis.Diagnostic and source.Diagnostic -- +// -- data types for serialization of analysis.Diagnostic and golang.Diagnostic -- // (The name says 
gob but we use frob.) var diagnosticsCodec = frob.CodecFor[[]gobDiagnostic]() diff --git a/gopls/internal/cache/cache.go b/gopls/internal/cache/cache.go new file mode 100644 index 00000000000..a6a166aab58 --- /dev/null +++ b/gopls/internal/cache/cache.go @@ -0,0 +1,76 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "reflect" + "strconv" + "sync/atomic" + + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/internal/imports" + "golang.org/x/tools/internal/memoize" +) + +// New Creates a new cache for gopls operation results, using the given file +// set, shared store, and session options. +// +// Both the fset and store may be nil, but if store is non-nil so must be fset +// (and they must always be used together), otherwise it may be possible to get +// cached data referencing token.Pos values not mapped by the FileSet. +func New(store *memoize.Store) *Cache { + index := atomic.AddInt64(&cacheIndex, 1) + + if store == nil { + store = &memoize.Store{} + } + + c := &Cache{ + id: strconv.FormatInt(index, 10), + store: store, + memoizedFS: newMemoizedFS(), + modCache: &sharedModCache{ + caches: make(map[string]*imports.DirInfoCache), + timers: make(map[string]*refreshTimer), + }, + } + return c +} + +// A Cache holds content that is shared across multiple gopls sessions. +type Cache struct { + id string + + // store holds cached calculations. + // + // TODO(rfindley): at this point, these are not important, as we've moved our + // content-addressable cache to the file system (the filecache package). It + // is unlikely that this shared cache provides any shared value. We should + // consider removing it, replacing current uses with a simpler futures cache, + // as we've done for e.g. type-checked packages. + store *memoize.Store + + // memoizedFS holds a shared file.Source that caches reads. 
+ // + // Reads are invalidated when *any* session gets a didChangeWatchedFile + // notification. This is fine: it is the responsibility of memoizedFS to hold + // our best knowledge of the current file system state. + *memoizedFS + + // modCache holds the + modCache *sharedModCache +} + +var cacheIndex, sessionIndex, viewIndex int64 + +func (c *Cache) ID() string { return c.id } +func (c *Cache) MemStats() map[reflect.Type]int { return c.store.Stats() } + +// FileStats returns information about the set of files stored in the cache. +// It is intended for debugging only. +func (c *Cache) FileStats() (stats command.FileStats) { + stats.Total, stats.Largest, stats.Errs = c.fileStats() + return +} diff --git a/gopls/internal/cache/check.go b/gopls/internal/cache/check.go new file mode 100644 index 00000000000..6bbdf2e2541 --- /dev/null +++ b/gopls/internal/cache/check.go @@ -0,0 +1,1991 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cache + +import ( + "context" + "crypto/sha256" + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "go/types" + "regexp" + "runtime" + "sort" + "strings" + "sync" + "sync/atomic" + + "golang.org/x/mod/module" + "golang.org/x/sync/errgroup" + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/cache/typerefs" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/filecache" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/gopls/internal/util/slices" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" + "golang.org/x/tools/internal/gcimporter" + "golang.org/x/tools/internal/packagesinternal" + "golang.org/x/tools/internal/tokeninternal" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/versions" +) + +// Various optimizations that should not affect correctness. +const ( + preserveImportGraph = true // hold on to the import graph for open packages +) + +type unit = struct{} + +// A typeCheckBatch holds data for a logical type-checking operation, which may +// type-check many unrelated packages. +// +// It shares state such as parsed files and imports, to optimize type-checking +// for packages with overlapping dependency graphs. 
+type typeCheckBatch struct { + activePackageCache interface { + getActivePackage(id PackageID) *Package + setActivePackage(id PackageID, pkg *Package) + } + syntaxIndex map[PackageID]int // requested ID -> index in ids + pre preTypeCheck + post postTypeCheck + handles map[PackageID]*packageHandle + parseCache *parseCache + fset *token.FileSet // describes all parsed or imported files + cpulimit chan unit // concurrency limiter for CPU-bound operations + + mu sync.Mutex + syntaxPackages map[PackageID]*futurePackage // results of processing a requested package; may hold (nil, nil) + importPackages map[PackageID]*futurePackage // package results to use for importing +} + +// A futurePackage is a future result of type checking or importing a package, +// to be cached in a map. +// +// The goroutine that creates the futurePackage is responsible for evaluating +// its value, and closing the done channel. +type futurePackage struct { + done chan unit + v pkgOrErr +} + +type pkgOrErr struct { + pkg *types.Package + err error +} + +// TypeCheck parses and type-checks the specified packages, +// and returns them in the same order as the ids. +// The resulting packages' types may belong to different importers, +// so types from different packages are incommensurable. +// +// The resulting packages slice always contains len(ids) entries, though some +// of them may be nil if (and only if) the resulting error is non-nil. +// +// An error is returned if any of the requested packages fail to type-check. +// This is different from having type-checking errors: a failure to type-check +// indicates context cancellation or otherwise significant failure to perform +// the type-checking operation. +// +// In general, clients should never need to type-checked syntax for an +// intermediate test variant (ITV) package. Callers should apply +// RemoveIntermediateTestVariants (or equivalent) before this method, or any +// of the potentially type-checking methods below. 
+func (s *Snapshot) TypeCheck(ctx context.Context, ids ...PackageID) ([]*Package, error) { + pkgs := make([]*Package, len(ids)) + + var ( + needIDs []PackageID // ids to type-check + indexes []int // original index of requested ids + ) + + // Check for existing active packages, as any package will do. + // + // This is also done inside forEachPackage, but doing it here avoids + // unnecessary set up for type checking (e.g. assembling the package handle + // graph). + for i, id := range ids { + if pkg := s.getActivePackage(id); pkg != nil { + pkgs[i] = pkg + } else { + needIDs = append(needIDs, id) + indexes = append(indexes, i) + } + } + + post := func(i int, pkg *Package) { + pkgs[indexes[i]] = pkg + } + return pkgs, s.forEachPackage(ctx, needIDs, nil, post) +} + +// getImportGraph returns a shared import graph use for this snapshot, or nil. +// +// This is purely an optimization: holding on to more imports allows trading +// memory for CPU and latency. Currently, getImportGraph returns an import +// graph containing all packages imported by open packages, since these are +// highly likely to be needed when packages change. +// +// Furthermore, since we memoize active packages, including their imports in +// the shared import graph means we don't run the risk of pinning duplicate +// copies of common imports, if active packages are computed in separate type +// checking batches. +func (s *Snapshot) getImportGraph(ctx context.Context) *importGraph { + if !preserveImportGraph { + return nil + } + s.mu.Lock() + + // Evaluate the shared import graph for the snapshot. There are three major + // codepaths here: + // + // 1. importGraphDone == nil, importGraph == nil: it is this goroutine's + // responsibility to type-check the shared import graph. + // 2. 
importGraphDone == nil, importGraph != nil: it is this goroutine's + // responsibility to resolve the import graph, which may result in + // type-checking only if the existing importGraph (carried over from the + // preceding snapshot) is invalid. + // 3. importGraphDone != nil: some other goroutine is doing (1) or (2), wait + // for the work to be done. + done := s.importGraphDone + if done == nil { + done = make(chan unit) + s.importGraphDone = done + release := s.Acquire() // must acquire to use the snapshot asynchronously + go func() { + defer release() + importGraph, err := s.resolveImportGraph() // may be nil + if err != nil { + if ctx.Err() == nil { + event.Error(ctx, "computing the shared import graph", err) + } + importGraph = nil + } + s.mu.Lock() + s.importGraph = importGraph + s.mu.Unlock() + close(done) + }() + } + s.mu.Unlock() + + select { + case <-done: + return s.importGraph + case <-ctx.Done(): + return nil + } +} + +// resolveImportGraph evaluates the shared import graph to use for +// type-checking in this snapshot. This may involve re-using the import graph +// of the previous snapshot (stored in s.importGraph), or computing a fresh +// import graph. +// +// resolveImportGraph should only be called from getImportGraph. +func (s *Snapshot) resolveImportGraph() (*importGraph, error) { + ctx := s.backgroundCtx + ctx, done := event.Start(event.Detach(ctx), "cache.resolveImportGraph") + defer done() + + s.mu.Lock() + lastImportGraph := s.importGraph + s.mu.Unlock() + + openPackages := make(map[PackageID]bool) + for _, fh := range s.Overlays() { + // golang/go#66145: don't call MetadataForFile here. This function, which + // builds a shared import graph, is an optimization. We don't want it to + // have the side effect of triggering a load. + // + // In the past, a call to MetadataForFile here caused a bunch of + // unnecessary loads in multi-root workspaces (and as a result, spurious + // diagnostics). 
+ g := s.MetadataGraph() + var mps []*metadata.Package + for _, id := range g.IDs[fh.URI()] { + mps = append(mps, g.Packages[id]) + } + metadata.RemoveIntermediateTestVariants(&mps) + for _, mp := range mps { + openPackages[mp.ID] = true + } + } + + var openPackageIDs []PackageID + for id := range openPackages { + openPackageIDs = append(openPackageIDs, id) + } + + handles, err := s.getPackageHandles(ctx, openPackageIDs) + if err != nil { + return nil, err + } + + // Subtlety: we erase the upward cone of open packages from the shared import + // graph, to increase reusability. + // + // This is easiest to understand via an example: suppose A imports B, and B + // imports C. Now suppose A and B are open. If we preserve the entire set of + // shared deps by open packages, deps will be {B, C}. But this means that any + // change to the open package B will invalidate the shared import graph, + // meaning we will experience no benefit from sharing when B is edited. + // Consider that this will be a common scenario, when A is foo_test and B is + // foo. Better to just preserve the shared import C. + // + // With precise pruning, we may want to truncate this search based on + // reachability. + // + // TODO(rfindley): this logic could use a unit test. + volatileDeps := make(map[PackageID]bool) + var isVolatile func(*packageHandle) bool + isVolatile = func(ph *packageHandle) (volatile bool) { + if v, ok := volatileDeps[ph.mp.ID]; ok { + return v + } + defer func() { + volatileDeps[ph.mp.ID] = volatile + }() + if openPackages[ph.mp.ID] { + return true + } + for _, dep := range ph.mp.DepsByPkgPath { + if isVolatile(handles[dep]) { + return true + } + } + return false + } + for _, dep := range handles { + isVolatile(dep) + } + for id, volatile := range volatileDeps { + if volatile { + delete(handles, id) + } + } + + // We reuse the last import graph if and only if none of the dependencies + // have changed. 
Doing better would involve analyzing dependencies to find + // subgraphs that are still valid. Not worth it, especially when in the + // common case nothing has changed. + unchanged := lastImportGraph != nil && len(handles) == len(lastImportGraph.depKeys) + var ids []PackageID + depKeys := make(map[PackageID]file.Hash) + for id, ph := range handles { + ids = append(ids, id) + depKeys[id] = ph.key + if unchanged { + prevKey, ok := lastImportGraph.depKeys[id] + unchanged = ok && prevKey == ph.key + } + } + + if unchanged { + return lastImportGraph, nil + } + + b, err := s.forEachPackageInternal(ctx, nil, ids, nil, nil, nil, handles) + if err != nil { + return nil, err + } + + next := &importGraph{ + fset: b.fset, + depKeys: depKeys, + imports: make(map[PackageID]pkgOrErr), + } + for id, fut := range b.importPackages { + if fut.v.pkg == nil && fut.v.err == nil { + panic(fmt.Sprintf("internal error: import node %s is not evaluated", id)) + } + next.imports[id] = fut.v + } + return next, nil +} + +// An importGraph holds selected results of a type-checking pass, to be re-used +// by subsequent snapshots. +type importGraph struct { + fset *token.FileSet // fileset used for type checking imports + depKeys map[PackageID]file.Hash // hash of direct dependencies for this graph + imports map[PackageID]pkgOrErr // results of type checking +} + +// Package visiting functions used by forEachPackage; see the documentation of +// forEachPackage for details. +type ( + preTypeCheck = func(int, *packageHandle) bool // false => don't type check + postTypeCheck = func(int, *Package) +) + +// forEachPackage does a pre- and post- order traversal of the packages +// specified by ids using the provided pre and post functions. +// +// The pre func is optional. If set, pre is evaluated after the package +// handle has been constructed, but before type-checking. If pre returns false, +// type-checking is skipped for this package handle. 
+// +// post is called with a syntax package after type-checking completes +// successfully. It is only called if pre returned true. +// +// Both pre and post may be called concurrently. +func (s *Snapshot) forEachPackage(ctx context.Context, ids []PackageID, pre preTypeCheck, post postTypeCheck) error { + ctx, done := event.Start(ctx, "cache.forEachPackage", tag.PackageCount.Of(len(ids))) + defer done() + + if len(ids) == 0 { + return nil // short cut: many call sites do not handle empty ids + } + + handles, err := s.getPackageHandles(ctx, ids) + if err != nil { + return err + } + + impGraph := s.getImportGraph(ctx) + _, err = s.forEachPackageInternal(ctx, impGraph, nil, ids, pre, post, handles) + return err +} + +// forEachPackageInternal is used by both forEachPackage and loadImportGraph to +// type-check a graph of packages. +// +// If a non-nil importGraph is provided, imports in this graph will be reused. +func (s *Snapshot) forEachPackageInternal(ctx context.Context, importGraph *importGraph, importIDs, syntaxIDs []PackageID, pre preTypeCheck, post postTypeCheck, handles map[PackageID]*packageHandle) (*typeCheckBatch, error) { + b := &typeCheckBatch{ + activePackageCache: s, + pre: pre, + post: post, + handles: handles, + parseCache: s.view.parseCache, + fset: fileSetWithBase(reservedForParsing), + syntaxIndex: make(map[PackageID]int), + cpulimit: make(chan unit, runtime.GOMAXPROCS(0)), + syntaxPackages: make(map[PackageID]*futurePackage), + importPackages: make(map[PackageID]*futurePackage), + } + + if importGraph != nil { + // Clone the file set every time, to ensure we do not leak files. + b.fset = tokeninternal.CloneFileSet(importGraph.fset) + // Pre-populate future cache with 'done' futures. 
+ done := make(chan unit) + close(done) + for id, res := range importGraph.imports { + b.importPackages[id] = &futurePackage{done, res} + } + } else { + b.fset = fileSetWithBase(reservedForParsing) + } + + for i, id := range syntaxIDs { + b.syntaxIndex[id] = i + } + + // Start a single goroutine for each requested package. + // + // Other packages are reached recursively, and will not be evaluated if they + // are not needed. + var g errgroup.Group + for _, id := range importIDs { + id := id + g.Go(func() error { + _, err := b.getImportPackage(ctx, id) + return err + }) + } + for i, id := range syntaxIDs { + i := i + id := id + g.Go(func() error { + _, err := b.handleSyntaxPackage(ctx, i, id) + return err + }) + } + return b, g.Wait() +} + +// TODO(rfindley): re-order the declarations below to read better from top-to-bottom. + +// getImportPackage returns the *types.Package to use for importing the +// package referenced by id. +// +// This may be the package produced by type-checking syntax (as in the case +// where id is in the set of requested IDs), a package loaded from export data, +// or a package type-checked for import only. +func (b *typeCheckBatch) getImportPackage(ctx context.Context, id PackageID) (pkg *types.Package, err error) { + b.mu.Lock() + f, ok := b.importPackages[id] + if ok { + b.mu.Unlock() + + select { + case <-ctx.Done(): + return nil, ctx.Err() + case <-f.done: + return f.v.pkg, f.v.err + } + } + + f = &futurePackage{done: make(chan unit)} + b.importPackages[id] = f + b.mu.Unlock() + + defer func() { + f.v = pkgOrErr{pkg, err} + close(f.done) + }() + + if index, ok := b.syntaxIndex[id]; ok { + pkg, err := b.handleSyntaxPackage(ctx, index, id) + if err != nil { + return nil, err + } + if pkg != nil { + return pkg, nil + } + // type-checking was short-circuited by the pre- func. + } + + // unsafe cannot be imported or type-checked. 
+ if id == "unsafe" { + return types.Unsafe, nil + } + + ph := b.handles[id] + + // Do a second check for "unsafe" defensively, due to golang/go#60890. + if ph.mp.PkgPath == "unsafe" { + bug.Reportf("encountered \"unsafe\" as %s (golang/go#60890)", id) + return types.Unsafe, nil + } + + data, err := filecache.Get(exportDataKind, ph.key) + if err == filecache.ErrNotFound { + // No cached export data: type-check as fast as possible. + return b.checkPackageForImport(ctx, ph) + } + if err != nil { + return nil, fmt.Errorf("failed to read cache data for %s: %v", ph.mp.ID, err) + } + return b.importPackage(ctx, ph.mp, data) +} + +// handleSyntaxPackage handles one package from the ids slice. +// +// If type checking occurred while handling the package, it returns the +// resulting types.Package so that it may be used for importing. +// +// handleSyntaxPackage returns (nil, nil) if pre returned false. +func (b *typeCheckBatch) handleSyntaxPackage(ctx context.Context, i int, id PackageID) (pkg *types.Package, err error) { + b.mu.Lock() + f, ok := b.syntaxPackages[id] + if ok { + b.mu.Unlock() + <-f.done + return f.v.pkg, f.v.err + } + + f = &futurePackage{done: make(chan unit)} + b.syntaxPackages[id] = f + b.mu.Unlock() + defer func() { + f.v = pkgOrErr{pkg, err} + close(f.done) + }() + + ph := b.handles[id] + if b.pre != nil && !b.pre(i, ph) { + return nil, nil // skip: export data only + } + + // Check for existing active packages. + // + // Since gopls can't depend on package identity, any instance of the + // requested package must be ok to return. + // + // This is an optimization to avoid redundant type-checking: following + // changes to an open package many LSP clients send several successive + // requests for package information for the modified package (semantic + // tokens, code lens, inlay hints, etc.) 
+ if pkg := b.activePackageCache.getActivePackage(id); pkg != nil { + b.post(i, pkg) + return nil, nil // skip: not checked in this batch + } + + // Wait for predecessors. + { + var g errgroup.Group + for _, depID := range ph.mp.DepsByPkgPath { + depID := depID + g.Go(func() error { + _, err := b.getImportPackage(ctx, depID) + return err + }) + } + if err := g.Wait(); err != nil { + // Failure to import a package should not abort the whole operation. + // Stop only if the context was cancelled, a likely cause. + // Import errors will be reported as type diagnostics. + if ctx.Err() != nil { + return nil, ctx.Err() + } + } + } + + // Wait to acquire a CPU token. + // + // Note: it is important to acquire this token only after awaiting + // predecessors, to avoid starvation. + select { + case <-ctx.Done(): + return nil, ctx.Err() + case b.cpulimit <- unit{}: + defer func() { + <-b.cpulimit // release CPU token + }() + } + + // Compute the syntax package. + p, err := b.checkPackage(ctx, ph) + if err != nil { + return nil, err + } + + // Update caches. + b.activePackageCache.setActivePackage(id, p) // store active packages in memory + go storePackageResults(ctx, ph, p) // ...and write all packages to disk + + b.post(i, p) + + return p.pkg.types, nil +} + +// storePackageResults serializes and writes information derived from p to the +// file cache. +// The context is used only for logging; cancellation does not affect the operation. 
+func storePackageResults(ctx context.Context, ph *packageHandle, p *Package) { + toCache := map[string][]byte{ + xrefsKind: p.pkg.xrefs(), + methodSetsKind: p.pkg.methodsets().Encode(), + diagnosticsKind: encodeDiagnostics(p.pkg.diagnostics), + } + + if p.metadata.PkgPath != "unsafe" { // unsafe cannot be exported + exportData, err := gcimporter.IExportShallow(p.pkg.fset, p.pkg.types, bug.Reportf) + if err != nil { + bug.Reportf("exporting package %v: %v", p.metadata.ID, err) + } else { + toCache[exportDataKind] = exportData + } + } else if p.metadata.ID != "unsafe" { + // golang/go#60890: we should only ever see one variant of the "unsafe" + // package. + bug.Reportf("encountered \"unsafe\" as %s (golang/go#60890)", p.metadata.ID) + } + + for kind, data := range toCache { + if err := filecache.Set(kind, ph.key, data); err != nil { + event.Error(ctx, fmt.Sprintf("storing %s data for %s", kind, ph.mp.ID), err) + } + } +} + +// importPackage loads the given package from its export data in p.exportData +// (which must already be populated). +func (b *typeCheckBatch) importPackage(ctx context.Context, mp *metadata.Package, data []byte) (*types.Package, error) { + ctx, done := event.Start(ctx, "cache.typeCheckBatch.importPackage", tag.Package.Of(string(mp.ID))) + defer done() + + impMap := b.importMap(mp.ID) + + thisPackage := types.NewPackage(string(mp.PkgPath), string(mp.Name)) + getPackages := func(items []gcimporter.GetPackagesItem) error { + for i, item := range items { + var id PackageID + var pkg *types.Package + if item.Path == string(mp.PkgPath) { + id = mp.ID + pkg = thisPackage + + // debugging issues #60904, #64235 + if pkg.Name() != item.Name { + // This would mean that mp.Name != item.Name, so the + // manifest in the export data of mp.PkgPath is + // inconsistent with mp.Name. Or perhaps there + // are duplicate PkgPath items in the manifest? 
+ return bug.Errorf("internal error: package name is %q, want %q (id=%q, path=%q) (see issue #60904)", + pkg.Name(), item.Name, id, item.Path) + } + } else { + id = impMap[item.Path] + var err error + pkg, err = b.getImportPackage(ctx, id) + if err != nil { + return err + } + + // We intentionally duplicate the bug.Errorf calls because + // telemetry tells us only the program counter, not the message. + + // debugging issues #60904, #64235 + if pkg.Name() != item.Name { + // This means that, while reading the manifest of the + // export data of mp.PkgPath, one of its indirect + // dependencies had a name that differs from the + // Metadata.Name + return bug.Errorf("internal error: package name is %q, want %q (id=%q, path=%q) (see issue #60904)", + pkg.Name(), item.Name, id, item.Path) + } + } + items[i].Pkg = pkg + + } + return nil + } + + // Importing is potentially expensive, and might not encounter cancellations + // via dependencies (e.g. if they have already been evaluated). + if ctx.Err() != nil { + return nil, ctx.Err() + } + + imported, err := gcimporter.IImportShallow(b.fset, getPackages, data, string(mp.PkgPath), bug.Reportf) + if err != nil { + return nil, fmt.Errorf("import failed for %q: %v", mp.ID, err) + } + return imported, nil +} + +// checkPackageForImport type checks, but skips function bodies and does not +// record syntax information. +func (b *typeCheckBatch) checkPackageForImport(ctx context.Context, ph *packageHandle) (*types.Package, error) { + ctx, done := event.Start(ctx, "cache.typeCheckBatch.checkPackageForImport", tag.Package.Of(string(ph.mp.ID))) + defer done() + + onError := func(e error) { + // Ignore errors for exporting. + } + cfg := b.typesConfig(ctx, ph.localInputs, onError) + cfg.IgnoreFuncBodies = true + + // Parse the compiled go files, bypassing the parse cache as packages checked + // for import are unlikely to get cache hits. Additionally, we can optimize + // parsing slightly by not passing parser.ParseComments. 
+ pgfs := make([]*parsego.File, len(ph.localInputs.compiledGoFiles)) + { + var group errgroup.Group + // Set an arbitrary concurrency limit; we want some parallelism but don't + // need GOMAXPROCS, as there is already a lot of concurrency among calls to + // checkPackageForImport. + // + // TODO(rfindley): is there a better way to limit parallelism here? We could + // have a global limit on the type-check batch, but would have to be very + // careful to avoid starvation. + group.SetLimit(4) + for i, fh := range ph.localInputs.compiledGoFiles { + i, fh := i, fh + group.Go(func() error { + pgf, err := parseGoImpl(ctx, b.fset, fh, parser.SkipObjectResolution, false) + pgfs[i] = pgf + return err + }) + } + if err := group.Wait(); err != nil { + return nil, err // cancelled, or catastrophic error (e.g. missing file) + } + } + pkg := types.NewPackage(string(ph.localInputs.pkgPath), string(ph.localInputs.name)) + check := types.NewChecker(cfg, b.fset, pkg, nil) + + files := make([]*ast.File, len(pgfs)) + for i, pgf := range pgfs { + files[i] = pgf.File + } + + // Type checking is expensive, and we may not have encountered cancellations + // via parsing (e.g. if we got nothing but cache hits for parsed files). + if ctx.Err() != nil { + return nil, ctx.Err() + } + + _ = check.Files(files) // ignore errors + + // If the context was cancelled, we may have returned a ton of transient + // errors to the type checker. Swallow them. + if ctx.Err() != nil { + return nil, ctx.Err() + } + + // Asynchronously record export data. + go func() { + exportData, err := gcimporter.IExportShallow(b.fset, pkg, bug.Reportf) + if err != nil { + bug.Reportf("exporting package %v: %v", ph.mp.ID, err) + return + } + if err := filecache.Set(exportDataKind, ph.key, exportData); err != nil { + event.Error(ctx, fmt.Sprintf("storing export data for %s", ph.mp.ID), err) + } + }() + return pkg, nil +} + +// importMap returns the map of package path -> package ID relative to the +// specified ID. 
+func (b *typeCheckBatch) importMap(id PackageID) map[string]PackageID { + impMap := make(map[string]PackageID) + var populateDeps func(*metadata.Package) + populateDeps = func(parent *metadata.Package) { + for _, id := range parent.DepsByPkgPath { + mp := b.handles[id].mp + if prevID, ok := impMap[string(mp.PkgPath)]; ok { + // debugging #63822 + if prevID != mp.ID { + bug.Reportf("inconsistent view of dependencies") + } + continue + } + impMap[string(mp.PkgPath)] = mp.ID + populateDeps(mp) + } + } + mp := b.handles[id].mp + populateDeps(mp) + return impMap +} + +// A packageHandle holds inputs required to compute a Package, including +// metadata, derived diagnostics, files, and settings. Additionally, +// packageHandles manage a key for these inputs, to use in looking up +// precomputed results. +// +// packageHandles may be invalid following an invalidation via snapshot.clone, +// but the handles returned by getPackageHandles will always be valid. +// +// packageHandles are critical for implementing "precise pruning" in gopls: +// packageHandle.key is a hash of a precise set of inputs, such as package +// files and "reachable" syntax, that may affect type checking. +// +// packageHandles also keep track of state that allows gopls to compute, and +// then quickly recompute, these keys. This state is split into two categories: +// - local state, which depends only on the package's local files and metadata +// - other state, which includes data derived from dependencies. +// +// Dividing the data in this way allows gopls to minimize invalidation when a +// package is modified. For example, any change to a package file fully +// invalidates the package handle. On the other hand, if that change was not +// metadata-affecting it may be the case that packages indirectly depending on +// the modified package are unaffected by the change. 
For that reason, we have +// two types of invalidation, corresponding to the two types of data above: +// - deletion of the handle, which occurs when the package itself changes +// - clearing of the validated field, which marks the package as possibly +// invalid. +// +// With the second type of invalidation, packageHandles are re-evaluated from the +// bottom up. If this process encounters a packageHandle whose deps have not +// changed (as detected by the depkeys field), then the packageHandle in +// question must also not have changed, and we need not re-evaluate its key. +type packageHandle struct { + mp *metadata.Package + + // loadDiagnostics memoizes the result of processing error messages from + // go/packages (i.e. `go list`). + // + // These are derived from metadata using a snapshot. Since they depend on + // file contents (for translating positions), they should theoretically be + // invalidated by file changes, but historically haven't been. In practice + // they are rare and indicate a fundamental error that needs to be corrected + // before development can continue, so it may not be worth significant + // engineering effort to implement accurate invalidation here. + // + // TODO(rfindley): loadDiagnostics are out of place here, as they don't + // directly relate to type checking. We should perhaps move the caching of + // load diagnostics to an entirely separate component, so that Packages need + // only be concerned with parsing and type checking. + // (Nevertheless, since the lifetime of load diagnostics matches that of the + // Metadata, it is convenient to memoize them here.) + loadDiagnostics []*Diagnostic + + // Local data: + + // localInputs holds all local type-checking localInputs, excluding + // dependencies. + localInputs typeCheckInputs + // localKey is a hash of localInputs. + localKey file.Hash + // refs is the result of syntactic dependency analysis produced by the + // typerefs package. 
+	refs map[string][]typerefs.Symbol
+
+	// Data derived from dependencies:
+
+	// validated indicates whether the current packageHandle is known to have a
+	// valid key. Invalidated package handles are stored for packages whose
+	// type information may have changed.
+	validated bool
+	// depKeys records the key of each dependency that was used to calculate the
+	// key above. If the handle becomes invalid, we must re-check that each still
+	// matches.
+	depKeys map[PackageID]file.Hash
+	// key is the hashed key for the package.
+	//
+	// It includes all bits of the transitive closure of
+	// dependencies' sources.
+	key file.Hash
+}
+
+// clone returns a copy of the receiver with the validated bit set to the
+// provided value.
+func (ph *packageHandle) clone(validated bool) *packageHandle {
+	copy := *ph
+	copy.validated = validated
+	return &copy
+}
+
+// getPackageHandles gets package handles for all given ids and their
+// dependencies, recursively.
+func (s *Snapshot) getPackageHandles(ctx context.Context, ids []PackageID) (map[PackageID]*packageHandle, error) {
+	// perform a two-pass traversal.
+	//
+	// On the first pass, build up a bidirectional graph of handle nodes, and collect leaves.
+	// Then build package handles from bottom up.
+ + s.mu.Lock() // guard s.meta and s.packages below + b := &packageHandleBuilder{ + s: s, + transitiveRefs: make(map[typerefs.IndexID]*partialRefs), + nodes: make(map[typerefs.IndexID]*handleNode), + } + + var leaves []*handleNode + var makeNode func(*handleNode, PackageID) *handleNode + makeNode = func(from *handleNode, id PackageID) *handleNode { + idxID := b.s.pkgIndex.IndexID(id) + n, ok := b.nodes[idxID] + if !ok { + mp := s.meta.Packages[id] + if mp == nil { + panic(fmt.Sprintf("nil metadata for %q", id)) + } + n = &handleNode{ + mp: mp, + idxID: idxID, + unfinishedSuccs: int32(len(mp.DepsByPkgPath)), + } + if entry, hit := b.s.packages.Get(mp.ID); hit { + n.ph = entry + } + if n.unfinishedSuccs == 0 { + leaves = append(leaves, n) + } else { + n.succs = make(map[PackageID]*handleNode, n.unfinishedSuccs) + } + b.nodes[idxID] = n + for _, depID := range mp.DepsByPkgPath { + n.succs[depID] = makeNode(n, depID) + } + } + // Add edge from predecessor. + if from != nil { + n.preds = append(n.preds, from) + } + return n + } + for _, id := range ids { + makeNode(nil, id) + } + s.mu.Unlock() + + g, ctx := errgroup.WithContext(ctx) + + // files are preloaded, so building package handles is CPU-bound. + // + // Note that we can't use g.SetLimit, as that could result in starvation: + // g.Go blocks until a slot is available, and so all existing goroutines + // could be blocked trying to enqueue a predecessor. + limiter := make(chan unit, runtime.GOMAXPROCS(0)) + + var enqueue func(*handleNode) + enqueue = func(n *handleNode) { + g.Go(func() error { + limiter <- unit{} + defer func() { <-limiter }() + + if ctx.Err() != nil { + return ctx.Err() + } + + b.buildPackageHandle(ctx, n) + + for _, pred := range n.preds { + if atomic.AddInt32(&pred.unfinishedSuccs, -1) == 0 { + enqueue(pred) + } + } + + return n.err + }) + } + for _, leaf := range leaves { + enqueue(leaf) + } + + if err := g.Wait(); err != nil { + return nil, err + } + + // Copy handles into the result map. 
+ handles := make(map[PackageID]*packageHandle, len(b.nodes)) + for _, v := range b.nodes { + assert(v.ph != nil, "nil handle") + handles[v.mp.ID] = v.ph + } + + return handles, nil +} + +// A packageHandleBuilder computes a batch of packageHandles concurrently, +// sharing computed transitive reachability sets used to compute package keys. +type packageHandleBuilder struct { + s *Snapshot + + // nodes are assembled synchronously. + nodes map[typerefs.IndexID]*handleNode + + // transitiveRefs is incrementally evaluated as package handles are built. + transitiveRefsMu sync.Mutex + transitiveRefs map[typerefs.IndexID]*partialRefs // see getTransitiveRefs +} + +// A handleNode represents a to-be-computed packageHandle within a graph of +// predecessors and successors. +// +// It is used to implement a bottom-up construction of packageHandles. +type handleNode struct { + mp *metadata.Package + idxID typerefs.IndexID + ph *packageHandle + err error + preds []*handleNode + succs map[PackageID]*handleNode + unfinishedSuccs int32 +} + +// partialRefs maps names declared by a given package to their set of +// transitive references. +// +// If complete is set, refs is known to be complete for the package in +// question. Otherwise, it may only map a subset of all names declared by the +// package. +type partialRefs struct { + refs map[string]*typerefs.PackageSet + complete bool +} + +// getTransitiveRefs gets or computes the set of transitively reachable +// packages for each exported name in the package specified by id. +// +// The operation may fail if building a predecessor failed. If and only if this +// occurs, the result will be nil. 
+func (b *packageHandleBuilder) getTransitiveRefs(pkgID PackageID) map[string]*typerefs.PackageSet { + b.transitiveRefsMu.Lock() + defer b.transitiveRefsMu.Unlock() + + idxID := b.s.pkgIndex.IndexID(pkgID) + trefs, ok := b.transitiveRefs[idxID] + if !ok { + trefs = &partialRefs{ + refs: make(map[string]*typerefs.PackageSet), + } + b.transitiveRefs[idxID] = trefs + } + + if !trefs.complete { + trefs.complete = true + ph := b.nodes[idxID].ph + for name := range ph.refs { + if ('A' <= name[0] && name[0] <= 'Z') || token.IsExported(name) { + if _, ok := trefs.refs[name]; !ok { + pkgs := b.s.pkgIndex.NewSet() + for _, sym := range ph.refs[name] { + pkgs.Add(sym.Package) + otherSet := b.getOneTransitiveRefLocked(sym) + pkgs.Union(otherSet) + } + trefs.refs[name] = pkgs + } + } + } + } + + return trefs.refs +} + +// getOneTransitiveRefLocked computes the full set packages transitively +// reachable through the given sym reference. +// +// It may return nil if the reference is invalid (i.e. the referenced name does +// not exist). +func (b *packageHandleBuilder) getOneTransitiveRefLocked(sym typerefs.Symbol) *typerefs.PackageSet { + assert(token.IsExported(sym.Name), "expected exported symbol") + + trefs := b.transitiveRefs[sym.Package] + if trefs == nil { + trefs = &partialRefs{ + refs: make(map[string]*typerefs.PackageSet), + complete: false, + } + b.transitiveRefs[sym.Package] = trefs + } + + pkgs, ok := trefs.refs[sym.Name] + if ok && pkgs == nil { + // See below, where refs is set to nil before recursing. + bug.Reportf("cycle detected to %q in reference graph", sym.Name) + } + + // Note that if (!ok && trefs.complete), the name does not exist in the + // referenced package, and we should not write to trefs as that may introduce + // a race. 
+ if !ok && !trefs.complete { + n := b.nodes[sym.Package] + if n == nil { + // We should always have IndexID in our node set, because symbol references + // should only be recorded for packages that actually exist in the import graph. + // + // However, it is not easy to prove this (typerefs are serialized and + // deserialized), so make this code temporarily defensive while we are on a + // point release. + // + // TODO(rfindley): in the future, we should turn this into an assertion. + bug.Reportf("missing reference to package %s", b.s.pkgIndex.PackageID(sym.Package)) + return nil + } + + // Break cycles. This is perhaps overly defensive as cycles should not + // exist at this point: metadata cycles should have been broken at load + // time, and intra-package reference cycles should have been contracted by + // the typerefs algorithm. + // + // See the "cycle detected" bug report above. + trefs.refs[sym.Name] = nil + + pkgs := b.s.pkgIndex.NewSet() + for _, sym2 := range n.ph.refs[sym.Name] { + pkgs.Add(sym2.Package) + otherSet := b.getOneTransitiveRefLocked(sym2) + pkgs.Union(otherSet) + } + trefs.refs[sym.Name] = pkgs + } + + return pkgs +} + +// buildPackageHandle gets or builds a package handle for the given id, storing +// its result in the snapshot.packages map. +// +// buildPackageHandle must only be called from getPackageHandles. +func (b *packageHandleBuilder) buildPackageHandle(ctx context.Context, n *handleNode) { + var prevPH *packageHandle + if n.ph != nil { + // Existing package handle: if it is valid, return it. Otherwise, create a + // copy to update. + if n.ph.validated { + return + } + prevPH = n.ph + // Either prevPH is still valid, or we will update the key and depKeys of + // this copy. In either case, the result will be valid. + n.ph = prevPH.clone(true) + } else { + // No package handle: read and analyze the package syntax. 
+ inputs, err := b.s.typeCheckInputs(ctx, n.mp) + if err != nil { + n.err = err + return + } + refs, err := b.s.typerefs(ctx, n.mp, inputs.compiledGoFiles) + if err != nil { + n.err = err + return + } + n.ph = &packageHandle{ + mp: n.mp, + loadDiagnostics: computeLoadDiagnostics(ctx, b.s, n.mp), + localInputs: inputs, + localKey: localPackageKey(inputs), + refs: refs, + validated: true, + } + } + + // ph either did not exist, or was invalid. We must re-evaluate deps and key. + if err := b.evaluatePackageHandle(prevPH, n); err != nil { + n.err = err + return + } + + assert(n.ph.validated, "unvalidated handle") + + // Ensure the result (or an equivalent) is recorded in the snapshot. + b.s.mu.Lock() + defer b.s.mu.Unlock() + + // Check that the metadata has not changed + // (which should invalidate this handle). + // + // TODO(rfindley): eventually promote this to an assert. + // TODO(rfindley): move this to after building the package handle graph? + if b.s.meta.Packages[n.mp.ID] != n.mp { + bug.Reportf("stale metadata for %s", n.mp.ID) + } + + // Check the packages map again in case another goroutine got there first. + if alt, ok := b.s.packages.Get(n.mp.ID); ok && alt.validated { + if alt.mp != n.mp { + bug.Reportf("existing package handle does not match for %s", n.mp.ID) + } + n.ph = alt + } else { + b.s.packages.Set(n.mp.ID, n.ph, nil) + } +} + +// evaluatePackageHandle validates and/or computes the key of ph, setting key, +// depKeys, and the validated flag on ph. +// +// It uses prevPH to avoid recomputing keys that can't have changed, since +// their depKeys did not change. +// +// See the documentation for packageHandle for more details about packageHandle +// state, and see the documentation for the typerefs package for more details +// about precise reachability analysis. +func (b *packageHandleBuilder) evaluatePackageHandle(prevPH *packageHandle, n *handleNode) error { + // Opt: if no dep keys have changed, we need not re-evaluate the key. 
+ if prevPH != nil { + depsChanged := false + assert(len(prevPH.depKeys) == len(n.succs), "mismatching dep count") + for id, succ := range n.succs { + oldKey, ok := prevPH.depKeys[id] + assert(ok, "missing dep") + if oldKey != succ.ph.key { + depsChanged = true + break + } + } + if !depsChanged { + return nil // key cannot have changed + } + } + + // Deps have changed, so we must re-evaluate the key. + n.ph.depKeys = make(map[PackageID]file.Hash) + + // See the typerefs package: the reachable set of packages is defined to be + // the set of packages containing syntax that is reachable through the + // exported symbols in the dependencies of n.ph. + reachable := b.s.pkgIndex.NewSet() + for depID, succ := range n.succs { + n.ph.depKeys[depID] = succ.ph.key + reachable.Add(succ.idxID) + trefs := b.getTransitiveRefs(succ.mp.ID) + if trefs == nil { + // A predecessor failed to build due to e.g. context cancellation. + return fmt.Errorf("missing transitive refs for %s", succ.mp.ID) + } + for _, set := range trefs { + reachable.Union(set) + } + } + + // Collect reachable handles. + var reachableHandles []*packageHandle + // In the presence of context cancellation, any package may be missing. + // We need all dependencies to produce a valid key. + missingReachablePackage := false + reachable.Elems(func(id typerefs.IndexID) { + dh := b.nodes[id] + if dh == nil { + missingReachablePackage = true + } else { + assert(dh.ph.validated, "unvalidated dependency") + reachableHandles = append(reachableHandles, dh.ph) + } + }) + if missingReachablePackage { + return fmt.Errorf("missing reachable package") + } + // Sort for stability. + sort.Slice(reachableHandles, func(i, j int) bool { + return reachableHandles[i].mp.ID < reachableHandles[j].mp.ID + }) + + // Key is the hash of the local key, and the local key of all reachable + // packages. 
+ depHasher := sha256.New() + depHasher.Write(n.ph.localKey[:]) + for _, rph := range reachableHandles { + depHasher.Write(rph.localKey[:]) + } + depHasher.Sum(n.ph.key[:0]) + + return nil +} + +// typerefs returns typerefs for the package described by m and cgfs, after +// either computing it or loading it from the file cache. +func (s *Snapshot) typerefs(ctx context.Context, mp *metadata.Package, cgfs []file.Handle) (map[string][]typerefs.Symbol, error) { + imports := make(map[ImportPath]*metadata.Package) + for impPath, id := range mp.DepsByImpPath { + if id != "" { + imports[impPath] = s.Metadata(id) + } + } + + data, err := s.typerefData(ctx, mp.ID, imports, cgfs) + if err != nil { + return nil, err + } + classes := typerefs.Decode(s.pkgIndex, data) + refs := make(map[string][]typerefs.Symbol) + for _, class := range classes { + for _, decl := range class.Decls { + refs[decl] = class.Refs + } + } + return refs, nil +} + +// typerefData retrieves encoded typeref data from the filecache, or computes it on +// a cache miss. +func (s *Snapshot) typerefData(ctx context.Context, id PackageID, imports map[ImportPath]*metadata.Package, cgfs []file.Handle) ([]byte, error) { + key := typerefsKey(id, imports, cgfs) + if data, err := filecache.Get(typerefsKind, key); err == nil { + return data, nil + } else if err != filecache.ErrNotFound { + bug.Reportf("internal error reading typerefs data: %v", err) + } + + pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), parsego.Full&^parser.ParseComments, true, cgfs...) + if err != nil { + return nil, err + } + data := typerefs.Encode(pgfs, imports) + + // Store the resulting data in the cache. + go func() { + if err := filecache.Set(typerefsKind, key, data); err != nil { + event.Error(ctx, fmt.Sprintf("storing typerefs data for %s", id), err) + } + }() + + return data, nil +} + +// typerefsKey produces a key for the reference information produced by the +// typerefs package. 
+func typerefsKey(id PackageID, imports map[ImportPath]*metadata.Package, compiledGoFiles []file.Handle) file.Hash { + hasher := sha256.New() + + fmt.Fprintf(hasher, "typerefs: %s\n", id) + + importPaths := make([]string, 0, len(imports)) + for impPath := range imports { + importPaths = append(importPaths, string(impPath)) + } + sort.Strings(importPaths) + for _, importPath := range importPaths { + imp := imports[ImportPath(importPath)] + // TODO(rfindley): strength reduce the typerefs.Export API to guarantee + // that it only depends on these attributes of dependencies. + fmt.Fprintf(hasher, "import %s %s %s", importPath, imp.ID, imp.Name) + } + + fmt.Fprintf(hasher, "compiledGoFiles: %d\n", len(compiledGoFiles)) + for _, fh := range compiledGoFiles { + fmt.Fprintln(hasher, fh.Identity()) + } + + var hash [sha256.Size]byte + hasher.Sum(hash[:0]) + return hash +} + +// typeCheckInputs contains the inputs of a call to typeCheckImpl, which +// type-checks a package. +// +// Part of the purpose of this type is to keep type checking in-sync with the +// package handle key, by explicitly identifying the inputs to type checking. +type typeCheckInputs struct { + id PackageID + + // Used for type checking: + pkgPath PackagePath + name PackageName + goFiles, compiledGoFiles []file.Handle + sizes types.Sizes + depsByImpPath map[ImportPath]PackageID + goVersion string // packages.Module.GoVersion, e.g. "1.18" + + // Used for type check diagnostics: + // TODO(rfindley): consider storing less data in gobDiagnostics, and + // interpreting each diagnostic in the context of a fixed set of options. + // Then these fields need not be part of the type checking inputs. + relatedInformation bool + linkTarget string + moduleMode bool +} + +func (s *Snapshot) typeCheckInputs(ctx context.Context, mp *metadata.Package) (typeCheckInputs, error) { + // Read both lists of files of this package. 
+ // + // Parallelism is not necessary here as the files will have already been + // pre-read at load time. + // + // goFiles aren't presented to the type checker--nor + // are they included in the key, unsoundly--but their + // syntax trees are available from (*pkg).File(URI). + // TODO(adonovan): consider parsing them on demand? + // The need should be rare. + goFiles, err := readFiles(ctx, s, mp.GoFiles) + if err != nil { + return typeCheckInputs{}, err + } + compiledGoFiles, err := readFiles(ctx, s, mp.CompiledGoFiles) + if err != nil { + return typeCheckInputs{}, err + } + + goVersion := "" + if mp.Module != nil && mp.Module.GoVersion != "" { + goVersion = mp.Module.GoVersion + } + + return typeCheckInputs{ + id: mp.ID, + pkgPath: mp.PkgPath, + name: mp.Name, + goFiles: goFiles, + compiledGoFiles: compiledGoFiles, + sizes: mp.TypesSizes, + depsByImpPath: mp.DepsByImpPath, + goVersion: goVersion, + + relatedInformation: s.Options().RelatedInformationSupported, + linkTarget: s.Options().LinkTarget, + moduleMode: s.view.moduleMode(), + }, nil +} + +// readFiles reads the content of each file URL from the source +// (e.g. snapshot or cache). +func readFiles(ctx context.Context, fs file.Source, uris []protocol.DocumentURI) (_ []file.Handle, err error) { + fhs := make([]file.Handle, len(uris)) + for i, uri := range uris { + fhs[i], err = fs.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + } + return fhs, nil +} + +// localPackageKey returns a key for local inputs into type-checking, excluding +// dependency information: files, metadata, and configuration. +func localPackageKey(inputs typeCheckInputs) file.Hash { + hasher := sha256.New() + + // In principle, a key must be the hash of an + // unambiguous encoding of all the relevant data. + // If it's ambiguous, we risk collisions. 
+ + // package identifiers + fmt.Fprintf(hasher, "package: %s %s %s\n", inputs.id, inputs.name, inputs.pkgPath) + + // module Go version + fmt.Fprintf(hasher, "go %s\n", inputs.goVersion) + + // import map + importPaths := make([]string, 0, len(inputs.depsByImpPath)) + for impPath := range inputs.depsByImpPath { + importPaths = append(importPaths, string(impPath)) + } + sort.Strings(importPaths) + for _, impPath := range importPaths { + fmt.Fprintf(hasher, "import %s %s", impPath, string(inputs.depsByImpPath[ImportPath(impPath)])) + } + + // file names and contents + fmt.Fprintf(hasher, "compiledGoFiles: %d\n", len(inputs.compiledGoFiles)) + for _, fh := range inputs.compiledGoFiles { + fmt.Fprintln(hasher, fh.Identity()) + } + fmt.Fprintf(hasher, "goFiles: %d\n", len(inputs.goFiles)) + for _, fh := range inputs.goFiles { + fmt.Fprintln(hasher, fh.Identity()) + } + + // types sizes + wordSize := inputs.sizes.Sizeof(types.Typ[types.Int]) + maxAlign := inputs.sizes.Alignof(types.NewPointer(types.Typ[types.Int64])) + fmt.Fprintf(hasher, "sizes: %d %d\n", wordSize, maxAlign) + + fmt.Fprintf(hasher, "relatedInformation: %t\n", inputs.relatedInformation) + fmt.Fprintf(hasher, "linkTarget: %s\n", inputs.linkTarget) + fmt.Fprintf(hasher, "moduleMode: %t\n", inputs.moduleMode) + + var hash [sha256.Size]byte + hasher.Sum(hash[:0]) + return hash +} + +// checkPackage type checks the parsed source files in compiledGoFiles. +// (The resulting pkg also holds the parsed but not type-checked goFiles.) +// deps holds the future results of type-checking the direct dependencies. 
+func (b *typeCheckBatch) checkPackage(ctx context.Context, ph *packageHandle) (*Package, error) { + inputs := ph.localInputs + ctx, done := event.Start(ctx, "cache.typeCheckBatch.checkPackage", tag.Package.Of(string(inputs.id))) + defer done() + + pkg := &syntaxPackage{ + id: inputs.id, + fset: b.fset, // must match parse call below + types: types.NewPackage(string(inputs.pkgPath), string(inputs.name)), + typesSizes: inputs.sizes, + typesInfo: &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + Scopes: make(map[ast.Node]*types.Scope), + }, + } + versions.InitFileVersions(pkg.typesInfo) + + // Collect parsed files from the type check pass, capturing parse errors from + // compiled files. + var err error + pkg.goFiles, err = b.parseCache.parseFiles(ctx, b.fset, parsego.Full, false, inputs.goFiles...) + if err != nil { + return nil, err + } + pkg.compiledGoFiles, err = b.parseCache.parseFiles(ctx, b.fset, parsego.Full, false, inputs.compiledGoFiles...) + if err != nil { + return nil, err + } + for _, pgf := range pkg.compiledGoFiles { + if pgf.ParseErr != nil { + pkg.parseErrors = append(pkg.parseErrors, pgf.ParseErr) + } + } + + // Use the default type information for the unsafe package. + if inputs.pkgPath == "unsafe" { + // Don't type check Unsafe: it's unnecessary, and doing so exposes a data + // race to Unsafe.completed. + pkg.types = types.Unsafe + } else { + + if len(pkg.compiledGoFiles) == 0 { + // No files most likely means go/packages failed. + // + // TODO(rfindley): in the past, we would capture go list errors in this + // case, to present go list errors to the user. However we had no tests for + // this behavior. It is unclear if anything better can be done here. 
+ return nil, fmt.Errorf("no parsed files for package %s", inputs.pkgPath) + } + + onError := func(e error) { + pkg.typeErrors = append(pkg.typeErrors, e.(types.Error)) + } + cfg := b.typesConfig(ctx, inputs, onError) + check := types.NewChecker(cfg, pkg.fset, pkg.types, pkg.typesInfo) + + var files []*ast.File + for _, cgf := range pkg.compiledGoFiles { + files = append(files, cgf.File) + } + + // Type checking is expensive, and we may not have encountered cancellations + // via parsing (e.g. if we got nothing but cache hits for parsed files). + if ctx.Err() != nil { + return nil, ctx.Err() + } + + // Type checking errors are handled via the config, so ignore them here. + _ = check.Files(files) // 50us-15ms, depending on size of package + + // If the context was cancelled, we may have returned a ton of transient + // errors to the type checker. Swallow them. + if ctx.Err() != nil { + return nil, ctx.Err() + } + + // Collect imports by package path for the DependencyTypes API. + pkg.importMap = make(map[PackagePath]*types.Package) + var collectDeps func(*types.Package) + collectDeps = func(p *types.Package) { + pkgPath := PackagePath(p.Path()) + if _, ok := pkg.importMap[pkgPath]; ok { + return + } + pkg.importMap[pkgPath] = p + for _, imp := range p.Imports() { + collectDeps(imp) + } + } + collectDeps(pkg.types) + + // Work around golang/go#61561: interface instances aren't concurrency-safe + // as they are not completed by the type checker. + for _, inst := range pkg.typesInfo.Instances { + if iface, _ := inst.Type.Underlying().(*types.Interface); iface != nil { + iface.Complete() + } + } + } + + // Our heuristic for whether to show type checking errors is: + // + If there is a parse error _in the current file_, suppress type + // errors in that file. + // + Otherwise, show type errors even in the presence of parse errors in + // other package files. 
go/types attempts to suppress follow-on errors + // due to bad syntax, so on balance type checking errors still provide + // a decent signal/noise ratio as long as the file in question parses. + + // Track URIs with parse errors so that we can suppress type errors for these + // files. + unparseable := map[protocol.DocumentURI]bool{} + for _, e := range pkg.parseErrors { + diags, err := parseErrorDiagnostics(pkg, e) + if err != nil { + event.Error(ctx, "unable to compute positions for parse errors", err, tag.Package.Of(string(inputs.id))) + continue + } + for _, diag := range diags { + unparseable[diag.URI] = true + pkg.diagnostics = append(pkg.diagnostics, diag) + } + } + + diags := typeErrorsToDiagnostics(pkg, pkg.typeErrors, inputs.linkTarget, inputs.moduleMode, inputs.relatedInformation) + for _, diag := range diags { + // If the file didn't parse cleanly, it is highly likely that type + // checking errors will be confusing or redundant. But otherwise, type + // checking usually provides a good enough signal to include. + if !unparseable[diag.URI] { + pkg.diagnostics = append(pkg.diagnostics, diag) + } + } + + return &Package{ph.mp, ph.loadDiagnostics, pkg}, nil +} + +// e.g. "go1" or "go1.2" or "go1.2.3" +var goVersionRx = regexp.MustCompile(`^go[1-9][0-9]*(?:\.(0|[1-9][0-9]*)){0,2}$`) + +func (b *typeCheckBatch) typesConfig(ctx context.Context, inputs typeCheckInputs, onError func(e error)) *types.Config { + cfg := &types.Config{ + Sizes: inputs.sizes, + Error: onError, + Importer: importerFunc(func(path string) (*types.Package, error) { + // While all of the import errors could be reported + // based on the metadata before we start type checking, + // reporting them via types.Importer places the errors + // at the correct source location. + id, ok := inputs.depsByImpPath[ImportPath(path)] + if !ok { + // If the import declaration is broken, + // go list may fail to report metadata about it. + // See TestFixImportDecl for an example. 
+ return nil, fmt.Errorf("missing metadata for import of %q", path) + } + depPH := b.handles[id] + if depPH == nil { + // e.g. missing metadata for dependencies in buildPackageHandle + return nil, missingPkgError(inputs.id, path, inputs.moduleMode) + } + if !metadata.IsValidImport(inputs.pkgPath, depPH.mp.PkgPath) { + return nil, fmt.Errorf("invalid use of internal package %q", path) + } + return b.getImportPackage(ctx, id) + }), + } + + if inputs.goVersion != "" { + goVersion := "go" + inputs.goVersion + if validGoVersion(goVersion) { + cfg.GoVersion = goVersion + } + } + + // We want to type check cgo code if go/types supports it. + // We passed typecheckCgo to go/packages when we Loaded. + typesinternal.SetUsesCgo(cfg) + return cfg +} + +// validGoVersion reports whether goVersion is a valid Go version for go/types. +// types.NewChecker panics if GoVersion is invalid. +// +// Note that, prior to go1.21, go/types required exactly two components to the +// version number. For example, go types would panic with the Go version +// go1.21.1. validGoVersion handles this case when built with go1.20 or earlier. +func validGoVersion(goVersion string) bool { + if !goVersionRx.MatchString(goVersion) { + return false // malformed version string + } + + // TODO(rfindley): remove once we no longer support building gopls with Go + // 1.20 or earlier. + if !slices.Contains(build.Default.ReleaseTags, "go1.21") && strings.Count(goVersion, ".") >= 2 { + return false // unsupported patch version + } + + return true +} + +// depsErrors creates diagnostics for each metadata error (e.g. import cycle). +// These may be attached to import declarations in the transitive source files +// of pkg, or to 'requires' declarations in the package's go.mod file. +// +// TODO(rfindley): move this to load.go +func depsErrors(ctx context.Context, snapshot *Snapshot, mp *metadata.Package) ([]*Diagnostic, error) { + // Select packages that can't be found, and were imported in non-workspace packages. 
+ // Workspace packages already show their own errors. + var relevantErrors []*packagesinternal.PackageError + for _, depsError := range mp.DepsErrors { + // Up to Go 1.15, the missing package was included in the stack, which + // was presumably a bug. We want the next one up. + directImporterIdx := len(depsError.ImportStack) - 1 + if directImporterIdx < 0 { + continue + } + + directImporter := depsError.ImportStack[directImporterIdx] + if snapshot.isWorkspacePackage(PackageID(directImporter)) { + continue + } + relevantErrors = append(relevantErrors, depsError) + } + + // Don't build the import index for nothing. + if len(relevantErrors) == 0 { + return nil, nil + } + + // Subsequent checks require Go files. + if len(mp.CompiledGoFiles) == 0 { + return nil, nil + } + + // Build an index of all imports in the package. + type fileImport struct { + cgf *parsego.File + imp *ast.ImportSpec + } + allImports := map[string][]fileImport{} + for _, uri := range mp.CompiledGoFiles { + pgf, err := parseGoURI(ctx, snapshot, uri, parsego.Header) + if err != nil { + return nil, err + } + fset := tokeninternal.FileSetFor(pgf.Tok) + // TODO(adonovan): modify Imports() to accept a single token.File (cgf.Tok). + for _, group := range astutil.Imports(fset, pgf.File) { + for _, imp := range group { + if imp.Path == nil { + continue + } + path := strings.Trim(imp.Path.Value, `"`) + allImports[path] = append(allImports[path], fileImport{pgf, imp}) + } + } + } + + // Apply a diagnostic to any import involved in the error, stopping once + // we reach the workspace. 
+ var errors []*Diagnostic + for _, depErr := range relevantErrors { + for i := len(depErr.ImportStack) - 1; i >= 0; i-- { + item := depErr.ImportStack[i] + if snapshot.isWorkspacePackage(PackageID(item)) { + break + } + + for _, imp := range allImports[item] { + rng, err := imp.cgf.NodeRange(imp.imp) + if err != nil { + return nil, err + } + diag := &Diagnostic{ + URI: imp.cgf.URI, + Range: rng, + Severity: protocol.SeverityError, + Source: TypeError, + Message: fmt.Sprintf("error while importing %v: %v", item, depErr.Err), + SuggestedFixes: goGetQuickFixes(mp.Module != nil, imp.cgf.URI, item), + } + if !bundleQuickFixes(diag) { + bug.Reportf("failed to bundle fixes for diagnostic %q", diag.Message) + } + errors = append(errors, diag) + } + } + } + + modFile, err := nearestModFile(ctx, mp.CompiledGoFiles[0], snapshot) + if err != nil { + return nil, err + } + pm, err := parseModURI(ctx, snapshot, modFile) + if err != nil { + return nil, err + } + + // Add a diagnostic to the module that contained the lowest-level import of + // the missing package. 
+ for _, depErr := range relevantErrors { + for i := len(depErr.ImportStack) - 1; i >= 0; i-- { + item := depErr.ImportStack[i] + mp := snapshot.Metadata(PackageID(item)) + if mp == nil || mp.Module == nil { + continue + } + modVer := module.Version{Path: mp.Module.Path, Version: mp.Module.Version} + reference := findModuleReference(pm.File, modVer) + if reference == nil { + continue + } + rng, err := pm.Mapper.OffsetRange(reference.Start.Byte, reference.End.Byte) + if err != nil { + return nil, err + } + diag := &Diagnostic{ + URI: pm.URI, + Range: rng, + Severity: protocol.SeverityError, + Source: TypeError, + Message: fmt.Sprintf("error while importing %v: %v", item, depErr.Err), + SuggestedFixes: goGetQuickFixes(true, pm.URI, item), + } + if !bundleQuickFixes(diag) { + bug.Reportf("failed to bundle fixes for diagnostic %q", diag.Message) + } + errors = append(errors, diag) + break + } + } + return errors, nil +} + +// missingPkgError returns an error message for a missing package that varies +// based on the user's workspace mode. +func missingPkgError(from PackageID, pkgPath string, moduleMode bool) error { + // TODO(rfindley): improve this error. Previous versions of this error had + // access to the full snapshot, and could provide more information (such as + // the initialization error). + if moduleMode { + if metadata.IsCommandLineArguments(from) { + return fmt.Errorf("current file is not included in a workspace module") + } else { + // Previously, we would present the initialization error here. + return fmt.Errorf("no required module provides package %q", pkgPath) + } + } else { + // Previously, we would list the directories in GOROOT and GOPATH here. + return fmt.Errorf("cannot find package %q in GOROOT or GOPATH", pkgPath) + } +} + +// typeErrorsToDiagnostics translates a slice of types.Errors into a slice of +// Diagnostics. 
+// +// In addition to simply mapping data such as position information and error +// codes, this function interprets related go/types "continuation" errors as +// protocol.DiagnosticRelatedInformation. Continuation errors are go/types +// errors whose messages starts with "\t". By convention, these errors relate +// to the previous error in the errs slice (such as if they were printed in +// sequence to a terminal). +// +// The linkTarget, moduleMode, and supportsRelatedInformation parameters affect +// the construction of protocol objects (see the code for details). +func typeErrorsToDiagnostics(pkg *syntaxPackage, errs []types.Error, linkTarget string, moduleMode, supportsRelatedInformation bool) []*Diagnostic { + var result []*Diagnostic + + // batch records diagnostics for a set of related types.Errors. + batch := func(related []types.Error) { + var diags []*Diagnostic + for i, e := range related { + code, start, end, ok := typesinternal.ReadGo116ErrorData(e) + if !ok || !start.IsValid() || !end.IsValid() { + start, end = e.Pos, e.Pos + code = 0 + } + if !start.IsValid() { + // Type checker errors may be missing position information if they + // relate to synthetic syntax, such as if the file were fixed. In that + // case, we should have a parse error anyway, so skipping the type + // checker error is likely benign. + // + // TODO(golang/go#64335): we should eventually verify that all type + // checked syntax has valid positions, and promote this skip to a bug + // report. + continue + } + + // Invariant: both start and end are IsValid. + if !end.IsValid() { + panic("end is invalid") + } + + posn := safetoken.StartPosition(e.Fset, start) + if !posn.IsValid() { + // All valid positions produced by the type checker should described by + // its fileset. + // + // Note: in golang/go#64488, we observed an error that was positioned + // over fixed syntax, which overflowed its file. 
So it's definitely + // possible that we get here (it's hard to reason about fixing up the + // AST). Nevertheless, it's a bug. + bug.Reportf("internal error: type checker error %q outside its Fset", e) + continue + } + pgf, err := pkg.File(protocol.URIFromPath(posn.Filename)) + if err != nil { + // Sometimes type-checker errors refer to positions in other packages, + // such as when a declaration duplicates a dot-imported name. + // + // In these cases, we don't want to report an error in the other + // package (the message would be rather confusing), but we do want to + // report an error in the current package (golang/go#59005). + if i == 0 { + bug.Reportf("internal error: could not locate file for primary type checker error %v: %v", e, err) + } + continue + } + + // debugging #65960 + // + // At this point, we know 'start' IsValid, and + // StartPosition(start) worked (with e.Fset). + // + // If the asserted condition is true, 'start' + // is also in range for pgf.Tok, which means + // the PosRange failure must be caused by 'end'. + if pgf.Tok != e.Fset.File(start) { + bug.Reportf("internal error: inconsistent token.Files for pos") + } + + if end == start { + // Expand the end position to a more meaningful span. 
+ end = analysisinternal.TypeErrorEndPos(e.Fset, pgf.Src, start) + + // debugging #65960 + if _, err := safetoken.Offset(pgf.Tok, end); err != nil { + bug.Reportf("TypeErrorEndPos returned invalid end: %v", err) + } + } else { + // debugging #65960 + if _, err := safetoken.Offset(pgf.Tok, end); err != nil { + bug.Reportf("ReadGo116ErrorData returned invalid end: %v", err) + } + } + + rng, err := pgf.Mapper.PosRange(pgf.Tok, start, end) + if err != nil { + bug.Reportf("internal error: could not compute pos to range for %v: %v", e, err) + continue + } + msg := related[0].Msg + if i > 0 { + if supportsRelatedInformation { + msg += " (see details)" + } else { + msg += fmt.Sprintf(" (this error: %v)", e.Msg) + } + } + diag := &Diagnostic{ + URI: pgf.URI, + Range: rng, + Severity: protocol.SeverityError, + Source: TypeError, + Message: msg, + } + if code != 0 { + diag.Code = code.String() + diag.CodeHref = typesCodeHref(linkTarget, code) + } + if code == typesinternal.UnusedVar || code == typesinternal.UnusedImport { + diag.Tags = append(diag.Tags, protocol.Unnecessary) + } + if match := importErrorRe.FindStringSubmatch(e.Msg); match != nil { + diag.SuggestedFixes = append(diag.SuggestedFixes, goGetQuickFixes(moduleMode, pgf.URI, match[1])...) + } + if match := unsupportedFeatureRe.FindStringSubmatch(e.Msg); match != nil { + diag.SuggestedFixes = append(diag.SuggestedFixes, editGoDirectiveQuickFix(moduleMode, pgf.URI, match[1])...) + } + + // Link up related information. For the primary error, all related errors + // are treated as related information. For secondary errors, only the + // primary is related. + // + // This is because go/types assumes that errors are read top-down, such as + // in the cycle error "A refers to...". The structure of the secondary + // error set likely only makes sense for the primary error. 
+ if i > 0 { + primary := diags[0] + primary.Related = append(primary.Related, protocol.DiagnosticRelatedInformation{ + Location: protocol.Location{URI: diag.URI, Range: diag.Range}, + Message: related[i].Msg, // use the unmodified secondary error for related errors. + }) + diag.Related = []protocol.DiagnosticRelatedInformation{{ + Location: protocol.Location{URI: primary.URI, Range: primary.Range}, + }} + } + diags = append(diags, diag) + } + result = append(result, diags...) + } + + // Process batches of related errors. + for len(errs) > 0 { + related := []types.Error{errs[0]} + for i := 1; i < len(errs); i++ { + spl := errs[i] + if len(spl.Msg) == 0 || spl.Msg[0] != '\t' { + break + } + spl.Msg = spl.Msg[len("\t"):] + related = append(related, spl) + } + batch(related) + errs = errs[len(related):] + } + + return result +} + +// An importFunc is an implementation of the single-method +// types.Importer interface based on a function value. +type importerFunc func(path string) (*types.Package, error) + +func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } diff --git a/gopls/internal/lsp/cache/constraints.go b/gopls/internal/cache/constraints.go similarity index 100% rename from gopls/internal/lsp/cache/constraints.go rename to gopls/internal/cache/constraints.go diff --git a/gopls/internal/cache/constraints_test.go b/gopls/internal/cache/constraints_test.go new file mode 100644 index 00000000000..23c9f39cb19 --- /dev/null +++ b/gopls/internal/cache/constraints_test.go @@ -0,0 +1,126 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build go1.16 +// +build go1.16 + +package cache + +import ( + "testing" +) + +func TestIsStandaloneFile(t *testing.T) { + tests := []struct { + desc string + contents string + standaloneTags []string + want bool + }{ + { + "new syntax", + "//go:build ignore\n\npackage main\n", + []string{"ignore"}, + true, + }, + { + "legacy syntax", + "// +build ignore\n\npackage main\n", + []string{"ignore"}, + true, + }, + { + "multiple tags", + "//go:build ignore\n\npackage main\n", + []string{"exclude", "ignore"}, + true, + }, + { + "invalid tag", + "// +build ignore\n\npackage main\n", + []string{"script"}, + false, + }, + { + "non-main package", + "//go:build ignore\n\npackage p\n", + []string{"ignore"}, + false, + }, + { + "alternate tag", + "// +build script\n\npackage main\n", + []string{"script"}, + true, + }, + { + "both syntax", + "//go:build ignore\n// +build ignore\n\npackage main\n", + []string{"ignore"}, + true, + }, + { + "after comments", + "// A non-directive comment\n//go:build ignore\n\npackage main\n", + []string{"ignore"}, + true, + }, + { + "after package decl", + "package main //go:build ignore\n", + []string{"ignore"}, + false, + }, + { + "on line after package decl", + "package main\n\n//go:build ignore\n", + []string{"ignore"}, + false, + }, + { + "combined with other expressions", + "\n\n//go:build ignore || darwin\n\npackage main\n", + []string{"ignore"}, + false, + }, + } + + for _, test := range tests { + t.Run(test.desc, func(t *testing.T) { + if got := isStandaloneFile([]byte(test.contents), test.standaloneTags); got != test.want { + t.Errorf("isStandaloneFile(%q, %v) = %t, want %t", test.contents, test.standaloneTags, got, test.want) + } + }) + } +} + +func TestVersionRegexp(t *testing.T) { + // good + for _, s := range []string{ + "go1", + "go1.2", + "go1.2.3", + "go1.0.33", + } { + if !goVersionRx.MatchString(s) { + t.Errorf("Valid Go version %q does not match the regexp", s) + } + } + + // bad + for _, s := range []string{ + "go", // 
missing numbers + "go0", // Go starts at 1 + "go01", // leading zero + "go1.π", // non-decimal + "go1.-1", // negative + "go1.02.3", // leading zero + "go1.2.3.4", // too many segments + "go1.2.3-pre", // textual suffix + } { + if goVersionRx.MatchString(s) { + t.Errorf("Invalid Go version %q unexpectedly matches the regexp", s) + } + } +} diff --git a/gopls/internal/lsp/cache/debug.go b/gopls/internal/cache/debug.go similarity index 100% rename from gopls/internal/lsp/cache/debug.go rename to gopls/internal/cache/debug.go diff --git a/gopls/internal/cache/diagnostics.go b/gopls/internal/cache/diagnostics.go new file mode 100644 index 00000000000..5489b5645b6 --- /dev/null +++ b/gopls/internal/cache/diagnostics.go @@ -0,0 +1,190 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "encoding/json" + "fmt" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" +) + +// A InitializationError is an error that causes snapshot initialization to fail. +// It is either the error returned from go/packages.Load, or an error parsing a +// workspace go.work or go.mod file. +// +// Such an error generally indicates that the View is malformed, and will never +// be usable. +type InitializationError struct { + // MainError is the primary error. Must be non-nil. + MainError error + + // Diagnostics contains any supplemental (structured) diagnostics extracted + // from the load error. + Diagnostics map[protocol.DocumentURI][]*Diagnostic +} + +func byURI(d *Diagnostic) protocol.DocumentURI { return d.URI } // For use in maps.Group. + +// An Diagnostic corresponds to an LSP Diagnostic. +// https://microsoft.github.io/language-server-protocol/specification#diagnostic +// +// It is (effectively) gob-serializable; see {encode,decode}Diagnostics. 
+type Diagnostic struct { + URI protocol.DocumentURI // of diagnosed file (not diagnostic documentation) + Range protocol.Range + Severity protocol.DiagnosticSeverity + Code string // analysis.Diagnostic.Category (or "default" if empty) or hidden go/types error code + CodeHref string + + // Source is a human-readable description of the source of the error. + // Diagnostics generated by an analysis.Analyzer set it to Analyzer.Name. + Source DiagnosticSource + + Message string + + Tags []protocol.DiagnosticTag + Related []protocol.DiagnosticRelatedInformation + + // Fields below are used internally to generate quick fixes. They aren't + // part of the LSP spec and historically didn't leave the server. + // + // Update(2023-05): version 3.16 of the LSP spec included support for the + // Diagnostic.data field, which holds arbitrary data preserved in the + // diagnostic for codeAction requests. This field allows bundling additional + // information for quick-fixes, and gopls can (and should) use this + // information to avoid re-evaluating diagnostics in code-action handlers. + // + // In order to stage this transition incrementally, the 'BundledFixes' field + // may store a 'bundled' (=json-serialized) form of the associated + // SuggestedFixes. Not all diagnostics have their fixes bundled. 
+ BundledFixes *json.RawMessage + SuggestedFixes []SuggestedFix +} + +func (d *Diagnostic) String() string { + return fmt.Sprintf("%v: %s", d.Range, d.Message) +} + +type DiagnosticSource string + +const ( + UnknownError DiagnosticSource = "" + ListError DiagnosticSource = "go list" + ParseError DiagnosticSource = "syntax" + TypeError DiagnosticSource = "compiler" + ModTidyError DiagnosticSource = "go mod tidy" + OptimizationDetailsError DiagnosticSource = "optimizer details" + UpgradeNotification DiagnosticSource = "upgrade available" + Vulncheck DiagnosticSource = "vulncheck imports" + Govulncheck DiagnosticSource = "govulncheck" + TemplateError DiagnosticSource = "template" + WorkFileError DiagnosticSource = "go.work file" + ConsistencyInfo DiagnosticSource = "consistency" +) + +// A SuggestedFix represents a suggested fix (for a diagnostic) +// produced by analysis, in protocol form. +// +// The fixes are reported to the client as a set of code actions in +// response to a CodeAction query for a set of diagnostics. Multiple +// SuggestedFixes may be produced for the same logical fix, varying +// only in ActionKind. For example, a fix may be both a Refactor +// (which should appear on the refactoring menu) and a SourceFixAll (a +// clear fix that can be safely applied without explicit consent). +type SuggestedFix struct { + Title string + Edits map[protocol.DocumentURI][]protocol.TextEdit + Command *protocol.Command + ActionKind protocol.CodeActionKind +} + +// SuggestedFixFromCommand returns a suggested fix to run the given command. +func SuggestedFixFromCommand(cmd protocol.Command, kind protocol.CodeActionKind) SuggestedFix { + return SuggestedFix{ + Title: cmd.Title, + Command: &cmd, + ActionKind: kind, + } +} + +// quickFixesJSON is a JSON-serializable list of quick fixes +// to be saved in the protocol.Diagnostic.Data field. +type quickFixesJSON struct { + // TODO(rfindley): pack some sort of identifier here for later + // lookup/validation? 
+ Fixes []protocol.CodeAction +} + +// bundleQuickFixes attempts to bundle sd.SuggestedFixes into the +// sd.BundledFixes field, so that it can be round-tripped through the client. +// It returns false if the quick-fixes cannot be bundled. +func bundleQuickFixes(sd *Diagnostic) bool { + if len(sd.SuggestedFixes) == 0 { + return true + } + var actions []protocol.CodeAction + for _, fix := range sd.SuggestedFixes { + if fix.Edits != nil { + // For now, we only support bundled code actions that execute commands. + // + // In order to cleanly support bundled edits, we'd have to guarantee that + // the edits were generated on the current snapshot. But this naively + // implies that every fix would have to include a snapshot ID, which + // would require us to republish all diagnostics on each new snapshot. + // + // TODO(rfindley): in order to avoid this additional chatter, we'd need + // to build some sort of registry or other mechanism on the snapshot to + // check whether a diagnostic is still valid. + return false + } + action := protocol.CodeAction{ + Title: fix.Title, + Kind: fix.ActionKind, + Command: fix.Command, + } + actions = append(actions, action) + } + fixes := quickFixesJSON{ + Fixes: actions, + } + data, err := json.Marshal(fixes) + if err != nil { + bug.Reportf("marshalling quick fixes: %v", err) + return false + } + msg := json.RawMessage(data) + sd.BundledFixes = &msg + return true +} + +// BundledQuickFixes extracts any bundled codeActions from the +// diag.Data field. +func BundledQuickFixes(diag protocol.Diagnostic) []protocol.CodeAction { + var fix quickFixesJSON + if diag.Data != nil { + err := protocol.UnmarshalJSON(*diag.Data, &fix) + if err != nil { + bug.Reportf("unmarshalling quick fix: %v", err) + return nil + } + } + + var actions []protocol.CodeAction + for _, action := range fix.Fixes { + // See BundleQuickFixes: for now we only support bundling commands. 
+ if action.Edit != nil { + bug.Reportf("bundled fix %q includes workspace edits", action.Title) + continue + } + // associate the action with the incoming diagnostic + // (Note that this does not mutate the fix.Fixes slice). + action.Diagnostics = []protocol.Diagnostic{diag} + actions = append(actions, action) + } + + return actions +} diff --git a/gopls/internal/cache/errors.go b/gopls/internal/cache/errors.go new file mode 100644 index 00000000000..6c95526d1ea --- /dev/null +++ b/gopls/internal/cache/errors.go @@ -0,0 +1,519 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +// This file defines routines to convert diagnostics from go list, go +// get, go/packages, parsing, type checking, and analysis into +// golang.Diagnostic form, and suggesting quick fixes. + +import ( + "context" + "fmt" + "go/parser" + "go/scanner" + "go/token" + "log" + "path/filepath" + "regexp" + "strconv" + "strings" + + "golang.org/x/tools/go/packages" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/internal/typesinternal" +) + +// goPackagesErrorDiagnostics translates the given go/packages Error into a +// diagnostic, using the provided metadata and filesource. +// +// The slice of diagnostics may be empty. 
+func goPackagesErrorDiagnostics(ctx context.Context, e packages.Error, mp *metadata.Package, fs file.Source) ([]*Diagnostic, error) { + if diag, err := parseGoListImportCycleError(ctx, e, mp, fs); err != nil { + return nil, err + } else if diag != nil { + return []*Diagnostic{diag}, nil + } + + // Parse error location and attempt to convert to protocol form. + loc, err := func() (protocol.Location, error) { + filename, line, col8 := parseGoListError(e, mp.LoadDir) + uri := protocol.URIFromPath(filename) + + fh, err := fs.ReadFile(ctx, uri) + if err != nil { + return protocol.Location{}, err + } + content, err := fh.Content() + if err != nil { + return protocol.Location{}, err + } + mapper := protocol.NewMapper(uri, content) + posn, err := mapper.LineCol8Position(line, col8) + if err != nil { + return protocol.Location{}, err + } + return protocol.Location{ + URI: uri, + Range: protocol.Range{ + Start: posn, + End: posn, + }, + }, nil + }() + + // TODO(rfindley): in some cases the go command outputs invalid spans, for + // example (from TestGoListErrors): + // + // package a + // import + // + // In this case, the go command will complain about a.go:2:8, which is after + // the trailing newline but still considered to be on the second line, most + // likely because *token.File lacks information about newline termination. + // + // We could do better here by handling that case. + if err != nil { + // Unable to parse a valid position. + // Apply the error to all files to be safe. 
+ var diags []*Diagnostic + for _, uri := range mp.CompiledGoFiles { + diags = append(diags, &Diagnostic{ + URI: uri, + Severity: protocol.SeverityError, + Source: ListError, + Message: e.Msg, + }) + } + return diags, nil + } + return []*Diagnostic{{ + URI: loc.URI, + Range: loc.Range, + Severity: protocol.SeverityError, + Source: ListError, + Message: e.Msg, + }}, nil +} + +func parseErrorDiagnostics(pkg *syntaxPackage, errList scanner.ErrorList) ([]*Diagnostic, error) { + // The first parser error is likely the root cause of the problem. + if errList.Len() <= 0 { + return nil, fmt.Errorf("no errors in %v", errList) + } + e := errList[0] + pgf, err := pkg.File(protocol.URIFromPath(e.Pos.Filename)) + if err != nil { + return nil, err + } + rng, err := pgf.Mapper.OffsetRange(e.Pos.Offset, e.Pos.Offset) + if err != nil { + return nil, err + } + return []*Diagnostic{{ + URI: pgf.URI, + Range: rng, + Severity: protocol.SeverityError, + Source: ParseError, + Message: e.Msg, + }}, nil +} + +var importErrorRe = regexp.MustCompile(`could not import ([^\s]+)`) +var unsupportedFeatureRe = regexp.MustCompile(`.*require.* go(\d+\.\d+) or later`) + +func goGetQuickFixes(moduleMode bool, uri protocol.DocumentURI, pkg string) []SuggestedFix { + // Go get only supports module mode for now. + if !moduleMode { + return nil + } + title := fmt.Sprintf("go get package %v", pkg) + cmd, err := command.NewGoGetPackageCommand(title, command.GoGetPackageArgs{ + URI: uri, + AddRequire: true, + Pkg: pkg, + }) + if err != nil { + bug.Reportf("internal error building 'go get package' fix: %v", err) + return nil + } + return []SuggestedFix{SuggestedFixFromCommand(cmd, protocol.QuickFix)} +} + +func editGoDirectiveQuickFix(moduleMode bool, uri protocol.DocumentURI, version string) []SuggestedFix { + // Go mod edit only supports module mode. 
+ if !moduleMode { + return nil + } + title := fmt.Sprintf("go mod edit -go=%s", version) + cmd, err := command.NewEditGoDirectiveCommand(title, command.EditGoDirectiveArgs{ + URI: uri, + Version: version, + }) + if err != nil { + bug.Reportf("internal error constructing 'edit go directive' fix: %v", err) + return nil + } + return []SuggestedFix{SuggestedFixFromCommand(cmd, protocol.QuickFix)} +} + +// encodeDiagnostics gob-encodes the given diagnostics. +func encodeDiagnostics(srcDiags []*Diagnostic) []byte { + var gobDiags []gobDiagnostic + for _, srcDiag := range srcDiags { + var gobFixes []gobSuggestedFix + for _, srcFix := range srcDiag.SuggestedFixes { + gobFix := gobSuggestedFix{ + Message: srcFix.Title, + ActionKind: srcFix.ActionKind, + } + for uri, srcEdits := range srcFix.Edits { + for _, srcEdit := range srcEdits { + gobFix.TextEdits = append(gobFix.TextEdits, gobTextEdit{ + Location: protocol.Location{ + URI: uri, + Range: srcEdit.Range, + }, + NewText: []byte(srcEdit.NewText), + }) + } + } + if srcCmd := srcFix.Command; srcCmd != nil { + gobFix.Command = &gobCommand{ + Title: srcCmd.Title, + Command: srcCmd.Command, + Arguments: srcCmd.Arguments, + } + } + gobFixes = append(gobFixes, gobFix) + } + var gobRelated []gobRelatedInformation + for _, srcRel := range srcDiag.Related { + gobRel := gobRelatedInformation(srcRel) + gobRelated = append(gobRelated, gobRel) + } + gobDiag := gobDiagnostic{ + Location: protocol.Location{ + URI: srcDiag.URI, + Range: srcDiag.Range, + }, + Severity: srcDiag.Severity, + Code: srcDiag.Code, + CodeHref: srcDiag.CodeHref, + Source: string(srcDiag.Source), + Message: srcDiag.Message, + SuggestedFixes: gobFixes, + Related: gobRelated, + Tags: srcDiag.Tags, + } + gobDiags = append(gobDiags, gobDiag) + } + return diagnosticsCodec.Encode(gobDiags) +} + +// decodeDiagnostics decodes the given gob-encoded diagnostics. 
+func decodeDiagnostics(data []byte) []*Diagnostic { + var gobDiags []gobDiagnostic + diagnosticsCodec.Decode(data, &gobDiags) + var srcDiags []*Diagnostic + for _, gobDiag := range gobDiags { + var srcFixes []SuggestedFix + for _, gobFix := range gobDiag.SuggestedFixes { + srcFix := SuggestedFix{ + Title: gobFix.Message, + ActionKind: gobFix.ActionKind, + } + for _, gobEdit := range gobFix.TextEdits { + if srcFix.Edits == nil { + srcFix.Edits = make(map[protocol.DocumentURI][]protocol.TextEdit) + } + srcEdit := protocol.TextEdit{ + Range: gobEdit.Location.Range, + NewText: string(gobEdit.NewText), + } + uri := gobEdit.Location.URI + srcFix.Edits[uri] = append(srcFix.Edits[uri], srcEdit) + } + if gobCmd := gobFix.Command; gobCmd != nil { + srcFix.Command = &protocol.Command{ + Title: gobCmd.Title, + Command: gobCmd.Command, + Arguments: gobCmd.Arguments, + } + } + srcFixes = append(srcFixes, srcFix) + } + var srcRelated []protocol.DiagnosticRelatedInformation + for _, gobRel := range gobDiag.Related { + srcRel := protocol.DiagnosticRelatedInformation(gobRel) + srcRelated = append(srcRelated, srcRel) + } + srcDiag := &Diagnostic{ + URI: gobDiag.Location.URI, + Range: gobDiag.Location.Range, + Severity: gobDiag.Severity, + Code: gobDiag.Code, + CodeHref: gobDiag.CodeHref, + Source: DiagnosticSource(gobDiag.Source), + Message: gobDiag.Message, + Tags: gobDiag.Tags, + Related: srcRelated, + SuggestedFixes: srcFixes, + } + srcDiags = append(srcDiags, srcDiag) + } + return srcDiags +} + +// toSourceDiagnostic converts a gobDiagnostic to "source" form. 
+func toSourceDiagnostic(srcAnalyzer *settings.Analyzer, gobDiag *gobDiagnostic) *Diagnostic { + var related []protocol.DiagnosticRelatedInformation + for _, gobRelated := range gobDiag.Related { + related = append(related, protocol.DiagnosticRelatedInformation(gobRelated)) + } + + severity := srcAnalyzer.Severity + if severity == 0 { + severity = protocol.SeverityWarning + } + + diag := &Diagnostic{ + URI: gobDiag.Location.URI, + Range: gobDiag.Location.Range, + Severity: severity, + Code: gobDiag.Code, + CodeHref: gobDiag.CodeHref, + Source: DiagnosticSource(gobDiag.Source), + Message: gobDiag.Message, + Related: related, + Tags: srcAnalyzer.Tag, + } + + // We cross the set of fixes (whether edit- or command-based) + // with the set of kinds, as a single fix may represent more + // than one kind of action (e.g. refactor, quickfix, fixall), + // each corresponding to a distinct client UI element + // or operation. + kinds := srcAnalyzer.ActionKinds + if len(kinds) == 0 { + kinds = []protocol.CodeActionKind{protocol.QuickFix} + } + + var fixes []SuggestedFix + for _, fix := range gobDiag.SuggestedFixes { + if len(fix.TextEdits) > 0 { + // Accumulate edit-based fixes supplied by the diagnostic itself. + edits := make(map[protocol.DocumentURI][]protocol.TextEdit) + for _, e := range fix.TextEdits { + uri := e.Location.URI + edits[uri] = append(edits[uri], protocol.TextEdit{ + Range: e.Location.Range, + NewText: string(e.NewText), + }) + } + for _, kind := range kinds { + fixes = append(fixes, SuggestedFix{ + Title: fix.Message, + Edits: edits, + ActionKind: kind, + }) + } + + } else { + // Accumulate command-based fixes, whose edits + // are not provided by the analyzer but are computed on demand + // by logic "adjacent to" the analyzer. + // + // The analysis.Diagnostic.Category is used as the fix name. 
+ cmd, err := command.NewApplyFixCommand(fix.Message, command.ApplyFixArgs{ + Fix: diag.Code, + URI: gobDiag.Location.URI, + Range: gobDiag.Location.Range, + }) + if err != nil { + // JSON marshalling of these argument values cannot fail. + log.Fatalf("internal error in NewApplyFixCommand: %v", err) + } + for _, kind := range kinds { + fixes = append(fixes, SuggestedFixFromCommand(cmd, kind)) + } + + // Ensure that the analyzer specifies a category for all its no-edit fixes. + // This is asserted by analysistest.RunWithSuggestedFixes, but there + // may be gaps in test coverage. + if diag.Code == "" || diag.Code == "default" { + bug.Reportf("missing Diagnostic.Code: %#v", *diag) + } + } + } + diag.SuggestedFixes = fixes + + // If the fixes only delete code, assume that the diagnostic is reporting dead code. + if onlyDeletions(diag.SuggestedFixes) { + diag.Tags = append(diag.Tags, protocol.Unnecessary) + } + return diag +} + +// onlyDeletions returns true if fixes is non-empty and all of the suggested +// fixes are deletions. +func onlyDeletions(fixes []SuggestedFix) bool { + for _, fix := range fixes { + if fix.Command != nil { + return false + } + for _, edits := range fix.Edits { + for _, edit := range edits { + if edit.NewText != "" { + return false + } + if protocol.ComparePosition(edit.Range.Start, edit.Range.End) == 0 { + return false + } + } + } + } + return len(fixes) > 0 +} + +func typesCodeHref(linkTarget string, code typesinternal.ErrorCode) string { + return BuildLink(linkTarget, "golang.org/x/tools/internal/typesinternal", code.String()) +} + +// BuildLink constructs a URL with the given target, path, and anchor. +func BuildLink(target, path, anchor string) string { + link := fmt.Sprintf("https://%s/%s", target, path) + if anchor == "" { + return link + } + return link + "#" + anchor +} + +func parseGoListError(e packages.Error, dir string) (filename string, line, col8 int) { + input := e.Pos + if input == "" { + // No position. 
Attempt to parse one out of a + // go list error of the form "file:line:col: + // message" by stripping off the message. + input = strings.TrimSpace(e.Msg) + if i := strings.Index(input, ": "); i >= 0 { + input = input[:i] + } + } + + filename, line, col8 = splitFileLineCol(input) + if !filepath.IsAbs(filename) { + filename = filepath.Join(dir, filename) + } + return filename, line, col8 +} + +// splitFileLineCol splits s into "filename:line:col", +// where line and col consist of decimal digits. +func splitFileLineCol(s string) (file string, line, col8 int) { + // Beware that the filename may contain colon on Windows. + + // stripColonDigits removes a ":%d" suffix, if any. + stripColonDigits := func(s string) (rest string, num int) { + if i := strings.LastIndex(s, ":"); i >= 0 { + if v, err := strconv.ParseInt(s[i+1:], 10, 32); err == nil { + return s[:i], int(v) + } + } + return s, -1 + } + + // strip col ":%d" + s, n1 := stripColonDigits(s) + if n1 < 0 { + return s, 0, 0 // "filename" + } + + // strip line ":%d" + s, n2 := stripColonDigits(s) + if n2 < 0 { + return s, n1, 0 // "filename:line" + } + + return s, n2, n1 // "filename:line:col" +} + +// parseGoListImportCycleError attempts to parse the given go/packages error as +// an import cycle, returning a diagnostic if successful. +// +// If the error is not detected as an import cycle error, it returns nil, nil. +func parseGoListImportCycleError(ctx context.Context, e packages.Error, mp *metadata.Package, fs file.Source) (*Diagnostic, error) { + re := regexp.MustCompile(`(.*): import stack: \[(.+)\]`) + matches := re.FindStringSubmatch(strings.TrimSpace(e.Msg)) + if len(matches) < 3 { + return nil, nil + } + msg := matches[1] + importList := strings.Split(matches[2], " ") + // Since the error is relative to the current package. The import that is causing + // the import cycle error is the second one in the list. + if len(importList) < 2 { + return nil, nil + } + // Imports have quotation marks around them. 
+ circImp := strconv.Quote(importList[1]) + for _, uri := range mp.CompiledGoFiles { + pgf, err := parseGoURI(ctx, fs, uri, parsego.Header) + if err != nil { + return nil, err + } + // Search file imports for the import that is causing the import cycle. + for _, imp := range pgf.File.Imports { + if imp.Path.Value == circImp { + rng, err := pgf.NodeMappedRange(imp) + if err != nil { + return nil, nil + } + + return &Diagnostic{ + URI: pgf.URI, + Range: rng.Range(), + Severity: protocol.SeverityError, + Source: ListError, + Message: msg, + }, nil + } + } + } + return nil, nil +} + +// parseGoURI is a helper to parse the Go file at the given URI from the file +// source fs. The resulting syntax and token.File belong to an ephemeral, +// encapsulated FileSet, so this file stands only on its own: it's not suitable +// to use in a list of file of a package, for example. +// +// It returns an error if the file could not be read. +// +// TODO(rfindley): eliminate this helper. +func parseGoURI(ctx context.Context, fs file.Source, uri protocol.DocumentURI, mode parser.Mode) (*parsego.File, error) { + fh, err := fs.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + return parseGoImpl(ctx, token.NewFileSet(), fh, mode, false) +} + +// parseModURI is a helper to parse the Mod file at the given URI from the file +// source fs. +// +// It returns an error if the file could not be read. 
+func parseModURI(ctx context.Context, fs file.Source, uri protocol.DocumentURI) (*ParsedModule, error) { + fh, err := fs.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + return parseModImpl(ctx, fh) +} diff --git a/gopls/internal/lsp/cache/errors_test.go b/gopls/internal/cache/errors_test.go similarity index 80% rename from gopls/internal/lsp/cache/errors_test.go rename to gopls/internal/cache/errors_test.go index 933e9e87e5d..56b29c3c55b 100644 --- a/gopls/internal/lsp/cache/errors_test.go +++ b/gopls/internal/cache/errors_test.go @@ -10,9 +10,8 @@ import ( "testing" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/gopls/internal/protocol" ) func TestParseErrorMessage(t *testing.T) { @@ -30,35 +29,34 @@ func TestParseErrorMessage(t *testing.T) { expectedLine: 13, expectedColumn: 1, }, + { + name: "windows driver letter", + in: "C:\\foo\\bar.go:13: message", + expectedFileName: "bar.go", + expectedLine: 13, + expectedColumn: 0, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - spn := parseGoListError(tt.in, ".") - fn := spn.URI().Filename() + fn, line, col8 := parseGoListError(packages.Error{Msg: tt.in}, ".") if !strings.HasSuffix(fn, tt.expectedFileName) { t.Errorf("expected filename with suffix %v but got %v", tt.expectedFileName, fn) } - - if !spn.HasPosition() { - t.Fatalf("expected span to have position") + if line != tt.expectedLine { + t.Errorf("expected line %v but got %v", tt.expectedLine, line) } - - pos := spn.Start() - if pos.Line() != tt.expectedLine { - t.Errorf("expected line %v but got %v", tt.expectedLine, pos.Line()) - } - - if pos.Column() != tt.expectedColumn { - t.Errorf("expected line %v but got %v", tt.expectedLine, pos.Line()) + if col8 != tt.expectedColumn { + t.Errorf("expected col %v but got %v", tt.expectedLine, col8) } }) } 
} func TestDiagnosticEncoding(t *testing.T) { - diags := []*source.Diagnostic{ + diags := []*Diagnostic{ {}, // empty { URI: "file///foo", @@ -87,10 +85,10 @@ func TestDiagnosticEncoding(t *testing.T) { // Fields below are used internally to generate quick fixes. They aren't // part of the LSP spec and don't leave the server. - SuggestedFixes: []source.SuggestedFix{ + SuggestedFixes: []SuggestedFix{ { Title: "fix it!", - Edits: map[span.URI][]protocol.TextEdit{ + Edits: map[protocol.DocumentURI][]protocol.TextEdit{ "file:///foo": {{ Range: protocol.Range{ Start: protocol.Position{Line: 4, Character: 2}, diff --git a/gopls/internal/cache/filemap.go b/gopls/internal/cache/filemap.go new file mode 100644 index 00000000000..ee64d7c32c3 --- /dev/null +++ b/gopls/internal/cache/filemap.go @@ -0,0 +1,151 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "path/filepath" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/persistent" +) + +// A fileMap maps files in the snapshot, with some additional bookkeeping: +// It keeps track of overlays as well as directories containing any observed +// file. +type fileMap struct { + files *persistent.Map[protocol.DocumentURI, file.Handle] + overlays *persistent.Map[protocol.DocumentURI, *overlay] // the subset of files that are overlays + dirs *persistent.Set[string] // all dirs containing files; if nil, dirs have not been initialized +} + +func newFileMap() *fileMap { + return &fileMap{ + files: new(persistent.Map[protocol.DocumentURI, file.Handle]), + overlays: new(persistent.Map[protocol.DocumentURI, *overlay]), + dirs: new(persistent.Set[string]), + } +} + +// clone creates a copy of the fileMap, incorporating the changes specified by +// the changes map. 
+func (m *fileMap) clone(changes map[protocol.DocumentURI]file.Handle) *fileMap { + m2 := &fileMap{ + files: m.files.Clone(), + overlays: m.overlays.Clone(), + } + if m.dirs != nil { + m2.dirs = m.dirs.Clone() + } + + // Handle file changes. + // + // Note, we can't simply delete the file unconditionally and let it be + // re-read by the snapshot, as (1) the snapshot must always observe all + // overlays, and (2) deleting a file forces directories to be reevaluated, as + // it may be the last file in a directory. We want to avoid that work in the + // common case where a file has simply changed. + // + // For that reason, we also do this in two passes, processing deletions + // first, as a set before a deletion would result in pointless work. + for uri, fh := range changes { + if !fileExists(fh) { + m2.delete(uri) + } + } + for uri, fh := range changes { + if fileExists(fh) { + m2.set(uri, fh) + } + } + return m2 +} + +func (m *fileMap) destroy() { + m.files.Destroy() + m.overlays.Destroy() + if m.dirs != nil { + m.dirs.Destroy() + } +} + +// get returns the file handle mapped by the given key, or (nil, false) if the +// key is not present. +func (m *fileMap) get(key protocol.DocumentURI) (file.Handle, bool) { + return m.files.Get(key) +} + +// foreach calls f for each (uri, fh) in the map. +func (m *fileMap) foreach(f func(uri protocol.DocumentURI, fh file.Handle)) { + m.files.Range(f) +} + +// set stores the given file handle for key, updating overlays and directories +// accordingly. +func (m *fileMap) set(key protocol.DocumentURI, fh file.Handle) { + m.files.Set(key, fh, nil) + + // update overlays + if o, ok := fh.(*overlay); ok { + m.overlays.Set(key, o, nil) + } else { + // Setting a non-overlay must delete the corresponding overlay, to preserve + // the accuracy of the overlay set. 
+		m.overlays.Delete(key)
+	}
+
+	// update dirs, if they have been computed
+	if m.dirs != nil {
+		m.addDirs(key)
+	}
+}
+
+// addDirs adds all directories containing u to the dirs set.
+func (m *fileMap) addDirs(u protocol.DocumentURI) {
+	dir := filepath.Dir(u.Path())
+	for dir != "" && !m.dirs.Contains(dir) {
+		m.dirs.Add(dir)
+		dir = filepath.Dir(dir)
+	}
+}
+
+// delete removes a file from the map, and updates overlays and dirs
+// accordingly.
+func (m *fileMap) delete(key protocol.DocumentURI) {
+	m.files.Delete(key)
+	m.overlays.Delete(key)
+
+	// Deleting a file may cause the set of dirs to shrink; therefore we must
+	// re-evaluate the dir set.
+	//
+	// Do this lazily, to avoid work if there are multiple deletions in a row.
+	if m.dirs != nil {
+		m.dirs.Destroy()
+		m.dirs = nil
+	}
+}
+
+// getOverlays returns a new unordered array of overlay files.
+func (m *fileMap) getOverlays() []*overlay {
+	var overlays []*overlay
+	m.overlays.Range(func(_ protocol.DocumentURI, o *overlay) {
+		overlays = append(overlays, o)
+	})
+	return overlays
+}
+
+// getDirs returns the set of dirs observed by the fileMap.
+//
+// This operation mutates the fileMap.
+// The result must not be mutated by the caller.
+func (m *fileMap) getDirs() *persistent.Set[string] { + if m.dirs == nil { + m.dirs = new(persistent.Set[string]) + m.files.Range(func(u protocol.DocumentURI, _ file.Handle) { + m.addDirs(u) + }) + } + return m.dirs +} diff --git a/gopls/internal/lsp/cache/filemap_test.go b/gopls/internal/cache/filemap_test.go similarity index 80% rename from gopls/internal/lsp/cache/filemap_test.go rename to gopls/internal/cache/filemap_test.go index a1d10af2427..13f2c1a9ccd 100644 --- a/gopls/internal/lsp/cache/filemap_test.go +++ b/gopls/internal/cache/filemap_test.go @@ -10,8 +10,8 @@ import ( "testing" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" ) func TestFileMap(t *testing.T) { @@ -67,24 +67,24 @@ func TestFileMap(t *testing.T) { t.Run(test.label, func(t *testing.T) { m := newFileMap() for _, op := range test.ops { - uri := span.URIFromPath(filepath.FromSlash(op.path)) + uri := protocol.URIFromPath(filepath.FromSlash(op.path)) switch op.op { case set: - var fh source.FileHandle + var fh file.Handle if op.overlay { - fh = &Overlay{uri: uri} + fh = &overlay{uri: uri} } else { - fh = &DiskFile{uri: uri} + fh = &diskFile{uri: uri} } - m.Set(uri, fh) + m.set(uri, fh) case del: - m.Delete(uri) + m.delete(uri) } } var gotFiles []string - m.Range(func(uri span.URI, _ source.FileHandle) { - gotFiles = append(gotFiles, normalize(uri.Filename())) + m.foreach(func(uri protocol.DocumentURI, _ file.Handle) { + gotFiles = append(gotFiles, normalize(uri.Path())) }) sort.Strings(gotFiles) if diff := cmp.Diff(test.wantFiles, gotFiles); diff != "" { @@ -92,15 +92,15 @@ func TestFileMap(t *testing.T) { } var gotOverlays []string - for _, o := range m.Overlays() { - gotOverlays = append(gotOverlays, normalize(o.URI().Filename())) + for _, o := range m.getOverlays() { + gotOverlays = append(gotOverlays, normalize(o.URI().Path())) 
 			}
 			if diff := cmp.Diff(test.wantOverlays, gotOverlays); diff != "" {
 				t.Errorf("Overlays mismatch (-want +got):\n%s", diff)
 			}
 
 			var gotDirs []string
-			m.Dirs().Range(func(dir string) {
+			m.getDirs().Range(func(dir string) {
 				gotDirs = append(gotDirs, normalize(dir))
 			})
 			sort.Strings(gotDirs)
diff --git a/gopls/internal/cache/filterer.go b/gopls/internal/cache/filterer.go
new file mode 100644
index 00000000000..0ec18369bdf
--- /dev/null
+++ b/gopls/internal/cache/filterer.go
@@ -0,0 +1,83 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cache
+
+import (
+	"path"
+	"path/filepath"
+	"regexp"
+	"strings"
+)
+
+type Filterer struct {
+	// Whether a filter is excluded depends on the operator (first char of the raw filter).
+	// Slices filters and excluded then should have the same length.
+	filters  []*regexp.Regexp
+	excluded []bool
+}
+
+// NewFilterer computes the regular expression form of all raw filters.
+func NewFilterer(rawFilters []string) *Filterer {
+	var f Filterer
+	for _, filter := range rawFilters {
+		filter = path.Clean(filepath.ToSlash(filter))
+		// TODO(dungtuanle): fix: validate [+-] prefix.
+		op, prefix := filter[0], filter[1:]
+		// convertFilterToRegexp adds "/" at the end of prefix to handle cases where a filter is a prefix of another filter.
+		// For example, it prevents [+foobar, -foo] from excluding "foobar".
+		f.filters = append(f.filters, convertFilterToRegexp(filepath.ToSlash(prefix)))
+		f.excluded = append(f.excluded, op == '-')
+	}
+
+	return &f
+}
+
+// Disallow reports whether the path is excluded from the filterer's filters.
+func (f *Filterer) Disallow(path string) bool {
+	// Ensure trailing but not leading slash.
+	path = strings.TrimPrefix(path, "/")
+	if !strings.HasSuffix(path, "/") {
+		path += "/"
+	}
+
+	// TODO(adonovan): opt: iterate in reverse and break at first match.
+ excluded := false + for i, filter := range f.filters { + if filter.MatchString(path) { + excluded = f.excluded[i] // last match wins + } + } + return excluded +} + +// convertFilterToRegexp replaces glob-like operator substrings in a string file path to their equivalent regex forms. +// Supporting glob-like operators: +// - **: match zero or more complete path segments +func convertFilterToRegexp(filter string) *regexp.Regexp { + if filter == "" { + return regexp.MustCompile(".*") + } + var ret strings.Builder + ret.WriteString("^") + segs := strings.Split(filter, "/") + for _, seg := range segs { + // Inv: seg != "" since path is clean. + if seg == "**" { + ret.WriteString(".*") + } else { + ret.WriteString(regexp.QuoteMeta(seg)) + } + ret.WriteString("/") + } + pattern := ret.String() + + // Remove unnecessary "^.*" prefix, which increased + // BenchmarkWorkspaceSymbols time by ~20% (even though + // filter CPU time increased by only by ~2.5%) when the + // default filter was changed to "**/node_modules". + pattern = strings.TrimPrefix(pattern, "^.*") + + return regexp.MustCompile(pattern) +} diff --git a/gopls/internal/lsp/cache/fs_memoized.go b/gopls/internal/cache/fs_memoized.go similarity index 75% rename from gopls/internal/lsp/cache/fs_memoized.go rename to gopls/internal/cache/fs_memoized.go index bfc71205765..dd8293fad75 100644 --- a/gopls/internal/lsp/cache/fs_memoized.go +++ b/gopls/internal/cache/fs_memoized.go @@ -10,8 +10,8 @@ import ( "sync" "time" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/event/tag" "golang.org/x/tools/internal/robustio" @@ -24,38 +24,42 @@ type memoizedFS struct { // filesByID maps existing file inodes to the result of a read. // (The read may have failed, e.g. due to EACCES or a delete between stat+read.) 
 	// Each slice is a non-empty list of aliases: different URIs.
-	filesByID map[robustio.FileID][]*DiskFile
+	filesByID map[robustio.FileID][]*diskFile
 }
 
-// A DiskFile is a file on the filesystem, or a failure to read one.
-// It implements the source.FileHandle interface.
-type DiskFile struct {
-	uri     span.URI
+func newMemoizedFS() *memoizedFS {
+	return &memoizedFS{filesByID: make(map[robustio.FileID][]*diskFile)}
+}
+
+// A diskFile is a file in the filesystem, or a failure to read one.
+// It implements the file.Handle interface.
+type diskFile struct {
+	uri     protocol.DocumentURI
 	modTime time.Time
 	content []byte
-	hash    source.Hash
+	hash    file.Hash
 	err     error
 }
 
-func (h *DiskFile) URI() span.URI { return h.uri }
+func (h *diskFile) URI() protocol.DocumentURI { return h.uri }
 
-func (h *DiskFile) FileIdentity() source.FileIdentity {
-	return source.FileIdentity{
+func (h *diskFile) Identity() file.Identity {
+	return file.Identity{
 		URI:  h.uri,
 		Hash: h.hash,
 	}
 }
 
-func (h *DiskFile) SameContentsOnDisk() bool { return true }
-func (h *DiskFile) Version() int32           { return 0 }
-func (h *DiskFile) Content() ([]byte, error) { return h.content, h.err }
+func (h *diskFile) SameContentsOnDisk() bool { return true }
+func (h *diskFile) Version() int32           { return 0 }
+func (h *diskFile) Content() ([]byte, error) { return h.content, h.err }
 
 // ReadFile stats and (maybe) reads the file, updates the cache, and returns it.
-func (fs *memoizedFS) ReadFile(ctx context.Context, uri span.URI) (source.FileHandle, error) { - id, mtime, err := robustio.GetFileID(uri.Filename()) +func (fs *memoizedFS) ReadFile(ctx context.Context, uri protocol.DocumentURI) (file.Handle, error) { + id, mtime, err := robustio.GetFileID(uri.Path()) if err != nil { // file does not exist - return &DiskFile{ + return &diskFile{ err: err, uri: uri, }, nil @@ -75,7 +79,7 @@ func (fs *memoizedFS) ReadFile(ctx context.Context, uri span.URI) (source.FileHa fs.mu.Lock() fhs, ok := fs.filesByID[id] if ok && fhs[0].modTime.Equal(mtime) { - var fh *DiskFile + var fh *diskFile // We have already seen this file and it has not changed. for _, h := range fhs { if h.uri == uri { @@ -104,7 +108,7 @@ func (fs *memoizedFS) ReadFile(ctx context.Context, uri span.URI) (source.FileHa fs.mu.Lock() if !recentlyModified { - fs.filesByID[id] = []*DiskFile{fh} + fs.filesByID[id] = []*diskFile{fh} } else { delete(fs.filesByID, id) } @@ -137,7 +141,7 @@ func (fs *memoizedFS) fileStats() (files, largest, errs int) { // ioLimit limits the number of parallel file reads per process. var ioLimit = make(chan struct{}, 128) -func readFile(ctx context.Context, uri span.URI, mtime time.Time) (*DiskFile, error) { +func readFile(ctx context.Context, uri protocol.DocumentURI, mtime time.Time) (*diskFile, error) { select { case ioLimit <- struct{}{}: case <-ctx.Done(): @@ -145,7 +149,7 @@ func readFile(ctx context.Context, uri span.URI, mtime time.Time) (*DiskFile, er } defer func() { <-ioLimit }() - ctx, done := event.Start(ctx, "cache.readFile", tag.File.Of(uri.Filename())) + ctx, done := event.Start(ctx, "cache.readFile", tag.File.Of(uri.Path())) _ = ctx defer done() @@ -153,15 +157,15 @@ func readFile(ctx context.Context, uri span.URI, mtime time.Time) (*DiskFile, er // ID, or whose mtime differs from the given mtime. 
However, in these cases // we expect the client to notify of a subsequent file change, and the file // content should be eventually consistent. - content, err := os.ReadFile(uri.Filename()) // ~20us + content, err := os.ReadFile(uri.Path()) // ~20us if err != nil { content = nil // just in case } - return &DiskFile{ + return &diskFile{ modTime: mtime, uri: uri, content: content, - hash: source.HashOf(content), + hash: file.HashOf(content), err: err, }, nil } diff --git a/gopls/internal/cache/fs_overlay.go b/gopls/internal/cache/fs_overlay.go new file mode 100644 index 00000000000..265598bb967 --- /dev/null +++ b/gopls/internal/cache/fs_overlay.go @@ -0,0 +1,79 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "context" + "sync" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" +) + +// An overlayFS is a file.Source that keeps track of overlays on top of a +// delegate FileSource. +type overlayFS struct { + delegate file.Source + + mu sync.Mutex + overlays map[protocol.DocumentURI]*overlay +} + +func newOverlayFS(delegate file.Source) *overlayFS { + return &overlayFS{ + delegate: delegate, + overlays: make(map[protocol.DocumentURI]*overlay), + } +} + +// Overlays returns a new unordered array of overlays. +func (fs *overlayFS) Overlays() []*overlay { + fs.mu.Lock() + defer fs.mu.Unlock() + overlays := make([]*overlay, 0, len(fs.overlays)) + for _, overlay := range fs.overlays { + overlays = append(overlays, overlay) + } + return overlays +} + +func (fs *overlayFS) ReadFile(ctx context.Context, uri protocol.DocumentURI) (file.Handle, error) { + fs.mu.Lock() + overlay, ok := fs.overlays[uri] + fs.mu.Unlock() + if ok { + return overlay, nil + } + return fs.delegate.ReadFile(ctx, uri) +} + +// An overlay is a file open in the editor. It may have unsaved edits. 
+// It implements the file.Handle interface, and the implicit contract +// of the debug.FileTmpl template. +type overlay struct { + uri protocol.DocumentURI + content []byte + hash file.Hash + version int32 + kind file.Kind + + // saved is true if a file matches the state on disk, + // and therefore does not need to be part of the overlay sent to go/packages. + saved bool +} + +func (o *overlay) URI() protocol.DocumentURI { return o.uri } + +func (o *overlay) Identity() file.Identity { + return file.Identity{ + URI: o.uri, + Hash: o.hash, + } +} + +func (o *overlay) Content() ([]byte, error) { return o.content, nil } +func (o *overlay) Version() int32 { return o.version } +func (o *overlay) SameContentsOnDisk() bool { return o.saved } +func (o *overlay) Kind() file.Kind { return o.kind } diff --git a/gopls/internal/cache/imports.go b/gopls/internal/cache/imports.go new file mode 100644 index 00000000000..7964427e528 --- /dev/null +++ b/gopls/internal/cache/imports.go @@ -0,0 +1,229 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "context" + "fmt" + "sync" + "time" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/keys" + "golang.org/x/tools/internal/event/tag" + "golang.org/x/tools/internal/imports" +) + +// refreshTimer implements delayed asynchronous refreshing of state. +// +// See the [refreshTimer.schedule] documentation for more details. +type refreshTimer struct { + mu sync.Mutex + duration time.Duration + timer *time.Timer + refreshFn func() +} + +// newRefreshTimer constructs a new refresh timer which schedules refreshes +// using the given function. 
+func newRefreshTimer(refresh func()) *refreshTimer { + return &refreshTimer{ + refreshFn: refresh, + } +} + +// schedule schedules the refresh function to run at some point in the future, +// if no existing refresh is already scheduled. +// +// At a minimum, scheduled refreshes are delayed by 30s, but they may be +// delayed longer to keep their expected execution time under 2% of wall clock +// time. +func (t *refreshTimer) schedule() { + t.mu.Lock() + defer t.mu.Unlock() + + if t.timer == nil { + // Don't refresh more than twice per minute. + delay := 30 * time.Second + // Don't spend more than ~2% of the time refreshing. + if adaptive := 50 * t.duration; adaptive > delay { + delay = adaptive + } + t.timer = time.AfterFunc(delay, func() { + start := time.Now() + t.refreshFn() + t.mu.Lock() + t.duration = time.Since(start) + t.timer = nil + t.mu.Unlock() + }) + } +} + +// A sharedModCache tracks goimports state for GOMODCACHE directories +// (each session may have its own GOMODCACHE). +// +// This state is refreshed independently of view-specific imports state. +type sharedModCache struct { + mu sync.Mutex + caches map[string]*imports.DirInfoCache // GOMODCACHE -> cache content; never invalidated + timers map[string]*refreshTimer // GOMODCACHE -> timer +} + +func (c *sharedModCache) dirCache(dir string) *imports.DirInfoCache { + c.mu.Lock() + defer c.mu.Unlock() + + cache, ok := c.caches[dir] + if !ok { + cache = imports.NewDirInfoCache() + c.caches[dir] = cache + } + return cache +} + +// refreshDir schedules a refresh of the given directory, which must be a +// module cache. 
+func (c *sharedModCache) refreshDir(ctx context.Context, dir string, logf func(string, ...any)) { + cache := c.dirCache(dir) + + c.mu.Lock() + defer c.mu.Unlock() + timer, ok := c.timers[dir] + if !ok { + timer = newRefreshTimer(func() { + _, done := event.Start(ctx, "cache.sharedModCache.refreshDir", tag.Directory.Of(dir)) + defer done() + imports.ScanModuleCache(dir, cache, logf) + }) + c.timers[dir] = timer + } + + timer.schedule() +} + +// importsState tracks view-specific imports state. +type importsState struct { + ctx context.Context + modCache *sharedModCache + refreshTimer *refreshTimer + + mu sync.Mutex + processEnv *imports.ProcessEnv + cachedModFileHash file.Hash +} + +// newImportsState constructs a new imports state for running goimports +// functions via [runProcessEnvFunc]. +// +// The returned state will automatically refresh itself following a call to +// runProcessEnvFunc. +func newImportsState(backgroundCtx context.Context, modCache *sharedModCache, env *imports.ProcessEnv) *importsState { + s := &importsState{ + ctx: backgroundCtx, + modCache: modCache, + processEnv: env, + } + s.refreshTimer = newRefreshTimer(s.refreshProcessEnv) + return s +} + +// runProcessEnvFunc runs goimports. +// +// Any call to runProcessEnvFunc will schedule a refresh of the imports state +// at some point in the future, if such a refresh is not already scheduled. See +// [refreshTimer] for more details. +func (s *importsState) runProcessEnvFunc(ctx context.Context, snapshot *Snapshot, fn func(context.Context, *imports.Options) error) error { + ctx, done := event.Start(ctx, "cache.importsState.runProcessEnvFunc") + defer done() + + s.mu.Lock() + defer s.mu.Unlock() + + // Find the hash of active mod files, if any. Using the unsaved content + // is slightly wasteful, since we'll drop caches a little too often, but + // the mod file shouldn't be changing while people are autocompleting. + // + // TODO(rfindley): consider instead hashing on-disk modfiles here. 
+ var modFileHash file.Hash + for m := range snapshot.view.workspaceModFiles { + fh, err := snapshot.ReadFile(ctx, m) + if err != nil { + return err + } + modFileHash.XORWith(fh.Identity().Hash) + } + + // If anything relevant to imports has changed, clear caches and + // update the processEnv. Clearing caches blocks on any background + // scans. + if modFileHash != s.cachedModFileHash { + s.processEnv.ClearModuleInfo() + s.cachedModFileHash = modFileHash + } + + // Run the user function. + opts := &imports.Options{ + // Defaults. + AllErrors: true, + Comments: true, + Fragment: true, + FormatOnly: false, + TabIndent: true, + TabWidth: 8, + Env: s.processEnv, + LocalPrefix: snapshot.Options().Local, + } + + if err := fn(ctx, opts); err != nil { + return err + } + + // Refresh the imports resolver after usage. This may seem counterintuitive, + // since it means the first ProcessEnvFunc after a long period of inactivity + // may be stale, but in practice we run ProcessEnvFuncs frequently during + // active development (e.g. during completion), and so this mechanism will be + // active while gopls is in use, and inactive when gopls is idle. + s.refreshTimer.schedule() + + // TODO(rfindley): the GOMODCACHE value used here isn't directly tied to the + // ProcessEnv.Env["GOMODCACHE"], though they should theoretically always + // agree. It would be better if we guaranteed this, possibly by setting all + // required environment variables in ProcessEnv.Env, to avoid the redundant + // Go command invocation. 
+ gomodcache := snapshot.view.folder.Env.GOMODCACHE + s.modCache.refreshDir(s.ctx, gomodcache, s.processEnv.Logf) + + return nil +} + +func (s *importsState) refreshProcessEnv() { + ctx, done := event.Start(s.ctx, "cache.importsState.refreshProcessEnv") + defer done() + + start := time.Now() + + s.mu.Lock() + resolver, err := s.processEnv.GetResolver() + s.mu.Unlock() + if err != nil { + return + } + + event.Log(s.ctx, "background imports cache refresh starting") + + // Prime the new resolver before updating the processEnv, so that gopls + // doesn't wait on an unprimed cache. + if err := imports.PrimeCache(context.Background(), resolver); err == nil { + event.Log(ctx, fmt.Sprintf("background refresh finished after %v", time.Since(start))) + } else { + event.Log(ctx, fmt.Sprintf("background refresh finished after %v", time.Since(start)), keys.Err.Of(err)) + } + + s.mu.Lock() + s.processEnv.UpdateResolver(resolver) + s.mu.Unlock() +} diff --git a/gopls/internal/lsp/cache/keys.go b/gopls/internal/cache/keys.go similarity index 98% rename from gopls/internal/lsp/cache/keys.go rename to gopls/internal/cache/keys.go index 449daba3a9e..664e539edbc 100644 --- a/gopls/internal/lsp/cache/keys.go +++ b/gopls/internal/cache/keys.go @@ -4,6 +4,8 @@ package cache +// session event tracing + import ( "io" diff --git a/gopls/internal/cache/load.go b/gopls/internal/cache/load.go new file mode 100644 index 00000000000..bcc551099d0 --- /dev/null +++ b/gopls/internal/cache/load.go @@ -0,0 +1,790 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cache + +import ( + "bytes" + "context" + "errors" + "fmt" + "path/filepath" + "sort" + "strings" + "sync/atomic" + "time" + + "golang.org/x/tools/go/packages" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/immutable" + "golang.org/x/tools/gopls/internal/util/pathutil" + "golang.org/x/tools/gopls/internal/util/slices" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" + "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/packagesinternal" + "golang.org/x/tools/internal/xcontext" +) + +var loadID uint64 // atomic identifier for loads + +// errNoPackages indicates that a load query matched no packages. +var errNoPackages = errors.New("no packages returned") + +// load calls packages.Load for the given scopes, updating package metadata, +// import graph, and mapped files with the result. +// +// The resulting error may wrap the moduleErrorMap error type, representing +// errors associated with specific modules. +// +// If scopes contains a file scope there must be exactly one scope. +func (s *Snapshot) load(ctx context.Context, allowNetwork bool, scopes ...loadScope) (err error) { + id := atomic.AddUint64(&loadID, 1) + eventName := fmt.Sprintf("go/packages.Load #%d", id) // unique name for logging + + var query []string + var containsDir bool // for logging + var standalone bool // whether this is a load of a standalone file + + // Keep track of module query -> module path so that we can later correlate query + // errors with errors. + moduleQueries := make(map[string]string) + for _, scope := range scopes { + switch scope := scope.(type) { + case packageLoadScope: + // The only time we pass package paths is when we're doing a + // partial workspace load. 
In those cases, the paths came back from + // go list and should already be GOPATH-vendorized when appropriate. + query = append(query, string(scope)) + + case fileLoadScope: + // Given multiple scopes, the resulting load might contain inaccurate + // information. For example go/packages returns at most one command-line + // arguments package, and does not handle a combination of standalone + // files and packages. + uri := protocol.DocumentURI(scope) + if len(scopes) > 1 { + panic(fmt.Sprintf("internal error: load called with multiple scopes when a file scope is present (file: %s)", uri)) + } + fh := s.FindFile(uri) + if fh == nil || s.FileKind(fh) != file.Go { + // Don't try to load a file that doesn't exist, or isn't a go file. + continue + } + contents, err := fh.Content() + if err != nil { + continue + } + if isStandaloneFile(contents, s.Options().StandaloneTags) { + standalone = true + query = append(query, uri.Path()) + } else { + query = append(query, fmt.Sprintf("file=%s", uri.Path())) + } + + case moduleLoadScope: + modQuery := fmt.Sprintf("%s%c...", scope.dir, filepath.Separator) + query = append(query, modQuery) + moduleQueries[modQuery] = scope.modulePath + + case viewLoadScope: + // If we are outside of GOPATH, a module, or some other known + // build system, don't load subdirectories. 
+ if s.view.typ == AdHocView { + query = append(query, "./") + } else { + query = append(query, "./...") + } + + default: + panic(fmt.Sprintf("unknown scope type %T", scope)) + } + switch scope.(type) { + case viewLoadScope, moduleLoadScope: + containsDir = true + } + } + if len(query) == 0 { + return nil + } + sort.Strings(query) // for determinism + + ctx, done := event.Start(ctx, "cache.snapshot.load", tag.Query.Of(query)) + defer done() + + flags := LoadWorkspace + if allowNetwork { + flags |= AllowNetwork + } + _, inv, cleanup, err := s.goCommandInvocation(ctx, flags, &gocommand.Invocation{ + WorkingDir: s.view.root.Path(), + }) + if err != nil { + return err + } + + // Set a last resort deadline on packages.Load since it calls the go + // command, which may hang indefinitely if it has a bug. golang/go#42132 + // and golang/go#42255 have more context. + ctx, cancel := context.WithTimeout(ctx, 10*time.Minute) + defer cancel() + + cfg := s.config(ctx, inv) + pkgs, err := packages.Load(cfg, query...) + cleanup() + + // If the context was canceled, return early. Otherwise, we might be + // type-checking an incomplete result. Check the context directly, + // because go/packages adds extra information to the error. + if ctx.Err() != nil { + return ctx.Err() + } + + // This log message is sought for by TestReloadOnlyOnce. + labels := append(s.Labels(), tag.Query.Of(query), tag.PackageCount.Of(len(pkgs))) + if err != nil { + event.Error(ctx, eventName, err, labels...) + } else { + event.Log(ctx, eventName, labels...) + } + + if standalone { + // Handle standalone package result. + // + // In general, this should just be a single "command-line-arguments" + // package containing the requested file. However, if the file is a test + // file, go/packages may return test variants of the command-line-arguments + // package. We don't support this; theoretically we could, but it seems + // unnecessarily complicated. 
+ // + // Prior to golang/go#64233 we just assumed that we'd get exactly one + // package here. The categorization of bug reports below may be a bit + // verbose, but anticipates that perhaps we don't fully understand + // possible failure modes. + errorf := bug.Errorf + if s.view.typ == GoPackagesDriverView { + errorf = fmt.Errorf // all bets are off + } + + var standalonePkg *packages.Package + for _, pkg := range pkgs { + if pkg.ID == "command-line-arguments" { + if standalonePkg != nil { + return errorf("internal error: go/packages returned multiple standalone packages") + } + standalonePkg = pkg + } else if packagesinternal.GetForTest(pkg) == "" && !strings.HasSuffix(pkg.ID, ".test") { + return errorf("internal error: go/packages returned unexpected package %q for standalone file", pkg.ID) + } + } + if standalonePkg == nil { + return errorf("internal error: go/packages failed to return non-test standalone package") + } + if len(standalonePkg.CompiledGoFiles) > 0 { + pkgs = []*packages.Package{standalonePkg} + } else { + pkgs = nil + } + } + + if len(pkgs) == 0 { + if err == nil { + err = errNoPackages + } + return fmt.Errorf("packages.Load error: %w", err) + } + + moduleErrs := make(map[string][]packages.Error) // module path -> errors + filterFunc := s.view.filterFunc() + newMetadata := make(map[PackageID]*metadata.Package) + for _, pkg := range pkgs { + // The Go command returns synthetic list results for module queries that + // encountered module errors. + // + // For example, given a module path a.mod, we'll query for "a.mod/..." and + // the go command will return a package named "a.mod/..." holding this + // error. Save it for later interpretation. + // + // See golang/go#50862 for more details. 
+ if mod := moduleQueries[pkg.PkgPath]; mod != "" { // a synthetic result for the unloadable module + if len(pkg.Errors) > 0 { + moduleErrs[mod] = pkg.Errors + } + continue + } + + if !containsDir || s.Options().VerboseOutput { + event.Log(ctx, eventName, append( + s.Labels(), + tag.Package.Of(pkg.ID), + tag.Files.Of(pkg.CompiledGoFiles))...) + } + + // Ignore packages with no sources, since we will never be able to + // correctly invalidate that metadata. + if len(pkg.GoFiles) == 0 && len(pkg.CompiledGoFiles) == 0 { + continue + } + // Special case for the builtin package, as it has no dependencies. + if pkg.PkgPath == "builtin" { + if len(pkg.GoFiles) != 1 { + return fmt.Errorf("only expected 1 file for builtin, got %v", len(pkg.GoFiles)) + } + s.setBuiltin(pkg.GoFiles[0]) + continue + } + // Skip test main packages. + if isTestMain(pkg, s.view.folder.Env.GOCACHE) { + continue + } + // Skip filtered packages. They may be added anyway if they're + // dependencies of non-filtered packages. + // + // TODO(rfindley): why exclude metadata arbitrarily here? It should be safe + // to capture all metadata. + // TODO(rfindley): what about compiled go files? + if allFilesExcluded(pkg.GoFiles, filterFunc) { + continue + } + buildMetadata(newMetadata, pkg, cfg.Dir, standalone) + } + + s.mu.Lock() + + // Assert the invariant s.packages.Get(id).m == s.meta.metadata[id]. + s.packages.Range(func(id PackageID, ph *packageHandle) { + if s.meta.Packages[id] != ph.mp { + panic("inconsistent metadata") + } + }) + + // Compute the minimal metadata updates (for Clone) + // required to preserve the above invariant. + var files []protocol.DocumentURI // files to preload + seenFiles := make(map[protocol.DocumentURI]bool) + updates := make(map[PackageID]*metadata.Package) + for _, mp := range newMetadata { + if existing := s.meta.Packages[mp.ID]; existing == nil { + // Record any new files we should pre-load. 
+ for _, uri := range mp.CompiledGoFiles { + if !seenFiles[uri] { + seenFiles[uri] = true + files = append(files, uri) + } + } + updates[mp.ID] = mp + s.shouldLoad.Delete(mp.ID) + } + } + + event.Log(ctx, fmt.Sprintf("%s: updating metadata for %d packages", eventName, len(updates))) + + meta := s.meta.Update(updates) + workspacePackages := computeWorkspacePackagesLocked(ctx, s, meta) + s.meta = meta + s.workspacePackages = workspacePackages + s.resetActivePackagesLocked() + + s.mu.Unlock() + + // Opt: preLoad files in parallel. + // + // Requesting files in batch optimizes the underlying filesystem reads. + // However, this is also currently necessary for correctness: populating all + // files in the snapshot is necessary for certain operations that rely on the + // completeness of the file map, e.g. computing the set of directories to + // watch. + // + // TODO(rfindley, golang/go#57558): determine the set of directories based on + // loaded packages, so that reading files here is not necessary for + // correctness. + s.preloadFiles(ctx, files) + + if len(moduleErrs) > 0 { + return &moduleErrorMap{moduleErrs} + } + + return nil +} + +type moduleErrorMap struct { + errs map[string][]packages.Error // module path -> errors +} + +func (m *moduleErrorMap) Error() string { + var paths []string // sort for stability + for path, errs := range m.errs { + if len(errs) > 0 { // should always be true, but be cautious + paths = append(paths, path) + } + } + sort.Strings(paths) + + var buf bytes.Buffer + fmt.Fprintf(&buf, "%d modules have errors:\n", len(paths)) + for _, path := range paths { + fmt.Fprintf(&buf, "\t%s:%s\n", path, m.errs[path][0].Msg) + } + + return buf.String() +} + +// buildMetadata populates the updates map with metadata updates to +// apply, based on the given pkg. It recurs through pkg.Imports to ensure that +// metadata exists for all dependencies. 
+// +// Returns the metadata.Package that was built (or which was already present in +// updates), or nil if the package could not be built. Notably, the resulting +// metadata.Package may have an ID that differs from pkg.ID. +func buildMetadata(updates map[PackageID]*metadata.Package, pkg *packages.Package, loadDir string, standalone bool) *metadata.Package { + // Allow for multiple ad-hoc packages in the workspace (see #47584). + pkgPath := PackagePath(pkg.PkgPath) + id := PackageID(pkg.ID) + + if metadata.IsCommandLineArguments(id) { + var f string // file to use as disambiguating suffix + if len(pkg.CompiledGoFiles) > 0 { + f = pkg.CompiledGoFiles[0] + + // If there are multiple files, + // we can't use only the first. + // (Can this happen? #64557) + if len(pkg.CompiledGoFiles) > 1 { + bug.Reportf("unexpected files in command-line-arguments package: %v", pkg.CompiledGoFiles) + return nil + } + } else if len(pkg.IgnoredFiles) > 0 { + // A file=empty.go query results in IgnoredFiles=[empty.go]. + f = pkg.IgnoredFiles[0] + } else { + bug.Reportf("command-line-arguments package has neither CompiledGoFiles nor IgnoredFiles: %#v", "") //*pkg.Metadata) + return nil + } + id = PackageID(pkg.ID + f) + pkgPath = PackagePath(pkg.PkgPath + f) + } + + // Duplicate? + if existing, ok := updates[id]; ok { + // A package was encountered twice due to shared + // subgraphs (common) or cycles (rare). Although "go + // list" usually breaks cycles, we don't rely on it. + // breakImportCycles in metadataGraph.Clone takes care + // of it later. + return existing + } + + if pkg.TypesSizes == nil { + panic(id + ".TypeSizes is nil") + } + + // Recreate the metadata rather than reusing it to avoid locking. 
+ mp := &metadata.Package{ + ID: id, + PkgPath: pkgPath, + Name: PackageName(pkg.Name), + ForTest: PackagePath(packagesinternal.GetForTest(pkg)), + TypesSizes: pkg.TypesSizes, + LoadDir: loadDir, + Module: pkg.Module, + Errors: pkg.Errors, + DepsErrors: packagesinternal.GetDepsErrors(pkg), + Standalone: standalone, + } + + updates[id] = mp + + for _, filename := range pkg.CompiledGoFiles { + uri := protocol.URIFromPath(filename) + mp.CompiledGoFiles = append(mp.CompiledGoFiles, uri) + } + for _, filename := range pkg.GoFiles { + uri := protocol.URIFromPath(filename) + mp.GoFiles = append(mp.GoFiles, uri) + } + for _, filename := range pkg.IgnoredFiles { + uri := protocol.URIFromPath(filename) + mp.IgnoredFiles = append(mp.IgnoredFiles, uri) + } + + depsByImpPath := make(map[ImportPath]PackageID) + depsByPkgPath := make(map[PackagePath]PackageID) + for importPath, imported := range pkg.Imports { + importPath := ImportPath(importPath) + + // It is not an invariant that importPath == imported.PkgPath. + // For example, package "net" imports "golang.org/x/net/dns/dnsmessage" + // which refers to the package whose ID and PkgPath are both + // "vendor/golang.org/x/net/dns/dnsmessage". Notice the ImportMap, + // which maps ImportPaths to PackagePaths: + // + // $ go list -json net vendor/golang.org/x/net/dns/dnsmessage + // { + // "ImportPath": "net", + // "Name": "net", + // "Imports": [ + // "C", + // "vendor/golang.org/x/net/dns/dnsmessage", + // "vendor/golang.org/x/net/route", + // ... + // ], + // "ImportMap": { + // "golang.org/x/net/dns/dnsmessage": "vendor/golang.org/x/net/dns/dnsmessage", + // "golang.org/x/net/route": "vendor/golang.org/x/net/route" + // }, + // ... + // } + // { + // "ImportPath": "vendor/golang.org/x/net/dns/dnsmessage", + // "Name": "dnsmessage", + // ... + // } + // + // (Beware that, for historical reasons, go list uses + // the JSON field "ImportPath" for the package's + // path--effectively the linker symbol prefix.) 
+ // + // The example above is slightly special to go list + // because it's in the std module. Otherwise, + // vendored modules are simply modules whose directory + // is vendor/ instead of GOMODCACHE, and the + // import path equals the package path. + // + // But in GOPATH (non-module) mode, it's possible for + // package vendoring to cause a non-identity ImportMap, + // as in this example: + // + // $ cd $HOME/src + // $ find . -type f + // ./b/b.go + // ./vendor/example.com/a/a.go + // $ cat ./b/b.go + // package b + // import _ "example.com/a" + // $ cat ./vendor/example.com/a/a.go + // package a + // $ GOPATH=$HOME GO111MODULE=off go list -json ./b | grep -A2 ImportMap + // "ImportMap": { + // "example.com/a": "vendor/example.com/a" + // }, + + // Don't remember any imports with significant errors. + // + // The len=0 condition is a heuristic check for imports of + // non-existent packages (for which go/packages will create + // an edge to a synthesized node). The heuristic is unsound + // because some valid packages have zero files, for example, + // a directory containing only the file p_test.go defines an + // empty package p. + // TODO(adonovan): clarify this. Perhaps go/packages should + // report which nodes were synthesized. + if importPath != "unsafe" && len(imported.CompiledGoFiles) == 0 { + depsByImpPath[importPath] = "" // missing + continue + } + + // Don't record self-import edges. + // (This simplifies metadataGraph's cycle check.) + if PackageID(imported.ID) == id { + if len(pkg.Errors) == 0 { + bug.Reportf("self-import without error in package %s", id) + } + continue + } + + dep := buildMetadata(updates, imported, loadDir, false) // only top level packages can be standalone + + // Don't record edges to packages with no name, as they cause trouble for + // the importer (golang/go#60952). + // + // Also don't record edges to packages whose ID was modified (i.e. + // command-line-arguments packages), as encountered in golang/go#66109. 
In + // this case, we could theoretically keep the edge through dep.ID, but + // since this import doesn't make any sense in the first place, we instead + // choose to consider it invalid. + // + // However, we do want to insert these packages into the update map + // (buildMetadata above), so that we get type-checking diagnostics for the + // invalid packages. + if dep == nil || dep.ID != PackageID(imported.ID) || imported.Name == "" { + depsByImpPath[importPath] = "" // missing + continue + } + + depsByImpPath[importPath] = PackageID(imported.ID) + depsByPkgPath[PackagePath(imported.PkgPath)] = PackageID(imported.ID) + } + mp.DepsByImpPath = depsByImpPath + mp.DepsByPkgPath = depsByPkgPath + return mp + + // m.Diagnostics is set later in the loading pass, using + // computeLoadDiagnostics. +} + +// computeLoadDiagnostics computes and sets m.Diagnostics for the given metadata m. +// +// It should only be called during package handle construction in buildPackageHandle. +func computeLoadDiagnostics(ctx context.Context, snapshot *Snapshot, mp *metadata.Package) []*Diagnostic { + var diags []*Diagnostic + for _, packagesErr := range mp.Errors { + // Filter out parse errors from go list. We'll get them when we + // actually parse, and buggy overlay support may generate spurious + // errors. (See TestNewModule_Issue38207.) + if strings.Contains(packagesErr.Msg, "expected '") { + continue + } + pkgDiags, err := goPackagesErrorDiagnostics(ctx, packagesErr, mp, snapshot) + if err != nil { + // There are certain cases where the go command returns invalid + // positions, so we cannot panic or even bug.Reportf here. + event.Error(ctx, "unable to compute positions for list errors", err, tag.Package.Of(string(mp.ID))) + continue + } + diags = append(diags, pkgDiags...) + } + + // TODO(rfindley): this is buggy: an insignificant change to a modfile + // (or an unsaved modfile) could affect the position of deps errors, + // without invalidating the package. 
+ depsDiags, err := depsErrors(ctx, snapshot, mp) + if err != nil { + if ctx.Err() == nil { + // TODO(rfindley): consider making this a bug.Reportf. depsErrors should + // not normally fail. + event.Error(ctx, "unable to compute deps errors", err, tag.Package.Of(string(mp.ID))) + } + } else { + diags = append(diags, depsDiags...) + } + return diags +} + +// IsWorkspacePackage reports whether id points to a workspace package in s. +// +// Currently, the result depends on the current set of loaded packages, and so +// is not guaranteed to be stable. +func (s *Snapshot) IsWorkspacePackage(ctx context.Context, id PackageID) bool { + s.mu.Lock() + defer s.mu.Unlock() + + mg := s.meta + m := mg.Packages[id] + if m == nil { + return false + } + return isWorkspacePackageLocked(ctx, s, mg, m) +} + +// isWorkspacePackageLocked reports whether p is a workspace package for the +// snapshot s. +// +// Workspace packages are packages that we consider the user to be actively +// working on. As such, they are re-diagnosed on every keystroke, and searched +// for various workspace-wide queries such as references or workspace symbols. +// +// See the commentary inline for a description of the workspace package +// heuristics. +// +// s.mu must be held while calling this function. +// +// TODO(rfindley): remove 'meta' from this function signature. Whether or not a +// package is a workspace package should depend only on the package, view +// definition, and snapshot file source. While useful, the heuristic +// "allFilesHaveRealPackages" does not add that much value and is path +// dependent as it depends on the timing of loads. +func isWorkspacePackageLocked(ctx context.Context, s *Snapshot, meta *metadata.Graph, pkg *metadata.Package) bool { + if metadata.IsCommandLineArguments(pkg.ID) { + // Ad-hoc command-line-arguments packages aren't workspace packages. 
+ // With zero-config gopls (golang/go#57979) they should be very rare, as + // they should only arise when the user opens a file outside the workspace + // which isn't present in the import graph of a workspace package. + // + // Considering them as workspace packages tends to be racy, as they don't + // deterministically belong to any view. + if !pkg.Standalone { + return false + } + + // If all the files contained in pkg have a real package, we don't need to + // keep pkg as a workspace package. + if allFilesHaveRealPackages(meta, pkg) { + return false + } + + // For now, allow open standalone packages (i.e. go:build ignore) to be + // workspace packages, but this means they could belong to multiple views. + return containsOpenFileLocked(s, pkg) + } + + // If a real package is open, consider it to be part of the workspace. + // + // TODO(rfindley): reconsider this. In golang/go#66145, we saw that even if a + // View sees a real package for a file, it doesn't mean that View is able to + // cleanly diagnose the package. Yet, we do want to show diagnostics for open + // packages outside the workspace. Is there a better way to ensure that only + // the 'best' View gets a workspace package for the open file? + if containsOpenFileLocked(s, pkg) { + return true + } + + // Apply filtering logic. + // + // Workspace packages must contain at least one non-filtered file. + filterFunc := s.view.filterFunc() + uris := make(map[protocol.DocumentURI]unit) // filtered package URIs + for _, uri := range slices.Concat(pkg.CompiledGoFiles, pkg.GoFiles) { + if !strings.Contains(string(uri), "/vendor/") && !filterFunc(uri) { + uris[uri] = struct{}{} + } + } + if len(uris) == 0 { + return false // no non-filtered files + } + + // For non-module views (of type GOPATH or AdHoc), or if + // expandWorkspaceToModule is unset, workspace packages must be contained in + // the workspace folder. 
+ // + // For module views (of type GoMod or GoWork), packages must in any case be + // in a workspace module (enforced below). + if !s.view.moduleMode() || !s.Options().ExpandWorkspaceToModule { + folder := s.view.folder.Dir.Path() + inFolder := false + for uri := range uris { + if pathutil.InDir(folder, uri.Path()) { + inFolder = true + break + } + } + if !inFolder { + return false + } + } + + // In module mode, a workspace package must be contained in a workspace + // module. + if s.view.moduleMode() { + var modURI protocol.DocumentURI + if pkg.Module != nil { + modURI = protocol.URIFromPath(pkg.Module.GoMod) + } else { + // golang/go#65816: for std and cmd, Module is nil. + // Fall back to an inferior heuristic. + if len(pkg.CompiledGoFiles) == 0 { + return false // need at least one file to guess the go.mod file + } + dir := pkg.CompiledGoFiles[0].Dir() + var err error + modURI, err = findRootPattern(ctx, dir, "go.mod", lockedSnapshot{s}) + if err != nil || modURI == "" { + // err != nil implies context cancellation, in which case the result of + // this query does not matter. + return false + } + } + _, ok := s.view.workspaceModFiles[modURI] + return ok + } + + return true // an ad-hoc package or GOPATH package +} + +// containsOpenFileLocked reports whether any file referenced by m is open in +// the snapshot s. +// +// s.mu must be held while calling this function. +func containsOpenFileLocked(s *Snapshot, mp *metadata.Package) bool { + uris := map[protocol.DocumentURI]struct{}{} + for _, uri := range mp.CompiledGoFiles { + uris[uri] = struct{}{} + } + for _, uri := range mp.GoFiles { + uris[uri] = struct{}{} + } + + for uri := range uris { + fh, _ := s.files.get(uri) + if _, open := fh.(*overlay); open { + return true + } + } + return false +} + +// computeWorkspacePackagesLocked computes workspace packages in the +// snapshot s for the given metadata graph. The result does not +// contain intermediate test variants. 
+// +// s.mu must be held while calling this function. +func computeWorkspacePackagesLocked(ctx context.Context, s *Snapshot, meta *metadata.Graph) immutable.Map[PackageID, PackagePath] { + // The provided context is used for reading snapshot files, which can only + // fail due to context cancellation. Don't let this happen as it could lead + // to inconsistent results. + ctx = xcontext.Detach(ctx) + workspacePackages := make(map[PackageID]PackagePath) + for _, mp := range meta.Packages { + if !isWorkspacePackageLocked(ctx, s, meta, mp) { + continue + } + + switch { + case mp.ForTest == "": + // A normal package. + workspacePackages[mp.ID] = mp.PkgPath + case mp.ForTest == mp.PkgPath, mp.ForTest+"_test" == mp.PkgPath: + // The test variant of some workspace package or its x_test. + // To load it, we need to load the non-test variant with -test. + // + // Notably, this excludes intermediate test variants from workspace + // packages. + assert(!mp.IsIntermediateTestVariant(), "unexpected ITV") + workspacePackages[mp.ID] = mp.ForTest + } + } + return immutable.MapOf(workspacePackages) +} + +// allFilesHaveRealPackages reports whether all files referenced by m are +// contained in a "real" package (not command-line-arguments). +// +// If m is valid but all "real" packages containing any file are invalid, this +// function returns false. +// +// If m is not a command-line-arguments package, this is trivially true. +func allFilesHaveRealPackages(g *metadata.Graph, mp *metadata.Package) bool { + n := len(mp.CompiledGoFiles) +checkURIs: + for _, uri := range append(mp.CompiledGoFiles[0:n:n], mp.GoFiles...) { + for _, id := range g.IDs[uri] { + if !metadata.IsCommandLineArguments(id) { + continue checkURIs + } + } + return false + } + return true +} + +func isTestMain(pkg *packages.Package, gocache string) bool { + // Test mains must have an import path that ends with ".test". 
+ if !strings.HasSuffix(pkg.PkgPath, ".test") { + return false + } + // Test main packages are always named "main". + if pkg.Name != "main" { + return false + } + // Test mains always have exactly one GoFile that is in the build cache. + if len(pkg.GoFiles) > 1 { + return false + } + if !pathutil.InDir(gocache, pkg.GoFiles[0]) { + return false + } + return true +} diff --git a/gopls/internal/cache/metadata/cycle_test.go b/gopls/internal/cache/metadata/cycle_test.go new file mode 100644 index 00000000000..09628d881e9 --- /dev/null +++ b/gopls/internal/cache/metadata/cycle_test.go @@ -0,0 +1,146 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package metadata + +import ( + "sort" + "strings" + "testing" + + "golang.org/x/tools/gopls/internal/util/bug" +) + +func init() { + bug.PanicOnBugs = true +} + +// This is an internal test of the breakImportCycles logic. +func TestBreakImportCycles(t *testing.T) { + + // parse parses an import dependency graph. + // The input is a semicolon-separated list of node descriptions. + // Each node description is a package ID, optionally followed by + // "->" and a comma-separated list of successor IDs. + // Thus "a->b;b->c,d;e" represents the set of nodes {a,b,e} + // and the set of edges {a->b, b->c, b->d}. 
+ parse := func(s string) map[PackageID]*Package { + m := make(map[PackageID]*Package) + makeNode := func(name string) *Package { + id := PackageID(name) + n, ok := m[id] + if !ok { + n = &Package{ + ID: id, + DepsByPkgPath: make(map[PackagePath]PackageID), + } + m[id] = n + } + return n + } + if s != "" { + for _, item := range strings.Split(s, ";") { + nodeID, succIDs, ok := strings.Cut(item, "->") + node := makeNode(nodeID) + if ok { + for _, succID := range strings.Split(succIDs, ",") { + node.DepsByPkgPath[PackagePath(succID)] = PackageID(succID) + } + } + } + } + return m + } + + // Sanity check of cycle detector. + { + got := cyclic(parse("a->b;b->c;c->a,d")) + has := func(s string) bool { return strings.Contains(got, s) } + if !(has("a->b") && has("b->c") && has("c->a") && !has("d")) { + t.Fatalf("cyclic: got %q, want a->b->c->a or equivalent", got) + } + } + + // format formats an import graph, in lexicographic order, + // in the notation of parse, but with a "!" after the name + // of each node that has errors. + format := func(graph map[PackageID]*Package) string { + var items []string + for _, mp := range graph { + item := string(mp.ID) + if len(mp.Errors) > 0 { + item += "!" + } + var succs []string + for _, depID := range mp.DepsByPkgPath { + succs = append(succs, string(depID)) + } + if succs != nil { + sort.Strings(succs) + item += "->" + strings.Join(succs, ",") + } + items = append(items, item) + } + sort.Strings(items) + return strings.Join(items, ";") + } + + // We needn't test self-cycles as they are eliminated at Metadata construction. + for _, test := range []struct { + metadata, updates, want string + }{ + // Simple 2-cycle. + {"a->b", "b->a", + "a->b;b!"}, // broke b->a + + {"a->b;b->c;c", "b->a,c", + "a->b;b!->c;c"}, // broke b->a + + // Reversing direction of p->s edge creates pqrs cycle. 
+ {"a->p,q,r,s;p->q,s,z;q->r,z;r->s,z;s->z", "p->q,z;s->p,z", + "a->p,q,r,s;p!->z;q->r,z;r->s,z;s!->z"}, // broke p->q, s->p + + // We break all intra-SCC edges from updated nodes, + // which may be more than necessary (e.g. a->b). + {"a->b;b->c;c;d->a", "a->b,e;c->d", + "a!->e;b->c;c!;d->a"}, // broke a->b, c->d + } { + metadata := parse(test.metadata) + updates := parse(test.updates) + + if cycle := cyclic(metadata); cycle != "" { + t.Errorf("initial metadata %s has cycle %s: ", format(metadata), cycle) + continue + } + + t.Log("initial", format(metadata)) + + // Apply updates. + // (parse doesn't have a way to express node deletions, + // but they aren't very interesting.) + for id, mp := range updates { + metadata[id] = mp + } + + t.Log("updated", format(metadata)) + + // breakImportCycles accesses only these fields of Metadata: + // DepsByImpPath, ID - read + // DepsByPkgPath - read, updated + // Errors - updated + breakImportCycles(metadata, updates) + + t.Log("acyclic", format(metadata)) + + if cycle := cyclic(metadata); cycle != "" { + t.Errorf("resulting metadata %s has cycle %s: ", format(metadata), cycle) + } + + got := format(metadata) + if got != test.want { + t.Errorf("test.metadata=%s test.updates=%s: got=%s want=%s", + test.metadata, test.updates, got, test.want) + } + } +} diff --git a/gopls/internal/cache/metadata/graph.go b/gopls/internal/cache/metadata/graph.go new file mode 100644 index 00000000000..f09822d3575 --- /dev/null +++ b/gopls/internal/cache/metadata/graph.go @@ -0,0 +1,413 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package metadata + +import ( + "sort" + + "golang.org/x/tools/go/packages" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" +) + +// A Graph is an immutable and transitively closed graph of [Package] data. 
+type Graph struct { + // Packages maps package IDs to their associated Packages. + Packages map[PackageID]*Package + + // ImportedBy maps package IDs to the list of packages that import them. + ImportedBy map[PackageID][]PackageID + + // IDs maps file URIs to package IDs, sorted by (!valid, cli, packageID). + // A single file may belong to multiple packages due to tests packages. + // + // Invariant: all IDs present in the IDs map exist in the metadata map. + IDs map[protocol.DocumentURI][]PackageID +} + +// Update creates a new Graph containing the result of applying the given +// updates to the receiver, though the receiver is not itself mutated. As a +// special case, if updates is empty, Update just returns the receiver. +// +// A nil map value is used to indicate a deletion. +func (g *Graph) Update(updates map[PackageID]*Package) *Graph { + if len(updates) == 0 { + // Optimization: since the graph is immutable, we can return the receiver. + return g + } + + // Debugging golang/go#64227, golang/vscode-go#3126: + // Assert that the existing metadata graph is acyclic. + if cycle := cyclic(g.Packages); cycle != "" { + bug.Reportf("metadata is cyclic even before updates: %s", cycle) + } + // Assert that the updates contain no self-cycles. + for id, mp := range updates { + if mp != nil { + for _, depID := range mp.DepsByPkgPath { + if depID == id { + bug.Reportf("self-cycle in metadata update: %s", id) + } + } + } + } + + // Copy pkgs map then apply updates. + pkgs := make(map[PackageID]*Package, len(g.Packages)) + for id, mp := range g.Packages { + pkgs[id] = mp + } + for id, mp := range updates { + if mp == nil { + delete(pkgs, id) + } else { + pkgs[id] = mp + } + } + + // Break import cycles involving updated nodes. + breakImportCycles(pkgs, updates) + + return newGraph(pkgs) +} + +// newGraph returns a new metadataGraph, +// deriving relations from the specified metadata. +func newGraph(pkgs map[PackageID]*Package) *Graph { + // Build the import graph. 
+ importedBy := make(map[PackageID][]PackageID) + for id, mp := range pkgs { + for _, depID := range mp.DepsByPkgPath { + importedBy[depID] = append(importedBy[depID], id) + } + } + + // Collect file associations. + uriIDs := make(map[protocol.DocumentURI][]PackageID) + for id, mp := range pkgs { + uris := map[protocol.DocumentURI]struct{}{} + for _, uri := range mp.CompiledGoFiles { + uris[uri] = struct{}{} + } + for _, uri := range mp.GoFiles { + uris[uri] = struct{}{} + } + for uri := range uris { + uriIDs[uri] = append(uriIDs[uri], id) + } + } + + // Sort and filter file associations. + for uri, ids := range uriIDs { + sort.Slice(ids, func(i, j int) bool { + cli := IsCommandLineArguments(ids[i]) + clj := IsCommandLineArguments(ids[j]) + if cli != clj { + return clj + } + + // 2. packages appear in name order. + return ids[i] < ids[j] + }) + + // Choose the best IDs for each URI, according to the following rules: + // - If there are any valid real packages, choose them. + // - Else, choose the first valid command-line-argument package, if it exists. + // + // TODO(rfindley): it might be better to track all IDs here, and exclude + // them later when type checking, but this is the existing behavior. + for i, id := range ids { + // If we've seen *anything* prior to command-line arguments package, take + // it. Note that ids[0] may itself be command-line-arguments. + if i > 0 && IsCommandLineArguments(id) { + uriIDs[uri] = ids[:i] + break + } + } + } + + return &Graph{ + Packages: pkgs, + ImportedBy: importedBy, + IDs: uriIDs, + } +} + +// ReverseReflexiveTransitiveClosure returns a new mapping containing the +// metadata for the specified packages along with any package that +// transitively imports one of them, keyed by ID, including all the initial packages. 
+func (g *Graph) ReverseReflexiveTransitiveClosure(ids ...PackageID) map[PackageID]*Package { + seen := make(map[PackageID]*Package) + var visitAll func([]PackageID) + visitAll = func(ids []PackageID) { + for _, id := range ids { + if seen[id] == nil { + if mp := g.Packages[id]; mp != nil { + seen[id] = mp + visitAll(g.ImportedBy[id]) + } + } + } + } + visitAll(ids) + return seen +} + +// breakImportCycles breaks import cycles in the metadata by deleting +// Deps* edges. It modifies only metadata present in the 'updates' +// subset. This function has an internal test. +func breakImportCycles(metadata, updates map[PackageID]*Package) { + // 'go list' should never report a cycle without flagging it + // as such, but we're extra cautious since we're combining + // information from multiple runs of 'go list'. Also, Bazel + // may silently report cycles. + cycles := detectImportCycles(metadata, updates) + if len(cycles) > 0 { + // There were cycles (uncommon). Break them. + // + // The naive way to break cycles would be to perform a + // depth-first traversal and to detect and delete + // cycle-forming edges as we encounter them. + // However, we're not allowed to modify the existing + // Metadata records, so we can only break edges out of + // the 'updates' subset. + // + // Another possibility would be to delete not the + // cycle forming edge but the topmost edge on the + // stack whose tail is an updated node. + // However, this would require that we retroactively + // undo all the effects of the traversals that + // occurred since that edge was pushed on the stack. + // + // We use a simpler scheme: we compute the set of cycles. + // All cyclic paths necessarily involve at least one + // updated node, so it is sufficient to break all + // edges from each updated node to other members of + // the strong component. + // + // This may result in the deletion of dominating + // edges, causing some dependencies to appear + // spuriously unreachable. 
Consider A <-> B -> C + // where updates={A,B}. The cycle is {A,B} so the + // algorithm will break both A->B and B->A, causing + // A to no longer depend on B or C. + // + // But that's ok: any error in Metadata.Errors is + // conservatively assumed by snapshot.clone to be a + // potential import cycle error, and causes special + // invalidation so that if B later drops its + // cycle-forming import of A, both A and B will be + // invalidated. + for _, cycle := range cycles { + cyclic := make(map[PackageID]bool) + for _, mp := range cycle { + cyclic[mp.ID] = true + } + for id := range cyclic { + if mp := updates[id]; mp != nil { + for path, depID := range mp.DepsByImpPath { + if cyclic[depID] { + delete(mp.DepsByImpPath, path) + } + } + for path, depID := range mp.DepsByPkgPath { + if cyclic[depID] { + delete(mp.DepsByPkgPath, path) + } + } + + // Set m.Errors to enable special + // invalidation logic in snapshot.clone. + if len(mp.Errors) == 0 { + mp.Errors = []packages.Error{{ + Msg: "detected import cycle", + Kind: packages.ListError, + }} + } + } + } + } + + // double-check when debugging + if false { + if cycles := detectImportCycles(metadata, updates); len(cycles) > 0 { + bug.Reportf("unbroken cycle: %v", cycles) + } + } + } +} + +// cyclic returns a description of a cycle, +// if the graph is cyclic, otherwise "". +func cyclic(graph map[PackageID]*Package) string { + const ( + unvisited = 0 + visited = 1 + onstack = 2 + ) + color := make(map[PackageID]int) + var visit func(id PackageID) string + visit = func(id PackageID) string { + switch color[id] { + case unvisited: + color[id] = onstack + case onstack: + return string(id) // cycle! 
+ case visited: + return "" + } + if mp := graph[id]; mp != nil { + for _, depID := range mp.DepsByPkgPath { + if cycle := visit(depID); cycle != "" { + return string(id) + "->" + cycle + } + } + } + color[id] = visited + return "" + } + for id := range graph { + if cycle := visit(id); cycle != "" { + return cycle + } + } + return "" +} + +// detectImportCycles reports cycles in the metadata graph. It returns a new +// unordered array of all cycles (nontrivial strong components) in the +// metadata graph reachable from a non-nil 'updates' value. +func detectImportCycles(metadata, updates map[PackageID]*Package) [][]*Package { + // We use the depth-first algorithm of Tarjan. + // https://doi.org/10.1137/0201010 + // + // TODO(adonovan): when we can use generics, consider factoring + // in common with the other implementation of Tarjan (in typerefs), + // abstracting over the node and edge representation. + + // A node wraps a Metadata with its working state. + // (Unfortunately we can't intrude on shared Metadata.) + type node struct { + rep *node + mp *Package + index, lowlink int32 + scc int8 // TODO(adonovan): opt: cram these 1.5 bits into previous word + } + nodes := make(map[PackageID]*node, len(metadata)) + nodeOf := func(id PackageID) *node { + n, ok := nodes[id] + if !ok { + mp := metadata[id] + if mp == nil { + // Dangling import edge. + // Not sure whether a go/packages driver ever + // emits this, but create a dummy node in case. + // Obviously it won't be part of any cycle. + mp = &Package{ID: id} + } + n = &node{mp: mp} + n.rep = n + nodes[id] = n + } + return n + } + + // find returns the canonical node decl. + // (The nodes form a disjoint set forest.) 
+ var find func(*node) *node + find = func(n *node) *node { + rep := n.rep + if rep != n { + rep = find(rep) + n.rep = rep // simple path compression (no union-by-rank) + } + return rep + } + + // global state + var ( + index int32 = 1 + stack []*node + sccs [][]*Package // set of nontrivial strongly connected components + ) + + // visit implements the depth-first search of Tarjan's SCC algorithm + // Precondition: x is canonical. + var visit func(*node) + visit = func(x *node) { + x.index = index + x.lowlink = index + index++ + + stack = append(stack, x) // push + x.scc = -1 + + for _, yid := range x.mp.DepsByPkgPath { + y := nodeOf(yid) + // Loop invariant: x is canonical. + y = find(y) + if x == y { + continue // nodes already combined (self-edges are impossible) + } + + switch { + case y.scc > 0: + // y is already a collapsed SCC + + case y.scc < 0: + // y is on the stack, and thus in the current SCC. + if y.index < x.lowlink { + x.lowlink = y.index + } + + default: + // y is unvisited; visit it now. + visit(y) + // Note: x and y are now non-canonical. + x = find(x) + if y.lowlink < x.lowlink { + x.lowlink = y.lowlink + } + } + } + + // Is x the root of an SCC? + if x.lowlink == x.index { + // Gather all metadata in the SCC (if nontrivial). + var scc []*Package + for { + // Pop y from stack. + i := len(stack) - 1 + y := stack[i] + stack = stack[:i] + if x != y || scc != nil { + scc = append(scc, y.mp) + } + if x == y { + break // complete + } + // x becomes y's canonical representative. + y.rep = x + } + if scc != nil { + sccs = append(sccs, scc) + } + x.scc = 1 + } + } + + // Visit only the updated nodes: + // the existing metadata graph has no cycles, + // so any new cycle must involve an updated node. 
+ for id, mp := range updates { + if mp != nil { + if n := nodeOf(id); n.index == 0 { // unvisited + visit(n) + } + } + } + + return sccs +} diff --git a/gopls/internal/cache/metadata/metadata.go b/gopls/internal/cache/metadata/metadata.go new file mode 100644 index 00000000000..b6355166640 --- /dev/null +++ b/gopls/internal/cache/metadata/metadata.go @@ -0,0 +1,254 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The metadata package defines types and functions for working with package +// metadata, which describes Go packages and their relationships. +// +// Package metadata is loaded by gopls using go/packages, and the [Package] +// type is itself a projection and translation of data from +// go/packages.Package. +// +// Packages are assembled into an immutable [Graph] +package metadata + +import ( + "go/ast" + "go/types" + "sort" + "strconv" + "strings" + + "golang.org/x/tools/go/packages" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/packagesinternal" +) + +// Declare explicit types for package paths, names, and IDs to ensure that we +// never use an ID where a path belongs, and vice versa. If we confused these, +// it would result in confusing errors because package IDs often look like +// package paths. +type ( + PackageID string // go list's unique identifier for a package (e.g. "vendor/example.com/foo [vendor/example.com/bar.test]") + PackagePath string // name used to prefix linker symbols (e.g. "vendor/example.com/foo") + PackageName string // identifier in 'package' declaration (e.g. "foo") + ImportPath string // path that appears in an import declaration (e.g. "example.com/foo") +) + +// Package represents package metadata retrieved from go/packages. +// The DepsBy{Imp,Pkg}Path maps do not contain self-import edges. 
+// +// An ad-hoc package (without go.mod or GOPATH) has its ID, PkgPath, +// and LoadDir equal to the absolute path of its directory. +type Package struct { + ID PackageID + PkgPath PackagePath + Name PackageName + + // these three fields are as defined by go/packages.Package + GoFiles []protocol.DocumentURI + CompiledGoFiles []protocol.DocumentURI + IgnoredFiles []protocol.DocumentURI + + ForTest PackagePath // q in a "p [q.test]" package, else "" + TypesSizes types.Sizes + Errors []packages.Error // must be set for packages in import cycles + DepsByImpPath map[ImportPath]PackageID // may contain dups; empty ID => missing + DepsByPkgPath map[PackagePath]PackageID // values are unique and non-empty + Module *packages.Module + DepsErrors []*packagesinternal.PackageError + LoadDir string // directory from which go/packages was run + Standalone bool // package synthesized for a standalone file (e.g. ignore-tagged) +} + +func (mp *Package) String() string { return string(mp.ID) } + +// IsIntermediateTestVariant reports whether the given package is an +// intermediate test variant (ITV), e.g. "net/http [net/url.test]". +// +// An ITV has identical syntax to the regular variant, but different +// import metadata (DepsBy{Imp,Pkg}Path). +// +// Such test variants arise when an x_test package (in this case net/url_test) +// imports a package (in this case net/http) that itself imports the +// non-x_test package (in this case net/url). +// +// This is done so that the forward transitive closure of net/url_test has +// only one package for the "net/url" import. +// The ITV exists to hold the test variant import: +// +// net/url_test [net/url.test] +// +// | "net/http" -> net/http [net/url.test] +// | "net/url" -> net/url [net/url.test] +// | ... +// +// net/http [net/url.test] +// +// | "net/url" -> net/url [net/url.test] +// | ... 
+// +// This restriction propagates throughout the import graph of net/http: for +// every package imported by net/http that imports net/url, there must be an +// intermediate test variant that instead imports "net/url [net/url.test]". +// +// As one can see from the example of net/url and net/http, intermediate test +// variants can result in many additional packages that are essentially (but +// not quite) identical. For this reason, we filter these variants wherever +// possible. +// +// # Why we mostly ignore intermediate test variants +// +// In projects with complicated tests, there may be a very large +// number of ITVs--asymptotically more than the number of ordinary +// variants. Since they have identical syntax, it is fine in most +// cases to ignore them since the results of analyzing the ordinary +// variant suffice. However, this is not entirely sound. +// +// Consider this package: +// +// // p/p.go -- in all variants of p +// package p +// type T struct { io.Closer } +// +// // p/p_test.go -- in test variant of p +// package p +// func (T) Close() error { ... } +// +// The ordinary variant "p" defines T with a Close method promoted +// from io.Closer. But its test variant "p [p.test]" defines a type T +// with a Close method from p_test.go. +// +// Now consider a package q that imports p, perhaps indirectly. Within +// it, T.Close will resolve to the first Close method: +// +// // q/q.go -- in all variants of q +// package q +// import "p" +// var _ = new(p.T).Close +// +// Let's assume p also contains this file defining an external test (xtest): +// +// // p/p_x_test.go -- external test of p +// package p_test +// import ( "q"; "testing" ) +// func Test(t *testing.T) { ... } +// +// Note that q imports p, but p's xtest imports q. 
Now, in "q +// [p.test]", the intermediate test variant of q built for p's +// external test, T.Close resolves not to the io.Closer.Close +// interface method, but to the concrete method of T.Close +// declared in p_test.go. +// +// If we now request all references to the T.Close declaration in +// p_test.go, the result should include the reference from q's ITV. +// (It's not just methods that can be affected; fields can too, though +// it requires bizarre code to achieve.) +// +// As a matter of policy, gopls mostly ignores this subtlety, +// because to account for it would require that we type-check every +// intermediate test variant of p, of which there could be many. +// Good code doesn't rely on such trickery. +// +// Most callers of MetadataForFile call RemoveIntermediateTestVariants +// to discard them before requesting type checking, or the products of +// type-checking such as the cross-reference index or method set index. +// +// MetadataForFile doesn't do this filtering itself because in some +// cases we need to make a reverse dependency query on the metadata +// graph, and it's important to include the rdeps of ITVs in that +// query. But the filtering of ITVs should be applied after that step, +// before type checking. +// +// In general, we should never type check an ITV. +func (mp *Package) IsIntermediateTestVariant() bool { + return mp.ForTest != "" && mp.ForTest != mp.PkgPath && mp.ForTest+"_test" != mp.PkgPath +} + +// A Source maps package IDs to metadata for the packages. +// +// TODO(rfindley): replace this with a concrete metadata graph, once it is +// exposed from the snapshot. +type Source interface { + // Metadata returns the [Package] for the given package ID, or nil if it does + // not exist. + // TODO(rfindley): consider returning (*Metadata, bool) + // TODO(rfindley): consider renaming this method. + Metadata(PackageID) *Package +} + +// TODO(rfindley): move the utility functions below to a util.go file. 
+ +// IsCommandLineArguments reports whether a given value denotes +// "command-line-arguments" package, which is a package with an unknown ID +// created by the go command. It can have a test variant, which is why callers +// should not check that a value equals "command-line-arguments" directly. +func IsCommandLineArguments(id PackageID) bool { + return strings.Contains(string(id), "command-line-arguments") +} + +// SortPostOrder sorts the IDs so that if x depends on y, then y appears before x. +func SortPostOrder(meta Source, ids []PackageID) { + postorder := make(map[PackageID]int) + order := 0 + var visit func(PackageID) + visit = func(id PackageID) { + if _, ok := postorder[id]; !ok { + postorder[id] = -1 // break recursion + if mp := meta.Metadata(id); mp != nil { + for _, depID := range mp.DepsByPkgPath { + visit(depID) + } + } + order++ + postorder[id] = order + } + } + for _, id := range ids { + visit(id) + } + sort.Slice(ids, func(i, j int) bool { + return postorder[ids[i]] < postorder[ids[j]] + }) +} + +// UnquoteImportPath returns the unquoted import path of s, +// or "" if the path is not properly quoted. +func UnquoteImportPath(spec *ast.ImportSpec) ImportPath { + path, err := strconv.Unquote(spec.Path.Value) + if err != nil { + return "" + } + return ImportPath(path) +} + +// RemoveIntermediateTestVariants removes intermediate test variants, modifying +// the array. We use a pointer to a slice to make it impossible to forget to use +// the result. +func RemoveIntermediateTestVariants(pmetas *[]*Package) { + metas := *pmetas + res := metas[:0] + for _, mp := range metas { + if !mp.IsIntermediateTestVariant() { + res = append(res, mp) + } + } + *pmetas = res +} + +// IsValidImport returns whether importPkgPath is importable +// by pkgPath. 
+func IsValidImport(pkgPath, importPkgPath PackagePath) bool { + i := strings.LastIndex(string(importPkgPath), "/internal/") + if i == -1 { + return true + } + // TODO(rfindley): this looks wrong: IsCommandLineArguments is meant to + // operate on package IDs, not package paths. + if IsCommandLineArguments(PackageID(pkgPath)) { + return true + } + // TODO(rfindley): this is wrong. mod.testx/p should not be able to + // import mod.test/internal: https://go.dev/play/p/-Ca6P-E4V4q + return strings.HasPrefix(string(pkgPath), string(importPkgPath[:i])) +} diff --git a/gopls/internal/lsp/source/methodsets/methodsets.go b/gopls/internal/cache/methodsets/methodsets.go similarity index 97% rename from gopls/internal/lsp/source/methodsets/methodsets.go rename to gopls/internal/cache/methodsets/methodsets.go index d934c3c6907..ed7ead7a747 100644 --- a/gopls/internal/lsp/source/methodsets/methodsets.go +++ b/gopls/internal/cache/methodsets/methodsets.go @@ -52,9 +52,9 @@ import ( "strings" "golang.org/x/tools/go/types/objectpath" - "golang.org/x/tools/gopls/internal/lsp/frob" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/gopls/internal/util/frob" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/aliases" ) // An Index records the non-empty method sets of all package-level @@ -229,7 +229,7 @@ func (b *indexBuilder) build(fset *token.FileSet, pkg *types.Package) *Index { // Instantiations of generic methods don't have an // object path, so we use the generic. 
- if p, err := objectpathFor(typeparams.OriginMethod(method)); err != nil { + if p, err := objectpathFor(method.Origin()); err != nil { panic(err) // can't happen for a method of a package-level type } else { m.ObjectPath = b.string(string(p)) @@ -305,7 +305,7 @@ func methodSetInfo(t types.Type, setIndexInfo func(*gobMethod, *types.Func)) gob // EnsurePointer wraps T in a types.Pointer if T is a named, non-interface type. // This is useful to make sure you consider a named type's full method set. func EnsurePointer(T types.Type) types.Type { - if _, ok := T.(*types.Named); ok && !types.IsInterface(T) { + if _, ok := aliases.Unalias(T).(*types.Named); ok && !types.IsInterface(T) { return types.NewPointer(T) } @@ -330,6 +330,9 @@ func fingerprint(method *types.Func) (string, bool) { var fprint func(t types.Type) fprint = func(t types.Type) { switch t := t.(type) { + case *aliases.Alias: + fprint(aliases.Unalias(t)) + case *types.Named: tname := t.Obj() if tname.Pkg() != nil { @@ -434,7 +437,7 @@ func fingerprint(method *types.Func) (string, bool) { buf.WriteString("interface{...}") } - case *typeparams.TypeParam: + case *types.TypeParam: tricky = true // TODO(adonovan): refine this by adding a numeric suffix // indicating the index among the receiver type's parameters. 
diff --git a/gopls/internal/lsp/cache/mod.go b/gopls/internal/cache/mod.go similarity index 77% rename from gopls/internal/lsp/cache/mod.go rename to gopls/internal/cache/mod.go index 8a452ab086d..a120037e221 100644 --- a/gopls/internal/lsp/cache/mod.go +++ b/gopls/internal/cache/mod.go @@ -14,33 +14,40 @@ import ( "golang.org/x/mod/modfile" "golang.org/x/mod/module" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/event/tag" "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/memoize" ) +// A ParsedModule contains the results of parsing a go.mod file. +type ParsedModule struct { + URI protocol.DocumentURI + File *modfile.File + Mapper *protocol.Mapper + ParseErrors []*Diagnostic +} + // ParseMod parses a go.mod file, using a cache. It may return partial results and an error. -func (s *snapshot) ParseMod(ctx context.Context, fh source.FileHandle) (*source.ParsedModule, error) { +func (s *Snapshot) ParseMod(ctx context.Context, fh file.Handle) (*ParsedModule, error) { uri := fh.URI() s.mu.Lock() entry, hit := s.parseModHandles.Get(uri) s.mu.Unlock() - type parseModKey source.FileIdentity + type parseModKey file.Identity type parseModResult struct { - parsed *source.ParsedModule + parsed *ParsedModule err error } // cache miss? 
if !hit { - promise, release := s.store.Promise(parseModKey(fh.FileIdentity()), func(ctx context.Context, _ interface{}) interface{} { + promise, release := s.store.Promise(parseModKey(fh.Identity()), func(ctx context.Context, _ interface{}) interface{} { parsed, err := parseModImpl(ctx, fh) return parseModResult{parsed, err} }) @@ -62,7 +69,7 @@ func (s *snapshot) ParseMod(ctx context.Context, fh source.FileHandle) (*source. // parseModImpl parses the go.mod file whose name and contents are in fh. // It may return partial results and an error. -func parseModImpl(ctx context.Context, fh source.FileHandle) (*source.ParsedModule, error) { +func parseModImpl(ctx context.Context, fh file.Handle) (*ParsedModule, error) { _, done := event.Start(ctx, "cache.ParseMod", tag.URI.Of(fh.URI())) defer done() @@ -71,9 +78,9 @@ func parseModImpl(ctx context.Context, fh source.FileHandle) (*source.ParsedModu return nil, err } m := protocol.NewMapper(fh.URI(), contents) - file, parseErr := modfile.Parse(fh.URI().Filename(), contents, nil) + file, parseErr := modfile.Parse(fh.URI().Path(), contents, nil) // Attempt to convert the error to a standardized parse error. - var parseErrors []*source.Diagnostic + var parseErrors []*Diagnostic if parseErr != nil { mfErrList, ok := parseErr.(modfile.ErrorList) if !ok { @@ -84,16 +91,16 @@ func parseModImpl(ctx context.Context, fh source.FileHandle) (*source.ParsedModu if err != nil { return nil, err } - parseErrors = append(parseErrors, &source.Diagnostic{ + parseErrors = append(parseErrors, &Diagnostic{ URI: fh.URI(), Range: rng, Severity: protocol.SeverityError, - Source: source.ParseError, + Source: ParseError, Message: mfErr.Err.Error(), }) } } - return &source.ParsedModule{ + return &ParsedModule{ URI: fh.URI(), Mapper: m, File: file, @@ -101,24 +108,32 @@ func parseModImpl(ctx context.Context, fh source.FileHandle) (*source.ParsedModu }, parseErr } +// A ParsedWorkFile contains the results of parsing a go.work file. 
+type ParsedWorkFile struct { + URI protocol.DocumentURI + File *modfile.WorkFile + Mapper *protocol.Mapper + ParseErrors []*Diagnostic +} + // ParseWork parses a go.work file, using a cache. It may return partial results and an error. // TODO(adonovan): move to new work.go file. -func (s *snapshot) ParseWork(ctx context.Context, fh source.FileHandle) (*source.ParsedWorkFile, error) { +func (s *Snapshot) ParseWork(ctx context.Context, fh file.Handle) (*ParsedWorkFile, error) { uri := fh.URI() s.mu.Lock() entry, hit := s.parseWorkHandles.Get(uri) s.mu.Unlock() - type parseWorkKey source.FileIdentity + type parseWorkKey file.Identity type parseWorkResult struct { - parsed *source.ParsedWorkFile + parsed *ParsedWorkFile err error } // cache miss? if !hit { - handle, release := s.store.Promise(parseWorkKey(fh.FileIdentity()), func(ctx context.Context, _ interface{}) interface{} { + handle, release := s.store.Promise(parseWorkKey(fh.Identity()), func(ctx context.Context, _ interface{}) interface{} { parsed, err := parseWorkImpl(ctx, fh) return parseWorkResult{parsed, err} }) @@ -139,7 +154,7 @@ func (s *snapshot) ParseWork(ctx context.Context, fh source.FileHandle) (*source } // parseWorkImpl parses a go.work file. It may return partial results and an error. -func parseWorkImpl(ctx context.Context, fh source.FileHandle) (*source.ParsedWorkFile, error) { +func parseWorkImpl(ctx context.Context, fh file.Handle) (*ParsedWorkFile, error) { _, done := event.Start(ctx, "cache.ParseWork", tag.URI.Of(fh.URI())) defer done() @@ -148,9 +163,9 @@ func parseWorkImpl(ctx context.Context, fh source.FileHandle) (*source.ParsedWor return nil, err } m := protocol.NewMapper(fh.URI(), content) - file, parseErr := modfile.ParseWork(fh.URI().Filename(), content, nil) + file, parseErr := modfile.ParseWork(fh.URI().Path(), content, nil) // Attempt to convert the error to a standardized parse error. 
- var parseErrors []*source.Diagnostic + var parseErrors []*Diagnostic if parseErr != nil { mfErrList, ok := parseErr.(modfile.ErrorList) if !ok { @@ -161,16 +176,16 @@ func parseWorkImpl(ctx context.Context, fh source.FileHandle) (*source.ParsedWor if err != nil { return nil, err } - parseErrors = append(parseErrors, &source.Diagnostic{ + parseErrors = append(parseErrors, &Diagnostic{ URI: fh.URI(), Range: rng, Severity: protocol.SeverityError, - Source: source.ParseError, + Source: ParseError, Message: mfErr.Err.Error(), }) } } - return &source.ParsedWorkFile{ + return &ParsedWorkFile{ URI: fh.URI(), Mapper: m, File: file, @@ -180,15 +195,15 @@ func parseWorkImpl(ctx context.Context, fh source.FileHandle) (*source.ParsedWor // goSum reads the go.sum file for the go.mod file at modURI, if it exists. If // it doesn't exist, it returns nil. -func (s *snapshot) goSum(ctx context.Context, modURI span.URI) []byte { +func (s *Snapshot) goSum(ctx context.Context, modURI protocol.DocumentURI) []byte { // Get the go.sum file, either from the snapshot or directly from the // cache. Avoid (*snapshot).ReadFile here, as we don't want to add // nonexistent file handles to the snapshot if the file does not exist. // // TODO(rfindley): but that's not right. Changes to sum files should // invalidate content, even if it's nonexistent content. 
- sumURI := span.URIFromPath(sumFilename(modURI)) - var sumFH source.FileHandle = s.FindFile(sumURI) + sumURI := protocol.URIFromPath(sumFilename(modURI)) + sumFH := s.FindFile(sumURI) if sumFH == nil { var err error sumFH, err = s.view.fs.ReadFile(ctx, sumURI) @@ -203,17 +218,17 @@ func (s *snapshot) goSum(ctx context.Context, modURI span.URI) []byte { return content } -func sumFilename(modURI span.URI) string { - return strings.TrimSuffix(modURI.Filename(), ".mod") + ".sum" +func sumFilename(modURI protocol.DocumentURI) string { + return strings.TrimSuffix(modURI.Path(), ".mod") + ".sum" } // ModWhy returns the "go mod why" result for each module named in a // require statement in the go.mod file. // TODO(adonovan): move to new mod_why.go file. -func (s *snapshot) ModWhy(ctx context.Context, fh source.FileHandle) (map[string]string, error) { +func (s *Snapshot) ModWhy(ctx context.Context, fh file.Handle) (map[string]string, error) { uri := fh.URI() - if s.FileKind(fh) != source.Mod { + if s.FileKind(fh) != file.Mod { return nil, fmt.Errorf("%s is not a go.mod file", uri) } @@ -229,7 +244,7 @@ func (s *snapshot) ModWhy(ctx context.Context, fh source.FileHandle) (map[string // cache miss? if !hit { handle := memoize.NewPromise("modWhy", func(ctx context.Context, arg interface{}) interface{} { - why, err := modWhyImpl(ctx, arg.(*snapshot), fh) + why, err := modWhyImpl(ctx, arg.(*Snapshot), fh) return modWhyResult{why, err} }) @@ -249,7 +264,7 @@ func (s *snapshot) ModWhy(ctx context.Context, fh source.FileHandle) (map[string } // modWhyImpl returns the result of "go mod why -m" on the specified go.mod file. 
-func modWhyImpl(ctx context.Context, snapshot *snapshot, fh source.FileHandle) (map[string]string, error) { +func modWhyImpl(ctx context.Context, snapshot *Snapshot, fh file.Handle) (map[string]string, error) { ctx, done := event.Start(ctx, "cache.ModWhy", tag.URI.Of(fh.URI())) defer done() @@ -265,12 +280,12 @@ func modWhyImpl(ctx context.Context, snapshot *snapshot, fh source.FileHandle) ( inv := &gocommand.Invocation{ Verb: "mod", Args: []string{"why", "-m"}, - WorkingDir: filepath.Dir(fh.URI().Filename()), + WorkingDir: filepath.Dir(fh.URI().Path()), } for _, req := range pm.File.Require { inv.Args = append(inv.Args, req.Mod.Path) } - stdout, err := snapshot.RunGoCommandDirect(ctx, source.Normal, inv) + stdout, err := snapshot.RunGoCommandDirect(ctx, Normal, inv) if err != nil { return nil, err } @@ -288,7 +303,7 @@ func modWhyImpl(ctx context.Context, snapshot *snapshot, fh source.FileHandle) ( // extractGoCommandErrors tries to parse errors that come from the go command // and shape them into go.mod diagnostics. // TODO: rename this to 'load errors' -func (s *snapshot) extractGoCommandErrors(ctx context.Context, goCmdError error) []*source.Diagnostic { +func (s *Snapshot) extractGoCommandErrors(ctx context.Context, goCmdError error) []*Diagnostic { if goCmdError == nil { return nil } @@ -297,8 +312,8 @@ func (s *snapshot) extractGoCommandErrors(ctx context.Context, goCmdError error) loc protocol.Location msg string } - diagLocations := map[*source.ParsedModule]locatedErr{} - backupDiagLocations := map[*source.ParsedModule]locatedErr{} + diagLocations := map[*ParsedModule]locatedErr{} + backupDiagLocations := map[*ParsedModule]locatedErr{} // If moduleErrs is non-nil, go command errors are scoped to specific // modules. @@ -306,7 +321,7 @@ func (s *snapshot) extractGoCommandErrors(ctx context.Context, goCmdError error) _ = errors.As(goCmdError, &moduleErrs) // Match the error against all the mod files in the workspace. 
- for _, uri := range s.ModFiles() { + for _, uri := range s.View().ModFiles() { fh, err := s.ReadFile(ctx, uri) if err != nil { event.Error(ctx, "getting modfile for Go command error", err) @@ -335,7 +350,7 @@ func (s *snapshot) extractGoCommandErrors(ctx context.Context, goCmdError error) // file/position information, so don't even try to find it. continue } - loc, found, err := s.matchErrorToModule(ctx, pm, msg) + loc, found, err := s.matchErrorToModule(pm, msg) if err != nil { event.Error(ctx, "matching error to module", err) continue @@ -357,7 +372,7 @@ func (s *snapshot) extractGoCommandErrors(ctx context.Context, goCmdError error) diagLocations = backupDiagLocations } - var srcErrs []*source.Diagnostic + var srcErrs []*Diagnostic for pm, le := range diagLocations { diag, err := s.goCommandDiagnostic(pm, le.loc, le.msg) if err != nil { @@ -380,7 +395,7 @@ var moduleVersionInErrorRe = regexp.MustCompile(`[:\s]([+-._~0-9A-Za-z]+)@([+-._ // // It returns the location of a reference to the one of the modules and true // if one exists. If none is found it returns a fallback location and false. -func (s *snapshot) matchErrorToModule(ctx context.Context, pm *source.ParsedModule, goCmdError string) (protocol.Location, bool, error) { +func (s *Snapshot) matchErrorToModule(pm *ParsedModule, goCmdError string) (protocol.Location, bool, error) { var reference *modfile.Line matches := moduleVersionInErrorRe.FindAllStringSubmatch(goCmdError, -1) @@ -413,7 +428,7 @@ func (s *snapshot) matchErrorToModule(ctx context.Context, pm *source.ParsedModu } // goCommandDiagnostic creates a diagnostic for a given go command error. 
-func (s *snapshot) goCommandDiagnostic(pm *source.ParsedModule, loc protocol.Location, goCmdError string) (*source.Diagnostic, error) { +func (s *Snapshot) goCommandDiagnostic(pm *ParsedModule, loc protocol.Location, goCmdError string) (*Diagnostic, error) { matches := moduleVersionInErrorRe.FindAllStringSubmatch(goCmdError, -1) var innermost *module.Version for i := len(matches) - 1; i >= 0; i-- { @@ -427,25 +442,23 @@ func (s *snapshot) goCommandDiagnostic(pm *source.ParsedModule, loc protocol.Loc switch { case strings.Contains(goCmdError, "inconsistent vendoring"): - cmd, err := command.NewVendorCommand("Run go mod vendor", command.URIArg{URI: protocol.URIFromSpanURI(pm.URI)}) + cmd, err := command.NewVendorCommand("Run go mod vendor", command.URIArg{URI: pm.URI}) if err != nil { return nil, err } - return &source.Diagnostic{ + return &Diagnostic{ URI: pm.URI, Range: loc.Range, Severity: protocol.SeverityError, - Source: source.ListError, + Source: ListError, Message: `Inconsistent vendoring detected. Please re-run "go mod vendor". See https://github.com/golang/go/issues/39164 for more detail on this issue.`, - SuggestedFixes: []source.SuggestedFix{source.SuggestedFixFromCommand(cmd, protocol.QuickFix)}, + SuggestedFixes: []SuggestedFix{SuggestedFixFromCommand(cmd, protocol.QuickFix)}, }, nil case strings.Contains(goCmdError, "updates to go.sum needed"), strings.Contains(goCmdError, "missing go.sum entry"): var args []protocol.DocumentURI - for _, uri := range s.ModFiles() { - args = append(args, protocol.URIFromSpanURI(uri)) - } + args = append(args, s.View().ModFiles()...) tidyCmd, err := command.NewTidyCommand("Run go mod tidy", command.URIArgs{URIs: args}) if err != nil { return nil, err @@ -458,41 +471,41 @@ See https://github.com/golang/go/issues/39164 for more detail on this issue.`, if innermost != nil { msg = fmt.Sprintf("go.sum is out of sync with go.mod: entry for %v is missing. 
Please updating it by applying the quick fix.", innermost) } - return &source.Diagnostic{ + return &Diagnostic{ URI: pm.URI, Range: loc.Range, Severity: protocol.SeverityError, - Source: source.ListError, + Source: ListError, Message: msg, - SuggestedFixes: []source.SuggestedFix{ - source.SuggestedFixFromCommand(tidyCmd, protocol.QuickFix), - source.SuggestedFixFromCommand(updateCmd, protocol.QuickFix), + SuggestedFixes: []SuggestedFix{ + SuggestedFixFromCommand(tidyCmd, protocol.QuickFix), + SuggestedFixFromCommand(updateCmd, protocol.QuickFix), }, }, nil case strings.Contains(goCmdError, "disabled by GOPROXY=off") && innermost != nil: title := fmt.Sprintf("Download %v@%v", innermost.Path, innermost.Version) cmd, err := command.NewAddDependencyCommand(title, command.DependencyArgs{ - URI: protocol.URIFromSpanURI(pm.URI), + URI: pm.URI, AddRequire: false, GoCmdArgs: []string{fmt.Sprintf("%v@%v", innermost.Path, innermost.Version)}, }) if err != nil { return nil, err } - return &source.Diagnostic{ + return &Diagnostic{ URI: pm.URI, Range: loc.Range, Severity: protocol.SeverityError, Message: fmt.Sprintf("%v@%v has not been downloaded", innermost.Path, innermost.Version), - Source: source.ListError, - SuggestedFixes: []source.SuggestedFix{source.SuggestedFixFromCommand(cmd, protocol.QuickFix)}, + Source: ListError, + SuggestedFixes: []SuggestedFix{SuggestedFixFromCommand(cmd, protocol.QuickFix)}, }, nil default: - return &source.Diagnostic{ + return &Diagnostic{ URI: pm.URI, Range: loc.Range, Severity: protocol.SeverityError, - Source: source.ListError, + Source: ListError, Message: goCmdError, }, nil } diff --git a/gopls/internal/lsp/cache/mod_tidy.go b/gopls/internal/cache/mod_tidy.go similarity index 79% rename from gopls/internal/lsp/cache/mod_tidy.go rename to gopls/internal/cache/mod_tidy.go index b806edb7499..79867855e0c 100644 --- a/gopls/internal/lsp/cache/mod_tidy.go +++ b/gopls/internal/cache/mod_tidy.go @@ -6,6 +6,7 @@ package cache import ( "context" + 
"errors" "fmt" "go/ast" "go/token" @@ -15,19 +16,31 @@ import ( "strings" "golang.org/x/mod/modfile" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/event/tag" "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/memoize" ) +// This error is sought by mod diagnostics. +var ErrNoModOnDisk = errors.New("go.mod file is not on disk") + +// A TidiedModule contains the results of running `go mod tidy` on a module. +type TidiedModule struct { + // Diagnostics representing changes made by `go mod tidy`. + Diagnostics []*Diagnostic + // The bytes of the go.mod file after it was tidied. + TidiedContent []byte +} + // ModTidy returns the go.mod file that would be obtained by running // "go mod tidy". Concurrent requests are combined into a single command. 
-func (s *snapshot) ModTidy(ctx context.Context, pm *source.ParsedModule) (*source.TidiedModule, error) { +func (s *Snapshot) ModTidy(ctx context.Context, pm *ParsedModule) (*TidiedModule, error) { ctx, done := event.Start(ctx, "cache.snapshot.ModTidy") defer done() @@ -41,7 +54,7 @@ func (s *snapshot) ModTidy(ctx context.Context, pm *source.ParsedModule) (*sourc s.mu.Unlock() type modTidyResult struct { - tidied *source.TidiedModule + tidied *TidiedModule err error } @@ -54,27 +67,18 @@ func (s *snapshot) ModTidy(ctx context.Context, pm *source.ParsedModule) (*sourc if err != nil { return nil, err } - if _, ok := fh.(*Overlay); ok { - if info, _ := os.Stat(uri.Filename()); info == nil { - return nil, source.ErrNoModOnDisk + if _, ok := fh.(*overlay); ok { + if info, _ := os.Stat(uri.Path()); info == nil { + return nil, ErrNoModOnDisk } } - if criticalErr := s.CriticalError(ctx); criticalErr != nil { - return &source.TidiedModule{ - Diagnostics: criticalErr.Diagnostics, - }, nil - } - if ctx.Err() != nil { // must check ctx after GetCriticalError - return nil, ctx.Err() - } - if err := s.awaitLoaded(ctx); err != nil { return nil, err } handle := memoize.NewPromise("modTidy", func(ctx context.Context, arg interface{}) interface{} { - tidied, err := modTidyImpl(ctx, arg.(*snapshot), uri.Filename(), pm) + tidied, err := modTidyImpl(ctx, arg.(*Snapshot), uri.Path(), pm) return modTidyResult{tidied, err} }) @@ -94,7 +98,7 @@ func (s *snapshot) ModTidy(ctx context.Context, pm *source.ParsedModule) (*sourc } // modTidyImpl runs "go mod tidy" on a go.mod file. 
-func modTidyImpl(ctx context.Context, snapshot *snapshot, filename string, pm *source.ParsedModule) (*source.TidiedModule, error) { +func modTidyImpl(ctx context.Context, snapshot *Snapshot, filename string, pm *ParsedModule) (*TidiedModule, error) { ctx, done := event.Start(ctx, "cache.ModTidy", tag.URI.Of(filename)) defer done() @@ -104,7 +108,7 @@ func modTidyImpl(ctx context.Context, snapshot *snapshot, filename string, pm *s WorkingDir: filepath.Dir(filename), } // TODO(adonovan): ensure that unsaved overlays are passed through to 'go'. - tmpURI, inv, cleanup, err := snapshot.goCommandInvocation(ctx, source.WriteTemporaryModFile, inv) + tmpURI, inv, cleanup, err := snapshot.goCommandInvocation(ctx, WriteTemporaryModFile, inv) if err != nil { return nil, err } @@ -117,11 +121,11 @@ func modTidyImpl(ctx context.Context, snapshot *snapshot, filename string, pm *s // Go directly to disk to get the temporary mod file, // since it is always on disk. - tempContents, err := os.ReadFile(tmpURI.Filename()) + tempContents, err := os.ReadFile(tmpURI.Path()) if err != nil { return nil, err } - ideal, err := modfile.Parse(tmpURI.Filename(), tempContents, nil) + ideal, err := modfile.Parse(tmpURI.Path(), tempContents, nil) if err != nil { // We do not need to worry about the temporary file's parse errors // since it has been "tidied". @@ -135,7 +139,7 @@ func modTidyImpl(ctx context.Context, snapshot *snapshot, filename string, pm *s return nil, err } - return &source.TidiedModule{ + return &TidiedModule{ Diagnostics: diagnostics, TidiedContent: tempContents, }, nil @@ -144,7 +148,7 @@ func modTidyImpl(ctx context.Context, snapshot *snapshot, filename string, pm *s // modTidyDiagnostics computes the differences between the original and tidied // go.mod files to produce diagnostic and suggested fixes. Some diagnostics // may appear on the Go files that import packages from missing modules. 
-func modTidyDiagnostics(ctx context.Context, snapshot *snapshot, pm *source.ParsedModule, ideal *modfile.File) (diagnostics []*source.Diagnostic, err error) { +func modTidyDiagnostics(ctx context.Context, snapshot *Snapshot, pm *ParsedModule, ideal *modfile.File) (diagnostics []*Diagnostic, err error) { // First, determine which modules are unused and which are missing from the // original go.mod file. var ( @@ -168,7 +172,7 @@ func modTidyDiagnostics(ctx context.Context, snapshot *snapshot, pm *source.Pars for _, req := range wrongDirectness { // Handle dependencies that are incorrectly labeled indirect and // vice versa. - srcDiag, err := directnessDiagnostic(pm.Mapper, req, snapshot.Options().ComputeEdits) + srcDiag, err := directnessDiagnostic(pm.Mapper, req) if err != nil { // We're probably in a bad state if we can't compute a // directnessDiagnostic, but try to keep going so as to not suppress @@ -206,9 +210,9 @@ func modTidyDiagnostics(ctx context.Context, snapshot *snapshot, pm *source.Pars return diagnostics, nil } -func missingModuleDiagnostics(ctx context.Context, snapshot *snapshot, pm *source.ParsedModule, ideal *modfile.File, missing map[string]*modfile.Require) ([]*source.Diagnostic, error) { - missingModuleFixes := map[*modfile.Require][]source.SuggestedFix{} - var diagnostics []*source.Diagnostic +func missingModuleDiagnostics(ctx context.Context, snapshot *Snapshot, pm *ParsedModule, ideal *modfile.File, missing map[string]*modfile.Require) ([]*Diagnostic, error) { + missingModuleFixes := map[*modfile.Require][]SuggestedFix{} + var diagnostics []*Diagnostic for _, req := range missing { srcDiag, err := missingModuleDiagnostic(pm, req) if err != nil { @@ -225,16 +229,16 @@ func missingModuleDiagnostics(ctx context.Context, snapshot *snapshot, pm *sourc return nil, err } // TODO(adonovan): opt: opportunities for parallelism abound. - for _, m := range metas { + for _, mp := range metas { // Read both lists of files of this package. 
// // Parallelism is not necessary here as the files will have already been // pre-read at load time. - goFiles, err := readFiles(ctx, snapshot, m.GoFiles) + goFiles, err := readFiles(ctx, snapshot, mp.GoFiles) if err != nil { return nil, err } - compiledGoFiles, err := readFiles(ctx, snapshot, m.CompiledGoFiles) + compiledGoFiles, err := readFiles(ctx, snapshot, mp.CompiledGoFiles) if err != nil { return nil, err } @@ -278,7 +282,7 @@ func missingModuleDiagnostics(ctx context.Context, snapshot *snapshot, pm *sourc continue } for _, goFile := range compiledGoFiles { - pgf, err := snapshot.ParseGo(ctx, goFile, source.ParseHeader) + pgf, err := snapshot.ParseGo(ctx, goFile, parsego.Header) if err != nil { continue } @@ -318,34 +322,34 @@ func missingModuleDiagnostics(ctx context.Context, snapshot *snapshot, pm *sourc return diagnostics, nil } -// unusedDiagnostic returns a source.Diagnostic for an unused require. -func unusedDiagnostic(m *protocol.Mapper, req *modfile.Require, onlyDiagnostic bool) (*source.Diagnostic, error) { +// unusedDiagnostic returns a Diagnostic for an unused require. 
+func unusedDiagnostic(m *protocol.Mapper, req *modfile.Require, onlyDiagnostic bool) (*Diagnostic, error) { rng, err := m.OffsetRange(req.Syntax.Start.Byte, req.Syntax.End.Byte) if err != nil { return nil, err } title := fmt.Sprintf("Remove dependency: %s", req.Mod.Path) cmd, err := command.NewRemoveDependencyCommand(title, command.RemoveDependencyArgs{ - URI: protocol.URIFromSpanURI(m.URI), + URI: m.URI, OnlyDiagnostic: onlyDiagnostic, ModulePath: req.Mod.Path, }) if err != nil { return nil, err } - return &source.Diagnostic{ + return &Diagnostic{ URI: m.URI, Range: rng, Severity: protocol.SeverityWarning, - Source: source.ModTidyError, + Source: ModTidyError, Message: fmt.Sprintf("%s is not used in this module", req.Mod.Path), - SuggestedFixes: []source.SuggestedFix{source.SuggestedFixFromCommand(cmd, protocol.QuickFix)}, + SuggestedFixes: []SuggestedFix{SuggestedFixFromCommand(cmd, protocol.QuickFix)}, }, nil } // directnessDiagnostic extracts errors when a dependency is labeled indirect when // it should be direct and vice versa. -func directnessDiagnostic(m *protocol.Mapper, req *modfile.Require, computeEdits source.DiffFunction) (*source.Diagnostic, error) { +func directnessDiagnostic(m *protocol.Mapper, req *modfile.Require) (*Diagnostic, error) { rng, err := m.OffsetRange(req.Syntax.Start.Byte, req.Syntax.End.Byte) if err != nil { return nil, err @@ -366,19 +370,19 @@ func directnessDiagnostic(m *protocol.Mapper, req *modfile.Require, computeEdits } } // If the dependency should be indirect, add the // indirect. 
- edits, err := switchDirectness(req, m, computeEdits) + edits, err := switchDirectness(req, m) if err != nil { return nil, err } - return &source.Diagnostic{ + return &Diagnostic{ URI: m.URI, Range: rng, Severity: protocol.SeverityWarning, - Source: source.ModTidyError, + Source: ModTidyError, Message: fmt.Sprintf("%s should be %s", req.Mod.Path, direction), - SuggestedFixes: []source.SuggestedFix{{ + SuggestedFixes: []SuggestedFix{{ Title: fmt.Sprintf("Change %s to %s", req.Mod.Path, direction), - Edits: map[span.URI][]protocol.TextEdit{ + Edits: map[protocol.DocumentURI][]protocol.TextEdit{ m.URI: edits, }, ActionKind: protocol.QuickFix, @@ -386,7 +390,7 @@ func directnessDiagnostic(m *protocol.Mapper, req *modfile.Require, computeEdits }, nil } -func missingModuleDiagnostic(pm *source.ParsedModule, req *modfile.Require) (*source.Diagnostic, error) { +func missingModuleDiagnostic(pm *ParsedModule, req *modfile.Require) (*Diagnostic, error) { var rng protocol.Range // Default to the start of the file if there is no module declaration. 
if pm.File != nil && pm.File.Module != nil && pm.File.Module.Syntax != nil { @@ -399,26 +403,26 @@ func missingModuleDiagnostic(pm *source.ParsedModule, req *modfile.Require) (*so } title := fmt.Sprintf("Add %s to your go.mod file", req.Mod.Path) cmd, err := command.NewAddDependencyCommand(title, command.DependencyArgs{ - URI: protocol.URIFromSpanURI(pm.Mapper.URI), + URI: pm.Mapper.URI, AddRequire: !req.Indirect, GoCmdArgs: []string{req.Mod.Path + "@" + req.Mod.Version}, }) if err != nil { return nil, err } - return &source.Diagnostic{ + return &Diagnostic{ URI: pm.Mapper.URI, Range: rng, Severity: protocol.SeverityError, - Source: source.ModTidyError, + Source: ModTidyError, Message: fmt.Sprintf("%s is not in your go.mod file", req.Mod.Path), - SuggestedFixes: []source.SuggestedFix{source.SuggestedFixFromCommand(cmd, protocol.QuickFix)}, + SuggestedFixes: []SuggestedFix{SuggestedFixFromCommand(cmd, protocol.QuickFix)}, }, nil } // switchDirectness gets the edits needed to change an indirect dependency to // direct and vice versa. -func switchDirectness(req *modfile.Require, m *protocol.Mapper, computeEdits source.DiffFunction) ([]protocol.TextEdit, error) { +func switchDirectness(req *modfile.Require, m *protocol.Mapper) ([]protocol.TextEdit, error) { // We need a private copy of the parsed go.mod file, since we're going to // modify it. copied, err := modfile.Parse("", m.Content, nil) @@ -452,13 +456,13 @@ func switchDirectness(req *modfile.Require, m *protocol.Mapper, computeEdits sou return nil, err } // Calculate the edits to be made due to the change. - edits := computeEdits(string(m.Content), string(newContent)) - return source.ToProtocolEdits(m, edits) + edits := diff.Bytes(m.Content, newContent) + return protocol.EditsFromDiffEdits(m, edits) } // missingModuleForImport creates an error for a given import path that comes // from a missing module. 
-func missingModuleForImport(pgf *source.ParsedGoFile, imp *ast.ImportSpec, req *modfile.Require, fixes []source.SuggestedFix) (*source.Diagnostic, error) { +func missingModuleForImport(pgf *parsego.File, imp *ast.ImportSpec, req *modfile.Require, fixes []SuggestedFix) (*Diagnostic, error) { if req.Syntax == nil { return nil, fmt.Errorf("no syntax for %v", req) } @@ -466,11 +470,11 @@ func missingModuleForImport(pgf *source.ParsedGoFile, imp *ast.ImportSpec, req * if err != nil { return nil, err } - return &source.Diagnostic{ + return &Diagnostic{ URI: pgf.URI, Range: rng, Severity: protocol.SeverityError, - Source: source.ModTidyError, + Source: ModTidyError, Message: fmt.Sprintf("%s is not in your go.mod file", req.Mod.Path), SuggestedFixes: fixes, }, nil @@ -483,9 +487,9 @@ func missingModuleForImport(pgf *source.ParsedGoFile, imp *ast.ImportSpec, req * // (We can't simply use Metadata.Imports because it is based on // CompiledGoFiles, after cgo processing.) // -// TODO(rfindley): this should key off source.ImportPath. -func parseImports(ctx context.Context, s *snapshot, files []source.FileHandle) (map[string]bool, error) { - pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), source.ParseHeader, false, files...) +// TODO(rfindley): this should key off ImportPath. +func parseImports(ctx context.Context, s *Snapshot, files []file.Handle) (map[string]bool, error) { + pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), parsego.Header, false, files...) if err != nil { // e.g. context cancellation return nil, err } diff --git a/gopls/internal/cache/mod_vuln.go b/gopls/internal/cache/mod_vuln.go new file mode 100644 index 00000000000..a92f5b5abe1 --- /dev/null +++ b/gopls/internal/cache/mod_vuln.go @@ -0,0 +1,389 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cache + +import ( + "context" + "fmt" + "io" + "os" + "sort" + "strings" + "sync" + + "golang.org/x/mod/semver" + "golang.org/x/sync/errgroup" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/vulncheck" + "golang.org/x/tools/gopls/internal/vulncheck/govulncheck" + "golang.org/x/tools/gopls/internal/vulncheck/osv" + isem "golang.org/x/tools/gopls/internal/vulncheck/semver" + "golang.org/x/tools/internal/memoize" + "golang.org/x/vuln/scan" +) + +// ModVuln returns import vulnerability analysis for the given go.mod URI. +// Concurrent requests are combined into a single command. +func (s *Snapshot) ModVuln(ctx context.Context, modURI protocol.DocumentURI) (*vulncheck.Result, error) { + s.mu.Lock() + entry, hit := s.modVulnHandles.Get(modURI) + s.mu.Unlock() + + type modVuln struct { + result *vulncheck.Result + err error + } + + // Cache miss? + if !hit { + handle := memoize.NewPromise("modVuln", func(ctx context.Context, arg interface{}) interface{} { + result, err := modVulnImpl(ctx, arg.(*Snapshot)) + return modVuln{result, err} + }) + + entry = handle + s.mu.Lock() + s.modVulnHandles.Set(modURI, entry, nil) + s.mu.Unlock() + } + + // Await result. + v, err := s.awaitPromise(ctx, entry) + if err != nil { + return nil, err + } + res := v.(modVuln) + return res.result, res.err +} + +// GoVersionForVulnTest is an internal environment variable used in gopls +// testing to examine govulncheck behavior with a go version different +// than what `go version` returns in the system. +const GoVersionForVulnTest = "_GOPLS_TEST_VULNCHECK_GOVERSION" + +// modVulnImpl queries the vulndb and reports which vulnerabilities +// apply to this snapshot. The result contains a set of packages, +// grouped by vuln ID and by module. This implements the "import-based" +// vulnerability report on go.mod files. 
+func modVulnImpl(ctx context.Context, snapshot *Snapshot) (*vulncheck.Result, error) { + // TODO(hyangah): can we let 'govulncheck' take a package list + // used in the workspace and implement this function? + + // We want to report the intersection of vulnerable packages in the vulndb + // and packages transitively imported by this module ('go list -deps all'). + // We use snapshot.AllMetadata to retrieve the list of packages + // as an approximation. + // + // TODO(hyangah): snapshot.AllMetadata is a superset of + // `go list all` - e.g. when the workspace has multiple main modules + // (multiple go.mod files), that can include packages that are not + // used by this module. Vulncheck behavior with go.work is not well + // defined. Figure out the meaning, and if we decide to present + // the result as if each module is analyzed independently, make + // gopls track a separate build list for each module and use that + // information instead of snapshot.AllMetadata. + allMeta, err := snapshot.AllMetadata(ctx) + if err != nil { + return nil, err + } + + // TODO(hyangah): handle vulnerabilities in the standard library. + + // Group packages by modules since vuln db is keyed by module. + packagesByModule := map[metadata.PackagePath][]*metadata.Package{} + for _, mp := range allMeta { + modulePath := metadata.PackagePath(osv.GoStdModulePath) + if mi := mp.Module; mi != nil { + modulePath = metadata.PackagePath(mi.Path) + } + packagesByModule[modulePath] = append(packagesByModule[modulePath], mp) + } + + var ( + mu sync.Mutex + // Keys are osv.Entry.ID + osvs = map[string]*osv.Entry{} + findings []*govulncheck.Finding + ) + + goVersion := snapshot.Options().Env[GoVersionForVulnTest] + if goVersion == "" { + goVersion = snapshot.GoVersionString() + } + + stdlibModule := &packages.Module{ + Path: osv.GoStdModulePath, + Version: goVersion, + } + + // GOVULNDB may point the test db URI. 
+ db := GetEnv(snapshot, "GOVULNDB") + + var group errgroup.Group + group.SetLimit(10) // limit govulncheck api runs + for _, mps := range packagesByModule { + mps := mps + group.Go(func() error { + effectiveModule := stdlibModule + if m := mps[0].Module; m != nil { + effectiveModule = m + } + for effectiveModule.Replace != nil { + effectiveModule = effectiveModule.Replace + } + ver := effectiveModule.Version + if ver == "" || !isem.Valid(ver) { + // skip invalid version strings. the underlying scan api is strict. + return nil + } + + // TODO(hyangah): batch these requests and add in-memory cache for efficiency. + vulns, err := osvsByModule(ctx, db, effectiveModule.Path+"@"+ver) + if err != nil { + return err + } + if len(vulns) == 0 { // No known vulnerability. + return nil + } + + // set of packages in this module known to gopls. + // This will be lazily initialized when we need it. + var knownPkgs map[metadata.PackagePath]bool + + // Report vulnerabilities that affect packages of this module. + for _, entry := range vulns { + var vulnerablePkgs []*govulncheck.Finding + fixed := fixedVersion(effectiveModule.Path, entry.Affected) + + for _, a := range entry.Affected { + if a.Module.Ecosystem != osv.GoEcosystem || a.Module.Path != effectiveModule.Path { + continue + } + for _, imp := range a.EcosystemSpecific.Packages { + if knownPkgs == nil { + knownPkgs = toPackagePathSet(mps) + } + if knownPkgs[metadata.PackagePath(imp.Path)] { + vulnerablePkgs = append(vulnerablePkgs, &govulncheck.Finding{ + OSV: entry.ID, + FixedVersion: fixed, + Trace: []*govulncheck.Frame{ + { + Module: effectiveModule.Path, + Version: effectiveModule.Version, + Package: imp.Path, + }, + }, + }) + } + } + } + if len(vulnerablePkgs) == 0 { + continue + } + mu.Lock() + osvs[entry.ID] = entry + findings = append(findings, vulnerablePkgs...) + mu.Unlock() + } + return nil + }) + } + if err := group.Wait(); err != nil { + return nil, err + } + + // Sort so the results are deterministic. 
+ sort.Slice(findings, func(i, j int) bool { + x, y := findings[i], findings[j] + if x.OSV != y.OSV { + return x.OSV < y.OSV + } + return x.Trace[0].Package < y.Trace[0].Package + }) + ret := &vulncheck.Result{ + Entries: osvs, + Findings: findings, + Mode: vulncheck.ModeImports, + } + return ret, nil +} + +// TODO(rfindley): this function was exposed during refactoring. Reconsider it. +func GetEnv(snapshot *Snapshot, key string) string { + val, ok := snapshot.Options().Env[key] + if ok { + return val + } + return os.Getenv(key) +} + +// toPackagePathSet transforms the metadata to a set of package paths. +func toPackagePathSet(mds []*metadata.Package) map[metadata.PackagePath]bool { + pkgPaths := make(map[metadata.PackagePath]bool, len(mds)) + for _, md := range mds { + pkgPaths[md.PkgPath] = true + } + return pkgPaths +} + +func fixedVersion(modulePath string, affected []osv.Affected) string { + fixed := latestFixed(modulePath, affected) + if fixed != "" { + fixed = versionString(modulePath, fixed) + } + return fixed +} + +// latestFixed returns the latest fixed version in the list of affected ranges, +// or the empty string if there are no fixed versions. +func latestFixed(modulePath string, as []osv.Affected) string { + v := "" + for _, a := range as { + if a.Module.Path != modulePath { + continue + } + for _, r := range a.Ranges { + if r.Type == osv.RangeTypeSemver { + for _, e := range r.Events { + if e.Fixed != "" && (v == "" || + semver.Compare(isem.CanonicalizeSemverPrefix(e.Fixed), isem.CanonicalizeSemverPrefix(v)) > 0) { + v = e.Fixed + } + } + } + } + } + return v +} + +// versionString prepends a version string prefix (`v` or `go` +// depending on the modulePath) to the given semver-style version string. +func versionString(modulePath, version string) string { + if version == "" { + return "" + } + v := "v" + version + // These are internal Go module paths used by the vuln DB + // when listing vulns in standard library and the go command. 
+ if modulePath == "stdlib" || modulePath == "toolchain" { + return semverToGoTag(v) + } + return v +} + +// semverToGoTag returns the Go standard library repository tag corresponding +// to semver, a version string without the initial "v". +// Go tags differ from standard semantic versions in a few ways, +// such as beginning with "go" instead of "v". +func semverToGoTag(v string) string { + if strings.HasPrefix(v, "v0.0.0") { + return "master" + } + // Special case: v1.0.0 => go1. + if v == "v1.0.0" { + return "go1" + } + if !semver.IsValid(v) { + return fmt.Sprintf("", v) + } + goVersion := semver.Canonical(v) + prerelease := semver.Prerelease(goVersion) + versionWithoutPrerelease := strings.TrimSuffix(goVersion, prerelease) + patch := strings.TrimPrefix(versionWithoutPrerelease, semver.MajorMinor(goVersion)+".") + if patch == "0" { + versionWithoutPrerelease = strings.TrimSuffix(versionWithoutPrerelease, ".0") + } + goVersion = fmt.Sprintf("go%s", strings.TrimPrefix(versionWithoutPrerelease, "v")) + if prerelease != "" { + // Go prereleases look like "beta1" instead of "beta.1". + // "beta1" is bad for sorting (since beta10 comes before beta9), so + // require the dot form. + i := finalDigitsIndex(prerelease) + if i >= 1 { + if prerelease[i-1] != '.' { + return fmt.Sprintf("", v) + } + // Remove the dot. + prerelease = prerelease[:i-1] + prerelease[i:] + } + goVersion += strings.TrimPrefix(prerelease, "-") + } + return goVersion +} + +// finalDigitsIndex returns the index of the first digit in the sequence of digits ending s. +// If s doesn't end in digits, it returns -1. +func finalDigitsIndex(s string) int { + // Assume ASCII (since the semver package does anyway). + var i int + for i = len(s) - 1; i >= 0; i-- { + if s[i] < '0' || s[i] > '9' { + break + } + } + if i == len(s)-1 { + return -1 + } + return i + 1 +} + +// osvsByModule runs a govulncheck database query. 
+func osvsByModule(ctx context.Context, db, moduleVersion string) ([]*osv.Entry, error) { + var args []string + args = append(args, "-mode=query", "-json") + if db != "" { + args = append(args, "-db="+db) + } + args = append(args, moduleVersion) + + ir, iw := io.Pipe() + handler := &osvReader{} + + var g errgroup.Group + g.Go(func() error { + defer iw.Close() // scan API doesn't close cmd.Stderr/cmd.Stdout. + cmd := scan.Command(ctx, args...) + cmd.Stdout = iw + // TODO(hakim): Do we need to set cmd.Env = getEnvSlices(), + // or is the process environment good enough? + if err := cmd.Start(); err != nil { + return err + } + return cmd.Wait() + }) + g.Go(func() error { + return govulncheck.HandleJSON(ir, handler) + }) + + if err := g.Wait(); err != nil { + return nil, err + } + return handler.entry, nil +} + +// osvReader implements govulncheck.Handler. +type osvReader struct { + entry []*osv.Entry +} + +func (h *osvReader) OSV(entry *osv.Entry) error { + h.entry = append(h.entry, entry) + return nil +} + +func (h *osvReader) Config(config *govulncheck.Config) error { + return nil +} + +func (h *osvReader) Finding(finding *govulncheck.Finding) error { + return nil +} + +func (h *osvReader) Progress(progress *govulncheck.Progress) error { + return nil +} diff --git a/gopls/internal/lsp/cache/os_darwin.go b/gopls/internal/cache/os_darwin.go similarity index 89% rename from gopls/internal/lsp/cache/os_darwin.go rename to gopls/internal/cache/os_darwin.go index 2c88be1fcbe..4c2a7236dcc 100644 --- a/gopls/internal/lsp/cache/os_darwin.go +++ b/gopls/internal/cache/os_darwin.go @@ -15,10 +15,10 @@ import ( ) func init() { - checkPathCase = darwinCheckPathCase + checkPathValid = darwinCheckPathValid } -func darwinCheckPathCase(path string) error { +func darwinCheckPathValid(path string) error { // Darwin provides fcntl(F_GETPATH) to get a path for an arbitrary FD. // Conveniently for our purposes, it gives the canonical case back. 
But // there's no guarantee that it will follow the same route through the @@ -52,7 +52,7 @@ func darwinCheckPathCase(path string) error { break } if g != w { - return fmt.Errorf("case mismatch in path %q: component %q is listed by macOS as %q", path, g, w) + return fmt.Errorf("invalid path %q: component %q is listed by macOS as %q", path, g, w) } } return nil diff --git a/gopls/internal/lsp/cache/os_windows.go b/gopls/internal/cache/os_windows.go similarity index 88% rename from gopls/internal/lsp/cache/os_windows.go rename to gopls/internal/cache/os_windows.go index 2feded84d7a..32fb1f40f49 100644 --- a/gopls/internal/lsp/cache/os_windows.go +++ b/gopls/internal/cache/os_windows.go @@ -11,10 +11,10 @@ import ( ) func init() { - checkPathCase = windowsCheckPathCase + checkPathValid = windowsCheckPathValid } -func windowsCheckPathCase(path string) error { +func windowsCheckPathValid(path string) error { // Back in the day, Windows used to have short and long filenames, and // it still supports those APIs. GetLongPathName gets the real case for a // path, so we can use it here. Inspired by @@ -49,7 +49,7 @@ func windowsCheckPathCase(path string) error { } for got, want := path, longstr; !isRoot(got) && !isRoot(want); got, want = filepath.Dir(got), filepath.Dir(want) { if g, w := filepath.Base(got), filepath.Base(want); g != w { - return fmt.Errorf("case mismatch in path %q: component %q is listed by Windows as %q", path, g, w) + return fmt.Errorf("invalid path %q: component %q is listed by Windows as %q", path, g, w) } } return nil diff --git a/gopls/internal/cache/package.go b/gopls/internal/cache/package.go new file mode 100644 index 00000000000..e2555c8ed98 --- /dev/null +++ b/gopls/internal/cache/package.go @@ -0,0 +1,182 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
// Convenient aliases for very heavily used types.
type (
	PackageID   = metadata.PackageID
	PackagePath = metadata.PackagePath
	PackageName = metadata.PackageName
	ImportPath  = metadata.ImportPath
)

// A Package is the union of package metadata and type checking results.
//
// TODO(rfindley): for now, we do not persist the post-processing of
// loadDiagnostics, because the value of the snapshot.packages map is just the
// package handle. Fix this.
type Package struct {
	metadata        *metadata.Package // result of the initial go/packages load
	loadDiagnostics []*Diagnostic     // diagnostics derived from loading, not type checking
	pkg             *syntaxPackage    // parse trees and type information
}

// syntaxPackage contains parse trees and type information for a package.
type syntaxPackage struct {
	// -- identifiers --
	id PackageID

	// -- outputs --
	fset            *token.FileSet // for now, same as the snapshot's FileSet
	goFiles         []*parsego.File
	compiledGoFiles []*parsego.File
	diagnostics     []*Diagnostic
	parseErrors     []scanner.ErrorList
	typeErrors      []types.Error
	types           *types.Package
	typesInfo       *types.Info
	typesSizes      types.Sizes
	importMap       map[PackagePath]*types.Package

	xrefsOnce sync.Once
	_xrefs    []byte // only used by the xrefs method

	methodsetsOnce sync.Once
	_methodsets    *methodsets.Index // only used by the methodsets method
}

// xrefs lazily computes and caches the serialized cross-reference index
// for this package. Safe for concurrent use.
func (p *syntaxPackage) xrefs() []byte {
	p.xrefsOnce.Do(func() {
		p._xrefs = xrefs.Index(p.compiledGoFiles, p.types, p.typesInfo)
	})
	return p._xrefs
}

// methodsets lazily computes and caches the method-set index for this
// package. Safe for concurrent use.
func (p *syntaxPackage) methodsets() *methodsets.Index {
	p.methodsetsOnce.Do(func() {
		p._methodsets = methodsets.NewIndex(p.fset, p.types)
	})
	return p._methodsets
}

// String returns the package's ID.
func (p *Package) String() string { return string(p.metadata.ID) }

// Metadata returns the metadata from which this package was constructed.
func (p *Package) Metadata() *metadata.Package { return p.metadata }

// A loadScope defines a package loading scope for use with go/packages.
//
// TODO(rfindley): move this to load.go.
type loadScope interface {
	aScope()
}

// TODO(rfindley): move to load.go
type (
	fileLoadScope    protocol.DocumentURI // load packages containing a file (including command-line-arguments)
	packageLoadScope string               // load a specific package (the value is its PackageID)
	moduleLoadScope  struct {
		dir        string // dir containing the go.mod file
		modulePath string // parsed module path
	}
	viewLoadScope struct{} // load the workspace
)

// Implement the loadScope interface.
func (fileLoadScope) aScope()    {}
func (packageLoadScope) aScope() {}
func (moduleLoadScope) aScope()  {}
func (viewLoadScope) aScope()    {}

// CompiledGoFiles returns the parsed compiled Go files of this package.
func (p *Package) CompiledGoFiles() []*parsego.File {
	return p.pkg.compiledGoFiles
}

// File returns the parse tree for uri, which must be one of the
// package's (compiled) Go files.
func (p *Package) File(uri protocol.DocumentURI) (*parsego.File, error) {
	return p.pkg.File(uri)
}

// File returns the parse tree for uri, searching compiled files first,
// then uncompiled files; it reports an error if uri is not in the package.
func (pkg *syntaxPackage) File(uri protocol.DocumentURI) (*parsego.File, error) {
	for _, cgf := range pkg.compiledGoFiles {
		if cgf.URI == uri {
			return cgf, nil
		}
	}
	for _, gf := range pkg.goFiles {
		if gf.URI == uri {
			return gf, nil
		}
	}
	return nil, fmt.Errorf("no parsed file for %s in %v", uri, pkg.id)
}

// Syntax returns parsed compiled Go files contained in this package.
func (p *Package) Syntax() []*ast.File {
	var syntax []*ast.File
	for _, pgf := range p.pkg.compiledGoFiles {
		syntax = append(syntax, pgf.File)
	}
	return syntax
}

// FileSet returns the FileSet describing this package's positions.
//
// The returned FileSet is guaranteed to describe all Syntax, but may also
// describe additional files.
func (p *Package) FileSet() *token.FileSet {
	return p.pkg.fset
}

// Types returns the type checked go/types.Package.
func (p *Package) Types() *types.Package {
	return p.pkg.types
}

// TypesInfo returns the go/types.Info annotating the Syntax of this package
// with type information.
//
// All fields in the resulting Info are populated.
func (p *Package) TypesInfo() *types.Info {
	return p.pkg.typesInfo
}

// TypesSizes returns the sizing function used for types in this package.
func (p *Package) TypesSizes() types.Sizes {
	return p.pkg.typesSizes
}

// DependencyTypes returns the type checker's symbol for the specified
// package. It returns nil if path is not among the transitive
// dependencies of p, or if no symbols from that package were
// referenced during the type-checking of p.
func (p *Package) DependencyTypes(path PackagePath) *types.Package {
	return p.pkg.importMap[path]
}

// ParseErrors returns a slice containing all non-empty parse errors produces
// while parsing p.Syntax, or nil if the package contains no parse errors.
func (p *Package) ParseErrors() []scanner.ErrorList {
	return p.pkg.parseErrors
}

// TypeErrors returns the go/types.Errors produced during type checking this
// package, if any.
func (p *Package) TypeErrors() []types.Error {
	return p.pkg.typeErrors
}

// ParseGo parses the file whose contents are provided by fh.
// The resulting tree may have been fixed up.
// If the file is not available, returns nil and an error.
func (s *Snapshot) ParseGo(ctx context.Context, fh file.Handle, mode parser.Mode) (*parsego.File, error) {
	pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), mode, false, fh)
	if err != nil {
		return nil, err
	}
	return pgfs[0], nil
}
+func parseGoImpl(ctx context.Context, fset *token.FileSet, fh file.Handle, mode parser.Mode, purgeFuncBodies bool) (*parsego.File, error) { + ext := filepath.Ext(fh.URI().Path()) + if ext != ".go" && ext != "" { // files generated by cgo have no extension + return nil, fmt.Errorf("cannot parse non-Go file %s", fh.URI()) + } + content, err := fh.Content() + if err != nil { + return nil, err + } + // Check for context cancellation before actually doing the parse. + if ctx.Err() != nil { + return nil, ctx.Err() + } + pgf, _ := parsego.Parse(ctx, fset, fh.URI(), content, mode, purgeFuncBodies) + return pgf, nil +} diff --git a/gopls/internal/lsp/cache/parse_cache.go b/gopls/internal/cache/parse_cache.go similarity index 93% rename from gopls/internal/lsp/cache/parse_cache.go rename to gopls/internal/cache/parse_cache.go index 438cc626981..8586f655d28 100644 --- a/gopls/internal/lsp/cache/parse_cache.go +++ b/gopls/internal/cache/parse_cache.go @@ -17,8 +17,9 @@ import ( "time" "golang.org/x/sync/errgroup" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/internal/memoize" "golang.org/x/tools/internal/tokeninternal" ) @@ -126,15 +127,15 @@ func (c *parseCache) stop() { // parseKey uniquely identifies a parsed Go file. 
type parseKey struct { - uri span.URI + uri protocol.DocumentURI mode parser.Mode purgeFuncBodies bool } type parseCacheEntry struct { key parseKey - hash source.Hash - promise *memoize.Promise // memoize.Promise[*source.ParsedGoFile] + hash file.Hash + promise *memoize.Promise // memoize.Promise[*parsego.File] atime uint64 // clock time of last access, for use in LRU sorting walltime time.Time // actual time of last access, for use in time-based eviction; too coarse for LRU on some systems lruIndex int // owned by the queue implementation @@ -146,7 +147,7 @@ type parseCacheEntry struct { // The resulting slice has an entry for every given file handle, though some // entries may be nil if there was an error reading the file (in which case the // resulting error will be non-nil). -func (c *parseCache) startParse(mode parser.Mode, purgeFuncBodies bool, fhs ...source.FileHandle) ([]*memoize.Promise, error) { +func (c *parseCache) startParse(mode parser.Mode, purgeFuncBodies bool, fhs ...file.Handle) ([]*memoize.Promise, error) { c.mu.Lock() defer c.mu.Unlock() @@ -180,7 +181,7 @@ func (c *parseCache) startParse(mode parser.Mode, purgeFuncBodies bool, fhs ...s } if e, ok := c.m[key]; ok { - if e.hash == fh.FileIdentity().Hash { // cache hit + if e.hash == fh.Identity().Hash { // cache hit e.atime = c.clock e.walltime = walltime heap.Fix(&c.lru, e.lruIndex) @@ -199,7 +200,7 @@ func (c *parseCache) startParse(mode parser.Mode, purgeFuncBodies bool, fhs ...s // inside of parseGoSrc without exceeding the allocated space. 
base, nextBase := c.allocateSpace(2*len(content) + parsePadding) - pgf, fixes1 := ParseGoSrc(ctx, fileSetWithBase(base), uri, content, mode, purgeFuncBodies) + pgf, fixes1 := parsego.Parse(ctx, fileSetWithBase(base), uri, content, mode, purgeFuncBodies) file := pgf.Tok if file.Base()+file.Size()+1 > nextBase { // The parsed file exceeds its allocated space, likely due to multiple @@ -211,7 +212,7 @@ func (c *parseCache) startParse(mode parser.Mode, purgeFuncBodies bool, fhs ...s // there, as parseGoSrc will repeat them. actual := file.Base() + file.Size() - base // actual size consumed, after re-parsing base2, nextBase2 := c.allocateSpace(actual) - pgf2, fixes2 := ParseGoSrc(ctx, fileSetWithBase(base2), uri, content, mode, purgeFuncBodies) + pgf2, fixes2 := parsego.Parse(ctx, fileSetWithBase(base2), uri, content, mode, purgeFuncBodies) // In golang/go#59097 we observed that this panic condition was hit. // One bug was found and fixed, but record more information here in @@ -236,7 +237,7 @@ func (c *parseCache) startParse(mode parser.Mode, purgeFuncBodies bool, fhs ...s // add new entry; entries are gc'ed asynchronously e := &parseCacheEntry{ key: key, - hash: fh.FileIdentity().Hash, + hash: fh.Identity().Hash, promise: promise, atime: c.clock, walltime: walltime, @@ -302,7 +303,7 @@ func (c *parseCache) allocateSpace(size int) (int, int) { return base, c.nextBase } -// parseFiles returns a ParsedGoFile for each file handle in fhs, in the +// parseFiles returns a parsego.File for each file handle in fhs, in the // requested parse mode. // // For parsed files that already exists in the cache, access time will be @@ -316,8 +317,8 @@ func (c *parseCache) allocateSpace(size int) (int, int) { // // If parseFiles returns an error, it still returns a slice, // but with a nil entry for each file that could not be parsed. 
-func (c *parseCache) parseFiles(ctx context.Context, fset *token.FileSet, mode parser.Mode, purgeFuncBodies bool, fhs ...source.FileHandle) ([]*source.ParsedGoFile, error) { - pgfs := make([]*source.ParsedGoFile, len(fhs)) +func (c *parseCache) parseFiles(ctx context.Context, fset *token.FileSet, mode parser.Mode, purgeFuncBodies bool, fhs ...file.Handle) ([]*parsego.File, error) { + pgfs := make([]*parsego.File, len(fhs)) // Temporary fall-back for 32-bit systems, where reservedForParsing is too // small to be viable. We don't actually support 32-bit systems, so this @@ -350,7 +351,7 @@ func (c *parseCache) parseFiles(ctx context.Context, fset *token.FileSet, mode p if err != nil { return err } - pgfs[i] = result.(*source.ParsedGoFile) + pgfs[i] = result.(*parsego.File) return nil }) } diff --git a/gopls/internal/cache/parse_cache_test.go b/gopls/internal/cache/parse_cache_test.go new file mode 100644 index 00000000000..7aefac77c38 --- /dev/null +++ b/gopls/internal/cache/parse_cache_test.go @@ -0,0 +1,234 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cache + +import ( + "context" + "fmt" + "go/token" + "math/bits" + "testing" + "time" + + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" +) + +func skipIfNoParseCache(t *testing.T) { + if bits.UintSize == 32 { + t.Skip("the parse cache is not supported on 32-bit systems") + } +} + +func TestParseCache(t *testing.T) { + skipIfNoParseCache(t) + + ctx := context.Background() + uri := protocol.DocumentURI("file:///myfile") + fh := makeFakeFileHandle(uri, []byte("package p\n\nconst _ = \"foo\"")) + fset := token.NewFileSet() + + cache := newParseCache(0) + pgfs1, err := cache.parseFiles(ctx, fset, parsego.Full, false, fh) + if err != nil { + t.Fatal(err) + } + pgf1 := pgfs1[0] + pgfs2, err := cache.parseFiles(ctx, fset, parsego.Full, false, fh) + pgf2 := pgfs2[0] + if err != nil { + t.Fatal(err) + } + if pgf1 != pgf2 { + t.Errorf("parseFiles(%q): unexpected cache miss on repeated call", uri) + } + + // Fill up the cache with other files, but don't evict the file above. + cache.gcOnce() + files := []file.Handle{fh} + files = append(files, dummyFileHandles(parseCacheMinFiles-1)...) + + pgfs3, err := cache.parseFiles(ctx, fset, parsego.Full, false, files...) + if err != nil { + t.Fatal(err) + } + pgf3 := pgfs3[0] + if pgf3 != pgf1 { + t.Errorf("parseFiles(%q, ...): unexpected cache miss", uri) + } + if pgf3.Tok.Base() != pgf1.Tok.Base() || pgf3.Tok.Size() != pgf1.Tok.Size() { + t.Errorf("parseFiles(%q, ...): result.Tok has base: %d, size: %d, want (%d, %d)", uri, pgf3.Tok.Base(), pgf3.Tok.Size(), pgf1.Tok.Base(), pgf1.Tok.Size()) + } + if tok := fset.File(token.Pos(pgf3.Tok.Base())); tok != pgf3.Tok { + t.Errorf("parseFiles(%q, ...): result.Tok not contained in FileSet", uri) + } + + // Now overwrite the cache, after which we should get new results. 
+ cache.gcOnce() + files = dummyFileHandles(parseCacheMinFiles) + _, err = cache.parseFiles(ctx, fset, parsego.Full, false, files...) + if err != nil { + t.Fatal(err) + } + // force a GC, which should collect the recently parsed files + cache.gcOnce() + pgfs4, err := cache.parseFiles(ctx, fset, parsego.Full, false, fh) + if err != nil { + t.Fatal(err) + } + if pgfs4[0] == pgf1 { + t.Errorf("parseFiles(%q): unexpected cache hit after overwriting cache", uri) + } +} + +func TestParseCache_Reparsing(t *testing.T) { + skipIfNoParseCache(t) + + defer func(padding int) { + parsePadding = padding + }(parsePadding) + parsePadding = 0 + + files := dummyFileHandles(parseCacheMinFiles) + danglingSelector := []byte("package p\nfunc _() {\n\tx.\n}") + files = append(files, makeFakeFileHandle("file:///bad1", danglingSelector)) + files = append(files, makeFakeFileHandle("file:///bad2", danglingSelector)) + + // Parsing should succeed even though we overflow the padding. + cache := newParseCache(0) + _, err := cache.parseFiles(context.Background(), token.NewFileSet(), parsego.Full, false, files...) + if err != nil { + t.Fatal(err) + } +} + +// Re-parsing the first file should not panic. +func TestParseCache_Issue59097(t *testing.T) { + skipIfNoParseCache(t) + + defer func(padding int) { + parsePadding = padding + }(parsePadding) + parsePadding = 0 + + danglingSelector := []byte("package p\nfunc _() {\n\tx.\n}") + files := []file.Handle{makeFakeFileHandle("file:///bad", danglingSelector)} + + // Parsing should succeed even though we overflow the padding. + cache := newParseCache(0) + _, err := cache.parseFiles(context.Background(), token.NewFileSet(), parsego.Full, false, files...) 
+ if err != nil { + t.Fatal(err) + } +} + +func TestParseCache_TimeEviction(t *testing.T) { + skipIfNoParseCache(t) + + ctx := context.Background() + fset := token.NewFileSet() + uri := protocol.DocumentURI("file:///myfile") + fh := makeFakeFileHandle(uri, []byte("package p\n\nconst _ = \"foo\"")) + + const gcDuration = 10 * time.Millisecond + cache := newParseCache(gcDuration) + cache.stop() // we'll manage GC manually, for testing. + + pgfs0, err := cache.parseFiles(ctx, fset, parsego.Full, false, fh, fh) + if err != nil { + t.Fatal(err) + } + + files := dummyFileHandles(parseCacheMinFiles) + _, err = cache.parseFiles(ctx, fset, parsego.Full, false, files...) + if err != nil { + t.Fatal(err) + } + + // Even after filling up the 'min' files, we get a cache hit for our original file. + pgfs1, err := cache.parseFiles(ctx, fset, parsego.Full, false, fh, fh) + if err != nil { + t.Fatal(err) + } + + if pgfs0[0] != pgfs1[0] { + t.Errorf("before GC, got unexpected cache miss") + } + + // But after GC, we get a cache miss. + _, err = cache.parseFiles(ctx, fset, parsego.Full, false, files...) 
// mark dummy files as newer + if err != nil { + t.Fatal(err) + } + time.Sleep(gcDuration) + cache.gcOnce() + + pgfs2, err := cache.parseFiles(ctx, fset, parsego.Full, false, fh, fh) + if err != nil { + t.Fatal(err) + } + + if pgfs0[0] == pgfs2[0] { + t.Errorf("after GC, got unexpected cache hit for %s", pgfs0[0].URI) + } +} + +func TestParseCache_Duplicates(t *testing.T) { + skipIfNoParseCache(t) + + ctx := context.Background() + uri := protocol.DocumentURI("file:///myfile") + fh := makeFakeFileHandle(uri, []byte("package p\n\nconst _ = \"foo\"")) + + cache := newParseCache(0) + pgfs, err := cache.parseFiles(ctx, token.NewFileSet(), parsego.Full, false, fh, fh) + if err != nil { + t.Fatal(err) + } + if pgfs[0] != pgfs[1] { + t.Errorf("parseFiles(fh, fh): = [%p, %p], want duplicate files", pgfs[0].File, pgfs[1].File) + } +} + +func dummyFileHandles(n int) []file.Handle { + var fhs []file.Handle + for i := 0; i < n; i++ { + uri := protocol.DocumentURI(fmt.Sprintf("file:///_%d", i)) + src := []byte(fmt.Sprintf("package p\nvar _ = %d", i)) + fhs = append(fhs, makeFakeFileHandle(uri, src)) + } + return fhs +} + +func makeFakeFileHandle(uri protocol.DocumentURI, src []byte) fakeFileHandle { + return fakeFileHandle{ + uri: uri, + data: src, + hash: file.HashOf(src), + } +} + +type fakeFileHandle struct { + file.Handle + uri protocol.DocumentURI + data []byte + hash file.Hash +} + +func (h fakeFileHandle) URI() protocol.DocumentURI { + return h.uri +} + +func (h fakeFileHandle) Content() ([]byte, error) { + return h.data, nil +} + +func (h fakeFileHandle) Identity() file.Identity { + return file.Identity{ + URI: h.uri, + Hash: h.hash, + } +} diff --git a/gopls/internal/cache/parsego/file.go b/gopls/internal/cache/parsego/file.go new file mode 100644 index 00000000000..3e13d5b2c43 --- /dev/null +++ b/gopls/internal/cache/parsego/file.go @@ -0,0 +1,100 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package parsego + +import ( + "go/ast" + "go/parser" + "go/scanner" + "go/token" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/safetoken" +) + +// A File contains the results of parsing a Go file. +type File struct { + URI protocol.DocumentURI + Mode parser.Mode + File *ast.File + Tok *token.File + // Source code used to build the AST. It may be different from the + // actual content of the file if we have fixed the AST. + Src []byte + + // fixedSrc and fixedAST report on "fixing" that occurred during parsing of + // this file. + // + // fixedSrc means Src holds file content that was modified to improve parsing. + // fixedAST means File was modified after parsing, so AST positions may not + // reflect the content of Src. + // + // TODO(rfindley): there are many places where we haphazardly use the Src or + // positions without checking these fields. Audit these places and guard + // accordingly. After doing so, we may find that we don't need to + // differentiate fixedSrc and fixedAST. + fixedSrc bool + fixedAST bool + Mapper *protocol.Mapper // may map fixed Src, not file content + ParseErr scanner.ErrorList +} + +// Fixed reports whether p was "Fixed", meaning that its source or positions +// may not correlate with the original file. +func (p File) Fixed() bool { + return p.fixedSrc || p.fixedAST +} + +// -- go/token domain convenience helpers -- + +// PositionPos returns the token.Pos of protocol position p within the file. +func (pgf *File) PositionPos(p protocol.Position) (token.Pos, error) { + offset, err := pgf.Mapper.PositionOffset(p) + if err != nil { + return token.NoPos, err + } + return safetoken.Pos(pgf.Tok, offset) +} + +// PosRange returns a protocol Range for the token.Pos interval in this file. 
+func (pgf *File) PosRange(start, end token.Pos) (protocol.Range, error) { + return pgf.Mapper.PosRange(pgf.Tok, start, end) +} + +// PosMappedRange returns a MappedRange for the token.Pos interval in this file. +// A MappedRange can be converted to any other form. +func (pgf *File) PosMappedRange(start, end token.Pos) (protocol.MappedRange, error) { + return pgf.Mapper.PosMappedRange(pgf.Tok, start, end) +} + +// PosLocation returns a protocol Location for the token.Pos interval in this file. +func (pgf *File) PosLocation(start, end token.Pos) (protocol.Location, error) { + return pgf.Mapper.PosLocation(pgf.Tok, start, end) +} + +// NodeRange returns a protocol Range for the ast.Node interval in this file. +func (pgf *File) NodeRange(node ast.Node) (protocol.Range, error) { + return pgf.Mapper.NodeRange(pgf.Tok, node) +} + +// NodeMappedRange returns a MappedRange for the ast.Node interval in this file. +// A MappedRange can be converted to any other form. +func (pgf *File) NodeMappedRange(node ast.Node) (protocol.MappedRange, error) { + return pgf.Mapper.NodeMappedRange(pgf.Tok, node) +} + +// NodeLocation returns a protocol Location for the ast.Node interval in this file. +func (pgf *File) NodeLocation(node ast.Node) (protocol.Location, error) { + return pgf.Mapper.PosLocation(pgf.Tok, node.Pos(), node.End()) +} + +// RangePos parses a protocol Range back into the go/token domain. +func (pgf *File) RangePos(r protocol.Range) (token.Pos, token.Pos, error) { + start, end, err := pgf.Mapper.RangeOffsets(r) + if err != nil { + return token.NoPos, token.NoPos, err + } + return pgf.Tok.Pos(start), pgf.Tok.Pos(end), nil +} diff --git a/gopls/internal/cache/parsego/parse.go b/gopls/internal/cache/parsego/parse.go new file mode 100644 index 00000000000..739ee1386bd --- /dev/null +++ b/gopls/internal/cache/parsego/parse.go @@ -0,0 +1,967 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package parsego + +import ( + "bytes" + "context" + "fmt" + "go/ast" + "go/parser" + "go/scanner" + "go/token" + "reflect" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/astutil" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +// Common parse modes; these should be reused wherever possible to increase +// cache hits. +const ( + // Header specifies that the main package declaration and imports are needed. + // This is the mode used when attempting to examine the package graph structure. + Header = parser.AllErrors | parser.ParseComments | parser.ImportsOnly | parser.SkipObjectResolution + + // Full specifies the full AST is needed. + // This is used for files of direct interest where the entire contents must + // be considered. + Full = parser.AllErrors | parser.ParseComments | parser.SkipObjectResolution +) + +// Parse parses a buffer of Go source, repairing the tree if necessary. +// +// The provided ctx is used only for logging. +func Parse(ctx context.Context, fset *token.FileSet, uri protocol.DocumentURI, src []byte, mode parser.Mode, purgeFuncBodies bool) (res *File, fixes []fixType) { + if purgeFuncBodies { + src = astutil.PurgeFuncBodies(src) + } + ctx, done := event.Start(ctx, "cache.ParseGoSrc", tag.File.Of(uri.Path())) + defer done() + + file, err := parser.ParseFile(fset, uri.Path(), src, mode) + var parseErr scanner.ErrorList + if err != nil { + // We passed a byte slice, so the only possible error is a parse error. + parseErr = err.(scanner.ErrorList) + } + + tok := fset.File(file.Pos()) + if tok == nil { + // file.Pos is the location of the package declaration (issue #53202). 
If there was + // none, we can't find the token.File that ParseFile created, and we + // have no choice but to recreate it. + tok = fset.AddFile(uri.Path(), -1, len(src)) + tok.SetLinesForContent(src) + } + + fixedSrc := false + fixedAST := false + // If there were parse errors, attempt to fix them up. + if parseErr != nil { + // Fix any badly parsed parts of the AST. + astFixes := fixAST(file, tok, src) + fixedAST = len(astFixes) > 0 + if fixedAST { + fixes = append(fixes, astFixes...) + } + + for i := 0; i < 10; i++ { + // Fix certain syntax errors that render the file unparseable. + newSrc, srcFix := fixSrc(file, tok, src) + if newSrc == nil { + break + } + + // If we thought there was something to fix 10 times in a row, + // it is likely we got stuck in a loop somehow. Log out a diff + // of the last changes we made to aid in debugging. + if i == 9 { + unified := diff.Unified("before", "after", string(src), string(newSrc)) + event.Log(ctx, fmt.Sprintf("fixSrc loop - last diff:\n%v", unified), tag.File.Of(tok.Name())) + } + + newFile, newErr := parser.ParseFile(fset, uri.Path(), newSrc, mode) + if newFile == nil { + break // no progress + } + + // Maintain the original parseError so we don't try formatting the + // doctored file. + file = newFile + src = newSrc + tok = fset.File(file.Pos()) + + // Only now that we accept the fix do we record the src fix from above. + fixes = append(fixes, srcFix) + fixedSrc = true + + if newErr == nil { + break // nothing to fix + } + + // Note that fixedAST is reset after we fix src. + astFixes = fixAST(file, tok, src) + fixedAST = len(astFixes) > 0 + if fixedAST { + fixes = append(fixes, astFixes...) 
+ } + } + } + + return &File{ + URI: uri, + Mode: mode, + Src: src, + fixedSrc: fixedSrc, + fixedAST: fixedAST, + File: file, + Tok: tok, + Mapper: protocol.NewMapper(uri, src), + ParseErr: parseErr, + }, fixes +} + +// fixAST inspects the AST and potentially modifies any *ast.BadStmts so that it can be +// type-checked more effectively. +// +// If fixAST returns true, the resulting AST is considered "fixed", meaning +// positions have been mangled, and type checker errors may not make sense. +func fixAST(n ast.Node, tok *token.File, src []byte) (fixes []fixType) { + var err error + walkASTWithParent(n, func(n, parent ast.Node) bool { + switch n := n.(type) { + case *ast.BadStmt: + if fixDeferOrGoStmt(n, parent, tok, src) { + fixes = append(fixes, fixedDeferOrGo) + // Recursively fix in our fixed node. + moreFixes := fixAST(parent, tok, src) + fixes = append(fixes, moreFixes...) + } else { + err = fmt.Errorf("unable to parse defer or go from *ast.BadStmt: %v", err) + } + return false + case *ast.BadExpr: + if fixArrayType(n, parent, tok, src) { + fixes = append(fixes, fixedArrayType) + // Recursively fix in our fixed node. + moreFixes := fixAST(parent, tok, src) + fixes = append(fixes, moreFixes...) + return false + } + + // Fix cases where parser interprets if/for/switch "init" + // statement as "cond" expression, e.g.: + // + // // "i := foo" is init statement, not condition. 
+ // for i := foo + // + if fixInitStmt(n, parent, tok, src) { + fixes = append(fixes, fixedInit) + } + return false + case *ast.SelectorExpr: + // Fix cases where a keyword prefix results in a phantom "_" selector, e.g.: + // + // foo.var<> // want to complete to "foo.variance" + // + if fixPhantomSelector(n, tok, src) { + fixes = append(fixes, fixedPhantomSelector) + } + return true + + case *ast.BlockStmt: + switch parent.(type) { + case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: + // Adjust closing curly brace of empty switch/select + // statements so we can complete inside them. + if fixEmptySwitch(n, tok, src) { + fixes = append(fixes, fixedEmptySwitch) + } + } + + return true + default: + return true + } + }) + return fixes +} + +// walkASTWithParent walks the AST rooted at n. The semantics are +// similar to ast.Inspect except it does not call f(nil). +func walkASTWithParent(n ast.Node, f func(n ast.Node, parent ast.Node) bool) { + var ancestors []ast.Node + ast.Inspect(n, func(n ast.Node) (recurse bool) { + defer func() { + if recurse { + ancestors = append(ancestors, n) + } + }() + + if n == nil { + ancestors = ancestors[:len(ancestors)-1] + return false + } + + var parent ast.Node + if len(ancestors) > 0 { + parent = ancestors[len(ancestors)-1] + } + + return f(n, parent) + }) +} + +// TODO(rfindley): revert this instrumentation once we're certain the crash in +// #59097 is fixed. +type fixType int + +const ( + noFix fixType = iota + fixedCurlies + fixedDanglingSelector + fixedDeferOrGo + fixedArrayType + fixedInit + fixedPhantomSelector + fixedEmptySwitch +) + +// fixSrc attempts to modify the file's source code to fix certain +// syntax errors that leave the rest of the file unparsed. +// +// fixSrc returns a non-nil result if and only if a fix was applied. 
+func fixSrc(f *ast.File, tf *token.File, src []byte) (newSrc []byte, fix fixType) { + walkASTWithParent(f, func(n, parent ast.Node) bool { + if newSrc != nil { + return false + } + + switch n := n.(type) { + case *ast.BlockStmt: + newSrc = fixMissingCurlies(f, n, parent, tf, src) + if newSrc != nil { + fix = fixedCurlies + } + case *ast.SelectorExpr: + newSrc = fixDanglingSelector(n, tf, src) + if newSrc != nil { + fix = fixedDanglingSelector + } + } + + return newSrc == nil + }) + + return newSrc, fix +} + +// fixMissingCurlies adds in curly braces for block statements that +// are missing curly braces. For example: +// +// if foo +// +// becomes +// +// if foo {} +func fixMissingCurlies(f *ast.File, b *ast.BlockStmt, parent ast.Node, tok *token.File, src []byte) []byte { + // If the "{" is already in the source code, there isn't anything to + // fix since we aren't missing curlies. + if b.Lbrace.IsValid() { + braceOffset, err := safetoken.Offset(tok, b.Lbrace) + if err != nil { + return nil + } + if braceOffset < len(src) && src[braceOffset] == '{' { + return nil + } + } + + parentLine := safetoken.Line(tok, parent.Pos()) + + if parentLine >= tok.LineCount() { + // If we are the last line in the file, no need to fix anything. + return nil + } + + // Insert curlies at the end of parent's starting line. The parent + // is the statement that contains the block, e.g. *ast.IfStmt. The + // block's Pos()/End() can't be relied upon because they are based + // on the (missing) curly braces. We assume the statement is a + // single line for now and try sticking the curly braces at the end. + insertPos := tok.LineStart(parentLine+1) - 1 + + // Scootch position backwards until it's not in a comment. For example: + // + // if foo<> // some amazing comment | + // someOtherCode() + // + // insertPos will be located at "|", so we back it out of the comment. 
+ didSomething := true + for didSomething { + didSomething = false + for _, c := range f.Comments { + if c.Pos() < insertPos && insertPos <= c.End() { + insertPos = c.Pos() + didSomething = true + } + } + } + + // Bail out if line doesn't end in an ident or ".". This is to avoid + // cases like below where we end up making things worse by adding + // curlies: + // + // if foo && + // bar<> + switch precedingToken(insertPos, tok, src) { + case token.IDENT, token.PERIOD: + // ok + default: + return nil + } + + var buf bytes.Buffer + buf.Grow(len(src) + 3) + offset, err := safetoken.Offset(tok, insertPos) + if err != nil { + return nil + } + buf.Write(src[:offset]) + + // Detect if we need to insert a semicolon to fix "for" loop situations like: + // + // for i := foo(); foo<> + // + // Just adding curlies is not sufficient to make things parse well. + if fs, ok := parent.(*ast.ForStmt); ok { + if _, ok := fs.Cond.(*ast.BadExpr); !ok { + if xs, ok := fs.Post.(*ast.ExprStmt); ok { + if _, ok := xs.X.(*ast.BadExpr); ok { + buf.WriteByte(';') + } + } + } + } + + // Insert "{}" at insertPos. + buf.WriteByte('{') + buf.WriteByte('}') + buf.Write(src[offset:]) + return buf.Bytes() +} + +// fixEmptySwitch moves empty switch/select statements' closing curly +// brace down one line. This allows us to properly detect incomplete +// "case" and "default" keywords as inside the switch statement. For +// example: +// +// switch { +// def<> +// } +// +// gets parsed like: +// +// switch { +// } +// +// Later we manually pull out the "def" token, but we need to detect +// that our "<>" position is inside the switch block. To do that we +// move the curly brace so it looks like: +// +// switch { +// +// } +// +// The resulting bool reports whether any fixing occurred. +func fixEmptySwitch(body *ast.BlockStmt, tok *token.File, src []byte) bool { + // We only care about empty switch statements. 
+ if len(body.List) > 0 || !body.Rbrace.IsValid() { + return false + } + + // If the right brace is actually in the source code at the + // specified position, don't mess with it. + braceOffset, err := safetoken.Offset(tok, body.Rbrace) + if err != nil { + return false + } + if braceOffset < len(src) && src[braceOffset] == '}' { + return false + } + + braceLine := safetoken.Line(tok, body.Rbrace) + if braceLine >= tok.LineCount() { + // If we are the last line in the file, no need to fix anything. + return false + } + + // Move the right brace down one line. + body.Rbrace = tok.LineStart(braceLine + 1) + return true +} + +// fixDanglingSelector inserts real "_" selector expressions in place +// of phantom "_" selectors. For example: +// +// func _() { +// x.<> +// } +// +// var x struct { i int } +// +// To fix completion at "<>", we insert a real "_" after the "." so the +// following declaration of "x" can be parsed and type checked +// normally. +func fixDanglingSelector(s *ast.SelectorExpr, tf *token.File, src []byte) []byte { + if !isPhantomUnderscore(s.Sel, tf, src) { + return nil + } + + if !s.X.End().IsValid() { + return nil + } + + insertOffset, err := safetoken.Offset(tf, s.X.End()) + if err != nil { + return nil + } + // Insert directly after the selector's ".". + insertOffset++ + if src[insertOffset-1] != '.' { + return nil + } + + var buf bytes.Buffer + buf.Grow(len(src) + 1) + buf.Write(src[:insertOffset]) + buf.WriteByte('_') + buf.Write(src[insertOffset:]) + return buf.Bytes() +} + +// fixPhantomSelector tries to fix selector expressions with phantom +// "_" selectors. In particular, we check if the selector is a +// keyword, and if so we swap in an *ast.Ident with the keyword text. For example: +// +// foo.var +// +// yields a "_" selector instead of "var" since "var" is a keyword. +// +// TODO(rfindley): should this constitute an ast 'fix'? +// +// The resulting bool reports whether any fixing occurred. 
+func fixPhantomSelector(sel *ast.SelectorExpr, tf *token.File, src []byte) bool { + if !isPhantomUnderscore(sel.Sel, tf, src) { + return false + } + + // Only consider selectors directly abutting the selector ".". This + // avoids false positives in cases like: + // + // foo. // don't think "var" is our selector + // var bar = 123 + // + if sel.Sel.Pos() != sel.X.End()+1 { + return false + } + + maybeKeyword := readKeyword(sel.Sel.Pos(), tf, src) + if maybeKeyword == "" { + return false + } + + return replaceNode(sel, sel.Sel, &ast.Ident{ + Name: maybeKeyword, + NamePos: sel.Sel.Pos(), + }) +} + +// isPhantomUnderscore reports whether the given ident is a phantom +// underscore. The parser sometimes inserts phantom underscores when +// it encounters otherwise unparseable situations. +func isPhantomUnderscore(id *ast.Ident, tok *token.File, src []byte) bool { + if id == nil || id.Name != "_" { + return false + } + + // Phantom underscore means the underscore is not actually in the + // program text. + offset, err := safetoken.Offset(tok, id.Pos()) + if err != nil { + return false + } + return len(src) <= offset || src[offset] != '_' +} + +// fixInitStmt fixes cases where the parser misinterprets an +// if/for/switch "init" statement as the "cond" conditional. In cases +// like "if i := 0" the user hasn't typed the semicolon yet so the +// parser is looking for the conditional expression. However, "i := 0" +// are not valid expressions, so we get a BadExpr. +// +// The resulting bool reports whether any fixing occurred. +func fixInitStmt(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) bool { + if !bad.Pos().IsValid() || !bad.End().IsValid() { + return false + } + + // Try to extract a statement from the BadExpr. 
+ start, end, err := safetoken.Offsets(tok, bad.Pos(), bad.End()-1) + if err != nil { + return false + } + stmtBytes := src[start : end+1] + stmt, err := parseStmt(tok, bad.Pos(), stmtBytes) + if err != nil { + return false + } + + // If the parent statement doesn't already have an "init" statement, + // move the extracted statement into the "init" field and insert a + // dummy expression into the required "cond" field. + switch p := parent.(type) { + case *ast.IfStmt: + if p.Init != nil { + return false + } + p.Init = stmt + p.Cond = &ast.Ident{ + Name: "_", + NamePos: stmt.End(), + } + return true + case *ast.ForStmt: + if p.Init != nil { + return false + } + p.Init = stmt + p.Cond = &ast.Ident{ + Name: "_", + NamePos: stmt.End(), + } + return true + case *ast.SwitchStmt: + if p.Init != nil { + return false + } + p.Init = stmt + p.Tag = nil + return true + } + return false +} + +// readKeyword reads the keyword starting at pos, if any. +func readKeyword(pos token.Pos, tok *token.File, src []byte) string { + var kwBytes []byte + offset, err := safetoken.Offset(tok, pos) + if err != nil { + return "" + } + for i := offset; i < len(src); i++ { + // Use a simplified identifier check since keywords are always lowercase ASCII. + if src[i] < 'a' || src[i] > 'z' { + break + } + kwBytes = append(kwBytes, src[i]) + + // Stop search at arbitrarily chosen too-long-for-a-keyword length. + if len(kwBytes) > 15 { + return "" + } + } + + if kw := string(kwBytes); token.Lookup(kw).IsKeyword() { + return kw + } + + return "" +} + +// fixArrayType tries to parse an *ast.BadExpr into an *ast.ArrayType. +// go/parser often turns lone array types like "[]int" into BadExprs +// if it isn't expecting a type. +func fixArrayType(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) bool { + // Our expected input is a bad expression that looks like "[]someExpr". 
+ + from := bad.Pos() + to := bad.End() + + if !from.IsValid() || !to.IsValid() { + return false + } + + exprBytes := make([]byte, 0, int(to-from)+3) + // Avoid doing tok.Offset(to) since that panics if badExpr ends at EOF. + // It also panics if the position is not in the range of the file, and + // badExprs may not necessarily have good positions, so check first. + fromOffset, toOffset, err := safetoken.Offsets(tok, from, to-1) + if err != nil { + return false + } + exprBytes = append(exprBytes, src[fromOffset:toOffset+1]...) + exprBytes = bytes.TrimSpace(exprBytes) + + // If our expression ends in "]" (e.g. "[]"), add a phantom selector + // so we can complete directly after the "[]". + if len(exprBytes) > 0 && exprBytes[len(exprBytes)-1] == ']' { + exprBytes = append(exprBytes, '_') + } + + // Add "{}" to turn our ArrayType into a CompositeLit. This is to + // handle the case of "[...]int" where we must make it a composite + // literal to be parseable. + exprBytes = append(exprBytes, '{', '}') + + expr, err := parseExpr(tok, from, exprBytes) + if err != nil { + return false + } + + cl, _ := expr.(*ast.CompositeLit) + if cl == nil { + return false + } + + at, _ := cl.Type.(*ast.ArrayType) + if at == nil { + return false + } + + return replaceNode(parent, bad, at) +} + +// precedingToken scans src to find the token preceding pos. +func precedingToken(pos token.Pos, tok *token.File, src []byte) token.Token { + s := &scanner.Scanner{} + s.Init(tok, src, nil, 0) + + var lastTok token.Token + for { + p, t, _ := s.Scan() + if t == token.EOF || p >= pos { + break + } + + lastTok = t + } + return lastTok +} + +// fixDeferOrGoStmt tries to parse an *ast.BadStmt into a defer or a go statement. +// +// go/parser packages a statement of the form "defer x." as an *ast.BadStmt because +// it does not include a call expression. This means that go/types skips type-checking +// this statement entirely, and we can't use the type information when completing. 
+// Here, we try to generate a fake *ast.DeferStmt or *ast.GoStmt to put into the AST, +// instead of the *ast.BadStmt. +func fixDeferOrGoStmt(bad *ast.BadStmt, parent ast.Node, tok *token.File, src []byte) bool { + // Check if we have a bad statement containing either a "go" or "defer". + s := &scanner.Scanner{} + s.Init(tok, src, nil, 0) + + var ( + pos token.Pos + tkn token.Token + ) + for { + if tkn == token.EOF { + return false + } + if pos >= bad.From { + break + } + pos, tkn, _ = s.Scan() + } + + var stmt ast.Stmt + switch tkn { + case token.DEFER: + stmt = &ast.DeferStmt{ + Defer: pos, + } + case token.GO: + stmt = &ast.GoStmt{ + Go: pos, + } + default: + return false + } + + var ( + from, to, last token.Pos + lastToken token.Token + braceDepth int + phantomSelectors []token.Pos + ) +FindTo: + for { + to, tkn, _ = s.Scan() + + if from == token.NoPos { + from = to + } + + switch tkn { + case token.EOF: + break FindTo + case token.SEMICOLON: + // If we aren't in nested braces, end of statement means + // end of expression. + if braceDepth == 0 { + break FindTo + } + case token.LBRACE: + braceDepth++ + } + + // This handles the common dangling selector case. For example in + // + // defer fmt. + // y := 1 + // + // we notice the dangling period and end our expression. + // + // If the previous token was a "." and we are looking at a "}", + // the period is likely a dangling selector and needs a phantom + // "_". Likewise if the current token is on a different line than + // the period, the period is likely a dangling selector. + if lastToken == token.PERIOD && (tkn == token.RBRACE || safetoken.Line(tok, to) > safetoken.Line(tok, last)) { + // Insert phantom "_" selector after the dangling ".". + phantomSelectors = append(phantomSelectors, last+1) + // If we aren't in a block then end the expression after the ".". 
+ if braceDepth == 0 { + to = last + 1 + break + } + } + + lastToken = tkn + last = to + + switch tkn { + case token.RBRACE: + braceDepth-- + if braceDepth <= 0 { + if braceDepth == 0 { + // +1 to include the "}" itself. + to += 1 + } + break FindTo + } + } + } + + fromOffset, toOffset, err := safetoken.Offsets(tok, from, to) + if err != nil { + return false + } + if !from.IsValid() || fromOffset >= len(src) { + return false + } + if !to.IsValid() || toOffset >= len(src) { + return false + } + + // Insert any phantom selectors needed to prevent dangling "." from messing + // up the AST. + exprBytes := make([]byte, 0, int(to-from)+len(phantomSelectors)) + for i, b := range src[fromOffset:toOffset] { + if len(phantomSelectors) > 0 && from+token.Pos(i) == phantomSelectors[0] { + exprBytes = append(exprBytes, '_') + phantomSelectors = phantomSelectors[1:] + } + exprBytes = append(exprBytes, b) + } + + if len(phantomSelectors) > 0 { + exprBytes = append(exprBytes, '_') + } + + expr, err := parseExpr(tok, from, exprBytes) + if err != nil { + return false + } + + // Package the expression into a fake *ast.CallExpr and re-insert + // into the function. + call := &ast.CallExpr{ + Fun: expr, + Lparen: to, + Rparen: to, + } + + switch stmt := stmt.(type) { + case *ast.DeferStmt: + stmt.Call = call + case *ast.GoStmt: + stmt.Call = call + } + + return replaceNode(parent, bad, stmt) +} + +// parseStmt parses the statement in src and updates its position to +// start at pos. +// +// tok is the original file containing pos. Used to ensure that all adjusted +// positions are valid. +func parseStmt(tok *token.File, pos token.Pos, src []byte) (ast.Stmt, error) { + // Wrap our expression to make it a valid Go file we can pass to ParseFile. + fileSrc := bytes.Join([][]byte{ + []byte("package fake;func _(){"), + src, + []byte("}"), + }, nil) + + // Use ParseFile instead of ParseExpr because ParseFile has + // best-effort behavior, whereas ParseExpr fails hard on any error. 
+ fakeFile, err := parser.ParseFile(token.NewFileSet(), "", fileSrc, 0) + if fakeFile == nil { + return nil, fmt.Errorf("error reading fake file source: %v", err) + } + + // Extract our expression node from inside the fake file. + if len(fakeFile.Decls) == 0 { + return nil, fmt.Errorf("error parsing fake file: %v", err) + } + + fakeDecl, _ := fakeFile.Decls[0].(*ast.FuncDecl) + if fakeDecl == nil || len(fakeDecl.Body.List) == 0 { + return nil, fmt.Errorf("no statement in %s: %v", src, err) + } + + stmt := fakeDecl.Body.List[0] + + // parser.ParseFile returns undefined positions. + // Adjust them for the current file. + offsetPositions(tok, stmt, pos-1-(stmt.Pos()-1)) + + return stmt, nil +} + +// parseExpr parses the expression in src and updates its position to +// start at pos. +func parseExpr(tok *token.File, pos token.Pos, src []byte) (ast.Expr, error) { + stmt, err := parseStmt(tok, pos, src) + if err != nil { + return nil, err + } + + exprStmt, ok := stmt.(*ast.ExprStmt) + if !ok { + return nil, fmt.Errorf("no expr in %s: %v", src, err) + } + + return exprStmt.X, nil +} + +var tokenPosType = reflect.TypeOf(token.NoPos) + +// offsetPositions applies an offset to the positions in an ast.Node. +func offsetPositions(tok *token.File, n ast.Node, offset token.Pos) { + fileBase := int64(tok.Base()) + fileEnd := fileBase + int64(tok.Size()) + ast.Inspect(n, func(n ast.Node) bool { + if n == nil { + return false + } + + v := reflect.ValueOf(n).Elem() + + switch v.Kind() { + case reflect.Struct: + for i := 0; i < v.NumField(); i++ { + f := v.Field(i) + if f.Type() != tokenPosType { + continue + } + + if !f.CanSet() { + continue + } + + // Don't offset invalid positions: they should stay invalid. + if !token.Pos(f.Int()).IsValid() { + continue + } + + // Clamp value to valid range; see #64335. + // + // TODO(golang/go#64335): this is a hack, because our fixes should not + // produce positions that overflow (but they do: golang/go#64488). 
+ pos := f.Int() + int64(offset) + if pos < fileBase { + pos = fileBase + } + if pos > fileEnd { + pos = fileEnd + } + f.SetInt(pos) + } + } + + return true + }) +} + +// replaceNode updates parent's child oldChild to be newChild. It +// returns whether it replaced successfully. +func replaceNode(parent, oldChild, newChild ast.Node) bool { + if parent == nil || oldChild == nil || newChild == nil { + return false + } + + parentVal := reflect.ValueOf(parent).Elem() + if parentVal.Kind() != reflect.Struct { + return false + } + + newChildVal := reflect.ValueOf(newChild) + + tryReplace := func(v reflect.Value) bool { + if !v.CanSet() || !v.CanInterface() { + return false + } + + // If the existing value is oldChild, we found our child. Make + // sure our newChild is assignable and then make the swap. + if v.Interface() == oldChild && newChildVal.Type().AssignableTo(v.Type()) { + v.Set(newChildVal) + return true + } + + return false + } + + // Loop over parent's struct fields. + for i := 0; i < parentVal.NumField(); i++ { + f := parentVal.Field(i) + + switch f.Kind() { + // Check interface and pointer fields. + case reflect.Interface, reflect.Ptr: + if tryReplace(f) { + return true + } + + // Search through any slice fields. + case reflect.Slice: + for i := 0; i < f.Len(); i++ { + if tryReplace(f.Index(i)) { + return true + } + } + } + } + + return false +} diff --git a/gopls/internal/cache/parsego/parse_test.go b/gopls/internal/cache/parsego/parse_test.go new file mode 100644 index 00000000000..c64125427b1 --- /dev/null +++ b/gopls/internal/cache/parsego/parse_test.go @@ -0,0 +1,46 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package parsego_test + +import ( + "context" + "go/ast" + "go/token" + "testing" + + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/tokeninternal" +) + +// TODO(golang/go#64335): we should have many more tests for fixed syntax. + +func TestFixPosition_Issue64488(t *testing.T) { + // This test reproduces the conditions of golang/go#64488, where a type error + // on fixed syntax overflows the token.File. + const src = ` +package foo + +func _() { + type myThing struct{} + var foo []myThing + for ${1:}, ${2:} := range foo { + $0 +} +} +` + + pgf, _ := parsego.Parse(context.Background(), token.NewFileSet(), "file://foo.go", []byte(src), parsego.Full, false) + fset := tokeninternal.FileSetFor(pgf.Tok) + ast.Inspect(pgf.File, func(n ast.Node) bool { + if n != nil { + posn := safetoken.StartPosition(fset, n.Pos()) + if !posn.IsValid() { + t.Fatalf("invalid position for %T (%v): %v not in [%d, %d]", n, n, n.Pos(), pgf.Tok.Base(), pgf.Tok.Base()+pgf.Tok.Size()) + } + } + return true + }) +} diff --git a/gopls/internal/cache/port.go b/gopls/internal/cache/port.go new file mode 100644 index 00000000000..e62ebe29903 --- /dev/null +++ b/gopls/internal/cache/port.go @@ -0,0 +1,204 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "bytes" + "go/build" + "go/parser" + "go/token" + "io" + "path/filepath" + "strings" + + "golang.org/x/tools/gopls/internal/util/bug" +) + +type port struct{ GOOS, GOARCH string } + +var ( + // preferredPorts holds GOOS/GOARCH combinations for which we dynamically + // create new Views, by setting GOOS=... and GOARCH=... on top of + // user-provided configuration when we detect that the default build + // configuration does not match an open file. 
Ports are matched in the order + // defined below, so that when multiple ports match a file we use the port + // occurring at a lower index in the slice. For that reason, we sort first + // class ports ahead of secondary ports, and (among first class ports) 64-bit + // ports ahead of the less common 32-bit ports. + preferredPorts = []port{ + // First class ports, from https://go.dev/wiki/PortingPolicy. + {"darwin", "amd64"}, + {"darwin", "arm64"}, + {"linux", "amd64"}, + {"linux", "arm64"}, + {"windows", "amd64"}, + {"linux", "arm"}, + {"linux", "386"}, + {"windows", "386"}, + + // Secondary ports, from GOROOT/src/internal/platform/zosarch.go. + // (First class ports are commented out.) + {"aix", "ppc64"}, + {"dragonfly", "amd64"}, + {"freebsd", "386"}, + {"freebsd", "amd64"}, + {"freebsd", "arm"}, + {"freebsd", "arm64"}, + {"illumos", "amd64"}, + {"linux", "ppc64"}, + {"linux", "ppc64le"}, + {"linux", "mips"}, + {"linux", "mipsle"}, + {"linux", "mips64"}, + {"linux", "mips64le"}, + {"linux", "riscv64"}, + {"linux", "s390x"}, + {"android", "386"}, + {"android", "amd64"}, + {"android", "arm"}, + {"android", "arm64"}, + {"ios", "arm64"}, + {"ios", "amd64"}, + {"js", "wasm"}, + {"netbsd", "386"}, + {"netbsd", "amd64"}, + {"netbsd", "arm"}, + {"netbsd", "arm64"}, + {"openbsd", "386"}, + {"openbsd", "amd64"}, + {"openbsd", "arm"}, + {"openbsd", "arm64"}, + {"openbsd", "mips64"}, + {"plan9", "386"}, + {"plan9", "amd64"}, + {"plan9", "arm"}, + {"solaris", "amd64"}, + {"windows", "arm"}, + {"windows", "arm64"}, + + {"aix", "ppc64"}, + {"android", "386"}, + {"android", "amd64"}, + {"android", "arm"}, + {"android", "arm64"}, + // {"darwin", "amd64"}, + // {"darwin", "arm64"}, + {"dragonfly", "amd64"}, + {"freebsd", "386"}, + {"freebsd", "amd64"}, + {"freebsd", "arm"}, + {"freebsd", "arm64"}, + {"freebsd", "riscv64"}, + {"illumos", "amd64"}, + {"ios", "amd64"}, + {"ios", "arm64"}, + {"js", "wasm"}, + // {"linux", "386"}, + // {"linux", "amd64"}, + // {"linux", "arm"}, + // 
{"linux", "arm64"}, + {"linux", "loong64"}, + {"linux", "mips"}, + {"linux", "mips64"}, + {"linux", "mips64le"}, + {"linux", "mipsle"}, + {"linux", "ppc64"}, + {"linux", "ppc64le"}, + {"linux", "riscv64"}, + {"linux", "s390x"}, + {"linux", "sparc64"}, + {"netbsd", "386"}, + {"netbsd", "amd64"}, + {"netbsd", "arm"}, + {"netbsd", "arm64"}, + {"openbsd", "386"}, + {"openbsd", "amd64"}, + {"openbsd", "arm"}, + {"openbsd", "arm64"}, + {"openbsd", "mips64"}, + {"openbsd", "ppc64"}, + {"openbsd", "riscv64"}, + {"plan9", "386"}, + {"plan9", "amd64"}, + {"plan9", "arm"}, + {"solaris", "amd64"}, + {"wasip1", "wasm"}, + // {"windows", "386"}, + // {"windows", "amd64"}, + {"windows", "arm"}, + {"windows", "arm64"}, + } +) + +// matches reports whether the port matches a file with the given absolute path +// and content. +// +// Note that this function accepts content rather than e.g. a file.Handle, +// because we trim content before matching for performance reasons, and +// therefore need to do this outside of matches when considering multiple ports. +func (p port) matches(path string, content []byte) bool { + ctxt := build.Default // make a copy + ctxt.UseAllFiles = false + dir, name := filepath.Split(path) + + // The only virtualized operation called by MatchFile is OpenFile. + ctxt.OpenFile = func(p string) (io.ReadCloser, error) { + if p != path { + return nil, bug.Errorf("unexpected file %q", p) + } + return io.NopCloser(bytes.NewReader(content)), nil + } + + ctxt.GOOS = p.GOOS + ctxt.GOARCH = p.GOARCH + ok, err := ctxt.MatchFile(dir, name) + return err == nil && ok +} + +// trimContentForPortMatch trims the given Go file content to a minimal file +// containing the same build constraints, if any. +// +// This is an unfortunate but necessary optimization, as matching build +// constraints using go/build has significant overhead, and involves parsing +// more than just the build constraint. 
+// +// TestMatchingPortsConsistency enforces consistency by comparing results +// without trimming content. +func trimContentForPortMatch(content []byte) []byte { + buildComment := buildComment(content) + return []byte(buildComment + "\npackage p") // package name does not matter +} + +// buildComment returns the first matching //go:build comment in the given +// content, or "" if none exists. +func buildComment(content []byte) string { + f, err := parser.ParseFile(token.NewFileSet(), "", content, parser.PackageClauseOnly|parser.ParseComments) + if err != nil { + return "" + } + + for _, cg := range f.Comments { + for _, c := range cg.List { + if isGoBuildComment(c.Text) { + return c.Text + } + } + } + return "" +} + +// Adapted from go/build/build.go. +// +// TODO(rfindley): use constraint.IsGoBuild once we are on 1.19+. +func isGoBuildComment(line string) bool { + const goBuildComment = "//go:build" + if !strings.HasPrefix(line, goBuildComment) { + return false + } + // Report whether //go:build is followed by a word boundary. + line = strings.TrimSpace(line) + rest := line[len(goBuildComment):] + return len(rest) == 0 || len(strings.TrimSpace(rest)) < len(rest) +} diff --git a/gopls/internal/cache/port_test.go b/gopls/internal/cache/port_test.go new file mode 100644 index 00000000000..a92056a9c22 --- /dev/null +++ b/gopls/internal/cache/port_test.go @@ -0,0 +1,124 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cache + +import ( + "os" + "testing" + + "github.com/google/go-cmp/cmp" + "golang.org/x/sync/errgroup" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/internal/testenv" +) + +func TestMain(m *testing.M) { + bug.PanicOnBugs = true + os.Exit(m.Run()) +} + +func TestMatchingPortsStdlib(t *testing.T) { + // This test checks that we don't encounter a bug when matching ports, and + // sanity checks that the optimization to use trimmed/fake file content + // before delegating to go/build.Context.MatchFile does not affect + // correctness. + if testing.Short() { + t.Skip("skipping in short mode: takes to long on slow file systems") + } + + testenv.NeedsTool(t, "go") + + // Load, parse and type-check the program. + cfg := &packages.Config{ + Mode: packages.LoadFiles, + Tests: true, + } + pkgs, err := packages.Load(cfg, "std", "cmd") + if err != nil { + t.Fatal(err) + } + + var g errgroup.Group + packages.Visit(pkgs, nil, func(pkg *packages.Package) { + for _, f := range pkg.CompiledGoFiles { + f := f + g.Go(func() error { + content, err := os.ReadFile(f) + // We report errors via t.Error, not by returning, + // so that a single test can report multiple test failures. 
+ if err != nil { + t.Errorf("failed to read %s: %v", f, err) + return nil + } + fh := makeFakeFileHandle(protocol.URIFromPath(f), content) + fastPorts := matchingPreferredPorts(t, fh, true) + slowPorts := matchingPreferredPorts(t, fh, false) + if diff := cmp.Diff(fastPorts, slowPorts); diff != "" { + t.Errorf("%s: ports do not match (-trimmed +untrimmed):\n%s", f, diff) + return nil + } + return nil + }) + } + }) + g.Wait() +} + +func matchingPreferredPorts(tb testing.TB, fh file.Handle, trimContent bool) map[port]unit { + content, err := fh.Content() + if err != nil { + tb.Fatal(err) + } + if trimContent { + content = trimContentForPortMatch(content) + } + path := fh.URI().Path() + matching := make(map[port]unit) + for _, port := range preferredPorts { + if port.matches(path, content) { + matching[port] = unit{} + } + } + return matching +} + +func BenchmarkMatchingPreferredPorts(b *testing.B) { + // Copy of robustio_posix.go + const src = ` +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build unix +// +build unix + +package robustio + +import ( + "os" + "syscall" + "time" +) + +func getFileID(filename string) (FileID, time.Time, error) { + fi, err := os.Stat(filename) + if err != nil { + return FileID{}, time.Time{}, err + } + stat := fi.Sys().(*syscall.Stat_t) + return FileID{ + device: uint64(stat.Dev), // (int32 on darwin, uint64 on linux) + inode: stat.Ino, + }, fi.ModTime(), nil +} +` + fh := makeFakeFileHandle("file:///path/to/test/file.go", []byte(src)) + for i := 0; i < b.N; i++ { + _ = matchingPreferredPorts(b, fh, true) + } +} diff --git a/gopls/internal/cache/session.go b/gopls/internal/cache/session.go new file mode 100644 index 00000000000..05ed0694148 --- /dev/null +++ b/gopls/internal/cache/session.go @@ -0,0 +1,1182 @@ +// Copyright 2019 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "context" + "errors" + "fmt" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + "sync" + "sync/atomic" + "time" + + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/typerefs" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/persistent" + "golang.org/x/tools/gopls/internal/util/slices" + "golang.org/x/tools/gopls/internal/vulncheck" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/imports" + "golang.org/x/tools/internal/memoize" + "golang.org/x/tools/internal/xcontext" +) + +// NewSession creates a new gopls session with the given cache. +func NewSession(ctx context.Context, c *Cache) *Session { + index := atomic.AddInt64(&sessionIndex, 1) + s := &Session{ + id: strconv.FormatInt(index, 10), + cache: c, + gocmdRunner: &gocommand.Runner{}, + overlayFS: newOverlayFS(c), + parseCache: newParseCache(1 * time.Minute), // keep recently parsed files for a minute, to optimize typing CPU + viewMap: make(map[protocol.DocumentURI]*View), + } + event.Log(ctx, "New session", KeyCreateSession.Of(s)) + return s +} + +// A Session holds the state (views, file contents, parse cache, +// memoized computations) of a gopls server process. +// +// It implements the file.Source interface. +type Session struct { + // Unique identifier for this session. + id string + + // Immutable attributes shared across views. 
+ cache *Cache // shared cache + gocmdRunner *gocommand.Runner // limits go command concurrency + + viewMu sync.Mutex + views []*View + viewMap map[protocol.DocumentURI]*View // file->best view; nil after shutdown + + // snapshots is a counting semaphore that records the number + // of unreleased snapshots associated with this session. + // Shutdown waits for it to fall to zero. + snapshotWG sync.WaitGroup + + parseCache *parseCache + + *overlayFS +} + +// ID returns the unique identifier for this session on this server. +func (s *Session) ID() string { return s.id } +func (s *Session) String() string { return s.id } + +// GoCommandRunner returns the gocommand Runner for this session. +func (s *Session) GoCommandRunner() *gocommand.Runner { + return s.gocmdRunner +} + +// Shutdown the session and all views it has created. +func (s *Session) Shutdown(ctx context.Context) { + var views []*View + s.viewMu.Lock() + views = append(views, s.views...) + s.views = nil + s.viewMap = nil + s.viewMu.Unlock() + for _, view := range views { + view.shutdown() + } + s.parseCache.stop() + s.snapshotWG.Wait() // wait for all work on associated snapshots to finish + event.Log(ctx, "Shutdown session", KeyShutdownSession.Of(s)) +} + +// Cache returns the cache that created this session, for debugging only. +func (s *Session) Cache() *Cache { + return s.cache +} + +// TODO(rfindley): is the logic surrounding this error actually necessary? +var ErrViewExists = errors.New("view already exists for session") + +// NewView creates a new View, returning it and its first snapshot. If a +// non-empty tempWorkspace directory is provided, the View will record a copy +// of its gopls workspace module in that directory, so that client tooling +// can execute in the same main module. On success it also returns a release +// function that must be called when the Snapshot is no longer needed. 
+func (s *Session) NewView(ctx context.Context, folder *Folder) (*View, *Snapshot, func(), error) { + s.viewMu.Lock() + defer s.viewMu.Unlock() + + // Querying the file system to check whether + // two folders denote the same existing directory. + if inode1, err := os.Stat(filepath.FromSlash(folder.Dir.Path())); err == nil { + for _, view := range s.views { + inode2, err := os.Stat(filepath.FromSlash(view.folder.Dir.Path())) + if err == nil && os.SameFile(inode1, inode2) { + return nil, nil, nil, ErrViewExists + } + } + } + + def, err := defineView(ctx, s, folder, nil) + if err != nil { + return nil, nil, nil, err + } + view, snapshot, release := s.createView(ctx, def) + s.views = append(s.views, view) + // we always need to drop the view map + s.viewMap = make(map[protocol.DocumentURI]*View) + return view, snapshot, release, nil +} + +// createView creates a new view, with an initial snapshot that retains the +// supplied context, detached from events and cancelation. +// +// The caller is responsible for calling the release function once. +func (s *Session) createView(ctx context.Context, def *viewDefinition) (*View, *Snapshot, func()) { + index := atomic.AddInt64(&viewIndex, 1) + + // We want a true background context and not a detached context here + // the spans need to be unrelated and no tag values should pollute it. + baseCtx := event.Detach(xcontext.Detach(ctx)) + backgroundCtx, cancel := context.WithCancel(baseCtx) + + // Compute a skip function to use for module cache scanning. + // + // Note that unlike other filtering operations, we definitely don't want to + // exclude the gomodcache here, even if it is contained in the workspace + // folder. + // + // TODO(rfindley): consolidate with relPathExcludedByFilter(Func), Filterer, + // View.filterFunc. + var skipPath func(string) bool + { + // Compute a prefix match, respecting segment boundaries, by ensuring + // the pattern (dir) has a trailing slash. 
+ dirPrefix := strings.TrimSuffix(string(def.folder.Dir), "/") + "/" + filterer := NewFilterer(def.folder.Options.DirectoryFilters) + skipPath = func(dir string) bool { + uri := strings.TrimSuffix(string(protocol.URIFromPath(dir)), "/") + // Note that the logic below doesn't handle the case where uri == + // v.folder.Dir, because there is no point in excluding the entire + // workspace folder! + if rel := strings.TrimPrefix(uri, dirPrefix); rel != uri { + return filterer.Disallow(rel) + } + return false + } + } + + var ignoreFilter *ignoreFilter + { + var dirs []string + if len(def.workspaceModFiles) == 0 { + for _, entry := range filepath.SplitList(def.folder.Env.GOPATH) { + dirs = append(dirs, filepath.Join(entry, "src")) + } + } else { + dirs = append(dirs, def.folder.Env.GOMODCACHE) + for m := range def.workspaceModFiles { + dirs = append(dirs, filepath.Dir(m.Path())) + } + } + ignoreFilter = newIgnoreFilter(dirs) + } + + var pe *imports.ProcessEnv + { + env := make(map[string]string) + envSlice := slices.Concat(os.Environ(), def.folder.Options.EnvSlice(), []string{"GO111MODULE=" + def.adjustedGO111MODULE()}) + for _, kv := range envSlice { + if k, v, ok := strings.Cut(kv, "="); ok { + env[k] = v + } + } + pe = &imports.ProcessEnv{ + GocmdRunner: s.gocmdRunner, + BuildFlags: slices.Clone(def.folder.Options.BuildFlags), + // TODO(rfindley): an old comment said "processEnv operations should not mutate the modfile" + // But shouldn't we honor the default behavior of mod vendoring? 
+ ModFlag: "readonly", + SkipPathInScan: skipPath, + Env: env, + WorkingDir: def.root.Path(), + ModCache: s.cache.modCache.dirCache(def.folder.Env.GOMODCACHE), + } + if def.folder.Options.VerboseOutput { + pe.Logf = func(format string, args ...interface{}) { + event.Log(ctx, fmt.Sprintf(format, args...)) + } + } + } + + v := &View{ + id: strconv.FormatInt(index, 10), + gocmdRunner: s.gocmdRunner, + initialWorkspaceLoad: make(chan struct{}), + initializationSema: make(chan struct{}, 1), + baseCtx: baseCtx, + parseCache: s.parseCache, + ignoreFilter: ignoreFilter, + fs: s.overlayFS, + viewDefinition: def, + importsState: newImportsState(backgroundCtx, s.cache.modCache, pe), + } + + s.snapshotWG.Add(1) + v.snapshot = &Snapshot{ + view: v, + backgroundCtx: backgroundCtx, + cancel: cancel, + store: s.cache.store, + refcount: 1, // Snapshots are born referenced. + done: s.snapshotWG.Done, + packages: new(persistent.Map[PackageID, *packageHandle]), + meta: new(metadata.Graph), + files: newFileMap(), + activePackages: new(persistent.Map[PackageID, *Package]), + symbolizeHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), + shouldLoad: new(persistent.Map[PackageID, []PackagePath]), + unloadableFiles: new(persistent.Set[protocol.DocumentURI]), + parseModHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), + parseWorkHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), + modTidyHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), + modVulnHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), + modWhyHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), + pkgIndex: typerefs.NewPackageIndex(), + moduleUpgrades: new(persistent.Map[protocol.DocumentURI, map[string]string]), + vulns: new(persistent.Map[protocol.DocumentURI, *vulncheck.Result]), + } + + // Snapshots must observe all open files, as there are some caching + // heuristics that change behavior depending on open files. 
+ for _, o := range s.overlayFS.Overlays() { + _, _ = v.snapshot.ReadFile(ctx, o.URI()) + } + + // Record the environment of the newly created view in the log. + event.Log(ctx, viewEnv(v)) + + // Initialize the view without blocking. + initCtx, initCancel := context.WithCancel(xcontext.Detach(ctx)) + v.cancelInitialWorkspaceLoad = initCancel + snapshot := v.snapshot + + // Pass a second reference to the background goroutine. + bgRelease := snapshot.Acquire() + go func() { + defer bgRelease() + snapshot.initialize(initCtx, true) + }() + + // Return a third reference to the caller. + return v, snapshot, snapshot.Acquire() +} + +// RemoveView removes from the session the view rooted at the specified directory. +// It reports whether a view of that directory was removed. +func (s *Session) RemoveView(dir protocol.DocumentURI) bool { + s.viewMu.Lock() + defer s.viewMu.Unlock() + for _, view := range s.views { + if view.folder.Dir == dir { + i := s.dropView(view) + if i == -1 { + return false // can't happen + } + // delete this view... we don't care about order but we do want to make + // sure we can garbage collect the view + s.views = removeElement(s.views, i) + return true + } + } + return false +} + +// View returns the view with a matching id, if present. +func (s *Session) View(id string) (*View, error) { + s.viewMu.Lock() + defer s.viewMu.Unlock() + for _, view := range s.views { + if view.ID() == id { + return view, nil + } + } + return nil, fmt.Errorf("no view with ID %q", id) +} + +// SnapshotOf returns a Snapshot corresponding to the given URI. +// +// In the case where the file can be can be associated with a View by +// bestViewForURI (based on directory information alone, without package +// metadata), SnapshotOf returns the current Snapshot for that View. Otherwise, +// it awaits loading package metadata and returns a Snapshot for the first View +// containing a real (=not command-line-arguments) package for the file. 
+// +// If that also fails to find a View, SnapshotOf returns a Snapshot for the +// first view in s.views that is not shut down (i.e. s.views[0] unless we lose +// a race), for determinism in tests and so that we tend to aggregate the +// resulting command-line-arguments packages into a single view. +// +// SnapshotOf returns an error if a failure occurs along the way (most likely due +// to context cancellation), or if there are no Views in the Session. +// +// On success, the caller must call the returned function to release the snapshot. +func (s *Session) SnapshotOf(ctx context.Context, uri protocol.DocumentURI) (*Snapshot, func(), error) { + // Fast path: if the uri has a static association with a view, return it. + s.viewMu.Lock() + v, err := s.viewOfLocked(ctx, uri) + s.viewMu.Unlock() + + if err != nil { + return nil, nil, err + } + + if v != nil { + snapshot, release, err := v.Snapshot() + if err == nil { + return snapshot, release, nil + } + // View is shut down. Forget this association. + s.viewMu.Lock() + if s.viewMap[uri] == v { + delete(s.viewMap, uri) + } + s.viewMu.Unlock() + } + + // Fall-back: none of the views could be associated with uri based on + // directory information alone. + // + // Don't memoize the view association in viewMap, as it is not static: Views + // may change as metadata changes. + // + // TODO(rfindley): we could perhaps optimize this case by peeking at existing + // metadata before awaiting the load (after all, a load only adds metadata). + // But that seems potentially tricky, when in the common case no loading + // should be required. 
+ views := s.Views() + for _, v := range views { + snapshot, release, err := v.Snapshot() + if err != nil { + continue // view was shut down + } + _ = snapshot.awaitLoaded(ctx) // ignore error + g := snapshot.MetadataGraph() + // We don't check the error from awaitLoaded, because a load failure (that + // doesn't result from context cancelation) should not prevent us from + // continuing to search for the best view. + if ctx.Err() != nil { + release() + return nil, nil, ctx.Err() + } + // Special handling for the builtin file, since it doesn't have packages. + if snapshot.IsBuiltin(uri) { + return snapshot, release, nil + } + // Only match this view if it loaded a real package for the file. + // + // Any view can load a command-line-arguments package; aggregate those into + // views[0] below. + for _, id := range g.IDs[uri] { + if !metadata.IsCommandLineArguments(id) || g.Packages[id].Standalone { + return snapshot, release, nil + } + } + release() + } + + for _, v := range views { + snapshot, release, err := v.Snapshot() + if err == nil { + return snapshot, release, nil // first valid snapshot + } + } + return nil, nil, errNoViews +} + +// errNoViews is sought by orphaned file diagnostics, to detect the case where +// we have no view containing a file. +var errNoViews = errors.New("no views") + +// viewOfLocked wraps bestViewForURI, memoizing its result. +// +// Precondition: caller holds s.viewMu lock. +// +// May return (nil, nil). +func (s *Session) viewOfLocked(ctx context.Context, uri protocol.DocumentURI) (*View, error) { + v, hit := s.viewMap[uri] + if !hit { + // Cache miss: compute (and memoize) the best view. 
+ fh, err := s.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + v, err = bestView(ctx, s, fh, s.views) + if err != nil { + return nil, err + } + if s.viewMap == nil { + return nil, errors.New("session is shut down") + } + s.viewMap[uri] = v + } + return v, nil +} + +func (s *Session) Views() []*View { + s.viewMu.Lock() + defer s.viewMu.Unlock() + result := make([]*View, len(s.views)) + copy(result, s.views) + return result +} + +// selectViewDefs constructs the best set of views covering the provided workspace +// folders and open files. +// +// This implements the zero-config algorithm of golang/go#57979. +func selectViewDefs(ctx context.Context, fs file.Source, folders []*Folder, openFiles []protocol.DocumentURI) ([]*viewDefinition, error) { + var defs []*viewDefinition + + // First, compute a default view for each workspace folder. + // TODO(golang/go#57979): technically, this is path dependent, since + // DidChangeWorkspaceFolders could introduce a path-dependent ordering on + // folders. We should keep folders sorted, or sort them here. + for _, folder := range folders { + def, err := defineView(ctx, fs, folder, nil) + if err != nil { + return nil, err + } + defs = append(defs, def) + } + + // Next, ensure that the set of views covers all open files contained in a + // workspace folder. + // + // We only do this for files contained in a workspace folder, because other + // open files are most likely the result of jumping to a definition from a + // workspace file; we don't want to create additional views in those cases: + // they should be resolved after initialization. + + folderForFile := func(uri protocol.DocumentURI) *Folder { + var longest *Folder + for _, folder := range folders { + // Check that this is a better match than longest, but not through a + // vendor directory. Count occurrences of "/vendor/" as a quick check + // that the vendor directory is between the folder and the file. 
Note the + // addition of a trailing "/" to handle the odd case where the folder is named + // vendor (which I hope is exceedingly rare in any case). + // + // Vendored packages are, by definition, part of an existing view. + if (longest == nil || len(folder.Dir) > len(longest.Dir)) && + folder.Dir.Encloses(uri) && + strings.Count(string(uri), "/vendor/") == strings.Count(string(folder.Dir)+"/", "/vendor/") { + + longest = folder + } + } + return longest + } + +checkFiles: + for _, uri := range openFiles { + folder := folderForFile(uri) + if folder == nil || !folder.Options.ZeroConfig { + continue // only guess views for open files + } + fh, err := fs.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + def, err := bestView(ctx, fs, fh, defs) + if err != nil { + // We should never call selectViewDefs with a cancellable context, so + // this should never fail. + return nil, bug.Errorf("failed to find best view for open file: %v", err) + } + if def != nil { + continue // file covered by an existing view + } + def, err = defineView(ctx, fs, folder, fh) + if err != nil { + // We should never call selectViewDefs with a cancellable context, so + // this should never fail. + return nil, bug.Errorf("failed to define view for open file: %v", err) + } + // It need not strictly be the case that the best view for a file is + // distinct from other views, as the logic of getViewDefinition and + // bestViewForURI does not align perfectly. This is not necessarily a bug: + // there may be files for which we can't construct a valid view. + // + // Nevertheless, we should not create redundant views. + for _, alt := range defs { + if viewDefinitionsEqual(alt, def) { + continue checkFiles + } + } + defs = append(defs, def) + } + + return defs, nil +} + +// The viewDefiner interface allows the bestView algorithm to operate on both +// Views and viewDefinitions. 
+type viewDefiner interface{ definition() *viewDefinition } + +// BestViews returns the most relevant subset of views for a given uri. +// +// This may be used to filter diagnostics to the most relevant builds. +func BestViews[V viewDefiner](ctx context.Context, fs file.Source, uri protocol.DocumentURI, views []V) ([]V, error) { + if len(views) == 0 { + return nil, nil // avoid the call to findRootPattern + } + dir := uri.Dir() + modURI, err := findRootPattern(ctx, dir, "go.mod", fs) + if err != nil { + return nil, err + } + + // Prefer GoWork > GoMod > GOPATH > GoPackages > AdHoc. + var ( + goPackagesViews []V // prefer longest + workViews []V // prefer longest + modViews []V // exact match + gopathViews []V // prefer longest + adHocViews []V // exact match + ) + + // pushView updates the views slice with the matching view v, using the + // heuristic that views with a longer root are preferable. Accordingly, + // pushView may be a no op if v's root is shorter than the roots in the views + // slice. + // + // Invariant: the length of all roots in views is the same. 
+ pushView := func(views *[]V, v V) { + if len(*views) == 0 { + *views = []V{v} + return + } + better := func(l, r V) bool { + return len(l.definition().root) > len(r.definition().root) + } + existing := (*views)[0] + switch { + case better(existing, v): + case better(v, existing): + *views = []V{v} + default: + *views = append(*views, v) + } + } + + for _, view := range views { + switch def := view.definition(); def.Type() { + case GoPackagesDriverView: + if def.root.Encloses(dir) { + pushView(&goPackagesViews, view) + } + case GoWorkView: + if _, ok := def.workspaceModFiles[modURI]; ok || uri == def.gowork { + pushView(&workViews, view) + } + case GoModView: + if _, ok := def.workspaceModFiles[modURI]; ok { + modViews = append(modViews, view) + } + case GOPATHView: + if def.root.Encloses(dir) { + pushView(&gopathViews, view) + } + case AdHocView: + if def.root == dir { + adHocViews = append(adHocViews, view) + } + } + } + + // Now that we've collected matching views, choose the best match, + // considering ports. + // + // We only consider one type of view, since the matching view created by + // defineView should be of the best type. + var bestViews []V + switch { + case len(workViews) > 0: + bestViews = workViews + case len(modViews) > 0: + bestViews = modViews + case len(gopathViews) > 0: + bestViews = gopathViews + case len(goPackagesViews) > 0: + bestViews = goPackagesViews + case len(adHocViews) > 0: + bestViews = adHocViews + } + + return bestViews, nil +} + +// bestView returns the best View or viewDefinition that contains the +// given file, or (nil, nil) if no matching view is found. +// +// bestView only returns an error in the event of context cancellation. +// +// Making this function generic is convenient so that we can avoid mapping view +// definitions back to views inside Session.DidModifyFiles, where performance +// matters. It is, however, not the cleanest application of generics. +// +// Note: keep this function in sync with defineView. 
+func bestView[V viewDefiner](ctx context.Context, fs file.Source, fh file.Handle, views []V) (V, error) { + var zero V + bestViews, err := BestViews(ctx, fs, fh.URI(), views) + if err != nil || len(bestViews) == 0 { + return zero, err + } + + content, err := fh.Content() + // Port matching doesn't apply to non-go files, or files that no longer exist. + // Note that the behavior here on non-existent files shouldn't matter much, + // since there will be a subsequent failure. But it is simpler to preserve + // the invariant that bestView only fails on context cancellation. + if fileKind(fh) != file.Go || err != nil { + return bestViews[0], nil + } + + // Find the first view that matches constraints. + // Content trimming is nontrivial, so do this outside of the loop below. + path := fh.URI().Path() + content = trimContentForPortMatch(content) + for _, v := range bestViews { + def := v.definition() + viewPort := port{def.GOOS(), def.GOARCH()} + if viewPort.matches(path, content) { + return v, nil + } + } + + return zero, nil // no view found +} + +// updateViewLocked recreates the view with the given options. +// +// If the resulting error is non-nil, the view may or may not have already been +// dropped from the session. +func (s *Session) updateViewLocked(ctx context.Context, view *View, def *viewDefinition) (*View, error) { + i := s.dropView(view) + if i == -1 { + return nil, fmt.Errorf("view %q not found", view.id) + } + + view, _, release := s.createView(ctx, def) + defer release() + + // substitute the new view into the array where the old view was + s.views[i] = view + s.viewMap = make(map[protocol.DocumentURI]*View) + return view, nil +} + +// removeElement removes the ith element from the slice replacing it with the last element. +// TODO(adonovan): generics, someday. 
+func removeElement(slice []*View, index int) []*View { + last := len(slice) - 1 + slice[index] = slice[last] + slice[last] = nil // aid GC + return slice[:last] +} + +// dropView removes v from the set of views for the receiver s and calls +// v.shutdown, returning the index of v in s.views (if found), or -1 if v was +// not found. s.viewMu must be held while calling this function. +func (s *Session) dropView(v *View) int { + // we always need to drop the view map + s.viewMap = make(map[protocol.DocumentURI]*View) + for i := range s.views { + if v == s.views[i] { + // we found the view, drop it and return the index it was found at + s.views[i] = nil + v.shutdown() + return i + } + } + // TODO(rfindley): it looks wrong that we don't shutdown v in this codepath. + // We should never get here. + bug.Reportf("tried to drop nonexistent view %q", v.id) + return -1 +} + +// ResetView resets the best view for the given URI. +func (s *Session) ResetView(ctx context.Context, uri protocol.DocumentURI) (*View, error) { + s.viewMu.Lock() + defer s.viewMu.Unlock() + v, err := s.viewOfLocked(ctx, uri) + if err != nil { + return nil, err + } + return s.updateViewLocked(ctx, v, v.viewDefinition) +} + +// DidModifyFiles reports a file modification to the session. It returns +// the new snapshots after the modifications have been applied, paired with +// the affected file URIs for those snapshots. +// On success, it returns a release function that +// must be called when the snapshots are no longer needed. +// +// TODO(rfindley): what happens if this function fails? It must leave us in a +// broken state, which we should surface to the user, probably as a request to +// restart gopls. +func (s *Session) DidModifyFiles(ctx context.Context, modifications []file.Modification) (map[*View][]protocol.DocumentURI, error) { + s.viewMu.Lock() + defer s.viewMu.Unlock() + + // Update overlays. 
+ // + // This is done while holding viewMu because the set of open files affects + // the set of views, and to prevent views from seeing updated file content + // before they have processed invalidations. + replaced, err := s.updateOverlays(ctx, modifications) + if err != nil { + return nil, err + } + + // checkViews controls whether the set of views needs to be recomputed, for + // example because a go.mod file was created or deleted, or a go.work file + // changed on disk. + checkViews := false + + changed := make(map[protocol.DocumentURI]file.Handle) + for _, c := range modifications { + fh := mustReadFile(ctx, s, c.URI) + changed[c.URI] = fh + + // Any change to the set of open files causes views to be recomputed. + if c.Action == file.Open || c.Action == file.Close { + checkViews = true + } + + // Any on-disk change to a go.work or go.mod file causes recomputing views. + // + // TODO(rfindley): go.work files need not be named "go.work" -- we need to + // check each view's source to handle the case of an explicit GOWORK value. + // Write a test that fails, and fix this. + if (isGoWork(c.URI) || isGoMod(c.URI)) && (c.Action == file.Save || c.OnDisk) { + checkViews = true + } + + // Any change to the set of supported ports in a file may affect view + // selection. This is perhaps more subtle than it first seems: since the + // algorithm for selecting views considers open files in a deterministic + // order, a change in supported ports may cause a different port to be + // chosen, even if all open files still match an existing View! + // + // We endeavor to avoid that sort of path dependence, so must re-run the + // view selection algorithm whenever any input changes. + // + // However, extracting the build comment is nontrivial, so we don't want to + // pay this cost when e.g. processing a bunch of on-disk changes due to a + // branch change. Be careful to only do this if both files are open Go + // files. 
+ if old, ok := replaced[c.URI]; ok && !checkViews && fileKind(fh) == file.Go { + if new, ok := fh.(*overlay); ok { + if buildComment(old.content) != buildComment(new.content) { + checkViews = true + } + } + } + } + + if checkViews { + // Hack: collect folders from existing views. + // TODO(golang/go#57979): we really should track folders independent of + // Views, but since we always have a default View for each folder, this + // works for now. + var folders []*Folder // preserve folder order + seen := make(map[*Folder]unit) + for _, v := range s.views { + if _, ok := seen[v.folder]; ok { + continue + } + seen[v.folder] = unit{} + folders = append(folders, v.folder) + } + + var openFiles []protocol.DocumentURI + for _, o := range s.Overlays() { + openFiles = append(openFiles, o.URI()) + } + // Sort for determinism. + sort.Slice(openFiles, func(i, j int) bool { + return openFiles[i] < openFiles[j] + }) + + // TODO(rfindley): can we avoid running the go command (go env) + // synchronously to change processing? Can we assume that the env did not + // change, and derive go.work using a combination of the configured + // GOWORK value and filesystem? + defs, err := selectViewDefs(ctx, s, folders, openFiles) + if err != nil { + // Catastrophic failure, equivalent to a failure of session + // initialization and therefore should almost never happen. One + // scenario where this failure mode could occur is if some file + // permissions have changed preventing us from reading go.mod + // files. + // + // TODO(rfindley): consider surfacing this error more loudly. We + // could report a bug, but it's not really a bug. + event.Error(ctx, "selecting new views", err) + } else { + kept := make(map[*View]unit) + var newViews []*View + for _, def := range defs { + var newView *View + // Reuse existing view? 
+ for _, v := range s.views { + if viewDefinitionsEqual(def, v.viewDefinition) { + newView = v + kept[v] = unit{} + break + } + } + if newView == nil { + v, _, release := s.createView(ctx, def) + release() + newView = v + } + newViews = append(newViews, newView) + } + for _, v := range s.views { + if _, ok := kept[v]; !ok { + v.shutdown() + } + } + s.views = newViews + s.viewMap = make(map[protocol.DocumentURI]*View) + } + } + + // We only want to run fast-path diagnostics (i.e. diagnoseChangedFiles) once + // for each changed file, in its best view. + viewsToDiagnose := map[*View][]protocol.DocumentURI{} + for _, mod := range modifications { + v, err := s.viewOfLocked(ctx, mod.URI) + if err != nil { + // bestViewForURI only returns an error in the event of context + // cancellation. Since state changes should occur on an uncancellable + // context, an error here is a bug. + bug.Reportf("finding best view for change: %v", err) + continue + } + if v != nil { + viewsToDiagnose[v] = append(viewsToDiagnose[v], mod.URI) + } + } + + // ...but changes may be relevant to other views, for example if they are + // changes to a shared package. + for _, v := range s.views { + _, release, needsDiagnosis := s.invalidateViewLocked(ctx, v, StateChange{Modifications: modifications, Files: changed}) + release() + + if needsDiagnosis || checkViews { + if _, ok := viewsToDiagnose[v]; !ok { + viewsToDiagnose[v] = nil + } + } + } + + return viewsToDiagnose, nil +} + +// ExpandModificationsToDirectories returns the set of changes with the +// directory changes removed and expanded to include all of the files in +// the directory. 
+func (s *Session) ExpandModificationsToDirectories(ctx context.Context, changes []file.Modification) []file.Modification { + var snapshots []*Snapshot + s.viewMu.Lock() + for _, v := range s.views { + snapshot, release, err := v.Snapshot() + if err != nil { + continue // view is shut down; continue with others + } + defer release() + snapshots = append(snapshots, snapshot) + } + s.viewMu.Unlock() + + // Expand the modification to any file we could care about, which we define + // to be any file observed by any of the snapshots. + // + // There may be other files in the directory, but if we haven't read them yet + // we don't need to invalidate them. + var result []file.Modification + for _, c := range changes { + expanded := make(map[protocol.DocumentURI]bool) + for _, snapshot := range snapshots { + for _, uri := range snapshot.filesInDir(c.URI) { + expanded[uri] = true + } + } + if len(expanded) == 0 { + result = append(result, c) + } else { + for uri := range expanded { + result = append(result, file.Modification{ + URI: uri, + Action: c.Action, + LanguageID: "", + OnDisk: c.OnDisk, + // changes to directories cannot include text or versions + }) + } + } + } + return result +} + +// updateOverlays updates the set of overlays and returns a map of any existing +// overlay values that were replaced. +// +// Precondition: caller holds s.viewMu lock. +// TODO(rfindley): move this to fs_overlay.go. +func (fs *overlayFS) updateOverlays(ctx context.Context, changes []file.Modification) (map[protocol.DocumentURI]*overlay, error) { + fs.mu.Lock() + defer fs.mu.Unlock() + + replaced := make(map[protocol.DocumentURI]*overlay) + for _, c := range changes { + o, ok := fs.overlays[c.URI] + if ok { + replaced[c.URI] = o + } + + // If the file is not opened in an overlay and the change is on disk, + // there's no need to update an overlay. If there is an overlay, we + // may need to update the overlay's saved value. 
+ if !ok && c.OnDisk { + continue + } + + // Determine the file kind on open, otherwise, assume it has been cached. + var kind file.Kind + switch c.Action { + case file.Open: + kind = file.KindForLang(c.LanguageID) + default: + if !ok { + return nil, fmt.Errorf("updateOverlays: modifying unopened overlay %v", c.URI) + } + kind = o.kind + } + + // Closing a file just deletes its overlay. + if c.Action == file.Close { + delete(fs.overlays, c.URI) + continue + } + + // If the file is on disk, check if its content is the same as in the + // overlay. Saves and on-disk file changes don't come with the file's + // content. + text := c.Text + if text == nil && (c.Action == file.Save || c.OnDisk) { + if !ok { + return nil, fmt.Errorf("no known content for overlay for %s", c.Action) + } + text = o.content + } + // On-disk changes don't come with versions. + version := c.Version + if c.OnDisk || c.Action == file.Save { + version = o.version + } + hash := file.HashOf(text) + var sameContentOnDisk bool + switch c.Action { + case file.Delete: + // Do nothing. sameContentOnDisk should be false. + case file.Save: + // Make sure the version and content (if present) is the same. + if false && o.version != version { // Client no longer sends the version + return nil, fmt.Errorf("updateOverlays: saving %s at version %v, currently at %v", c.URI, c.Version, o.version) + } + if c.Text != nil && o.hash != hash { + return nil, fmt.Errorf("updateOverlays: overlay %s changed on save", c.URI) + } + sameContentOnDisk = true + default: + fh := mustReadFile(ctx, fs.delegate, c.URI) + _, readErr := fh.Content() + sameContentOnDisk = (readErr == nil && fh.Identity().Hash == hash) + } + o = &overlay{ + uri: c.URI, + version: version, + content: text, + kind: kind, + hash: hash, + saved: sameContentOnDisk, + } + + // NOTE: previous versions of this code checked here that the overlay had a + // view and file kind (but we don't know why). 
+ + fs.overlays[c.URI] = o + } + + return replaced, nil +} + +func mustReadFile(ctx context.Context, fs file.Source, uri protocol.DocumentURI) file.Handle { + ctx = xcontext.Detach(ctx) + fh, err := fs.ReadFile(ctx, uri) + if err != nil { + // ReadFile cannot fail with an uncancellable context. + bug.Reportf("reading file failed unexpectedly: %v", err) + return brokenFile{uri, err} + } + return fh +} + +// A brokenFile represents an unexpected failure to read a file. +type brokenFile struct { + uri protocol.DocumentURI + err error +} + +func (b brokenFile) URI() protocol.DocumentURI { return b.uri } +func (b brokenFile) Identity() file.Identity { return file.Identity{URI: b.uri} } +func (b brokenFile) SameContentsOnDisk() bool { return false } +func (b brokenFile) Version() int32 { return 0 } +func (b brokenFile) Content() ([]byte, error) { return nil, b.err } + +// FileWatchingGlobPatterns returns a set of glob patterns that the client is +// required to watch for changes, and notify the server of them, in order to +// keep the server's state up to date. +// +// This set includes +// 1. all go.mod and go.work files in the workspace; and +// 2. for each Snapshot, its modules (or directory for ad-hoc views). In +// module mode, this is the set of active modules (and for VS Code, all +// workspace directories within them, due to golang/go#42348). +// +// The watch for workspace go.work and go.mod files in (1) is sufficient to +// capture changes to the repo structure that may affect the set of views. +// Whenever this set changes, we reload the workspace and invalidate memoized +// files. +// +// The watch for workspace directories in (2) should keep each View up to date, +// as it should capture any newly added/modified/deleted Go files. +// +// Patterns are returned as a set of protocol.RelativePatterns, since they can +// always be later translated to glob patterns (i.e. strings) if the client +// lacks relative pattern support. 
By convention, any pattern returned with +// empty baseURI should be served as a glob pattern. +// +// In general, we prefer to serve relative patterns, as they work better on +// most clients that support both, and do not have issues with Windows driver +// letter casing: +// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#relativePattern +// +// TODO(golang/go#57979): we need to reset the memoizedFS when a view changes. +// Consider the case where we incidentally read a file, then it moved outside +// of an active module, and subsequently changed: we would still observe the +// original file state. +func (s *Session) FileWatchingGlobPatterns(ctx context.Context) map[protocol.RelativePattern]unit { + s.viewMu.Lock() + defer s.viewMu.Unlock() + + // Always watch files that may change the set of views. + patterns := map[protocol.RelativePattern]unit{ + {Pattern: "**/*.{mod,work}"}: {}, + } + + for _, view := range s.views { + snapshot, release, err := view.Snapshot() + if err != nil { + continue // view is shut down; continue with others + } + for k, v := range snapshot.fileWatchingGlobPatterns() { + patterns[k] = v + } + release() + } + return patterns +} + +// OrphanedFileDiagnostics reports diagnostics describing why open files have +// no packages or have only command-line-arguments packages. +// +// If the resulting diagnostic is nil, the file is either not orphaned or we +// can't produce a good diagnostic. +// +// The caller must not mutate the result. +func (s *Session) OrphanedFileDiagnostics(ctx context.Context) (map[protocol.DocumentURI][]*Diagnostic, error) { + // Note: diagnostics holds a slice for consistency with other diagnostic + // funcs. 
+ diagnostics := make(map[protocol.DocumentURI][]*Diagnostic) + + byView := make(map[*View][]*overlay) + for _, o := range s.Overlays() { + uri := o.URI() + snapshot, release, err := s.SnapshotOf(ctx, uri) + if err != nil { + // TODO(golang/go#57979): we have to use the .go suffix as an approximation for + // file kind here, because we don't have access to Options if no View was + // matched. + // + // But Options are really a property of Folder, not View, and we could + // match a folder here. + // + // Refactor so that Folders are tracked independently of Views, and use + // the correct options here to get the most accurate file kind. + // + // TODO(golang/go#57979): once we switch entirely to the zeroconfig + // logic, we should use this diagnostic for the fallback case of + // s.views[0] in the ViewOf logic. + if errors.Is(err, errNoViews) { + if strings.HasSuffix(string(uri), ".go") { + if _, rng, ok := orphanedFileDiagnosticRange(ctx, s.parseCache, o); ok { + diagnostics[uri] = []*Diagnostic{{ + URI: uri, + Range: rng, + Severity: protocol.SeverityWarning, + Source: ListError, + Message: fmt.Sprintf("No active builds contain %s: consider opening a new workspace folder containing it", uri.Path()), + }} + } + } + continue + } + return nil, err + } + v := snapshot.View() + release() + byView[v] = append(byView[v], o) + } + + for view, overlays := range byView { + snapshot, release, err := view.Snapshot() + if err != nil { + continue // view is shutting down + } + defer release() + diags, err := snapshot.orphanedFileDiagnostics(ctx, overlays) + if err != nil { + return nil, err + } + for _, d := range diags { + diagnostics[d.URI] = append(diagnostics[d.URI], d) + } + } + return diagnostics, nil +} diff --git a/gopls/internal/cache/session_test.go b/gopls/internal/cache/session_test.go new file mode 100644 index 00000000000..913c3bd1f27 --- /dev/null +++ b/gopls/internal/cache/session_test.go @@ -0,0 +1,405 @@ +// Copyright 2023 The Go Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "context" + "os" + "path" + "path/filepath" + "testing" + + "github.com/google/go-cmp/cmp" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/test/integration/fake" + "golang.org/x/tools/internal/testenv" +) + +func TestZeroConfigAlgorithm(t *testing.T) { + testenv.NeedsExec(t) // executes the Go command + t.Setenv("GOPACKAGESDRIVER", "off") + + type viewSummary struct { + // fields exported for cmp.Diff + Type ViewType + Root string + Env []string + } + + type folderSummary struct { + dir string + options func(dir string) map[string]any // options may refer to the temp dir + } + + includeReplaceInWorkspace := func(string) map[string]any { + return map[string]any{ + "includeReplaceInWorkspace": true, + } + } + + type test struct { + name string + files map[string]string // use a map rather than txtar as file content is tiny + folders []folderSummary + open []string // open files + want []viewSummary + } + + tests := []test{ + // TODO(rfindley): add a test for GOPACKAGESDRIVER. + // Doing so doesn't yet work using options alone (user env is not honored) + + // TODO(rfindley): add a test for degenerate cases, such as missing + // workspace folders (once we decide on the correct behavior). 
+ { + "basic go.work workspace", + map[string]string{ + "go.work": "go 1.18\nuse (\n\t./a\n\t./b\n)\n", + "a/go.mod": "module golang.org/a\ngo 1.18\n", + "b/go.mod": "module golang.org/b\ngo 1.18\n", + }, + []folderSummary{{dir: "."}}, + nil, + []viewSummary{{GoWorkView, ".", nil}}, + }, + { + "basic go.mod workspace", + map[string]string{ + "go.mod": "module golang.org/a\ngo 1.18\n", + }, + []folderSummary{{dir: "."}}, + nil, + []viewSummary{{GoModView, ".", nil}}, + }, + { + "basic GOPATH workspace", + map[string]string{ + "src/golang.org/a/a.go": "package a", + "src/golang.org/b/b.go": "package b", + }, + []folderSummary{{ + dir: "src", + options: func(dir string) map[string]any { + return map[string]any{ + "env": map[string]any{ + "GOPATH": dir, + }, + } + }, + }}, + []string{"src/golang.org/a//a.go", "src/golang.org/b/b.go"}, + []viewSummary{{GOPATHView, "src", nil}}, + }, + { + "basic AdHoc workspace", + map[string]string{ + "foo.go": "package foo", + }, + []folderSummary{{dir: "."}}, + nil, + []viewSummary{{AdHocView, ".", nil}}, + }, + { + "multi-folder workspace", + map[string]string{ + "a/go.mod": "module golang.org/a\ngo 1.18\n", + "b/go.mod": "module golang.org/b\ngo 1.18\n", + }, + []folderSummary{{dir: "a"}, {dir: "b"}}, + nil, + []viewSummary{{GoModView, "a", nil}, {GoModView, "b", nil}}, + }, + { + "multi-module workspace", + map[string]string{ + "a/go.mod": "module golang.org/a\ngo 1.18\n", + "b/go.mod": "module golang.org/b\ngo 1.18\n", + }, + []folderSummary{{dir: "."}}, + nil, + []viewSummary{{AdHocView, ".", nil}}, + }, + { + "zero-config open module", + map[string]string{ + "a/go.mod": "module golang.org/a\ngo 1.18\n", + "a/a.go": "package a", + "b/go.mod": "module golang.org/b\ngo 1.18\n", + "b/b.go": "package b", + }, + []folderSummary{{dir: "."}}, + []string{"a/a.go"}, + []viewSummary{ + {AdHocView, ".", nil}, + {GoModView, "a", nil}, + }, + }, + { + "zero-config open modules", + map[string]string{ + "a/go.mod": "module golang.org/a\ngo 
1.18\n", + "a/a.go": "package a", + "b/go.mod": "module golang.org/b\ngo 1.18\n", + "b/b.go": "package b", + }, + []folderSummary{{dir: "."}}, + []string{"a/a.go", "b/b.go"}, + []viewSummary{ + {AdHocView, ".", nil}, + {GoModView, "a", nil}, + {GoModView, "b", nil}, + }, + }, + { + "unified workspace", + map[string]string{ + "go.work": "go 1.18\nuse (\n\t./a\n\t./b\n)\n", + "a/go.mod": "module golang.org/a\ngo 1.18\n", + "a/a.go": "package a", + "b/go.mod": "module golang.org/b\ngo 1.18\n", + "b/b.go": "package b", + }, + []folderSummary{{dir: "."}}, + []string{"a/a.go", "b/b.go"}, + []viewSummary{{GoWorkView, ".", nil}}, + }, + { + "go.work from env", + map[string]string{ + "nested/go.work": "go 1.18\nuse (\n\t../a\n\t../b\n)\n", + "a/go.mod": "module golang.org/a\ngo 1.18\n", + "a/a.go": "package a", + "b/go.mod": "module golang.org/b\ngo 1.18\n", + "b/b.go": "package b", + }, + []folderSummary{{ + dir: ".", + options: func(dir string) map[string]any { + return map[string]any{ + "env": map[string]any{ + "GOWORK": filepath.Join(dir, "nested", "go.work"), + }, + } + }, + }}, + []string{"a/a.go", "b/b.go"}, + []viewSummary{{GoWorkView, ".", nil}}, + }, + { + "independent module view", + map[string]string{ + "go.work": "go 1.18\nuse (\n\t./a\n)\n", // not using b + "a/go.mod": "module golang.org/a\ngo 1.18\n", + "a/a.go": "package a", + "b/go.mod": "module golang.org/a\ngo 1.18\n", + "b/b.go": "package b", + }, + []folderSummary{{dir: "."}}, + []string{"a/a.go", "b/b.go"}, + []viewSummary{ + {GoWorkView, ".", nil}, + {GoModView, "b", []string{"GOWORK=off"}}, + }, + }, + { + "multiple go.work", + map[string]string{ + "go.work": "go 1.18\nuse (\n\t./a\n\t./b\n)\n", + "a/go.mod": "module golang.org/a\ngo 1.18\n", + "a/a.go": "package a", + "b/go.work": "go 1.18\nuse (\n\t.\n\t./c\n)\n", + "b/go.mod": "module golang.org/b\ngo 1.18\n", + "b/b.go": "package b", + "b/c/go.mod": "module golang.org/c\ngo 1.18\n", + }, + []folderSummary{{dir: "."}}, + []string{"a/a.go", 
"b/b.go", "b/c/c.go"}, + []viewSummary{{GoWorkView, ".", nil}, {GoWorkView, "b", nil}}, + }, + { + "multiple go.work, c unused", + map[string]string{ + "go.work": "go 1.18\nuse (\n\t./a\n\t./b\n)\n", + "a/go.mod": "module golang.org/a\ngo 1.18\n", + "a/a.go": "package a", + "b/go.work": "go 1.18\nuse (\n\t.\n)\n", + "b/go.mod": "module golang.org/b\ngo 1.18\n", + "b/b.go": "package b", + "b/c/go.mod": "module golang.org/c\ngo 1.18\n", + }, + []folderSummary{{dir: "."}}, + []string{"a/a.go", "b/b.go", "b/c/c.go"}, + []viewSummary{{GoWorkView, ".", nil}, {GoModView, "b/c", []string{"GOWORK=off"}}}, + }, + { + "go.mod with nested replace", + map[string]string{ + "go.mod": "module golang.org/a\n require golang.org/b v1.2.3\nreplace example.com/b => ./b", + "a.go": "package a", + "b/go.mod": "module golang.org/b\ngo 1.18\n", + "b/b.go": "package b", + }, + []folderSummary{{dir: ".", options: includeReplaceInWorkspace}}, + []string{"a/a.go", "b/b.go"}, + []viewSummary{{GoModView, ".", nil}}, + }, + { + "go.mod with parent replace, parent folder", + map[string]string{ + "go.mod": "module golang.org/a", + "a.go": "package a", + "b/go.mod": "module golang.org/b\ngo 1.18\nrequire golang.org/a v1.2.3\nreplace golang.org/a => ../", + "b/b.go": "package b", + }, + []folderSummary{{dir: ".", options: includeReplaceInWorkspace}}, + []string{"a/a.go", "b/b.go"}, + []viewSummary{{GoModView, ".", nil}, {GoModView, "b", nil}}, + }, + { + "go.mod with multiple replace", + map[string]string{ + "go.mod": ` +module golang.org/root + +require ( + golang.org/a v1.2.3 + golang.org/b v1.2.3 + golang.org/c v1.2.3 +) + +replace ( + golang.org/b => ./b + golang.org/c => ./c + // Note: d is not replaced +) +`, + "a.go": "package a", + "b/go.mod": "module golang.org/b\ngo 1.18", + "b/b.go": "package b", + "c/go.mod": "module golang.org/c\ngo 1.18", + "c/c.go": "package c", + "d/go.mod": "module golang.org/d\ngo 1.18", + "d/d.go": "package d", + }, + []folderSummary{{dir: ".", options: 
includeReplaceInWorkspace}}, + []string{"b/b.go", "c/c.go", "d/d.go"}, + []viewSummary{{GoModView, ".", nil}, {GoModView, "d", nil}}, + }, + { + "go.mod with replace outside the workspace", + map[string]string{ + "go.mod": "module golang.org/a\ngo 1.18", + "a.go": "package a", + "b/go.mod": "module golang.org/b\ngo 1.18\nrequire golang.org/a v1.2.3\nreplace golang.org/a => ../", + "b/b.go": "package b", + }, + []folderSummary{{dir: "b"}}, + []string{"a.go", "b/b.go"}, + []viewSummary{{GoModView, "b", nil}}, + }, + { + "go.mod with replace directive; workspace replace off", + map[string]string{ + "go.mod": "module golang.org/a\n require golang.org/b v1.2.3\nreplace example.com/b => ./b", + "a.go": "package a", + "b/go.mod": "module golang.org/b\ngo 1.18\n", + "b/b.go": "package b", + }, + []folderSummary{{ + dir: ".", + options: func(string) map[string]any { + return map[string]any{ + "includeReplaceInWorkspace": false, + } + }, + }}, + []string{"a/a.go", "b/b.go"}, + []viewSummary{{GoModView, ".", nil}, {GoModView, "b", nil}}, + }, + } + + for _, test := range tests { + ctx := context.Background() + t.Run(test.name, func(t *testing.T) { + dir := writeFiles(t, test.files) + rel := fake.RelativeTo(dir) + fs := newMemoizedFS() + + toURI := func(path string) protocol.DocumentURI { + return protocol.URIFromPath(rel.AbsPath(path)) + } + + var folders []*Folder + for _, f := range test.folders { + opts := settings.DefaultOptions() + if f.options != nil { + results := settings.SetOptions(opts, f.options(dir)) + for _, r := range results { + if r.Error != nil { + t.Fatalf("setting option %v: %v", r.Name, r.Error) + } + } + } + env, err := FetchGoEnv(ctx, toURI(f.dir), opts) + if err != nil { + t.Fatalf("FetchGoEnv failed: %v", err) + } + folders = append(folders, &Folder{ + Dir: toURI(f.dir), + Name: path.Base(f.dir), + Options: opts, + Env: env, + }) + } + + var openFiles []protocol.DocumentURI + for _, path := range test.open { + openFiles = append(openFiles, toURI(path)) 
+ } + + defs, err := selectViewDefs(ctx, fs, folders, openFiles) + if err != nil { + t.Fatal(err) + } + var got []viewSummary + for _, def := range defs { + got = append(got, viewSummary{ + Type: def.Type(), + Root: rel.RelPath(def.root.Path()), + Env: def.EnvOverlay(), + }) + } + if diff := cmp.Diff(test.want, got); diff != "" { + t.Errorf("selectViews() mismatch (-want +got):\n%s", diff) + } + }) + } +} + +// TODO(rfindley): this function could be meaningfully factored with the +// various other test helpers of this nature. +func writeFiles(t *testing.T, files map[string]string) string { + root := t.TempDir() + + // This unfortunate step is required because gopls output + // expands symbolic links in its input file names (arguably it + // should not), and on macOS the temp dir is in /var -> private/var. + root, err := filepath.EvalSymlinks(root) + if err != nil { + t.Fatal(err) + } + + for name, content := range files { + filename := filepath.Join(root, name) + if err := os.MkdirAll(filepath.Dir(filename), 0777); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filename, []byte(content), 0666); err != nil { + t.Fatal(err) + } + } + return root +} diff --git a/gopls/internal/cache/snapshot.go b/gopls/internal/cache/snapshot.go new file mode 100644 index 00000000000..e3f57eedfd7 --- /dev/null +++ b/gopls/internal/cache/snapshot.go @@ -0,0 +1,2356 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cache + +import ( + "bytes" + "context" + "errors" + "fmt" + "go/ast" + "go/build/constraint" + "go/parser" + "go/token" + "go/types" + "io" + "os" + "path" + "path/filepath" + "regexp" + "runtime" + "sort" + "strconv" + "strings" + "sync" + + "golang.org/x/sync/errgroup" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/types/objectpath" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/methodsets" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/cache/typerefs" + "golang.org/x/tools/gopls/internal/cache/xrefs" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/filecache" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/constraints" + "golang.org/x/tools/gopls/internal/util/immutable" + "golang.org/x/tools/gopls/internal/util/pathutil" + "golang.org/x/tools/gopls/internal/util/persistent" + "golang.org/x/tools/gopls/internal/util/slices" + "golang.org/x/tools/gopls/internal/vulncheck" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/label" + "golang.org/x/tools/internal/event/tag" + "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/memoize" + "golang.org/x/tools/internal/packagesinternal" + "golang.org/x/tools/internal/typesinternal" +) + +// A Snapshot represents the current state for a given view. +// +// It is first and foremost an idempotent implementation of file.Source whose +// ReadFile method returns consistent information about the existence and +// content of each file throughout its lifetime. +// +// However, the snapshot also manages additional state (such as parsed files +// and packages) that are derived from file content. 
+// +// Snapshots are responsible for bookkeeping and invalidation of this state, +// implemented in Snapshot.clone. +type Snapshot struct { + // sequenceID is the monotonically increasing ID of this snapshot within its View. + // + // Sequence IDs for Snapshots from different Views cannot be compared. + sequenceID uint64 + + // TODO(rfindley): the snapshot holding a reference to the view poses + // lifecycle problems: a view may be shut down and waiting for work + // associated with this snapshot to complete. While most accesses of the view + // are benign (options or workspace information), this is not formalized and + // it is wrong for the snapshot to use a shutdown view. + // + // Fix this by passing options and workspace information to the snapshot, + // both of which should be immutable for the snapshot. + view *View + + cancel func() + backgroundCtx context.Context + + store *memoize.Store // cache of handles shared by all snapshots + + refMu sync.Mutex + + // refcount holds the number of outstanding references to the current + // Snapshot. When refcount is decremented to 0, the Snapshot maps are + // destroyed and the done function is called. + // + // TODO(rfindley): use atomic.Int32 on Go 1.19+. + refcount int + done func() // for implementing Session.Shutdown + + // mu guards all of the maps in the snapshot, as well as the builtin URI and + // initialized. + mu sync.Mutex + + // initialized reports whether the snapshot has been initialized. Concurrent + // initialization is guarded by the view.initializationSema. Each snapshot is + // initialized at most once: concurrent initialization is guarded by + // view.initializationSema. + initialized bool + + // initialErr holds the last error resulting from initialization. If + // initialization fails, we only retry when the workspace modules change, + // to avoid too many go/packages calls. + // If initialized is false, initialErr stil holds the error resulting from + // the previous initialization. 
+ // TODO(rfindley): can we unify the lifecycle of initialized and initialErr. + initialErr *InitializationError + + // builtin is the location of builtin.go in GOROOT. + // + // TODO(rfindley): would it make more sense to eagerly parse builtin, and + // instead store a *parsego.File here? + builtin protocol.DocumentURI + + // meta holds loaded metadata. + // + // meta is guarded by mu, but the Graph itself is immutable. + // + // TODO(rfindley): in many places we hold mu while operating on meta, even + // though we only need to hold mu while reading the pointer. + meta *metadata.Graph + + // files maps file URIs to their corresponding FileHandles. + // It may invalidated when a file's content changes. + files *fileMap + + // symbolizeHandles maps each file URI to a handle for the future + // result of computing the symbols declared in that file. + symbolizeHandles *persistent.Map[protocol.DocumentURI, *memoize.Promise] // *memoize.Promise[symbolizeResult] + + // packages maps a packageKey to a *packageHandle. + // It may be invalidated when a file's content changes. + // + // Invariants to preserve: + // - packages.Get(id).meta == meta.metadata[id] for all ids + // - if a package is in packages, then all of its dependencies should also + // be in packages, unless there is a missing import + packages *persistent.Map[PackageID, *packageHandle] + + // activePackages maps a package ID to a memoized active package, or nil if + // the package is known not to be open. + // + // IDs not contained in the map are not known to be open or not open. + activePackages *persistent.Map[PackageID, *Package] + + // workspacePackages contains the workspace's packages, which are loaded + // when the view is created. It does not contain intermediate test variants. 
+ workspacePackages immutable.Map[PackageID, PackagePath] + + // shouldLoad tracks packages that need to be reloaded, mapping a PackageID + // to the package paths that should be used to reload it + // + // When we try to load a package, we clear it from the shouldLoad map + // regardless of whether the load succeeded, to prevent endless loads. + shouldLoad *persistent.Map[PackageID, []PackagePath] + + // unloadableFiles keeps track of files that we've failed to load. + unloadableFiles *persistent.Set[protocol.DocumentURI] + + // TODO(rfindley): rename the handles below to "promises". A promise is + // different from a handle (we mutate the package handle.) + + // parseModHandles keeps track of any parseModHandles for the snapshot. + // The handles need not refer to only the view's go.mod file. + parseModHandles *persistent.Map[protocol.DocumentURI, *memoize.Promise] // *memoize.Promise[parseModResult] + + // parseWorkHandles keeps track of any parseWorkHandles for the snapshot. + // The handles need not refer to only the view's go.work file. + parseWorkHandles *persistent.Map[protocol.DocumentURI, *memoize.Promise] // *memoize.Promise[parseWorkResult] + + // Preserve go.mod-related handles to avoid garbage-collecting the results + // of various calls to the go command. The handles need not refer to only + // the view's go.mod file. + modTidyHandles *persistent.Map[protocol.DocumentURI, *memoize.Promise] // *memoize.Promise[modTidyResult] + modWhyHandles *persistent.Map[protocol.DocumentURI, *memoize.Promise] // *memoize.Promise[modWhyResult] + modVulnHandles *persistent.Map[protocol.DocumentURI, *memoize.Promise] // *memoize.Promise[modVulnResult] + + // importGraph holds a shared import graph to use for type-checking. Adding + // more packages to this import graph can speed up type checking, at the + // expense of in-use memory. + // + // See getImportGraph for additional documentation. 
+ importGraphDone chan struct{} // closed when importGraph is set; may be nil + importGraph *importGraph // copied from preceding snapshot and re-evaluated + + // pkgIndex is an index of package IDs, for efficient storage of typerefs. + pkgIndex *typerefs.PackageIndex + + // moduleUpgrades tracks known upgrades for module paths in each modfile. + // Each modfile has a map of module name to upgrade version. + moduleUpgrades *persistent.Map[protocol.DocumentURI, map[string]string] + + // vulns maps each go.mod file's URI to its known vulnerabilities. + vulns *persistent.Map[protocol.DocumentURI, *vulncheck.Result] + + // gcOptimizationDetails describes the packages for which we want + // optimization details to be included in the diagnostics. + gcOptimizationDetails map[metadata.PackageID]unit +} + +var _ memoize.RefCounted = (*Snapshot)(nil) // snapshots are reference-counted + +func (s *Snapshot) awaitPromise(ctx context.Context, p *memoize.Promise) (interface{}, error) { + return p.Get(ctx, s) +} + +// Acquire prevents the snapshot from being destroyed until the returned +// function is called. +// +// (s.Acquire().release() could instead be expressed as a pair of +// method calls s.IncRef(); s.DecRef(). The latter has the advantage +// that the DecRefs are fungible and don't require holding anything in +// addition to the refcounted object s, but paradoxically that is also +// an advantage of the current approach, which forces the caller to +// consider the release function at every stage, making a reference +// leak more obvious.) +func (s *Snapshot) Acquire() func() { + s.refMu.Lock() + defer s.refMu.Unlock() + assert(s.refcount > 0, "non-positive refs") + s.refcount++ + + return s.decref +} + +// decref should only be referenced by Acquire, and by View when it frees its +// reference to View.snapshot. 
+func (s *Snapshot) decref() { + s.refMu.Lock() + defer s.refMu.Unlock() + + assert(s.refcount > 0, "non-positive refs") + s.refcount-- + if s.refcount == 0 { + s.packages.Destroy() + s.activePackages.Destroy() + s.files.destroy() + s.symbolizeHandles.Destroy() + s.parseModHandles.Destroy() + s.parseWorkHandles.Destroy() + s.modTidyHandles.Destroy() + s.modVulnHandles.Destroy() + s.modWhyHandles.Destroy() + s.unloadableFiles.Destroy() + s.moduleUpgrades.Destroy() + s.vulns.Destroy() + s.done() + } +} + +// SequenceID is the sequence id of this snapshot within its containing +// view. +// +// Relative to their view sequence ids are monotonically increasing, but this +// does not hold globally: when new views are created their initial snapshot +// has sequence ID 0. +func (s *Snapshot) SequenceID() uint64 { + return s.sequenceID +} + +// SnapshotLabels returns a new slice of labels that should be used for events +// related to a snapshot. +func (s *Snapshot) Labels() []label.Label { + return []label.Label{tag.Snapshot.Of(s.SequenceID()), tag.Directory.Of(s.Folder())} +} + +// Folder returns the folder at the base of this snapshot. +func (s *Snapshot) Folder() protocol.DocumentURI { + return s.view.folder.Dir +} + +// View returns the View associated with this snapshot. +func (s *Snapshot) View() *View { + return s.view +} + +// FileKind returns the kind of a file. +// +// We can't reliably deduce the kind from the file name alone, +// as some editors can be told to interpret a buffer as +// language different from the file name heuristic, e.g. that +// an .html file actually contains Go "html/template" syntax, +// or even that a .go file contains Python. +func (s *Snapshot) FileKind(fh file.Handle) file.Kind { + if k := fileKind(fh); k != file.UnknownKind { + return k + } + fext := filepath.Ext(fh.URI().Path()) + exts := s.Options().TemplateExtensions + for _, ext := range exts { + if fext == ext || fext == "."+ext { + return file.Tmpl + } + } + + // and now what? 
This should never happen, but it does for cgo before go1.15 + // + // TODO(rfindley): this doesn't look right. We should default to UnknownKind. + // Also, I don't understand the comment above, though I'd guess before go1.15 + // we encountered cgo files without the .go extension. + return file.Go +} + +// fileKind returns the default file kind for a file, before considering +// template file extensions. See [Snapshot.FileKind]. +func fileKind(fh file.Handle) file.Kind { + // The kind of an unsaved buffer comes from the + // TextDocumentItem.LanguageID field in the didChange event, + // not from the file name. They may differ. + if o, ok := fh.(*overlay); ok { + if o.kind != file.UnknownKind { + return o.kind + } + } + + fext := filepath.Ext(fh.URI().Path()) + switch fext { + case ".go": + return file.Go + case ".mod": + return file.Mod + case ".sum": + return file.Sum + case ".work": + return file.Work + } + return file.UnknownKind +} + +// Options returns the options associated with this snapshot. +func (s *Snapshot) Options() *settings.Options { + return s.view.folder.Options +} + +// BackgroundContext returns a context used for all background processing +// on behalf of this snapshot. +func (s *Snapshot) BackgroundContext() context.Context { + return s.backgroundCtx +} + +// Templates returns the .tmpl files. +func (s *Snapshot) Templates() map[protocol.DocumentURI]file.Handle { + s.mu.Lock() + defer s.mu.Unlock() + + tmpls := map[protocol.DocumentURI]file.Handle{} + s.files.foreach(func(k protocol.DocumentURI, fh file.Handle) { + if s.FileKind(fh) == file.Tmpl { + tmpls[k] = fh + } + }) + return tmpls +} + +// config returns the configuration used for the snapshot's interaction with +// the go/packages API. It uses the given working directory. +// +// TODO(rstambler): go/packages requires that we do not provide overlays for +// multiple modules in on config, so buildOverlay needs to filter overlays by +// module. 
+func (s *Snapshot) config(ctx context.Context, inv *gocommand.Invocation) *packages.Config { + + cfg := &packages.Config{ + Context: ctx, + Dir: inv.WorkingDir, + Env: inv.Env, + BuildFlags: inv.BuildFlags, + Mode: packages.NeedName | + packages.NeedFiles | + packages.NeedCompiledGoFiles | + packages.NeedImports | + packages.NeedDeps | + packages.NeedTypesSizes | + packages.NeedModule | + packages.NeedEmbedFiles | + packages.LoadMode(packagesinternal.DepsErrors) | + packages.LoadMode(packagesinternal.ForTest), + Fset: nil, // we do our own parsing + Overlay: s.buildOverlay(), + ParseFile: func(*token.FileSet, string, []byte) (*ast.File, error) { + panic("go/packages must not be used to parse files") + }, + Logf: func(format string, args ...interface{}) { + if s.Options().VerboseOutput { + event.Log(ctx, fmt.Sprintf(format, args...)) + } + }, + Tests: true, + } + packagesinternal.SetModFile(cfg, inv.ModFile) + packagesinternal.SetModFlag(cfg, inv.ModFlag) + // We want to type check cgo code if go/types supports it. + if typesinternal.SetUsesCgo(&types.Config{}) { + cfg.Mode |= packages.LoadMode(packagesinternal.TypecheckCgo) + } + return cfg +} + +// InvocationFlags represents the settings of a particular go command invocation. +// It is a mode, plus a set of flag bits. +type InvocationFlags int + +const ( + // Normal is appropriate for commands that might be run by a user and don't + // deliberately modify go.mod files, e.g. `go test`. + Normal InvocationFlags = iota + // WriteTemporaryModFile is for commands that need information from a + // modified version of the user's go.mod file, e.g. `go mod tidy` used to + // generate diagnostics. + WriteTemporaryModFile + // LoadWorkspace is for packages.Load, and other operations that should + // consider the whole workspace at once. + LoadWorkspace + // AllowNetwork is a flag bit that indicates the invocation should be + // allowed to access the network. 
+ AllowNetwork InvocationFlags = 1 << 10 +) + +func (m InvocationFlags) Mode() InvocationFlags { + return m & (AllowNetwork - 1) +} + +func (m InvocationFlags) AllowNetwork() bool { + return m&AllowNetwork != 0 +} + +// RunGoCommandDirect runs the given `go` command. Verb, Args, and +// WorkingDir must be specified. +func (s *Snapshot) RunGoCommandDirect(ctx context.Context, mode InvocationFlags, inv *gocommand.Invocation) (*bytes.Buffer, error) { + _, inv, cleanup, err := s.goCommandInvocation(ctx, mode, inv) + if err != nil { + return nil, err + } + defer cleanup() + + return s.view.gocmdRunner.Run(ctx, *inv) +} + +// RunGoCommandPiped runs the given `go` command, writing its output +// to stdout and stderr. Verb, Args, and WorkingDir must be specified. +// +// RunGoCommandPiped runs the command serially using gocommand.RunPiped, +// enforcing that this command executes exclusively to other commands on the +// server. +func (s *Snapshot) RunGoCommandPiped(ctx context.Context, mode InvocationFlags, inv *gocommand.Invocation, stdout, stderr io.Writer) error { + _, inv, cleanup, err := s.goCommandInvocation(ctx, mode, inv) + if err != nil { + return err + } + defer cleanup() + return s.view.gocmdRunner.RunPiped(ctx, *inv, stdout, stderr) +} + +// RunGoModUpdateCommands runs a series of `go` commands that updates the go.mod +// and go.sum file for wd, and returns their updated contents. +// +// TODO(rfindley): the signature of RunGoModUpdateCommands is very confusing. +// Simplify it. 
+func (s *Snapshot) RunGoModUpdateCommands(ctx context.Context, wd string, run func(invoke func(...string) (*bytes.Buffer, error)) error) ([]byte, []byte, error) { + flags := WriteTemporaryModFile | AllowNetwork + tmpURI, inv, cleanup, err := s.goCommandInvocation(ctx, flags, &gocommand.Invocation{WorkingDir: wd}) + if err != nil { + return nil, nil, err + } + defer cleanup() + invoke := func(args ...string) (*bytes.Buffer, error) { + inv.Verb = args[0] + inv.Args = args[1:] + return s.view.gocmdRunner.Run(ctx, *inv) + } + if err := run(invoke); err != nil { + return nil, nil, err + } + if flags.Mode() != WriteTemporaryModFile { + return nil, nil, nil + } + var modBytes, sumBytes []byte + modBytes, err = os.ReadFile(tmpURI.Path()) + if err != nil && !os.IsNotExist(err) { + return nil, nil, err + } + sumBytes, err = os.ReadFile(strings.TrimSuffix(tmpURI.Path(), ".mod") + ".sum") + if err != nil && !os.IsNotExist(err) { + return nil, nil, err + } + return modBytes, sumBytes, nil +} + +// goCommandInvocation populates inv with configuration for running go commands on the snapshot. +// +// TODO(rfindley): refactor this function to compose the required configuration +// explicitly, rather than implicitly deriving it from flags and inv. +// +// TODO(adonovan): simplify cleanup mechanism. It's hard to see, but +// it used only after call to tempModFile. +func (s *Snapshot) goCommandInvocation(ctx context.Context, flags InvocationFlags, inv *gocommand.Invocation) (tmpURI protocol.DocumentURI, updatedInv *gocommand.Invocation, cleanup func(), err error) { + allowModfileModificationOption := s.Options().AllowModfileModifications + allowNetworkOption := s.Options().AllowImplicitNetworkAccess + + // TODO(rfindley): it's not clear that this is doing the right thing. + // Should inv.Env really overwrite view.options? Should s.view.envOverlay + // overwrite inv.Env? (Do we ever invoke this with a non-empty inv.Env?) 
+ // + // We should survey existing uses and write down rules for how env is + // applied. + inv.Env = slices.Concat( + os.Environ(), + s.Options().EnvSlice(), + inv.Env, + []string{"GO111MODULE=" + s.view.adjustedGO111MODULE()}, + s.view.EnvOverlay(), + ) + inv.BuildFlags = append([]string{}, s.Options().BuildFlags...) + cleanup = func() {} // fallback + + // All logic below is for module mode. + if len(s.view.workspaceModFiles) == 0 { + return "", inv, cleanup, nil + } + + mode, allowNetwork := flags.Mode(), flags.AllowNetwork() + if !allowNetwork && !allowNetworkOption { + inv.Env = append(inv.Env, "GOPROXY=off") + } + + // What follows is rather complicated logic for how to actually run the go + // command. A word of warning: this is the result of various incremental + // features added to gopls, and varying behavior of the Go command across Go + // versions. It can surely be cleaned up significantly, but tread carefully. + // + // Roughly speaking we need to resolve four things: + // - the working directory. + // - the -mod flag + // - the -modfile flag + // + // These are dependent on a number of factors: whether we need to run in a + // synthetic workspace, whether flags are supported at the current go + // version, and what we're actually trying to achieve (the + // InvocationFlags). + // + // TODO(rfindley): should we set -overlays here? + + const mutableModFlag = "mod" + + // If the mod flag isn't set, populate it based on the mode and workspace. + // + // (As noted in various TODOs throughout this function, this is very + // confusing and not obviously correct, but tests pass and we will eventually + // rewrite this entire function.) 
+ if inv.ModFlag == "" { + switch mode { + case LoadWorkspace, Normal: + if allowModfileModificationOption { + inv.ModFlag = mutableModFlag + } + case WriteTemporaryModFile: + inv.ModFlag = mutableModFlag + // -mod must be readonly when using go.work files - see issue #48941 + inv.Env = append(inv.Env, "GOWORK=off") + } + } + + // TODO(rfindley): if inv.ModFlag was already set to "mod", we may not have + // set GOWORK=off here. But that doesn't happen. Clean up this entire API so + // that we don't have this mutation of the invocation, which is quite hard to + // follow. + + // If the invocation needs to mutate the modfile, we must use a temp mod. + if inv.ModFlag == mutableModFlag { + var modURI protocol.DocumentURI + // Select the module context to use. + // If we're type checking, we need to use the workspace context, meaning + // the main (workspace) module. Otherwise, we should use the module for + // the passed-in working dir. + if mode == LoadWorkspace { + // TODO(rfindley): this seems unnecessary and overly complicated. Remove + // this along with 'allowModFileModifications'. + if s.view.typ == GoModView { + modURI = s.view.gomod + } + } else { + modURI = s.GoModForFile(protocol.URIFromPath(inv.WorkingDir)) + } + + var modContent []byte + if modURI != "" { + modFH, err := s.ReadFile(ctx, modURI) + if err != nil { + return "", nil, cleanup, err + } + modContent, err = modFH.Content() + if err != nil { + return "", nil, cleanup, err + } + } + if modURI == "" { + return "", nil, cleanup, fmt.Errorf("no go.mod file found in %s", inv.WorkingDir) + } + // Use the go.sum if it happens to be available. 
+ gosum := s.goSum(ctx, modURI) + tmpURI, cleanup, err = tempModFile(modURI, modContent, gosum) + if err != nil { + return "", nil, cleanup, err + } + inv.ModFile = tmpURI.Path() + } + + return tmpURI, inv, cleanup, nil +} + +func (s *Snapshot) buildOverlay() map[string][]byte { + overlays := make(map[string][]byte) + for _, overlay := range s.Overlays() { + if overlay.saved { + continue + } + // TODO(rfindley): previously, there was a todo here to make sure we don't + // send overlays outside of the current view. IMO we should instead make + // sure this doesn't matter. + overlays[overlay.URI().Path()] = overlay.content + } + return overlays +} + +// Overlays returns the set of overlays at this snapshot. +// +// Note that this may differ from the set of overlays on the server, if the +// snapshot observed a historical state. +func (s *Snapshot) Overlays() []*overlay { + s.mu.Lock() + defer s.mu.Unlock() + + return s.files.getOverlays() +} + +// Package data kinds, identifying various package data that may be stored in +// the file cache. +const ( + xrefsKind = "xrefs" + methodSetsKind = "methodsets" + exportDataKind = "export" + diagnosticsKind = "diagnostics" + typerefsKind = "typerefs" +) + +// PackageDiagnostics returns diagnostics for files contained in specified +// packages. +// +// If these diagnostics cannot be loaded from cache, the requested packages +// may be type-checked. 
+func (s *Snapshot) PackageDiagnostics(ctx context.Context, ids ...PackageID) (map[protocol.DocumentURI][]*Diagnostic, error) { + ctx, done := event.Start(ctx, "cache.snapshot.PackageDiagnostics") + defer done() + + var mu sync.Mutex + perFile := make(map[protocol.DocumentURI][]*Diagnostic) + collect := func(diags []*Diagnostic) { + mu.Lock() + defer mu.Unlock() + for _, diag := range diags { + perFile[diag.URI] = append(perFile[diag.URI], diag) + } + } + pre := func(_ int, ph *packageHandle) bool { + data, err := filecache.Get(diagnosticsKind, ph.key) + if err == nil { // hit + collect(ph.loadDiagnostics) + collect(decodeDiagnostics(data)) + return false + } else if err != filecache.ErrNotFound { + event.Error(ctx, "reading diagnostics from filecache", err) + } + return true + } + post := func(_ int, pkg *Package) { + collect(pkg.loadDiagnostics) + collect(pkg.pkg.diagnostics) + } + return perFile, s.forEachPackage(ctx, ids, pre, post) +} + +// References returns cross-reference indexes for the specified packages. +// +// If these indexes cannot be loaded from cache, the requested packages may +// be type-checked. +func (s *Snapshot) References(ctx context.Context, ids ...PackageID) ([]xrefIndex, error) { + ctx, done := event.Start(ctx, "cache.snapshot.References") + defer done() + + indexes := make([]xrefIndex, len(ids)) + pre := func(i int, ph *packageHandle) bool { + data, err := filecache.Get(xrefsKind, ph.key) + if err == nil { // hit + indexes[i] = xrefIndex{mp: ph.mp, data: data} + return false + } else if err != filecache.ErrNotFound { + event.Error(ctx, "reading xrefs from filecache", err) + } + return true + } + post := func(i int, pkg *Package) { + indexes[i] = xrefIndex{mp: pkg.metadata, data: pkg.pkg.xrefs()} + } + return indexes, s.forEachPackage(ctx, ids, pre, post) +} + +// An xrefIndex is a helper for looking up references in a given package. 
+type xrefIndex struct { + mp *metadata.Package + data []byte +} + +func (index xrefIndex) Lookup(targets map[PackagePath]map[objectpath.Path]struct{}) []protocol.Location { + return xrefs.Lookup(index.mp, index.data, targets) +} + +// MethodSets returns method-set indexes for the specified packages. +// +// If these indexes cannot be loaded from cache, the requested packages may +// be type-checked. +func (s *Snapshot) MethodSets(ctx context.Context, ids ...PackageID) ([]*methodsets.Index, error) { + ctx, done := event.Start(ctx, "cache.snapshot.MethodSets") + defer done() + + indexes := make([]*methodsets.Index, len(ids)) + pre := func(i int, ph *packageHandle) bool { + data, err := filecache.Get(methodSetsKind, ph.key) + if err == nil { // hit + indexes[i] = methodsets.Decode(data) + return false + } else if err != filecache.ErrNotFound { + event.Error(ctx, "reading methodsets from filecache", err) + } + return true + } + post := func(i int, pkg *Package) { + indexes[i] = pkg.pkg.methodsets() + } + return indexes, s.forEachPackage(ctx, ids, pre, post) +} + +// MetadataForFile returns a new slice containing metadata for each +// package containing the Go file identified by uri, ordered by the +// number of CompiledGoFiles (i.e. "narrowest" to "widest" package), +// and secondarily by IsIntermediateTestVariant (false < true). +// The result may include tests and intermediate test variants of +// importable packages. +// It returns an error if the context was cancelled. +func (s *Snapshot) MetadataForFile(ctx context.Context, uri protocol.DocumentURI) ([]*metadata.Package, error) { + if s.view.typ == AdHocView { + // As described in golang/go#57209, in ad-hoc workspaces (where we load ./ + // rather than ./...), preempting the directory load with file loads can + // lead to an inconsistent outcome, where certain files are loaded with + // command-line-arguments packages and others are loaded only in the ad-hoc + // package. 
Therefore, ensure that the workspace is loaded before doing any + // file loads. + if err := s.awaitLoaded(ctx); err != nil { + return nil, err + } + } + + s.mu.Lock() + + // Start with the set of package associations derived from the last load. + ids := s.meta.IDs[uri] + + shouldLoad := false // whether any packages containing uri are marked 'shouldLoad' + for _, id := range ids { + if pkgs, _ := s.shouldLoad.Get(id); len(pkgs) > 0 { + shouldLoad = true + } + } + + // Check if uri is known to be unloadable. + unloadable := s.unloadableFiles.Contains(uri) + + s.mu.Unlock() + + // Reload if loading is likely to improve the package associations for uri: + // - uri is not contained in any valid packages + // - ...or one of the packages containing uri is marked 'shouldLoad' + // - ...but uri is not unloadable + if (shouldLoad || len(ids) == 0) && !unloadable { + scope := fileLoadScope(uri) + err := s.load(ctx, false, scope) + + // + // Return the context error here as the current operation is no longer + // valid. + if err != nil { + // Guard against failed loads due to context cancellation. We don't want + // to mark loads as completed if they failed due to context cancellation. + if ctx.Err() != nil { + return nil, ctx.Err() + } + + // Don't return an error here, as we may still return stale IDs. + // Furthermore, the result of MetadataForFile should be consistent upon + // subsequent calls, even if the file is marked as unloadable. + if !errors.Is(err, errNoPackages) { + event.Error(ctx, "MetadataForFile", err) + } + } + + // We must clear scopes after loading. + // + // TODO(rfindley): unlike reloadWorkspace, this is simply marking loaded + // packages as loaded. We could do this from snapshot.load and avoid + // raciness. + s.clearShouldLoad(scope) + } + + // Retrieve the metadata. 
+ s.mu.Lock() + defer s.mu.Unlock() + ids = s.meta.IDs[uri] + metas := make([]*metadata.Package, len(ids)) + for i, id := range ids { + metas[i] = s.meta.Packages[id] + if metas[i] == nil { + panic("nil metadata") + } + } + // Metadata is only ever added by loading, + // so if we get here and still have + // no IDs, uri is unloadable. + if !unloadable && len(ids) == 0 { + s.unloadableFiles.Add(uri) + } + + // Sort packages "narrowest" to "widest" (in practice: + // non-tests before tests), and regular packages before + // their intermediate test variants (which have the same + // files but different imports). + sort.Slice(metas, func(i, j int) bool { + x, y := metas[i], metas[j] + xfiles, yfiles := len(x.CompiledGoFiles), len(y.CompiledGoFiles) + if xfiles != yfiles { + return xfiles < yfiles + } + return boolLess(x.IsIntermediateTestVariant(), y.IsIntermediateTestVariant()) + }) + + return metas, nil +} + +func boolLess(x, y bool) bool { return !x && y } // false < true + +// ReverseDependencies returns a new mapping whose entries are +// the ID and Metadata of each package in the workspace that +// directly or transitively depend on the package denoted by id, +// excluding id itself. +func (s *Snapshot) ReverseDependencies(ctx context.Context, id PackageID, transitive bool) (map[PackageID]*metadata.Package, error) { + if err := s.awaitLoaded(ctx); err != nil { + return nil, err + } + + meta := s.MetadataGraph() + var rdeps map[PackageID]*metadata.Package + if transitive { + rdeps = meta.ReverseReflexiveTransitiveClosure(id) + + // Remove the original package ID from the map. + // (Callers all want irreflexivity but it's easier + // to compute reflexively then subtract.) 
+	delete(rdeps, id) + + } else { + // direct reverse dependencies + rdeps = make(map[PackageID]*metadata.Package) + for _, rdepID := range meta.ImportedBy[id] { + if rdep := meta.Packages[rdepID]; rdep != nil { + rdeps[rdepID] = rdep + } + } + } + + return rdeps, nil +} + +// -- Active package tracking -- +// +// We say a package is "active" if any of its files are open. +// This is an optimization: the "active" concept is an +// implementation detail of the cache and is not exposed +// in the source or Snapshot API. +// After type-checking we keep active packages in memory. +// The activePackages persistent map does bookkeeping for +// the set of active packages. + +// getActivePackage returns the memoized active package for id, if it exists. +// If id is not active or has not yet been type-checked, it returns nil. +func (s *Snapshot) getActivePackage(id PackageID) *Package { + s.mu.Lock() + defer s.mu.Unlock() + + if value, ok := s.activePackages.Get(id); ok { + return value + } + return nil +} + +// setActivePackage checks if pkg is active, and if so either records it in +// the active packages map or returns the existing memoized active package for id. +func (s *Snapshot) setActivePackage(id PackageID, pkg *Package) { + s.mu.Lock() + defer s.mu.Unlock() + + if _, ok := s.activePackages.Get(id); ok { + return // already memoized + } + + if containsOpenFileLocked(s, pkg.Metadata()) { + s.activePackages.Set(id, pkg, nil) + } else { + s.activePackages.Set(id, (*Package)(nil), nil) // remember that pkg is not open + } +} + +func (s *Snapshot) resetActivePackagesLocked() { + s.activePackages.Destroy() + s.activePackages = new(persistent.Map[PackageID, *Package]) +} + +// See Session.FileWatchingGlobPatterns for a description of gopls' file +// watching heuristic. +func (s *Snapshot) fileWatchingGlobPatterns() map[protocol.RelativePattern]unit { + // Always watch files that may change the view definition. 
+	patterns := make(map[protocol.RelativePattern]unit) + + // If GOWORK is outside the folder, ensure we are watching it. + if s.view.gowork != "" && !s.view.folder.Dir.Encloses(s.view.gowork) { + workPattern := protocol.RelativePattern{ + BaseURI: s.view.gowork.Dir(), + Pattern: path.Base(string(s.view.gowork)), + } + patterns[workPattern] = unit{} + } + + extensions := "go,mod,sum,work" + for _, ext := range s.Options().TemplateExtensions { + extensions += "," + ext + } + watchGoFiles := fmt.Sprintf("**/*.{%s}", extensions) + + var dirs []string + if s.view.moduleMode() { + if s.view.typ == GoWorkView { + workVendorDir := filepath.Join(s.view.gowork.Dir().Path(), "vendor") + workVendorURI := protocol.URIFromPath(workVendorDir) + patterns[protocol.RelativePattern{BaseURI: workVendorURI, Pattern: watchGoFiles}] = unit{} + } + + // In module mode, watch directories containing active modules, and collect + // these dirs for later filtering the set of known directories. + // + // The assumption is that the user is not actively editing non-workspace + // modules, so don't pay the price of file watching. + for modFile := range s.view.workspaceModFiles { + dir := filepath.Dir(modFile.Path()) + dirs = append(dirs, dir) + + // TODO(golang/go#64724): thoroughly test these patterns, particularly + // on Windows. + // + // Note that glob patterns should use '/' on Windows: + // https://code.visualstudio.com/docs/editor/glob-patterns + patterns[protocol.RelativePattern{BaseURI: modFile.Dir(), Pattern: watchGoFiles}] = unit{} + } + } else { + // In non-module modes (GOPATH or AdHoc), we just watch the workspace root. + dirs = []string{s.view.root.Path()} + patterns[protocol.RelativePattern{Pattern: watchGoFiles}] = unit{} + } + + if s.watchSubdirs() { + // Some clients (e.g. VS Code) do not send notifications for changes to + // directories that contain Go code (golang/go#42348). To handle this, + // explicitly watch all of the directories in the workspace. 
We find them + // by adding the directories of every file in the snapshot's workspace + // directories. There may be thousands of patterns, each a single + // directory. + // + // We compute this set by looking at files that we've previously observed. + // This may miss changes to directories that we haven't observed, but that + // shouldn't matter as there is nothing to invalidate (if a directory falls + // in a forest, etc). + // + // (A previous iteration created a single glob pattern holding a union of + // all the directories, but this was found to cause VS Code to get stuck + // for several minutes after a buffer was saved twice in a workspace that + // had >8000 watched directories.) + // + // Some clients (notably coc.nvim, which uses watchman for globs) perform + // poorly with a large list of individual directories. + s.addKnownSubdirs(patterns, dirs) + } + + return patterns +} + +func (s *Snapshot) addKnownSubdirs(patterns map[protocol.RelativePattern]unit, wsDirs []string) { + s.mu.Lock() + defer s.mu.Unlock() + + s.files.getDirs().Range(func(dir string) { + for _, wsDir := range wsDirs { + if pathutil.InDir(wsDir, dir) { + patterns[protocol.RelativePattern{Pattern: filepath.ToSlash(dir)}] = unit{} + } + } + }) +} + +// watchSubdirs reports whether gopls should request separate file watchers for +// each relevant subdirectory. This is necessary only for clients (namely VS +// Code) that do not send notifications for individual files in a directory +// when the entire directory is deleted. +func (s *Snapshot) watchSubdirs() bool { + switch p := s.Options().SubdirWatchPatterns; p { + case settings.SubdirWatchPatternsOn: + return true + case settings.SubdirWatchPatternsOff: + return false + case settings.SubdirWatchPatternsAuto: + // See the documentation of InternalOptions.SubdirWatchPatterns for an + // explanation of why VS Code gets a different default value here. 
+ // + // Unfortunately, there is no authoritative list of client names, nor any + // requirements that client names do not change. We should update the VS + // Code extension to set a default value of "subdirWatchPatterns" to "on", + // so that this workaround is only temporary. + if s.Options().ClientInfo != nil && s.Options().ClientInfo.Name == "Visual Studio Code" { + return true + } + return false + default: + bug.Reportf("invalid subdirWatchPatterns: %q", p) + return false + } +} + +// filesInDir returns all files observed by the snapshot that are contained in +// a directory with the provided URI. +func (s *Snapshot) filesInDir(uri protocol.DocumentURI) []protocol.DocumentURI { + s.mu.Lock() + defer s.mu.Unlock() + + dir := uri.Path() + if !s.files.getDirs().Contains(dir) { + return nil + } + var files []protocol.DocumentURI + s.files.foreach(func(uri protocol.DocumentURI, _ file.Handle) { + if pathutil.InDir(dir, uri.Path()) { + files = append(files, uri) + } + }) + return files +} + +// WorkspaceMetadata returns a new, unordered slice containing +// metadata for all ordinary and test packages (but not +// intermediate test variants) in the workspace. +// +// The workspace is the set of modules typically defined by a +// go.work file. It is not transitively closed: for example, +// the standard library is not usually part of the workspace +// even though every module in the workspace depends on it. +// +// Operations that must inspect all the dependencies of the +// workspace packages should instead use AllMetadata. 
+func (s *Snapshot) WorkspaceMetadata(ctx context.Context) ([]*metadata.Package, error) { + if err := s.awaitLoaded(ctx); err != nil { + return nil, err + } + + s.mu.Lock() + defer s.mu.Unlock() + + meta := make([]*metadata.Package, 0, s.workspacePackages.Len()) + s.workspacePackages.Range(func(id PackageID, _ PackagePath) { + meta = append(meta, s.meta.Packages[id]) + }) + return meta, nil +} + +// isWorkspacePackage reports whether the given package ID refers to a +// workspace package for the snapshot. +func (s *Snapshot) isWorkspacePackage(id PackageID) bool { + s.mu.Lock() + defer s.mu.Unlock() + _, ok := s.workspacePackages.Value(id) + return ok +} + +// Symbols extracts and returns symbol information for every file contained in +// a loaded package. It awaits snapshot loading. +// +// If workspaceOnly is set, this only includes symbols from files in a +// workspace package. Otherwise, it returns symbols from all loaded packages. +// +// TODO(rfindley): move to symbols.go. +func (s *Snapshot) Symbols(ctx context.Context, workspaceOnly bool) (map[protocol.DocumentURI][]Symbol, error) { + var ( + meta []*metadata.Package + err error + ) + if workspaceOnly { + meta, err = s.WorkspaceMetadata(ctx) + } else { + meta, err = s.AllMetadata(ctx) + } + if err != nil { + return nil, fmt.Errorf("loading metadata: %v", err) + } + + goFiles := make(map[protocol.DocumentURI]struct{}) + for _, mp := range meta { + for _, uri := range mp.GoFiles { + goFiles[uri] = struct{}{} + } + for _, uri := range mp.CompiledGoFiles { + goFiles[uri] = struct{}{} + } + } + + // Symbolize them in parallel. 
+ var ( + group errgroup.Group + nprocs = 2 * runtime.GOMAXPROCS(-1) // symbolize is a mix of I/O and CPU + resultMu sync.Mutex + result = make(map[protocol.DocumentURI][]Symbol) + ) + group.SetLimit(nprocs) + for uri := range goFiles { + uri := uri + group.Go(func() error { + symbols, err := s.symbolize(ctx, uri) + if err != nil { + return err + } + resultMu.Lock() + result[uri] = symbols + resultMu.Unlock() + return nil + }) + } + // Keep going on errors, but log the first failure. + // Partial results are better than no symbol results. + if err := group.Wait(); err != nil { + event.Error(ctx, "getting snapshot symbols", err) + } + return result, nil +} + +// AllMetadata returns a new unordered array of metadata for +// all packages known to this snapshot, which includes the +// packages of all workspace modules plus their transitive +// import dependencies. +// +// It may also contain ad-hoc packages for standalone files. +// It includes all test variants. +// +// TODO(rfindley): Replace this with s.MetadataGraph(). +func (s *Snapshot) AllMetadata(ctx context.Context) ([]*metadata.Package, error) { + if err := s.awaitLoaded(ctx); err != nil { + return nil, err + } + + g := s.MetadataGraph() + + meta := make([]*metadata.Package, 0, len(g.Packages)) + for _, mp := range g.Packages { + meta = append(meta, mp) + } + return meta, nil +} + +// GoModForFile returns the URI of the go.mod file for the given URI. +// +// TODO(rfindley): clarify that this is only active modules. Or update to just +// use findRootPattern. 
+func (s *Snapshot) GoModForFile(uri protocol.DocumentURI) protocol.DocumentURI { + return moduleForURI(s.view.workspaceModFiles, uri) +} + +func moduleForURI(modFiles map[protocol.DocumentURI]struct{}, uri protocol.DocumentURI) protocol.DocumentURI { + var match protocol.DocumentURI + for modURI := range modFiles { + if !modURI.Dir().Encloses(uri) { + continue + } + if len(modURI) > len(match) { + match = modURI + } + } + return match +} + +// nearestModFile finds the nearest go.mod file contained in the directory +// containing uri, or a parent of that directory. +// +// The given uri must be a file, not a directory. +func nearestModFile(ctx context.Context, uri protocol.DocumentURI, fs file.Source) (protocol.DocumentURI, error) { + dir := filepath.Dir(uri.Path()) + return findRootPattern(ctx, protocol.URIFromPath(dir), "go.mod", fs) +} + +// Metadata returns the metadata for the specified package, +// or nil if it was not found. +func (s *Snapshot) Metadata(id PackageID) *metadata.Package { + s.mu.Lock() + defer s.mu.Unlock() + return s.meta.Packages[id] +} + +// clearShouldLoad clears package IDs that no longer need to be reloaded after +// scopes has been loaded. +func (s *Snapshot) clearShouldLoad(scopes ...loadScope) { + s.mu.Lock() + defer s.mu.Unlock() + + for _, scope := range scopes { + switch scope := scope.(type) { + case packageLoadScope: + scopePath := PackagePath(scope) + var toDelete []PackageID + s.shouldLoad.Range(func(id PackageID, pkgPaths []PackagePath) { + for _, pkgPath := range pkgPaths { + if pkgPath == scopePath { + toDelete = append(toDelete, id) + } + } + }) + for _, id := range toDelete { + s.shouldLoad.Delete(id) + } + case fileLoadScope: + uri := protocol.DocumentURI(scope) + ids := s.meta.IDs[uri] + for _, id := range ids { + s.shouldLoad.Delete(id) + } + } + } +} + +// FindFile returns the FileHandle for the given URI, if it is already +// in the given snapshot. +// TODO(adonovan): delete this operation; use ReadFile instead. 
+func (s *Snapshot) FindFile(uri protocol.DocumentURI) file.Handle { + s.mu.Lock() + defer s.mu.Unlock() + + result, _ := s.files.get(uri) + return result +} + +// ReadFile returns a File for the given URI. If the file is unknown it is added +// to the managed set. +// +// ReadFile succeeds even if the file does not exist. A non-nil error return +// indicates some type of internal error, for example if ctx is cancelled. +func (s *Snapshot) ReadFile(ctx context.Context, uri protocol.DocumentURI) (file.Handle, error) { + s.mu.Lock() + defer s.mu.Unlock() + + return lockedSnapshot{s}.ReadFile(ctx, uri) +} + +// lockedSnapshot implements the file.Source interface, while holding s.mu. +// +// TODO(rfindley): This unfortunate type had been eliminated, but it had to be +// restored to fix golang/go#65801. We should endeavor to remove it again. +type lockedSnapshot struct { + s *Snapshot +} + +func (s lockedSnapshot) ReadFile(ctx context.Context, uri protocol.DocumentURI) (file.Handle, error) { + fh, ok := s.s.files.get(uri) + if !ok { + var err error + fh, err = s.s.view.fs.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + s.s.files.set(uri, fh) + } + return fh, nil +} + +// preloadFiles delegates to the view FileSource to read the requested uris in +// parallel, without holding the snapshot lock. 
+func (s *Snapshot) preloadFiles(ctx context.Context, uris []protocol.DocumentURI) { + files := make([]file.Handle, len(uris)) + var wg sync.WaitGroup + iolimit := make(chan struct{}, 20) // I/O concurrency limiting semaphore + for i, uri := range uris { + wg.Add(1) + iolimit <- struct{}{} + go func(i int, uri protocol.DocumentURI) { + defer wg.Done() + fh, err := s.view.fs.ReadFile(ctx, uri) + <-iolimit + if err != nil && ctx.Err() == nil { + event.Error(ctx, fmt.Sprintf("reading %s", uri), err) + return + } + files[i] = fh + }(i, uri) + } + wg.Wait() + + s.mu.Lock() + defer s.mu.Unlock() + + for i, fh := range files { + if fh == nil { + continue // error logged above + } + uri := uris[i] + if _, ok := s.files.get(uri); !ok { + s.files.set(uri, fh) + } + } +} + +// IsOpen returns whether the editor currently has a file open. +func (s *Snapshot) IsOpen(uri protocol.DocumentURI) bool { + s.mu.Lock() + defer s.mu.Unlock() + + fh, _ := s.files.get(uri) + _, open := fh.(*overlay) + return open +} + +// MetadataGraph returns the current metadata graph for the Snapshot. +func (s *Snapshot) MetadataGraph() *metadata.Graph { + s.mu.Lock() + defer s.mu.Unlock() + return s.meta +} + +// InitializationError returns the last error from initialization. +func (s *Snapshot) InitializationError() *InitializationError { + s.mu.Lock() + defer s.mu.Unlock() + return s.initialErr +} + +// awaitLoaded awaits initialization and package reloading, and returns +// ctx.Err(). +func (s *Snapshot) awaitLoaded(ctx context.Context) error { + // Do not return results until the snapshot's view has been initialized. + s.AwaitInitialized(ctx) + s.reloadWorkspace(ctx) + return ctx.Err() +} + +// AwaitInitialized waits until the snapshot's view is initialized. +func (s *Snapshot) AwaitInitialized(ctx context.Context) { + select { + case <-ctx.Done(): + return + case <-s.view.initialWorkspaceLoad: + } + // We typically prefer to run something as intensive as the IWL without + // blocking. 
I'm not sure if there is a way to do that here. + s.initialize(ctx, false) +} + +// reloadWorkspace reloads the metadata for all invalidated workspace packages. +func (s *Snapshot) reloadWorkspace(ctx context.Context) { + var scopes []loadScope + var seen map[PackagePath]bool + s.mu.Lock() + s.shouldLoad.Range(func(_ PackageID, pkgPaths []PackagePath) { + for _, pkgPath := range pkgPaths { + if seen == nil { + seen = make(map[PackagePath]bool) + } + if seen[pkgPath] { + continue + } + seen[pkgPath] = true + scopes = append(scopes, packageLoadScope(pkgPath)) + } + }) + s.mu.Unlock() + + if len(scopes) == 0 { + return + } + + // For an ad-hoc view, we cannot reload by package path. Just reload the view. + if s.view.typ == AdHocView { + scopes = []loadScope{viewLoadScope{}} + } + + err := s.load(ctx, false, scopes...) + + // Unless the context was canceled, set "shouldLoad" to false for all + // of the metadata we attempted to load. + if !errors.Is(err, context.Canceled) { + s.clearShouldLoad(scopes...) + if err != nil { + event.Error(ctx, "reloading workspace", err, s.Labels()...) + } + } +} + +func (s *Snapshot) orphanedFileDiagnostics(ctx context.Context, overlays []*overlay) ([]*Diagnostic, error) { + if err := s.awaitLoaded(ctx); err != nil { + return nil, err + } + + var diagnostics []*Diagnostic + var orphaned []*overlay +searchOverlays: + for _, o := range overlays { + uri := o.URI() + if s.IsBuiltin(uri) || s.FileKind(o) != file.Go { + continue + } + mps, err := s.MetadataForFile(ctx, uri) + if err != nil { + return nil, err + } + for _, mp := range mps { + if !metadata.IsCommandLineArguments(mp.ID) || mp.Standalone { + continue searchOverlays + } + } + metadata.RemoveIntermediateTestVariants(&mps) + + // With zero-config gopls (golang/go#57979), orphaned file diagnostics + // include diagnostics for orphaned files -- not just diagnostics relating + // to the reason the files are opened. 
+ // + // This is because orphaned files are never considered part of a workspace + // package: if they are loaded by a view, that view is arbitrary, and they + // may be loaded by multiple views. If they were to be diagnosed by + // multiple views, their diagnostics may become inconsistent. + if len(mps) > 0 { + diags, err := s.PackageDiagnostics(ctx, mps[0].ID) + if err != nil { + return nil, err + } + diagnostics = append(diagnostics, diags[uri]...) + } + orphaned = append(orphaned, o) + } + + if len(orphaned) == 0 { + return nil, nil + } + + loadedModFiles := make(map[protocol.DocumentURI]struct{}) // all mod files, including dependencies + ignoredFiles := make(map[protocol.DocumentURI]bool) // files reported in packages.Package.IgnoredFiles + + g := s.MetadataGraph() + for _, meta := range g.Packages { + if meta.Module != nil && meta.Module.GoMod != "" { + gomod := protocol.URIFromPath(meta.Module.GoMod) + loadedModFiles[gomod] = struct{}{} + } + for _, ignored := range meta.IgnoredFiles { + ignoredFiles[ignored] = true + } + } + + initialErr := s.InitializationError() + + for _, fh := range orphaned { + pgf, rng, ok := orphanedFileDiagnosticRange(ctx, s.view.parseCache, fh) + if !ok { + continue // e.g. cancellation or parse error + } + + var ( + msg string // if non-empty, report a diagnostic with this message + suggestedFixes []SuggestedFix // associated fixes, if any + ) + if initialErr != nil { + msg = fmt.Sprintf("initialization failed: %v", initialErr.MainError) + } else if goMod, err := nearestModFile(ctx, fh.URI(), s); err == nil && goMod != "" { + // If we have a relevant go.mod file, check whether the file is orphaned + // due to its go.mod file being inactive. We could also offer a + // prescriptive diagnostic in the case that there is no go.mod file, but it + // is harder to be precise in that case, and less important. 
+ if _, ok := loadedModFiles[goMod]; !ok { + modDir := filepath.Dir(goMod.Path()) + viewDir := s.view.folder.Dir.Path() + + // When the module is underneath the view dir, we offer + // "use all modules" quick-fixes. + inDir := pathutil.InDir(viewDir, modDir) + + if rel, err := filepath.Rel(viewDir, modDir); err == nil { + modDir = rel + } + + var fix string + if s.view.folder.Env.GoVersion >= 18 { + if s.view.gowork != "" { + fix = fmt.Sprintf("To fix this problem, you can add this module to your go.work file (%s)", s.view.gowork) + if cmd, err := command.NewRunGoWorkCommandCommand("Run `go work use`", command.RunGoWorkArgs{ + ViewID: s.view.ID(), + Args: []string{"use", modDir}, + }); err == nil { + suggestedFixes = append(suggestedFixes, SuggestedFix{ + Title: "Use this module in your go.work file", + Command: &cmd, + ActionKind: protocol.QuickFix, + }) + } + + if inDir { + if cmd, err := command.NewRunGoWorkCommandCommand("Run `go work use -r`", command.RunGoWorkArgs{ + ViewID: s.view.ID(), + Args: []string{"use", "-r", "."}, + }); err == nil { + suggestedFixes = append(suggestedFixes, SuggestedFix{ + Title: "Use all modules in your workspace", + Command: &cmd, + ActionKind: protocol.QuickFix, + }) + } + } + } else { + fix = "To fix this problem, you can add a go.work file that uses this directory." 
+ + if cmd, err := command.NewRunGoWorkCommandCommand("Run `go work init && go work use`", command.RunGoWorkArgs{ + ViewID: s.view.ID(), + InitFirst: true, + Args: []string{"use", modDir}, + }); err == nil { + suggestedFixes = []SuggestedFix{ + { + Title: "Add a go.work file using this module", + Command: &cmd, + ActionKind: protocol.QuickFix, + }, + } + } + + if inDir { + if cmd, err := command.NewRunGoWorkCommandCommand("Run `go work init && go work use -r`", command.RunGoWorkArgs{ + ViewID: s.view.ID(), + InitFirst: true, + Args: []string{"use", "-r", "."}, + }); err == nil { + suggestedFixes = append(suggestedFixes, SuggestedFix{ + Title: "Add a go.work file using all modules in your workspace", + Command: &cmd, + ActionKind: protocol.QuickFix, + }) + } + } + } + } else { + fix = `To work with multiple modules simultaneously, please upgrade to Go 1.18 or +later, reinstall gopls, and use a go.work file.` + } + + msg = fmt.Sprintf(`This file is within module %q, which is not included in your workspace. +%s +See the documentation for more information on setting up your workspace: +https://github.com/golang/tools/blob/master/gopls/doc/workspace.md.`, modDir, fix) + } + } + + if msg == "" { + if ignoredFiles[fh.URI()] { + // TODO(rfindley): use the constraint package to check if the file + // _actually_ satisfies the current build context. 
+ hasConstraint := false + walkConstraints(pgf.File, func(constraint.Expr) bool { + hasConstraint = true + return false + }) + var fix string + if hasConstraint { + fix = `This file may be excluded due to its build tags; try adding "-tags=" to your gopls "buildFlags" configuration +See the documentation for more information on working with build tags: +https://github.com/golang/tools/blob/master/gopls/doc/settings.md#buildflags-string.` + } else if strings.Contains(filepath.Base(fh.URI().Path()), "_") { + fix = `This file may be excluded due to its GOOS/GOARCH, or other build constraints.` + } else { + fix = `This file is ignored by your gopls build.` // we don't know why + } + msg = fmt.Sprintf("No packages found for open file %s.\n%s", fh.URI().Path(), fix) + } else { + // Fall back: we're not sure why the file is orphaned. + // TODO(rfindley): we could do better here, diagnosing the lack of a + // go.mod file and malformed file names (see the perc%ent marker test). + msg = fmt.Sprintf("No packages found for open file %s.", fh.URI().Path()) + } + } + + if msg != "" { + d := &Diagnostic{ + URI: fh.URI(), + Range: rng, + Severity: protocol.SeverityWarning, + Source: ListError, + Message: msg, + SuggestedFixes: suggestedFixes, + } + if ok := bundleQuickFixes(d); !ok { + bug.Reportf("failed to bundle quick fixes for %v", d) + } + // Only report diagnostics if we detect an actual exclusion. + diagnostics = append(diagnostics, d) + } + } + return diagnostics, nil +} + +// orphanedFileDiagnosticRange returns the position to use for orphaned file diagnostics. +// We only warn about an orphaned file if it is well-formed enough to actually +// be part of a package. Otherwise, we need more information. 
+func orphanedFileDiagnosticRange(ctx context.Context, cache *parseCache, fh file.Handle) (*parsego.File, protocol.Range, bool) { + pgfs, err := cache.parseFiles(ctx, token.NewFileSet(), parsego.Header, false, fh) + if err != nil { + return nil, protocol.Range{}, false + } + pgf := pgfs[0] + if !pgf.File.Name.Pos().IsValid() { + return nil, protocol.Range{}, false + } + rng, err := pgf.PosRange(pgf.File.Name.Pos(), pgf.File.Name.End()) + if err != nil { + return nil, protocol.Range{}, false + } + return pgf, rng, true +} + +// TODO(golang/go#53756): this function needs to consider more than just the +// absolute URI, for example: +// - the position of /vendor/ with respect to the relevant module root +// - whether or not go.work is in use (as vendoring isn't supported in workspace mode) +// +// Most likely, each call site of inVendor needs to be reconsidered to +// understand and correctly implement the desired behavior. +func inVendor(uri protocol.DocumentURI) bool { + _, after, found := strings.Cut(string(uri), "/vendor/") + // Only subdirectories of /vendor/ are considered vendored + // (/vendor/a/foo.go is vendored, /vendor/foo.go is not). + return found && strings.Contains(after, "/") +} + +// clone copies state from the receiver into a new Snapshot, applying the given +// state changes. +// +// The caller of clone must call Snapshot.decref on the returned +// snapshot when they are finished using it. +// +// The resulting bool reports whether the change invalidates any derived +// diagnostics for the snapshot, for example because it invalidates Packages or +// parsed go.mod files. This is used to mark a view as needing diagnosis in the +// server. +// +// TODO(rfindley): long term, it may be better to move responsibility for +// diagnostics into the Snapshot (e.g. a Snapshot.Diagnostics method), at which +// point the Snapshot could be responsible for tracking and forwarding a +// 'viewsToDiagnose' field. 
As is, this field is instead externalized in the +// server.viewsToDiagnose map. Moving it to the snapshot would entirely +// eliminate any 'relevance' heuristics from Session.DidModifyFiles, but would +// also require more strictness about diagnostic dependencies. For example, +// template.Diagnostics currently re-parses every time: there is no Snapshot +// data responsible for providing these diagnostics. +func (s *Snapshot) clone(ctx, bgCtx context.Context, changed StateChange, done func()) (*Snapshot, bool) { + changedFiles := changed.Files + ctx, stop := event.Start(ctx, "cache.snapshot.clone") + defer stop() + + s.mu.Lock() + defer s.mu.Unlock() + + // TODO(rfindley): reorganize this function to make the derivation of + // needsDiagnosis clearer. + needsDiagnosis := len(changed.GCDetails) > 0 || len(changed.ModuleUpgrades) > 0 || len(changed.Vulns) > 0 + + bgCtx, cancel := context.WithCancel(bgCtx) + result := &Snapshot{ + sequenceID: s.sequenceID + 1, + store: s.store, + refcount: 1, // Snapshots are born referenced. 
+ done: done, + view: s.view, + backgroundCtx: bgCtx, + cancel: cancel, + builtin: s.builtin, + initialized: s.initialized, + initialErr: s.initialErr, + packages: s.packages.Clone(), + activePackages: s.activePackages.Clone(), + files: s.files.clone(changedFiles), + symbolizeHandles: cloneWithout(s.symbolizeHandles, changedFiles, nil), + workspacePackages: s.workspacePackages, + shouldLoad: s.shouldLoad.Clone(), // not cloneWithout: shouldLoad is cleared on loads + unloadableFiles: s.unloadableFiles.Clone(), // not cloneWithout: typing in a file doesn't necessarily make it loadable + parseModHandles: cloneWithout(s.parseModHandles, changedFiles, &needsDiagnosis), + parseWorkHandles: cloneWithout(s.parseWorkHandles, changedFiles, &needsDiagnosis), + modTidyHandles: cloneWithout(s.modTidyHandles, changedFiles, &needsDiagnosis), + modWhyHandles: cloneWithout(s.modWhyHandles, changedFiles, &needsDiagnosis), + modVulnHandles: cloneWithout(s.modVulnHandles, changedFiles, &needsDiagnosis), + importGraph: s.importGraph, + pkgIndex: s.pkgIndex, + moduleUpgrades: cloneWith(s.moduleUpgrades, changed.ModuleUpgrades), + vulns: cloneWith(s.vulns, changed.Vulns), + } + + // Compute the new set of packages for which we want gc details, after + // applying changed.GCDetails. + if len(s.gcOptimizationDetails) > 0 || len(changed.GCDetails) > 0 { + newGCDetails := make(map[metadata.PackageID]unit) + for id := range s.gcOptimizationDetails { + if _, ok := changed.GCDetails[id]; !ok { + newGCDetails[id] = unit{} // no change + } + } + for id, want := range changed.GCDetails { + if want { + newGCDetails[id] = unit{} + } + } + if len(newGCDetails) > 0 { + result.gcOptimizationDetails = newGCDetails + } + } + + reinit := false + + // Changes to vendor tree may require reinitialization, + // either because of an initialization error + // (e.g. 
"inconsistent vendoring detected"), or because + // one or more modules may have moved into or out of the + // vendor tree after 'go mod vendor' or 'rm -fr vendor/'. + // + // In this case, we consider the actual modification to see if was a creation + // or deletion. + // + // TODO(rfindley): revisit the location of this check. + for _, mod := range changed.Modifications { + if inVendor(mod.URI) && (mod.Action == file.Create || mod.Action == file.Delete) || + strings.HasSuffix(string(mod.URI), "/vendor/modules.txt") { + + reinit = true + break + } + } + + // Collect observed file handles for changed URIs from the old snapshot, if + // they exist. Importantly, we don't call ReadFile here: consider the case + // where a file is added on disk; we don't want to read the newly added file + // into the old snapshot, as that will break our change detection below. + // + // TODO(rfindley): it may be more accurate to rely on the modification type + // here, similarly to what we do for vendored files above. If we happened not + // to have read a file in the previous snapshot, that's not the same as it + // actually being created. + oldFiles := make(map[protocol.DocumentURI]file.Handle) + for uri := range changedFiles { + if fh, ok := s.files.get(uri); ok { + oldFiles[uri] = fh + } + } + // changedOnDisk determines if the new file handle may have changed on disk. + // It over-approximates, returning true if the new file is saved and either + // the old file wasn't saved, or the on-disk contents changed. + // + // oldFH may be nil. + changedOnDisk := func(oldFH, newFH file.Handle) bool { + if !newFH.SameContentsOnDisk() { + return false + } + if oe, ne := (oldFH != nil && fileExists(oldFH)), fileExists(newFH); !oe || !ne { + return oe != ne + } + return !oldFH.SameContentsOnDisk() || oldFH.Identity() != newFH.Identity() + } + + // Reinitialize if any workspace mod file has changed on disk. 
+ for uri, newFH := range changedFiles { + if _, ok := result.view.workspaceModFiles[uri]; ok && changedOnDisk(oldFiles[uri], newFH) { + reinit = true + } + } + + // Finally, process sumfile changes that may affect loading. + for uri, newFH := range changedFiles { + if !changedOnDisk(oldFiles[uri], newFH) { + continue // like with go.mod files, we only reinit when things change on disk + } + dir, base := filepath.Split(uri.Path()) + if base == "go.work.sum" && s.view.typ == GoWorkView && dir == filepath.Dir(s.view.gowork.Path()) { + reinit = true + } + if base == "go.sum" { + modURI := protocol.URIFromPath(filepath.Join(dir, "go.mod")) + if _, active := result.view.workspaceModFiles[modURI]; active { + reinit = true + } + } + } + + // The snapshot should be initialized if either s was uninitialized, or we've + // detected a change that triggers reinitialization. + if reinit { + result.initialized = false + needsDiagnosis = true + } + + // directIDs keeps track of package IDs that have directly changed. + // Note: this is not a set, it's a map from id to invalidateMetadata. + directIDs := map[PackageID]bool{} + + // Invalidate all package metadata if the workspace module has changed. + if reinit { + for k := range s.meta.Packages { + // TODO(rfindley): this seems brittle; can we just start over? + directIDs[k] = true + } + } + + // Compute invalidations based on file changes. + anyImportDeleted := false // import deletions can resolve cycles + anyFileOpenedOrClosed := false // opened files affect workspace packages + anyFileAdded := false // adding a file can resolve missing dependencies + + for uri, newFH := range changedFiles { + // The original FileHandle for this URI is cached on the snapshot. 
+ oldFH := oldFiles[uri] // may be nil + _, oldOpen := oldFH.(*overlay) + _, newOpen := newFH.(*overlay) + + anyFileOpenedOrClosed = anyFileOpenedOrClosed || (oldOpen != newOpen) + anyFileAdded = anyFileAdded || (oldFH == nil || !fileExists(oldFH)) && fileExists(newFH) + + // If uri is a Go file, check if it has changed in a way that would + // invalidate metadata. Note that we can't use s.view.FileKind here, + // because the file type that matters is not what the *client* tells us, + // but what the Go command sees. + var invalidateMetadata, pkgFileChanged, importDeleted bool + if strings.HasSuffix(uri.Path(), ".go") { + invalidateMetadata, pkgFileChanged, importDeleted = metadataChanges(ctx, s, oldFH, newFH) + } + if invalidateMetadata { + // If this is a metadata-affecting change, perhaps a reload will succeed. + result.unloadableFiles.Remove(uri) + needsDiagnosis = true + } + + invalidateMetadata = invalidateMetadata || reinit + anyImportDeleted = anyImportDeleted || importDeleted + + // Mark all of the package IDs containing the given file. + filePackageIDs := invalidatedPackageIDs(uri, s.meta.IDs, pkgFileChanged) + for id := range filePackageIDs { + directIDs[id] = directIDs[id] || invalidateMetadata // may insert 'false' + } + + // Invalidate the previous modTidyHandle if any of the files have been + // saved or if any of the metadata has been invalidated. + // + // TODO(rfindley): this seems like too-aggressive invalidation of mod + // results. We should instead thread through overlays to the Go command + // invocation and only run this if invalidateMetadata (and perhaps then + // still do it less frequently). + if invalidateMetadata || fileWasSaved(oldFH, newFH) { + // Only invalidate mod tidy results for the most relevant modfile in the + // workspace. This is a potentially lossy optimization for workspaces + // with many modules (such as google-cloud-go, which has 145 modules as + // of writing). 
+ // + // While it is theoretically possible that a change in workspace module A + // could affect the mod-tidiness of workspace module B (if B transitively + // requires A), such changes are probably unlikely and not worth the + // penalty of re-running go mod tidy for everything. Note that mod tidy + // ignores GOWORK, so the two modules would have to be related by a chain + // of replace directives. + // + // We could improve accuracy by inspecting replace directives, using + // overlays in go mod tidy, and/or checking for metadata changes from the + // on-disk content. + // + // Note that we iterate the modTidyHandles map here, rather than e.g. + // using nearestModFile, because we don't have access to an accurate + // FileSource at this point in the snapshot clone. + const onlyInvalidateMostRelevant = true + if onlyInvalidateMostRelevant { + deleteMostRelevantModFile(result.modTidyHandles, uri) + } else { + result.modTidyHandles.Clear() + } + + // TODO(rfindley): should we apply the above heuristic to mod vuln or mod + // why handles as well? + // + // TODO(rfindley): no tests fail if I delete the line below. + result.modWhyHandles.Clear() + result.modVulnHandles.Clear() + } + } + + // Deleting an import can cause list errors due to import cycles to be + // resolved. The best we can do without parsing the list error message is to + // hope that list errors may have been resolved by a deleted import. + // + // We could do better by parsing the list error message. We already do this + // to assign a better range to the list error, but for such critical + // functionality as metadata, it's better to be conservative until it proves + // impractical. + // + // We could also do better by looking at which imports were deleted and + // trying to find cycles they are involved in. This fails when the file goes + // from an unparseable state to a parseable state, as we don't have a + // starting point to compare with. 
+ if anyImportDeleted { + for id, mp := range s.meta.Packages { + if len(mp.Errors) > 0 { + directIDs[id] = true + } + } + } + + // Adding a file can resolve missing dependencies from existing packages. + // + // We could be smart here and try to guess which packages may have been + // fixed, but until that proves necessary, just invalidate metadata for any + // package with missing dependencies. + if anyFileAdded { + for id, mp := range s.meta.Packages { + for _, impID := range mp.DepsByImpPath { + if impID == "" { // missing import + directIDs[id] = true + break + } + } + } + } + + // Invalidate reverse dependencies too. + // idsToInvalidate keeps track of transitive reverse dependencies. + // If an ID is present in the map, invalidate its types. + // If an ID's value is true, invalidate its metadata too. + idsToInvalidate := map[PackageID]bool{} + var addRevDeps func(PackageID, bool) + addRevDeps = func(id PackageID, invalidateMetadata bool) { + current, seen := idsToInvalidate[id] + newInvalidateMetadata := current || invalidateMetadata + + // If we've already seen this ID, and the value of invalidate + // metadata has not changed, we can return early. + if seen && current == newInvalidateMetadata { + return + } + idsToInvalidate[id] = newInvalidateMetadata + for _, rid := range s.meta.ImportedBy[id] { + addRevDeps(rid, invalidateMetadata) + } + } + for id, invalidateMetadata := range directIDs { + addRevDeps(id, invalidateMetadata) + } + + // Invalidated package information. + for id, invalidateMetadata := range idsToInvalidate { + if _, ok := directIDs[id]; ok || invalidateMetadata { + if result.packages.Delete(id) { + needsDiagnosis = true + } + } else { + if entry, hit := result.packages.Get(id); hit { + needsDiagnosis = true + ph := entry.clone(false) + result.packages.Set(id, ph, nil) + } + } + if result.activePackages.Delete(id) { + needsDiagnosis = true + } + } + + // Compute which metadata updates are required. 
We only need to invalidate + // packages directly containing the affected file, and only if it changed in + // a relevant way. + metadataUpdates := make(map[PackageID]*metadata.Package) + for id, mp := range s.meta.Packages { + invalidateMetadata := idsToInvalidate[id] + + // For metadata that has been newly invalidated, capture package paths + // requiring reloading in the shouldLoad map. + if invalidateMetadata && !metadata.IsCommandLineArguments(mp.ID) { + needsReload := []PackagePath{mp.PkgPath} + if mp.ForTest != "" && mp.ForTest != mp.PkgPath { + // When reloading test variants, always reload their ForTest package as + // well. Otherwise, we may miss test variants in the resulting load. + // + // TODO(rfindley): is this actually sufficient? Is it possible that + // other test variants may be invalidated? Either way, we should + // determine exactly what needs to be reloaded here. + needsReload = append(needsReload, mp.ForTest) + } + result.shouldLoad.Set(id, needsReload, nil) + } + + // Check whether the metadata should be deleted. + if invalidateMetadata { + needsDiagnosis = true + metadataUpdates[id] = nil + continue + } + } + + // Update metadata, if necessary. + result.meta = s.meta.Update(metadataUpdates) + + // Update workspace and active packages, if necessary. + if result.meta != s.meta || anyFileOpenedOrClosed { + needsDiagnosis = true + result.workspacePackages = computeWorkspacePackagesLocked(ctx, result, result.meta) + result.resetActivePackagesLocked() + } else { + result.workspacePackages = s.workspacePackages + } + + return result, needsDiagnosis +} + +// cloneWithout clones m then deletes from it the keys of changes. +// +// The optional didDelete variable is set to true if there were deletions. 
+func cloneWithout[K constraints.Ordered, V1, V2 any](m *persistent.Map[K, V1], changes map[K]V2, didDelete *bool) *persistent.Map[K, V1] { + m2 := m.Clone() + for k := range changes { + if m2.Delete(k) && didDelete != nil { + *didDelete = true + } + } + return m2 +} + +// cloneWith clones m then inserts the changes into it. +func cloneWith[K constraints.Ordered, V any](m *persistent.Map[K, V], changes map[K]V) *persistent.Map[K, V] { + m2 := m.Clone() + for k, v := range changes { + m2.Set(k, v, nil) + } + return m2 +} + +// deleteMostRelevantModFile deletes the mod file most likely to be the mod +// file for the changed URI, if it exists. +// +// Specifically, this is the longest mod file path in a directory containing +// changed. This might not be accurate if there is another mod file closer to +// changed that happens not to be present in the map, but that's OK: the goal +// of this function is to guarantee that IF the nearest mod file is present in +// the map, it is invalidated. +func deleteMostRelevantModFile(m *persistent.Map[protocol.DocumentURI, *memoize.Promise], changed protocol.DocumentURI) { + var mostRelevant protocol.DocumentURI + changedFile := changed.Path() + + m.Range(func(modURI protocol.DocumentURI, _ *memoize.Promise) { + if len(modURI) > len(mostRelevant) { + if pathutil.InDir(filepath.Dir(modURI.Path()), changedFile) { + mostRelevant = modURI + } + } + }) + if mostRelevant != "" { + m.Delete(mostRelevant) + } +} + +// invalidatedPackageIDs returns all packages invalidated by a change to uri. +// If we haven't seen this URI before, we guess based on files in the same +// directory. This is of course incorrect in build systems where packages are +// not organized by directory. +// +// If packageFileChanged is set, the file is either a new file, or has a new +// package name. In this case, all known packages in the directory will be +// invalidated. 
+func invalidatedPackageIDs(uri protocol.DocumentURI, known map[protocol.DocumentURI][]PackageID, packageFileChanged bool) map[PackageID]struct{} { + invalidated := make(map[PackageID]struct{}) + + // At a minimum, we invalidate packages known to contain uri. + for _, id := range known[uri] { + invalidated[id] = struct{}{} + } + + // If the file didn't move to a new package, we should only invalidate the + // packages it is currently contained inside. + if !packageFileChanged && len(invalidated) > 0 { + return invalidated + } + + // This is a file we don't yet know about, or which has moved packages. Guess + // relevant packages by considering files in the same directory. + + // Cache of FileInfo to avoid unnecessary stats for multiple files in the + // same directory. + stats := make(map[string]struct { + os.FileInfo + error + }) + getInfo := func(dir string) (os.FileInfo, error) { + if res, ok := stats[dir]; ok { + return res.FileInfo, res.error + } + fi, err := os.Stat(dir) + stats[dir] = struct { + os.FileInfo + error + }{fi, err} + return fi, err + } + dir := filepath.Dir(uri.Path()) + fi, err := getInfo(dir) + if err == nil { + // Aggregate all possibly relevant package IDs. + for knownURI, ids := range known { + knownDir := filepath.Dir(knownURI.Path()) + knownFI, err := getInfo(knownDir) + if err != nil { + continue + } + if os.SameFile(fi, knownFI) { + for _, id := range ids { + invalidated[id] = struct{}{} + } + } + } + } + return invalidated +} + +// fileWasSaved reports whether the FileHandle passed in has been saved. It +// accomplishes this by checking to see if the original and current FileHandles +// are both overlays, and if the current FileHandle is saved while the original +// FileHandle was not saved. 
+func fileWasSaved(originalFH, currentFH file.Handle) bool {
+ c, ok := currentFH.(*overlay)
+ if !ok || c == nil {
+ return true
+ }
+ o, ok := originalFH.(*overlay)
+ if !ok || o == nil {
+ return c.saved
+ }
+ return !o.saved && c.saved
+}
+
+// metadataChanges detects features of the change from oldFH->newFH that may
+// affect package metadata.
+//
+// It uses lockedSnapshot to access cached parse information. lockedSnapshot
+// must be locked.
+//
+// The result parameters have the following meaning:
+// - invalidate means that package metadata for packages containing the file
+// should be invalidated.
+// - pkgFileChanged means that the file->package associations for the file have
+// changed (possibly because the file is new, or because its package name has
+// changed).
+// - importDeleted means that an import has been deleted, or we can't
+// determine if an import was deleted due to errors.
+func metadataChanges(ctx context.Context, lockedSnapshot *Snapshot, oldFH, newFH file.Handle) (invalidate, pkgFileChanged, importDeleted bool) {
+ if oe, ne := oldFH != nil && fileExists(oldFH), fileExists(newFH); !oe || !ne { // existential changes
+ changed := oe != ne
+ return changed, changed, !ne // we don't know if an import was deleted
+ }
+
+ // If the file hasn't changed, there's no need to reload.
+ if oldFH.Identity() == newFH.Identity() {
+ return false, false, false
+ }
+
+ fset := token.NewFileSet()
+ // Parse headers to compare package names and imports. 
+ oldHeads, oldErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, parsego.Header, false, oldFH) + newHeads, newErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, parsego.Header, false, newFH) + + if oldErr != nil || newErr != nil { + errChanged := (oldErr == nil) != (newErr == nil) + return errChanged, errChanged, (newErr != nil) // we don't know if an import was deleted + } + + oldHead := oldHeads[0] + newHead := newHeads[0] + + // `go list` fails completely if the file header cannot be parsed. If we go + // from a non-parsing state to a parsing state, we should reload. + if oldHead.ParseErr != nil && newHead.ParseErr == nil { + return true, true, true // We don't know what changed, so fall back on full invalidation. + } + + // If a package name has changed, the set of package imports may have changed + // in ways we can't detect here. Assume an import has been deleted. + if oldHead.File.Name.Name != newHead.File.Name.Name { + return true, true, true + } + + // Check whether package imports have changed. Only consider potentially + // valid imports paths. + oldImports := validImports(oldHead.File.Imports) + newImports := validImports(newHead.File.Imports) + + for path := range newImports { + if _, ok := oldImports[path]; ok { + delete(oldImports, path) + } else { + invalidate = true // a new, potentially valid import was added + } + } + + if len(oldImports) > 0 { + invalidate = true + importDeleted = true + } + + // If the change does not otherwise invalidate metadata, get the full ASTs in + // order to check magic comments. + // + // Note: if this affects performance we can probably avoid parsing in the + // common case by first scanning the source for potential comments. 
+ if !invalidate { + origFulls, oldErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, parsego.Full, false, oldFH) + newFulls, newErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, parsego.Full, false, newFH) + if oldErr == nil && newErr == nil { + invalidate = magicCommentsChanged(origFulls[0].File, newFulls[0].File) + } else { + // At this point, we shouldn't ever fail to produce a parsego.File, as + // we're already past header parsing. + bug.Reportf("metadataChanges: unparseable file %v (old error: %v, new error: %v)", oldFH.URI(), oldErr, newErr) + } + } + + return invalidate, pkgFileChanged, importDeleted +} + +func magicCommentsChanged(original *ast.File, current *ast.File) bool { + oldComments := extractMagicComments(original) + newComments := extractMagicComments(current) + if len(oldComments) != len(newComments) { + return true + } + for i := range oldComments { + if oldComments[i] != newComments[i] { + return true + } + } + return false +} + +// validImports extracts the set of valid import paths from imports. +func validImports(imports []*ast.ImportSpec) map[string]struct{} { + m := make(map[string]struct{}) + for _, spec := range imports { + if path := spec.Path.Value; validImportPath(path) { + m[path] = struct{}{} + } + } + return m +} + +func validImportPath(path string) bool { + path, err := strconv.Unquote(path) + if err != nil { + return false + } + if path == "" { + return false + } + if path[len(path)-1] == '/' { + return false + } + return true +} + +var buildConstraintOrEmbedRe = regexp.MustCompile(`^//(go:embed|go:build|\s*\+build).*`) + +// extractMagicComments finds magic comments that affect metadata in f. 
+func extractMagicComments(f *ast.File) []string { + var results []string + for _, cg := range f.Comments { + for _, c := range cg.List { + if buildConstraintOrEmbedRe.MatchString(c.Text) { + results = append(results, c.Text) + } + } + } + return results +} + +// BuiltinFile returns information about the special builtin package. +func (s *Snapshot) BuiltinFile(ctx context.Context) (*parsego.File, error) { + s.AwaitInitialized(ctx) + + s.mu.Lock() + builtin := s.builtin + s.mu.Unlock() + + if builtin == "" { + return nil, fmt.Errorf("no builtin package for view %s", s.view.folder.Name) + } + + fh, err := s.ReadFile(ctx, builtin) + if err != nil { + return nil, err + } + // For the builtin file only, we need syntactic object resolution + // (since we can't type check). + mode := parsego.Full &^ parser.SkipObjectResolution + pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), mode, false, fh) + if err != nil { + return nil, err + } + return pgfs[0], nil +} + +// IsBuiltin reports whether uri is part of the builtin package. +func (s *Snapshot) IsBuiltin(uri protocol.DocumentURI) bool { + s.mu.Lock() + defer s.mu.Unlock() + // We should always get the builtin URI in a canonical form, so use simple + // string comparison here. span.CompareURI is too expensive. + return uri == s.builtin +} + +func (s *Snapshot) setBuiltin(path string) { + s.mu.Lock() + defer s.mu.Unlock() + + s.builtin = protocol.URIFromPath(path) +} + +// WantGCDetails reports whether to compute GC optimization details for the +// specified package. +func (s *Snapshot) WantGCDetails(id metadata.PackageID) bool { + _, ok := s.gcOptimizationDetails[id] + return ok +} diff --git a/gopls/internal/cache/symbols.go b/gopls/internal/cache/symbols.go new file mode 100644 index 00000000000..9954c747798 --- /dev/null +++ b/gopls/internal/cache/symbols.go @@ -0,0 +1,201 @@ +// Copyright 2021 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "context" + "go/ast" + "go/token" + "go/types" + "strings" + + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/astutil" +) + +// Symbol holds a precomputed symbol value. Note: we avoid using the +// protocol.SymbolInformation struct here in order to reduce the size of each +// symbol. +type Symbol struct { + Name string + Kind protocol.SymbolKind + Range protocol.Range +} + +// symbolize returns the result of symbolizing the file identified by uri, using a cache. +func (s *Snapshot) symbolize(ctx context.Context, uri protocol.DocumentURI) ([]Symbol, error) { + + s.mu.Lock() + entry, hit := s.symbolizeHandles.Get(uri) + s.mu.Unlock() + + type symbolizeResult struct { + symbols []Symbol + err error + } + + // Cache miss? + if !hit { + fh, err := s.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + type symbolHandleKey file.Hash + key := symbolHandleKey(fh.Identity().Hash) + promise, release := s.store.Promise(key, func(ctx context.Context, arg interface{}) interface{} { + symbols, err := symbolizeImpl(ctx, arg.(*Snapshot), fh) + return symbolizeResult{symbols, err} + }) + + entry = promise + + s.mu.Lock() + s.symbolizeHandles.Set(uri, entry, func(_, _ interface{}) { release() }) + s.mu.Unlock() + } + + // Await result. + v, err := s.awaitPromise(ctx, entry) + if err != nil { + return nil, err + } + res := v.(symbolizeResult) + return res.symbols, res.err +} + +// symbolizeImpl reads and parses a file and extracts symbols from it. 
+func symbolizeImpl(ctx context.Context, snapshot *Snapshot, fh file.Handle) ([]Symbol, error) { + pgfs, err := snapshot.view.parseCache.parseFiles(ctx, token.NewFileSet(), parsego.Full, false, fh) + if err != nil { + return nil, err + } + + w := &symbolWalker{ + tokFile: pgfs[0].Tok, + mapper: pgfs[0].Mapper, + } + w.fileDecls(pgfs[0].File.Decls) + + return w.symbols, w.firstError +} + +type symbolWalker struct { + // for computing positions + tokFile *token.File + mapper *protocol.Mapper + + symbols []Symbol + firstError error +} + +func (w *symbolWalker) atNode(node ast.Node, name string, kind protocol.SymbolKind, path ...*ast.Ident) { + var b strings.Builder + for _, ident := range path { + if ident != nil { + b.WriteString(ident.Name) + b.WriteString(".") + } + } + b.WriteString(name) + + rng, err := w.mapper.NodeRange(w.tokFile, node) + if err != nil { + w.error(err) + return + } + sym := Symbol{ + Name: b.String(), + Kind: kind, + Range: rng, + } + w.symbols = append(w.symbols, sym) +} + +func (w *symbolWalker) error(err error) { + if err != nil && w.firstError == nil { + w.firstError = err + } +} + +func (w *symbolWalker) fileDecls(decls []ast.Decl) { + for _, decl := range decls { + switch decl := decl.(type) { + case *ast.FuncDecl: + kind := protocol.Function + var recv *ast.Ident + if decl.Recv.NumFields() > 0 { + kind = protocol.Method + _, recv, _ = astutil.UnpackRecv(decl.Recv.List[0].Type) + } + w.atNode(decl.Name, decl.Name.Name, kind, recv) + case *ast.GenDecl: + for _, spec := range decl.Specs { + switch spec := spec.(type) { + case *ast.TypeSpec: + kind := guessKind(spec) + w.atNode(spec.Name, spec.Name.Name, kind) + w.walkType(spec.Type, spec.Name) + case *ast.ValueSpec: + for _, name := range spec.Names { + kind := protocol.Variable + if decl.Tok == token.CONST { + kind = protocol.Constant + } + w.atNode(name, name.Name, kind) + } + } + } + } + } +} + +func guessKind(spec *ast.TypeSpec) protocol.SymbolKind { + switch spec.Type.(type) { + case 
*ast.InterfaceType: + return protocol.Interface + case *ast.StructType: + return protocol.Struct + case *ast.FuncType: + return protocol.Function + } + return protocol.Class +} + +// walkType processes symbols related to a type expression. path is path of +// nested type identifiers to the type expression. +func (w *symbolWalker) walkType(typ ast.Expr, path ...*ast.Ident) { + switch st := typ.(type) { + case *ast.StructType: + for _, field := range st.Fields.List { + w.walkField(field, protocol.Field, protocol.Field, path...) + } + case *ast.InterfaceType: + for _, field := range st.Methods.List { + w.walkField(field, protocol.Interface, protocol.Method, path...) + } + } +} + +// walkField processes symbols related to the struct field or interface method. +// +// unnamedKind and namedKind are the symbol kinds if the field is resp. unnamed +// or named. path is the path of nested identifiers containing the field. +func (w *symbolWalker) walkField(field *ast.Field, unnamedKind, namedKind protocol.SymbolKind, path ...*ast.Ident) { + if len(field.Names) == 0 { + switch typ := field.Type.(type) { + case *ast.SelectorExpr: + // embedded qualified type + w.atNode(field, typ.Sel.Name, unnamedKind, path...) + default: + w.atNode(field, types.ExprString(field.Type), unnamedKind, path...) + } + } + for _, name := range field.Names { + w.atNode(name, name.Name, namedKind, path...) + w.walkType(field.Type, append(path, name)...) + } +} diff --git a/gopls/internal/cache/typerefs/doc.go b/gopls/internal/cache/typerefs/doc.go new file mode 100644 index 00000000000..18042c623bc --- /dev/null +++ b/gopls/internal/cache/typerefs/doc.go @@ -0,0 +1,151 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package typerefs extracts symbol-level reachability information +// from the syntax of a Go package. 
+// +// # Background +// +// The goal of this analysis is to determine, for each package P, a nearly +// minimal set of packages that could affect the type checking of P. This set +// may contain false positives, but the smaller this set the better we can +// invalidate and prune packages in gopls. +// +// More precisely, for each package P we define the set of "reachable" packages +// from P as the set of packages that may affect the (deep) export data of the +// direct dependencies of P. By this definition, the complement of this set +// cannot affect any information derived from type checking P, such as +// diagnostics, cross references, or method sets. Therefore we need not +// invalidate any results for P when a package in the complement of this set +// changes. +// +// # Computing references +// +// For a given declaration D, references are computed based on identifiers or +// dotted identifiers referenced in the declaration of D, that may affect +// the type of D. However, these references reflect only local knowledge of the +// package and its dependency metadata, and do not depend on any analysis of +// the dependencies themselves. This allows the reference information for +// a package to be cached independent of all others. +// +// Specifically, if a referring identifier I appears in the declaration, we +// record an edge from D to each object possibly referenced by I. We search for +// references within type syntax, but do not actually type-check, so we can't +// reliably determine whether an expression is a type or a term, or whether a +// function is a builtin or generic. For example, the type of x in var x = +// p.F(W) only depends on W if p.F is a builtin or generic function, which we +// cannot know without type-checking package p. So we may over-approximate in +// this way. +// +// - If I is declared in the current package, record a reference to its +// declaration. 
+// - Otherwise, if there are any dot imports in the current +// file and I is exported, record a (possibly dangling) edge to +// the corresponding declaration in each dot-imported package. +// +// If a dotted identifier q.I appears in the declaration, we +// perform a similar operation: +// +// - If q is declared in the current package, we record a reference to that +// object. It may be a var or const that has a field or method I. +// - Otherwise, if q is a valid import name based on imports in the current file +// and the provided metadata for dependency package names, record a +// reference to the object I in that package. +// - Additionally, handle the case where Q is exported, and Q.I may refer to +// a field or method in a dot-imported package. +// +// That is essentially the entire algorithm, though there is some subtlety to +// visiting the set of identifiers or dotted identifiers that may affect the +// declaration type. See the visitDeclOrSpec function for the details of this +// analysis. Notably, we also skip identifiers that refer to type parameters in +// generic declarations. +// +// # Graph optimizations +// +// The references extracted from the syntax are used to construct +// edges between nodes representing declarations. Edges are of two +// kinds: internal references, from one package-level declaration to +// another; and external references, from a symbol in this package to +// a symbol imported from a direct dependency. +// +// Once the symbol reference graph is constructed, we find its +// strongly connected components (SCCs) using Tarjan's algorithm. +// As we coalesce the nodes of each SCC we compute the union of +// external references reached by each package-level declaration. +// The final result is the mapping from each exported package-level +// declaration to the set of external (imported) declarations that it +// reaches. 
+//
+// Because it is common for many package members to have the same
+// reachability, the result takes the form of a set of equivalence
+// classes, each mapping a set of package-level declarations to a set
+// of external symbols. We use a hash table to canonicalize sets so that
+// repeated occurrences of the same set (which are common) are only
+// represented once in memory or in the file system.
+// For example, all declarations that ultimately reference only
+// {fmt.Println,strings.Join} would be classed as equivalent.
+//
+// This approach was inspired by the Hash-Value Numbering (HVN)
+// optimization described by Hardekopf and Lin. See
+// golang.org/x/tools/go/pointer/hvn.go for an implementation. (Like
+// pointer analysis, this problem is fundamentally one of graph
+// reachability.) The HVN algorithm takes the compression a step
+// further by preserving the topology of the SCC DAG, in which edges
+// represent "is a superset of" constraints. Redundant edges that
+// don't increase the solution can be deleted. We could apply the same
+// technique here to further reduce the worst-case size of the result,
+// but the current implementation seems adequate.
+//
+// # API
+//
+// The main entry point for this analysis is the [Encode] function,
+// which implements the analysis described above for one package, and
+// encodes the result as a binary message.
+//
+// The [Decode] function decodes the message into a usable form: a set
+// of equivalence classes. The decoder uses a shared [PackageIndex] to
+// enable more compact representations of sets of packages
+// ([PackageSet]) during the global reachability computation.
+//
+// The [BuildPackageGraph] constructor implements a whole-graph analysis similar
+// to that which will be implemented by gopls, but for various reasons the
+// logic for this analysis will eventually live in the
+// [golang.org/x/tools/gopls/internal/cache] package. 
Nevertheless, +// BuildPackageGraph and its test serve to verify the syntactic analysis, and +// may serve as a proving ground for new optimizations of the whole-graph analysis. +// +// # Export data is insufficient +// +// At first it may seem that the simplest way to implement this analysis would +// be to consider the types.Packages of the dependencies of P, for example +// during export. After all, it makes sense that the type checked packages +// themselves could describe their dependencies. However, this does not work as +// type information does not describe certain syntactic relationships. +// +// For example, the following scenarios cause type information to miss +// syntactic relationships: +// +// Named type forwarding: +// +// package a; type A b.B +// package b; type B int +// +// Aliases: +// +// package a; func A(f b.B) +// package b; type B = func() +// +// Initializers: +// +// package a; var A = b.B() +// package b; func B() string { return "hi" } +// +// Use of the unsafe package: +// +// package a; type A [unsafe.Sizeof(B{})]int +// package b; type B struct { f1, f2, f3 int } +// +// In all of these examples, types do not contain information about the edge +// between the a.A and b.B declarations. 
+package typerefs diff --git a/gopls/internal/lsp/source/typerefs/packageset.go b/gopls/internal/cache/typerefs/packageset.go similarity index 86% rename from gopls/internal/lsp/source/typerefs/packageset.go rename to gopls/internal/cache/typerefs/packageset.go index 0893670fdfb..29c37cd1c4c 100644 --- a/gopls/internal/lsp/source/typerefs/packageset.go +++ b/gopls/internal/cache/typerefs/packageset.go @@ -11,7 +11,7 @@ import ( "strings" "sync" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/cache/metadata" ) // PackageIndex stores common data to enable efficient representation of @@ -20,21 +20,21 @@ type PackageIndex struct { // For now, PackageIndex just indexes package ids, to save space and allow for // faster unions via sparse int vectors. mu sync.Mutex - ids []source.PackageID - m map[source.PackageID]IndexID + ids []metadata.PackageID + m map[metadata.PackageID]IndexID } // NewPackageIndex creates a new PackageIndex instance for use in building // reference and package sets. func NewPackageIndex() *PackageIndex { return &PackageIndex{ - m: make(map[source.PackageID]IndexID), + m: make(map[metadata.PackageID]IndexID), } } // IndexID returns the packageIdx referencing id, creating one if id is not yet // tracked by the receiver. -func (index *PackageIndex) IndexID(id source.PackageID) IndexID { +func (index *PackageIndex) IndexID(id metadata.PackageID) IndexID { index.mu.Lock() defer index.mu.Unlock() if i, ok := index.m[id]; ok { @@ -49,13 +49,13 @@ func (index *PackageIndex) IndexID(id source.PackageID) IndexID { // PackageID returns the PackageID for idx. // // idx must have been created by this PackageIndex instance. 
-func (index *PackageIndex) PackageID(idx IndexID) source.PackageID { +func (index *PackageIndex) PackageID(idx IndexID) metadata.PackageID { index.mu.Lock() defer index.mu.Unlock() return index.ids[idx] } -// A PackageSet is a set of source.PackageIDs, optimized for inuse memory +// A PackageSet is a set of metadata.PackageIDs, optimized for inuse memory // footprint and efficient union operations. type PackageSet struct { // PackageSet is a sparse int vector of package indexes from parent. @@ -79,12 +79,12 @@ func (index *PackageIndex) NewSet() *PackageSet { // DeclaringPackage returns the ID of the symbol's declaring package. // The package index must be the one used during decoding. -func (index *PackageIndex) DeclaringPackage(sym Symbol) source.PackageID { +func (index *PackageIndex) DeclaringPackage(sym Symbol) metadata.PackageID { return index.PackageID(sym.Package) } // Add records a new element in the package set, for the provided package ID. -func (s *PackageSet) AddPackage(id source.PackageID) { +func (s *PackageSet) AddPackage(id metadata.PackageID) { s.Add(s.parent.IndexID(id)) } @@ -116,7 +116,7 @@ func (s *PackageSet) Union(other *PackageSet) { } // Contains reports whether id is contained in the receiver set. 
-func (s *PackageSet) Contains(id source.PackageID) bool { +func (s *PackageSet) Contains(id metadata.PackageID) bool { i := int(s.parent.IndexID(id)) return s.sparse[i/blockSize]&(1<<(i%blockSize)) != 0 } diff --git a/gopls/internal/lsp/source/typerefs/pkggraph_test.go b/gopls/internal/cache/typerefs/pkggraph_test.go similarity index 78% rename from gopls/internal/lsp/source/typerefs/pkggraph_test.go rename to gopls/internal/cache/typerefs/pkggraph_test.go index e4236b09717..01cd1a86f0f 100644 --- a/gopls/internal/lsp/source/typerefs/pkggraph_test.go +++ b/gopls/internal/cache/typerefs/pkggraph_test.go @@ -17,9 +17,10 @@ import ( "sync" "golang.org/x/sync/errgroup" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/source/typerefs" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/cache/typerefs" + "golang.org/x/tools/gopls/internal/protocol" ) const ( @@ -31,8 +32,8 @@ const ( // A Package holds reference information for a single package. type Package struct { - // metadata holds metadata about this package and its dependencies. - metadata *source.Metadata + // metapkg holds metapkg about this package and its dependencies. + metapkg *metadata.Package // transitiveRefs records, for each exported declaration in the package, the // transitive set of packages within the containing graph that are @@ -49,11 +50,11 @@ type Package struct { // dependencies. 
type PackageGraph struct { pkgIndex *typerefs.PackageIndex - meta source.MetadataSource - parse func(context.Context, span.URI) (*source.ParsedGoFile, error) + meta metadata.Source + parse func(context.Context, protocol.DocumentURI) (*parsego.File, error) mu sync.Mutex - packages map[source.PackageID]*futurePackage + packages map[metadata.PackageID]*futurePackage } // BuildPackageGraph analyzes the package graph for the requested ids, whose @@ -66,14 +67,14 @@ type PackageGraph struct { // // See the package documentation for more information on the package reference // algorithm. -func BuildPackageGraph(ctx context.Context, meta source.MetadataSource, ids []source.PackageID, parse func(context.Context, span.URI) (*source.ParsedGoFile, error)) (*PackageGraph, error) { +func BuildPackageGraph(ctx context.Context, meta metadata.Source, ids []metadata.PackageID, parse func(context.Context, protocol.DocumentURI) (*parsego.File, error)) (*PackageGraph, error) { g := &PackageGraph{ pkgIndex: typerefs.NewPackageIndex(), meta: meta, parse: parse, - packages: make(map[source.PackageID]*futurePackage), + packages: make(map[metadata.PackageID]*futurePackage), } - source.SortPostOrder(meta, ids) + metadata.SortPostOrder(meta, ids) workers := runtime.GOMAXPROCS(0) if trace { @@ -100,7 +101,7 @@ type futurePackage struct { } // Package gets the result of analyzing references for a single package. -func (g *PackageGraph) Package(ctx context.Context, id source.PackageID) (*Package, error) { +func (g *PackageGraph) Package(ctx context.Context, id metadata.PackageID) (*Package, error) { g.mu.Lock() fut, ok := g.packages[id] if ok { @@ -122,28 +123,28 @@ func (g *PackageGraph) Package(ctx context.Context, id source.PackageID) (*Packa // buildPackage parses a package and extracts its reference graph. It should // only be called from Package. 
-func (g *PackageGraph) buildPackage(ctx context.Context, id source.PackageID) (*Package, error) { +func (g *PackageGraph) buildPackage(ctx context.Context, id metadata.PackageID) (*Package, error) { p := &Package{ - metadata: g.meta.Metadata(id), + metapkg: g.meta.Metadata(id), transitiveRefs: make(map[string]*typerefs.PackageSet), } - var files []*source.ParsedGoFile - for _, filename := range p.metadata.CompiledGoFiles { + var files []*parsego.File + for _, filename := range p.metapkg.CompiledGoFiles { f, err := g.parse(ctx, filename) if err != nil { return nil, err } files = append(files, f) } - imports := make(map[source.ImportPath]*source.Metadata) - for impPath, depID := range p.metadata.DepsByImpPath { + imports := make(map[metadata.ImportPath]*metadata.Package) + for impPath, depID := range p.metapkg.DepsByImpPath { if depID != "" { imports[impPath] = g.meta.Metadata(depID) } } // Compute the symbol-level dependencies through this package. - data := typerefs.Encode(files, id, imports) + data := typerefs.Encode(files, imports) // data can be persisted in a filecache, keyed // by hash(id, CompiledGoFiles, imports). @@ -156,7 +157,7 @@ func (g *PackageGraph) buildPackage(ctx context.Context, id source.PackageID) (* // package and declarations in this package or another // package. See the package documentation for a detailed // description of what these edges do (and do not) represent. 
- classes := typerefs.Decode(g.pkgIndex, id, data) + classes := typerefs.Decode(g.pkgIndex, data) // Debug if trace && len(classes) > 0 { @@ -198,7 +199,7 @@ func (g *PackageGraph) buildPackage(ctx context.Context, id source.PackageID) (* if symPkgID == id { panic("intra-package edge") } - if depP.metadata.ID != symPkgID { + if depP.metapkg.ID != symPkgID { // package changed var err error depP, err = g.Package(ctx, symPkgID) @@ -216,7 +217,7 @@ func (g *PackageGraph) buildPackage(ctx context.Context, id source.PackageID) (* // Finally compute the union of transitiveRefs // across the direct deps of this package. - byDeps, err := g.reachesByDeps(ctx, p.metadata) + byDeps, err := g.reachesByDeps(ctx, p.metapkg) if err != nil { return nil, err } @@ -227,14 +228,14 @@ func (g *PackageGraph) buildPackage(ctx context.Context, id source.PackageID) (* // reachesByDeps computes the set of packages that are reachable through // dependencies of the package m. -func (g *PackageGraph) reachesByDeps(ctx context.Context, m *source.Metadata) (*typerefs.PackageSet, error) { +func (g *PackageGraph) reachesByDeps(ctx context.Context, mp *metadata.Package) (*typerefs.PackageSet, error) { transitive := g.pkgIndex.NewSet() - for _, depID := range m.DepsByPkgPath { + for _, depID := range mp.DepsByPkgPath { dep, err := g.Package(ctx, depID) if err != nil { return nil, err } - transitive.AddPackage(dep.metadata.ID) + transitive.AddPackage(dep.metapkg.ID) for _, set := range dep.transitiveRefs { transitive.Union(set) } diff --git a/gopls/internal/lsp/source/typerefs/pkgrefs_test.go b/gopls/internal/cache/typerefs/pkgrefs_test.go similarity index 84% rename from gopls/internal/lsp/source/typerefs/pkgrefs_test.go rename to gopls/internal/cache/typerefs/pkgrefs_test.go index d75205581af..9d4b5c011d3 100644 --- a/gopls/internal/lsp/source/typerefs/pkgrefs_test.go +++ b/gopls/internal/cache/typerefs/pkgrefs_test.go @@ -20,11 +20,11 @@ import ( "golang.org/x/tools/go/gcexportdata" 
"golang.org/x/tools/go/packages" - "golang.org/x/tools/gopls/internal/astutil" - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/source/typerefs" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/cache/typerefs" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/astutil" "golang.org/x/tools/internal/packagesinternal" "golang.org/x/tools/internal/testenv" ) @@ -36,13 +36,12 @@ var ( ) type ( - packageName = source.PackageName - PackageID = source.PackageID - ImportPath = source.ImportPath - PackagePath = source.PackagePath - Metadata = source.Metadata - MetadataSource = source.MetadataSource - ParsedGoFile = source.ParsedGoFile + packageName = metadata.PackageName + PackageID = metadata.PackageID + ImportPath = metadata.ImportPath + PackagePath = metadata.PackagePath + Metadata = metadata.Package + MetadataSource = metadata.Source ) // TestBuildPackageGraph tests the BuildPackageGraph constructor, which uses @@ -71,7 +70,7 @@ func TestBuildPackageGraph(t *testing.T) { testenv.NeedsGoBuild(t) // for go/packages t0 := time.Now() - exports, meta, err := load(*query, *verify) + exports, meta, err := loadPackages(*query, *verify) if err != nil { t.Fatalf("loading failed: %v", err) } @@ -129,10 +128,10 @@ func TestBuildPackageGraph(t *testing.T) { if exportFile == "" { return nil // no exported symbols } - m := meta.Metadata(id) + mp := meta.Metadata(id) tpkg, ok := exportedPackages[id] if !ok { - pkgPath := string(m.PkgPath) + pkgPath := string(mp.PkgPath) tpkg, err = importFromExportData(pkgPath, exportFile) if err != nil { t.Fatalf("importFromExportData(%s, %s) failed: %v", pkgPath, exportFile, err) @@ -204,12 +203,12 @@ func importMap(id PackageID, meta MetadataSource) map[PackagePath]PackageID { imports := 
make(map[PackagePath]PackageID) var recordIDs func(PackageID) recordIDs = func(id PackageID) { - m := meta.Metadata(id) - if _, ok := imports[m.PkgPath]; ok { + mp := meta.Metadata(id) + if _, ok := imports[mp.PkgPath]; ok { return } - imports[m.PkgPath] = id - for _, id := range m.DepsByPkgPath { + imports[mp.PkgPath] = id + for _, id := range mp.DepsByPkgPath { recordIDs(id) } } @@ -250,7 +249,7 @@ func importFromExportData(pkgPath, exportFile string) (*types.Package, error) { func BenchmarkBuildPackageGraph(b *testing.B) { t0 := time.Now() - exports, meta, err := load(*query, *verify) + exports, meta, err := loadPackages(*query, *verify) if err != nil { b.Fatalf("loading failed: %v", err) } @@ -272,30 +271,30 @@ func BenchmarkBuildPackageGraph(b *testing.B) { type memoizedParser struct { mu sync.Mutex - files map[span.URI]*futureParse + files map[protocol.DocumentURI]*futureParse } type futureParse struct { done chan struct{} - pgf *ParsedGoFile + pgf *parsego.File err error } func newParser() *memoizedParser { return &memoizedParser{ - files: make(map[span.URI]*futureParse), + files: make(map[protocol.DocumentURI]*futureParse), } } -func (p *memoizedParser) parse(ctx context.Context, uri span.URI) (*ParsedGoFile, error) { - doParse := func(ctx context.Context, uri span.URI) (*ParsedGoFile, error) { +func (p *memoizedParser) parse(ctx context.Context, uri protocol.DocumentURI) (*parsego.File, error) { + doParse := func(ctx context.Context, uri protocol.DocumentURI) (*parsego.File, error) { // TODO(adonovan): hoist this operation outside the benchmark critsec. 
- content, err := os.ReadFile(uri.Filename()) + content, err := os.ReadFile(uri.Path()) if err != nil { return nil, err } content = astutil.PurgeFuncBodies(content) - pgf, _ := cache.ParseGoSrc(ctx, token.NewFileSet(), uri, content, source.ParseFull, false) + pgf, _ := parsego.Parse(ctx, token.NewFileSet(), uri, content, parsego.Full, false) return pgf, nil } @@ -327,11 +326,11 @@ func (s mapMetadataSource) Metadata(id PackageID) *Metadata { } // This function is a compressed version of snapshot.load from the -// internal/lsp/cache package, for use in testing. +// internal/cache package, for use in testing. // // TODO(rfindley): it may be valuable to extract this logic from the snapshot, // since it is otherwise standalone. -func load(query string, needExport bool) (map[PackageID]string, MetadataSource, error) { +func loadPackages(query string, needExport bool) (map[PackageID]string, MetadataSource, error) { cfg := &packages.Config{ Dir: *dir, Mode: packages.NeedName | @@ -361,7 +360,7 @@ func load(query string, needExport bool) (map[PackageID]string, MetadataSource, if meta[id] != nil { return } - m := &Metadata{ + mp := &Metadata{ ID: id, PkgPath: PackagePath(pkg.PkgPath), Name: packageName(pkg.Name), @@ -372,28 +371,28 @@ func load(query string, needExport bool) (map[PackageID]string, MetadataSource, Errors: pkg.Errors, DepsErrors: packagesinternal.GetDepsErrors(pkg), } - meta[id] = m + meta[id] = mp for _, filename := range pkg.CompiledGoFiles { - m.CompiledGoFiles = append(m.CompiledGoFiles, span.URIFromPath(filename)) + mp.CompiledGoFiles = append(mp.CompiledGoFiles, protocol.URIFromPath(filename)) } for _, filename := range pkg.GoFiles { - m.GoFiles = append(m.GoFiles, span.URIFromPath(filename)) + mp.GoFiles = append(mp.GoFiles, protocol.URIFromPath(filename)) } - m.DepsByImpPath = make(map[ImportPath]PackageID) - m.DepsByPkgPath = make(map[PackagePath]PackageID) + mp.DepsByImpPath = make(map[ImportPath]PackageID) + mp.DepsByPkgPath = 
make(map[PackagePath]PackageID) for importPath, imported := range pkg.Imports { importPath := ImportPath(importPath) - // see note in gopls/internal/lsp/cache/load.go for an explanation of this check. + // see note in gopls/internal/cache/load.go for an explanation of this check. if importPath != "unsafe" && len(imported.CompiledGoFiles) == 0 { - m.DepsByImpPath[importPath] = "" // missing + mp.DepsByImpPath[importPath] = "" // missing continue } - m.DepsByImpPath[importPath] = PackageID(imported.ID) - m.DepsByPkgPath[PackagePath(imported.PkgPath)] = PackageID(imported.ID) + mp.DepsByImpPath[importPath] = PackageID(imported.ID) + mp.DepsByPkgPath[PackagePath(imported.PkgPath)] = PackageID(imported.ID) buildMetadata(imported) } } diff --git a/gopls/internal/lsp/source/typerefs/refs.go b/gopls/internal/cache/typerefs/refs.go similarity index 94% rename from gopls/internal/lsp/source/typerefs/refs.go rename to gopls/internal/cache/typerefs/refs.go index 9adbb88fe4c..b389667ae7f 100644 --- a/gopls/internal/lsp/source/typerefs/refs.go +++ b/gopls/internal/cache/typerefs/refs.go @@ -11,10 +11,10 @@ import ( "sort" "strings" - "golang.org/x/tools/gopls/internal/astutil" - "golang.org/x/tools/gopls/internal/lsp/frob" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/util/astutil" + "golang.org/x/tools/gopls/internal/util/frob" ) // Encode analyzes the Go syntax trees of a package, constructs a @@ -24,8 +24,8 @@ import ( // // It returns a serializable index of this information. // Use Decode to expand the result. 
-func Encode(files []*source.ParsedGoFile, id source.PackageID, imports map[source.ImportPath]*source.Metadata) []byte { - return index(files, id, imports) +func Encode(files []*parsego.File, imports map[metadata.ImportPath]*metadata.Package) []byte { + return index(files, imports) } // Decode decodes a serializable index of symbol @@ -38,8 +38,8 @@ func Encode(files []*source.ParsedGoFile, id source.PackageID, imports map[sourc // // See the package documentation for more details as to what a // reference does (and does not) represent. -func Decode(pkgIndex *PackageIndex, id source.PackageID, data []byte) []Class { - return decode(pkgIndex, id, data) +func Decode(pkgIndex *PackageIndex, data []byte) []Class { + return decode(pkgIndex, data) } // A Class is a reachability equivalence class. @@ -77,7 +77,7 @@ type symbolSet map[symbol]bool // A symbol is the internal representation of an external // (imported) symbol referenced by the analyzed package. type symbol struct { - pkg source.PackageID + pkg metadata.PackageID name string } @@ -162,7 +162,7 @@ func classKey(set symbolSet) string { } // index builds the reference graph and encodes the index. -func index(pgfs []*source.ParsedGoFile, id source.PackageID, imports map[source.ImportPath]*source.Metadata) []byte { +func index(pgfs []*parsego.File, imports map[metadata.ImportPath]*metadata.Package) []byte { // First pass: gather package-level names and create a declNode for each. // // In ill-typed code, there may be multiple declarations of the @@ -255,12 +255,12 @@ func index(pgfs []*source.ParsedGoFile, id source.PackageID, imports map[source. // visitFile inspects the file syntax for referring identifiers, and // populates the internal and external references of decls. 
-func visitFile(file *ast.File, imports map[source.ImportPath]*source.Metadata, decls map[string]*declNode) { +func visitFile(file *ast.File, imports map[metadata.ImportPath]*metadata.Package, decls map[string]*declNode) { // Import information for this file. Multiple packages // may be referenced by a given name in the presence // of type errors (or multiple dot imports, which are // keyed by "."). - fileImports := make(map[string][]source.PackageID) + fileImports := make(map[string][]metadata.PackageID) // importEdge records a reference from decl to an imported symbol // (pkgname.name). The package name may be ".". @@ -339,7 +339,7 @@ func visitFile(file *ast.File, imports map[source.ImportPath]*source.Metadata, d // Record local import names for this file. for _, spec := range d.Specs { spec := spec.(*ast.ImportSpec) - path := source.UnquoteImportPath(spec) + path := metadata.UnquoteImportPath(spec) if path == "" { continue } @@ -364,7 +364,7 @@ func visitFile(file *ast.File, imports map[source.ImportPath]*source.Metadata, d case token.TYPE: for _, spec := range d.Specs { spec := spec.(*ast.TypeSpec) - tparams := tparamsMap(typeparams.ForTypeSpec(spec)) + tparams := tparamsMap(spec.TypeParams) visit(spec.Name, spec, tparams) } @@ -399,7 +399,7 @@ func visitFile(file *ast.File, imports map[source.ImportPath]*source.Metadata, d } } else { // Non-method. - tparams := tparamsMap(typeparams.ForFuncType(d.Type)) + tparams := tparamsMap(d.Type.TypeParams) visit(d.Name, d, tparams) } } @@ -449,7 +449,7 @@ func visitDeclOrSpec(node ast.Node, f refVisitor) { case *ast.TypeSpec: // Skip Doc, Name, and Comment, which do not affect the decl type. 
- if tparams := typeparams.ForTypeSpec(n); tparams != nil { + if tparams := n.TypeParams; tparams != nil { visitFieldList(tparams, f) } visitExpr(n.Type, f) @@ -522,7 +522,7 @@ func visitExpr(expr ast.Expr, f refVisitor) { visitExpr(n.X, f) visitExpr(n.Index, f) // may affect type for instantiations - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: visitExpr(n.X, f) for _, index := range n.Indices { visitExpr(index, f) // may affect the type for instantiations @@ -562,7 +562,7 @@ func visitExpr(expr ast.Expr, f refVisitor) { visitFieldList(n.Fields, f) case *ast.FuncType: - if tparams := typeparams.ForFuncType(n); tparams != nil { + if tparams := n.TypeParams; tparams != nil { visitFieldList(tparams, f) } if n.Params != nil { @@ -800,7 +800,7 @@ func encode(classNames map[int][]string, classes []symbolSet) []byte { return classesCodec.Encode(payload) } -func decode(pkgIndex *PackageIndex, id source.PackageID, data []byte) []Class { +func decode(pkgIndex *PackageIndex, data []byte) []Class { var payload gobClasses classesCodec.Decode(data, &payload) @@ -812,7 +812,7 @@ func decode(pkgIndex *PackageIndex, id source.PackageID, data []byte) []Class { } refs := make([]Symbol, len(gobClass.Refs)/2) for i := range refs { - pkgID := pkgIndex.IndexID(source.PackageID(payload.Strings[gobClass.Refs[2*i]])) + pkgID := pkgIndex.IndexID(metadata.PackageID(payload.Strings[gobClass.Refs[2*i]])) name := payload.Strings[gobClass.Refs[2*i+1]] refs[i] = Symbol{Package: pkgID, Name: name} } diff --git a/gopls/internal/lsp/source/typerefs/refs_test.go b/gopls/internal/cache/typerefs/refs_test.go similarity index 91% rename from gopls/internal/lsp/source/typerefs/refs_test.go rename to gopls/internal/cache/typerefs/refs_test.go index 388dceddf1c..1e98fb585ed 100644 --- a/gopls/internal/lsp/source/typerefs/refs_test.go +++ b/gopls/internal/cache/typerefs/refs_test.go @@ -12,11 +12,10 @@ import ( "testing" "github.com/google/go-cmp/cmp" - 
"golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/source/typerefs" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/cache/typerefs" + "golang.org/x/tools/gopls/internal/protocol" ) // TestRefs checks that the analysis reports, for each exported member @@ -30,7 +29,6 @@ func TestRefs(t *testing.T) { srcs []string // source for the local package; package name must be p imports map[string]string // for simplicity: importPath -> pkgID/pkgName (we set pkgName == pkgID); 'ext' is always available. want map[string][]string // decl name -> id. - go118 bool // test uses generics allowErrs bool // whether we expect parsing errors }{ { @@ -110,7 +108,6 @@ type D ext.D "C": {"ext.C"}, "D": {"ext.D"}, }, - go118: true, }, { label: "funcs", @@ -410,7 +407,6 @@ func (A[B]) M(C) {} "T": {"ext.T"}, "T3": {"ext.T3"}, }, - go118: true, }, { label: "instances", @@ -432,7 +428,6 @@ type E ext.E "D": {"ext.A", "ext.B", "ext.E"}, "E": {"ext.E"}, }, - go118: true, }, { label: "duplicate decls", @@ -509,35 +504,31 @@ type Z map[ext.A]ext.B for _, test := range tests { t.Run(test.label, func(t *testing.T) { - if test.go118 { - testenv.NeedsGo1Point(t, 18) - } - - var pgfs []*source.ParsedGoFile + var pgfs []*parsego.File for i, src := range test.srcs { - uri := span.URI(fmt.Sprintf("file:///%d.go", i)) - pgf, _ := cache.ParseGoSrc(ctx, token.NewFileSet(), uri, []byte(src), source.ParseFull, false) + uri := protocol.DocumentURI(fmt.Sprintf("file:///%d.go", i)) + pgf, _ := parsego.Parse(ctx, token.NewFileSet(), uri, []byte(src), parsego.Full, false) if !test.allowErrs && pgf.ParseErr != nil { t.Fatalf("ParseGoSrc(...) 
returned parse errors: %v", pgf.ParseErr) } pgfs = append(pgfs, pgf) } - imports := map[source.ImportPath]*source.Metadata{ + imports := map[metadata.ImportPath]*metadata.Package{ "ext": {ID: "ext", Name: "ext"}, // this one comes for free } - for path, m := range test.imports { - imports[source.ImportPath(path)] = &source.Metadata{ - ID: source.PackageID(m), - Name: source.PackageName(m), + for path, mp := range test.imports { + imports[metadata.ImportPath(path)] = &metadata.Package{ + ID: metadata.PackageID(mp), + Name: metadata.PackageName(mp), } } - data := typerefs.Encode(pgfs, "p", imports) + data := typerefs.Encode(pgfs, imports) got := make(map[string][]string) index := typerefs.NewPackageIndex() - for _, class := range typerefs.Decode(index, "p", data) { + for _, class := range typerefs.Decode(index, data) { // We redundantly expand out the name x refs cross product // here since that's what the existing tests expect. for _, name := range class.Decls { diff --git a/gopls/internal/cache/view.go b/gopls/internal/cache/view.go new file mode 100644 index 00000000000..ed52646f31d --- /dev/null +++ b/gopls/internal/cache/view.go @@ -0,0 +1,1299 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cache is the core of gopls: it is concerned with state +// management, dependency analysis, and invalidation; and it holds the +// machinery of type checking and modular static analysis. Its +// principal types are [Session], [Folder], [View], [Snapshot], +// [Cache], and [Package]. 
+package cache + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "os" + "os/exec" + "path" + "path/filepath" + "regexp" + "sort" + "strings" + "sync" + "time" + + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/maps" + "golang.org/x/tools/gopls/internal/util/pathutil" + "golang.org/x/tools/gopls/internal/util/slices" + "golang.org/x/tools/gopls/internal/vulncheck" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/imports" + "golang.org/x/tools/internal/xcontext" +) + +// A Folder represents an LSP workspace folder, together with its per-folder +// options and environment variables that affect build configuration. +// +// Folders (Name and Dir) are specified by the 'initialize' and subsequent +// 'didChangeWorkspaceFolders' requests; their options come from +// didChangeConfiguration. +// +// Folders must not be mutated, as they may be shared across multiple views. +type Folder struct { + Dir protocol.DocumentURI + Name string // decorative name for UI; not necessarily unique + Options *settings.Options + Env *GoEnv +} + +// GoEnv holds the environment variables and data from the Go command that is +// required for operating on a workspace folder. +type GoEnv struct { + // Go environment variables. These correspond directly with the Go env var of + // the same name. + GOOS string + GOARCH string + GOCACHE string + GOMODCACHE string + GOPATH string + GOPRIVATE string + GOFLAGS string + GO111MODULE string + + // Go version output. + GoVersion int // The X in Go 1.X + GoVersionOutput string // complete go version output + + // OS environment variables (notably not go env). + GOWORK string + GOPACKAGESDRIVER string +} + +// View represents a single build for a workspace. 
+// +// A View is a logical build (the viewDefinition) along with a state of that +// build (the Snapshot). +type View struct { + id string // a unique string to identify this View in (e.g.) serialized Commands + + *viewDefinition // build configuration + + gocmdRunner *gocommand.Runner // limits go command concurrency + + // baseCtx is the context handed to NewView. This is the parent of all + // background contexts created for this view. + baseCtx context.Context + + importsState *importsState + + // parseCache holds an LRU cache of recently parsed files. + parseCache *parseCache + + // fs is the file source used to populate this view. + fs *overlayFS + + // ignoreFilter is used for fast checking of ignored files. + ignoreFilter *ignoreFilter + + // cancelInitialWorkspaceLoad can be used to terminate the view's first + // attempt at initialization. + cancelInitialWorkspaceLoad context.CancelFunc + + snapshotMu sync.Mutex + snapshot *Snapshot // latest snapshot; nil after shutdown has been called + + // initialWorkspaceLoad is closed when the first workspace initialization has + // completed. If we failed to load, we only retry if the go.mod file changes, + // to avoid too many go/packages calls. + initialWorkspaceLoad chan struct{} + + // initializationSema is used limit concurrent initialization of snapshots in + // the view. We use a channel instead of a mutex to avoid blocking when a + // context is canceled. + // + // This field (along with snapshot.initialized) guards against duplicate + // initialization of snapshots. Do not change it without adjusting snapshot + // accordingly. + initializationSema chan struct{} + + // Document filters are constructed once, in View.filterFunc. + filterFuncOnce sync.Once + _filterFunc func(protocol.DocumentURI) bool // only accessed by View.filterFunc +} + +// definition implements the viewDefiner interface. 
+func (v *View) definition() *viewDefinition { return v.viewDefinition } + +// A viewDefinition is a logical build, i.e. configuration (Folder) along with +// a build directory and possibly an environment overlay (e.g. GOWORK=off or +// GOOS, GOARCH=...) to affect the build. +// +// This type is immutable, and compared to see if the View needs to be +// reconstructed. +// +// Note: whenever modifying this type, also modify the equivalence relation +// implemented by viewDefinitionsEqual. +// +// TODO(golang/go#57979): viewDefinition should be sufficient for running +// go/packages. Enforce this in the API. +type viewDefinition struct { + folder *Folder // pointer comparison is OK, as any new Folder creates a new def + + typ ViewType + root protocol.DocumentURI // root directory; where to run the Go command + gomod protocol.DocumentURI // the nearest go.mod file, or "" + gowork protocol.DocumentURI // the nearest go.work file, or "" + + // workspaceModFiles holds the set of mod files active in this snapshot. + // + // For a go.work workspace, this is the set of workspace modfiles. For a + // go.mod workspace, this contains the go.mod file defining the workspace + // root, as well as any locally replaced modules (if + // "includeReplaceInWorkspace" is set). + // + // TODO(rfindley): should we just run `go list -m` to compute this set? + workspaceModFiles map[protocol.DocumentURI]struct{} + workspaceModFilesErr error // error encountered computing workspaceModFiles + + // envOverlay holds additional environment to apply to this viewDefinition. + envOverlay map[string]string +} + +// definition implements the viewDefiner interface. +func (d *viewDefinition) definition() *viewDefinition { return d } + +// Type returns the ViewType type, which determines how go/packages are loaded +// for this View. +func (d *viewDefinition) Type() ViewType { return d.typ } + +// Root returns the view root, which determines where packages are loaded from. 
+func (d *viewDefinition) Root() protocol.DocumentURI { return d.root } + +// GoMod returns the nearest go.mod file for this view's root, or "". +func (d *viewDefinition) GoMod() protocol.DocumentURI { return d.gomod } + +// GoWork returns the nearest go.work file for this view's root, or "". +func (d *viewDefinition) GoWork() protocol.DocumentURI { return d.gowork } + +// EnvOverlay returns a new sorted slice of environment variables (in the form +// "k=v") for this view definition's env overlay. +func (d *viewDefinition) EnvOverlay() []string { + var env []string + for k, v := range d.envOverlay { + env = append(env, fmt.Sprintf("%s=%s", k, v)) + } + sort.Strings(env) + return env +} + +// GOOS returns the effective GOOS value for this view definition, accounting +// for its env overlay. +func (d *viewDefinition) GOOS() string { + if goos, ok := d.envOverlay["GOOS"]; ok { + return goos + } + return d.folder.Env.GOOS +} + +// GOOS returns the effective GOARCH value for this view definition, accounting +// for its env overlay. +func (d *viewDefinition) GOARCH() string { + if goarch, ok := d.envOverlay["GOARCH"]; ok { + return goarch + } + return d.folder.Env.GOARCH +} + +// adjustedGO111MODULE is the value of GO111MODULE to use for loading packages. +// It is adjusted to default to "auto" rather than "on", since if we are in +// GOPATH and have no module, we may as well allow a GOPATH view to work. +func (d viewDefinition) adjustedGO111MODULE() string { + if d.folder.Env.GO111MODULE != "" { + return d.folder.Env.GO111MODULE + } + return "auto" +} + +// ModFiles are the go.mod files enclosed in the snapshot's view and known +// to the snapshot. +func (d viewDefinition) ModFiles() []protocol.DocumentURI { + var uris []protocol.DocumentURI + for modURI := range d.workspaceModFiles { + uris = append(uris, modURI) + } + return uris +} + +// viewDefinitionsEqual reports whether x and y are equivalent. 
+func viewDefinitionsEqual(x, y *viewDefinition) bool { + if (x.workspaceModFilesErr == nil) != (y.workspaceModFilesErr == nil) { + return false + } + if x.workspaceModFilesErr != nil { + if x.workspaceModFilesErr.Error() != y.workspaceModFilesErr.Error() { + return false + } + } else if !maps.SameKeys(x.workspaceModFiles, y.workspaceModFiles) { + return false + } + if len(x.envOverlay) != len(y.envOverlay) { + return false + } + for i, xv := range x.envOverlay { + if xv != y.envOverlay[i] { + return false + } + } + return x.folder == y.folder && + x.typ == y.typ && + x.root == y.root && + x.gomod == y.gomod && + x.gowork == y.gowork +} + +// A ViewType describes how we load package information for a view. +// +// This is used for constructing the go/packages.Load query, and for +// interpreting missing packages, imports, or errors. +// +// See the documentation for individual ViewType values for details. +type ViewType int + +const ( + // GoPackagesDriverView is a view with a non-empty GOPACKAGESDRIVER + // environment variable. + // + // Load: ./... from the workspace folder. + GoPackagesDriverView ViewType = iota + + // GOPATHView is a view in GOPATH mode. + // + // I.e. in GOPATH, with GO111MODULE=off, or GO111MODULE=auto with no + // go.mod file. + // + // Load: ./... from the workspace folder. + GOPATHView + + // GoModView is a view in module mode with a single Go module. + // + // Load: /... from the module root. + GoModView + + // GoWorkView is a view in module mode with a go.work file. + // + // Load: /... from the workspace folder, for each module. + GoWorkView + + // An AdHocView is a collection of files in a given directory, not in GOPATH + // or a module. + // + // Load: . from the workspace folder. 
+ AdHocView +) + +func (t ViewType) String() string { + switch t { + case GoPackagesDriverView: + return "GoPackagesDriverView" + case GOPATHView: + return "GOPATHView" + case GoModView: + return "GoModView" + case GoWorkView: + return "GoWorkView" + case AdHocView: + return "AdHocView" + default: + return "Unknown" + } +} + +// moduleMode reports whether the view uses Go modules. +func (w viewDefinition) moduleMode() bool { + switch w.typ { + case GoModView, GoWorkView: + return true + default: + return false + } +} + +func (v *View) ID() string { return v.id } + +// tempModFile creates a temporary go.mod file based on the contents +// of the given go.mod file. On success, it is the caller's +// responsibility to call the cleanup function when the file is no +// longer needed. +func tempModFile(modURI protocol.DocumentURI, gomod, gosum []byte) (tmpURI protocol.DocumentURI, cleanup func(), err error) { + filenameHash := file.HashOf([]byte(modURI.Path())) + tmpMod, err := os.CreateTemp("", fmt.Sprintf("go.%s.*.mod", filenameHash)) + if err != nil { + return "", nil, err + } + defer tmpMod.Close() + + tmpURI = protocol.URIFromPath(tmpMod.Name()) + tmpSumName := sumFilename(tmpURI) + + if _, err := tmpMod.Write(gomod); err != nil { + return "", nil, err + } + + // We use a distinct name here to avoid subtlety around the fact + // that both 'return' and 'defer' update the "cleanup" variable. + doCleanup := func() { + _ = os.Remove(tmpSumName) + _ = os.Remove(tmpURI.Path()) + } + + // Be careful to clean up if we return an error from this function. + defer func() { + if err != nil { + doCleanup() + cleanup = nil + } + }() + + // Create an analogous go.sum, if one exists. + if gosum != nil { + if err := os.WriteFile(tmpSumName, gosum, 0655); err != nil { + return "", nil, err + } + } + + return tmpURI, doCleanup, nil +} + +// Folder returns the folder at the base of this view. 
+func (v *View) Folder() *Folder { + return v.folder +} + +// UpdateFolders updates the set of views for the new folders. +// +// Calling this causes each view to be reinitialized. +func (s *Session) UpdateFolders(ctx context.Context, newFolders []*Folder) error { + s.viewMu.Lock() + defer s.viewMu.Unlock() + + overlays := s.Overlays() + var openFiles []protocol.DocumentURI + for _, o := range overlays { + openFiles = append(openFiles, o.URI()) + } + + defs, err := selectViewDefs(ctx, s, newFolders, openFiles) + if err != nil { + return err + } + var newViews []*View + for _, def := range defs { + v, _, release := s.createView(ctx, def) + release() + newViews = append(newViews, v) + } + for _, v := range s.views { + v.shutdown() + } + s.views = newViews + return nil +} + +// viewEnv returns a string describing the environment of a newly created view. +// +// It must not be called concurrently with any other view methods. +// TODO(rfindley): rethink this function, or inline sole call. +func viewEnv(v *View) string { + var buf bytes.Buffer + fmt.Fprintf(&buf, `go info for %v +(view type %v) +(root dir %s) +(go version %s) +(build flags: %v) +(go env: %+v) +(env overlay: %v) +`, + v.folder.Dir.Path(), + v.typ, + v.root.Path(), + strings.TrimRight(v.folder.Env.GoVersionOutput, "\n"), + v.folder.Options.BuildFlags, + *v.folder.Env, + v.envOverlay, + ) + + return buf.String() +} + +// RunProcessEnvFunc runs fn with the process env for this snapshot's view. +// Note: the process env contains cached module and filesystem state. +func (s *Snapshot) RunProcessEnvFunc(ctx context.Context, fn func(context.Context, *imports.Options) error) error { + return s.view.importsState.runProcessEnvFunc(ctx, s, fn) +} + +// separated out from its sole use in locateTemplateFiles for testability +func fileHasExtension(path string, suffixes []string) bool { + ext := filepath.Ext(path) + if ext != "" && ext[0] == '.' 
{ + ext = ext[1:] + } + for _, s := range suffixes { + if s != "" && ext == s { + return true + } + } + return false +} + +// locateTemplateFiles ensures that the snapshot has mapped template files +// within the workspace folder. +func (s *Snapshot) locateTemplateFiles(ctx context.Context) { + suffixes := s.Options().TemplateExtensions + if len(suffixes) == 0 { + return + } + + searched := 0 + filterFunc := s.view.filterFunc() + err := filepath.WalkDir(s.view.folder.Dir.Path(), func(path string, entry os.DirEntry, err error) error { + if err != nil { + return err + } + if entry.IsDir() { + return nil + } + if fileLimit > 0 && searched > fileLimit { + return errExhausted + } + searched++ + if !fileHasExtension(path, suffixes) { + return nil + } + uri := protocol.URIFromPath(path) + if filterFunc(uri) { + return nil + } + // Get the file in order to include it in the snapshot. + // TODO(golang/go#57558): it is fundamentally broken to track files in this + // way; we may lose them if configuration or layout changes cause a view to + // be recreated. + // + // Furthermore, this operation must ignore errors, including context + // cancellation, or risk leaving the snapshot in an undefined state. + s.ReadFile(ctx, uri) + return nil + }) + if err != nil { + event.Error(ctx, "searching for template files failed", err) + } +} + +// filterFunc returns a func that reports whether uri is filtered by the currently configured +// directoryFilters. +func (v *View) filterFunc() func(protocol.DocumentURI) bool { + v.filterFuncOnce.Do(func() { + folderDir := v.folder.Dir.Path() + gomodcache := v.folder.Env.GOMODCACHE + var filters []string + filters = append(filters, v.folder.Options.DirectoryFilters...) 
+ if pref := strings.TrimPrefix(gomodcache, folderDir); pref != gomodcache { + modcacheFilter := "-" + strings.TrimPrefix(filepath.ToSlash(pref), "/") + filters = append(filters, modcacheFilter) + } + filterer := NewFilterer(filters) + v._filterFunc = func(uri protocol.DocumentURI) bool { + // Only filter relative to the configured root directory. + if pathutil.InDir(folderDir, uri.Path()) { + return relPathExcludedByFilter(strings.TrimPrefix(uri.Path(), folderDir), filterer) + } + return false + } + }) + return v._filterFunc +} + +// shutdown releases resources associated with the view. +func (v *View) shutdown() { + // Cancel the initial workspace load if it is still running. + v.cancelInitialWorkspaceLoad() + + v.snapshotMu.Lock() + if v.snapshot != nil { + v.snapshot.cancel() + v.snapshot.decref() + v.snapshot = nil + } + v.snapshotMu.Unlock() +} + +// IgnoredFile reports if a file would be ignored by a `go list` of the whole +// workspace. +// +// While go list ./... skips directories starting with '.', '_', or 'testdata', +// gopls may still load them via file queries. Explicitly filter them out. +func (s *Snapshot) IgnoredFile(uri protocol.DocumentURI) bool { + // Fast path: if uri doesn't contain '.', '_', or 'testdata', it is not + // possible that it is ignored. + { + uriStr := string(uri) + if !strings.Contains(uriStr, ".") && !strings.Contains(uriStr, "_") && !strings.Contains(uriStr, "testdata") { + return false + } + } + + return s.view.ignoreFilter.ignored(uri.Path()) +} + +// An ignoreFilter implements go list's exclusion rules via its 'ignored' method. +type ignoreFilter struct { + prefixes []string // root dirs, ending in filepath.Separator +} + +// newIgnoreFilter returns a new ignoreFilter implementing exclusion rules +// relative to the provided directories. 
+func newIgnoreFilter(dirs []string) *ignoreFilter { + f := new(ignoreFilter) + for _, d := range dirs { + f.prefixes = append(f.prefixes, filepath.Clean(d)+string(filepath.Separator)) + } + return f +} + +func (f *ignoreFilter) ignored(filename string) bool { + for _, prefix := range f.prefixes { + if suffix := strings.TrimPrefix(filename, prefix); suffix != filename { + if checkIgnored(suffix) { + return true + } + } + } + return false +} + +// checkIgnored implements go list's exclusion rules. +// Quoting “go help list”: +// +// Directory and file names that begin with "." or "_" are ignored +// by the go tool, as are directories named "testdata". +func checkIgnored(suffix string) bool { + // Note: this could be further optimized by writing a HasSegment helper, a + // segment-boundary respecting variant of strings.Contains. + for _, component := range strings.Split(suffix, string(filepath.Separator)) { + if len(component) == 0 { + continue + } + if component[0] == '.' || component[0] == '_' || component == "testdata" { + return true + } + } + return false +} + +// Snapshot returns the current snapshot for the view, and a +// release function that must be called when the Snapshot is +// no longer needed. +// +// The resulting error is non-nil if and only if the view is shut down, in +// which case the resulting release function will also be nil. +func (v *View) Snapshot() (*Snapshot, func(), error) { + v.snapshotMu.Lock() + defer v.snapshotMu.Unlock() + if v.snapshot == nil { + return nil, nil, errors.New("view is shutdown") + } + return v.snapshot, v.snapshot.Acquire(), nil +} + +// initialize loads the metadata (and currently, file contents, due to +// golang/go#57558) for the main package query of the View, which depends on +// the view type (see ViewType). If s.initialized is already true, initialize +// is a no op. +// +// The first attempt--which populates the first snapshot for a new view--must +// be allowed to run to completion without being cancelled. 
+// +// Subsequent attempts are triggered by conditions where gopls can't enumerate +// specific packages that require reloading, such as a change to a go.mod file. +// These attempts may be cancelled, and then retried by a later call. +// +// Postcondition: if ctx was not cancelled, s.initialized is true, s.initialErr +// holds the error resulting from initialization, if any, and s.metadata holds +// the resulting metadata graph. +func (s *Snapshot) initialize(ctx context.Context, firstAttempt bool) { + // Acquire initializationSema, which is + // (in effect) a mutex with a timeout. + select { + case <-ctx.Done(): + return + case s.view.initializationSema <- struct{}{}: + } + + defer func() { + <-s.view.initializationSema + }() + + s.mu.Lock() + initialized := s.initialized + s.mu.Unlock() + + if initialized { + return + } + + defer func() { + if firstAttempt { + close(s.view.initialWorkspaceLoad) + } + }() + + // TODO(rFindley): we should only locate template files on the first attempt, + // or guard it via a different mechanism. + s.locateTemplateFiles(ctx) + + // Collect module paths to load by parsing go.mod files. If a module fails to + // parse, capture the parsing failure as a critical diagnostic. + var scopes []loadScope // scopes to load + var modDiagnostics []*Diagnostic // diagnostics for broken go.mod files + addError := func(uri protocol.DocumentURI, err error) { + modDiagnostics = append(modDiagnostics, &Diagnostic{ + URI: uri, + Severity: protocol.SeverityError, + Source: ListError, + Message: err.Error(), + }) + } + + if len(s.view.workspaceModFiles) > 0 { + for modURI := range s.view.workspaceModFiles { + // Verify that the modfile is valid before trying to load it. + // + // TODO(rfindley): now that we no longer need to parse the modfile in + // order to load scope, we could move these diagnostics to a more general + // location where we diagnose problems with modfiles or the workspace. 
+ // + // Be careful not to add context cancellation errors as critical module + // errors. + fh, err := s.ReadFile(ctx, modURI) + if err != nil { + if ctx.Err() != nil { + return + } + addError(modURI, err) + continue + } + parsed, err := s.ParseMod(ctx, fh) + if err != nil { + if ctx.Err() != nil { + return + } + addError(modURI, err) + continue + } + if parsed.File == nil || parsed.File.Module == nil { + addError(modURI, fmt.Errorf("no module path for %s", modURI)) + continue + } + moduleDir := filepath.Dir(modURI.Path()) + // Previously, we loaded /... for each module path, but that + // is actually incorrect when the pattern may match packages in more than + // one module. See golang/go#59458 for more details. + scopes = append(scopes, moduleLoadScope{dir: moduleDir, modulePath: parsed.File.Module.Mod.Path}) + } + } else { + scopes = append(scopes, viewLoadScope{}) + } + + // If we're loading anything, ensure we also load builtin, + // since it provides fake definitions (and documentation) + // for types like int that are used everywhere. + if len(scopes) > 0 { + scopes = append(scopes, packageLoadScope("builtin")) + } + loadErr := s.load(ctx, true, scopes...) + + // A failure is retryable if it may have been due to context cancellation, + // and this is not the initial workspace load (firstAttempt==true). + // + // The IWL runs on a detached context with a long (~10m) timeout, so + // if the context was canceled we consider loading to have failed + // permanently. 
+ if loadErr != nil && ctx.Err() != nil && !firstAttempt { + return + } + + var initialErr *InitializationError + switch { + case loadErr != nil && ctx.Err() != nil: + event.Error(ctx, fmt.Sprintf("initial workspace load: %v", loadErr), loadErr) + initialErr = &InitializationError{ + MainError: loadErr, + } + case loadErr != nil: + event.Error(ctx, "initial workspace load failed", loadErr) + extractedDiags := s.extractGoCommandErrors(ctx, loadErr) + initialErr = &InitializationError{ + MainError: loadErr, + Diagnostics: maps.Group(extractedDiags, byURI), + } + case s.view.workspaceModFilesErr != nil: + initialErr = &InitializationError{ + MainError: s.view.workspaceModFilesErr, + } + case len(modDiagnostics) > 0: + initialErr = &InitializationError{ + MainError: fmt.Errorf(modDiagnostics[0].Message), + } + } + + s.mu.Lock() + defer s.mu.Unlock() + + s.initialized = true + s.initialErr = initialErr +} + +// A StateChange describes external state changes that may affect a snapshot. +// +// By far the most common of these is a change to file state, but a query of +// module upgrade information or vulnerabilities also affects gopls' behavior. +type StateChange struct { + Modifications []file.Modification // if set, the raw modifications originating this change + Files map[protocol.DocumentURI]file.Handle + ModuleUpgrades map[protocol.DocumentURI]map[string]string + Vulns map[protocol.DocumentURI]*vulncheck.Result + GCDetails map[metadata.PackageID]bool // package -> whether or not we want details +} + +// InvalidateView processes the provided state change, invalidating any derived +// results that depend on the changed state. +// +// The resulting snapshot is non-nil, representing the outcome of the state +// change. The second result is a function that must be called to release the +// snapshot when the snapshot is no longer needed. +// +// An error is returned if the given view is no longer active in the session. 
+func (s *Session) InvalidateView(ctx context.Context, view *View, changed StateChange) (*Snapshot, func(), error) { + s.viewMu.Lock() + defer s.viewMu.Unlock() + + if !slices.Contains(s.views, view) { + return nil, nil, fmt.Errorf("view is no longer active") + } + snapshot, release, _ := s.invalidateViewLocked(ctx, view, changed) + return snapshot, release, nil +} + +// invalidateViewLocked invalidates the content of the given view. +// (See [Session.InvalidateView]). +// +// The resulting bool reports whether the View needs to be re-diagnosed. +// (See [Snapshot.clone]). +// +// s.viewMu must be held while calling this method. +func (s *Session) invalidateViewLocked(ctx context.Context, v *View, changed StateChange) (*Snapshot, func(), bool) { + // Detach the context so that content invalidation cannot be canceled. + ctx = xcontext.Detach(ctx) + + // This should be the only time we hold the view's snapshot lock for any period of time. + v.snapshotMu.Lock() + defer v.snapshotMu.Unlock() + + prevSnapshot := v.snapshot + + if prevSnapshot == nil { + panic("invalidateContent called after shutdown") + } + + // Cancel all still-running previous requests, since they would be + // operating on stale data. + prevSnapshot.cancel() + + // Do not clone a snapshot until its view has finished initializing. + // + // TODO(rfindley): shouldn't we do this before canceling? + prevSnapshot.AwaitInitialized(ctx) + + var needsDiagnosis bool + s.snapshotWG.Add(1) + v.snapshot, needsDiagnosis = prevSnapshot.clone(ctx, v.baseCtx, changed, s.snapshotWG.Done) + + // Remove the initial reference created when prevSnapshot was created. + prevSnapshot.decref() + + // Return a second lease to the caller. + return v.snapshot, v.snapshot.Acquire(), needsDiagnosis +} + +// defineView computes the view definition for the provided workspace folder +// and URI. +// +// If forURI is non-empty, this view should be the best view including forURI. +// Otherwise, it is the default view for the folder. 
+// +// defineView only returns an error in the event of context cancellation. +// +// Note: keep this function in sync with bestView. +// +// TODO(rfindley): we should be able to remove the error return, as +// findModules is going away, and all other I/O is memoized. +// +// TODO(rfindley): pass in a narrower interface for the file.Source +// (e.g. fileExists func(DocumentURI) bool) to make clear that this +// process depends only on directory information, not file contents. +func defineView(ctx context.Context, fs file.Source, folder *Folder, forFile file.Handle) (*viewDefinition, error) { + if err := checkPathValid(folder.Dir.Path()); err != nil { + return nil, fmt.Errorf("invalid workspace folder path: %w; check that the spelling of the configured workspace folder path agrees with the spelling reported by the operating system", err) + } + dir := folder.Dir.Path() + if forFile != nil { + dir = filepath.Dir(forFile.URI().Path()) + } + + def := new(viewDefinition) + def.folder = folder + + if forFile != nil && fileKind(forFile) == file.Go { + // If the file has GOOS/GOARCH build constraints that + // don't match the folder's environment (which comes from + // 'go env' in the folder, plus user options), + // add those constraints to the viewDefinition's environment. + + // Content trimming is nontrivial, so do this outside of the loop below. + // Keep this in sync with bestView. + path := forFile.URI().Path() + if content, err := forFile.Content(); err == nil { + // Note the err == nil condition above: by convention a non-existent file + // does not have any constraints. See the related note in bestView: this + // choice of behavior shouldn't actually matter. In this case, we should + // only call defineView with Overlays, which always have content. 
+ content = trimContentForPortMatch(content) + viewPort := port{def.folder.Env.GOOS, def.folder.Env.GOARCH} + if !viewPort.matches(path, content) { + for _, p := range preferredPorts { + if p.matches(path, content) { + if def.envOverlay == nil { + def.envOverlay = make(map[string]string) + } + def.envOverlay["GOOS"] = p.GOOS + def.envOverlay["GOARCH"] = p.GOARCH + break + } + } + } + } + } + + var err error + dirURI := protocol.URIFromPath(dir) + goworkFromEnv := false + if folder.Env.GOWORK != "off" && folder.Env.GOWORK != "" { + goworkFromEnv = true + def.gowork = protocol.URIFromPath(folder.Env.GOWORK) + } else { + def.gowork, err = findRootPattern(ctx, dirURI, "go.work", fs) + if err != nil { + return nil, err + } + } + + // When deriving the best view for a given file, we only want to search + // up the directory hierarchy for modfiles. + def.gomod, err = findRootPattern(ctx, dirURI, "go.mod", fs) + if err != nil { + return nil, err + } + + // Determine how we load and where to load package information for this view + // + // Specifically, set + // - def.typ + // - def.root + // - def.workspaceModFiles, and + // - def.envOverlay. + + // If GOPACKAGESDRIVER is set it takes precedence. + { + // The value of GOPACKAGESDRIVER is not returned through the go command. + gopackagesdriver := os.Getenv("GOPACKAGESDRIVER") + // A user may also have a gopackagesdriver binary on their machine, which + // works the same way as setting GOPACKAGESDRIVER. + // + // TODO(rfindley): remove this call to LookPath. We should not support this + // undocumented method of setting GOPACKAGESDRIVER. + tool, err := exec.LookPath("gopackagesdriver") + if gopackagesdriver != "off" && (gopackagesdriver != "" || (err == nil && tool != "")) { + def.typ = GoPackagesDriverView + def.root = dirURI + return def, nil + } + } + + // From go.dev/ref/mod, module mode is active if GO111MODULE=on, or + // GO111MODULE=auto or "" and we are inside a module or have a GOWORK value. 
+ // But gopls is less strict, allowing GOPATH mode if GO111MODULE="", and + // AdHoc views if no module is found. + + // gomodWorkspace is a helper to compute the correct set of workspace + // modfiles for a go.mod file, based on folder options. + gomodWorkspace := func() map[protocol.DocumentURI]unit { + modFiles := map[protocol.DocumentURI]struct{}{def.gomod: {}} + if folder.Options.IncludeReplaceInWorkspace { + includingReplace, err := goModModules(ctx, def.gomod, fs) + if err == nil { + modFiles = includingReplace + } else { + // If the go.mod file fails to parse, we don't know anything about + // replace directives, so fall back to a view of just the root module. + } + } + return modFiles + } + + // Prefer a go.work file if it is available and contains the module relevant + // to forURI. + if def.adjustedGO111MODULE() != "off" && folder.Env.GOWORK != "off" && def.gowork != "" { + def.typ = GoWorkView + if goworkFromEnv { + // The go.work file could be anywhere, which can lead to confusing error + // messages. + def.root = dirURI + } else { + // The go.work file could be anywhere, which can lead to confusing error + def.root = def.gowork.Dir() + } + def.workspaceModFiles, def.workspaceModFilesErr = goWorkModules(ctx, def.gowork, fs) + + // If forURI is in a module but that module is not + // included in the go.work file, use a go.mod view with GOWORK=off. + if forFile != nil && def.workspaceModFilesErr == nil && def.gomod != "" { + if _, ok := def.workspaceModFiles[def.gomod]; !ok { + def.typ = GoModView + def.root = def.gomod.Dir() + def.workspaceModFiles = gomodWorkspace() + if def.envOverlay == nil { + def.envOverlay = make(map[string]string) + } + def.envOverlay["GOWORK"] = "off" + } + } + return def, nil + } + + // Otherwise, use the active module, if in module mode. + // + // Note, we could override GO111MODULE here via envOverlay if we wanted to + // support the case where someone opens a module with GO111MODULE=off. 
But + // that is probably not worth worrying about (at this point, folks probably + // shouldn't be setting GO111MODULE). + if def.adjustedGO111MODULE() != "off" && def.gomod != "" { + def.typ = GoModView + def.root = def.gomod.Dir() + def.workspaceModFiles = gomodWorkspace() + return def, nil + } + + // Check if the workspace is within any GOPATH directory. + inGOPATH := false + for _, gp := range filepath.SplitList(folder.Env.GOPATH) { + if pathutil.InDir(filepath.Join(gp, "src"), dir) { + inGOPATH = true + break + } + } + if def.adjustedGO111MODULE() != "on" && inGOPATH { + def.typ = GOPATHView + def.root = dirURI + return def, nil + } + + // We're not in a workspace, module, or GOPATH, so have no better choice than + // an ad-hoc view. + def.typ = AdHocView + def.root = dirURI + return def, nil +} + +// FetchGoEnv queries the environment and Go command to collect environment +// variables necessary for the workspace folder. +func FetchGoEnv(ctx context.Context, folder protocol.DocumentURI, opts *settings.Options) (*GoEnv, error) { + dir := folder.Path() + // All of the go commands invoked here should be fast. No need to share a + // runner with other operations. + runner := new(gocommand.Runner) + inv := gocommand.Invocation{ + WorkingDir: dir, + Env: opts.EnvSlice(), + } + + var ( + env = new(GoEnv) + err error + ) + envvars := map[string]*string{ + "GOOS": &env.GOOS, + "GOARCH": &env.GOARCH, + "GOCACHE": &env.GOCACHE, + "GOPATH": &env.GOPATH, + "GOPRIVATE": &env.GOPRIVATE, + "GOMODCACHE": &env.GOMODCACHE, + "GOFLAGS": &env.GOFLAGS, + "GO111MODULE": &env.GO111MODULE, + } + if err := loadGoEnv(ctx, dir, opts.EnvSlice(), runner, envvars); err != nil { + return nil, err + } + + env.GoVersion, err = gocommand.GoVersion(ctx, inv, runner) + if err != nil { + return nil, err + } + env.GoVersionOutput, err = gocommand.GoVersionOutput(ctx, inv, runner) + if err != nil { + return nil, err + } + + // The value of GOPACKAGESDRIVER is not returned through the go command. 
+ if driver, ok := opts.Env["GOPACKAGESDRIVER"]; ok { + env.GOPACKAGESDRIVER = driver + } else { + env.GOPACKAGESDRIVER = os.Getenv("GOPACKAGESDRIVER") + // A user may also have a gopackagesdriver binary on their machine, which + // works the same way as setting GOPACKAGESDRIVER. + // + // TODO(rfindley): remove this call to LookPath. We should not support this + // undocumented method of setting GOPACKAGESDRIVER. + if env.GOPACKAGESDRIVER == "" { + tool, err := exec.LookPath("gopackagesdriver") + if err == nil && tool != "" { + env.GOPACKAGESDRIVER = tool + } + } + } + + // While GOWORK is available through the Go command, we want to differentiate + // between an explicit GOWORK value and one which is implicit from the file + // system. The former doesn't change unless the environment changes. + if gowork, ok := opts.Env["GOWORK"]; ok { + env.GOWORK = gowork + } else { + env.GOWORK = os.Getenv("GOWORK") + } + return env, nil +} + +// loadGoEnv loads `go env` values into the provided map, keyed by Go variable +// name. +func loadGoEnv(ctx context.Context, dir string, configEnv []string, runner *gocommand.Runner, vars map[string]*string) error { + // We can save ~200 ms by requesting only the variables we care about. + args := []string{"-json"} + for k := range vars { + args = append(args, k) + } + + inv := gocommand.Invocation{ + Verb: "env", + Args: args, + Env: configEnv, + WorkingDir: dir, + } + stdout, err := runner.Run(ctx, inv) + if err != nil { + return err + } + envMap := make(map[string]string) + if err := json.Unmarshal(stdout.Bytes(), &envMap); err != nil { + return fmt.Errorf("internal error unmarshaling JSON from 'go env': %w", err) + } + for key, ptr := range vars { + *ptr = envMap[key] + } + + return nil +} + +// findRootPattern looks for files with the given basename in dir or any parent +// directory of dir, using the provided FileSource. It returns the first match, +// starting from dir and search parents. 
+// +// The resulting string is either the file path of a matching file with the +// given basename, or "" if none was found. +// +// findRootPattern only returns an error in the case of context cancellation. +func findRootPattern(ctx context.Context, dirURI protocol.DocumentURI, basename string, fs file.Source) (protocol.DocumentURI, error) { + dir := dirURI.Path() + for dir != "" { + target := filepath.Join(dir, basename) + uri := protocol.URIFromPath(target) + fh, err := fs.ReadFile(ctx, uri) + if err != nil { + return "", err // context cancelled + } + if fileExists(fh) { + return uri, nil + } + // Trailing separators must be trimmed, otherwise filepath.Split is a noop. + next, _ := filepath.Split(strings.TrimRight(dir, string(filepath.Separator))) + if next == dir { + break + } + dir = next + } + return "", nil +} + +// checkPathValid performs an OS-specific path validity check. The +// implementation varies for filesystems that are case-insensitive +// (e.g. macOS, Windows), and for those that disallow certain file +// names (e.g. path segments ending with a period on Windows, or +// reserved names such as "com"; see +// https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file). +var checkPathValid = defaultCheckPathValid + +// CheckPathValid checks whether a directory is suitable as a workspace folder. +func CheckPathValid(dir string) error { return checkPathValid(dir) } + +func defaultCheckPathValid(path string) error { + return nil +} + +// IsGoPrivatePath reports whether target is a private import path, as identified +// by the GOPRIVATE environment variable. +func (s *Snapshot) IsGoPrivatePath(target string) bool { + return globsMatchPath(s.view.folder.Env.GOPRIVATE, target) +} + +// ModuleUpgrades returns known module upgrades for the dependencies of +// modfile. 
+func (s *Snapshot) ModuleUpgrades(modfile protocol.DocumentURI) map[string]string {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	upgrades := map[string]string{}
+	orig, _ := s.moduleUpgrades.Get(modfile)
+	for mod, ver := range orig {
+		upgrades[mod] = ver
+	}
+	return upgrades
+}
+
+// MaxGovulncheckResultAge defines the maximum vulnerability age considered
+// valid by gopls.
+//
+// Mutable for testing.
+var MaxGovulncheckResultAge = 1 * time.Hour
+
+// Vulnerabilities returns known vulnerabilities for the given modfile.
+//
+// Results more than an hour old are excluded.
+//
+// TODO(suzmue): replace command.Vuln with a different type, maybe
+// https://pkg.go.dev/golang.org/x/vuln/cmd/govulncheck/govulnchecklib#Summary?
+//
+// TODO(rfindley): move to snapshot.go
+func (s *Snapshot) Vulnerabilities(modfiles ...protocol.DocumentURI) map[protocol.DocumentURI]*vulncheck.Result {
+	m := make(map[protocol.DocumentURI]*vulncheck.Result)
+	now := time.Now()
+
+	s.mu.Lock()
+	defer s.mu.Unlock()
+
+	if len(modfiles) == 0 { // empty means all modfiles
+		modfiles = s.vulns.Keys()
+	}
+	for _, modfile := range modfiles {
+		vuln, _ := s.vulns.Get(modfile)
+		if vuln != nil && now.Sub(vuln.AsOf) > MaxGovulncheckResultAge {
+			vuln = nil
+		}
+		m[modfile] = vuln
+	}
+	return m
+}
+
+// GoVersion returns the effective release Go version (the X in go1.X) for this
+// view.
+func (v *View) GoVersion() int {
+	return v.folder.Env.GoVersion
+}
+
+// GoVersionString returns the effective Go version string for this view.
+//
+// Unlike [GoVersion], this encodes the minor version and commit hash information.
+func (v *View) GoVersionString() string {
+	return gocommand.ParseGoVersionOutput(v.folder.Env.GoVersionOutput)
+}
+
+// GoVersionString is temporarily available from the snapshot.
+//
+// TODO(rfindley): refactor so that this method is not necessary.
+func (s *Snapshot) GoVersionString() string { + return s.view.GoVersionString() +} + +// Copied from +// https://cs.opensource.google/go/go/+/master:src/cmd/go/internal/str/path.go;l=58;drc=2910c5b4a01a573ebc97744890a07c1a3122c67a +func globsMatchPath(globs, target string) bool { + for globs != "" { + // Extract next non-empty glob in comma-separated list. + var glob string + if i := strings.Index(globs, ","); i >= 0 { + glob, globs = globs[:i], globs[i+1:] + } else { + glob, globs = globs, "" + } + if glob == "" { + continue + } + + // A glob with N+1 path elements (N slashes) needs to be matched + // against the first N+1 path elements of target, + // which end just before the N+1'th slash. + n := strings.Count(glob, "/") + prefix := target + // Walk target, counting slashes, truncating at the N+1'th slash. + for i := 0; i < len(target); i++ { + if target[i] == '/' { + if n == 0 { + prefix = target[:i] + break + } + n-- + } + } + if n > 0 { + // Not enough prefix elements. + continue + } + matched, _ := path.Match(glob, prefix) + if matched { + return true + } + } + return false +} + +var modFlagRegexp = regexp.MustCompile(`-mod[ =](\w+)`) + +// TODO(rfindley): clean up the redundancy of allFilesExcluded, +// pathExcludedByFilterFunc, pathExcludedByFilter, view.filterFunc... +func allFilesExcluded(files []string, filterFunc func(protocol.DocumentURI) bool) bool { + for _, f := range files { + uri := protocol.URIFromPath(f) + if !filterFunc(uri) { + return false + } + } + return true +} + +func relPathExcludedByFilter(path string, filterer *Filterer) bool { + path = strings.TrimPrefix(filepath.ToSlash(path), "/") + return filterer.Disallow(path) +} diff --git a/gopls/internal/cache/view_test.go b/gopls/internal/cache/view_test.go new file mode 100644 index 00000000000..992a3d61828 --- /dev/null +++ b/gopls/internal/cache/view_test.go @@ -0,0 +1,175 @@ +// Copyright 2020 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +package cache + +import ( + "os" + "path/filepath" + "testing" + + "golang.org/x/tools/gopls/internal/protocol" +) + +func TestCaseInsensitiveFilesystem(t *testing.T) { + base := t.TempDir() + + inner := filepath.Join(base, "a/B/c/DEFgh") + if err := os.MkdirAll(inner, 0777); err != nil { + t.Fatal(err) + } + file := filepath.Join(inner, "f.go") + if err := os.WriteFile(file, []byte("hi"), 0777); err != nil { + t.Fatal(err) + } + if _, err := os.Stat(filepath.Join(inner, "F.go")); err != nil { + t.Skip("filesystem is case-sensitive") + } + + tests := []struct { + path string + err bool + }{ + {file, false}, + {filepath.Join(inner, "F.go"), true}, + {filepath.Join(base, "a/b/c/defgh/f.go"), true}, + } + for _, tt := range tests { + err := checkPathValid(tt.path) + if err != nil != tt.err { + t.Errorf("checkPathValid(%q) = %v, wanted error: %v", tt.path, err, tt.err) + } + } +} + +func TestInVendor(t *testing.T) { + for _, tt := range []struct { + path string + inVendor bool + }{ + {"foo/vendor/x.go", false}, + {"foo/vendor/x/x.go", true}, + {"foo/x.go", false}, + {"foo/vendor/foo.txt", false}, + {"foo/vendor/modules.txt", false}, + } { + if got := inVendor(protocol.URIFromPath(tt.path)); got != tt.inVendor { + t.Errorf("expected %s inVendor %v, got %v", tt.path, tt.inVendor, got) + } + } +} + +func TestFilters(t *testing.T) { + tests := []struct { + filters []string + included []string + excluded []string + }{ + { + included: []string{"x"}, + }, + { + filters: []string{"-"}, + excluded: []string{"x", "x/a"}, + }, + { + filters: []string{"-x", "+y"}, + included: []string{"y", "y/a", "z"}, + excluded: []string{"x", "x/a"}, + }, + { + filters: []string{"-x", "+x/y", "-x/y/z"}, + included: []string{"x/y", "x/y/a", "a"}, + excluded: []string{"x", "x/a", "x/y/z/a"}, + }, + { + filters: []string{"+foobar", "-foo"}, + included: []string{"foobar", "foobar/a"}, + 
excluded: []string{"foo", "foo/a"}, + }, + } + + for _, tt := range tests { + filterer := NewFilterer(tt.filters) + for _, inc := range tt.included { + if relPathExcludedByFilter(inc, filterer) { + t.Errorf("filters %q excluded %v, wanted included", tt.filters, inc) + } + } + for _, exc := range tt.excluded { + if !relPathExcludedByFilter(exc, filterer) { + t.Errorf("filters %q included %v, wanted excluded", tt.filters, exc) + } + } + } +} + +func TestSuffixes(t *testing.T) { + type file struct { + path string + want bool + } + type cases struct { + option []string + files []file + } + tests := []cases{ + {[]string{"tmpl", "gotmpl"}, []file{ // default + {"foo", false}, + {"foo.tmpl", true}, + {"foo.gotmpl", true}, + {"tmpl", false}, + {"tmpl.go", false}}, + }, + {[]string{"tmpl", "gotmpl", "html", "gohtml"}, []file{ + {"foo.gotmpl", true}, + {"foo.html", true}, + {"foo.gohtml", true}, + {"html", false}}, + }, + {[]string{"tmpl", "gotmpl", ""}, []file{ // possible user mistake + {"foo.gotmpl", true}, + {"foo.go", false}, + {"foo", false}}, + }, + } + for _, a := range tests { + suffixes := a.option + for _, b := range a.files { + got := fileHasExtension(b.path, suffixes) + if got != b.want { + t.Errorf("got %v, want %v, option %q, file %q (%+v)", + got, b.want, a.option, b.path, b) + } + } + } +} + +func TestIgnoreFilter(t *testing.T) { + tests := []struct { + dirs []string + path string + want bool + }{ + {[]string{"a"}, "a/testdata/foo", true}, + {[]string{"a"}, "a/_ignore/foo", true}, + {[]string{"a"}, "a/.ignore/foo", true}, + {[]string{"a"}, "b/testdata/foo", false}, + {[]string{"a"}, "testdata/foo", false}, + {[]string{"a", "b"}, "b/testdata/foo", true}, + {[]string{"a"}, "atestdata/foo", false}, + } + + for _, test := range tests { + // convert to filepaths, for convenience + for i, dir := range test.dirs { + test.dirs[i] = filepath.FromSlash(dir) + } + test.path = filepath.FromSlash(test.path) + + f := newIgnoreFilter(test.dirs) + if got := 
f.ignored(test.path); got != test.want { + t.Errorf("newIgnoreFilter(%q).ignore(%q) = %t, want %t", test.dirs, test.path, got, test.want) + } + } +} diff --git a/gopls/internal/cache/workspace.go b/gopls/internal/cache/workspace.go new file mode 100644 index 00000000000..07134b3da00 --- /dev/null +++ b/gopls/internal/cache/workspace.go @@ -0,0 +1,112 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "context" + "errors" + "fmt" + "path/filepath" + + "golang.org/x/mod/modfile" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" +) + +// isGoWork reports if uri is a go.work file. +func isGoWork(uri protocol.DocumentURI) bool { + return filepath.Base(uri.Path()) == "go.work" +} + +// goWorkModules returns the URIs of go.mod files named by the go.work file. +func goWorkModules(ctx context.Context, gowork protocol.DocumentURI, fs file.Source) (map[protocol.DocumentURI]unit, error) { + fh, err := fs.ReadFile(ctx, gowork) + if err != nil { + return nil, err // canceled + } + content, err := fh.Content() + if err != nil { + return nil, err + } + filename := gowork.Path() + dir := filepath.Dir(filename) + workFile, err := modfile.ParseWork(filename, content, nil) + if err != nil { + return nil, fmt.Errorf("parsing go.work: %w", err) + } + var usedDirs []string + for _, use := range workFile.Use { + usedDirs = append(usedDirs, use.Path) + } + return localModFiles(dir, usedDirs), nil +} + +// localModFiles builds a set of local go.mod files referenced by +// goWorkOrModPaths, which is a slice of paths as contained in a go.work 'use' +// directive or go.mod 'replace' directive (and which therefore may use either +// '/' or '\' as a path separator). 
+func localModFiles(relativeTo string, goWorkOrModPaths []string) map[protocol.DocumentURI]unit { + modFiles := make(map[protocol.DocumentURI]unit) + for _, path := range goWorkOrModPaths { + modDir := filepath.FromSlash(path) + if !filepath.IsAbs(modDir) { + modDir = filepath.Join(relativeTo, modDir) + } + modURI := protocol.URIFromPath(filepath.Join(modDir, "go.mod")) + modFiles[modURI] = unit{} + } + return modFiles +} + +// isGoMod reports if uri is a go.mod file. +func isGoMod(uri protocol.DocumentURI) bool { + return filepath.Base(uri.Path()) == "go.mod" +} + +// goModModules returns the URIs of "workspace" go.mod files defined by a +// go.mod file. This set is defined to be the given go.mod file itself, as well +// as the modfiles of any locally replaced modules in the go.mod file. +func goModModules(ctx context.Context, gomod protocol.DocumentURI, fs file.Source) (map[protocol.DocumentURI]unit, error) { + fh, err := fs.ReadFile(ctx, gomod) + if err != nil { + return nil, err // canceled + } + content, err := fh.Content() + if err != nil { + return nil, err + } + filename := gomod.Path() + dir := filepath.Dir(filename) + modFile, err := modfile.Parse(filename, content, nil) + if err != nil { + return nil, err + } + var localReplaces []string + for _, replace := range modFile.Replace { + if modfile.IsDirectoryPath(replace.New.Path) { + localReplaces = append(localReplaces, replace.New.Path) + } + } + modFiles := localModFiles(dir, localReplaces) + modFiles[gomod] = unit{} + return modFiles, nil +} + +// fileExists reports whether the file has a Content (which may be empty). +// An overlay exists even if it is not reflected in the file system. +func fileExists(fh file.Handle) bool { + _, err := fh.Content() + return err == nil +} + +// errExhausted is returned by findModules if the file scan limit is reached. +var errExhausted = errors.New("exhausted") + +// Limit go.mod search to 100,000 files. 
As a point of reference, +// Kubernetes has 22K files (as of 2020-11-24). +// +// Note: per golang/go#56496, the previous limit of 1M files was too slow, at +// which point this limit was decreased to 100K. +const fileLimit = 100_000 diff --git a/gopls/internal/lsp/source/xrefs/xrefs.go b/gopls/internal/cache/xrefs/xrefs.go similarity index 87% rename from gopls/internal/lsp/source/xrefs/xrefs.go rename to gopls/internal/cache/xrefs/xrefs.go index 88f76b1eb64..b29b80aebf2 100644 --- a/gopls/internal/lsp/source/xrefs/xrefs.go +++ b/gopls/internal/cache/xrefs/xrefs.go @@ -14,15 +14,16 @@ import ( "sort" "golang.org/x/tools/go/types/objectpath" - "golang.org/x/tools/gopls/internal/lsp/frob" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/frob" + "golang.org/x/tools/gopls/internal/util/typesutil" ) // Index constructs a serializable index of outbound cross-references // for the specified type-checked package. -func Index(files []*source.ParsedGoFile, pkg *types.Package, info *types.Info) []byte { +func Index(files []*parsego.File, pkg *types.Package, info *types.Info) []byte { // pkgObjects maps each referenced package Q to a mapping: // from each referenced symbol in Q to the ordered list // of references to that symbol from this package. @@ -66,7 +67,7 @@ func Index(files []*source.ParsedGoFile, pkg *types.Package, info *types.Info) [ // For instantiations of generic methods, // use the generic object (see issue #60622). 
if fn, ok := obj.(*types.Func); ok { - obj = typeparams.OriginMethod(fn) + obj = fn.Origin() } objects := getObjects(obj.Pkg()) @@ -92,7 +93,7 @@ func Index(files []*source.ParsedGoFile, pkg *types.Package, info *types.Info) [ case *ast.ImportSpec: // Report a reference from each import path // string to the imported package. - pkgname, ok := source.ImportedPkgName(info, n) + pkgname, ok := typesutil.ImportedPkgName(info, n) if !ok { return true // missing import } @@ -116,7 +117,7 @@ func Index(files []*source.ParsedGoFile, pkg *types.Package, info *types.Info) [ for p := range pkgObjects { objects := pkgObjects[p] gp := &gobPackage{ - PkgPath: source.PackagePath(p.Path()), + PkgPath: metadata.PackagePath(p.Path()), Objects: make([]*gobObject, 0, len(objects)), } for _, gobObj := range objects { @@ -138,7 +139,7 @@ func Index(files []*source.ParsedGoFile, pkg *types.Package, info *types.Info) [ // operation on m, and returns the locations of all references from m // to any object in the target set. Each object is denoted by a pair // of (package path, object path). 
-func Lookup(m *source.Metadata, data []byte, targets map[source.PackagePath]map[objectpath.Path]struct{}) (locs []protocol.Location) { +func Lookup(mp *metadata.Package, data []byte, targets map[metadata.PackagePath]map[objectpath.Path]struct{}) (locs []protocol.Location) { var packages []*gobPackage packageCodec.Decode(data, &packages) for _, gp := range packages { @@ -146,9 +147,9 @@ func Lookup(m *source.Metadata, data []byte, targets map[source.PackagePath]map[ for _, gobObj := range gp.Objects { if _, ok := objectSet[gobObj.Path]; ok { for _, ref := range gobObj.Refs { - uri := m.CompiledGoFiles[ref.FileIndex] + uri := mp.CompiledGoFiles[ref.FileIndex] locs = append(locs, protocol.Location{ - URI: protocol.URIFromSpanURI(uri), + URI: uri, Range: ref.Range, }) } @@ -177,8 +178,8 @@ var packageCodec = frob.CodecFor[[]*gobPackage]() // A gobPackage records the set of outgoing references from the index // package to symbols defined in a dependency package. type gobPackage struct { - PkgPath source.PackagePath // defining package (Q) - Objects []*gobObject // set of Q objects referenced by P + PkgPath metadata.PackagePath // defining package (Q) + Objects []*gobObject // set of Q objects referenced by P } // A gobObject records all references to a particular symbol. diff --git a/gopls/internal/cmd/call_hierarchy.go b/gopls/internal/cmd/call_hierarchy.go new file mode 100644 index 00000000000..82c18d0d28f --- /dev/null +++ b/gopls/internal/cmd/call_hierarchy.go @@ -0,0 +1,143 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmd + +import ( + "context" + "flag" + "fmt" + "strings" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/tool" +) + +// callHierarchy implements the callHierarchy verb for gopls. 
+type callHierarchy struct { + app *Application +} + +func (c *callHierarchy) Name() string { return "call_hierarchy" } +func (c *callHierarchy) Parent() string { return c.app.Name() } +func (c *callHierarchy) Usage() string { return "" } +func (c *callHierarchy) ShortHelp() string { return "display selected identifier's call hierarchy" } +func (c *callHierarchy) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` +Example: + + $ # 1-indexed location (:line:column or :#offset) of the target identifier + $ gopls call_hierarchy helper/helper.go:8:6 + $ gopls call_hierarchy helper/helper.go:#53 +`) + printFlagDefaults(f) +} + +func (c *callHierarchy) Run(ctx context.Context, args ...string) error { + if len(args) != 1 { + return tool.CommandLineErrorf("call_hierarchy expects 1 argument (position)") + } + + conn, err := c.app.connect(ctx, nil) + if err != nil { + return err + } + defer conn.terminate(ctx) + + from := parseSpan(args[0]) + file, err := conn.openFile(ctx, from.URI()) + if err != nil { + return err + } + + loc, err := file.spanLocation(from) + if err != nil { + return err + } + + p := protocol.CallHierarchyPrepareParams{ + TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), + } + + callItems, err := conn.PrepareCallHierarchy(ctx, &p) + if err != nil { + return err + } + if len(callItems) == 0 { + return fmt.Errorf("function declaration identifier not found at %v", args[0]) + } + + for _, item := range callItems { + incomingCalls, err := conn.IncomingCalls(ctx, &protocol.CallHierarchyIncomingCallsParams{Item: item}) + if err != nil { + return err + } + for i, call := range incomingCalls { + // From the spec: CallHierarchyIncomingCall.FromRanges is relative to + // the caller denoted by CallHierarchyIncomingCall.from. 
+ printString, err := callItemPrintString(ctx, conn, call.From, call.From.URI, call.FromRanges) + if err != nil { + return err + } + fmt.Printf("caller[%d]: %s\n", i, printString) + } + + printString, err := callItemPrintString(ctx, conn, item, "", nil) + if err != nil { + return err + } + fmt.Printf("identifier: %s\n", printString) + + outgoingCalls, err := conn.OutgoingCalls(ctx, &protocol.CallHierarchyOutgoingCallsParams{Item: item}) + if err != nil { + return err + } + for i, call := range outgoingCalls { + // From the spec: CallHierarchyOutgoingCall.FromRanges is the range + // relative to the caller, e.g the item passed to + printString, err := callItemPrintString(ctx, conn, call.To, item.URI, call.FromRanges) + if err != nil { + return err + } + fmt.Printf("callee[%d]: %s\n", i, printString) + } + } + + return nil +} + +// callItemPrintString returns a protocol.CallHierarchyItem object represented as a string. +// item and call ranges (protocol.Range) are converted to user friendly spans (1-indexed). 
+func callItemPrintString(ctx context.Context, conn *connection, item protocol.CallHierarchyItem, callsURI protocol.DocumentURI, calls []protocol.Range) (string, error) { + itemFile, err := conn.openFile(ctx, item.URI) + if err != nil { + return "", err + } + itemSpan, err := itemFile.rangeSpan(item.Range) + if err != nil { + return "", err + } + + var callRanges []string + if callsURI != "" { + callsFile, err := conn.openFile(ctx, callsURI) + if err != nil { + return "", err + } + for _, rng := range calls { + call, err := callsFile.rangeSpan(rng) + if err != nil { + return "", err + } + callRange := fmt.Sprintf("%d:%d-%d", call.Start().Line(), call.Start().Column(), call.End().Column()) + callRanges = append(callRanges, callRange) + } + } + + printString := fmt.Sprintf("function %s in %v", item.Name, itemSpan) + if len(calls) > 0 { + printString = fmt.Sprintf("ranges %s in %s from/to %s", strings.Join(callRanges, ", "), callsURI.Path(), printString) + } + return printString, nil +} diff --git a/gopls/internal/lsp/cmd/capabilities_test.go b/gopls/internal/cmd/capabilities_test.go similarity index 92% rename from gopls/internal/lsp/cmd/capabilities_test.go rename to gopls/internal/cmd/capabilities_test.go index e952b0dcbe7..47670572285 100644 --- a/gopls/internal/lsp/cmd/capabilities_test.go +++ b/gopls/internal/cmd/capabilities_test.go @@ -11,10 +11,10 @@ import ( "path/filepath" "testing" - "golang.org/x/tools/gopls/internal/lsp" - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/server" + "golang.org/x/tools/gopls/internal/settings" "golang.org/x/tools/internal/testenv" ) @@ -40,17 +40,17 @@ func TestCapabilities(t *testing.T) { } defer os.RemoveAll(tmpDir) - app := New("gopls-test", tmpDir, os.Environ(), nil) + app := New(nil) params := 
&protocol.ParamInitialize{} - params.RootURI = protocol.URIFromPath(app.wd) + params.RootURI = protocol.URIFromPath(tmpDir) params.Capabilities.Workspace.Configuration = true // Send an initialize request to the server. ctx := context.Background() client := newClient(app, nil) - options := source.DefaultOptions(app.options) - server := lsp.NewServer(cache.NewSession(ctx, cache.New(nil)), client, options) + options := settings.DefaultOptions(app.options) + server := server.New(cache.NewSession(ctx, cache.New(nil)), client, options) result, err := server.Initialize(ctx, params) if err != nil { t.Fatal(err) diff --git a/gopls/internal/cmd/check.go b/gopls/internal/cmd/check.go new file mode 100644 index 00000000000..2d7a7674226 --- /dev/null +++ b/gopls/internal/cmd/check.go @@ -0,0 +1,73 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmd + +import ( + "context" + "flag" + "fmt" + + "golang.org/x/tools/gopls/internal/protocol" +) + +// check implements the check verb for gopls. +type check struct { + app *Application +} + +func (c *check) Name() string { return "check" } +func (c *check) Parent() string { return c.app.Name() } +func (c *check) Usage() string { return "" } +func (c *check) ShortHelp() string { return "show diagnostic results for the specified file" } +func (c *check) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` +Example: show the diagnostic results of this file: + + $ gopls check internal/cmd/check.go +`) + printFlagDefaults(f) +} + +// Run performs the check on the files specified by args and prints the +// results to stdout. 
+func (c *check) Run(ctx context.Context, args ...string) error { + if len(args) == 0 { + // no files, so no results + return nil + } + checking := map[protocol.DocumentURI]*cmdFile{} + var uris []protocol.DocumentURI + // now we're ready to kick things off + conn, err := c.app.connect(ctx, nil) + if err != nil { + return err + } + defer conn.terminate(ctx) + for _, arg := range args { + uri := protocol.URIFromPath(arg) + uris = append(uris, uri) + file, err := conn.openFile(ctx, uri) + if err != nil { + return err + } + checking[uri] = file + } + if err := conn.diagnoseFiles(ctx, uris); err != nil { + return err + } + conn.client.filesMu.Lock() + defer conn.client.filesMu.Unlock() + + for _, file := range checking { + for _, d := range file.diagnostics { + spn, err := file.rangeSpan(d.Range) + if err != nil { + return fmt.Errorf("Could not convert position %v for %q", d.Range, d.Message) + } + fmt.Printf("%v: %v\n", spn, d.Message) + } + } + return nil +} diff --git a/gopls/internal/cmd/cmd.go b/gopls/internal/cmd/cmd.go new file mode 100644 index 00000000000..31ca0981c87 --- /dev/null +++ b/gopls/internal/cmd/cmd.go @@ -0,0 +1,843 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cmd handles the gopls command line. +// It contains a handler for each of the modes, along with all the flag handling +// and the command line output format. 
+package cmd + +import ( + "context" + "flag" + "fmt" + "log" + "os" + "path/filepath" + "reflect" + "sort" + "strings" + "sync" + "text/tabwriter" + "time" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/debug" + "golang.org/x/tools/gopls/internal/filecache" + "golang.org/x/tools/gopls/internal/lsprpc" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/server" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/browser" + bugpkg "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/constraints" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/jsonrpc2" + "golang.org/x/tools/internal/tool" +) + +// Application is the main application as passed to tool.Main +// It handles the main command line parsing and dispatch to the sub commands. +type Application struct { + // Core application flags + + // Embed the basic profiling flags supported by the tool package + tool.Profile + + // We include the server configuration directly for now, so the flags work + // even without the verb. + // TODO: Remove this when we stop allowing the serve verb by default. + Serve Serve + + // the options configuring function to invoke when building a server + options func(*settings.Options) + + // Support for remote LSP server. + Remote string `flag:"remote" help:"forward all commands to a remote lsp specified by this flag. With no special prefix, this is assumed to be a TCP address. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. If 'auto', or prefixed by 'auto;', the remote address is automatically resolved based on the executing environment."` + + // Verbose enables verbose logging. + Verbose bool `flag:"v,verbose" help:"verbose output"` + + // VeryVerbose enables a higher level of verbosity in logging output. 
+ VeryVerbose bool `flag:"vv,veryverbose" help:"very verbose output"` + + // Control ocagent export of telemetry + OCAgent string `flag:"ocagent" help:"the address of the ocagent (e.g. http://localhost:55678), or off"` + + // PrepareOptions is called to update the options when a new view is built. + // It is primarily to allow the behavior of gopls to be modified by hooks. + PrepareOptions func(*settings.Options) + + // editFlags holds flags that control how file edit operations + // are applied, in particular when the server makes an ApplyEdits + // downcall to the client. Present only for commands that apply edits. + editFlags *EditFlags +} + +// EditFlags defines flags common to {fix,format,imports,rename} +// that control how edits are applied to the client's files. +// +// The type is exported for flag reflection. +// +// The -write, -diff, and -list flags are orthogonal but any +// of them suppresses the default behavior, which is to print +// the edited file contents. +type EditFlags struct { + Write bool `flag:"w,write" help:"write edited content to source files"` + Preserve bool `flag:"preserve" help:"with -write, make copies of original files"` + Diff bool `flag:"d,diff" help:"display diffs instead of edited file content"` + List bool `flag:"l,list" help:"display names of edited files"` +} + +func (app *Application) verbose() bool { + return app.Verbose || app.VeryVerbose +} + +// New returns a new Application ready to run. +func New(options func(*settings.Options)) *Application { + app := &Application{ + options: options, + OCAgent: "off", //TODO: Remove this line to default the exporter to on + + Serve: Serve{ + RemoteListenTimeout: 1 * time.Minute, + }, + } + app.Serve.app = app + return app +} + +// Name implements tool.Application returning the binary name. +func (app *Application) Name() string { return "gopls" } + +// Usage implements tool.Application returning empty extra argument usage. 
+func (app *Application) Usage() string { return "" } + +// ShortHelp implements tool.Application returning the main binary help. +func (app *Application) ShortHelp() string { + return "" +} + +// DetailedHelp implements tool.Application returning the main binary help. +// This includes the short help for all the sub commands. +func (app *Application) DetailedHelp(f *flag.FlagSet) { + w := tabwriter.NewWriter(f.Output(), 0, 0, 2, ' ', 0) + defer w.Flush() + + fmt.Fprint(w, ` +gopls is a Go language server. + +It is typically used with an editor to provide language features. When no +command is specified, gopls will default to the 'serve' command. The language +features can also be accessed via the gopls command-line interface. + +Usage: + gopls help [] + +Command: +`) + fmt.Fprint(w, "\nMain\t\n") + for _, c := range app.mainCommands() { + fmt.Fprintf(w, " %s\t%s\n", c.Name(), c.ShortHelp()) + } + fmt.Fprint(w, "\t\nFeatures\t\n") + for _, c := range app.featureCommands() { + fmt.Fprintf(w, " %s\t%s\n", c.Name(), c.ShortHelp()) + } + if app.verbose() { + fmt.Fprint(w, "\t\nInternal Use Only\t\n") + for _, c := range app.internalCommands() { + fmt.Fprintf(w, " %s\t%s\n", c.Name(), c.ShortHelp()) + } + } + fmt.Fprint(w, "\nflags:\n") + printFlagDefaults(f) +} + +// this is a slightly modified version of flag.PrintDefaults to give us control +func printFlagDefaults(s *flag.FlagSet) { + var flags [][]*flag.Flag + seen := map[flag.Value]int{} + s.VisitAll(func(f *flag.Flag) { + if i, ok := seen[f.Value]; !ok { + seen[f.Value] = len(flags) + flags = append(flags, []*flag.Flag{f}) + } else { + flags[i] = append(flags[i], f) + } + }) + for _, entry := range flags { + sort.SliceStable(entry, func(i, j int) bool { + return len(entry[i].Name) < len(entry[j].Name) + }) + var b strings.Builder + for i, f := range entry { + switch i { + case 0: + b.WriteString(" -") + default: + b.WriteString(",-") + } + b.WriteString(f.Name) + } + + f := entry[0] + name, usage := 
flag.UnquoteUsage(f) + if len(name) > 0 { + b.WriteString("=") + b.WriteString(name) + } + // Boolean flags of one ASCII letter are so common we + // treat them specially, putting their usage on the same line. + if b.Len() <= 4 { // space, space, '-', 'x'. + b.WriteString("\t") + } else { + // Four spaces before the tab triggers good alignment + // for both 4- and 8-space tab stops. + b.WriteString("\n \t") + } + b.WriteString(strings.ReplaceAll(usage, "\n", "\n \t")) + if !isZeroValue(f, f.DefValue) { + if reflect.TypeOf(f.Value).Elem().Name() == "stringValue" { + fmt.Fprintf(&b, " (default %q)", f.DefValue) + } else { + fmt.Fprintf(&b, " (default %v)", f.DefValue) + } + } + fmt.Fprint(s.Output(), b.String(), "\n") + } +} + +// isZeroValue is copied from the flags package +func isZeroValue(f *flag.Flag, value string) bool { + // Build a zero value of the flag's Value type, and see if the + // result of calling its String method equals the value passed in. + // This works unless the Value type is itself an interface type. + typ := reflect.TypeOf(f.Value) + var z reflect.Value + if typ.Kind() == reflect.Ptr { + z = reflect.New(typ.Elem()) + } else { + z = reflect.Zero(typ) + } + return value == z.Interface().(flag.Value).String() +} + +// Run takes the args after top level flag processing, and invokes the correct +// sub command as specified by the first argument. +// If no arguments are passed it will invoke the server sub command, as a +// temporary measure for compatibility. +func (app *Application) Run(ctx context.Context, args ...string) error { + // In the category of "things we can do while waiting for the Go command": + // Pre-initialize the filecache, which takes ~50ms to hash the gopls + // executable, and immediately runs a gc. 
+ filecache.Start() + + ctx = debug.WithInstance(ctx, app.OCAgent) + if len(args) == 0 { + s := flag.NewFlagSet(app.Name(), flag.ExitOnError) + return tool.Run(ctx, s, &app.Serve, args) + } + command, args := args[0], args[1:] + for _, c := range app.Commands() { + if c.Name() == command { + s := flag.NewFlagSet(app.Name(), flag.ExitOnError) + return tool.Run(ctx, s, c, args) + } + } + return tool.CommandLineErrorf("Unknown command %v", command) +} + +// Commands returns the set of commands supported by the gopls tool on the +// command line. +// The command is specified by the first non flag argument. +func (app *Application) Commands() []tool.Application { + var commands []tool.Application + commands = append(commands, app.mainCommands()...) + commands = append(commands, app.featureCommands()...) + commands = append(commands, app.internalCommands()...) + return commands +} + +func (app *Application) mainCommands() []tool.Application { + return []tool.Application{ + &app.Serve, + &version{app: app}, + &bug{app: app}, + &help{app: app}, + &apiJSON{app: app}, + &licenses{app: app}, + } +} + +func (app *Application) internalCommands() []tool.Application { + return []tool.Application{ + &vulncheck{app: app}, + } +} + +func (app *Application) featureCommands() []tool.Application { + return []tool.Application{ + &callHierarchy{app: app}, + &check{app: app}, + &codelens{app: app}, + &definition{app: app}, + &execute{app: app}, + &foldingRanges{app: app}, + &format{app: app}, + &highlight{app: app}, + &implementation{app: app}, + &imports{app: app}, + newRemote(app, ""), + newRemote(app, "inspect"), + &links{app: app}, + &prepareRename{app: app}, + &references{app: app}, + &rename{app: app}, + &semtok{app: app}, + &signature{app: app}, + &stats{app: app}, + &suggestedFix{app: app}, + &symbols{app: app}, + + &workspaceSymbol{app: app}, + } +} + +var ( + internalMu sync.Mutex + internalConnections = make(map[string]*connection) +) + +// connect creates and initializes a new 
in-process gopls session. +// +// If onProgress is set, it is called for each new progress notification. +func (app *Application) connect(ctx context.Context, onProgress func(*protocol.ProgressParams)) (*connection, error) { + switch { + case app.Remote == "": + client := newClient(app, onProgress) + options := settings.DefaultOptions(app.options) + server := server.New(cache.NewSession(ctx, cache.New(nil)), client, options) + conn := newConnection(server, client) + if err := conn.initialize(protocol.WithClient(ctx, client), app.options); err != nil { + return nil, err + } + return conn, nil + + default: + return app.connectRemote(ctx, app.Remote) + } +} + +func (app *Application) connectRemote(ctx context.Context, remote string) (*connection, error) { + conn, err := lsprpc.ConnectToRemote(ctx, remote) + if err != nil { + return nil, err + } + stream := jsonrpc2.NewHeaderStream(conn) + cc := jsonrpc2.NewConn(stream) + server := protocol.ServerDispatcher(cc) + client := newClient(app, nil) + connection := newConnection(server, client) + ctx = protocol.WithClient(ctx, connection.client) + cc.Go(ctx, + protocol.Handlers( + protocol.ClientHandler(client, jsonrpc2.MethodNotFound))) + return connection, connection.initialize(ctx, app.options) +} + +func (c *connection) initialize(ctx context.Context, options func(*settings.Options)) error { + wd, err := os.Getwd() + if err != nil { + return fmt.Errorf("finding workdir: %v", err) + } + params := &protocol.ParamInitialize{} + params.RootURI = protocol.URIFromPath(wd) + params.Capabilities.Workspace.Configuration = true + + // Make sure to respect configured options when sending initialize request. + opts := settings.DefaultOptions(options) + // If you add an additional option here, you must update the map key in connect. 
+ params.Capabilities.TextDocument.Hover = &protocol.HoverClientCapabilities{ + ContentFormat: []protocol.MarkupKind{opts.PreferredContentFormat}, + } + params.Capabilities.TextDocument.DocumentSymbol.HierarchicalDocumentSymbolSupport = opts.HierarchicalDocumentSymbolSupport + params.Capabilities.TextDocument.SemanticTokens = protocol.SemanticTokensClientCapabilities{} + params.Capabilities.TextDocument.SemanticTokens.Formats = []protocol.TokenFormat{"relative"} + params.Capabilities.TextDocument.SemanticTokens.Requests.Range = &protocol.Or_ClientSemanticTokensRequestOptions_range{Value: true} + //params.Capabilities.TextDocument.SemanticTokens.Requests.Range.Value = true + params.Capabilities.TextDocument.SemanticTokens.Requests.Full = &protocol.Or_ClientSemanticTokensRequestOptions_full{Value: true} + params.Capabilities.TextDocument.SemanticTokens.TokenTypes = protocol.SemanticTypes() + params.Capabilities.TextDocument.SemanticTokens.TokenModifiers = protocol.SemanticModifiers() + + // If the subcommand has registered a progress handler, report the progress + // capability. + if c.client.onProgress != nil { + params.Capabilities.Window.WorkDoneProgress = true + } + + params.InitializationOptions = map[string]interface{}{ + "symbolMatcher": string(opts.SymbolMatcher), + } + if _, err := c.Server.Initialize(ctx, params); err != nil { + return err + } + if err := c.Server.Initialized(ctx, &protocol.InitializedParams{}); err != nil { + return err + } + return nil +} + +type connection struct { + protocol.Server + client *cmdClient +} + +// cmdClient defines the protocol.Client interface behavior of the gopls CLI tool. 
+type cmdClient struct { + app *Application + onProgress func(*protocol.ProgressParams) + + filesMu sync.Mutex // guards files map and each cmdFile.diagnostics + files map[protocol.DocumentURI]*cmdFile +} + +type cmdFile struct { + uri protocol.DocumentURI + mapper *protocol.Mapper + err error + diagnostics []protocol.Diagnostic +} + +func newClient(app *Application, onProgress func(*protocol.ProgressParams)) *cmdClient { + return &cmdClient{ + app: app, + onProgress: onProgress, + files: make(map[protocol.DocumentURI]*cmdFile), + } +} + +func newConnection(server protocol.Server, client *cmdClient) *connection { + return &connection{ + Server: server, + client: client, + } +} + +func (c *cmdClient) CodeLensRefresh(context.Context) error { return nil } + +func (c *cmdClient) FoldingRangeRefresh(context.Context) error { return nil } + +func (c *cmdClient) LogTrace(context.Context, *protocol.LogTraceParams) error { return nil } + +func (c *cmdClient) ShowMessage(ctx context.Context, p *protocol.ShowMessageParams) error { + fmt.Fprintf(os.Stderr, "%s: %s\n", p.Type, p.Message) + return nil +} + +func (c *cmdClient) ShowMessageRequest(ctx context.Context, p *protocol.ShowMessageRequestParams) (*protocol.MessageActionItem, error) { + return nil, nil +} + +func (c *cmdClient) LogMessage(ctx context.Context, p *protocol.LogMessageParams) error { + // This logic causes server logging to be double-prefixed with a timestamp. + // 2023/11/08 10:50:21 Error:2023/11/08 10:50:21 + // TODO(adonovan): print just p.Message, plus a newline if needed? 
+ switch p.Type { + case protocol.Error: + log.Print("Error:", p.Message) + case protocol.Warning: + log.Print("Warning:", p.Message) + case protocol.Info: + if c.app.verbose() { + log.Print("Info:", p.Message) + } + case protocol.Log: + if c.app.verbose() { + log.Print("Log:", p.Message) + } + default: + if c.app.verbose() { + log.Print(p.Message) + } + } + return nil +} + +func (c *cmdClient) Event(ctx context.Context, t *interface{}) error { return nil } + +func (c *cmdClient) RegisterCapability(ctx context.Context, p *protocol.RegistrationParams) error { + return nil +} + +func (c *cmdClient) UnregisterCapability(ctx context.Context, p *protocol.UnregistrationParams) error { + return nil +} + +func (c *cmdClient) WorkspaceFolders(ctx context.Context) ([]protocol.WorkspaceFolder, error) { + return nil, nil +} + +func (c *cmdClient) Configuration(ctx context.Context, p *protocol.ParamConfiguration) ([]interface{}, error) { + results := make([]interface{}, len(p.Items)) + for i, item := range p.Items { + if item.Section != "gopls" { + continue + } + m := map[string]interface{}{ + "analyses": map[string]any{ + "fillreturns": true, + "nonewvars": true, + "noresultvalues": true, + "undeclaredname": true, + }, + } + if c.app.VeryVerbose { + m["verboseOutput"] = true + } + results[i] = m + } + return results, nil +} + +func (c *cmdClient) ApplyEdit(ctx context.Context, p *protocol.ApplyWorkspaceEditParams) (*protocol.ApplyWorkspaceEditResult, error) { + if err := c.applyWorkspaceEdit(&p.Edit); err != nil { + return &protocol.ApplyWorkspaceEditResult{FailureReason: err.Error()}, nil + } + return &protocol.ApplyWorkspaceEditResult{Applied: true}, nil +} + +// applyWorkspaceEdit applies a complete WorkspaceEdit to the client's +// files, honoring the preferred edit mode specified by cli.app.editMode. +// (Used by rename and by ApplyEdit downcalls.) 
+func (cli *cmdClient) applyWorkspaceEdit(edit *protocol.WorkspaceEdit) error { + var orderedURIs []protocol.DocumentURI + edits := map[protocol.DocumentURI][]protocol.TextEdit{} + for _, c := range edit.DocumentChanges { + if c.TextDocumentEdit != nil { + uri := c.TextDocumentEdit.TextDocument.URI + edits[uri] = append(edits[uri], protocol.AsTextEdits(c.TextDocumentEdit.Edits)...) + orderedURIs = append(orderedURIs, uri) + } + if c.RenameFile != nil { + return fmt.Errorf("client does not support file renaming (%s -> %s)", + c.RenameFile.OldURI, + c.RenameFile.NewURI) + } + } + sortSlice(orderedURIs) + for _, uri := range orderedURIs { + f := cli.openFile(uri) + if f.err != nil { + return f.err + } + if err := applyTextEdits(f.mapper, edits[uri], cli.app.editFlags); err != nil { + return err + } + } + return nil +} + +func sortSlice[T constraints.Ordered](slice []T) { + sort.Slice(slice, func(i, j int) bool { return slice[i] < slice[j] }) +} + +// applyTextEdits applies a list of edits to the mapper file content, +// using the preferred edit mode. It is a no-op if there are no edits. +func applyTextEdits(mapper *protocol.Mapper, edits []protocol.TextEdit, flags *EditFlags) error { + if len(edits) == 0 { + return nil + } + newContent, renameEdits, err := protocol.ApplyEdits(mapper, edits) + if err != nil { + return err + } + + filename := mapper.URI.Path() + + if flags.List { + fmt.Println(filename) + } + + if flags.Write { + if flags.Preserve { + if err := os.Rename(filename, filename+".orig"); err != nil { + return err + } + } + if err := os.WriteFile(filename, newContent, 0644); err != nil { + return err + } + } + + if flags.Diff { + unified, err := diff.ToUnified(filename+".orig", filename, string(mapper.Content), renameEdits, diff.DefaultContextLines) + if err != nil { + return err + } + fmt.Print(unified) + } + + // No flags: just print edited file content. + // TODO(adonovan): how is this ever useful with multiple files? 
+ if !(flags.List || flags.Write || flags.Diff) { + os.Stdout.Write(newContent) + } + + return nil +} + +func (c *cmdClient) PublishDiagnostics(ctx context.Context, p *protocol.PublishDiagnosticsParams) error { + // Don't worry about diagnostics without versions. + if p.Version == 0 { + return nil + } + + c.filesMu.Lock() + defer c.filesMu.Unlock() + + file := c.getFile(p.URI) + file.diagnostics = append(file.diagnostics, p.Diagnostics...) + + // Perform a crude in-place deduplication. + // TODO(golang/go#60122): replace the gopls.diagnose_files + // command with support for textDocument/diagnostic, + // so that we don't need to do this de-duplication. + type key [6]interface{} + seen := make(map[key]bool) + out := file.diagnostics[:0] + for _, d := range file.diagnostics { + var codeHref string + if desc := d.CodeDescription; desc != nil { + codeHref = desc.Href + } + k := key{d.Range, d.Severity, d.Code, codeHref, d.Source, d.Message} + if !seen[k] { + seen[k] = true + out = append(out, d) + } + } + file.diagnostics = out + + return nil +} + +func (c *cmdClient) Progress(_ context.Context, params *protocol.ProgressParams) error { + if c.onProgress != nil { + c.onProgress(params) + } + return nil +} + +func (c *cmdClient) ShowDocument(ctx context.Context, params *protocol.ShowDocumentParams) (*protocol.ShowDocumentResult, error) { + var success bool + if params.External { + // Open URI in external browser. + success = browser.Open(params.URI) + } else { + // Open file in editor, optionally taking focus and selecting a range. + // (cmdClient has no editor. Should it fork+exec $EDITOR?) 
+ log.Printf("Server requested that client editor open %q (takeFocus=%t, selection=%+v)", + params.URI, params.TakeFocus, params.Selection) + success = true + } + return &protocol.ShowDocumentResult{Success: success}, nil +} + +func (c *cmdClient) WorkDoneProgressCreate(context.Context, *protocol.WorkDoneProgressCreateParams) error { + return nil +} + +func (c *cmdClient) DiagnosticRefresh(context.Context) error { + return nil +} + +func (c *cmdClient) InlayHintRefresh(context.Context) error { + return nil +} + +func (c *cmdClient) SemanticTokensRefresh(context.Context) error { + return nil +} + +func (c *cmdClient) InlineValueRefresh(context.Context) error { + return nil +} + +func (c *cmdClient) getFile(uri protocol.DocumentURI) *cmdFile { + file, found := c.files[uri] + if !found || file.err != nil { + file = &cmdFile{ + uri: uri, + } + c.files[uri] = file + } + if file.mapper == nil { + content, err := os.ReadFile(uri.Path()) + if err != nil { + file.err = fmt.Errorf("getFile: %v: %v", uri, err) + return file + } + file.mapper = protocol.NewMapper(uri, content) + } + return file +} + +func (c *cmdClient) openFile(uri protocol.DocumentURI) *cmdFile { + c.filesMu.Lock() + defer c.filesMu.Unlock() + return c.getFile(uri) +} + +// TODO(adonovan): provide convenience helpers to: +// - map a (URI, protocol.Range) to a MappedRange; +// - parse a command-line argument to a MappedRange. +func (c *connection) openFile(ctx context.Context, uri protocol.DocumentURI) (*cmdFile, error) { + file := c.client.openFile(uri) + if file.err != nil { + return nil, file.err + } + + p := &protocol.DidOpenTextDocumentParams{ + TextDocument: protocol.TextDocumentItem{ + URI: uri, + LanguageID: "go", + Version: 1, + Text: string(file.mapper.Content), + }, + } + if err := c.Server.DidOpen(ctx, p); err != nil { + // TODO(adonovan): is this assignment concurrency safe? 
+ file.err = fmt.Errorf("%v: %v", uri, err) + return nil, file.err + } + return file, nil +} + +func (c *connection) semanticTokens(ctx context.Context, p *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) { + // use range to avoid limits on full + resp, err := c.Server.SemanticTokensRange(ctx, p) + if err != nil { + return nil, err + } + return resp, nil +} + +func (c *connection) diagnoseFiles(ctx context.Context, files []protocol.DocumentURI) error { + cmd, err := command.NewDiagnoseFilesCommand("Diagnose files", command.DiagnoseFilesArgs{ + Files: files, + }) + if err != nil { + return err + } + _, err = c.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ + Command: cmd.Command, + Arguments: cmd.Arguments, + }) + return err +} + +func (c *connection) terminate(ctx context.Context) { + //TODO: do we need to handle errors on these calls? + c.Shutdown(ctx) + //TODO: right now calling exit terminates the process, we should rethink that + //server.Exit(ctx) +} + +// Implement io.Closer. +func (c *cmdClient) Close() error { + return nil +} + +// -- conversions to span (UTF-8) domain -- + +// locationSpan converts a protocol (UTF-16) Location to a (UTF-8) span. +// Precondition: the URIs of Location and Mapper match. +func (f *cmdFile) locationSpan(loc protocol.Location) (span, error) { + // TODO(adonovan): check that l.URI matches m.URI. + return f.rangeSpan(loc.Range) +} + +// rangeSpan converts a protocol (UTF-16) range to a (UTF-8) span. +// The resulting span has valid Positions and Offsets. +func (f *cmdFile) rangeSpan(r protocol.Range) (span, error) { + start, end, err := f.mapper.RangeOffsets(r) + if err != nil { + return span{}, err + } + return f.offsetSpan(start, end) +} + +// offsetSpan converts a byte-offset interval to a (UTF-8) span. +// The resulting span contains line, column, and offset information. 
+func (f *cmdFile) offsetSpan(start, end int) (span, error) { + if start > end { + return span{}, fmt.Errorf("start offset (%d) > end (%d)", start, end) + } + startPoint, err := offsetPoint(f.mapper, start) + if err != nil { + return span{}, fmt.Errorf("start: %v", err) + } + endPoint, err := offsetPoint(f.mapper, end) + if err != nil { + return span{}, fmt.Errorf("end: %v", err) + } + return newSpan(f.mapper.URI, startPoint, endPoint), nil +} + +// offsetPoint converts a byte offset to a span (UTF-8) point. +// The resulting point contains line, column, and offset information. +func offsetPoint(m *protocol.Mapper, offset int) (point, error) { + if !(0 <= offset && offset <= len(m.Content)) { + return point{}, fmt.Errorf("invalid offset %d (want 0-%d)", offset, len(m.Content)) + } + line, col8 := m.OffsetLineCol8(offset) + return newPoint(line, col8, offset), nil +} + +// -- conversions from span (UTF-8) domain -- + +// spanLocation converts a (UTF-8) span to a protocol (UTF-16) range. +// Precondition: the URIs of spanLocation and Mapper match. +func (f *cmdFile) spanLocation(s span) (protocol.Location, error) { + rng, err := f.spanRange(s) + if err != nil { + return protocol.Location{}, err + } + return f.mapper.RangeLocation(rng), nil +} + +// spanRange converts a (UTF-8) span to a protocol (UTF-16) range. +// Precondition: the URIs of span and Mapper match. +func (f *cmdFile) spanRange(s span) (protocol.Range, error) { + // Assert that we aren't using the wrong mapper. + // We check only the base name, and case insensitively, + // because we can't assume clean paths, no symbolic links, + // case-sensitive directories. The authoritative answer + // requires querying the file system, and we don't want + // to do that. 
+ if !strings.EqualFold(filepath.Base(string(f.mapper.URI)), filepath.Base(string(s.URI()))) { + return protocol.Range{}, bugpkg.Errorf("mapper is for file %q instead of %q", f.mapper.URI, s.URI()) + } + start, err := pointPosition(f.mapper, s.Start()) + if err != nil { + return protocol.Range{}, fmt.Errorf("start: %w", err) + } + end, err := pointPosition(f.mapper, s.End()) + if err != nil { + return protocol.Range{}, fmt.Errorf("end: %w", err) + } + return protocol.Range{Start: start, End: end}, nil +} + +// pointPosition converts a valid span (UTF-8) point to a protocol (UTF-16) position. +func pointPosition(m *protocol.Mapper, p point) (protocol.Position, error) { + if p.HasPosition() { + return m.LineCol8Position(p.Line(), p.Column()) + } + if p.HasOffset() { + return m.OffsetPosition(p.Offset()) + } + return protocol.Position{}, fmt.Errorf("point has neither offset nor line/column") +} diff --git a/gopls/internal/cmd/codelens.go b/gopls/internal/cmd/codelens.go new file mode 100644 index 00000000000..28986cc6bbb --- /dev/null +++ b/gopls/internal/cmd/codelens.go @@ -0,0 +1,138 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmd + +import ( + "context" + "flag" + "fmt" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/internal/tool" +) + +// codelens implements the codelens verb for gopls. 
+type codelens struct { + EditFlags + app *Application + + Exec bool `flag:"exec" help:"execute the first matching code lens"` +} + +func (r *codelens) Name() string { return "codelens" } +func (r *codelens) Parent() string { return r.app.Name() } +func (r *codelens) Usage() string { return "[codelens-flags] file[:line[:col]] [title]" } +func (r *codelens) ShortHelp() string { return "List or execute code lenses for a file" } +func (r *codelens) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` +The codelens command lists or executes code lenses for the specified +file, or line within a file. A code lens is a command associated with +a position in the code. + +With an optional title argment, only code lenses matching that +title are considered. + +By default, the codelens command lists the available lenses for the +specified file or line within a file, including the title and +title of the command. With the -exec flag, the first matching command +is executed, and its output is printed to stdout. + +Example: + + $ gopls codelens a_test.go # list code lenses in a file + $ gopls codelens a_test.go:10 # list code lenses on line 10 + $ gopls codelens a_test.go gopls.test # list gopls.test commands + $ gopls codelens -run a_test.go:10 gopls.test # run a specific test + +codelens-flags: +`) + printFlagDefaults(f) +} + +func (r *codelens) Run(ctx context.Context, args ...string) error { + var filename, title string + switch len(args) { + case 0: + return tool.CommandLineErrorf("codelens requires a file name") + case 2: + title = args[1] + fallthrough + case 1: + filename = args[0] + default: + return tool.CommandLineErrorf("codelens expects at most two arguments") + } + + r.app.editFlags = &r.EditFlags // in case a codelens perform an edit + + // Override the default setting for codelenses[Test], which is + // off by default because VS Code has a superior client-side + // implementation. But this client is not VS Code. + // See golang.LensFuncs(). 
+ origOptions := r.app.options + r.app.options = func(opts *settings.Options) { + origOptions(opts) + if opts.Codelenses == nil { + opts.Codelenses = make(map[string]bool) + } + opts.Codelenses["test"] = true + } + + // TODO(adonovan): cleanup: factor progress with stats subcommand. + cmdDone, onProgress := commandProgress() + + conn, err := r.app.connect(ctx, onProgress) + if err != nil { + return err + } + defer conn.terminate(ctx) + + filespan := parseSpan(filename) + file, err := conn.openFile(ctx, filespan.URI()) + if err != nil { + return err + } + loc, err := file.spanLocation(filespan) + if err != nil { + return err + } + + p := protocol.CodeLensParams{ + TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, + } + lenses, err := conn.CodeLens(ctx, &p) + if err != nil { + return err + } + + for _, lens := range lenses { + sp, err := file.rangeSpan(lens.Range) + if err != nil { + return nil + } + + if title != "" && lens.Command.Title != title { + continue // title was specified but does not match + } + if filespan.HasPosition() && !protocol.Intersect(loc.Range, lens.Range) { + continue // position was specified but does not match + } + + // -exec: run the first matching code lens. + if r.Exec { + _, err := conn.executeCommand(ctx, cmdDone, lens.Command) + return err + } + + // No -exec: list matching code lenses. + fmt.Printf("%v: %q [%s]\n", sp, lens.Command.Title, lens.Command.Command) + } + + if r.Exec { + return fmt.Errorf("no code lens at %s with title %q", filespan, title) + } + return nil +} diff --git a/gopls/internal/cmd/definition.go b/gopls/internal/cmd/definition.go new file mode 100644 index 00000000000..e5e119b8da8 --- /dev/null +++ b/gopls/internal/cmd/definition.go @@ -0,0 +1,137 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cmd + +import ( + "context" + "encoding/json" + "flag" + "fmt" + "os" + "strings" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/internal/tool" +) + +// A Definition is the result of a 'definition' query. +type Definition struct { + Span span `json:"span"` // span of the definition + Description string `json:"description"` // description of the denoted object +} + +// These constant is printed in the help, and then used in a test to verify the +// help is still valid. +// They refer to "Set" in "flag.FlagSet" from the DetailedHelp method below. +const ( + exampleLine = 44 + exampleColumn = 47 + exampleOffset = 1270 +) + +// definition implements the definition verb for gopls. +type definition struct { + app *Application + + JSON bool `flag:"json" help:"emit output in JSON format"` + MarkdownSupported bool `flag:"markdown" help:"support markdown in responses"` +} + +func (d *definition) Name() string { return "definition" } +func (d *definition) Parent() string { return d.app.Name() } +func (d *definition) Usage() string { return "[definition-flags] " } +func (d *definition) ShortHelp() string { return "show declaration of selected identifier" } +func (d *definition) DetailedHelp(f *flag.FlagSet) { + fmt.Fprintf(f.Output(), ` +Example: show the definition of the identifier at syntax at offset %[1]v in this file (flag.FlagSet): + + $ gopls definition internal/cmd/definition.go:%[1]v:%[2]v + $ gopls definition internal/cmd/definition.go:#%[3]v + +definition-flags: +`, exampleLine, exampleColumn, exampleOffset) + printFlagDefaults(f) +} + +// Run performs the definition query as specified by args and prints the +// results to stdout. +func (d *definition) Run(ctx context.Context, args ...string) error { + if len(args) != 1 { + return tool.CommandLineErrorf("definition expects 1 argument") + } + // Plaintext makes more sense for the command line. 
+ opts := d.app.options + d.app.options = func(o *settings.Options) { + if opts != nil { + opts(o) + } + o.PreferredContentFormat = protocol.PlainText + if d.MarkdownSupported { + o.PreferredContentFormat = protocol.Markdown + } + } + conn, err := d.app.connect(ctx, nil) + if err != nil { + return err + } + defer conn.terminate(ctx) + from := parseSpan(args[0]) + file, err := conn.openFile(ctx, from.URI()) + if err != nil { + return err + } + loc, err := file.spanLocation(from) + if err != nil { + return err + } + p := protocol.DefinitionParams{ + TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), + } + locs, err := conn.Definition(ctx, &p) + if err != nil { + return fmt.Errorf("%v: %v", from, err) + } + + if len(locs) == 0 { + return fmt.Errorf("%v: not an identifier", from) + } + file, err = conn.openFile(ctx, locs[0].URI) + if err != nil { + return fmt.Errorf("%v: %v", from, err) + } + definition, err := file.locationSpan(locs[0]) + if err != nil { + return fmt.Errorf("%v: %v", from, err) + } + + q := protocol.HoverParams{ + TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), + } + hover, err := conn.Hover(ctx, &q) + if err != nil { + return fmt.Errorf("%v: %v", from, err) + } + var description string + if hover != nil { + description = strings.TrimSpace(hover.Contents.Value) + } + + result := &Definition{ + Span: definition, + Description: description, + } + if d.JSON { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", "\t") + return enc.Encode(result) + } + fmt.Printf("%v", result.Span) + if len(result.Description) > 0 { + fmt.Printf(": defined here as %s", result.Description) + } + fmt.Printf("\n") + return nil +} diff --git a/gopls/internal/cmd/execute.go b/gopls/internal/cmd/execute.go new file mode 100644 index 00000000000..381c2a7aa95 --- /dev/null +++ b/gopls/internal/cmd/execute.go @@ -0,0 +1,155 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmd + +import ( + "context" + "encoding/json" + "flag" + "fmt" + "log" + "os" + "strings" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/server" + "golang.org/x/tools/gopls/internal/util/slices" + "golang.org/x/tools/internal/tool" +) + +// execute implements the LSP ExecuteCommand verb for gopls. +type execute struct { + EditFlags + app *Application +} + +func (e *execute) Name() string { return "execute" } +func (e *execute) Parent() string { return e.app.Name() } +func (e *execute) Usage() string { return "[flags] command argument..." } +func (e *execute) ShortHelp() string { return "Execute a gopls custom LSP command" } +func (e *execute) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` +The execute command sends an LSP ExecuteCommand request to gopls, +with a set of optional JSON argument values. +Some commands return a result, also JSON. + +Available commands are documented at: + + https://github.com/golang/tools/blob/master/gopls/doc/commands.md + +This interface is experimental and commands may change or disappear without notice. 
+ +Examples: + + $ gopls execute gopls.add_import '{"ImportPath": "fmt", "URI": "file:///hello.go"}' + $ gopls execute gopls.run_tests '{"URI": "file:///a_test.go", "Tests": ["Test"]}' + $ gopls execute gopls.list_known_packages '{"URI": "file:///hello.go"}' + +execute-flags: +`) + printFlagDefaults(f) +} + +func (e *execute) Run(ctx context.Context, args ...string) error { + if len(args) == 0 { + return tool.CommandLineErrorf("execute requires a command name") + } + cmd := args[0] + if !slices.Contains(command.Commands, command.Command(strings.TrimPrefix(cmd, "gopls."))) { + return tool.CommandLineErrorf("unrecognized command: %s", cmd) + } + + // A command may have multiple arguments, though the only one + // that currently does so is the "legacy" gopls.test, + // so we don't show an example of it. + var jsonArgs []json.RawMessage + for i, arg := range args[1:] { + var dummy any + if err := json.Unmarshal([]byte(arg), &dummy); err != nil { + return fmt.Errorf("argument %d is not valid JSON: %v", i+1, err) + } + jsonArgs = append(jsonArgs, json.RawMessage(arg)) + } + + e.app.editFlags = &e.EditFlags // in case command performs an edit + + cmdDone, onProgress := commandProgress() + conn, err := e.app.connect(ctx, onProgress) + if err != nil { + return err + } + defer conn.terminate(ctx) + + res, err := conn.executeCommand(ctx, cmdDone, &protocol.Command{ + Command: cmd, + Arguments: jsonArgs, + }) + if err != nil { + return err + } + if res != nil { + data, err := json.MarshalIndent(res, "", "\t") + if err != nil { + log.Fatal(err) + } + fmt.Printf("%s\n", data) + } + return nil +} + +// -- shared command helpers -- + +const cmdProgressToken = "cmd-progress" + +// TODO(adonovan): disentangle this from app.connect, and factor with +// conn.executeCommand used by codelens and execute. Seems like +// connection needs a way to register and unregister independent +// handlers, later than at connect time. 
+func commandProgress() (<-chan bool, func(p *protocol.ProgressParams)) { + cmdDone := make(chan bool, 1) + onProgress := func(p *protocol.ProgressParams) { + switch v := p.Value.(type) { + case *protocol.WorkDoneProgressReport: + // TODO(adonovan): how can we segregate command's stdout and + // stderr so that structure is preserved? + fmt.Fprintln(os.Stderr, v.Message) + + case *protocol.WorkDoneProgressEnd: + if p.Token == cmdProgressToken { + // commandHandler.run sends message = canceled | failed | completed + cmdDone <- v.Message == server.CommandCompleted + } + } + } + return cmdDone, onProgress +} + +func (conn *connection) executeCommand(ctx context.Context, done <-chan bool, cmd *protocol.Command) (any, error) { + res, err := conn.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ + Command: cmd.Command, + Arguments: cmd.Arguments, + WorkDoneProgressParams: protocol.WorkDoneProgressParams{ + WorkDoneToken: cmdProgressToken, + }, + }) + if err != nil { + return nil, err + } + + // Wait for it to finish (by watching for a progress token). + // + // In theory this is only necessary for the two async + // commands (RunGovulncheck and RunTests), but the tests + // fail for Test as well (why?), and there is no cost to + // waiting in all cases. TODO(adonovan): investigate. + if success := <-done; !success { + // TODO(adonovan): suppress this message; + // the command's stderr should suffice. + return nil, fmt.Errorf("command failed") + } + + return res, nil +} diff --git a/gopls/internal/cmd/folding_range.go b/gopls/internal/cmd/folding_range.go new file mode 100644 index 00000000000..13f78c197a5 --- /dev/null +++ b/gopls/internal/cmd/folding_range.go @@ -0,0 +1,71 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cmd + +import ( + "context" + "flag" + "fmt" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/tool" +) + +// foldingRanges implements the folding_ranges verb for gopls +type foldingRanges struct { + app *Application +} + +func (r *foldingRanges) Name() string { return "folding_ranges" } +func (r *foldingRanges) Parent() string { return r.app.Name() } +func (r *foldingRanges) Usage() string { return "" } +func (r *foldingRanges) ShortHelp() string { return "display selected file's folding ranges" } +func (r *foldingRanges) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` +Example: + + $ gopls folding_ranges helper/helper.go +`) + printFlagDefaults(f) +} + +func (r *foldingRanges) Run(ctx context.Context, args ...string) error { + if len(args) != 1 { + return tool.CommandLineErrorf("folding_ranges expects 1 argument (file)") + } + + conn, err := r.app.connect(ctx, nil) + if err != nil { + return err + } + defer conn.terminate(ctx) + + from := parseSpan(args[0]) + if _, err := conn.openFile(ctx, from.URI()); err != nil { + return err + } + + p := protocol.FoldingRangeParams{ + TextDocument: protocol.TextDocumentIdentifier{ + URI: from.URI(), + }, + } + + ranges, err := conn.FoldingRange(ctx, &p) + if err != nil { + return err + } + + for _, r := range ranges { + fmt.Printf("%v:%v-%v:%v\n", + r.StartLine+1, + r.StartCharacter+1, + r.EndLine+1, + r.EndCharacter+1, + ) + } + + return nil +} diff --git a/gopls/internal/cmd/format.go b/gopls/internal/cmd/format.go new file mode 100644 index 00000000000..75982c9efba --- /dev/null +++ b/gopls/internal/cmd/format.go @@ -0,0 +1,75 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmd + +import ( + "context" + "flag" + "fmt" + + "golang.org/x/tools/gopls/internal/protocol" +) + +// format implements the format verb for gopls. 
+type format struct { + EditFlags + app *Application +} + +func (c *format) Name() string { return "format" } +func (c *format) Parent() string { return c.app.Name() } +func (c *format) Usage() string { return "[format-flags] " } +func (c *format) ShortHelp() string { return "format the code according to the go standard" } +func (c *format) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` +The arguments supplied may be simple file names, or ranges within files. + +Example: reformat this file: + + $ gopls format -w internal/cmd/check.go + +format-flags: +`) + printFlagDefaults(f) +} + +// Run performs the check on the files specified by args and prints the +// results to stdout. +func (c *format) Run(ctx context.Context, args ...string) error { + if len(args) == 0 { + return nil + } + c.app.editFlags = &c.EditFlags + conn, err := c.app.connect(ctx, nil) + if err != nil { + return err + } + defer conn.terminate(ctx) + for _, arg := range args { + spn := parseSpan(arg) + file, err := conn.openFile(ctx, spn.URI()) + if err != nil { + return err + } + loc, err := file.spanLocation(spn) + if err != nil { + return err + } + if loc.Range.Start != loc.Range.End { + return fmt.Errorf("only full file formatting supported") + } + p := protocol.DocumentFormattingParams{ + TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, + } + edits, err := conn.Formatting(ctx, &p) + if err != nil { + return fmt.Errorf("%v: %v", spn, err) + } + if err := applyTextEdits(file.mapper, edits, c.app.editFlags); err != nil { + return err + } + } + return nil +} diff --git a/gopls/internal/lsp/cmd/help_test.go b/gopls/internal/cmd/help_test.go similarity index 84% rename from gopls/internal/lsp/cmd/help_test.go rename to gopls/internal/cmd/help_test.go index 6d8f10af46f..dd79c2f7e02 100644 --- a/gopls/internal/lsp/cmd/help_test.go +++ b/gopls/internal/cmd/help_test.go @@ -4,6 +4,14 @@ package cmd_test +// This file defines tests to ensure the cmd/usage/*.hlp files match +// the 
output of the tool. The .hlp files are not actually needed by +// the executable (they are not //go:embed-ded, say), but they make it +// easier to review changes to the gopls command's help logic since +// any effects are manifest as changes to these files. + +//go:generate go test -run Help -update-help-files + import ( "bytes" "context" @@ -13,20 +21,18 @@ import ( "testing" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/cmd" + "golang.org/x/tools/gopls/internal/cmd" "golang.org/x/tools/internal/testenv" "golang.org/x/tools/internal/tool" ) -//go:generate go test -run Help -update-help-files - var updateHelpFiles = flag.Bool("update-help-files", false, "Write out the help files instead of checking them") const appName = "gopls" func TestHelpFiles(t *testing.T) { testenv.NeedsGoBuild(t) // This is a lie. We actually need the source code. - app := cmd.New(appName, "", nil, nil) + app := cmd.New(nil) ctx := context.Background() for _, page := range append(app.Commands(), app) { t.Run(page.Name(), func(t *testing.T) { @@ -59,7 +65,7 @@ func TestHelpFiles(t *testing.T) { func TestVerboseHelp(t *testing.T) { testenv.NeedsGoBuild(t) // This is a lie. We actually need the source code. - app := cmd.New(appName, "", nil, nil) + app := cmd.New(nil) ctx := context.Background() var buf bytes.Buffer s := flag.NewFlagSet(appName, flag.ContinueOnError) diff --git a/gopls/internal/cmd/highlight.go b/gopls/internal/cmd/highlight.go new file mode 100644 index 00000000000..9c1488b30be --- /dev/null +++ b/gopls/internal/cmd/highlight.go @@ -0,0 +1,81 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmd + +import ( + "context" + "flag" + "fmt" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/tool" +) + +// highlight implements the highlight verb for gopls. 
+type highlight struct { + app *Application +} + +func (r *highlight) Name() string { return "highlight" } +func (r *highlight) Parent() string { return r.app.Name() } +func (r *highlight) Usage() string { return "" } +func (r *highlight) ShortHelp() string { return "display selected identifier's highlights" } +func (r *highlight) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` +Example: + + $ # 1-indexed location (:line:column or :#offset) of the target identifier + $ gopls highlight helper/helper.go:8:6 + $ gopls highlight helper/helper.go:#53 +`) + printFlagDefaults(f) +} + +func (r *highlight) Run(ctx context.Context, args ...string) error { + if len(args) != 1 { + return tool.CommandLineErrorf("highlight expects 1 argument (position)") + } + + conn, err := r.app.connect(ctx, nil) + if err != nil { + return err + } + defer conn.terminate(ctx) + + from := parseSpan(args[0]) + file, err := conn.openFile(ctx, from.URI()) + if err != nil { + return err + } + + loc, err := file.spanLocation(from) + if err != nil { + return err + } + + p := protocol.DocumentHighlightParams{ + TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), + } + highlights, err := conn.DocumentHighlight(ctx, &p) + if err != nil { + return err + } + + var results []span + for _, h := range highlights { + s, err := file.rangeSpan(h.Range) + if err != nil { + return err + } + results = append(results, s) + } + // Sort results to make tests deterministic since DocumentHighlight uses a map. + sortSpans(results) + + for _, s := range results { + fmt.Println(s) + } + return nil +} diff --git a/gopls/internal/cmd/implementation.go b/gopls/internal/cmd/implementation.go new file mode 100644 index 00000000000..fcfb63185b4 --- /dev/null +++ b/gopls/internal/cmd/implementation.go @@ -0,0 +1,86 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cmd + +import ( + "context" + "flag" + "fmt" + "sort" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/tool" +) + +// implementation implements the implementation verb for gopls +type implementation struct { + app *Application +} + +func (i *implementation) Name() string { return "implementation" } +func (i *implementation) Parent() string { return i.app.Name() } +func (i *implementation) Usage() string { return "" } +func (i *implementation) ShortHelp() string { return "display selected identifier's implementation" } +func (i *implementation) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` +Example: + + $ # 1-indexed location (:line:column or :#offset) of the target identifier + $ gopls implementation helper/helper.go:8:6 + $ gopls implementation helper/helper.go:#53 +`) + printFlagDefaults(f) +} + +func (i *implementation) Run(ctx context.Context, args ...string) error { + if len(args) != 1 { + return tool.CommandLineErrorf("implementation expects 1 argument (position)") + } + + conn, err := i.app.connect(ctx, nil) + if err != nil { + return err + } + defer conn.terminate(ctx) + + from := parseSpan(args[0]) + file, err := conn.openFile(ctx, from.URI()) + if err != nil { + return err + } + + loc, err := file.spanLocation(from) + if err != nil { + return err + } + + p := protocol.ImplementationParams{ + TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), + } + implementations, err := conn.Implementation(ctx, &p) + if err != nil { + return err + } + + var spans []string + for _, impl := range implementations { + f, err := conn.openFile(ctx, impl.URI) + if err != nil { + return err + } + span, err := f.locationSpan(impl) + if err != nil { + return err + } + spans = append(spans, fmt.Sprint(span)) + } + sort.Strings(spans) + + for _, s := range spans { + fmt.Println(s) + } + + return nil +} diff --git a/gopls/internal/cmd/imports.go b/gopls/internal/cmd/imports.go new file mode 100644 index 
00000000000..414ce3473b0 --- /dev/null +++ b/gopls/internal/cmd/imports.go @@ -0,0 +1,80 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmd + +import ( + "context" + "flag" + "fmt" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/tool" +) + +// imports implements the import verb for gopls. +type imports struct { + EditFlags + app *Application +} + +func (t *imports) Name() string { return "imports" } +func (t *imports) Parent() string { return t.app.Name() } +func (t *imports) Usage() string { return "[imports-flags] " } +func (t *imports) ShortHelp() string { return "updates import statements" } +func (t *imports) DetailedHelp(f *flag.FlagSet) { + fmt.Fprintf(f.Output(), ` +Example: update imports statements in a file: + + $ gopls imports -w internal/cmd/check.go + +imports-flags: +`) + printFlagDefaults(f) +} + +// Run performs diagnostic checks on the file specified and either; +// - if -w is specified, updates the file in place; +// - if -d is specified, prints out unified diffs of the changes; or +// - otherwise, prints the new versions to stdout. 
+func (t *imports) Run(ctx context.Context, args ...string) error { + if len(args) != 1 { + return tool.CommandLineErrorf("imports expects 1 argument") + } + t.app.editFlags = &t.EditFlags + conn, err := t.app.connect(ctx, nil) + if err != nil { + return err + } + defer conn.terminate(ctx) + + from := parseSpan(args[0]) + uri := from.URI() + file, err := conn.openFile(ctx, uri) + if err != nil { + return err + } + actions, err := conn.CodeAction(ctx, &protocol.CodeActionParams{ + TextDocument: protocol.TextDocumentIdentifier{ + URI: uri, + }, + }) + if err != nil { + return fmt.Errorf("%v: %v", from, err) + } + var edits []protocol.TextEdit + for _, a := range actions { + if a.Title != "Organize Imports" { + continue + } + for _, c := range a.Edit.DocumentChanges { + if c.TextDocumentEdit != nil { + if c.TextDocumentEdit.TextDocument.URI == uri { + edits = append(edits, protocol.AsTextEdits(c.TextDocumentEdit.Edits)...) + } + } + } + } + return applyTextEdits(file.mapper, edits, t.app.editFlags) +} diff --git a/gopls/internal/cmd/info.go b/gopls/internal/cmd/info.go new file mode 100644 index 00000000000..95e15fc18d7 --- /dev/null +++ b/gopls/internal/cmd/info.go @@ -0,0 +1,313 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmd + +// This file defines the help, bug, version, api-json, licenses commands. + +import ( + "bytes" + "context" + "encoding/json" + "flag" + "fmt" + "net/url" + "os" + "sort" + "strings" + + "golang.org/x/tools/gopls/internal/debug" + "golang.org/x/tools/gopls/internal/filecache" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/browser" + goplsbug "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/internal/tool" +) + +// help implements the help command. 
+type help struct { + app *Application +} + +func (h *help) Name() string { return "help" } +func (h *help) Parent() string { return h.app.Name() } +func (h *help) Usage() string { return "" } +func (h *help) ShortHelp() string { return "print usage information for subcommands" } +func (h *help) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` + +Examples: +$ gopls help # main gopls help message +$ gopls help remote # help on 'remote' command +$ gopls help remote sessions # help on 'remote sessions' subcommand +`) + printFlagDefaults(f) +} + +// Run prints help information about a subcommand. +func (h *help) Run(ctx context.Context, args ...string) error { + find := func(cmds []tool.Application, name string) tool.Application { + for _, cmd := range cmds { + if cmd.Name() == name { + return cmd + } + } + return nil + } + + // Find the subcommand denoted by args (empty => h.app). + var cmd tool.Application = h.app + for i, arg := range args { + cmd = find(getSubcommands(cmd), arg) + if cmd == nil { + return tool.CommandLineErrorf( + "no such subcommand: %s", strings.Join(args[:i+1], " ")) + } + } + + // 'gopls help cmd subcmd' is equivalent to 'gopls cmd subcmd -h'. + // The flag package prints the usage information (defined by tool.Run) + // when it sees the -h flag. + fs := flag.NewFlagSet(cmd.Name(), flag.ExitOnError) + return tool.Run(ctx, fs, h.app, append(args[:len(args):len(args)], "-h")) +} + +// version implements the version command. +type version struct { + JSON bool `flag:"json" help:"outputs in json format."` + + app *Application +} + +func (v *version) Name() string { return "version" } +func (v *version) Parent() string { return v.app.Name() } +func (v *version) Usage() string { return "" } +func (v *version) ShortHelp() string { return "print the gopls version information" } +func (v *version) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ``) + printFlagDefaults(f) +} + +// Run prints version information to stdout. 
+func (v *version) Run(ctx context.Context, args ...string) error { + var mode = debug.PlainText + if v.JSON { + mode = debug.JSON + } + + return debug.PrintVersionInfo(ctx, os.Stdout, v.app.verbose(), mode) +} + +// bug implements the bug command. +type bug struct { + app *Application +} + +func (b *bug) Name() string { return "bug" } +func (b *bug) Parent() string { return b.app.Name() } +func (b *bug) Usage() string { return "" } +func (b *bug) ShortHelp() string { return "report a bug in gopls" } +func (b *bug) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ``) + printFlagDefaults(f) +} + +const goplsBugPrefix = "x/tools/gopls: " +const goplsBugHeader = `ATTENTION: Please answer these questions BEFORE submitting your issue. Thanks! + +#### What did you do? +If possible, provide a recipe for reproducing the error. +A complete runnable program is good. +A link on play.golang.org is better. +A failing unit test is the best. + +#### What did you expect to see? + + +#### What did you see instead? + + +` + +// Run collects some basic information and then prepares an issue ready to +// be reported. +func (b *bug) Run(ctx context.Context, args ...string) error { + // This undocumented environment variable allows + // the cmd integration test (and maintainers) to + // trigger a call to bug.Report. + if msg := os.Getenv("TEST_GOPLS_BUG"); msg != "" { + filecache.Start() // register bug handler + goplsbug.Report(msg) + return nil + } + + // Enumerate bug reports, grouped and sorted. + _, reports := filecache.BugReports() + sort.Slice(reports, func(i, j int) bool { + x, y := reports[i], reports[j] + if x.Key != y.Key { + return x.Key < y.Key // ascending key order + } + return y.AtTime.Before(x.AtTime) // most recent first + }) + keyDenom := make(map[string]int) // key is "file:line" + for _, report := range reports { + keyDenom[report.Key]++ + } + + // Privacy: the content of 'public' will be posted to GitHub + // to populate an issue textarea.
Even though the user must + // submit the form to share the information with the world, + // merely populating the form causes us to share the + // information with GitHub itself. + // + // For that reason, we cannot write private information to + // public, such as bug reports, which may quote source code. + public := &bytes.Buffer{} + fmt.Fprint(public, goplsBugHeader) + if len(reports) > 0 { + fmt.Fprintf(public, "#### Internal errors\n\n") + fmt.Fprintf(public, "Gopls detected %d internal errors, %d distinct:\n", + len(reports), len(keyDenom)) + for key, denom := range keyDenom { + fmt.Fprintf(public, "- %s (%d)\n", key, denom) + } + fmt.Fprintf(public, "\nPlease copy the full information printed by `gopls bug` here, if you are comfortable sharing it.\n\n") + } + debug.PrintVersionInfo(ctx, public, true, debug.Markdown) + body := public.String() + title := strings.Join(args, " ") + if !strings.HasPrefix(title, goplsBugPrefix) { + title = goplsBugPrefix + title + } + if !browser.Open("https://github.com/golang/go/issues/new?title=" + url.QueryEscape(title) + "&body=" + url.QueryEscape(body)) { + fmt.Print("Please file a new issue at golang.org/issue/new using this template:\n\n") + fmt.Print(body) + } + + // Print bug reports to stdout (not GitHub). + keyNum := make(map[string]int) + for _, report := range reports { + fmt.Printf("-- %v -- \n", report.AtTime) + + // Append seq number (e.g. " (1/2)") for repeated keys. + var seq string + if denom := keyDenom[report.Key]; denom > 1 { + keyNum[report.Key]++ + seq = fmt.Sprintf(" (%d/%d)", keyNum[report.Key], denom) + } + + // Privacy: + // - File and Stack may contain the name of the user that built gopls. + // - Description may contain names of the user's packages/files/symbols. 
+ fmt.Printf("%s:%d: %s%s\n\n", report.File, report.Line, report.Description, seq) + fmt.Printf("%s\n\n", report.Stack) + } + if len(reports) > 0 { + fmt.Printf("Please copy the above information into the GitHub issue, if you are comfortable sharing it.\n") + } + + return nil +} + +type apiJSON struct { + app *Application +} + +func (j *apiJSON) Name() string { return "api-json" } +func (j *apiJSON) Parent() string { return j.app.Name() } +func (j *apiJSON) Usage() string { return "" } +func (j *apiJSON) ShortHelp() string { return "print JSON describing gopls API" } +func (j *apiJSON) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ``) + printFlagDefaults(f) +} + +func (j *apiJSON) Run(ctx context.Context, args ...string) error { + js, err := json.MarshalIndent(settings.GeneratedAPIJSON, "", "\t") + if err != nil { + return err + } + fmt.Fprint(os.Stdout, string(js)) + return nil +} + +type licenses struct { + app *Application +} + +func (l *licenses) Name() string { return "licenses" } +func (l *licenses) Parent() string { return l.app.Name() } +func (l *licenses) Usage() string { return "" } +func (l *licenses) ShortHelp() string { return "print licenses of included software" } +func (l *licenses) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ``) + printFlagDefaults(f) +} + +const licensePreamble = ` +gopls is made available under the following BSD-style license: + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. 
nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +gopls implements the LSP specification, which is made available under the following license: + +Copyright (c) Microsoft Corporation + +All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT +OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +gopls also includes software made available under these licenses: +` + +func (l *licenses) Run(ctx context.Context, args ...string) error { + opts := settings.DefaultOptions(l.app.options) + txt := licensePreamble + if opts.LicensesText == "" { + txt += "(development gopls, license information not available)" + } else { + txt += opts.LicensesText + } + fmt.Fprint(os.Stdout, txt) + return nil +} diff --git a/gopls/internal/lsp/cmd/test/integration_test.go b/gopls/internal/cmd/integration_test.go similarity index 84% rename from gopls/internal/lsp/cmd/test/integration_test.go rename to gopls/internal/cmd/integration_test.go index c14f1d9cf70..aabb8c223b9 100644 --- a/gopls/internal/lsp/cmd/test/integration_test.go +++ b/gopls/internal/cmd/integration_test.go @@ -3,7 +3,7 @@ // license that can be found in the LICENSE file. // Package cmdtest contains the test suite for the command line behavior of gopls. -package cmdtest +package cmd_test // This file defines integration tests of each gopls subcommand that // fork+exec the command in a separate process. 
@@ -32,17 +32,18 @@ import ( "fmt" "math/rand" "os" + "os/exec" "path/filepath" "regexp" "strings" "testing" - exec "golang.org/x/sys/execabs" - "golang.org/x/tools/gopls/internal/bug" + "golang.org/x/tools/gopls/internal/cmd" + "golang.org/x/tools/gopls/internal/debug" "golang.org/x/tools/gopls/internal/hooks" - "golang.org/x/tools/gopls/internal/lsp/cmd" - "golang.org/x/tools/gopls/internal/lsp/debug" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/version" "golang.org/x/tools/internal/testenv" "golang.org/x/tools/internal/tool" "golang.org/x/tools/txtar" @@ -55,7 +56,7 @@ func TestVersion(t *testing.T) { tree := writeTree(t, "") // There's not much we can robustly assert about the actual version. - want := debug.Version() // e.g. "master" + want := version.Version() // e.g. "master" // basic { @@ -64,6 +65,13 @@ func TestVersion(t *testing.T) { res.checkStdout(want) } + // basic, with version override + { + res := goplsWithEnv(t, tree, []string{"TEST_GOPLS_VERSION=v1.2.3"}, "version") + res.checkExit(true) + res.checkStdout(`v1\.2\.3`) + } + // -json flag { res := gopls(t, tree, "version", "-json") @@ -165,6 +173,58 @@ func h() { } } +// TestCodeLens tests the 'codelens' subcommand (../codelens.go). 
+func TestCodeLens(t *testing.T) { + t.Parallel() + + tree := writeTree(t, ` +-- go.mod -- +module example.com +go 1.18 + +-- a/a.go -- +package a +-- a/a_test.go -- +package a_test +import "testing" +func TestPass(t *testing.T) {} +func TestFail(t *testing.T) { t.Fatal("fail") } +`) + // missing position + { + res := gopls(t, tree, "codelens") + res.checkExit(false) + res.checkStderr("requires a file name") + } + // list code lenses + { + res := gopls(t, tree, "codelens", "./a/a_test.go") + res.checkExit(true) + res.checkStdout(`a_test.go:3: "run test" \[gopls.test\]`) + res.checkStdout(`a_test.go:4: "run test" \[gopls.test\]`) + } + // no codelens with title/position + { + res := gopls(t, tree, "codelens", "-exec", "./a/a_test.go:1", "nope") + res.checkExit(false) + res.checkStderr(`no code lens at .* with title "nope"`) + } + // run the passing test + { + res := gopls(t, tree, "codelens", "-exec", "./a/a_test.go:3", "run test") + res.checkExit(true) + res.checkStderr(`PASS: TestPass`) // from go test + res.checkStderr("Info: all tests passed") // from gopls.test + } + // run the failing test + { + res := gopls(t, tree, "codelens", "-exec", "./a/a_test.go:4", "run test") + res.checkExit(false) + res.checkStderr(`FAIL example.com/a`) + res.checkStderr("Info: 1 / 1 tests failed") + } +} + // TestDefinition tests the 'definition' subcommand (../definition.go). func TestDefinition(t *testing.T) { t.Parallel() @@ -216,6 +276,80 @@ func g() { } } +// TestExecute tests the 'execute' subcommand (../execute.go). 
+func TestExecute(t *testing.T) { + t.Parallel() + + tree := writeTree(t, ` +-- go.mod -- +module example.com +go 1.18 + +-- hello.go -- +package a +func main() {} + +-- hello_test.go -- +package a +import "testing" +func TestHello(t *testing.T) { + t.Fatal("oops") +} +`) + // missing command name + { + res := gopls(t, tree, "execute") + res.checkExit(false) + res.checkStderr("requires a command") + } + // bad command + { + res := gopls(t, tree, "execute", "gopls.foo") + res.checkExit(false) + res.checkStderr("unrecognized command: gopls.foo") + } + // too few arguments + { + res := gopls(t, tree, "execute", "gopls.run_tests") + res.checkExit(false) + res.checkStderr("expected 1 input arguments, got 0") + } + // too many arguments + { + res := gopls(t, tree, "execute", "gopls.run_tests", "null", "null") + res.checkExit(false) + res.checkStderr("expected 1 input arguments, got 2") + } + // argument is not JSON + { + res := gopls(t, tree, "execute", "gopls.run_tests", "hello") + res.checkExit(false) + res.checkStderr("argument 1 is not valid JSON: invalid character 'h'") + } + // add import, show diff + hello := "file://" + filepath.ToSlash(tree) + "/hello.go" + { + res := gopls(t, tree, "execute", "-d", "gopls.add_import", `{"ImportPath": "fmt", "URI": "`+hello+`"}`) + res.checkExit(true) + res.checkStdout(`[+]import "fmt"`) + } + // list known packages (has a result) + { + res := gopls(t, tree, "execute", "gopls.list_known_packages", `{"URI": "`+hello+`"}`) + res.checkExit(true) + res.checkStdout(`"fmt"`) + res.checkStdout(`"encoding/json"`) + } + // run tests + { + helloTest := "file://" + filepath.ToSlash(tree) + "/hello_test.go" + res := gopls(t, tree, "execute", "gopls.run_tests", `{"URI": "`+helloTest+`", "Tests": ["TestHello"]}`) + res.checkExit(false) + res.checkStderr(`hello_test.go:4: oops`) + res.checkStderr(`1 / 1 tests failed`) + } +} + // TestFoldingRanges tests the 'folding_ranges' subcommand (../folding_range.go). 
func TestFoldingRanges(t *testing.T) { t.Parallel() @@ -768,13 +902,13 @@ package foo if got := len(stats2.BugReports); got > 0 { t.Errorf("Got %d bug reports with -anon, want 0. Reports:%+v", got, stats2.BugReports) } - var stats2AsMap map[string]interface{} + var stats2AsMap map[string]any if err := json.Unmarshal([]byte(res2.stdout), &stats2AsMap); err != nil { t.Fatalf("failed to unmarshal JSON output of stats command: %v", err) } // GOPACKAGESDRIVER is user information, but is ok to print zero value. - if v, ok := stats2AsMap["GOPACKAGESDRIVER"]; !ok || v != "" { - t.Errorf(`Got GOPACKAGESDRIVER=(%q, %v); want ("", true(found))`, v, ok) + if v, ok := stats2AsMap["GOPACKAGESDRIVER"]; ok && v != "" { + t.Errorf(`Got GOPACKAGESDRIVER=(%v, %v); want ("", true(found))`, v, ok) } } @@ -852,7 +986,7 @@ var _ io.Reader = C{} type C struct{} // Read implements io.Reader. -func (C) Read(p []byte) (n int, err error) { +func (c C) Read(p []byte) (n int, err error) { panic("unimplemented") } `[1:] @@ -908,7 +1042,11 @@ func goplsMain() { bug.PanicOnBugs = true } - tool.Main(context.Background(), cmd.New("gopls", "", nil, hooks.Options), os.Args[1:]) + if v := os.Getenv("TEST_GOPLS_VERSION"); v != "" { + version.VersionOverride = v + } + + tool.Main(context.Background(), cmd.New(hooks.Options), os.Args[1:]) } // writeTree extracts a txtar archive into a new directory and returns its path. @@ -916,7 +1054,7 @@ func writeTree(t *testing.T, archive string) string { root := t.TempDir() // This unfortunate step is required because gopls output - // expands symbolic links it its input file names (arguably it + // expands symbolic links in its input file names (arguably it // should not), and on macOS the temp dir is in /var -> private/var. root, err := filepath.EvalSymlinks(root) if err != nil { @@ -951,6 +1089,7 @@ func goplsWithEnv(t *testing.T, dir string, env []string, args ...string) *resul goplsCmd := exec.Command(os.Args[0], args...) 
goplsCmd.Env = append(os.Environ(), "ENTRYPOINT=goplsMain") + goplsCmd.Env = append(goplsCmd.Env, "GOPACKAGESDRIVER=off") goplsCmd.Env = append(goplsCmd.Env, env...) goplsCmd.Dir = dir goplsCmd.Stdout = new(bytes.Buffer) diff --git a/gopls/internal/lsp/cmd/links.go b/gopls/internal/cmd/links.go similarity index 89% rename from gopls/internal/lsp/cmd/links.go rename to gopls/internal/cmd/links.go index e011664bcdd..0f1d671a503 100644 --- a/gopls/internal/lsp/cmd/links.go +++ b/gopls/internal/cmd/links.go @@ -11,8 +11,7 @@ import ( "fmt" "os" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/internal/tool" ) @@ -31,7 +30,7 @@ func (l *links) DetailedHelp(f *flag.FlagSet) { fmt.Fprintf(f.Output(), ` Example: list links contained within a file: - $ gopls links internal/lsp/cmd/check.go + $ gopls links internal/cmd/check.go links-flags: `) @@ -51,7 +50,7 @@ func (l *links) Run(ctx context.Context, args ...string) error { } defer conn.terminate(ctx) - from := span.Parse(args[0]) + from := parseSpan(args[0]) uri := from.URI() if _, err := conn.openFile(ctx, uri); err != nil { @@ -59,7 +58,7 @@ func (l *links) Run(ctx context.Context, args ...string) error { } results, err := conn.DocumentLink(ctx, &protocol.DocumentLinkParams{ TextDocument: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(uri), + URI: uri, }, }) if err != nil { diff --git a/gopls/internal/cmd/parsespan.go b/gopls/internal/cmd/parsespan.go new file mode 100644 index 00000000000..556beb9730e --- /dev/null +++ b/gopls/internal/cmd/parsespan.go @@ -0,0 +1,106 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cmd + +import ( + "strconv" + "strings" + "unicode/utf8" + + "golang.org/x/tools/gopls/internal/protocol" +) + +// parseSpan returns the location represented by the input. +// Only file paths are accepted, not URIs. +// The returned span will be normalized, and thus if printed may produce a +// different string. +func parseSpan(input string) span { + uri := protocol.URIFromPath + + // :0:0#0-0:0#0 + valid := input + var hold, offset int + hadCol := false + suf := rstripSuffix(input) + if suf.sep == "#" { + offset = suf.num + suf = rstripSuffix(suf.remains) + } + if suf.sep == ":" { + valid = suf.remains + hold = suf.num + hadCol = true + suf = rstripSuffix(suf.remains) + } + switch { + case suf.sep == ":": + return newSpan(uri(suf.remains), newPoint(suf.num, hold, offset), point{}) + case suf.sep == "-": + // we have a span, fall out of the case to continue + default: + // separator not valid, rewind to either the : or the start + return newSpan(uri(valid), newPoint(hold, 0, offset), point{}) + } + // only the span form can get here + // at this point we still don't know what the numbers we have mean + // if have not yet seen a : then we might have either a line or a column depending + // on whether start has a column or not + // we build an end point and will fix it later if needed + end := newPoint(suf.num, hold, offset) + hold, offset = 0, 0 + suf = rstripSuffix(suf.remains) + if suf.sep == "#" { + offset = suf.num + suf = rstripSuffix(suf.remains) + } + if suf.sep != ":" { + // turns out we don't have a span after all, rewind + return newSpan(uri(valid), end, point{}) + } + valid = suf.remains + hold = suf.num + suf = rstripSuffix(suf.remains) + if suf.sep != ":" { + // line#offset only + return newSpan(uri(valid), newPoint(hold, 0, offset), end) + } + // we have a column, so if end only had one number, it is also the column + if !hadCol { + end = newPoint(suf.num, end.v.Line, end.v.Offset) + } + return newSpan(uri(suf.remains), newPoint(suf.num, 
hold, offset), end) +} + +type suffix struct { + remains string + sep string + num int +} + +func rstripSuffix(input string) suffix { + if len(input) == 0 { + return suffix{"", "", -1} + } + remains := input + + // Remove optional trailing decimal number. + num := -1 + last := strings.LastIndexFunc(remains, func(r rune) bool { return r < '0' || r > '9' }) + if last >= 0 && last < len(remains)-1 { + number, err := strconv.ParseInt(remains[last+1:], 10, 64) + if err == nil { + num = int(number) + remains = remains[:last+1] + } + } + // now see if we have a trailing separator + r, w := utf8.DecodeLastRuneInString(remains) + // If the trailing rune is not one of the valid separators, there is no suffix. + if r != ':' && r != '#' && r != '-' { + return suffix{input, "", -1} + } + remains = remains[:len(remains)-w] + return suffix{remains, string(r), num} +} diff --git a/gopls/internal/lsp/cmd/prepare_rename.go b/gopls/internal/cmd/prepare_rename.go similarity index 90% rename from gopls/internal/lsp/cmd/prepare_rename.go rename to gopls/internal/cmd/prepare_rename.go index 31e2029fb29..c7901e6484d 100644 --- a/gopls/internal/lsp/cmd/prepare_rename.go +++ b/gopls/internal/cmd/prepare_rename.go @@ -10,8 +10,7 @@ import ( "flag" "fmt" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/internal/tool" ) @@ -50,12 +49,12 @@ func (r *prepareRename) Run(ctx context.Context, args ...string) error { } defer conn.terminate(ctx) - from := span.Parse(args[0]) + from := parseSpan(args[0]) file, err := conn.openFile(ctx, from.URI()) if err != nil { return err } - loc, err := file.mapper.SpanLocation(from) + loc, err := file.spanLocation(from) if err != nil { return err } @@ -70,7 +69,7 @@ func (r *prepareRename) Run(ctx context.Context, args ...string) error { return ErrInvalidRenamePosition } - s, err := file.mapper.RangeSpan(result.Range) + s, err :=
file.rangeSpan(result.Range) if err != nil { return err } diff --git a/gopls/internal/cmd/references.go b/gopls/internal/cmd/references.go new file mode 100644 index 00000000000..3c294c71b14 --- /dev/null +++ b/gopls/internal/cmd/references.go @@ -0,0 +1,91 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmd + +import ( + "context" + "flag" + "fmt" + "sort" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/tool" +) + +// references implements the references verb for gopls +type references struct { + IncludeDeclaration bool `flag:"d,declaration" help:"include the declaration of the specified identifier in the results"` + + app *Application +} + +func (r *references) Name() string { return "references" } +func (r *references) Parent() string { return r.app.Name() } +func (r *references) Usage() string { return "[references-flags] " } +func (r *references) ShortHelp() string { return "display selected identifier's references" } +func (r *references) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` +Example: + + $ # 1-indexed location (:line:column or :#offset) of the target identifier + $ gopls references helper/helper.go:8:6 + $ gopls references helper/helper.go:#53 + +references-flags: +`) + printFlagDefaults(f) +} + +func (r *references) Run(ctx context.Context, args ...string) error { + if len(args) != 1 { + return tool.CommandLineErrorf("references expects 1 argument (position)") + } + + conn, err := r.app.connect(ctx, nil) + if err != nil { + return err + } + defer conn.terminate(ctx) + + from := parseSpan(args[0]) + file, err := conn.openFile(ctx, from.URI()) + if err != nil { + return err + } + loc, err := file.spanLocation(from) + if err != nil { + return err + } + p := protocol.ReferenceParams{ + Context: protocol.ReferenceContext{ + IncludeDeclaration: r.IncludeDeclaration, + }, + 
TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), + } + locations, err := conn.References(ctx, &p) + if err != nil { + return err + } + var spans []string + for _, l := range locations { + f, err := conn.openFile(ctx, l.URI) + if err != nil { + return err + } + // convert location to span for user-friendly 1-indexed line + // and column numbers + span, err := f.locationSpan(l) + if err != nil { + return err + } + spans = append(spans, fmt.Sprint(span)) + } + + sort.Strings(spans) + for _, s := range spans { + fmt.Println(s) + } + return nil +} diff --git a/gopls/internal/lsp/cmd/remote.go b/gopls/internal/cmd/remote.go similarity index 97% rename from gopls/internal/lsp/cmd/remote.go rename to gopls/internal/cmd/remote.go index 684981cfff8..8de4365fc9d 100644 --- a/gopls/internal/lsp/cmd/remote.go +++ b/gopls/internal/cmd/remote.go @@ -13,8 +13,8 @@ import ( "log" "os" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/lsprpc" + "golang.org/x/tools/gopls/internal/lsprpc" + "golang.org/x/tools/gopls/internal/protocol/command" ) type remote struct { diff --git a/gopls/internal/cmd/rename.go b/gopls/internal/cmd/rename.go new file mode 100644 index 00000000000..6d831681c19 --- /dev/null +++ b/gopls/internal/cmd/rename.go @@ -0,0 +1,73 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmd + +import ( + "context" + "flag" + "fmt" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/tool" +) + +// rename implements the rename verb for gopls. 
+type rename struct { + EditFlags + app *Application +} + +func (r *rename) Name() string { return "rename" } +func (r *rename) Parent() string { return r.app.Name() } +func (r *rename) Usage() string { return "[rename-flags] " } +func (r *rename) ShortHelp() string { return "rename selected identifier" } +func (r *rename) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` +Example: + + $ # 1-based location (:line:column or :#position) of the thing to change + $ gopls rename helper/helper.go:8:6 Foo + $ gopls rename helper/helper.go:#53 Foo + +rename-flags: +`) + printFlagDefaults(f) +} + +// Run renames the specified identifier and either; +// - if -w is specified, updates the file(s) in place; +// - if -d is specified, prints out unified diffs of the changes; or +// - otherwise, prints the new versions to stdout. +func (r *rename) Run(ctx context.Context, args ...string) error { + if len(args) != 2 { + return tool.CommandLineErrorf("rename expects 2 arguments (position, new name)") + } + r.app.editFlags = &r.EditFlags + conn, err := r.app.connect(ctx, nil) + if err != nil { + return err + } + defer conn.terminate(ctx) + + from := parseSpan(args[0]) + file, err := conn.openFile(ctx, from.URI()) + if err != nil { + return err + } + loc, err := file.spanLocation(from) + if err != nil { + return err + } + p := protocol.RenameParams{ + TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, + Position: loc.Range.Start, + NewName: args[1], + } + edit, err := conn.Rename(ctx, &p) + if err != nil { + return err + } + return conn.client.applyWorkspaceEdit(edit) +} diff --git a/gopls/internal/lsp/cmd/semantictokens.go b/gopls/internal/cmd/semantictokens.go similarity index 79% rename from gopls/internal/lsp/cmd/semantictokens.go rename to gopls/internal/cmd/semantictokens.go index 1acd83a2ac0..f181f30c420 100644 --- a/gopls/internal/lsp/cmd/semantictokens.go +++ b/gopls/internal/cmd/semantictokens.go @@ -9,16 +9,12 @@ import ( "context" "flag" "fmt" - 
"go/parser" - "go/token" "log" "os" "unicode/utf8" - "golang.org/x/tools/gopls/internal/lsp" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" ) // generate semantic tokens and interpolate them in the file @@ -48,8 +44,6 @@ type semtok struct { app *Application } -var colmap *protocol.Mapper - func (c *semtok) Name() string { return "semtok" } func (c *semtok) Parent() string { return c.app.Name() } func (c *semtok) Usage() string { return "" } @@ -58,7 +52,7 @@ func (c *semtok) DetailedHelp(f *flag.FlagSet) { fmt.Fprint(f.Output(), ` Example: show the semantic tokens for this file: - $ gopls semtok internal/lsp/cmd/semtok.go + $ gopls semtok internal/cmd/semtok.go `) printFlagDefaults(f) } @@ -71,7 +65,7 @@ func (c *semtok) Run(ctx context.Context, args ...string) error { } // perhaps simpler if app had just had a FlagSet member origOptions := c.app.options - c.app.options = func(opts *source.Options) { + c.app.options = func(opts *settings.Options) { origOptions(opts) opts.SemanticTokens = true } @@ -80,20 +74,16 @@ func (c *semtok) Run(ctx context.Context, args ...string) error { return err } defer conn.terminate(ctx) - uri := span.URIFromPath(args[0]) + uri := protocol.URIFromPath(args[0]) file, err := conn.openFile(ctx, uri) if err != nil { return err } - buf, err := os.ReadFile(args[0]) - if err != nil { - return err - } - lines := bytes.Split(buf, []byte{'\n'}) + lines := bytes.Split(file.mapper.Content, []byte{'\n'}) p := &protocol.SemanticTokensRangeParams{ TextDocument: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(uri), + URI: uri, }, Range: protocol.Range{Start: protocol.Position{Line: 0, Character: 0}, End: protocol.Position{ @@ -105,23 +95,7 @@ func (c *semtok) Run(ctx context.Context, args ...string) error { if err != nil { return err } - fset := 
token.NewFileSet() - f, err := parser.ParseFile(fset, args[0], buf, 0) - if err != nil { - log.Printf("parsing %s failed %v", args[0], err) - return err - } - tok := fset.File(f.Pos()) - if tok == nil { - // can't happen; just parsed this file - return fmt.Errorf("can't find %s in fset", args[0]) - } - colmap = protocol.NewMapper(uri, buf) - err = decorate(file.uri.Filename(), resp.Data) - if err != nil { - return err - } - return nil + return decorate(file, resp.Data) } type mark struct { @@ -160,16 +134,12 @@ func markLine(m mark, lines [][]byte) { lines[m.line-1] = l } -func decorate(file string, result []uint32) error { - buf, err := os.ReadFile(file) - if err != nil { - return err - } - marks := newMarks(result) +func decorate(file *cmdFile, result []uint32) error { + marks := newMarks(file, result) if len(marks) == 0 { return nil } - lines := bytes.Split(buf, []byte{'\n'}) + lines := bytes.Split(file.mapper.Content, []byte{'\n'}) for i := len(marks) - 1; i >= 0; i-- { mx := marks[i] markLine(mx, lines) @@ -178,7 +148,7 @@ func decorate(file string, result []uint32) error { return nil } -func newMarks(d []uint32) []mark { +func newMarks(file *cmdFile, d []uint32) []mark { ans := []mark{} // the following two loops could be merged, at the cost // of making the logic slightly more complicated to understand @@ -207,7 +177,7 @@ func newMarks(d []uint32) []mark { Character: lspChar[i] + d[5*i+2], }, } - spn, err := colmap.RangeSpan(pr) + spn, err := file.rangeSpan(pr) if err != nil { log.Fatal(err) } @@ -215,8 +185,8 @@ func newMarks(d []uint32) []mark { line: spn.Start().Line(), offset: spn.Start().Column(), len: spn.End().Column() - spn.Start().Column(), - typ: lsp.SemType(int(d[5*i+3])), - mods: lsp.SemMods(int(d[5*i+4])), + typ: protocol.SemType(int(d[5*i+3])), + mods: protocol.SemMods(int(d[5*i+4])), } ans = append(ans, m) } diff --git a/gopls/internal/cmd/serve.go b/gopls/internal/cmd/serve.go new file mode 100644 index 00000000000..3b79ccb6a8c --- /dev/null 
+++ b/gopls/internal/cmd/serve.go @@ -0,0 +1,150 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmd + +import ( + "context" + "errors" + "flag" + "fmt" + "io" + "log" + "os" + "time" + + "golang.org/x/telemetry/upload" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/debug" + "golang.org/x/tools/gopls/internal/lsprpc" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/fakenet" + "golang.org/x/tools/internal/jsonrpc2" + "golang.org/x/tools/internal/tool" +) + +// Serve is a struct that exposes the configurable parts of the LSP server as +// flags, in the right form for tool.Main to consume. +type Serve struct { + Logfile string `flag:"logfile" help:"filename to log to. if value is \"auto\", then logging to a default output file is enabled"` + Mode string `flag:"mode" help:"no effect"` + Port int `flag:"port" help:"port on which to run gopls for debugging purposes"` + Address string `flag:"listen" help:"address on which to listen for remote connections. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. 
Otherwise, TCP is used."` + IdleTimeout time.Duration `flag:"listen.timeout" help:"when used with -listen, shut down the server when there are no connected clients for this duration"` + Trace bool `flag:"rpc.trace" help:"print the full rpc trace in lsp inspector format"` + Debug string `flag:"debug" help:"serve debug information on the supplied address"` + + RemoteListenTimeout time.Duration `flag:"remote.listen.timeout" help:"when used with -remote=auto, the -listen.timeout value used to start the daemon"` + RemoteDebug string `flag:"remote.debug" help:"when used with -remote=auto, the -debug value used to start the daemon"` + RemoteLogfile string `flag:"remote.logfile" help:"when used with -remote=auto, the -logfile value used to start the daemon"` + + app *Application +} + +func (s *Serve) Name() string { return "serve" } +func (s *Serve) Parent() string { return s.app.Name() } +func (s *Serve) Usage() string { return "[server-flags]" } +func (s *Serve) ShortHelp() string { + return "run a server for Go code using the Language Server Protocol" +} +func (s *Serve) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` gopls [flags] [server-flags] + +The server communicates using JSONRPC2 on stdin and stdout, and is intended to be run directly as +a child of an editor process. + +server-flags: +`) + printFlagDefaults(f) +} + +func (s *Serve) remoteArgs(network, address string) []string { + args := []string{"serve", + "-listen", fmt.Sprintf(`%s;%s`, network, address), + } + if s.RemoteDebug != "" { + args = append(args, "-debug", s.RemoteDebug) + } + if s.RemoteListenTimeout != 0 { + args = append(args, "-listen.timeout", s.RemoteListenTimeout.String()) + } + if s.RemoteLogfile != "" { + args = append(args, "-logfile", s.RemoteLogfile) + } + return args +} + +// Run configures a server based on the flags, and then runs it. +// It blocks until the server shuts down. 
+func (s *Serve) Run(ctx context.Context, args ...string) error { + // TODO(adonovan): eliminate this once telemetry.Start has this effect. + go upload.Run(nil) // start telemetry uploader + + if len(args) > 0 { + return tool.CommandLineErrorf("server does not take arguments, got %v", args) + } + + di := debug.GetInstance(ctx) + isDaemon := s.Address != "" || s.Port != 0 + if di != nil { + closeLog, err := di.SetLogFile(s.Logfile, isDaemon) + if err != nil { + return err + } + defer closeLog() + di.ServerAddress = s.Address + di.Serve(ctx, s.Debug) + } + var ss jsonrpc2.StreamServer + if s.app.Remote != "" { + var err error + ss, err = lsprpc.NewForwarder(s.app.Remote, s.remoteArgs) + if err != nil { + return fmt.Errorf("creating forwarder: %w", err) + } + } else { + ss = lsprpc.NewStreamServer(cache.New(nil), isDaemon, s.app.options) + } + + var network, addr string + if s.Address != "" { + network, addr = lsprpc.ParseAddr(s.Address) + } + if s.Port != 0 { + network = "tcp" + // TODO(adonovan): should gopls ever be listening on network + // sockets, or only local ones? + // + // Ian says this was added in anticipation of + // something related to "VS Code remote" that turned + // out to be unnecessary. So I propose we limit it to + // localhost, if only so that we avoid the macOS + // firewall prompt. + // + // Hana says: "s.Address is for the remote access (LSP) + // and s.Port is for debugging purpose (according to + // the Server type documentation). I am not sure why the + // existing code here is mixing up and overwriting addr. + // For debugging endpoint, I think localhost makes perfect sense." + // + // TODO(adonovan): disentangle Address and Port, + // and use only localhost for the latter. 
+ addr = fmt.Sprintf(":%v", s.Port) + } + if addr != "" { + log.Printf("Gopls daemon: listening on %s network, address %s...", network, addr) + defer log.Printf("Gopls daemon: exiting") + return jsonrpc2.ListenAndServe(ctx, network, addr, ss, s.IdleTimeout) + } + stream := jsonrpc2.NewHeaderStream(fakenet.NewConn("stdio", os.Stdin, os.Stdout)) + if s.Trace && di != nil { + stream = protocol.LoggingStream(stream, di.LogWriter) + } + conn := jsonrpc2.NewConn(stream) + err := ss.ServeStream(ctx, conn) + if errors.Is(err, io.EOF) { + return nil + } + return err +} diff --git a/gopls/internal/lsp/cmd/signature.go b/gopls/internal/cmd/signature.go similarity index 92% rename from gopls/internal/lsp/cmd/signature.go rename to gopls/internal/cmd/signature.go index d8141a43294..cf976a64859 100644 --- a/gopls/internal/lsp/cmd/signature.go +++ b/gopls/internal/cmd/signature.go @@ -9,8 +9,7 @@ import ( "flag" "fmt" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/internal/tool" ) @@ -45,13 +44,13 @@ func (r *signature) Run(ctx context.Context, args ...string) error { } defer conn.terminate(ctx) - from := span.Parse(args[0]) + from := parseSpan(args[0]) file, err := conn.openFile(ctx, from.URI()) if err != nil { return err } - loc, err := file.mapper.SpanLocation(from) + loc, err := file.spanLocation(from) if err != nil { return err } diff --git a/gopls/internal/cmd/span.go b/gopls/internal/cmd/span.go new file mode 100644 index 00000000000..4753d534350 --- /dev/null +++ b/gopls/internal/cmd/span.go @@ -0,0 +1,238 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmd + +// span and point represent positions and ranges in text files. 
+ +import ( + "encoding/json" + "fmt" + "path" + "sort" + "strings" + + "golang.org/x/tools/gopls/internal/protocol" +) + +// A span represents a range of text within a source file. The start +// and end points of a valid span may be hold either its byte offset, +// or its (line, column) pair, or both. Columns are measured in bytes. +// +// Spans are appropriate in user interfaces (e.g. command-line tools) +// and tests where a position is notated without access to the content +// of the file. +// +// Use protocol.Mapper to convert between span and other +// representations, such as go/token (also UTF-8) or the LSP protocol +// (UTF-16). The latter requires access to file contents. +// +// See overview comments at ../protocol/mapper.go. +type span struct { + v _span +} + +// point represents a single point within a file. +// In general this should only be used as part of a span, as on its own it +// does not carry enough information. +type point struct { + v _point +} + +// The span_/point_ types have public fields to support JSON encoding, +// but the span/point types hide these fields by defining methods that +// shadow them. (This is used by a few of the command-line tool +// subcommands, which emit spans and have a -json flag.) +// +// TODO(adonovan): simplify now that it's all internal to cmd. + +type _span struct { + URI protocol.DocumentURI `json:"uri"` + Start _point `json:"start"` + End _point `json:"end"` +} + +type _point struct { + Line int `json:"line"` // 1-based line number + Column int `json:"column"` // 1-based, UTF-8 codes (bytes) + Offset int `json:"offset"` // 0-based byte offset +} + +func newSpan(uri protocol.DocumentURI, start, end point) span { + s := span{v: _span{URI: uri, Start: start.v, End: end.v}} + s.v.clean() + return s +} + +func newPoint(line, col, offset int) point { + p := point{v: _point{Line: line, Column: col, Offset: offset}} + p.v.clean() + return p +} + +// sortSpans sorts spans into a stable but unspecified order. 
+func sortSpans(spans []span) { + sort.SliceStable(spans, func(i, j int) bool { + return compare(spans[i], spans[j]) < 0 + }) +} + +// compare implements a three-valued ordered comparison of Spans. +func compare(a, b span) int { + // This is a textual comparison. It does not perform path + // cleaning, case folding, resolution of symbolic links, + // testing for existence, or any I/O. + if cmp := strings.Compare(string(a.URI()), string(b.URI())); cmp != 0 { + return cmp + } + if cmp := comparePoint(a.v.Start, b.v.Start); cmp != 0 { + return cmp + } + return comparePoint(a.v.End, b.v.End) +} + +func comparePoint(a, b _point) int { + if !a.hasPosition() { + if a.Offset < b.Offset { + return -1 + } + if a.Offset > b.Offset { + return 1 + } + return 0 + } + if a.Line < b.Line { + return -1 + } + if a.Line > b.Line { + return 1 + } + if a.Column < b.Column { + return -1 + } + if a.Column > b.Column { + return 1 + } + return 0 +} + +func (s span) HasPosition() bool { return s.v.Start.hasPosition() } +func (s span) HasOffset() bool { return s.v.Start.hasOffset() } +func (s span) IsValid() bool { return s.v.Start.isValid() } +func (s span) IsPoint() bool { return s.v.Start == s.v.End } +func (s span) URI() protocol.DocumentURI { return s.v.URI } +func (s span) Start() point { return point{s.v.Start} } +func (s span) End() point { return point{s.v.End} } +func (s *span) MarshalJSON() ([]byte, error) { return json.Marshal(&s.v) } +func (s *span) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &s.v) } + +func (p point) HasPosition() bool { return p.v.hasPosition() } +func (p point) HasOffset() bool { return p.v.hasOffset() } +func (p point) IsValid() bool { return p.v.isValid() } +func (p *point) MarshalJSON() ([]byte, error) { return json.Marshal(&p.v) } +func (p *point) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &p.v) } +func (p point) Line() int { + if !p.v.hasPosition() { + panic(fmt.Errorf("position not set in %v", p.v)) + } + return p.v.Line +} 
+func (p point) Column() int { + if !p.v.hasPosition() { + panic(fmt.Errorf("position not set in %v", p.v)) + } + return p.v.Column +} +func (p point) Offset() int { + if !p.v.hasOffset() { + panic(fmt.Errorf("offset not set in %v", p.v)) + } + return p.v.Offset +} + +func (p _point) hasPosition() bool { return p.Line > 0 } +func (p _point) hasOffset() bool { return p.Offset >= 0 } +func (p _point) isValid() bool { return p.hasPosition() || p.hasOffset() } +func (p _point) isZero() bool { + return (p.Line == 1 && p.Column == 1) || (!p.hasPosition() && p.Offset == 0) +} + +func (s *_span) clean() { + //this presumes the points are already clean + if !s.End.isValid() || (s.End == _point{}) { + s.End = s.Start + } +} + +func (p *_point) clean() { + if p.Line < 0 { + p.Line = 0 + } + if p.Column <= 0 { + if p.Line > 0 { + p.Column = 1 + } else { + p.Column = 0 + } + } + if p.Offset == 0 && (p.Line > 1 || p.Column > 1) { + p.Offset = -1 + } +} + +// Format implements fmt.Formatter to print the Location in a standard form. +// The format produced is one that can be read back in using parseSpan. +// +// TODO(adonovan): this is esoteric, and the formatting options are +// never used outside of TestFormat. Replace with something simpler +// along the lines of MappedRange.String. 
+func (s span) Format(f fmt.State, c rune) { + fullForm := f.Flag('+') + preferOffset := f.Flag('#') + // we should always have a uri, simplify if it is file format + //TODO: make sure the end of the uri is unambiguous + uri := string(s.v.URI) + if c == 'f' { + uri = path.Base(uri) + } else if !fullForm { + uri = s.v.URI.Path() + } + fmt.Fprint(f, uri) + if !s.IsValid() || (!fullForm && s.v.Start.isZero() && s.v.End.isZero()) { + return + } + // see which bits of start to write + printOffset := s.HasOffset() && (fullForm || preferOffset || !s.HasPosition()) + printLine := s.HasPosition() && (fullForm || !printOffset) + printColumn := printLine && (fullForm || (s.v.Start.Column > 1 || s.v.End.Column > 1)) + fmt.Fprint(f, ":") + if printLine { + fmt.Fprintf(f, "%d", s.v.Start.Line) + } + if printColumn { + fmt.Fprintf(f, ":%d", s.v.Start.Column) + } + if printOffset { + fmt.Fprintf(f, "#%d", s.v.Start.Offset) + } + // start is written, do we need end? + if s.IsPoint() { + return + } + // we don't print the line if it did not change + printLine = fullForm || (printLine && s.v.End.Line > s.v.Start.Line) + fmt.Fprint(f, "-") + if printLine { + fmt.Fprintf(f, "%d", s.v.End.Line) + } + if printColumn { + if printLine { + fmt.Fprint(f, ":") + } + fmt.Fprintf(f, "%d", s.v.End.Column) + } + if printOffset { + fmt.Fprintf(f, "#%d", s.v.End.Offset) + } +} diff --git a/gopls/internal/cmd/spanformat_test.go b/gopls/internal/cmd/spanformat_test.go new file mode 100644 index 00000000000..659d59ce2b3 --- /dev/null +++ b/gopls/internal/cmd/spanformat_test.go @@ -0,0 +1,55 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cmd + +import ( + "fmt" + "path/filepath" + "strings" + "testing" +) + +func TestSpanFormat(t *testing.T) { + formats := []string{"%v", "%#v", "%+v"} + + // Element 0 is the input, and the elements 0-2 are the expected + // output in [%v %#v %+v] formats. Thus the first must be in + // canonical form (invariant under parseSpan + fmt.Sprint). + // The '#' form displays offsets; the '+' form outputs a URI. + // If len=4, element 0 is a noncanonical input and 1-3 are expected outputs. + for _, test := range [][]string{ + {"C:/file_a", "C:/file_a", "file:///C:/file_a:#0"}, + {"C:/file_b:1:2", "C:/file_b:1:2", "file:///C:/file_b:1:2"}, + {"C:/file_c:1000", "C:/file_c:1000", "file:///C:/file_c:1000:1"}, + {"C:/file_d:14:9", "C:/file_d:14:9", "file:///C:/file_d:14:9"}, + {"C:/file_e:1:2-7", "C:/file_e:1:2-7", "file:///C:/file_e:1:2-1:7"}, + {"C:/file_f:500-502", "C:/file_f:500-502", "file:///C:/file_f:500:1-502:1"}, + {"C:/file_g:3:7-8", "C:/file_g:3:7-8", "file:///C:/file_g:3:7-3:8"}, + {"C:/file_h:3:7-4:8", "C:/file_h:3:7-4:8", "file:///C:/file_h:3:7-4:8"}, + {"C:/file_i:#100", "C:/file_i:#100", "file:///C:/file_i:#100"}, + {"C:/file_j:#26-#28", "C:/file_j:#26-#28", "file:///C:/file_j:#26-0#28"}, // 0#28? 
+ {"C:/file_h:3:7#26-4:8#37", // not canonical + "C:/file_h:3:7-4:8", "C:/file_h:#26-#37", "file:///C:/file_h:3:7#26-4:8#37"}} { + input := test[0] + spn := parseSpan(input) + wants := test[0:3] + if len(test) == 4 { + wants = test[1:4] + } + for i, format := range formats { + want := toPath(wants[i]) + if got := fmt.Sprintf(format, spn); got != want { + t.Errorf("Sprintf(%q, %q) = %q, want %q", format, input, got, want) + } + } + } +} + +func toPath(value string) string { + if strings.HasPrefix(value, "file://") { + return value + } + return filepath.FromSlash(value) +} diff --git a/gopls/internal/lsp/cmd/stats.go b/gopls/internal/cmd/stats.go similarity index 90% rename from gopls/internal/lsp/cmd/stats.go rename to gopls/internal/cmd/stats.go index 1fd3367945b..8da1a1a6ae8 100644 --- a/gopls/internal/lsp/cmd/stats.go +++ b/gopls/internal/cmd/stats.go @@ -19,13 +19,13 @@ import ( "sync" "time" - goplsbug "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/debug" - "golang.org/x/tools/gopls/internal/lsp/filecache" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/filecache" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/server" + "golang.org/x/tools/gopls/internal/settings" + bugpkg "golang.org/x/tools/gopls/internal/util/bug" + versionpkg "golang.org/x/tools/gopls/internal/version" "golang.org/x/tools/internal/event" ) @@ -74,12 +74,12 @@ func (s *stats) Run(ctx context.Context, args ...string) error { GOARCH: runtime.GOARCH, GOPLSCACHE: os.Getenv("GOPLSCACHE"), GoVersion: runtime.Version(), - GoplsVersion: debug.Version(), + GoplsVersion: versionpkg.Version(), GOPACKAGESDRIVER: os.Getenv("GOPACKAGESDRIVER"), } opts := s.app.options - s.app.options = func(o 
*source.Options) { + s.app.options = func(o *settings.Options) { if opts != nil { opts(o) } @@ -94,7 +94,7 @@ func (s *stats) Run(ctx context.Context, args ...string) error { onProgress := func(p *protocol.ProgressParams) { switch v := p.Value.(type) { case *protocol.WorkDoneProgressBegin: - if v.Title == lsp.DiagnosticWorkTitle(lsp.FromInitialWorkspaceLoad) { + if v.Title == server.DiagnosticWorkTitle(server.FromInitialWorkspaceLoad) { iwlMu.Lock() iwlToken = p.Token iwlMu.Unlock() @@ -147,7 +147,7 @@ func (s *stats) Run(ctx context.Context, args ...string) error { do("Gathering bug reports", func() error { stats.CacheDir, stats.BugReports = filecache.BugReports() if stats.BugReports == nil { - stats.BugReports = []goplsbug.Bug{} // non-nil for JSON + stats.BugReports = []bugpkg.Bug{} // non-nil for JSON } return nil }) @@ -180,7 +180,7 @@ func (s *stats) Run(ctx context.Context, args ...string) error { if _, err := do("Collecting directory info", func() error { var err error - stats.DirStats, err = findDirStats(ctx) + stats.DirStats, err = findDirStats() if err != nil { return err } @@ -232,7 +232,7 @@ type GoplsStats struct { GOPACKAGESDRIVER string InitialWorkspaceLoadDuration string `anon:"ok"` // in time.Duration string form CacheDir string - BugReports []goplsbug.Bug + BugReports []bugpkg.Bug MemStats command.MemStatsResult `anon:"ok"` WorkspaceStats command.WorkspaceStatsResult `anon:"ok"` DirStats dirStats `anon:"ok"` @@ -248,7 +248,7 @@ type dirStats struct { // findDirStats collects information about the current directory and its // subdirectories. 
-func findDirStats(ctx context.Context) (dirStats, error) { +func findDirStats() (dirStats, error) { var ds dirStats filepath.WalkDir(".", func(path string, d fs.DirEntry, err error) error { if err != nil { diff --git a/gopls/internal/lsp/cmd/subcommands.go b/gopls/internal/cmd/subcommands.go similarity index 100% rename from gopls/internal/lsp/cmd/subcommands.go rename to gopls/internal/cmd/subcommands.go diff --git a/gopls/internal/lsp/cmd/suggested_fix.go b/gopls/internal/cmd/suggested_fix.go similarity index 75% rename from gopls/internal/lsp/cmd/suggested_fix.go rename to gopls/internal/cmd/suggested_fix.go index a3e6093912a..f6a88be91ce 100644 --- a/gopls/internal/lsp/cmd/suggested_fix.go +++ b/gopls/internal/cmd/suggested_fix.go @@ -9,8 +9,8 @@ import ( "flag" "fmt" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/slices" "golang.org/x/tools/internal/tool" ) @@ -35,7 +35,7 @@ func (s *suggestedFix) DetailedHelp(f *flag.FlagSet) { fmt.Fprintf(f.Output(), ` Example: apply fixes to this file, rewriting it: - $ gopls fix -a -w internal/lsp/cmd/check.go + $ gopls fix -a -w internal/cmd/check.go The -a (-all) flag causes all fixes, not just preferred ones, to be applied, but since no fixes are currently preferred, this flag is @@ -59,7 +59,7 @@ enumerate all kinds. 
Example: apply any "refactor.rewrite" fixes at the specific byte offset within this file: - $ gopls fix -a internal/lsp/cmd/check.go:#43 refactor.rewrite + $ gopls fix -a internal/cmd/check.go:#43 refactor.rewrite fix-flags: `) @@ -81,19 +81,19 @@ func (s *suggestedFix) Run(ctx context.Context, args ...string) error { } defer conn.terminate(ctx) - from := span.Parse(args[0]) + from := parseSpan(args[0]) uri := from.URI() file, err := conn.openFile(ctx, uri) if err != nil { return err } - rng, err := file.mapper.SpanRange(from) + rng, err := file.spanRange(from) if err != nil { return err } // Get diagnostics. - if err := conn.diagnoseFiles(ctx, []span.URI{uri}); err != nil { + if err := conn.diagnoseFiles(ctx, []protocol.DocumentURI{uri}); err != nil { return err } diagnostics := []protocol.Diagnostic{} // LSP wants non-nil slice @@ -111,7 +111,7 @@ func (s *suggestedFix) Run(ctx context.Context, args ...string) error { } p := protocol.CodeActionParams{ TextDocument: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(uri), + URI: uri, }, Context: protocol.CodeActionContext{ Only: codeActionKinds, @@ -149,42 +149,22 @@ func (s *suggestedFix) Run(ctx context.Context, args ...string) error { continue } - // Partially apply CodeAction.Edit, a WorkspaceEdit. - // (See also conn.Client.applyWorkspaceEdit(a.Edit)). - if !from.HasPosition() { - for _, c := range a.Edit.DocumentChanges { - if c.TextDocumentEdit != nil { - if fileURI(c.TextDocumentEdit.TextDocument.URI) == uri { - edits = append(edits, c.TextDocumentEdit.Edits...) - } - } - } + // If the provided span has a position (not just offsets), + // and the action has diagnostics, the action must have a + // diagnostic with the same range as it. + if from.HasPosition() && len(a.Diagnostics) > 0 && + !slices.ContainsFunc(a.Diagnostics, func(diag protocol.Diagnostic) bool { + return diag.Range.Start == rng.Start + }) { continue } - // The provided span has a position (not just offsets). 
- // Find the code action that has the same range as it. - for _, diag := range a.Diagnostics { - if diag.Range.Start == rng.Start { - for _, c := range a.Edit.DocumentChanges { - if c.TextDocumentEdit != nil { - if fileURI(c.TextDocumentEdit.TextDocument.URI) == uri { - edits = append(edits, c.TextDocumentEdit.Edits...) - } - } - } - break - } - } - - // If suggested fix is not a diagnostic, still must collect edits. - if len(a.Diagnostics) == 0 { - for _, c := range a.Edit.DocumentChanges { - if c.TextDocumentEdit != nil { - if fileURI(c.TextDocumentEdit.TextDocument.URI) == uri { - edits = append(edits, c.TextDocumentEdit.Edits...) - } - } + // Partially apply CodeAction.Edit, a WorkspaceEdit. + // (See also conn.Client.applyWorkspaceEdit(a.Edit)). + for _, c := range a.Edit.DocumentChanges { + tde := c.TextDocumentEdit + if tde != nil && tde.TextDocument.URI == uri { + edits = append(edits, protocol.AsTextEdits(tde.Edits)...) } } } diff --git a/gopls/internal/cmd/symbols.go b/gopls/internal/cmd/symbols.go new file mode 100644 index 00000000000..249397d320f --- /dev/null +++ b/gopls/internal/cmd/symbols.go @@ -0,0 +1,115 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cmd + +import ( + "context" + "encoding/json" + "flag" + "fmt" + "sort" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/tool" +) + +// symbols implements the symbols verb for gopls +type symbols struct { + app *Application +} + +func (r *symbols) Name() string { return "symbols" } +func (r *symbols) Parent() string { return r.app.Name() } +func (r *symbols) Usage() string { return "" } +func (r *symbols) ShortHelp() string { return "display selected file's symbols" } +func (r *symbols) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` +Example: + $ gopls symbols helper/helper.go +`) + printFlagDefaults(f) +} +func (r *symbols) Run(ctx context.Context, args ...string) error { + if len(args) != 1 { + return tool.CommandLineErrorf("symbols expects 1 argument (position)") + } + + conn, err := r.app.connect(ctx, nil) + if err != nil { + return err + } + defer conn.terminate(ctx) + + from := parseSpan(args[0]) + p := protocol.DocumentSymbolParams{ + TextDocument: protocol.TextDocumentIdentifier{ + URI: from.URI(), + }, + } + symbols, err := conn.DocumentSymbol(ctx, &p) + if err != nil { + return err + } + for _, s := range symbols { + if m, ok := s.(map[string]interface{}); ok { + s, err = mapToSymbol(m) + if err != nil { + return err + } + } + switch t := s.(type) { + case protocol.DocumentSymbol: + printDocumentSymbol(t) + case protocol.SymbolInformation: + printSymbolInformation(t) + } + } + return nil +} + +func mapToSymbol(m map[string]interface{}) (interface{}, error) { + b, err := json.Marshal(m) + if err != nil { + return nil, err + } + + if _, ok := m["selectionRange"]; ok { + var s protocol.DocumentSymbol + if err := json.Unmarshal(b, &s); err != nil { + return nil, err + } + return s, nil + } + + var s protocol.SymbolInformation + if err := json.Unmarshal(b, &s); err != nil { + return nil, err + } + return s, nil +} + +func printDocumentSymbol(s protocol.DocumentSymbol) { + fmt.Printf("%s %s %s\n", s.Name, 
s.Kind, positionToString(s.SelectionRange)) + // Sort children for consistency + sort.Slice(s.Children, func(i, j int) bool { + return s.Children[i].Name < s.Children[j].Name + }) + for _, c := range s.Children { + fmt.Printf("\t%s %s %s\n", c.Name, c.Kind, positionToString(c.SelectionRange)) + } +} + +func printSymbolInformation(s protocol.SymbolInformation) { + fmt.Printf("%s %s %s\n", s.Name, s.Kind, positionToString(s.Location.Range)) +} + +func positionToString(r protocol.Range) string { + return fmt.Sprintf("%v:%v-%v:%v", + r.Start.Line+1, + r.Start.Character+1, + r.End.Line+1, + r.End.Character+1, + ) +} diff --git a/gopls/internal/cmd/usage/api-json.hlp b/gopls/internal/cmd/usage/api-json.hlp new file mode 100644 index 00000000000..529cca976ba --- /dev/null +++ b/gopls/internal/cmd/usage/api-json.hlp @@ -0,0 +1,4 @@ +print JSON describing gopls API + +Usage: + gopls [flags] api-json diff --git a/gopls/internal/lsp/cmd/usage/bug.hlp b/gopls/internal/cmd/usage/bug.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/bug.hlp rename to gopls/internal/cmd/usage/bug.hlp diff --git a/gopls/internal/lsp/cmd/usage/call_hierarchy.hlp b/gopls/internal/cmd/usage/call_hierarchy.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/call_hierarchy.hlp rename to gopls/internal/cmd/usage/call_hierarchy.hlp diff --git a/gopls/internal/lsp/cmd/usage/check.hlp b/gopls/internal/cmd/usage/check.hlp similarity index 77% rename from gopls/internal/lsp/cmd/usage/check.hlp rename to gopls/internal/cmd/usage/check.hlp index ba89588d50a..eda1a25a191 100644 --- a/gopls/internal/lsp/cmd/usage/check.hlp +++ b/gopls/internal/cmd/usage/check.hlp @@ -5,4 +5,4 @@ Usage: Example: show the diagnostic results of this file: - $ gopls check internal/lsp/cmd/check.go + $ gopls check internal/cmd/check.go diff --git a/gopls/internal/cmd/usage/codelens.hlp b/gopls/internal/cmd/usage/codelens.hlp new file mode 100644 index 00000000000..5766d7fd189 --- /dev/null +++ 
b/gopls/internal/cmd/usage/codelens.hlp @@ -0,0 +1,35 @@ +List or execute code lenses for a file + +Usage: + gopls [flags] codelens [codelens-flags] file[:line[:col]] [title] + +The codelens command lists or executes code lenses for the specified +file, or line within a file. A code lens is a command associated with +a position in the code. + +With an optional title argument, only code lenses matching that +title are considered. + +By default, the codelens command lists the available lenses for the +specified file or line within a file, including the title and +title of the command. With the -exec flag, the first matching command +is executed, and its output is printed to stdout. + +Example: + + $ gopls codelens a_test.go # list code lenses in a file + $ gopls codelens a_test.go:10 # list code lenses on line 10 + $ gopls codelens a_test.go gopls.test # list gopls.test commands + $ gopls codelens -run a_test.go:10 gopls.test # run a specific test + +codelens-flags: + -d,-diff + display diffs instead of edited file content + -exec + execute the first matching code lens + -l,-list + display names of edited files + -preserve + with -write, make copies of original files + -w,-write + write edited content to source files diff --git a/gopls/internal/cmd/usage/definition.hlp b/gopls/internal/cmd/usage/definition.hlp new file mode 100644 index 00000000000..80825c3b049 --- /dev/null +++ b/gopls/internal/cmd/usage/definition.hlp @@ -0,0 +1,15 @@ +show declaration of selected identifier + +Usage: + gopls [flags] definition [definition-flags] + +Example: show the definition of the identifier at syntax at offset 44 in this file (flag.FlagSet): + + $ gopls definition internal/cmd/definition.go:44:47 + $ gopls definition internal/cmd/definition.go:#1270 + +definition-flags: + -json + emit output in JSON format + -markdown + support markdown in responses diff --git a/gopls/internal/cmd/usage/execute.hlp b/gopls/internal/cmd/usage/execute.hlp new file mode 100644 index
00000000000..9fb9ece2988 --- /dev/null +++ b/gopls/internal/cmd/usage/execute.hlp @@ -0,0 +1,30 @@ +Execute a gopls custom LSP command + +Usage: + gopls [flags] execute [flags] command argument... + +The execute command sends an LSP ExecuteCommand request to gopls, +with a set of optional JSON argument values. +Some commands return a result, also JSON. + +Available commands are documented at: + + https://github.com/golang/tools/blob/master/gopls/doc/commands.md + +This interface is experimental and commands may change or disappear without notice. + +Examples: + + $ gopls execute gopls.add_import '{"ImportPath": "fmt", "URI": "file:///hello.go"}' + $ gopls execute gopls.run_tests '{"URI": "file:///a_test.go", "Tests": ["Test"]}' + $ gopls execute gopls.list_known_packages '{"URI": "file:///hello.go"}' + +execute-flags: + -d,-diff + display diffs instead of edited file content + -l,-list + display names of edited files + -preserve + with -write, make copies of original files + -w,-write + write edited content to source files diff --git a/gopls/internal/lsp/cmd/usage/fix.hlp b/gopls/internal/cmd/usage/fix.hlp similarity index 90% rename from gopls/internal/lsp/cmd/usage/fix.hlp rename to gopls/internal/cmd/usage/fix.hlp index 39e464da59d..68367b1fa13 100644 --- a/gopls/internal/lsp/cmd/usage/fix.hlp +++ b/gopls/internal/cmd/usage/fix.hlp @@ -5,7 +5,7 @@ Usage: Example: apply fixes to this file, rewriting it: - $ gopls fix -a -w internal/lsp/cmd/check.go + $ gopls fix -a -w internal/cmd/check.go The -a (-all) flag causes all fixes, not just preferred ones, to be applied, but since no fixes are currently preferred, this flag is @@ -29,7 +29,7 @@ enumerate all kinds. 
Example: apply any "refactor.rewrite" fixes at the specific byte offset within this file: - $ gopls fix -a internal/lsp/cmd/check.go:#43 refactor.rewrite + $ gopls fix -a internal/cmd/check.go:#43 refactor.rewrite fix-flags: -a,-all diff --git a/gopls/internal/lsp/cmd/usage/folding_ranges.hlp b/gopls/internal/cmd/usage/folding_ranges.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/folding_ranges.hlp rename to gopls/internal/cmd/usage/folding_ranges.hlp diff --git a/gopls/internal/lsp/cmd/usage/format.hlp b/gopls/internal/cmd/usage/format.hlp similarity index 90% rename from gopls/internal/lsp/cmd/usage/format.hlp rename to gopls/internal/cmd/usage/format.hlp index fedb5895282..389532babf4 100644 --- a/gopls/internal/lsp/cmd/usage/format.hlp +++ b/gopls/internal/cmd/usage/format.hlp @@ -7,7 +7,7 @@ The arguments supplied may be simple file names, or ranges within files. Example: reformat this file: - $ gopls format -w internal/lsp/cmd/check.go + $ gopls format -w internal/cmd/check.go format-flags: -d,-diff diff --git a/gopls/internal/lsp/cmd/usage/help.hlp b/gopls/internal/cmd/usage/help.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/help.hlp rename to gopls/internal/cmd/usage/help.hlp diff --git a/gopls/internal/lsp/cmd/usage/highlight.hlp b/gopls/internal/cmd/usage/highlight.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/highlight.hlp rename to gopls/internal/cmd/usage/highlight.hlp diff --git a/gopls/internal/lsp/cmd/usage/implementation.hlp b/gopls/internal/cmd/usage/implementation.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/implementation.hlp rename to gopls/internal/cmd/usage/implementation.hlp diff --git a/gopls/internal/lsp/cmd/usage/imports.hlp b/gopls/internal/cmd/usage/imports.hlp similarity index 88% rename from gopls/internal/lsp/cmd/usage/imports.hlp rename to gopls/internal/cmd/usage/imports.hlp index 6e0517296ec..789c832f471 100644 --- 
a/gopls/internal/lsp/cmd/usage/imports.hlp +++ b/gopls/internal/cmd/usage/imports.hlp @@ -5,7 +5,7 @@ Usage: Example: update imports statements in a file: - $ gopls imports -w internal/lsp/cmd/check.go + $ gopls imports -w internal/cmd/check.go imports-flags: -d,-diff diff --git a/gopls/internal/lsp/cmd/usage/inspect.hlp b/gopls/internal/cmd/usage/inspect.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/inspect.hlp rename to gopls/internal/cmd/usage/inspect.hlp diff --git a/gopls/internal/lsp/cmd/usage/licenses.hlp b/gopls/internal/cmd/usage/licenses.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/licenses.hlp rename to gopls/internal/cmd/usage/licenses.hlp diff --git a/gopls/internal/lsp/cmd/usage/links.hlp b/gopls/internal/cmd/usage/links.hlp similarity index 81% rename from gopls/internal/lsp/cmd/usage/links.hlp rename to gopls/internal/cmd/usage/links.hlp index 7f7612ce792..1550625961d 100644 --- a/gopls/internal/lsp/cmd/usage/links.hlp +++ b/gopls/internal/cmd/usage/links.hlp @@ -5,7 +5,7 @@ Usage: Example: list links contained within a file: - $ gopls links internal/lsp/cmd/check.go + $ gopls links internal/cmd/check.go links-flags: -json diff --git a/gopls/internal/lsp/cmd/usage/prepare_rename.hlp b/gopls/internal/cmd/usage/prepare_rename.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/prepare_rename.hlp rename to gopls/internal/cmd/usage/prepare_rename.hlp diff --git a/gopls/internal/lsp/cmd/usage/references.hlp b/gopls/internal/cmd/usage/references.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/references.hlp rename to gopls/internal/cmd/usage/references.hlp diff --git a/gopls/internal/lsp/cmd/usage/remote.hlp b/gopls/internal/cmd/usage/remote.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/remote.hlp rename to gopls/internal/cmd/usage/remote.hlp diff --git a/gopls/internal/lsp/cmd/usage/rename.hlp b/gopls/internal/cmd/usage/rename.hlp similarity index 100% 
rename from gopls/internal/lsp/cmd/usage/rename.hlp rename to gopls/internal/cmd/usage/rename.hlp diff --git a/gopls/internal/lsp/cmd/usage/semtok.hlp b/gopls/internal/cmd/usage/semtok.hlp similarity index 76% rename from gopls/internal/lsp/cmd/usage/semtok.hlp rename to gopls/internal/cmd/usage/semtok.hlp index 459ed596c87..e368212f255 100644 --- a/gopls/internal/lsp/cmd/usage/semtok.hlp +++ b/gopls/internal/cmd/usage/semtok.hlp @@ -5,4 +5,4 @@ Usage: Example: show the semantic tokens for this file: - $ gopls semtok internal/lsp/cmd/semtok.go + $ gopls semtok internal/cmd/semtok.go diff --git a/gopls/internal/lsp/cmd/usage/serve.hlp b/gopls/internal/cmd/usage/serve.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/serve.hlp rename to gopls/internal/cmd/usage/serve.hlp diff --git a/gopls/internal/lsp/cmd/usage/signature.hlp b/gopls/internal/cmd/usage/signature.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/signature.hlp rename to gopls/internal/cmd/usage/signature.hlp diff --git a/gopls/internal/lsp/cmd/usage/stats.hlp b/gopls/internal/cmd/usage/stats.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/stats.hlp rename to gopls/internal/cmd/usage/stats.hlp diff --git a/gopls/internal/lsp/cmd/usage/symbols.hlp b/gopls/internal/cmd/usage/symbols.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/symbols.hlp rename to gopls/internal/cmd/usage/symbols.hlp diff --git a/gopls/internal/lsp/cmd/usage/usage-v.hlp b/gopls/internal/cmd/usage/usage-v.hlp similarity index 95% rename from gopls/internal/lsp/cmd/usage/usage-v.hlp rename to gopls/internal/cmd/usage/usage-v.hlp index 0edb37e9300..46c3f57b02d 100644 --- a/gopls/internal/lsp/cmd/usage/usage-v.hlp +++ b/gopls/internal/cmd/usage/usage-v.hlp @@ -15,13 +15,15 @@ Main version print the gopls version information bug report a bug in gopls help print usage information for subcommands - api-json print json describing gopls API + api-json print JSON describing 
gopls API licenses print licenses of included software Features call_hierarchy display selected identifier's call hierarchy check show diagnostic results for the specified file + codelens List or execute code lenses for a file definition show declaration of selected identifier + execute Execute a gopls custom LSP command folding_ranges display selected file's folding ranges format format the code according to the go standard highlight display selected identifier's highlights diff --git a/gopls/internal/lsp/cmd/usage/usage.hlp b/gopls/internal/cmd/usage/usage.hlp similarity index 95% rename from gopls/internal/lsp/cmd/usage/usage.hlp rename to gopls/internal/cmd/usage/usage.hlp index c9cc12a943f..e791bbc0d55 100644 --- a/gopls/internal/lsp/cmd/usage/usage.hlp +++ b/gopls/internal/cmd/usage/usage.hlp @@ -15,13 +15,15 @@ Main version print the gopls version information bug report a bug in gopls help print usage information for subcommands - api-json print json describing gopls API + api-json print JSON describing gopls API licenses print licenses of included software Features call_hierarchy display selected identifier's call hierarchy check show diagnostic results for the specified file + codelens List or execute code lenses for a file definition show declaration of selected identifier + execute Execute a gopls custom LSP command folding_ranges display selected file's folding ranges format format the code according to the go standard highlight display selected identifier's highlights diff --git a/gopls/internal/lsp/cmd/usage/version.hlp b/gopls/internal/cmd/usage/version.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/version.hlp rename to gopls/internal/cmd/usage/version.hlp diff --git a/gopls/internal/lsp/cmd/usage/vulncheck.hlp b/gopls/internal/cmd/usage/vulncheck.hlp similarity index 76% rename from gopls/internal/lsp/cmd/usage/vulncheck.hlp rename to gopls/internal/cmd/usage/vulncheck.hlp index d16cb130871..7f2818dd40c 100644 --- 
a/gopls/internal/lsp/cmd/usage/vulncheck.hlp +++ b/gopls/internal/cmd/usage/vulncheck.hlp @@ -6,7 +6,7 @@ Usage: WARNING: this command is for internal-use only. By default, the command outputs a JSON-encoded - golang.org/x/tools/gopls/internal/lsp/command.VulncheckResult + golang.org/x/tools/gopls/internal/protocol/command.VulncheckResult message. Example: $ gopls vulncheck diff --git a/gopls/internal/lsp/cmd/usage/workspace_symbol.hlp b/gopls/internal/cmd/usage/workspace_symbol.hlp similarity index 100% rename from gopls/internal/lsp/cmd/usage/workspace_symbol.hlp rename to gopls/internal/cmd/usage/workspace_symbol.hlp diff --git a/gopls/internal/lsp/cmd/vulncheck.go b/gopls/internal/cmd/vulncheck.go similarity index 94% rename from gopls/internal/lsp/cmd/vulncheck.go rename to gopls/internal/cmd/vulncheck.go index 855b9eef830..7babf0d14d7 100644 --- a/gopls/internal/lsp/cmd/vulncheck.go +++ b/gopls/internal/cmd/vulncheck.go @@ -30,7 +30,7 @@ func (v *vulncheck) DetailedHelp(f *flag.FlagSet) { WARNING: this command is for internal-use only. By default, the command outputs a JSON-encoded - golang.org/x/tools/gopls/internal/lsp/command.VulncheckResult + golang.org/x/tools/gopls/internal/protocol/command.VulncheckResult message. Example: $ gopls vulncheck diff --git a/gopls/internal/cmd/workspace_symbol.go b/gopls/internal/cmd/workspace_symbol.go new file mode 100644 index 00000000000..9fa7526a24d --- /dev/null +++ b/gopls/internal/cmd/workspace_symbol.go @@ -0,0 +1,89 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmd + +import ( + "context" + "flag" + "fmt" + "strings" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/internal/tool" +) + +// workspaceSymbol implements the workspace_symbol verb for gopls. 
+type workspaceSymbol struct { + Matcher string `flag:"matcher" help:"specifies the type of matcher: fuzzy, fastfuzzy, casesensitive, or caseinsensitive.\nThe default is caseinsensitive."` + + app *Application +} + +func (r *workspaceSymbol) Name() string { return "workspace_symbol" } +func (r *workspaceSymbol) Parent() string { return r.app.Name() } +func (r *workspaceSymbol) Usage() string { return "[workspace_symbol-flags] " } +func (r *workspaceSymbol) ShortHelp() string { return "search symbols in workspace" } +func (r *workspaceSymbol) DetailedHelp(f *flag.FlagSet) { + fmt.Fprint(f.Output(), ` +Example: + + $ gopls workspace_symbol -matcher fuzzy 'wsymbols' + +workspace_symbol-flags: +`) + printFlagDefaults(f) +} + +func (r *workspaceSymbol) Run(ctx context.Context, args ...string) error { + if len(args) != 1 { + return tool.CommandLineErrorf("workspace_symbol expects 1 argument") + } + + opts := r.app.options + r.app.options = func(o *settings.Options) { + if opts != nil { + opts(o) + } + switch strings.ToLower(r.Matcher) { + case "fuzzy": + o.SymbolMatcher = settings.SymbolFuzzy + case "casesensitive": + o.SymbolMatcher = settings.SymbolCaseSensitive + case "fastfuzzy": + o.SymbolMatcher = settings.SymbolFastFuzzy + default: + o.SymbolMatcher = settings.SymbolCaseInsensitive + } + } + + conn, err := r.app.connect(ctx, nil) + if err != nil { + return err + } + defer conn.terminate(ctx) + + p := protocol.WorkspaceSymbolParams{ + Query: args[0], + } + + symbols, err := conn.Symbol(ctx, &p) + if err != nil { + return err + } + for _, s := range symbols { + f, err := conn.openFile(ctx, s.Location.URI) + if err != nil { + return err + } + span, err := f.locationSpan(s.Location) + if err != nil { + return err + } + fmt.Printf("%s %s %s\n", span, s.Name, s.Kind) + } + + return nil +} diff --git a/gopls/internal/coverage/coverage.go b/gopls/internal/coverage/coverage.go deleted file mode 100644 index 9a7d219945e..00000000000 --- a/gopls/internal/coverage/coverage.go 
+++ /dev/null @@ -1,266 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go.1.16 -// +build go.1.16 - -// Running this program in the tools directory will produce a coverage file /tmp/cover.out -// and a coverage report for all the packages under internal/lsp, accumulated by all the tests -// under gopls. -// -// -o controls where the coverage file is written, defaulting to /tmp/cover.out -// -i coverage-file will generate the report from an existing coverage file -// -v controls verbosity (0: only report coverage, 1: report as each directory is finished, -// -// 2: report on each test, 3: more details, 4: too much) -// -// -t tests only tests packages in the given comma-separated list of directories in gopls. -// -// The names should start with ., as in ./internal/regtest/bench -// -// -run tests. If set, -run tests is passed on to the go test command. -// -// Despite gopls' use of goroutines, the counts are almost deterministic. -package main - -import ( - "bytes" - "encoding/json" - "flag" - "fmt" - "log" - "os" - "os/exec" - "path/filepath" - "sort" - "strings" - "time" - - "golang.org/x/tools/cover" -) - -var ( - proFile = flag.String("i", "", "existing profile file") - outFile = flag.String("o", "/tmp/cover.out", "where to write the coverage file") - verbose = flag.Int("v", 0, "how much detail to print as tests are running") - tests = flag.String("t", "", "list of tests to run") - run = flag.String("run", "", "value of -run to pass to go test") -) - -func main() { - log.SetFlags(log.Lshortfile) - flag.Parse() - - if *proFile != "" { - report(*proFile) - return - } - - checkCwd() - // find the packages under gopls containing tests - tests := listDirs("gopls") - tests = onlyTests(tests) - tests = realTestName(tests) - - // report coverage for packages under internal/lsp - parg := "golang.org/x/tools/gopls/internal/lsp/..." 
- - accum := []string{} - seen := make(map[string]bool) - now := time.Now() - for _, toRun := range tests { - if excluded(toRun) { - continue - } - x := runTest(toRun, parg) - if *verbose > 0 { - fmt.Printf("finished %s %.1fs\n", toRun, time.Since(now).Seconds()) - } - lines := bytes.Split(x, []byte{'\n'}) - for _, l := range lines { - if len(l) == 0 { - continue - } - if !seen[string(l)] { - // not accumulating counts, so only works for mode:set - seen[string(l)] = true - accum = append(accum, string(l)) - } - } - } - sort.Strings(accum[1:]) - if err := os.WriteFile(*outFile, []byte(strings.Join(accum, "\n")), 0644); err != nil { - log.Print(err) - } - report(*outFile) -} - -type result struct { - Time time.Time - Test string - Action string - Package string - Output string - Elapsed float64 -} - -func runTest(tName, parg string) []byte { - args := []string{"test", "-short", "-coverpkg", parg, "-coverprofile", *outFile, - "-json"} - if *run != "" { - args = append(args, fmt.Sprintf("-run=%s", *run)) - } - args = append(args, tName) - cmd := exec.Command("go", args...) 
- cmd.Dir = "./gopls" - ans, err := cmd.Output() - if *verbose > 1 { - got := strings.Split(string(ans), "\n") - for _, g := range got { - if g == "" { - continue - } - var m result - if err := json.Unmarshal([]byte(g), &m); err != nil { - log.Printf("%T/%v", err, err) // shouldn't happen - continue - } - maybePrint(m) - } - } - if err != nil { - log.Printf("%s: %q, cmd=%s", tName, ans, cmd.String()) - } - buf, err := os.ReadFile(*outFile) - if err != nil { - log.Fatal(err) - } - return buf -} - -func report(fn string) { - profs, err := cover.ParseProfiles(fn) - if err != nil { - log.Fatal(err) - } - for _, p := range profs { - statements, counts := 0, 0 - for _, x := range p.Blocks { - statements += x.NumStmt - if x.Count != 0 { - counts += x.NumStmt // sic: if any were executed, all were - } - } - pc := 100 * float64(counts) / float64(statements) - fmt.Printf("%3.0f%% %3d/%3d %s\n", pc, counts, statements, p.FileName) - } -} - -var todo []string // tests to run - -func excluded(tname string) bool { - if *tests == "" { // run all tests - return false - } - if todo == nil { - todo = strings.Split(*tests, ",") - } - for _, nm := range todo { - if tname == nm { // run this test - return false - } - } - // not in list, skip it - return true -} - -// should m.Package be printed sometime? 
-func maybePrint(m result) { - switch m.Action { - case "pass", "fail", "skip": - fmt.Printf("%s %s %.3f\n", m.Action, m.Test, m.Elapsed) - case "run": - if *verbose > 2 { - fmt.Printf("%s %s %.3f\n", m.Action, m.Test, m.Elapsed) - } - case "output": - if *verbose > 3 { - fmt.Printf("%s %s %q %.3f\n", m.Action, m.Test, m.Output, m.Elapsed) - } - case "pause", "cont": - if *verbose > 2 { - fmt.Printf("%s %s %.3f\n", m.Action, m.Test, m.Elapsed) - } - default: - fmt.Printf("%#v\n", m) - log.Fatalf("unknown action %s\n", m.Action) - } -} - -// return only the directories that contain tests -func onlyTests(s []string) []string { - ans := []string{} -outer: - for _, d := range s { - files, err := os.ReadDir(d) - if err != nil { - log.Fatalf("%s: %v", d, err) - } - for _, de := range files { - if strings.Contains(de.Name(), "_test.go") { - ans = append(ans, d) - continue outer - } - } - } - return ans -} - -// replace the prefix gopls/ with ./ as the tests are run in the gopls directory -func realTestName(p []string) []string { - ans := []string{} - for _, x := range p { - x = x[len("gopls/"):] - ans = append(ans, "./"+x) - } - return ans -} - -// make sure we start in a tools directory -func checkCwd() { - dir, err := os.Getwd() - if err != nil { - log.Fatal(err) - } - // we expect to be at the root of golang.org/x/tools - cmd := exec.Command("go", "list", "-m", "-f", "{{.Dir}}", "golang.org/x/tools") - buf, err := cmd.Output() - buf = bytes.Trim(buf, "\n \t") // remove \n at end - if err != nil { - log.Fatal(err) - } - if string(buf) != dir { - log.Fatalf("wrong directory: in %q, should be in %q", dir, string(buf)) - } - // and we expect gopls and internal/lsp as subdirectories - _, err = os.Stat("gopls") - if err != nil { - log.Fatalf("expected a gopls directory, %v", err) - } -} - -func listDirs(dir string) []string { - ans := []string{} - f := func(path string, dirEntry os.DirEntry, err error) error { - if strings.HasSuffix(path, "/testdata") || 
strings.HasSuffix(path, "/typescript") { - return filepath.SkipDir - } - if dirEntry.IsDir() { - ans = append(ans, path) - } - return nil - } - filepath.WalkDir(dir, f) - return ans -} diff --git a/gopls/internal/debug/info.go b/gopls/internal/debug/info.go new file mode 100644 index 00000000000..b2824d86f38 --- /dev/null +++ b/gopls/internal/debug/info.go @@ -0,0 +1,139 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package debug exports debug information for gopls. +package debug + +import ( + "context" + "encoding/json" + "fmt" + "io" + "os" + "runtime" + "runtime/debug" + "strings" + + "golang.org/x/tools/gopls/internal/version" +) + +type PrintMode int + +const ( + PlainText = PrintMode(iota) + Markdown + HTML + JSON +) + +// ServerVersion is the format used by gopls to report its version to the +// client. This format is structured so that the client can parse it easily. +type ServerVersion struct { + *debug.BuildInfo + Version string +} + +// VersionInfo returns the build info for the gopls process. If it was not +// built in module mode, we return a GOPATH-specific message with the +// hardcoded version. +func VersionInfo() *ServerVersion { + if info, ok := debug.ReadBuildInfo(); ok { + return &ServerVersion{ + Version: version.Version(), + BuildInfo: info, + } + } + return &ServerVersion{ + Version: version.Version(), + BuildInfo: &debug.BuildInfo{ + Path: "gopls, built in GOPATH mode", + GoVersion: runtime.Version(), + }, + } +} + +// PrintServerInfo writes HTML debug info to w for the Instance. 
+func (i *Instance) PrintServerInfo(ctx context.Context, w io.Writer) { + workDir, _ := os.Getwd() + section(w, HTML, "Server Instance", func() { + fmt.Fprintf(w, "Start time: %v\n", i.StartTime) + fmt.Fprintf(w, "LogFile: %s\n", i.Logfile) + fmt.Fprintf(w, "pid: %d\n", os.Getpid()) + fmt.Fprintf(w, "Working directory: %s\n", workDir) + fmt.Fprintf(w, "Address: %s\n", i.ServerAddress) + fmt.Fprintf(w, "Debug address: %s\n", i.DebugAddress()) + }) + PrintVersionInfo(ctx, w, true, HTML) + section(w, HTML, "Command Line", func() { + fmt.Fprintf(w, "cmdline") + }) +} + +// PrintVersionInfo writes version information to w, using the output format +// specified by mode. verbose controls whether additional information is +// written, including section headers. +func PrintVersionInfo(_ context.Context, w io.Writer, verbose bool, mode PrintMode) error { + info := VersionInfo() + if mode == JSON { + return printVersionInfoJSON(w, info) + } + + if !verbose { + printBuildInfo(w, info, false, mode) + return nil + } + section(w, mode, "Build info", func() { + printBuildInfo(w, info, true, mode) + }) + return nil +} + +func printVersionInfoJSON(w io.Writer, info *ServerVersion) error { + js, err := json.MarshalIndent(info, "", "\t") + if err != nil { + return err + } + _, err = fmt.Fprint(w, string(js)) + return err +} + +func section(w io.Writer, mode PrintMode, title string, body func()) { + switch mode { + case PlainText: + fmt.Fprintln(w, title) + fmt.Fprintln(w, strings.Repeat("-", len(title))) + body() + case Markdown: + fmt.Fprintf(w, "#### %s\n\n```\n", title) + body() + fmt.Fprintf(w, "```\n") + case HTML: + fmt.Fprintf(w, "

%s

\n
\n", title)
+		body()
+		fmt.Fprint(w, "
\n") + } +} + +func printBuildInfo(w io.Writer, info *ServerVersion, verbose bool, mode PrintMode) { + fmt.Fprintf(w, "%v %v\n", info.Path, version.Version()) + if !verbose { + return + } + printModuleInfo(w, info.Main, mode) + for _, dep := range info.Deps { + printModuleInfo(w, *dep, mode) + } + fmt.Fprintf(w, "go: %v\n", info.GoVersion) +} + +func printModuleInfo(w io.Writer, m debug.Module, _ PrintMode) { + fmt.Fprintf(w, " %s@%s", m.Path, m.Version) + if m.Sum != "" { + fmt.Fprintf(w, " %s", m.Sum) + } + if m.Replace != nil { + fmt.Fprintf(w, " => %v", m.Replace.Path) + } + fmt.Fprintf(w, "\n") +} diff --git a/gopls/internal/lsp/debug/info_test.go b/gopls/internal/debug/info_test.go similarity index 88% rename from gopls/internal/lsp/debug/info_test.go rename to gopls/internal/debug/info_test.go index 3bc9290c157..7f24b696682 100644 --- a/gopls/internal/lsp/debug/info_test.go +++ b/gopls/internal/debug/info_test.go @@ -11,6 +11,8 @@ import ( "encoding/json" "runtime" "testing" + + "golang.org/x/tools/gopls/internal/version" ) func TestPrintVersionInfoJSON(t *testing.T) { @@ -27,7 +29,7 @@ func TestPrintVersionInfoJSON(t *testing.T) { if g, w := got.GoVersion, runtime.Version(); g != w { t.Errorf("go version = %v, want %v", g, w) } - if g, w := got.Version, Version(); g != w { + if g, w := got.Version, version.Version(); g != w { t.Errorf("gopls version = %v, want %v", g, w) } // Other fields of BuildInfo may not be available during test. @@ -41,7 +43,7 @@ func TestPrintVersionInfoPlainText(t *testing.T) { res := buf.Bytes() // Other fields of BuildInfo may not be available during test. 
- wantGoplsVersion, wantGoVersion := Version(), runtime.Version() + wantGoplsVersion, wantGoVersion := version.Version(), runtime.Version() if !bytes.Contains(res, []byte(wantGoplsVersion)) || !bytes.Contains(res, []byte(wantGoVersion)) { t.Errorf("plaintext output = %q,\nwant (version: %v, go: %v)", res, wantGoplsVersion, wantGoVersion) } diff --git a/gopls/internal/lsp/debug/log/log.go b/gopls/internal/debug/log/log.go similarity index 100% rename from gopls/internal/lsp/debug/log/log.go rename to gopls/internal/debug/log/log.go diff --git a/gopls/internal/lsp/debug/metrics.go b/gopls/internal/debug/metrics.go similarity index 100% rename from gopls/internal/lsp/debug/metrics.go rename to gopls/internal/debug/metrics.go diff --git a/gopls/internal/lsp/debug/rpc.go b/gopls/internal/debug/rpc.go similarity index 93% rename from gopls/internal/lsp/debug/rpc.go rename to gopls/internal/debug/rpc.go index 5610021479c..0fee0f4a435 100644 --- a/gopls/internal/lsp/debug/rpc.go +++ b/gopls/internal/debug/rpc.go @@ -84,19 +84,19 @@ func (r *Rpcs) ProcessEvent(ctx context.Context, ev core.Event, lm label.Map) co defer r.mu.Unlock() switch { case event.IsStart(ev): - if _, stats := r.getRPCSpan(ctx, ev); stats != nil { + if _, stats := r.getRPCSpan(ctx); stats != nil { stats.Started++ } case event.IsEnd(ev): - span, stats := r.getRPCSpan(ctx, ev) + span, stats := r.getRPCSpan(ctx) if stats != nil { - endRPC(ctx, ev, span, stats) + endRPC(span, stats) } case event.IsMetric(ev): sent := byteUnits(tag.SentBytes.Get(lm)) rec := byteUnits(tag.ReceivedBytes.Get(lm)) if sent != 0 || rec != 0 { - if _, stats := r.getRPCSpan(ctx, ev); stats != nil { + if _, stats := r.getRPCSpan(ctx); stats != nil { stats.Sent += sent stats.Received += rec } @@ -105,7 +105,7 @@ func (r *Rpcs) ProcessEvent(ctx context.Context, ev core.Event, lm label.Map) co return ctx } -func endRPC(ctx context.Context, ev core.Event, span *export.Span, stats *rpcStats) { +func endRPC(span *export.Span, stats 
*rpcStats) { // update the basic counts stats.Completed++ @@ -152,7 +152,7 @@ func endRPC(ctx context.Context, ev core.Event, span *export.Span, stats *rpcSta } } -func (r *Rpcs) getRPCSpan(ctx context.Context, ev core.Event) (*export.Span, *rpcStats) { +func (r *Rpcs) getRPCSpan(ctx context.Context) (*export.Span, *rpcStats) { // get the span span := export.GetSpan(ctx) if span == nil { diff --git a/gopls/internal/debug/serve.go b/gopls/internal/debug/serve.go new file mode 100644 index 00000000000..62e416829fe --- /dev/null +++ b/gopls/internal/debug/serve.go @@ -0,0 +1,850 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package debug + +import ( + "bytes" + "context" + "errors" + "fmt" + "html/template" + "io" + stdlog "log" + "net" + "net/http" + "net/http/pprof" + "os" + "path" + "path/filepath" + "runtime" + "strconv" + "strings" + "sync" + "time" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/debug/log" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/core" + "golang.org/x/tools/internal/event/export" + "golang.org/x/tools/internal/event/export/metric" + "golang.org/x/tools/internal/event/export/ocagent" + "golang.org/x/tools/internal/event/export/prometheus" + "golang.org/x/tools/internal/event/keys" + "golang.org/x/tools/internal/event/label" + "golang.org/x/tools/internal/event/tag" +) + +type contextKeyType int + +const ( + instanceKey contextKeyType = iota + traceKey +) + +// An Instance holds all debug information associated with a gopls instance. 
+type Instance struct { + Logfile string + StartTime time.Time + ServerAddress string + OCAgentConfig string + + LogWriter io.Writer + + exporter event.Exporter + + ocagent *ocagent.Exporter + prometheus *prometheus.Exporter + rpcs *Rpcs + traces *traces + State *State + + serveMu sync.Mutex + debugAddress string + listenedDebugAddress string +} + +// State holds debugging information related to the server state. +type State struct { + mu sync.Mutex + clients []*Client + servers []*Server +} + +func (st *State) Bugs() []bug.Bug { + return bug.List() +} + +// Caches returns the set of Cache objects currently being served. +func (st *State) Caches() []*cache.Cache { + var caches []*cache.Cache + seen := make(map[string]struct{}) + for _, client := range st.Clients() { + cache := client.Session.Cache() + if _, found := seen[cache.ID()]; found { + continue + } + seen[cache.ID()] = struct{}{} + caches = append(caches, cache) + } + return caches +} + +// Cache returns the Cache that matches the supplied id. +func (st *State) Cache(id string) *cache.Cache { + for _, c := range st.Caches() { + if c.ID() == id { + return c + } + } + return nil +} + +// Analysis returns the global Analysis template value. +func (st *State) Analysis() (_ analysisTmpl) { return } + +type analysisTmpl struct{} + +func (analysisTmpl) AnalyzerRunTimes() []cache.LabelDuration { return cache.AnalyzerRunTimes() } + +// Sessions returns the set of Session objects currently being served. +func (st *State) Sessions() []*cache.Session { + var sessions []*cache.Session + for _, client := range st.Clients() { + sessions = append(sessions, client.Session) + } + return sessions +} + +// Session returns the Session that matches the supplied id. +func (st *State) Session(id string) *cache.Session { + for _, s := range st.Sessions() { + if s.ID() == id { + return s + } + } + return nil +} + +// Views returns the set of View objects currently being served. 
+func (st *State) Views() []*cache.View { + var views []*cache.View + for _, s := range st.Sessions() { + views = append(views, s.Views()...) + } + return views +} + +// View returns the View that matches the supplied id. +func (st *State) View(id string) *cache.View { + for _, v := range st.Views() { + if v.ID() == id { + return v + } + } + return nil +} + +// Clients returns the set of Clients currently being served. +func (st *State) Clients() []*Client { + st.mu.Lock() + defer st.mu.Unlock() + clients := make([]*Client, len(st.clients)) + copy(clients, st.clients) + return clients +} + +// Client returns the Client matching the supplied id. +func (st *State) Client(id string) *Client { + for _, c := range st.Clients() { + if c.Session.ID() == id { + return c + } + } + return nil +} + +// Servers returns the set of Servers the instance is currently connected to. +func (st *State) Servers() []*Server { + st.mu.Lock() + defer st.mu.Unlock() + servers := make([]*Server, len(st.servers)) + copy(servers, st.servers) + return servers +} + +// A Client is an incoming connection from a remote client. +type Client struct { + Session *cache.Session + DebugAddress string + Logfile string + GoplsPath string + ServerID string + Service protocol.Server +} + +// A Server is an outgoing connection to a remote LSP server. +type Server struct { + ID string + DebugAddress string + Logfile string + GoplsPath string + ClientID string +} + +// addClient adds a client to the set being served. +func (st *State) addClient(session *cache.Session) { + st.mu.Lock() + defer st.mu.Unlock() + st.clients = append(st.clients, &Client{Session: session}) +} + +// dropClient removes a client from the set being served. 
+func (st *State) dropClient(session *cache.Session) { + st.mu.Lock() + defer st.mu.Unlock() + for i, c := range st.clients { + if c.Session == session { + copy(st.clients[i:], st.clients[i+1:]) + st.clients[len(st.clients)-1] = nil + st.clients = st.clients[:len(st.clients)-1] + return + } + } +} + +// updateServer updates a server to the set being queried. In practice, there should +// be at most one remote server. +func (st *State) updateServer(server *Server) { + st.mu.Lock() + defer st.mu.Unlock() + for i, existing := range st.servers { + if existing.ID == server.ID { + // Replace, rather than mutate, to avoid a race. + newServers := make([]*Server, len(st.servers)) + copy(newServers, st.servers[:i]) + newServers[i] = server + copy(newServers[i+1:], st.servers[i+1:]) + st.servers = newServers + return + } + } + st.servers = append(st.servers, server) +} + +// dropServer drops a server from the set being queried. +func (st *State) dropServer(id string) { + st.mu.Lock() + defer st.mu.Unlock() + for i, s := range st.servers { + if s.ID == id { + copy(st.servers[i:], st.servers[i+1:]) + st.servers[len(st.servers)-1] = nil + st.servers = st.servers[:len(st.servers)-1] + return + } + } +} + +// an http.ResponseWriter that filters writes +type filterResponse struct { + w http.ResponseWriter + edit func([]byte) []byte +} + +func (c filterResponse) Header() http.Header { + return c.w.Header() +} + +func (c filterResponse) Write(buf []byte) (int, error) { + ans := c.edit(buf) + return c.w.Write(ans) +} + +func (c filterResponse) WriteHeader(n int) { + c.w.WriteHeader(n) +} + +// replace annoying nuls by spaces +func cmdline(w http.ResponseWriter, r *http.Request) { + fake := filterResponse{ + w: w, + edit: func(buf []byte) []byte { + return bytes.ReplaceAll(buf, []byte{0}, []byte{' '}) + }, + } + pprof.Cmdline(fake, r) +} + +func (i *Instance) getCache(r *http.Request) interface{} { + return i.State.Cache(path.Base(r.URL.Path)) +} + +func (i *Instance) getAnalysis(r 
*http.Request) interface{} { + return i.State.Analysis() +} + +func (i *Instance) getSession(r *http.Request) interface{} { + return i.State.Session(path.Base(r.URL.Path)) +} + +func (i *Instance) getClient(r *http.Request) interface{} { + return i.State.Client(path.Base(r.URL.Path)) +} + +func (i *Instance) getServer(r *http.Request) interface{} { + i.State.mu.Lock() + defer i.State.mu.Unlock() + id := path.Base(r.URL.Path) + for _, s := range i.State.servers { + if s.ID == id { + return s + } + } + return nil +} + +func (i *Instance) getView(r *http.Request) interface{} { + return i.State.View(path.Base(r.URL.Path)) +} + +func (i *Instance) getFile(r *http.Request) interface{} { + identifier := path.Base(r.URL.Path) + sid := path.Base(path.Dir(r.URL.Path)) + s := i.State.Session(sid) + if s == nil { + return nil + } + for _, o := range s.Overlays() { + // TODO(adonovan): understand and document this comparison. + if o.Identity().Hash.String() == identifier { + return o + } + } + return nil +} + +func (i *Instance) getInfo(r *http.Request) interface{} { + buf := &bytes.Buffer{} + i.PrintServerInfo(r.Context(), buf) + return template.HTML(buf.String()) +} + +func (i *Instance) AddService(s protocol.Server, session *cache.Session) { + for _, c := range i.State.clients { + if c.Session == session { + c.Service = s + return + } + } + stdlog.Printf("unable to find a Client to add the protocol.Server to") +} + +func getMemory(_ *http.Request) interface{} { + var m runtime.MemStats + runtime.ReadMemStats(&m) + return m +} + +func init() { + event.SetExporter(makeGlobalExporter(os.Stderr)) +} + +func GetInstance(ctx context.Context) *Instance { + if ctx == nil { + return nil + } + v := ctx.Value(instanceKey) + if v == nil { + return nil + } + return v.(*Instance) +} + +// WithInstance creates debug instance ready for use using the supplied +// configuration and stores it in the returned context. 
+func WithInstance(ctx context.Context, agent string) context.Context { + i := &Instance{ + StartTime: time.Now(), + OCAgentConfig: agent, + } + i.LogWriter = os.Stderr + ocConfig := ocagent.Discover() + //TODO: we should not need to adjust the discovered configuration + ocConfig.Address = i.OCAgentConfig + i.ocagent = ocagent.Connect(ocConfig) + i.prometheus = prometheus.New() + i.rpcs = &Rpcs{} + i.traces = &traces{} + i.State = &State{} + i.exporter = makeInstanceExporter(i) + return context.WithValue(ctx, instanceKey, i) +} + +// SetLogFile sets the logfile for use with this instance. +func (i *Instance) SetLogFile(logfile string, isDaemon bool) (func(), error) { + // TODO: probably a better solution for deferring closure to the caller would + // be for the debug instance to itself be closed, but this fixes the + // immediate bug of logs not being captured. + closeLog := func() {} + if logfile != "" { + if logfile == "auto" { + if isDaemon { + logfile = filepath.Join(os.TempDir(), fmt.Sprintf("gopls-daemon-%d.log", os.Getpid())) + } else { + logfile = filepath.Join(os.TempDir(), fmt.Sprintf("gopls-%d.log", os.Getpid())) + } + } + f, err := os.Create(logfile) + if err != nil { + return nil, fmt.Errorf("unable to create log file: %w", err) + } + closeLog = func() { + defer f.Close() + } + stdlog.SetOutput(io.MultiWriter(os.Stderr, f)) + i.LogWriter = f + } + i.Logfile = logfile + return closeLog, nil +} + +// Serve starts and runs a debug server in the background on the given addr. +// It also logs the port the server starts on, to allow for :0 auto assigned +// ports. +func (i *Instance) Serve(ctx context.Context, addr string) (string, error) { + stdlog.SetFlags(stdlog.Lshortfile) + if addr == "" { + return "", nil + } + i.serveMu.Lock() + defer i.serveMu.Unlock() + + if i.listenedDebugAddress != "" { + // Already serving. Return the bound address. 
+ return i.listenedDebugAddress, nil + } + + i.debugAddress = addr + listener, err := net.Listen("tcp", i.debugAddress) + if err != nil { + return "", err + } + i.listenedDebugAddress = listener.Addr().String() + + port := listener.Addr().(*net.TCPAddr).Port + if strings.HasSuffix(i.debugAddress, ":0") { + stdlog.Printf("debug server listening at http://localhost:%d", port) + } + event.Log(ctx, "Debug serving", tag.Port.Of(port)) + go func() { + mux := http.NewServeMux() + mux.HandleFunc("/", render(MainTmpl, func(*http.Request) interface{} { return i })) + mux.HandleFunc("/debug/", render(DebugTmpl, nil)) + mux.HandleFunc("/debug/pprof/", pprof.Index) + mux.HandleFunc("/debug/pprof/cmdline", cmdline) + mux.HandleFunc("/debug/pprof/profile", pprof.Profile) + mux.HandleFunc("/debug/pprof/symbol", pprof.Symbol) + mux.HandleFunc("/debug/pprof/trace", pprof.Trace) + if i.prometheus != nil { + mux.HandleFunc("/metrics/", i.prometheus.Serve) + } + if i.rpcs != nil { + mux.HandleFunc("/rpc/", render(RPCTmpl, i.rpcs.getData)) + } + if i.traces != nil { + mux.HandleFunc("/trace/", render(TraceTmpl, i.traces.getData)) + } + mux.HandleFunc("/analysis/", render(AnalysisTmpl, i.getAnalysis)) + mux.HandleFunc("/cache/", render(CacheTmpl, i.getCache)) + mux.HandleFunc("/session/", render(SessionTmpl, i.getSession)) + mux.HandleFunc("/client/", render(ClientTmpl, i.getClient)) + mux.HandleFunc("/server/", render(ServerTmpl, i.getServer)) + mux.HandleFunc("/file/", render(FileTmpl, i.getFile)) + mux.HandleFunc("/info", render(InfoTmpl, i.getInfo)) + mux.HandleFunc("/memory", render(MemoryTmpl, getMemory)) + + // Internal debugging helpers. 
+ mux.HandleFunc("/gc", func(w http.ResponseWriter, r *http.Request) { + runtime.GC() + runtime.GC() + runtime.GC() + http.Redirect(w, r, "/memory", http.StatusTemporaryRedirect) + }) + mux.HandleFunc("/_makeabug", func(w http.ResponseWriter, r *http.Request) { + bug.Report("bug here") + http.Error(w, "made a bug", http.StatusOK) + }) + + if err := http.Serve(listener, mux); err != nil { + event.Error(ctx, "Debug server failed", err) + return + } + event.Log(ctx, "Debug server finished") + }() + return i.listenedDebugAddress, nil +} + +func (i *Instance) DebugAddress() string { + i.serveMu.Lock() + defer i.serveMu.Unlock() + return i.debugAddress +} + +func (i *Instance) ListenedDebugAddress() string { + i.serveMu.Lock() + defer i.serveMu.Unlock() + return i.listenedDebugAddress +} + +func makeGlobalExporter(stderr io.Writer) event.Exporter { + p := export.Printer{} + var pMu sync.Mutex + return func(ctx context.Context, ev core.Event, lm label.Map) context.Context { + i := GetInstance(ctx) + + if event.IsLog(ev) { + // Don't log context cancellation errors. + if err := keys.Err.Get(ev); errors.Is(err, context.Canceled) { + return ctx + } + // Make sure any log messages without an instance go to stderr. + if i == nil { + pMu.Lock() + p.WriteEvent(stderr, ev, lm) + pMu.Unlock() + } + level := log.LabeledLevel(lm) + // Exclude trace logs from LSP logs. 
+ if level < log.Trace { + ctx = protocol.LogEvent(ctx, ev, lm, messageType(level)) + } + } + if i == nil { + return ctx + } + return i.exporter(ctx, ev, lm) + } +} + +func messageType(l log.Level) protocol.MessageType { + switch l { + case log.Error: + return protocol.Error + case log.Warning: + return protocol.Warning + case log.Debug: + return protocol.Log + } + return protocol.Info +} + +func makeInstanceExporter(i *Instance) event.Exporter { + exporter := func(ctx context.Context, ev core.Event, lm label.Map) context.Context { + if i.ocagent != nil { + ctx = i.ocagent.ProcessEvent(ctx, ev, lm) + } + if i.prometheus != nil { + ctx = i.prometheus.ProcessEvent(ctx, ev, lm) + } + if i.rpcs != nil { + ctx = i.rpcs.ProcessEvent(ctx, ev, lm) + } + if i.traces != nil { + ctx = i.traces.ProcessEvent(ctx, ev, lm) + } + if event.IsLog(ev) { + if s := cache.KeyCreateSession.Get(ev); s != nil { + i.State.addClient(s) + } + if sid := tag.NewServer.Get(ev); sid != "" { + i.State.updateServer(&Server{ + ID: sid, + Logfile: tag.Logfile.Get(ev), + DebugAddress: tag.DebugAddress.Get(ev), + GoplsPath: tag.GoplsPath.Get(ev), + ClientID: tag.ClientID.Get(ev), + }) + } + if s := cache.KeyShutdownSession.Get(ev); s != nil { + i.State.dropClient(s) + } + if sid := tag.EndServer.Get(ev); sid != "" { + i.State.dropServer(sid) + } + if s := cache.KeyUpdateSession.Get(ev); s != nil { + if c := i.State.Client(s.ID()); c != nil { + c.DebugAddress = tag.DebugAddress.Get(ev) + c.Logfile = tag.Logfile.Get(ev) + c.ServerID = tag.ServerID.Get(ev) + c.GoplsPath = tag.GoplsPath.Get(ev) + } + } + } + return ctx + } + // StdTrace must be above export.Spans below (by convention, export + // middleware applies its wrapped exporter last). 
+ exporter = StdTrace(exporter) + metrics := metric.Config{} + registerMetrics(&metrics) + exporter = metrics.Exporter(exporter) + exporter = export.Spans(exporter) + exporter = export.Labels(exporter) + return exporter +} + +type dataFunc func(*http.Request) interface{} + +func render(tmpl *template.Template, fun dataFunc) func(http.ResponseWriter, *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + var data interface{} + if fun != nil { + data = fun(r) + } + if err := tmpl.Execute(w, data); err != nil { + event.Error(context.Background(), "", err) + http.Error(w, err.Error(), http.StatusInternalServerError) + } + } +} + +func commas(s string) string { + for i := len(s); i > 3; { + i -= 3 + s = s[:i] + "," + s[i:] + } + return s +} + +func fuint64(v uint64) string { + return commas(strconv.FormatUint(v, 10)) +} + +func fuint32(v uint32) string { + return commas(strconv.FormatUint(uint64(v), 10)) +} + +func fcontent(v []byte) string { + return string(v) +} + +var BaseTemplate = template.Must(template.New("").Parse(` + + +{{template "title" .}} + +{{block "head" .}}{{end}} + + +Main +Info +Memory +Profiling +Metrics +RPC +Trace +Analysis +
+

{{template "title" .}}

+{{block "body" .}} +Unknown page +{{end}} + + + +{{define "cachelink"}}Cache {{.}}{{end}} +{{define "clientlink"}}Client {{.}}{{end}} +{{define "serverlink"}}Server {{.}}{{end}} +{{define "sessionlink"}}Session {{.}}{{end}} +`)).Funcs(template.FuncMap{ + "fuint64": fuint64, + "fuint32": fuint32, + "fcontent": fcontent, + "localAddress": func(s string) string { + // Try to translate loopback addresses to localhost, both for cosmetics and + // because unspecified ipv6 addresses can break links on Windows. + // + // TODO(rfindley): In the future, it would be better not to assume the + // server is running on localhost, and instead construct this address using + // the remote host. + host, port, err := net.SplitHostPort(s) + if err != nil { + return s + } + ip := net.ParseIP(host) + if ip == nil { + return s + } + if ip.IsLoopback() || ip.IsUnspecified() { + return "localhost:" + port + } + return s + }, + // TODO(rfindley): re-enable option inspection. + // "options": func(s *cache.Session) []sessionOption { + // return showOptions(s.Options()) + // }, +}) + +var MainTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +{{define "title"}}Gopls server information{{end}} +{{define "body"}} +

Caches

+
    {{range .State.Caches}}
  • {{template "cachelink" .ID}}
  • {{end}}
+

Sessions

+
    {{range .State.Sessions}}
  • {{template "sessionlink" .ID}} from {{template "cachelink" .Cache.ID}}
  • {{end}}
+

Clients

+
    {{range .State.Clients}}
  • {{template "clientlink" .Session.ID}}
  • {{end}}
+

Servers

+
    {{range .State.Servers}}
  • {{template "serverlink" .ID}}
  • {{end}}
+

Bug reports

+
{{range .State.Bugs}}
{{.Key}}
{{.Description}}
{{end}}
+{{end}} +`)) + +var InfoTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +{{define "title"}}Gopls version information{{end}} +{{define "body"}} +{{.}} +{{end}} +`)) + +var MemoryTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +{{define "title"}}Gopls memory usage{{end}} +{{define "head"}}{{end}} +{{define "body"}} +
+

Stats

+ + + + + + + + + + + + + + + + +
Allocated bytes{{fuint64 .HeapAlloc}}
Total allocated bytes{{fuint64 .TotalAlloc}}
System bytes{{fuint64 .Sys}}
Heap system bytes{{fuint64 .HeapSys}}
Malloc calls{{fuint64 .Mallocs}}
Frees{{fuint64 .Frees}}
Idle heap bytes{{fuint64 .HeapIdle}}
In use bytes{{fuint64 .HeapInuse}}
Released to system bytes{{fuint64 .HeapReleased}}
Heap object count{{fuint64 .HeapObjects}}
Stack in use bytes{{fuint64 .StackInuse}}
Stack from system bytes{{fuint64 .StackSys}}
Bucket hash bytes{{fuint64 .BuckHashSys}}
GC metadata bytes{{fuint64 .GCSys}}
Off heap bytes{{fuint64 .OtherSys}}
+

By size

+ + +{{range .BySize}}{{end}} +
SizeMallocsFrees
{{fuint32 .Size}}{{fuint64 .Mallocs}}{{fuint64 .Frees}}
+{{end}} +`)) + +var DebugTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +{{define "title"}}GoPls Debug pages{{end}} +{{define "body"}} +Profiling +{{end}} +`)) + +var CacheTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +{{define "title"}}Cache {{.ID}}{{end}} +{{define "body"}} +

memoize.Store entries

+
    {{range $k,$v := .MemStats}}
  • {{$k}} - {{$v}}
  • {{end}}
+

File stats

+

+{{- $stats := .FileStats -}} +Total: {{$stats.Total}}
+Largest: {{$stats.Largest}}
+Errors: {{$stats.Errs}}
+

+{{end}} +`)) + +var AnalysisTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +{{define "title"}}Analysis{{end}} +{{define "body"}} +

Analyzer.Run times

+
    {{range .AnalyzerRunTimes}}
  • {{.Duration}} {{.Label}}
  • {{end}}
+{{end}} +`)) + +var ClientTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +{{define "title"}}Client {{.Session.ID}}{{end}} +{{define "body"}} +Using session: {{template "sessionlink" .Session.ID}}
+{{if .DebugAddress}}Debug this client at: {{localAddress .DebugAddress}}
{{end}} +Logfile: {{.Logfile}}
+Gopls Path: {{.GoplsPath}}
+{{end}} +`)) + +var ServerTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +{{define "title"}}Server {{.ID}}{{end}} +{{define "body"}} +{{if .DebugAddress}}Debug this server at: {{localAddress .DebugAddress}}
{{end}} +Logfile: {{.Logfile}}
+Gopls Path: {{.GoplsPath}}
+{{end}} +`)) + +var SessionTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +{{define "title"}}Session {{.ID}}{{end}} +{{define "body"}} +From: {{template "cachelink" .Cache.ID}}
+

Views

+
    {{range .Views}} +{{- $envOverlay := .EnvOverlay -}} +
  • ID: {{.ID}}
    +Type: {{.Type}}
    +Root: {{.Root}}
    +{{- if $envOverlay}} +Env overlay: {{$envOverlay}})
    +{{end -}} +Folder: {{.Folder.Name}}:{{.Folder.Dir}}
  • +{{end}}
+

Overlays

+{{$session := .}} + +{{end}} +`)) + +var FileTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +{{define "title"}}Overlay {{.Identity.Hash}}{{end}} +{{define "body"}} +{{with .}} + URI: {{.URI}}
+ Identifier: {{.Identity.Hash}}
+ Version: {{.Version}}
+ Kind: {{.Kind}}
+{{end}} +

Contents

+
{{fcontent .Content}}
+{{end}} +`)) diff --git a/gopls/internal/debug/template_test.go b/gopls/internal/debug/template_test.go new file mode 100644 index 00000000000..db940efc602 --- /dev/null +++ b/gopls/internal/debug/template_test.go @@ -0,0 +1,156 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package debug_test + +// Provide 'static type checking' of the templates. This guards against changes in various +// gopls datastructures causing template execution to fail. The checking is done by +// the github.com/jba/templatecheck package. Before that is run, the test checks that +// its list of templates and their arguments corresponds to the arguments in +// calls to render(). The test assumes that all uses of templates are done through render(). + +import ( + "go/ast" + "html/template" + "os" + "runtime" + "sort" + "strings" + "testing" + + "github.com/jba/templatecheck" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/debug" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/internal/testenv" +) + +var templates = map[string]struct { + tmpl *template.Template + data interface{} // a value of the needed type +}{ + "MainTmpl": {debug.MainTmpl, &debug.Instance{}}, + "DebugTmpl": {debug.DebugTmpl, nil}, + "RPCTmpl": {debug.RPCTmpl, &debug.Rpcs{}}, + "TraceTmpl": {debug.TraceTmpl, debug.TraceResults{}}, + "CacheTmpl": {debug.CacheTmpl, &cache.Cache{}}, + "SessionTmpl": {debug.SessionTmpl, &cache.Session{}}, + "ClientTmpl": {debug.ClientTmpl, &debug.Client{}}, + "ServerTmpl": {debug.ServerTmpl, &debug.Server{}}, + "FileTmpl": {debug.FileTmpl, *new(interface { + file.Handle + Kind() file.Kind // (overlay files only) + })}, + "InfoTmpl": {debug.InfoTmpl, "something"}, + "MemoryTmpl": {debug.MemoryTmpl, runtime.MemStats{}}, + "AnalysisTmpl": {debug.AnalysisTmpl, new(debug.State).Analysis()}, +} + 
+func TestTemplates(t *testing.T) { + testenv.NeedsGoPackages(t) + testenv.NeedsLocalXTools(t) + + cfg := &packages.Config{ + Mode: packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo, + } + cfg.Env = os.Environ() + cfg.Env = append(cfg.Env, + "GOPACKAGESDRIVER=off", + "GOWORK=off", // necessary for -mod=mod below + "GOFLAGS=-mod=mod", + ) + + pkgs, err := packages.Load(cfg, "golang.org/x/tools/gopls/internal/debug") + if err != nil { + t.Fatal(err) + } + if len(pkgs) != 1 { + t.Fatalf("expected a single package, but got %d", len(pkgs)) + } + p := pkgs[0] + if len(p.Errors) != 0 { + t.Fatalf("compiler error, e.g. %v", p.Errors[0]) + } + // find the calls to render in serve.go + tree := treeOf(p, "serve.go") + if tree == nil { + t.Fatalf("found no syntax tree for %s", "serve.go") + } + renders := callsOf(tree, "render") + if len(renders) == 0 { + t.Fatalf("found no calls to render") + } + var found = make(map[string]bool) + for _, r := range renders { + if len(r.Args) != 2 { + // template, func + t.Fatalf("got %d args, expected 2", len(r.Args)) + } + t0, ok := p.TypesInfo.Types[r.Args[0]] + if !ok || !t0.IsValue() || t0.Type.String() != "*html/template.Template" { + t.Fatalf("no type info for template") + } + if id, ok := r.Args[0].(*ast.Ident); !ok { + t.Errorf("expected *ast.Ident, got %T", r.Args[0]) + } else { + found[id.Name] = true + } + } + // make sure found and templates have the same templates + for k := range found { + if _, ok := templates[k]; !ok { + t.Errorf("code has template %s, but test does not", k) + } + } + for k := range templates { + if _, ok := found[k]; !ok { + t.Errorf("test has template %s, code does not", k) + } + } + // now check all the known templates, in alphabetic order, for determinacy + keys := []string{} + for k := range templates { + keys = append(keys, k) + } + sort.Strings(keys) + for _, k := range keys { + v := templates[k] + // the FuncMap is an annoyance; should not be necessary + if err := 
templatecheck.CheckHTML(v.tmpl, v.data); err != nil { + t.Errorf("%s: %v", k, err) + continue + } + t.Logf("%s ok", k) + } +} + +func callsOf(tree *ast.File, name string) []*ast.CallExpr { + var ans []*ast.CallExpr + f := func(n ast.Node) bool { + x, ok := n.(*ast.CallExpr) + if !ok { + return true + } + if y, ok := x.Fun.(*ast.Ident); ok { + if y.Name == name { + ans = append(ans, x) + } + } + return true + } + ast.Inspect(tree, f) + return ans +} + +func treeOf(p *packages.Package, fname string) *ast.File { + for _, tree := range p.Syntax { + loc := tree.Package + pos := p.Fset.PositionFor(loc, false) + if strings.HasSuffix(pos.Filename, fname) { + return tree + } + } + return nil +} diff --git a/gopls/internal/lsp/debug/trace.go b/gopls/internal/debug/trace.go similarity index 98% rename from gopls/internal/lsp/debug/trace.go rename to gopls/internal/debug/trace.go index 31c5a5376ac..9314a04d241 100644 --- a/gopls/internal/lsp/debug/trace.go +++ b/gopls/internal/debug/trace.go @@ -150,14 +150,14 @@ func StdTrace(exporter event.Exporter) event.Exporter { ctx = context.WithValue(ctx, traceKey, task) } // Log the start event as it may contain useful labels. 
- msg := formatEvent(ctx, ev, lm) + msg := formatEvent(ev, lm) trace.Log(ctx, "start", msg) case event.IsLog(ev): category := "" if event.IsError(ev) { category = "error" } - msg := formatEvent(ctx, ev, lm) + msg := formatEvent(ev, lm) trace.Log(ctx, category, msg) case event.IsEnd(ev): if v := ctx.Value(traceKey); v != nil { @@ -168,7 +168,7 @@ func StdTrace(exporter event.Exporter) event.Exporter { } } -func formatEvent(ctx context.Context, ev core.Event, lm label.Map) string { +func formatEvent(ev core.Event, lm label.Map) string { buf := &bytes.Buffer{} p := export.Printer{} p.WriteEvent(buf, ev, lm) diff --git a/gopls/internal/file/file.go b/gopls/internal/file/file.go new file mode 100644 index 00000000000..5f8be06cf60 --- /dev/null +++ b/gopls/internal/file/file.go @@ -0,0 +1,62 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The file package defines types used for working with LSP files. +package file + +import ( + "context" + "fmt" + + "golang.org/x/tools/gopls/internal/protocol" +) + +// An Identity identifies the name and contents of a file. +// +// TODO(rfindley): Identity may not carry its weight. Consider instead just +// exposing Handle.Hash, and using an ad-hoc key type where necessary. +// Or perhaps if mod/work parsing is moved outside of the memoize cache, +// a notion of Identity simply isn't needed. +type Identity struct { + URI protocol.DocumentURI + Hash Hash // digest of file contents +} + +func (id Identity) String() string { + return fmt.Sprintf("%s%s", id.URI, id.Hash) +} + +// A FileHandle represents the URI, content, hash, and optional +// version of a file tracked by the LSP session. +// +// File content may be provided by the file system (for Saved files) +// or from an overlay, for open files with unsaved edits. 
+// A FileHandle may record an attempt to read a non-existent file, +// in which case Content returns an error. +type Handle interface { + // URI is the URI for this file handle. + URI() protocol.DocumentURI + // Identity returns an Identity for the file, even if there was an error + // reading it. + Identity() Identity + // SameContentsOnDisk reports whether the file has the same content on disk: + // it is false for files open on an editor with unsaved edits. + SameContentsOnDisk() bool + // Version returns the file version, as defined by the LSP client. + // For on-disk file handles, Version returns 0. + Version() int32 + // Content returns the contents of a file. + // If the file is not available, returns a nil slice and an error. + Content() ([]byte, error) +} + +// A Source maps URIs to Handles. +type Source interface { + // ReadFile returns the Handle for a given URI, either by reading the content + // of the file or by obtaining it from a cache. + // + // Invariant: ReadFile must only return an error in the case of context + // cancellation. If ctx.Err() is nil, the resulting error must also be nil. + ReadFile(ctx context.Context, uri protocol.DocumentURI) (Handle, error) +} diff --git a/gopls/internal/file/hash.go b/gopls/internal/file/hash.go new file mode 100644 index 00000000000..eb182536ab7 --- /dev/null +++ b/gopls/internal/file/hash.go @@ -0,0 +1,33 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package file + +import ( + "crypto/sha256" + "fmt" +) + +// A Hash is a cryptographic digest of the contents of a file. +// (Although at 32B it is larger than a 16B string header, it is smaller +// and has better locality than the string header + 64B of hex digits.) +type Hash [sha256.Size]byte + +// HashOf returns the hash of some data. 
+func HashOf(data []byte) Hash { + return Hash(sha256.Sum256(data)) +} + +// String returns the digest as a string of hex digits. +func (h Hash) String() string { + return fmt.Sprintf("%64x", [sha256.Size]byte(h)) +} + +// XORWith updates *h to *h XOR h2. +func (h *Hash) XORWith(h2 Hash) { + // Small enough that we don't need crypto/subtle.XORBytes. + for i := range h { + h[i] ^= h2[i] + } +} diff --git a/gopls/internal/file/kind.go b/gopls/internal/file/kind.go new file mode 100644 index 00000000000..087a57f32d0 --- /dev/null +++ b/gopls/internal/file/kind.go @@ -0,0 +1,68 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package file + +import ( + "fmt" + + "golang.org/x/tools/gopls/internal/protocol" +) + +// Kind describes the kind of the file in question. +// It can be one of Go,mod, Sum, or Tmpl. +type Kind int + +const ( + // UnknownKind is a file type we don't know about. + UnknownKind = Kind(iota) + + // Go is a Go source file. + Go + // Mod is a go.mod file. + Mod + // Sum is a go.sum file. + Sum + // Tmpl is a template file. + Tmpl + // Work is a go.work file. + Work +) + +func (k Kind) String() string { + switch k { + case Go: + return "go" + case Mod: + return "go.mod" + case Sum: + return "go.sum" + case Tmpl: + return "tmpl" + case Work: + return "go.work" + default: + return fmt.Sprintf("internal error: unknown file kind %d", k) + } +} + +// KindForLang returns the gopls file [Kind] associated with the given LSP +// LanguageKind string from protocol.TextDocumentItem.LanguageID, +// or UnknownKind if the language is not one recognized by gopls. 
+func KindForLang(langID protocol.LanguageKind) Kind { + switch langID { + case "go": + return Go + case "go.mod": + return Mod + case "go.sum": + return Sum + case "tmpl", "gotmpl": + return Tmpl + case "go.work": + return Work + default: + return UnknownKind + } +} diff --git a/gopls/internal/file/modification.go b/gopls/internal/file/modification.go new file mode 100644 index 00000000000..a53bb17898a --- /dev/null +++ b/gopls/internal/file/modification.go @@ -0,0 +1,57 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package file + +import "golang.org/x/tools/gopls/internal/protocol" + +// Modification represents a modification to a file. +type Modification struct { + URI protocol.DocumentURI + Action Action + + // OnDisk is true if a watched file is changed on disk. + // If true, Version will be -1 and Text will be nil. + OnDisk bool + + // Version will be -1 and Text will be nil when they are not supplied, + // specifically on textDocument/didClose and for on-disk changes. + Version int32 + Text []byte + + // LanguageID is only sent from the language client on textDocument/didOpen. + LanguageID protocol.LanguageKind +} + +// An Action is a type of file state change. 
+type Action int + +const ( + UnknownAction = Action(iota) + Open + Change + Close + Save + Create + Delete +) + +func (a Action) String() string { + switch a { + case Open: + return "Open" + case Change: + return "Change" + case Close: + return "Close" + case Save: + return "Save" + case Create: + return "Create" + case Delete: + return "Delete" + default: + return "Unknown" + } +} diff --git a/gopls/internal/lsp/filecache/filecache.go b/gopls/internal/filecache/filecache.go similarity index 99% rename from gopls/internal/lsp/filecache/filecache.go rename to gopls/internal/filecache/filecache.go index 6877780c29c..af917578e4f 100644 --- a/gopls/internal/lsp/filecache/filecache.go +++ b/gopls/internal/filecache/filecache.go @@ -38,8 +38,8 @@ import ( "sync/atomic" "time" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/lru" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/lru" ) // Start causes the filecache to initialize and start garbage gollection. diff --git a/gopls/internal/lsp/filecache/filecache_test.go b/gopls/internal/filecache/filecache_test.go similarity index 99% rename from gopls/internal/lsp/filecache/filecache_test.go rename to gopls/internal/filecache/filecache_test.go index a078fd5cf69..3419db4b513 100644 --- a/gopls/internal/lsp/filecache/filecache_test.go +++ b/gopls/internal/filecache/filecache_test.go @@ -22,7 +22,7 @@ import ( "testing" "golang.org/x/sync/errgroup" - "golang.org/x/tools/gopls/internal/lsp/filecache" + "golang.org/x/tools/gopls/internal/filecache" "golang.org/x/tools/internal/testenv" ) diff --git a/gopls/internal/golang/add_import.go b/gopls/internal/golang/add_import.go new file mode 100644 index 00000000000..a43256a6a08 --- /dev/null +++ b/gopls/internal/golang/add_import.go @@ -0,0 +1,29 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package golang + +import ( + "context" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/imports" +) + +// AddImport adds a single import statement to the given file +func AddImport(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, importPath string) ([]protocol.TextEdit, error) { + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return nil, err + } + return ComputeOneImportFixEdits(snapshot, pgf, &imports.ImportFix{ + StmtInfo: imports.ImportInfo{ + ImportPath: importPath, + }, + FixType: imports.AddImport, + }) +} diff --git a/gopls/internal/golang/call_hierarchy.go b/gopls/internal/golang/call_hierarchy.go new file mode 100644 index 00000000000..7e88df1a1cf --- /dev/null +++ b/gopls/internal/golang/call_hierarchy.go @@ -0,0 +1,313 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "context" + "errors" + "fmt" + "go/ast" + "go/token" + "go/types" + "path/filepath" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +// PrepareCallHierarchy returns an array of CallHierarchyItem for a file and the position within the file. 
+func PrepareCallHierarchy(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) ([]protocol.CallHierarchyItem, error) { + ctx, done := event.Start(ctx, "golang.PrepareCallHierarchy") + defer done() + + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + pos, err := pgf.PositionPos(pp) + if err != nil { + return nil, err + } + + _, obj, _ := referencedObject(pkg, pgf, pos) + if obj == nil { + return nil, nil + } + + if _, ok := obj.Type().Underlying().(*types.Signature); !ok { + return nil, nil + } + + declLoc, err := mapPosition(ctx, pkg.FileSet(), snapshot, obj.Pos(), adjustedObjEnd(obj)) + if err != nil { + return nil, err + } + rng := declLoc.Range + + callHierarchyItem := protocol.CallHierarchyItem{ + Name: obj.Name(), + Kind: protocol.Function, + Tags: []protocol.SymbolTag{}, + Detail: fmt.Sprintf("%s • %s", obj.Pkg().Path(), filepath.Base(declLoc.URI.Path())), + URI: declLoc.URI, + Range: rng, + SelectionRange: rng, + } + return []protocol.CallHierarchyItem{callHierarchyItem}, nil +} + +// IncomingCalls returns an array of CallHierarchyIncomingCall for a file and the position within the file. +func IncomingCalls(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pos protocol.Position) ([]protocol.CallHierarchyIncomingCall, error) { + ctx, done := event.Start(ctx, "golang.IncomingCalls") + defer done() + + refs, err := references(ctx, snapshot, fh, pos, false) + if err != nil { + if errors.Is(err, ErrNoIdentFound) || errors.Is(err, errNoObjectFound) { + return nil, nil + } + return nil, err + } + + // Group references by their enclosing function declaration. 
+ incomingCalls := make(map[protocol.Location]*protocol.CallHierarchyIncomingCall) + for _, ref := range refs { + callItem, err := enclosingNodeCallItem(ctx, snapshot, ref.pkgPath, ref.location) + if err != nil { + event.Error(ctx, "error getting enclosing node", err, tag.Method.Of(string(ref.pkgPath))) + continue + } + loc := protocol.Location{ + URI: callItem.URI, + Range: callItem.Range, + } + call, ok := incomingCalls[loc] + if !ok { + call = &protocol.CallHierarchyIncomingCall{From: callItem} + incomingCalls[loc] = call + } + call.FromRanges = append(call.FromRanges, ref.location.Range) + } + + // Flatten the map of pointers into a slice of values. + incomingCallItems := make([]protocol.CallHierarchyIncomingCall, 0, len(incomingCalls)) + for _, callItem := range incomingCalls { + incomingCallItems = append(incomingCallItems, *callItem) + } + return incomingCallItems, nil +} + +// enclosingNodeCallItem creates a CallHierarchyItem representing the function call at loc. +func enclosingNodeCallItem(ctx context.Context, snapshot *cache.Snapshot, pkgPath PackagePath, loc protocol.Location) (protocol.CallHierarchyItem, error) { + // Parse the file containing the reference. + fh, err := snapshot.ReadFile(ctx, loc.URI) + if err != nil { + return protocol.CallHierarchyItem{}, err + } + // TODO(adonovan): opt: before parsing, trim the bodies of functions + // that don't contain the reference, using either a scanner-based + // implementation such as https://go.dev/play/p/KUrObH1YkX8 + // (~31% speedup), or a byte-oriented implementation (2x speedup). + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return protocol.CallHierarchyItem{}, err + } + start, end, err := pgf.RangePos(loc.Range) + if err != nil { + return protocol.CallHierarchyItem{}, err + } + + // Find the enclosing function, if any, and the number of func literals in between. 
+ var funcDecl *ast.FuncDecl + var funcLit *ast.FuncLit // innermost function literal + var litCount int + path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) +outer: + for _, node := range path { + switch n := node.(type) { + case *ast.FuncDecl: + funcDecl = n + break outer + case *ast.FuncLit: + litCount++ + if litCount > 1 { + continue + } + funcLit = n + } + } + + nameIdent := path[len(path)-1].(*ast.File).Name + kind := protocol.Package + if funcDecl != nil { + nameIdent = funcDecl.Name + kind = protocol.Function + } + + nameStart, nameEnd := nameIdent.Pos(), nameIdent.End() + if funcLit != nil { + nameStart, nameEnd = funcLit.Type.Func, funcLit.Type.Params.Pos() + kind = protocol.Function + } + rng, err := pgf.PosRange(nameStart, nameEnd) + if err != nil { + return protocol.CallHierarchyItem{}, err + } + + name := nameIdent.Name + for i := 0; i < litCount; i++ { + name += ".func()" + } + + return protocol.CallHierarchyItem{ + Name: name, + Kind: kind, + Tags: []protocol.SymbolTag{}, + Detail: fmt.Sprintf("%s • %s", pkgPath, filepath.Base(fh.URI().Path())), + URI: loc.URI, + Range: rng, + SelectionRange: rng, + }, nil +} + +// OutgoingCalls returns an array of CallHierarchyOutgoingCall for a file and the position within the file. +func OutgoingCalls(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) ([]protocol.CallHierarchyOutgoingCall, error) { + ctx, done := event.Start(ctx, "golang.OutgoingCalls") + defer done() + + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + pos, err := pgf.PositionPos(pp) + if err != nil { + return nil, err + } + + _, obj, _ := referencedObject(pkg, pgf, pos) + if obj == nil { + return nil, nil + } + + if _, ok := obj.Type().Underlying().(*types.Signature); !ok { + return nil, nil + } + + // Skip builtins. 
+ if obj.Pkg() == nil { + return nil, nil + } + + if !obj.Pos().IsValid() { + return nil, bug.Errorf("internal error: object %s.%s missing position", obj.Pkg().Path(), obj.Name()) + } + + declFile := pkg.FileSet().File(obj.Pos()) + if declFile == nil { + return nil, bug.Errorf("file not found for %d", obj.Pos()) + } + + uri := protocol.URIFromPath(declFile.Name()) + offset, err := safetoken.Offset(declFile, obj.Pos()) + if err != nil { + return nil, err + } + + // Use TypecheckFull as we want to inspect the body of the function declaration. + declPkg, declPGF, err := NarrowestPackageForFile(ctx, snapshot, uri) + if err != nil { + return nil, err + } + + declPos, err := safetoken.Pos(declPGF.Tok, offset) + if err != nil { + return nil, err + } + + declNode, _, _ := findDeclInfo([]*ast.File{declPGF.File}, declPos) + if declNode == nil { + // TODO(rfindley): why don't we return an error here, or even bug.Errorf? + return nil, nil + // return nil, bug.Errorf("failed to find declaration for object %s.%s", obj.Pkg().Path(), obj.Name()) + } + + type callRange struct { + start, end token.Pos + } + callRanges := []callRange{} + ast.Inspect(declNode, func(n ast.Node) bool { + if call, ok := n.(*ast.CallExpr); ok { + var start, end token.Pos + switch n := call.Fun.(type) { + case *ast.SelectorExpr: + start, end = n.Sel.NamePos, call.Lparen + case *ast.Ident: + start, end = n.NamePos, call.Lparen + case *ast.FuncLit: + // while we don't add the function literal as an 'outgoing' call + // we still want to traverse into it + return true + default: + // ignore any other kind of call expressions + // for ex: direct function literal calls since that's not an 'outgoing' call + return false + } + callRanges = append(callRanges, callRange{start: start, end: end}) + } + return true + }) + + outgoingCalls := map[token.Pos]*protocol.CallHierarchyOutgoingCall{} + for _, callRange := range callRanges { + _, obj, _ := referencedObject(declPkg, declPGF, callRange.start) + if obj == nil { + 
continue + } + + // ignore calls to builtin functions + if obj.Pkg() == nil { + continue + } + + outgoingCall, ok := outgoingCalls[obj.Pos()] + if !ok { + loc, err := mapPosition(ctx, declPkg.FileSet(), snapshot, obj.Pos(), obj.Pos()+token.Pos(len(obj.Name()))) + if err != nil { + return nil, err + } + outgoingCall = &protocol.CallHierarchyOutgoingCall{ + To: protocol.CallHierarchyItem{ + Name: obj.Name(), + Kind: protocol.Function, + Tags: []protocol.SymbolTag{}, + Detail: fmt.Sprintf("%s • %s", obj.Pkg().Path(), filepath.Base(loc.URI.Path())), + URI: loc.URI, + Range: loc.Range, + SelectionRange: loc.Range, + }, + } + outgoingCalls[obj.Pos()] = outgoingCall + } + + rng, err := declPGF.PosRange(callRange.start, callRange.end) + if err != nil { + return nil, err + } + outgoingCall.FromRanges = append(outgoingCall.FromRanges, rng) + } + + outgoingCallItems := make([]protocol.CallHierarchyOutgoingCall, 0, len(outgoingCalls)) + for _, callItem := range outgoingCalls { + outgoingCallItems = append(outgoingCallItems, *callItem) + } + return outgoingCallItems, nil +} diff --git a/gopls/internal/golang/change_quote.go b/gopls/internal/golang/change_quote.go new file mode 100644 index 00000000000..919b935e79c --- /dev/null +++ b/gopls/internal/golang/change_quote.go @@ -0,0 +1,85 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+
+package golang
+
+import (
+	"go/ast"
+	"go/token"
+	"strconv"
+	"strings"
+
+	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/gopls/internal/cache/parsego"
+	"golang.org/x/tools/gopls/internal/file"
+	"golang.org/x/tools/gopls/internal/protocol"
+	"golang.org/x/tools/gopls/internal/util/bug"
+	"golang.org/x/tools/gopls/internal/util/safetoken"
+	"golang.org/x/tools/internal/diff"
+)
+
+// ConvertStringLiteral reports whether we can convert between raw and interpreted
+// string literals in the [start, end), along with a CodeAction containing the edits.
+//
+// The action in the result is valid only if the following conditions hold:
+// - [start, end) is enclosed by a string literal
+// - if the string is an interpreted string, the conversion to a raw string must be possible
+func ConvertStringLiteral(pgf *parsego.File, fh file.Handle, rng protocol.Range) (protocol.CodeAction, bool) {
+	startPos, endPos, err := pgf.RangePos(rng)
+	if err != nil {
+		return protocol.CodeAction{}, false // e.g. invalid range
+	}
+	path, _ := astutil.PathEnclosingInterval(pgf.File, startPos, endPos)
+	lit, ok := path[0].(*ast.BasicLit)
+	if !ok || lit.Kind != token.STRING {
+		return protocol.CodeAction{}, false
+	}
+
+	str, err := strconv.Unquote(lit.Value)
+	if err != nil {
+		return protocol.CodeAction{}, false
+	}
+
+	interpreted := lit.Value[0] == '"'
+	// Not all "..." strings can be represented as `...` strings.
+ if interpreted && !strconv.CanBackquote(strings.ReplaceAll(str, "\n", "")) { + return protocol.CodeAction{}, false + } + + var ( + title string + newText string + ) + if interpreted { + title = "Convert to raw string literal" + newText = "`" + str + "`" + } else { + title = "Convert to interpreted string literal" + newText = strconv.Quote(str) + } + + start, end, err := safetoken.Offsets(pgf.Tok, lit.Pos(), lit.End()) + if err != nil { + bug.Reportf("failed to get string literal offset by token.Pos:%v", err) + return protocol.CodeAction{}, false + } + edits := []diff.Edit{{ + Start: start, + End: end, + New: newText, + }} + pedits, err := protocol.EditsFromDiffEdits(pgf.Mapper, edits) + if err != nil { + bug.Reportf("failed to convert diff.Edit to protocol.TextEdit:%v", err) + return protocol.CodeAction{}, false + } + + return protocol.CodeAction{ + Title: title, + Kind: protocol.RefactorRewrite, + Edit: &protocol.WorkspaceEdit{ + DocumentChanges: documentChanges(fh, pedits), + }, + }, true +} diff --git a/gopls/internal/lsp/source/change_signature.go b/gopls/internal/golang/change_signature.go similarity index 81% rename from gopls/internal/lsp/source/change_signature.go rename to gopls/internal/golang/change_signature.go index 8dfd0135950..d2f9ea674f1 100644 --- a/gopls/internal/lsp/source/change_signature.go +++ b/gopls/internal/golang/change_signature.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-package source +package golang import ( "bytes" @@ -16,16 +16,19 @@ import ( "regexp" "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/imports" internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/refactor/inline" "golang.org/x/tools/internal/tokeninternal" "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/versions" ) // RemoveUnusedParameter computes a refactoring to remove the parameter @@ -37,12 +40,15 @@ import ( // - Improve the extra newlines in output. // - Stream type checking via ForEachPackage. // - Avoid unnecessary additional type checking. -func RemoveUnusedParameter(ctx context.Context, fh FileHandle, rng protocol.Range, snapshot Snapshot) ([]protocol.DocumentChanges, error) { +func RemoveUnusedParameter(ctx context.Context, fh file.Handle, rng protocol.Range, snapshot *cache.Snapshot) ([]protocol.DocumentChanges, error) { pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) if err != nil { return nil, err } - if perrors, terrors := pkg.GetParseErrors(), pkg.GetTypeErrors(); len(perrors) > 0 || len(terrors) > 0 { + + // Changes to our heuristics for whether we can remove a parameter must also + // be reflected in the canRemoveParameter helper. 
+ if perrors, terrors := pkg.ParseErrors(), pkg.TypeErrors(); len(perrors) > 0 || len(terrors) > 0 { var sample string if len(perrors) > 0 { sample = perrors[0].Error() @@ -52,12 +58,9 @@ func RemoveUnusedParameter(ctx context.Context, fh FileHandle, rng protocol.Rang return nil, fmt.Errorf("can't change signatures for packages with parse or type errors: (e.g. %s)", sample) } - info := FindParam(pgf, rng) - if info.Decl == nil { - return nil, fmt.Errorf("failed to find declaration") - } - if info.Decl.Recv != nil { - return nil, fmt.Errorf("can't change signature of methods (yet)") + info, err := FindParam(pgf, rng) + if err != nil { + return nil, err // e.g. invalid range } if info.Field == nil { return nil, fmt.Errorf("failed to find field") @@ -131,6 +134,7 @@ func RemoveUnusedParameter(ctx context.Context, fh FileHandle, rng protocol.Rang if err != nil { return nil, err } + // Finally, rewrite the original declaration. We do this after inlining all // calls, as there may be calls in the same file as the declaration. 
But none // of the inlining should have changed the location of the original @@ -146,7 +150,10 @@ func RemoveUnusedParameter(ctx context.Context, fh FileHandle, rng protocol.Rang src = pgf.Src } fset := tokeninternal.FileSetFor(pgf.Tok) - src, err = rewriteSignature(fset, idx, src, newDecl) + src, err := rewriteSignature(fset, idx, src, newDecl) + if err != nil { + return nil, err + } newContent[pgf.URI] = src } @@ -163,19 +170,11 @@ func RemoveUnusedParameter(ctx context.Context, fh FileHandle, rng protocol.Rang } edits := diff.Bytes(before, after) mapper := protocol.NewMapper(uri, before) - pedits, err := ToProtocolEdits(mapper, edits) + pedits, err := protocol.EditsFromDiffEdits(mapper, edits) if err != nil { return nil, fmt.Errorf("computing edits for %s: %v", uri, err) } - changes = append(changes, protocol.DocumentChanges{ - TextDocumentEdit: &protocol.TextDocumentEdit{ - TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{ - Version: fh.Version(), - TextDocumentIdentifier: protocol.TextDocumentIdentifier{URI: protocol.URIFromSpanURI(uri)}, - }, - Edits: pedits, - }, - }) + changes = append(changes, documentChanges(fh, pedits)...) } return changes, nil } @@ -239,20 +238,18 @@ func rewriteSignature(fset *token.FileSet, declIdx int, src0 []byte, newDecl *as // ParamInfo records information about a param identified by a position. type ParamInfo struct { - Decl *ast.FuncDecl // enclosing func decl, or nil + Decl *ast.FuncDecl // enclosing func decl (non-nil) FieldIndex int // index of Field in Decl.Type.Params, or -1 - Field *ast.Field // enclosing field of Decl, or nil + Field *ast.Field // enclosing field of Decl, or nil if range not among parameters NameIndex int // index of Name in Field.Names, or nil Name *ast.Ident // indicated name (either enclosing, or Field.Names[0] if len(Field.Names) == 1) } // FindParam finds the parameter information spanned by the given range. 
-func FindParam(pgf *ParsedGoFile, rng protocol.Range) ParamInfo { - info := ParamInfo{FieldIndex: -1, NameIndex: -1} +func FindParam(pgf *parsego.File, rng protocol.Range) (*ParamInfo, error) { start, end, err := pgf.RangePos(rng) if err != nil { - bug.Reportf("(file=%v).RangePos(%v) failed: %v", pgf.URI, rng, err) - return info + return nil, err } path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) @@ -275,9 +272,13 @@ func FindParam(pgf *ParsedGoFile, rng protocol.Range) ParamInfo { } // Check the conditions described in the docstring. if decl == nil { - return info + return nil, fmt.Errorf("range is not within a function declaration") + } + info := &ParamInfo{ + FieldIndex: -1, + NameIndex: -1, + Decl: decl, } - info.Decl = decl for fi, f := range decl.Type.Params.List { if f == field { info.FieldIndex = fi @@ -296,16 +297,16 @@ func FindParam(pgf *ParsedGoFile, rng protocol.Range) ParamInfo { break } } - return info + return info, nil } // signatureRewrite defines a rewritten function signature. // // See rewriteCalls for more details. type signatureRewrite struct { - snapshot Snapshot - pkg Package - pgf *ParsedGoFile + snapshot *cache.Snapshot + pkg *cache.Package + pgf *parsego.File origDecl, newDecl *ast.FuncDecl params *ast.FieldList callArgs []ast.Expr @@ -352,7 +353,7 @@ type signatureRewrite struct { // By passing an entirely new declaration, rewriteCalls may be used for // signature refactorings that may affect the function body, such as removing // or adding return values. -func rewriteCalls(ctx context.Context, rw signatureRewrite) (map[span.URI][]byte, error) { +func rewriteCalls(ctx context.Context, rw signatureRewrite) (map[protocol.DocumentURI][]byte, error) { // tag is a unique prefix that is added to the delegated declaration. // // It must have a ~0% probability of causing collisions with existing names. 
@@ -366,14 +367,56 @@ func rewriteCalls(ctx context.Context, rw signatureRewrite) (map[span.URI][]byte { delegate := internalastutil.CloneNode(rw.newDecl) // clone before modifying delegate.Name.Name = tag + delegate.Name.Name - if obj := rw.pkg.GetTypes().Scope().Lookup(delegate.Name.Name); obj != nil { + if obj := rw.pkg.Types().Scope().Lookup(delegate.Name.Name); obj != nil { return nil, fmt.Errorf("synthetic name %q conflicts with an existing declaration", delegate.Name.Name) } wrapper := internalastutil.CloneNode(rw.origDecl) wrapper.Type.Params = rw.params + + // Get the receiver name, creating it if necessary. + var recv string // nonempty => call is a method call with receiver recv + if wrapper.Recv.NumFields() > 0 { + if len(wrapper.Recv.List[0].Names) > 0 { + recv = wrapper.Recv.List[0].Names[0].Name + } else { + // Create unique name for the temporary receiver, which will be inlined away. + // + // We use the lexical scope of the original function to avoid conflicts + // with (e.g.) named result variables. However, since the parameter syntax + // may have been modified/renamed from the original function, we must + // reject those names too. 
+ usedParams := make(map[string]bool) + for _, fld := range wrapper.Type.Params.List { + for _, name := range fld.Names { + usedParams[name.Name] = true + } + } + scope := rw.pkg.TypesInfo().Scopes[rw.origDecl.Type] + if scope == nil { + return nil, bug.Errorf("missing function scope for %v", rw.origDecl.Name.Name) + } + for i := 0; ; i++ { + recv = fmt.Sprintf("r%d", i) + _, obj := scope.LookupParent(recv, token.NoPos) + if obj == nil && !usedParams[recv] { + break + } + } + wrapper.Recv.List[0].Names = []*ast.Ident{{Name: recv}} + } + } + + name := &ast.Ident{Name: delegate.Name.Name} + var fun ast.Expr = name + if recv != "" { + fun = &ast.SelectorExpr{ + X: &ast.Ident{Name: recv}, + Sel: name, + } + } call := &ast.CallExpr{ - Fun: &ast.Ident{Name: delegate.Name.Name}, + Fun: fun, Args: rw.callArgs, } if rw.variadic { @@ -404,7 +447,7 @@ func rewriteCalls(ctx context.Context, rw signatureRewrite) (map[span.URI][]byte // by returning the modified AST from replaceDecl. Investigate if that is // accurate. modifiedSrc = append(modifiedSrc, []byte("\n\n"+FormatNode(fset, wrapper))...) - modifiedFile, err = parser.ParseFile(rw.pkg.FileSet(), rw.pgf.URI.Filename(), modifiedSrc, parser.ParseComments|parser.SkipObjectResolution) + modifiedFile, err = parser.ParseFile(rw.pkg.FileSet(), rw.pgf.URI.Path(), modifiedSrc, parser.ParseComments|parser.SkipObjectResolution) if err != nil { return nil, err } @@ -414,7 +457,7 @@ func rewriteCalls(ctx context.Context, rw signatureRewrite) (map[span.URI][]byte // Type check pkg again with the modified file, to compute the synthetic // callee. 
logf := logger(ctx, "change signature", rw.snapshot.Options().VerboseOutput) - pkg2, info, err := reTypeCheck(logf, rw.pkg, map[span.URI]*ast.File{rw.pgf.URI: modifiedFile}, false) + pkg2, info, err := reTypeCheck(logf, rw.pkg, map[protocol.DocumentURI]*ast.File{rw.pgf.URI: modifiedFile}, false) if err != nil { return nil, err } @@ -435,7 +478,7 @@ func rewriteCalls(ctx context.Context, rw signatureRewrite) (map[span.URI][]byte // If expectErrors is true, reTypeCheck allows errors in the new package. // TODO(rfindley): perhaps this should be a filter to specify which errors are // acceptable. -func reTypeCheck(logf func(string, ...any), orig Package, fileMask map[span.URI]*ast.File, expectErrors bool) (*types.Package, *types.Info, error) { +func reTypeCheck(logf func(string, ...any), orig *cache.Package, fileMask map[protocol.DocumentURI]*ast.File, expectErrors bool) (*types.Package, *types.Info, error) { pkg := types.NewPackage(string(orig.Metadata().PkgPath), string(orig.Metadata().Name)) info := &types.Info{ Types: make(map[ast.Expr]types.TypeAndValue), @@ -446,6 +489,7 @@ func reTypeCheck(logf func(string, ...any), orig Package, fileMask map[span.URI] Scopes: make(map[ast.Node]*types.Scope), Instances: make(map[*ast.Ident]types.Instance), } + versions.InitFileVersions(info) { var files []*ast.File for _, pgf := range orig.CompiledGoFiles() { @@ -465,9 +509,9 @@ func reTypeCheck(logf func(string, ...any), orig Package, fileMask map[span.URI] var importer func(importPath string) (*types.Package, error) { var ( - importsByPath = make(map[string]*types.Package) // cached imports - toSearch = []*types.Package{orig.GetTypes()} // packages to search - searched = make(map[string]bool) // path -> (false, if present in toSearch; true, if already searched) + importsByPath = make(map[string]*types.Package) // cached imports + toSearch = []*types.Package{orig.Types()} // packages to search + searched = make(map[string]bool) // path -> (false, if present in toSearch; true, 
if already searched) ) importer = func(path string) (*types.Package, error) { if p, ok := importsByPath[path]; ok { @@ -513,7 +557,7 @@ func reTypeCheck(logf func(string, ...any), orig Package, fileMask map[span.URI] // An unparsable mod file should probably stop us // before we get here, but double check just in case. if goVersionRx.MatchString(goVersion) { - typesinternal.SetGoVersion(cfg, goVersion) + cfg.GoVersion = goVersion } } if expectErrors { @@ -540,7 +584,7 @@ func remove[T any](s []T, i int) []T { // replaceFileDecl replaces old with new in the file described by pgf. // // TODO(rfindley): generalize, and combine with rewriteSignature. -func replaceFileDecl(pgf *ParsedGoFile, old, new ast.Decl) ([]byte, error) { +func replaceFileDecl(pgf *parsego.File, old, new ast.Decl) ([]byte, error) { i := findDecl(pgf.File, old) if i == -1 { return nil, bug.Errorf("didn't find old declaration") diff --git a/gopls/internal/golang/code_lens.go b/gopls/internal/golang/code_lens.go new file mode 100644 index 00000000000..4fc923d0985 --- /dev/null +++ b/gopls/internal/golang/code_lens.go @@ -0,0 +1,250 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "context" + "go/ast" + "go/token" + "go/types" + "regexp" + "strings" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" +) + +type LensFunc func(context.Context, *cache.Snapshot, file.Handle) ([]protocol.CodeLens, error) + +// LensFuncs returns the supported lensFuncs for Go files. 
+func LensFuncs() map[command.Command]LensFunc { + return map[command.Command]LensFunc{ + command.Generate: goGenerateCodeLens, + command.Test: runTestCodeLens, + command.RegenerateCgo: regenerateCgoLens, + command.GCDetails: toggleDetailsCodeLens, + } +} + +var ( + testRe = regexp.MustCompile(`^Test([^a-z]|$)`) // TestFoo or Test but not Testable + benchmarkRe = regexp.MustCompile(`^Benchmark([^a-z]|$)`) +) + +func runTestCodeLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { + var codeLens []protocol.CodeLens + + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + fns, err := TestsAndBenchmarks(pkg, pgf) + if err != nil { + return nil, err + } + puri := fh.URI() + for _, fn := range fns.Tests { + cmd, err := command.NewTestCommand("run test", puri, []string{fn.Name}, nil) + if err != nil { + return nil, err + } + rng := protocol.Range{Start: fn.Rng.Start, End: fn.Rng.Start} + codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: &cmd}) + } + + for _, fn := range fns.Benchmarks { + cmd, err := command.NewTestCommand("run benchmark", puri, nil, []string{fn.Name}) + if err != nil { + return nil, err + } + rng := protocol.Range{Start: fn.Rng.Start, End: fn.Rng.Start} + codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: &cmd}) + } + + if len(fns.Benchmarks) > 0 { + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return nil, err + } + // add a code lens to the top of the file which runs all benchmarks in the file + rng, err := pgf.PosRange(pgf.File.Package, pgf.File.Package) + if err != nil { + return nil, err + } + var benches []string + for _, fn := range fns.Benchmarks { + benches = append(benches, fn.Name) + } + cmd, err := command.NewTestCommand("run file benchmarks", puri, nil, benches) + if err != nil { + return nil, err + } + codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: &cmd}) + } + 
return codeLens, nil +} + +type TestFn struct { + Name string + Rng protocol.Range +} + +type TestFns struct { + Tests []TestFn + Benchmarks []TestFn +} + +func TestsAndBenchmarks(pkg *cache.Package, pgf *parsego.File) (TestFns, error) { + var out TestFns + + if !strings.HasSuffix(pgf.URI.Path(), "_test.go") { + return out, nil + } + + for _, d := range pgf.File.Decls { + fn, ok := d.(*ast.FuncDecl) + if !ok { + continue + } + + rng, err := pgf.NodeRange(fn) + if err != nil { + return out, err + } + + if matchTestFunc(fn, pkg, testRe, "T") { + out.Tests = append(out.Tests, TestFn{fn.Name.Name, rng}) + } + + if matchTestFunc(fn, pkg, benchmarkRe, "B") { + out.Benchmarks = append(out.Benchmarks, TestFn{fn.Name.Name, rng}) + } + } + + return out, nil +} + +func matchTestFunc(fn *ast.FuncDecl, pkg *cache.Package, nameRe *regexp.Regexp, paramID string) bool { + // Make sure that the function name matches a test function. + if !nameRe.MatchString(fn.Name.Name) { + return false + } + info := pkg.TypesInfo() + if info == nil { + return false + } + obj, ok := info.ObjectOf(fn.Name).(*types.Func) + if !ok { + return false + } + sig := obj.Type().(*types.Signature) + // Test functions should have only one parameter. + if sig.Params().Len() != 1 { + return false + } + + // Check the type of the only parameter + // (We don't Unalias or use typesinternal.ReceiverNamed + // in the two checks below because "go test" can't see + // through aliases when enumerating Test* functions; + // it's syntactic.) 
+ paramTyp, ok := sig.Params().At(0).Type().(*types.Pointer) + if !ok { + return false + } + named, ok := paramTyp.Elem().(*types.Named) + if !ok { + return false + } + namedObj := named.Obj() + if namedObj.Pkg().Path() != "testing" { + return false + } + return namedObj.Id() == paramID +} + +func goGenerateCodeLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return nil, err + } + const ggDirective = "//go:generate" + for _, c := range pgf.File.Comments { + for _, l := range c.List { + if !strings.HasPrefix(l.Text, ggDirective) { + continue + } + rng, err := pgf.PosRange(l.Pos(), l.Pos()+token.Pos(len(ggDirective))) + if err != nil { + return nil, err + } + dir := fh.URI().Dir() + nonRecursiveCmd, err := command.NewGenerateCommand("run go generate", command.GenerateArgs{Dir: dir, Recursive: false}) + if err != nil { + return nil, err + } + recursiveCmd, err := command.NewGenerateCommand("run go generate ./...", command.GenerateArgs{Dir: dir, Recursive: true}) + if err != nil { + return nil, err + } + return []protocol.CodeLens{ + {Range: rng, Command: &recursiveCmd}, + {Range: rng, Command: &nonRecursiveCmd}, + }, nil + + } + } + return nil, nil +} + +func regenerateCgoLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return nil, err + } + var c *ast.ImportSpec + for _, imp := range pgf.File.Imports { + if imp.Path.Value == `"C"` { + c = imp + } + } + if c == nil { + return nil, nil + } + rng, err := pgf.NodeRange(c) + if err != nil { + return nil, err + } + puri := fh.URI() + cmd, err := command.NewRegenerateCgoCommand("regenerate cgo definitions", command.URIArg{URI: puri}) + if err != nil { + return nil, err + } + return []protocol.CodeLens{{Range: rng, Command: &cmd}}, nil +} + +func toggleDetailsCodeLens(ctx 
context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return nil, err + } + if !pgf.File.Package.IsValid() { + // Without a package name we have nowhere to put the codelens, so give up. + return nil, nil + } + rng, err := pgf.PosRange(pgf.File.Package, pgf.File.Package) + if err != nil { + return nil, err + } + puri := fh.URI() + cmd, err := command.NewGCDetailsCommand("Toggle gc annotation details", puri) + if err != nil { + return nil, err + } + return []protocol.CodeLens{{Range: rng, Command: &cmd}}, nil +} diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go new file mode 100644 index 00000000000..0b50e7bb31c --- /dev/null +++ b/gopls/internal/golang/codeaction.go @@ -0,0 +1,517 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "context" + "encoding/json" + "fmt" + "go/ast" + "strings" + + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/gopls/internal/analysis/fillstruct" + "golang.org/x/tools/gopls/internal/analysis/fillswitch" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/slices" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" + "golang.org/x/tools/internal/imports" +) + +// CodeActions returns all code actions (edits and other commands) +// available for the selected range. 
+func CodeActions(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, rng protocol.Range, diagnostics []protocol.Diagnostic, want map[protocol.CodeActionKind]bool) (actions []protocol.CodeAction, _ error) { + // Only compute quick fixes if there are any diagnostics to fix. + wantQuickFixes := want[protocol.QuickFix] && len(diagnostics) > 0 + + // Code actions requiring syntax information alone. + if wantQuickFixes || want[protocol.SourceOrganizeImports] || want[protocol.RefactorExtract] { + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return nil, err + } + + // Process any missing imports and pair them with the diagnostics they fix. + if wantQuickFixes || want[protocol.SourceOrganizeImports] { + importEdits, importEditsPerFix, err := allImportsFixes(ctx, snapshot, pgf) + if err != nil { + event.Error(ctx, "imports fixes", err, tag.File.Of(fh.URI().Path())) + importEdits = nil + importEditsPerFix = nil + } + + // Separate this into a set of codeActions per diagnostic, where + // each action is the addition, removal, or renaming of one import. + if wantQuickFixes { + for _, importFix := range importEditsPerFix { + fixed := fixedByImportFix(importFix.fix, diagnostics) + if len(fixed) == 0 { + continue + } + actions = append(actions, protocol.CodeAction{ + Title: importFixTitle(importFix.fix), + Kind: protocol.QuickFix, + Edit: &protocol.WorkspaceEdit{ + DocumentChanges: documentChanges(fh, importFix.edits), + }, + Diagnostics: fixed, + }) + } + } + + // Send all of the import edits as one code action if the file is + // being organized. 
+ if want[protocol.SourceOrganizeImports] && len(importEdits) > 0 { + actions = append(actions, protocol.CodeAction{ + Title: "Organize Imports", + Kind: protocol.SourceOrganizeImports, + Edit: &protocol.WorkspaceEdit{ + DocumentChanges: documentChanges(fh, importEdits), + }, + }) + } + } + + if want[protocol.RefactorExtract] { + extractions, err := getExtractCodeActions(pgf, rng, snapshot.Options()) + if err != nil { + return nil, err + } + actions = append(actions, extractions...) + } + } + + // Code actions requiring type information. + if want[protocol.RefactorRewrite] || + want[protocol.RefactorInline] || + want[protocol.GoTest] || + want[protocol.GoDoc] { + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + if want[protocol.RefactorRewrite] { + rewrites, err := getRewriteCodeActions(ctx, pkg, snapshot, pgf, fh, rng, snapshot.Options()) + if err != nil { + return nil, err + } + actions = append(actions, rewrites...) + } + + if want[protocol.RefactorInline] { + rewrites, err := getInlineCodeActions(pkg, pgf, rng, snapshot.Options()) + if err != nil { + return nil, err + } + actions = append(actions, rewrites...) + } + + if want[protocol.GoTest] { + fixes, err := getGoTestCodeActions(pkg, pgf, rng) + if err != nil { + return nil, err + } + actions = append(actions, fixes...) 
+ } + + if want[protocol.GoDoc] { + loc := protocol.Location{URI: pgf.URI, Range: rng} + cmd, err := command.NewDocCommand("View package documentation", loc) + if err != nil { + return nil, err + } + actions = append(actions, protocol.CodeAction{ + Title: cmd.Title, + Kind: protocol.GoDoc, + Command: &cmd, + }) + } + } + return actions, nil +} + +func supportsResolveEdits(options *settings.Options) bool { + return options.CodeActionResolveOptions != nil && slices.Contains(options.CodeActionResolveOptions, "edit") +} + +func importFixTitle(fix *imports.ImportFix) string { + var str string + switch fix.FixType { + case imports.AddImport: + str = fmt.Sprintf("Add import: %s %q", fix.StmtInfo.Name, fix.StmtInfo.ImportPath) + case imports.DeleteImport: + str = fmt.Sprintf("Delete import: %s %q", fix.StmtInfo.Name, fix.StmtInfo.ImportPath) + case imports.SetImportName: + str = fmt.Sprintf("Rename import: %s %q", fix.StmtInfo.Name, fix.StmtInfo.ImportPath) + } + return str +} + +// fixedByImportFix filters the provided slice of diagnostics to those that +// would be fixed by the provided imports fix. +func fixedByImportFix(fix *imports.ImportFix, diagnostics []protocol.Diagnostic) []protocol.Diagnostic { + var results []protocol.Diagnostic + for _, diagnostic := range diagnostics { + switch { + // "undeclared name: X" may be an unresolved import. + case strings.HasPrefix(diagnostic.Message, "undeclared name: "): + ident := strings.TrimPrefix(diagnostic.Message, "undeclared name: ") + if ident == fix.IdentName { + results = append(results, diagnostic) + } + // "undefined: X" may be an unresolved import at Go 1.20+. + case strings.HasPrefix(diagnostic.Message, "undefined: "): + ident := strings.TrimPrefix(diagnostic.Message, "undefined: ") + if ident == fix.IdentName { + results = append(results, diagnostic) + } + // "could not import: X" may be an invalid import. 
+ case strings.HasPrefix(diagnostic.Message, "could not import: "): + ident := strings.TrimPrefix(diagnostic.Message, "could not import: ") + if ident == fix.IdentName { + results = append(results, diagnostic) + } + // "X imported but not used" is an unused import. + // "X imported but not used as Y" is an unused import. + case strings.Contains(diagnostic.Message, " imported but not used"): + idx := strings.Index(diagnostic.Message, " imported but not used") + importPath := diagnostic.Message[:idx] + if importPath == fmt.Sprintf("%q", fix.StmtInfo.ImportPath) { + results = append(results, diagnostic) + } + } + } + return results +} + +// getExtractCodeActions returns any refactor.extract code actions for the selection. +func getExtractCodeActions(pgf *parsego.File, rng protocol.Range, options *settings.Options) ([]protocol.CodeAction, error) { + if rng.Start == rng.End { + return nil, nil + } + + start, end, err := pgf.RangePos(rng) + if err != nil { + return nil, err + } + puri := pgf.URI + var commands []protocol.Command + if _, ok, methodOk, _ := CanExtractFunction(pgf.Tok, start, end, pgf.Src, pgf.File); ok { + cmd, err := command.NewApplyFixCommand("Extract function", command.ApplyFixArgs{ + Fix: fixExtractFunction, + URI: puri, + Range: rng, + ResolveEdits: supportsResolveEdits(options), + }) + if err != nil { + return nil, err + } + commands = append(commands, cmd) + if methodOk { + cmd, err := command.NewApplyFixCommand("Extract method", command.ApplyFixArgs{ + Fix: fixExtractMethod, + URI: puri, + Range: rng, + ResolveEdits: supportsResolveEdits(options), + }) + if err != nil { + return nil, err + } + commands = append(commands, cmd) + } + } + if _, _, ok, _ := CanExtractVariable(start, end, pgf.File); ok { + cmd, err := command.NewApplyFixCommand("Extract variable", command.ApplyFixArgs{ + Fix: fixExtractVariable, + URI: puri, + Range: rng, + ResolveEdits: supportsResolveEdits(options), + }) + if err != nil { + return nil, err + } + commands = 
append(commands, cmd) + } + var actions []protocol.CodeAction + for i := range commands { + actions = append(actions, newCodeAction(commands[i].Title, protocol.RefactorExtract, &commands[i], nil, options)) + } + return actions, nil +} + +func newCodeAction(title string, kind protocol.CodeActionKind, cmd *protocol.Command, diagnostics []protocol.Diagnostic, options *settings.Options) protocol.CodeAction { + action := protocol.CodeAction{ + Title: title, + Kind: kind, + Diagnostics: diagnostics, + } + if !supportsResolveEdits(options) { + action.Command = cmd + } else { + data, err := json.Marshal(cmd) + if err != nil { + panic("unable to marshal") + } + msg := json.RawMessage(data) + action.Data = &msg + } + return action +} + +func getRewriteCodeActions(ctx context.Context, pkg *cache.Package, snapshot *cache.Snapshot, pgf *parsego.File, fh file.Handle, rng protocol.Range, options *settings.Options) (_ []protocol.CodeAction, rerr error) { + // golang/go#61693: code actions were refactored to run outside of the + // analysis framework, but as a result they lost their panic recovery. + // + // These code actions should never fail, but put back the panic recovery as a + // defensive measure. 
+ defer func() { + if r := recover(); r != nil { + rerr = bug.Errorf("refactor.rewrite code actions panicked: %v", r) + } + }() + + var actions []protocol.CodeAction + + if canRemoveParameter(pkg, pgf, rng) { + cmd, err := command.NewChangeSignatureCommand("remove unused parameter", command.ChangeSignatureArgs{ + RemoveParameter: protocol.Location{ + URI: pgf.URI, + Range: rng, + }, + ResolveEdits: supportsResolveEdits(options), + }) + if err != nil { + return nil, err + } + actions = append(actions, newCodeAction("Refactor: remove unused parameter", protocol.RefactorRewrite, &cmd, nil, options)) + } + + if action, ok := ConvertStringLiteral(pgf, fh, rng); ok { + actions = append(actions, action) + } + + start, end, err := pgf.RangePos(rng) + if err != nil { + return nil, err + } + + var commands []protocol.Command + if _, ok, _ := CanInvertIfCondition(pgf.File, start, end); ok { + cmd, err := command.NewApplyFixCommand("Invert 'if' condition", command.ApplyFixArgs{ + Fix: fixInvertIfCondition, + URI: pgf.URI, + Range: rng, + ResolveEdits: supportsResolveEdits(options), + }) + if err != nil { + return nil, err + } + commands = append(commands, cmd) + } + + if msg, ok, _ := CanSplitLines(pgf.File, pkg.FileSet(), start, end); ok { + cmd, err := command.NewApplyFixCommand(msg, command.ApplyFixArgs{ + Fix: fixSplitLines, + URI: pgf.URI, + Range: rng, + ResolveEdits: supportsResolveEdits(options), + }) + if err != nil { + return nil, err + } + commands = append(commands, cmd) + } + + if msg, ok, _ := CanJoinLines(pgf.File, pkg.FileSet(), start, end); ok { + cmd, err := command.NewApplyFixCommand(msg, command.ApplyFixArgs{ + Fix: fixJoinLines, + URI: pgf.URI, + Range: rng, + ResolveEdits: supportsResolveEdits(options), + }) + if err != nil { + return nil, err + } + commands = append(commands, cmd) + } + + // N.B.: an inspector only pays for itself after ~5 passes, which means we're + // currently not getting a good deal on this inspection. 
+ // + // TODO: Consider removing the inspection after convenienceAnalyzers are removed. + inspect := inspector.New([]*ast.File{pgf.File}) + for _, diag := range fillstruct.Diagnose(inspect, start, end, pkg.Types(), pkg.TypesInfo()) { + rng, err := pgf.Mapper.PosRange(pgf.Tok, diag.Pos, diag.End) + if err != nil { + return nil, err + } + for _, fix := range diag.SuggestedFixes { + cmd, err := command.NewApplyFixCommand(fix.Message, command.ApplyFixArgs{ + Fix: diag.Category, + URI: pgf.URI, + Range: rng, + ResolveEdits: supportsResolveEdits(options), + }) + if err != nil { + return nil, err + } + commands = append(commands, cmd) + } + } + + for _, diag := range fillswitch.Diagnose(inspect, start, end, pkg.Types(), pkg.TypesInfo()) { + edits, err := suggestedFixToEdits(ctx, snapshot, pkg.FileSet(), &diag.SuggestedFixes[0]) + if err != nil { + return nil, err + } + + changes := []protocol.DocumentChanges{} // must be a slice + for _, edit := range edits { + edit := edit + changes = append(changes, protocol.DocumentChanges{ + TextDocumentEdit: &edit, + }) + } + + actions = append(actions, protocol.CodeAction{ + Title: diag.Message, + Kind: protocol.RefactorRewrite, + Edit: &protocol.WorkspaceEdit{ + DocumentChanges: changes, + }, + }) + } + for i := range commands { + actions = append(actions, newCodeAction(commands[i].Title, protocol.RefactorRewrite, &commands[i], nil, options)) + } + + return actions, nil +} + +// canRemoveParameter reports whether we can remove the function parameter +// indicated by the given [start, end) range. +// +// This is true if: +// - there are no parse or type errors, and +// - [start, end) is contained within an unused field or parameter name +// - ... of a non-method function declaration. +// +// (Note that the unusedparam analyzer also computes this property, but +// much more precisely, allowing it to report its findings as diagnostics.) 
+func canRemoveParameter(pkg *cache.Package, pgf *parsego.File, rng protocol.Range) bool { + if perrors, terrors := pkg.ParseErrors(), pkg.TypeErrors(); len(perrors) > 0 || len(terrors) > 0 { + return false // can't remove parameters from packages with errors + } + info, err := FindParam(pgf, rng) + if err != nil { + return false // e.g. invalid range + } + if info.Field == nil { + return false // range does not span a parameter + } + if info.Decl.Body == nil { + return false // external function + } + if len(info.Field.Names) == 0 { + return true // no names => field is unused + } + if info.Name == nil { + return false // no name is indicated + } + if info.Name.Name == "_" { + return true // trivially unused + } + + obj := pkg.TypesInfo().Defs[info.Name] + if obj == nil { + return false // something went wrong + } + + used := false + ast.Inspect(info.Decl.Body, func(node ast.Node) bool { + if n, ok := node.(*ast.Ident); ok && pkg.TypesInfo().Uses[n] == obj { + used = true + } + return !used // keep going until we find a use + }) + return !used +} + +// getInlineCodeActions returns refactor.inline actions available at the specified range. +func getInlineCodeActions(pkg *cache.Package, pgf *parsego.File, rng protocol.Range, options *settings.Options) ([]protocol.CodeAction, error) { + start, end, err := pgf.RangePos(rng) + if err != nil { + return nil, err + } + + // If range is within call expression, offer to inline the call. + var commands []protocol.Command + if _, fn, err := EnclosingStaticCall(pkg, pgf, start, end); err == nil { + cmd, err := command.NewApplyFixCommand(fmt.Sprintf("Inline call to %s", fn.Name()), command.ApplyFixArgs{ + Fix: fixInlineCall, + URI: pgf.URI, + Range: rng, + ResolveEdits: supportsResolveEdits(options), + }) + if err != nil { + return nil, err + } + commands = append(commands, cmd) + } + + // Convert commands to actions. 
+ var actions []protocol.CodeAction + for i := range commands { + actions = append(actions, newCodeAction(commands[i].Title, protocol.RefactorInline, &commands[i], nil, options)) + } + return actions, nil +} + +// getGoTestCodeActions returns any "run this test/benchmark" code actions for the selection. +func getGoTestCodeActions(pkg *cache.Package, pgf *parsego.File, rng protocol.Range) ([]protocol.CodeAction, error) { + fns, err := TestsAndBenchmarks(pkg, pgf) + if err != nil { + return nil, err + } + + var tests, benchmarks []string + for _, fn := range fns.Tests { + if !protocol.Intersect(fn.Rng, rng) { + continue + } + tests = append(tests, fn.Name) + } + for _, fn := range fns.Benchmarks { + if !protocol.Intersect(fn.Rng, rng) { + continue + } + benchmarks = append(benchmarks, fn.Name) + } + + if len(tests) == 0 && len(benchmarks) == 0 { + return nil, nil + } + + cmd, err := command.NewTestCommand("Run tests and benchmarks", pgf.URI, tests, benchmarks) + if err != nil { + return nil, err + } + return []protocol.CodeAction{{ + Title: cmd.Title, + Kind: protocol.GoTest, + Command: &cmd, + }}, nil +} + +func documentChanges(fh file.Handle, edits []protocol.TextEdit) []protocol.DocumentChanges { + return protocol.TextEditsToDocumentChanges(fh.URI(), fh.Version(), edits) +} diff --git a/gopls/internal/golang/comment.go b/gopls/internal/golang/comment.go new file mode 100644 index 00000000000..95f0df98293 --- /dev/null +++ b/gopls/internal/golang/comment.go @@ -0,0 +1,41 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "fmt" + "go/doc/comment" + + "golang.org/x/tools/gopls/internal/settings" +) + +// CommentToMarkdown converts comment text to formatted markdown. +// The comment was prepared by DocReader, +// so it is known not to have leading, trailing blank lines +// nor to have trailing spaces at the end of lines. 
+// The comment markers have already been removed. +func CommentToMarkdown(text string, options *settings.Options) string { + var p comment.Parser + doc := p.Parse(text) + var pr comment.Printer + // The default produces {#Hdr-...} tags for headings. + // vscode displays thems, which is undesirable. + // The godoc for comment.Printer says the tags + // avoid a security problem. + pr.HeadingID = func(*comment.Heading) string { return "" } + pr.DocLinkURL = func(link *comment.DocLink) string { + msg := fmt.Sprintf("https://%s/%s", options.LinkTarget, link.ImportPath) + if link.Name != "" { + msg += "#" + if link.Recv != "" { + msg += link.Recv + "." + } + msg += link.Name + } + return msg + } + easy := pr.Markdown(doc) + return string(easy) +} diff --git a/gopls/internal/lsp/source/completion/builtin.go b/gopls/internal/golang/completion/builtin.go similarity index 92% rename from gopls/internal/lsp/source/completion/builtin.go rename to gopls/internal/golang/completion/builtin.go index 39732d86434..68f773e09ae 100644 --- a/gopls/internal/lsp/source/completion/builtin.go +++ b/gopls/internal/golang/completion/builtin.go @@ -82,7 +82,7 @@ func (c *completer) builtinArgType(obj types.Object, call *ast.CallExpr, parentI // For non-initial append() args, infer slice type from the first // append() arg, or from parent context. if len(call.Args) > 0 { - inf.objType = c.pkg.GetTypesInfo().TypeOf(call.Args[0]) + inf.objType = c.pkg.TypesInfo().TypeOf(call.Args[0]) } if inf.objType == nil { inf.objType = parentInf.objType @@ -98,13 +98,13 @@ func (c *completer) builtinArgType(obj types.Object, call *ast.CallExpr, parentI // Penalize the first append() argument as a candidate. You // don't normally append a slice to itself. 
- if sliceChain := objChain(c.pkg.GetTypesInfo(), call.Args[0]); len(sliceChain) > 0 { + if sliceChain := objChain(c.pkg.TypesInfo(), call.Args[0]); len(sliceChain) > 0 { inf.penalized = append(inf.penalized, penalizedObj{objChain: sliceChain, penalty: 0.9}) } case "delete": if exprIdx > 0 && len(call.Args) > 0 { // Try to fill in expected type of map key. - firstArgType := c.pkg.GetTypesInfo().TypeOf(call.Args[0]) + firstArgType := c.pkg.TypesInfo().TypeOf(call.Args[0]) if firstArgType != nil { if mt, ok := firstArgType.Underlying().(*types.Map); ok { inf.objType = mt.Key() @@ -114,9 +114,9 @@ func (c *completer) builtinArgType(obj types.Object, call *ast.CallExpr, parentI case "copy": var t1, t2 types.Type if len(call.Args) > 0 { - t1 = c.pkg.GetTypesInfo().TypeOf(call.Args[0]) + t1 = c.pkg.TypesInfo().TypeOf(call.Args[0]) if len(call.Args) > 1 { - t2 = c.pkg.GetTypesInfo().TypeOf(call.Args[1]) + t2 = c.pkg.TypesInfo().TypeOf(call.Args[1]) } } diff --git a/gopls/internal/golang/completion/completion.go b/gopls/internal/golang/completion/completion.go new file mode 100644 index 00000000000..4da492762c8 --- /dev/null +++ b/gopls/internal/golang/completion/completion.go @@ -0,0 +1,3356 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package completion provides core functionality for code completion in Go +// editors and tools. 
+package completion + +import ( + "context" + "fmt" + "go/ast" + "go/build" + "go/constant" + "go/parser" + "go/printer" + "go/scanner" + "go/token" + "go/types" + "math" + "reflect" + "sort" + "strconv" + "strings" + "sync" + "sync/atomic" + "time" + "unicode" + + "golang.org/x/sync/errgroup" + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/golang/completion/snippet" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + goplsastutil "golang.org/x/tools/gopls/internal/util/astutil" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/gopls/internal/util/slices" + "golang.org/x/tools/gopls/internal/util/typesutil" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/fuzzy" + "golang.org/x/tools/internal/imports" + "golang.org/x/tools/internal/stdlib" + "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/versions" +) + +// A CompletionItem represents a possible completion suggested by the algorithm. +type CompletionItem struct { + + // Invariant: CompletionItem does not refer to syntax or types. + + // Label is the primary text the user sees for this completion item. + Label string + + // Detail is supplemental information to present to the user. + // This often contains the type or return type of the completion item. + Detail string + + // InsertText is the text to insert if this item is selected. + // Any of the prefix that has already been typed is not trimmed. + // The insert text does not contain snippets. 
+ InsertText string + + Kind protocol.CompletionItemKind + Tags []protocol.CompletionItemTag + Deprecated bool // Deprecated, prefer Tags if available + + // An optional array of additional TextEdits that are applied when + // selecting this completion. + // + // Additional text edits should be used to change text unrelated to the current cursor position + // (for example adding an import statement at the top of the file if the completion item will + // insert an unqualified type). + AdditionalTextEdits []protocol.TextEdit + + // Depth is how many levels were searched to find this completion. + // For example when completing "foo<>", "fooBar" is depth 0, and + // "fooBar.Baz" is depth 1. + Depth int + + // Score is the internal relevance score. + // A higher score indicates that this completion item is more relevant. + Score float64 + + // snippet is the LSP snippet for the completion item. The LSP + // specification contains details about LSP snippets. For example, a + // snippet for a function with the following signature: + // + // func foo(a, b, c int) + // + // would be: + // + // foo(${1:a int}, ${2: b int}, ${3: c int}) + // + // If Placeholders is false in the CompletionOptions, the above + // snippet would instead be: + // + // foo(${1:}) + snippet *snippet.Builder + + // Documentation is the documentation for the completion item. + Documentation string + + // isSlice reports whether the underlying type of the object + // from which this candidate was derived is a slice. + // (Used to complete append() calls.) + isSlice bool +} + +// completionOptions holds completion specific configuration. +type completionOptions struct { + unimported bool + documentation bool + fullDocumentation bool + placeholders bool + snippets bool + postfix bool + matcher settings.Matcher + budget time.Duration + completeFunctionCalls bool +} + +// Snippet is a convenience returns the snippet if available, otherwise +// the InsertText. 
+// used for an item, depending on if the callee wants placeholders or not. +func (i *CompletionItem) Snippet() string { + if i.snippet != nil { + return i.snippet.String() + } + return i.InsertText +} + +// Scoring constants are used for weighting the relevance of different candidates. +const ( + // stdScore is the base score for all completion items. + stdScore float64 = 1.0 + + // highScore indicates a very relevant completion item. + highScore float64 = 10.0 + + // lowScore indicates an irrelevant or not useful completion item. + lowScore float64 = 0.01 +) + +// matcher matches a candidate's label against the user input. The +// returned score reflects the quality of the match. A score of zero +// indicates no match, and a score of one means a perfect match. +type matcher interface { + Score(candidateLabel string) (score float32) +} + +// prefixMatcher implements case sensitive prefix matching. +type prefixMatcher string + +func (pm prefixMatcher) Score(candidateLabel string) float32 { + if strings.HasPrefix(candidateLabel, string(pm)) { + return 1 + } + return -1 +} + +// insensitivePrefixMatcher implements case insensitive prefix matching. +type insensitivePrefixMatcher string + +func (ipm insensitivePrefixMatcher) Score(candidateLabel string) float32 { + if strings.HasPrefix(strings.ToLower(candidateLabel), string(ipm)) { + return 1 + } + return -1 +} + +// completer contains the necessary information for a single completion request. +type completer struct { + snapshot *cache.Snapshot + pkg *cache.Package + qf types.Qualifier // for qualifying typed expressions + mq golang.MetadataQualifier // for syntactic qualifying + opts *completionOptions + + // completionContext contains information about the trigger for this + // completion request. + completionContext completionContext + + // fh is a handle to the file associated with this completion request. + fh file.Handle + + // filename is the name of the file associated with this completion request. 
+ filename string + + // file is the AST of the file associated with this completion request. + file *ast.File + + // goversion is the version of Go in force in the file, as + // defined by x/tools/internal/versions. Empty if unknown. + // TODO(adonovan): with go1.22+ it should always be known. + goversion string + + // (tokFile, pos) is the position at which the request was triggered. + tokFile *token.File + pos token.Pos + + // path is the path of AST nodes enclosing the position. + path []ast.Node + + // seen is the map that ensures we do not return duplicate results. + seen map[types.Object]bool + + // items is the list of completion items returned. + items []CompletionItem + + // completionCallbacks is a list of callbacks to collect completions that + // require expensive operations. This includes operations where we search + // through the entire module cache. + completionCallbacks []func(context.Context, *imports.Options) error + + // surrounding describes the identifier surrounding the position. + surrounding *Selection + + // inference contains information we've inferred about ideal + // candidates such as the candidate's type. + inference candidateInference + + // enclosingFunc contains information about the function enclosing + // the position. + enclosingFunc *funcInfo + + // enclosingCompositeLiteral contains information about the composite literal + // enclosing the position. + enclosingCompositeLiteral *compLitInfo + + // deepState contains the current state of our deep completion search. + deepState deepCompletionState + + // matcher matches the candidates against the surrounding prefix. + matcher matcher + + // methodSetCache caches the types.NewMethodSet call, which is relatively + // expensive and can be called many times for the same type while searching + // for deep completions. 
+ methodSetCache map[methodSetKey]*types.MethodSet + + // tooNewSymbolsCache is a cache of + // [typesinternal.TooNewStdSymbols], recording for each std + // package which of its exported symbols are too new for + // the version of Go in force in the completion file. + // (The value is the minimum version in the form "go1.%d".) + tooNewSymbolsCache map[*types.Package]map[types.Object]string + + // mapper converts the positions in the file from which the completion originated. + mapper *protocol.Mapper + + // startTime is when we started processing this completion request. It does + // not include any time the request spent in the queue. + // + // Note: in CL 503016, startTime move to *after* type checking, but it was + // subsequently determined that it was better to keep setting it *before* + // type checking, so that the completion budget best approximates the user + // experience. See golang/go#62665 for more details. + startTime time.Time + + // scopes contains all scopes defined by nodes in our path, + // including nil values for nodes that don't defined a scope. It + // also includes our package scope and the universal scope at the + // end. + scopes []*types.Scope +} + +// tooNew reports whether obj is a standard library symbol that is too +// new for the specified Go version. +func (c *completer) tooNew(obj types.Object) bool { + pkg := obj.Pkg() + if pkg == nil { + return false // unsafe.Pointer or error.Error + } + disallowed, ok := c.tooNewSymbolsCache[pkg] + if !ok { + disallowed = typesinternal.TooNewStdSymbols(pkg, c.goversion) + c.tooNewSymbolsCache[pkg] = disallowed + } + return disallowed[obj] != "" +} + +// funcInfo holds info about a function object. +type funcInfo struct { + // sig is the function declaration enclosing the position. + sig *types.Signature + + // body is the function's body. + body *ast.BlockStmt +} + +type compLitInfo struct { + // cl is the *ast.CompositeLit enclosing the position. 
+ cl *ast.CompositeLit + + // clType is the type of cl. + clType types.Type + + // kv is the *ast.KeyValueExpr enclosing the position, if any. + kv *ast.KeyValueExpr + + // inKey is true if we are certain the position is in the key side + // of a key-value pair. + inKey bool + + // maybeInFieldName is true if inKey is false and it is possible + // we are completing a struct field name. For example, + // "SomeStruct{<>}" will be inKey=false, but maybeInFieldName=true + // because we _could_ be completing a field name. + maybeInFieldName bool +} + +type importInfo struct { + importPath string + name string +} + +type methodSetKey struct { + typ types.Type + addressable bool +} + +type completionContext struct { + // triggerCharacter is the character used to trigger completion at current + // position, if any. + triggerCharacter string + + // triggerKind is information about how a completion was triggered. + triggerKind protocol.CompletionTriggerKind + + // commentCompletion is true if we are completing a comment. + commentCompletion bool + + // packageCompletion is true if we are completing a package name. + packageCompletion bool +} + +// A Selection represents the cursor position and surrounding identifier. +type Selection struct { + content string + tokFile *token.File + start, end, cursor token.Pos // relative to rng.TokFile + mapper *protocol.Mapper +} + +func (p Selection) Range() (protocol.Range, error) { + return p.mapper.PosRange(p.tokFile, p.start, p.end) +} + +func (p Selection) Prefix() string { + return p.content[:p.cursor-p.start] +} + +func (p Selection) Suffix() string { + return p.content[p.cursor-p.start:] +} + +func (c *completer) setSurrounding(ident *ast.Ident) { + if c.surrounding != nil { + return + } + if !(ident.Pos() <= c.pos && c.pos <= ident.End()) { + return + } + + c.surrounding = &Selection{ + content: ident.Name, + cursor: c.pos, + // Overwrite the prefix only. 
+ tokFile: c.tokFile, + start: ident.Pos(), + end: ident.End(), + mapper: c.mapper, + } + + c.setMatcherFromPrefix(c.surrounding.Prefix()) +} + +func (c *completer) setMatcherFromPrefix(prefix string) { + switch c.opts.matcher { + case settings.Fuzzy: + c.matcher = fuzzy.NewMatcher(prefix) + case settings.CaseSensitive: + c.matcher = prefixMatcher(prefix) + default: + c.matcher = insensitivePrefixMatcher(strings.ToLower(prefix)) + } +} + +func (c *completer) getSurrounding() *Selection { + if c.surrounding == nil { + c.surrounding = &Selection{ + content: "", + cursor: c.pos, + tokFile: c.tokFile, + start: c.pos, + end: c.pos, + mapper: c.mapper, + } + } + return c.surrounding +} + +// candidate represents a completion candidate. +type candidate struct { + // obj is the types.Object to complete to. + // TODO(adonovan): eliminate dependence on go/types throughout this struct. + // See comment in (*completer).selector for explanation. + obj types.Object + + // score is used to rank candidates. + score float64 + + // name is the deep object name path, e.g. "foo.bar" + name string + + // detail is additional information about this item. If not specified, + // defaults to type string for the object. + detail string + + // path holds the path from the search root (excluding the candidate + // itself) for a deep candidate. + path []types.Object + + // pathInvokeMask is a bit mask tracking whether each entry in path + // should be formatted with "()" (i.e. whether it is a function + // invocation). + pathInvokeMask uint16 + + // mods contains modifications that should be applied to the + // candidate when inserted. For example, "foo" may be inserted as + // "*foo" or "foo()". + mods []typeModKind + + // addressable is true if a pointer can be taken to the candidate. + addressable bool + + // convertTo is a type that this candidate should be cast to. For + // example, if convertTo is float64, "foo" should be formatted as + // "float64(foo)". 
+ convertTo types.Type + + // imp is the import that needs to be added to this package in order + // for this candidate to be valid. nil if no import needed. + imp *importInfo +} + +func (c candidate) hasMod(mod typeModKind) bool { + for _, m := range c.mods { + if m == mod { + return true + } + } + return false +} + +// ErrIsDefinition is an error that informs the user they got no +// completions because they tried to complete the name of a new object +// being defined. +type ErrIsDefinition struct { + objStr string +} + +func (e ErrIsDefinition) Error() string { + msg := "this is a definition" + if e.objStr != "" { + msg += " of " + e.objStr + } + return msg +} + +// Completion returns a list of possible candidates for completion, given a +// a file and a position. +// +// The selection is computed based on the preceding identifier and can be used by +// the client to score the quality of the completion. For instance, some clients +// may tolerate imperfect matches as valid completion results, since users may make typos. +func Completion(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, protoPos protocol.Position, protoContext protocol.CompletionContext) ([]CompletionItem, *Selection, error) { + ctx, done := event.Start(ctx, "completion.Completion") + defer done() + + startTime := time.Now() + + pkg, pgf, err := golang.NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil || pgf.File.Package == token.NoPos { + // If we can't parse this file or find position for the package + // keyword, it may be missing a package declaration. Try offering + // suggestions for the package declaration. + // Note that this would be the case even if the keyword 'package' is + // present but no package name exists. + items, surrounding, innerErr := packageClauseCompletions(ctx, snapshot, fh, protoPos) + if innerErr != nil { + // return the error for GetParsedFile since it's more relevant in this situation. 
+ return nil, nil, fmt.Errorf("getting file %s for Completion: %v (package completions: %v)", fh.URI(), err, innerErr) + } + return items, surrounding, nil + } + + pos, err := pgf.PositionPos(protoPos) + if err != nil { + return nil, nil, err + } + // Completion is based on what precedes the cursor. + // Find the path to the position before pos. + path, _ := astutil.PathEnclosingInterval(pgf.File, pos-1, pos-1) + if path == nil { + return nil, nil, fmt.Errorf("cannot find node enclosing position") + } + + // Check if completion at this position is valid. If not, return early. + switch n := path[0].(type) { + case *ast.BasicLit: + // Skip completion inside literals except for ImportSpec + if len(path) > 1 { + if _, ok := path[1].(*ast.ImportSpec); ok { + break + } + } + return nil, nil, nil + case *ast.CallExpr: + if n.Ellipsis.IsValid() && pos > n.Ellipsis && pos <= n.Ellipsis+token.Pos(len("...")) { + // Don't offer completions inside or directly after "...". For + // example, don't offer completions at "<>" in "foo(bar...<>"). + return nil, nil, nil + } + case *ast.Ident: + // reject defining identifiers + if obj, ok := pkg.TypesInfo().Defs[n]; ok { + if v, ok := obj.(*types.Var); ok && v.IsField() && v.Embedded() { + // An anonymous field is also a reference to a type. + } else if pgf.File.Name == n { + // Don't skip completions if Ident is for package name. + break + } else { + objStr := "" + if obj != nil { + qual := types.RelativeTo(pkg.Types()) + objStr = types.ObjectString(obj, qual) + } + ans, sel := definition(path, obj, pgf) + if ans != nil { + sort.Slice(ans, func(i, j int) bool { + return ans[i].Score > ans[j].Score + }) + return ans, sel, nil + } + return nil, nil, ErrIsDefinition{objStr: objStr} + } + } + } + + // Collect all surrounding scopes, innermost first. 
+ scopes := golang.CollectScopes(pkg.TypesInfo(), path, pos) + scopes = append(scopes, pkg.Types().Scope(), types.Universe) + + var goversion string // "" => no version check + // Prior go1.22, the behavior of FileVersion is not useful to us. + if slices.Contains(build.Default.ReleaseTags, "go1.22") { + goversion = versions.FileVersion(pkg.TypesInfo(), pgf.File) // may be "" + } + + opts := snapshot.Options() + c := &completer{ + pkg: pkg, + snapshot: snapshot, + qf: typesutil.FileQualifier(pgf.File, pkg.Types(), pkg.TypesInfo()), + mq: golang.MetadataQualifierForFile(snapshot, pgf.File, pkg.Metadata()), + completionContext: completionContext{ + triggerCharacter: protoContext.TriggerCharacter, + triggerKind: protoContext.TriggerKind, + }, + fh: fh, + filename: fh.URI().Path(), + tokFile: pgf.Tok, + file: pgf.File, + goversion: goversion, + path: path, + pos: pos, + seen: make(map[types.Object]bool), + enclosingFunc: enclosingFunction(path, pkg.TypesInfo()), + enclosingCompositeLiteral: enclosingCompositeLiteral(path, pos, pkg.TypesInfo()), + deepState: deepCompletionState{ + enabled: opts.DeepCompletion, + }, + opts: &completionOptions{ + matcher: opts.Matcher, + unimported: opts.CompleteUnimported, + documentation: opts.CompletionDocumentation && opts.HoverKind != settings.NoDocumentation, + fullDocumentation: opts.HoverKind == settings.FullDocumentation, + placeholders: opts.UsePlaceholders, + budget: opts.CompletionBudget, + snippets: opts.InsertTextFormat == protocol.SnippetTextFormat, + postfix: opts.ExperimentalPostfixCompletions, + completeFunctionCalls: opts.CompleteFunctionCalls, + }, + // default to a matcher that always matches + matcher: prefixMatcher(""), + methodSetCache: make(map[methodSetKey]*types.MethodSet), + tooNewSymbolsCache: make(map[*types.Package]map[types.Object]string), + mapper: pgf.Mapper, + startTime: startTime, + scopes: scopes, + } + + ctx, cancel := context.WithCancel(ctx) + defer cancel() + + // Compute the deadline for this 
operation. Deadline is relative to the + // search operation, not the entire completion RPC, as the work up until this + // point depends significantly on how long it took to type-check, which in + // turn depends on the timing of the request relative to other operations on + // the snapshot. Including that work in the budget leads to inconsistent + // results (and realistically, if type-checking took 200ms already, the user + // is unlikely to be significantly more bothered by e.g. another 100ms of + // search). + // + // Don't overload the context with this deadline, as we don't want to + // conflate user cancellation (=fail the operation) with our time limit + // (=stop searching and succeed with partial results). + var deadline *time.Time + if c.opts.budget > 0 { + d := startTime.Add(c.opts.budget) + deadline = &d + } + + if surrounding := c.containingIdent(pgf.Src); surrounding != nil { + c.setSurrounding(surrounding) + } + + c.inference = expectedCandidate(ctx, c) + + err = c.collectCompletions(ctx) + if err != nil { + return nil, nil, err + } + + // Deep search collected candidates and their members for more candidates. + c.deepSearch(ctx, 1, deadline) + + // At this point we have a sufficiently complete set of results, and want to + // return as close to the completion budget as possible. Previously, we + // avoided cancelling the context because it could result in partial results + // for e.g. struct fields. At this point, we have a minimal valid set of + // candidates, and so truncating due to context cancellation is acceptable. 
+ if c.opts.budget > 0 { + timeoutDuration := time.Until(c.startTime.Add(c.opts.budget)) + ctx, cancel = context.WithTimeout(ctx, timeoutDuration) + defer cancel() + } + + for _, callback := range c.completionCallbacks { + if deadline == nil || time.Now().Before(*deadline) { + if err := c.snapshot.RunProcessEnvFunc(ctx, callback); err != nil { + return nil, nil, err + } + } + } + + // Search candidates populated by expensive operations like + // unimportedMembers etc. for more completion items. + c.deepSearch(ctx, 0, deadline) + + // Statement candidates offer an entire statement in certain contexts, as + // opposed to a single object. Add statement candidates last because they + // depend on other candidates having already been collected. + c.addStatementCandidates() + + c.sortItems() + return c.items, c.getSurrounding(), nil +} + +// collectCompletions adds possible completion candidates to either the deep +// search queue or completion items directly for different completion contexts. +func (c *completer) collectCompletions(ctx context.Context) error { + // Inside import blocks, return completions for unimported packages. + for _, importSpec := range c.file.Imports { + if !(importSpec.Path.Pos() <= c.pos && c.pos <= importSpec.Path.End()) { + continue + } + return c.populateImportCompletions(importSpec) + } + + // Inside comments, offer completions for the name of the relevant symbol. + for _, comment := range c.file.Comments { + if comment.Pos() < c.pos && c.pos <= comment.End() { + c.populateCommentCompletions(comment) + return nil + } + } + + // Struct literals are handled entirely separately. + if c.wantStructFieldCompletions() { + // If we are definitely completing a struct field name, deep completions + // don't make sense. 
+ if c.enclosingCompositeLiteral.inKey { + c.deepState.enabled = false + } + return c.structLiteralFieldName(ctx) + } + + if lt := c.wantLabelCompletion(); lt != labelNone { + c.labels(lt) + return nil + } + + if c.emptySwitchStmt() { + // Empty switch statements only admit "default" and "case" keywords. + c.addKeywordItems(map[string]bool{}, highScore, CASE, DEFAULT) + return nil + } + + switch n := c.path[0].(type) { + case *ast.Ident: + if c.file.Name == n { + return c.packageNameCompletions(ctx, c.fh.URI(), n) + } else if sel, ok := c.path[1].(*ast.SelectorExpr); ok && sel.Sel == n { + // Is this the Sel part of a selector? + return c.selector(ctx, sel) + } + return c.lexical(ctx) + // The function name hasn't been typed yet, but the parens are there: + // recv.‸(arg) + case *ast.TypeAssertExpr: + // Create a fake selector expression. + // + // The name "_" is the convention used by go/parser to represent phantom + // selectors. + sel := &ast.Ident{NamePos: n.X.End() + token.Pos(len(".")), Name: "_"} + return c.selector(ctx, &ast.SelectorExpr{X: n.X, Sel: sel}) + case *ast.SelectorExpr: + return c.selector(ctx, n) + // At the file scope, only keywords are allowed. + case *ast.BadDecl, *ast.File: + c.addKeywordCompletions() + default: + // fallback to lexical completions + return c.lexical(ctx) + } + + return nil +} + +// containingIdent returns the *ast.Ident containing pos, if any. It +// synthesizes an *ast.Ident to allow completion in the face of +// certain syntax errors. +func (c *completer) containingIdent(src []byte) *ast.Ident { + // In the normal case, our leaf AST node is the identifier being completed. 
+ if ident, ok := c.path[0].(*ast.Ident); ok { + return ident + } + + pos, tkn, lit := c.scanToken(src) + if !pos.IsValid() { + return nil + } + + fakeIdent := &ast.Ident{Name: lit, NamePos: pos} + + if _, isBadDecl := c.path[0].(*ast.BadDecl); isBadDecl { + // You don't get *ast.Idents at the file level, so look for bad + // decls and use the manually extracted token. + return fakeIdent + } else if c.emptySwitchStmt() { + // Only keywords are allowed in empty switch statements. + // *ast.Idents are not parsed, so we must use the manually + // extracted token. + return fakeIdent + } else if tkn.IsKeyword() { + // Otherwise, manually extract the prefix if our containing token + // is a keyword. This improves completion after an "accidental + // keyword", e.g. completing to "variance" in "someFunc(var<>)". + return fakeIdent + } + + return nil +} + +// scanToken scans pgh's contents for the token containing pos. +func (c *completer) scanToken(contents []byte) (token.Pos, token.Token, string) { + tok := c.pkg.FileSet().File(c.pos) + + var s scanner.Scanner + s.Init(tok, contents, nil, 0) + for { + tknPos, tkn, lit := s.Scan() + if tkn == token.EOF || tknPos >= c.pos { + return token.NoPos, token.ILLEGAL, "" + } + + if len(lit) > 0 && tknPos <= c.pos && c.pos <= tknPos+token.Pos(len(lit)) { + return tknPos, tkn, lit + } + } +} + +func (c *completer) sortItems() { + sort.SliceStable(c.items, func(i, j int) bool { + // Sort by score first. + if c.items[i].Score != c.items[j].Score { + return c.items[i].Score > c.items[j].Score + } + + // Then sort by label so order stays consistent. This also has the + // effect of preferring shorter candidates. + return c.items[i].Label < c.items[j].Label + }) +} + +// emptySwitchStmt reports whether pos is in an empty switch or select +// statement. 
+func (c *completer) emptySwitchStmt() bool { + block, ok := c.path[0].(*ast.BlockStmt) + if !ok || len(block.List) > 0 || len(c.path) == 1 { + return false + } + + switch c.path[1].(type) { + case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: + return true + default: + return false + } +} + +// populateImportCompletions yields completions for an import path around the cursor. +// +// Completions are suggested at the directory depth of the given import path so +// that we don't overwhelm the user with a large list of possibilities. As an +// example, a completion for the prefix "golang" results in "golang.org/". +// Completions for "golang.org/" yield its subdirectories +// (i.e. "golang.org/x/"). The user is meant to accept completion suggestions +// until they reach a complete import path. +func (c *completer) populateImportCompletions(searchImport *ast.ImportSpec) error { + if !strings.HasPrefix(searchImport.Path.Value, `"`) { + return nil + } + + // deepSearch is not valuable for import completions. + c.deepState.enabled = false + + importPath := searchImport.Path.Value + + // Extract the text between the quotes (if any) in an import spec. + // prefix is the part of import path before the cursor. + prefixEnd := c.pos - searchImport.Path.Pos() + prefix := strings.Trim(importPath[:prefixEnd], `"`) + + // The number of directories in the import path gives us the depth at + // which to search. + depth := len(strings.Split(prefix, "/")) - 1 + + content := importPath + start, end := searchImport.Path.Pos(), searchImport.Path.End() + namePrefix, nameSuffix := `"`, `"` + // If a starting quote is present, adjust surrounding to either after the + // cursor or after the first slash (/), except if cursor is at the starting + // quote. Otherwise we provide a completion including the starting quote. 
+ if strings.HasPrefix(importPath, `"`) && c.pos > searchImport.Path.Pos() { + content = content[1:] + start++ + if depth > 0 { + // Adjust textEdit start to replacement range. For ex: if current + // path was "golang.or/x/to<>ols/internal/", where <> is the cursor + // position, start of the replacement range would be after + // "golang.org/x/". + path := strings.SplitAfter(prefix, "/") + numChars := len(strings.Join(path[:len(path)-1], "")) + content = content[numChars:] + start += token.Pos(numChars) + } + namePrefix = "" + } + + // We won't provide an ending quote if one is already present, except if + // cursor is after the ending quote but still in import spec. This is + // because cursor has to be in our textEdit range. + if strings.HasSuffix(importPath, `"`) && c.pos < searchImport.Path.End() { + end-- + content = content[:len(content)-1] + nameSuffix = "" + } + + c.surrounding = &Selection{ + content: content, + cursor: c.pos, + tokFile: c.tokFile, + start: start, + end: end, + mapper: c.mapper, + } + + seenImports := make(map[string]struct{}) + for _, importSpec := range c.file.Imports { + if importSpec.Path.Value == importPath { + continue + } + seenImportPath, err := strconv.Unquote(importSpec.Path.Value) + if err != nil { + return err + } + seenImports[seenImportPath] = struct{}{} + } + + var mu sync.Mutex // guard c.items locally, since searchImports is called in parallel + seen := make(map[string]struct{}) + searchImports := func(pkg imports.ImportFix) { + path := pkg.StmtInfo.ImportPath + if _, ok := seenImports[path]; ok { + return + } + + // Any package path containing fewer directories than the search + // prefix is not a match. + pkgDirList := strings.Split(path, "/") + if len(pkgDirList) < depth+1 { + return + } + pkgToConsider := strings.Join(pkgDirList[:depth+1], "/") + + name := pkgDirList[depth] + // if we're adding an opening quote to completion too, set name to full + // package path since we'll need to overwrite that range. 
+ if namePrefix == `"` { + name = pkgToConsider + } + + score := pkg.Relevance + if len(pkgDirList)-1 == depth { + score *= highScore + } else { + // For incomplete package paths, add a terminal slash to indicate that the + // user should keep triggering completions. + name += "/" + pkgToConsider += "/" + } + + if _, ok := seen[pkgToConsider]; ok { + return + } + seen[pkgToConsider] = struct{}{} + + mu.Lock() + defer mu.Unlock() + + name = namePrefix + name + nameSuffix + obj := types.NewPkgName(0, nil, name, types.NewPackage(pkgToConsider, name)) + c.deepState.enqueue(candidate{ + obj: obj, + detail: strconv.Quote(pkgToConsider), + score: score, + }) + } + + c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error { + return imports.GetImportPaths(ctx, searchImports, prefix, c.filename, c.pkg.Types().Name(), opts.Env) + }) + return nil +} + +// populateCommentCompletions yields completions for comments preceding or in declarations. +func (c *completer) populateCommentCompletions(comment *ast.CommentGroup) { + // If the completion was triggered by a period, ignore it. These types of + // completions will not be useful in comments. + if c.completionContext.triggerCharacter == "." { + return + } + + // Using the comment position find the line after + file := c.pkg.FileSet().File(comment.End()) + if file == nil { + return + } + + // Deep completion doesn't work properly in comments since we don't + // have a type object to complete further. + c.deepState.enabled = false + c.completionContext.commentCompletion = true + + // Documentation isn't useful in comments, since it might end up being the + // comment itself. 
+ c.opts.documentation = false + + commentLine := safetoken.Line(file, comment.End()) + + // comment is valid, set surrounding as word boundaries around cursor + c.setSurroundingForComment(comment) + + // Using the next line pos, grab and parse the exported symbol on that line + for _, n := range c.file.Decls { + declLine := safetoken.Line(file, n.Pos()) + // if the comment is not in, directly above or on the same line as a declaration + if declLine != commentLine && declLine != commentLine+1 && + !(n.Pos() <= comment.Pos() && comment.End() <= n.End()) { + continue + } + switch node := n.(type) { + // handle const, vars, and types + case *ast.GenDecl: + for _, spec := range node.Specs { + switch spec := spec.(type) { + case *ast.ValueSpec: + for _, name := range spec.Names { + if name.String() == "_" { + continue + } + obj := c.pkg.TypesInfo().ObjectOf(name) + c.deepState.enqueue(candidate{obj: obj, score: stdScore}) + } + case *ast.TypeSpec: + // add TypeSpec fields to completion + switch typeNode := spec.Type.(type) { + case *ast.StructType: + c.addFieldItems(typeNode.Fields) + case *ast.FuncType: + c.addFieldItems(typeNode.Params) + c.addFieldItems(typeNode.Results) + case *ast.InterfaceType: + c.addFieldItems(typeNode.Methods) + } + + if spec.Name.String() == "_" { + continue + } + + obj := c.pkg.TypesInfo().ObjectOf(spec.Name) + // Type name should get a higher score than fields but not highScore by default + // since field near a comment cursor gets a highScore + score := stdScore * 1.1 + // If type declaration is on the line after comment, give it a highScore. 
+ if declLine == commentLine+1 { + score = highScore + } + + c.deepState.enqueue(candidate{obj: obj, score: score}) + } + } + // handle functions + case *ast.FuncDecl: + c.addFieldItems(node.Recv) + c.addFieldItems(node.Type.Params) + c.addFieldItems(node.Type.Results) + + // collect receiver struct fields + if node.Recv != nil { + sig := c.pkg.TypesInfo().Defs[node.Name].(*types.Func).Type().(*types.Signature) + _, named := typesinternal.ReceiverNamed(sig.Recv()) // may be nil if ill-typed + if named != nil { + if recvStruct, ok := named.Underlying().(*types.Struct); ok { + for i := 0; i < recvStruct.NumFields(); i++ { + field := recvStruct.Field(i) + c.deepState.enqueue(candidate{obj: field, score: lowScore}) + } + } + } + } + + if node.Name.String() == "_" { + continue + } + + obj := c.pkg.TypesInfo().ObjectOf(node.Name) + if obj == nil || obj.Pkg() != nil && obj.Pkg() != c.pkg.Types() { + continue + } + + c.deepState.enqueue(candidate{obj: obj, score: highScore}) + } + } +} + +// sets word boundaries surrounding a cursor for a comment +func (c *completer) setSurroundingForComment(comments *ast.CommentGroup) { + var cursorComment *ast.Comment + for _, comment := range comments.List { + if c.pos >= comment.Pos() && c.pos <= comment.End() { + cursorComment = comment + break + } + } + // if cursor isn't in the comment + if cursorComment == nil { + return + } + + // index of cursor in comment text + cursorOffset := int(c.pos - cursorComment.Pos()) + start, end := cursorOffset, cursorOffset + for start > 0 && isValidIdentifierChar(cursorComment.Text[start-1]) { + start-- + } + for end < len(cursorComment.Text) && isValidIdentifierChar(cursorComment.Text[end]) { + end++ + } + + c.surrounding = &Selection{ + content: cursorComment.Text[start:end], + cursor: c.pos, + tokFile: c.tokFile, + start: token.Pos(int(cursorComment.Slash) + start), + end: token.Pos(int(cursorComment.Slash) + end), + mapper: c.mapper, + } + c.setMatcherFromPrefix(c.surrounding.Prefix()) +} + +// 
isValidIdentifierChar returns true if a byte is a valid go identifier +// character, i.e. unicode letter or digit or underscore. +func isValidIdentifierChar(char byte) bool { + charRune := rune(char) + return unicode.In(charRune, unicode.Letter, unicode.Digit) || char == '_' +} + +// adds struct fields, interface methods, function declaration fields to completion +func (c *completer) addFieldItems(fields *ast.FieldList) { + if fields == nil { + return + } + + cursor := c.surrounding.cursor + for _, field := range fields.List { + for _, name := range field.Names { + if name.String() == "_" { + continue + } + obj := c.pkg.TypesInfo().ObjectOf(name) + if obj == nil { + continue + } + + // if we're in a field comment/doc, score that field as more relevant + score := stdScore + if field.Comment != nil && field.Comment.Pos() <= cursor && cursor <= field.Comment.End() { + score = highScore + } else if field.Doc != nil && field.Doc.Pos() <= cursor && cursor <= field.Doc.End() { + score = highScore + } + + c.deepState.enqueue(candidate{obj: obj, score: score}) + } + } +} + +func (c *completer) wantStructFieldCompletions() bool { + clInfo := c.enclosingCompositeLiteral + if clInfo == nil { + return false + } + return is[*types.Struct](clInfo.clType) && (clInfo.inKey || clInfo.maybeInFieldName) +} + +func (c *completer) wantTypeName() bool { + return !c.completionContext.commentCompletion && c.inference.typeName.wantTypeName +} + +// See https://golang.org/issue/36001. Unimported completions are expensive. +const ( + maxUnimportedPackageNames = 5 + unimportedMemberTarget = 100 +) + +// selector finds completions for the specified selector expression. +func (c *completer) selector(ctx context.Context, sel *ast.SelectorExpr) error { + c.inference.objChain = objChain(c.pkg.TypesInfo(), sel.X) + + // True selector? 
+ if tv, ok := c.pkg.TypesInfo().Types[sel.X]; ok { + c.methodsAndFields(tv.Type, tv.Addressable(), nil, c.deepState.enqueue) + c.addPostfixSnippetCandidates(ctx, sel) + return nil + } + + id, ok := sel.X.(*ast.Ident) + if !ok { + return nil + } + + // Treat sel as a qualified identifier. + var filter func(*metadata.Package) bool + needImport := false + if pkgName, ok := c.pkg.TypesInfo().Uses[id].(*types.PkgName); ok { + // Qualified identifier with import declaration. + imp := pkgName.Imported() + + // Known direct dependency? Expand using type information. + if _, ok := c.pkg.Metadata().DepsByPkgPath[golang.PackagePath(imp.Path())]; ok { + c.packageMembers(imp, stdScore, nil, c.deepState.enqueue) + return nil + } + + // Imported declaration with missing type information. + // Fall through to shallow completion of unimported package members. + // Match candidate packages by path. + filter = func(mp *metadata.Package) bool { + return strings.TrimPrefix(string(mp.PkgPath), "vendor/") == imp.Path() + } + } else { + // Qualified identifier without import declaration. + // Match candidate packages by name. + filter = func(mp *metadata.Package) bool { + return string(mp.Name) == id.Name + } + needImport = true + } + + // Search unimported packages. + if !c.opts.unimported { + return nil // feature disabled + } + + // -- completion of symbols in unimported packages -- + + // The deep completion algorithm is exceedingly complex and + // deeply coupled to the now obsolete notions that all + // token.Pos values can be interpreted by as a single FileSet + // belonging to the Snapshot and that all types.Object values + // are canonicalized by a single types.Importer mapping. + // These invariants are no longer true now that gopls uses + // an incremental approach, parsing and type-checking each + // package separately. 
+ // + // Consequently, completion of symbols defined in packages that + // are not currently imported by the query file cannot use the + // deep completion machinery which is based on type information. + // Instead it must use only syntax information from a quick + // parse of top-level declarations (but not function bodies). + // + // TODO(adonovan): rewrite the deep completion machinery to + // not assume global Pos/Object realms and then use export + // data instead of the quick parse approach taken here. + + // First, we search among packages in the forward transitive + // closure of the workspace. + // We'll use a fast parse to extract package members + // from those that match the name/path criterion. + all, err := c.snapshot.AllMetadata(ctx) + if err != nil { + return err + } + known := make(map[golang.PackagePath]*metadata.Package) + for _, mp := range all { + if mp.Name == "main" { + continue // not importable + } + if mp.IsIntermediateTestVariant() { + continue + } + // The only test variant we admit is "p [p.test]" + // when we are completing within "p_test [p.test]", + // as in that case we would like to offer completions + // of the test variants' additional symbols. + if mp.ForTest != "" && c.pkg.Metadata().PkgPath != mp.ForTest+"_test" { + continue + } + if !filter(mp) { + continue + } + // Prefer previous entry unless this one is its test variant. + if mp.ForTest != "" || known[mp.PkgPath] == nil { + known[mp.PkgPath] = mp + } + } + + paths := make([]string, 0, len(known)) + for path := range known { + paths = append(paths, string(path)) + } + + // Rank import paths as goimports would. 
+ var relevances map[string]float64 + if len(paths) > 0 { + if err := c.snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error { + var err error + relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths) + return err + }); err != nil { + return err + } + sort.Slice(paths, func(i, j int) bool { + return relevances[paths[i]] > relevances[paths[j]] + }) + } + + // quickParse does a quick parse of a single file of package m, + // extracts exported package members and adds candidates to c.items. + // TODO(rfindley): synchronizing access to c here does not feel right. + // Consider adding a concurrency-safe API for completer. + var cMu sync.Mutex // guards c.items and c.matcher + var enough int32 // atomic bool + quickParse := func(uri protocol.DocumentURI, mp *metadata.Package, tooNew map[string]bool) error { + if atomic.LoadInt32(&enough) != 0 { + return nil + } + + fh, err := c.snapshot.ReadFile(ctx, uri) + if err != nil { + return err + } + content, err := fh.Content() + if err != nil { + return err + } + path := string(mp.PkgPath) + forEachPackageMember(content, func(tok token.Token, id *ast.Ident, fn *ast.FuncDecl) { + if atomic.LoadInt32(&enough) != 0 { + return + } + + if !id.IsExported() { + return + } + + if tooNew[id.Name] { + return // symbol too new for requesting file's Go's version + } + + cMu.Lock() + score := c.matcher.Score(id.Name) + cMu.Unlock() + + if sel.Sel.Name != "_" && score == 0 { + return // not a match; avoid constructing the completion item below + } + + // The only detail is the kind and package: `var (from "example.com/foo")` + // TODO(adonovan): pretty-print FuncDecl.FuncType or TypeSpec.Type? + // TODO(adonovan): should this score consider the actual c.matcher.Score + // of the item? How does this compare with the deepState.enqueue path? 
+ item := CompletionItem{ + Label: id.Name, + Detail: fmt.Sprintf("%s (from %q)", strings.ToLower(tok.String()), mp.PkgPath), + InsertText: id.Name, + Score: float64(score) * unimportedScore(relevances[path]), + } + switch tok { + case token.FUNC: + item.Kind = protocol.FunctionCompletion + case token.VAR: + item.Kind = protocol.VariableCompletion + case token.CONST: + item.Kind = protocol.ConstantCompletion + case token.TYPE: + // Without types, we can't distinguish Class from Interface. + item.Kind = protocol.ClassCompletion + } + + if needImport { + imp := &importInfo{importPath: path} + if imports.ImportPathToAssumedName(path) != string(mp.Name) { + imp.name = string(mp.Name) + } + item.AdditionalTextEdits, _ = c.importEdits(imp) + } + + // For functions, add a parameter snippet. + if fn != nil { + paramList := func(list *ast.FieldList) []string { + var params []string + if list != nil { + var cfg printer.Config // slight overkill + param := func(name string, typ ast.Expr) { + var buf strings.Builder + buf.WriteString(name) + buf.WriteByte(' ') + cfg.Fprint(&buf, token.NewFileSet(), typ) + params = append(params, buf.String()) + } + + for _, field := range list.List { + if field.Names != nil { + for _, name := range field.Names { + param(name.Name, field.Type) + } + } else { + param("_", field.Type) + } + } + } + return params + } + + // Ideally we would eliminate the suffix of type + // parameters that are redundant with inference + // from the argument types (#51783), but it's + // quite fiddly to do using syntax alone. + // (See inferableTypeParams in format.go.) 
+ tparams := paramList(fn.Type.TypeParams) + params := paramList(fn.Type.Params) + var sn snippet.Builder + c.functionCallSnippet(id.Name, tparams, params, &sn) + item.snippet = &sn + } + + cMu.Lock() + c.items = append(c.items, item) + if len(c.items) >= unimportedMemberTarget { + atomic.StoreInt32(&enough, 1) + } + cMu.Unlock() + }) + return nil + } + + var goversion string + // TODO(adonovan): after go1.21, replace with: + // goversion = c.pkg.GetTypesInfo().FileVersions[c.file] + if v := reflect.ValueOf(c.pkg.TypesInfo()).Elem().FieldByName("FileVersions"); v.IsValid() { + goversion = v.Interface().(map[*ast.File]string)[c.file] // may be "" + } + + // Extract the package-level candidates using a quick parse. + var g errgroup.Group + for _, path := range paths { + mp := known[golang.PackagePath(path)] + + // For standard packages, build a filter of symbols that + // are too new for the requesting file's Go version. + var tooNew map[string]bool + if syms, ok := stdlib.PackageSymbols[path]; ok && goversion != "" { + tooNew = make(map[string]bool) + for _, sym := range syms { + if versions.Before(goversion, sym.Version.String()) { + tooNew[sym.Name] = true + } + } + } + + for _, uri := range mp.CompiledGoFiles { + uri := uri + g.Go(func() error { + return quickParse(uri, mp, tooNew) + }) + } + } + if err := g.Wait(); err != nil { + return err + } + + // In addition, we search in the module cache using goimports. + ctx, cancel := context.WithCancel(ctx) + var mu sync.Mutex + add := func(pkgExport imports.PackageExport) { + if ignoreUnimportedCompletion(pkgExport.Fix) { + return + } + + mu.Lock() + defer mu.Unlock() + // TODO(adonovan): what if the actual package has a vendor/ prefix? + if _, ok := known[golang.PackagePath(pkgExport.Fix.StmtInfo.ImportPath)]; ok { + return // We got this one above. + } + + // Continue with untyped proposals. 
		// Synthesize a fake package and Var for each exported symbol
		// reported by goimports; these objects are never type-checked.
		pkg := types.NewPackage(pkgExport.Fix.StmtInfo.ImportPath, pkgExport.Fix.IdentName)
		for _, symbol := range pkgExport.Exports {
			if goversion != "" && versions.Before(goversion, symbol.Version.String()) {
				continue // symbol too new for this file
			}
			score := unimportedScore(pkgExport.Fix.Relevance)
			c.deepState.enqueue(candidate{
				obj:   types.NewVar(0, pkg, symbol.Name, nil),
				score: score,
				imp: &importInfo{
					importPath: pkgExport.Fix.StmtInfo.ImportPath,
					name:       pkgExport.Fix.StmtInfo.Name,
				},
			})
		}
		// Cancel the module-cache search once we have enough items.
		if len(c.items) >= unimportedMemberTarget {
			cancel()
		}
	}

	// Defer the (potentially slow) module-cache scan to a completion
	// callback, run after the main completion pass.
	c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error {
		defer cancel()
		return imports.GetPackageExports(ctx, add, id.Name, c.filename, c.pkg.Types().Name(), opts.Env)
	})
	return nil
}

// unimportedScore returns a score for an unimported package that is generally
// lower than other candidates.
func unimportedScore(relevance float64) float64 {
	return (stdScore + .1*relevance) / 2
}

// packageMembers invokes cb with a candidate for each declaration in
// pkg's package-level scope, skipping std symbols that are too new for
// the requesting file's Go version.
func (c *completer) packageMembers(pkg *types.Package, score float64, imp *importInfo, cb func(candidate)) {
	scope := pkg.Scope()
	for _, name := range scope.Names() {
		obj := scope.Lookup(name)
		if c.tooNew(obj) {
			continue // std symbol too new for file's Go version
		}
		cb(candidate{
			obj:         obj,
			score:       score,
			imp:         imp,
			addressable: isVar(obj),
		})
	}
}

// ignoreUnimportedCompletion reports whether an unimported completion
// resulting in the given import should be ignored.
func ignoreUnimportedCompletion(fix *imports.ImportFix) bool {
	// golang/go#60062: don't add unimported completion to golang.org/toolchain.
+ return fix != nil && strings.HasPrefix(fix.StmtInfo.ImportPath, "golang.org/toolchain") +} + +func (c *completer) methodsAndFields(typ types.Type, addressable bool, imp *importInfo, cb func(candidate)) { + mset := c.methodSetCache[methodSetKey{typ, addressable}] + if mset == nil { + if addressable && !types.IsInterface(typ) && !isPointer(typ) { + // Add methods of *T, which includes methods with receiver T. + mset = types.NewMethodSet(types.NewPointer(typ)) + } else { + // Add methods of T. + mset = types.NewMethodSet(typ) + } + c.methodSetCache[methodSetKey{typ, addressable}] = mset + } + + if isStarTestingDotF(typ) && addressable { + // is that a sufficient test? (or is more care needed?) + if c.fuzz(mset, imp, cb) { + return + } + } + + for i := 0; i < mset.Len(); i++ { + obj := mset.At(i).Obj() + // to the other side of the cb() queue? + if c.tooNew(obj) { + continue // std method too new for file's Go version + } + cb(candidate{ + obj: mset.At(i).Obj(), + score: stdScore, + imp: imp, + addressable: addressable || isPointer(typ), + }) + } + + // Add fields of T. + eachField(typ, func(v *types.Var) { + if c.tooNew(v) { + return // std field too new for file's Go version + } + cb(candidate{ + obj: v, + score: stdScore - 0.01, + imp: imp, + addressable: addressable || isPointer(typ), + }) + }) +} + +// isStarTestingDotF reports whether typ is *testing.F. +func isStarTestingDotF(typ types.Type) bool { + // No Unalias, since go test doesn't consider + // types when enumeratinf test funcs, only syntax. + ptr, _ := typ.(*types.Pointer) + if ptr == nil { + return false + } + named, _ := ptr.Elem().(*types.Named) + if named == nil { + return false + } + obj := named.Obj() + // obj.Pkg is nil for the error type. + return obj != nil && obj.Pkg() != nil && obj.Pkg().Path() == "testing" && obj.Name() == "F" +} + +// lexical finds completions in the lexical environment. 
func (c *completer) lexical(ctx context.Context) error {
	var (
		builtinIota = types.Universe.Lookup("iota")
		builtinNil  = types.Universe.Lookup("nil")

		// TODO(rfindley): only allow "comparable" where it is valid (in constraint
		// position or embedded in interface declarations).
		// builtinComparable = types.Universe.Lookup("comparable")
	)

	// Track seen variables to avoid showing completions for shadowed variables.
	// This works since we look at scopes from innermost to outermost.
	seen := make(map[string]struct{})

	// Process scopes innermost first.
	for i, scope := range c.scopes {
		if scope == nil {
			continue
		}

	Names:
		for _, name := range scope.Names() {
			declScope, obj := scope.LookupParent(name, c.pos)
			if declScope != scope {
				continue // Name was declared in some enclosing scope, or not at all.
			}

			// If obj's type is invalid, find the AST node that defines the lexical block
			// containing the declaration of obj. Don't resolve types for packages.
			if !isPkgName(obj) && !typeIsValid(obj.Type()) {
				// Match the scope to its ast.Node. If the scope is the package scope,
				// use the *ast.File as the starting node.
				var node ast.Node
				if i < len(c.path) {
					node = c.path[i]
				} else if i == len(c.path) { // use the *ast.File for package scope
					node = c.path[i-1]
				}
				if node != nil {
					if resolved := resolveInvalid(c.pkg.FileSet(), obj, node, c.pkg.TypesInfo()); resolved != nil {
						obj = resolved
					}
				}
			}

			// Don't use LHS of decl in RHS.
			for _, ident := range enclosingDeclLHS(c.path) {
				if obj.Pos() == ident.Pos() {
					continue Names
				}
			}

			// Don't suggest "iota" outside of const decls.
			if obj == builtinIota && !c.inConstDecl() {
				continue
			}

			// Rank outer scopes lower than inner.
			score := stdScore * math.Pow(.99, float64(i))

			// Downrank "nil" a bit so it is ranked below more interesting candidates.
			if obj == builtinNil {
				score /= 2
			}

			// If we haven't already added a candidate for an object with this name.
			if _, ok := seen[obj.Name()]; !ok {
				seen[obj.Name()] = struct{}{}
				c.deepState.enqueue(candidate{
					obj:         obj,
					score:       score,
					addressable: isVar(obj),
				})
			}
		}
	}

	if c.inference.objType != nil {
		if named, ok := aliases.Unalias(typesinternal.Unpointer(c.inference.objType)).(*types.Named); ok {
			// If we expected a named type, check the type's package for
			// completion items. This is useful when the current file hasn't
			// imported the type's package yet.

			if named.Obj() != nil && named.Obj().Pkg() != nil {
				pkg := named.Obj().Pkg()

				// Make sure the package name isn't already in use by another
				// object, and that this file doesn't import the package yet.
				// TODO(adonovan): what if pkg.Path has vendor/ prefix?
				if _, ok := seen[pkg.Name()]; !ok && pkg != c.pkg.Types() && !alreadyImports(c.file, golang.ImportPath(pkg.Path())) {
					seen[pkg.Name()] = struct{}{}
					obj := types.NewPkgName(0, nil, pkg.Name(), pkg)
					imp := &importInfo{
						importPath: pkg.Path(),
					}
					if imports.ImportPathToAssumedName(pkg.Path()) != pkg.Name() {
						imp.name = pkg.Name()
					}
					c.deepState.enqueue(candidate{
						obj:   obj,
						score: stdScore,
						imp:   imp,
					})
				}
			}
		}
	}

	if c.opts.unimported {
		if err := c.unimportedPackages(ctx, seen); err != nil {
			return err
		}
	}

	if c.inference.typeName.isTypeParam {
		// If we are completing a type param, offer each structural type.
		// This ensures we suggest "[]int" and "[]float64" for a constraint
		// with type union "[]int | []float64".
		if t, ok := c.inference.objType.(*types.Interface); ok {
			if terms, err := typeparams.InterfaceTermSet(t); err == nil {
				for _, term := range terms {
					c.injectType(ctx, term.Type())
				}
			}
		}
	} else {
		c.injectType(ctx, c.inference.objType)
	}

	// Add keyword completion items appropriate in the current context.
	c.addKeywordCompletions()

	return nil
}

// injectType manufactures candidates based on the given type. This is
// intended for types not discoverable via lexical search, such as
// composite and/or generic types. For example, if the type is "[]int",
// this method makes sure you get candidates "[]int{}" and "[]int"
// (the latter applies when completing a type name).
func (c *completer) injectType(ctx context.Context, t types.Type) {
	if t == nil {
		return
	}

	t = typesinternal.Unpointer(t)

	// If we have an expected type and it is _not_ a named type, handle
	// it specially. Non-named types like "[]int" will never be
	// considered via a lexical search, so we need to directly inject
	// them. Also allow generic types since lexical search does not
	// infer instantiated versions of them.
	if named, ok := aliases.Unalias(t).(*types.Named); !ok || named.TypeParams().Len() > 0 {
		// If our expected type is "[]int", this will add a literal
		// candidate of "[]int{}".
		c.literal(ctx, t, nil)

		if _, isBasic := t.(*types.Basic); !isBasic {
			// If we expect a non-basic type name (e.g. "[]int"), hack up
			// a named type whose name is literally "[]int". This allows
			// us to reuse our object based completion machinery.
			fakeNamedType := candidate{
				obj:   types.NewTypeName(token.NoPos, nil, types.TypeString(t, c.qf), t),
				score: stdScore,
			}
			// Make sure the type name matches before considering
			// candidate. This cuts down on useless candidates.
			if c.matchingTypeName(&fakeNamedType) {
				c.deepState.enqueue(fakeNamedType)
			}
		}
	}
}

// unimportedPackages enqueues completion candidates for packages that
// are not yet imported by the current file, drawing first from the
// workspace and then (via a completion callback) from the module cache.
func (c *completer) unimportedPackages(ctx context.Context, seen map[string]struct{}) error {
	var prefix string
	if c.surrounding != nil {
		prefix = c.surrounding.Prefix()
	}

	// Don't suggest unimported packages if we have absolutely nothing
	// to go on.
	if prefix == "" {
		return nil
	}

	count := 0

	// Search the forward transitive closure of the workspace.
	all, err := c.snapshot.AllMetadata(ctx)
	if err != nil {
		return err
	}
	pkgNameByPath := make(map[golang.PackagePath]string)
	var paths []string // actually PackagePaths
	for _, mp := range all {
		if mp.ForTest != "" {
			continue // skip all test variants
		}
		if mp.Name == "main" {
			continue // main is non-importable
		}
		if !strings.HasPrefix(string(mp.Name), prefix) {
			continue // not a match
		}
		paths = append(paths, string(mp.PkgPath))
		pkgNameByPath[mp.PkgPath] = string(mp.Name)
	}

	// Rank candidates using goimports' algorithm.
	var relevances map[string]float64
	if len(paths) != 0 {
		if err := c.snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error {
			var err error
			relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths)
			return err
		}); err != nil {
			return err
		}
	}
	sort.Slice(paths, func(i, j int) bool {
		if relevances[paths[i]] != relevances[paths[j]] {
			return relevances[paths[i]] > relevances[paths[j]]
		}

		// Fall back to lexical sort to keep truncated set of candidates
		// in a consistent order.
		return paths[i] < paths[j]
	})

	// Enqueue a candidate for each matching workspace package, up to
	// maxUnimportedPackageNames.
	for _, path := range paths {
		name := pkgNameByPath[golang.PackagePath(path)]
		if _, ok := seen[name]; ok {
			continue
		}
		imp := &importInfo{
			importPath: path,
		}
		if imports.ImportPathToAssumedName(path) != name {
			imp.name = name
		}
		if count >= maxUnimportedPackageNames {
			return nil
		}
		c.deepState.enqueue(candidate{
			// Pass an empty *types.Package to disable deep completions.
			obj:   types.NewPkgName(0, nil, name, types.NewPackage(path, name)),
			score: unimportedScore(relevances[path]),
			imp:   imp,
		})
		count++
	}

	// In addition, ask goimports for candidates from outside the
	// workspace (e.g. the module cache), via a completion callback.
	var mu sync.Mutex
	add := func(pkg imports.ImportFix) {
		if ignoreUnimportedCompletion(&pkg) {
			return
		}
		mu.Lock()
		defer mu.Unlock()
		if _, ok := seen[pkg.IdentName]; ok {
			return
		}
		if _, ok := relevances[pkg.StmtInfo.ImportPath]; ok {
			return // already handled by the workspace pass above
		}

		if count >= maxUnimportedPackageNames {
			return
		}

		// Do not add the unimported packages to seen, since we can have
		// multiple packages of the same name as completion suggestions, since
		// only one will be chosen.
		obj := types.NewPkgName(0, nil, pkg.IdentName, types.NewPackage(pkg.StmtInfo.ImportPath, pkg.IdentName))
		c.deepState.enqueue(candidate{
			obj:   obj,
			score: unimportedScore(pkg.Relevance),
			imp: &importInfo{
				importPath: pkg.StmtInfo.ImportPath,
				name:       pkg.StmtInfo.Name,
			},
		})
		count++
	}

	c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error {
		return imports.GetAllCandidates(ctx, add, prefix, c.filename, c.pkg.Types().Name(), opts.Env)
	})

	return nil
}

// alreadyImports reports whether f has an import with the specified path.
func alreadyImports(f *ast.File, path golang.ImportPath) bool {
	for _, s := range f.Imports {
		if metadata.UnquoteImportPath(s) == path {
			return true
		}
	}
	return false
}

// inConstDecl reports whether the completion position lies within a
// const declaration (i.e. some enclosing node is a GenDecl with Tok ==
// token.CONST).
func (c *completer) inConstDecl() bool {
	for _, n := range c.path {
		if decl, ok := n.(*ast.GenDecl); ok && decl.Tok == token.CONST {
			return true
		}
	}
	return false
}

// structLiteralFieldName finds completions for struct field names inside a struct literal.
func (c *completer) structLiteralFieldName(ctx context.Context) error {
	clInfo := c.enclosingCompositeLiteral

	// Mark fields of the composite literal that have already been set,
	// except for the current field.
	addedFields := make(map[*types.Var]bool)
	for _, el := range clInfo.cl.Elts {
		if kvExpr, ok := el.(*ast.KeyValueExpr); ok {
			if clInfo.kv == kvExpr {
				continue // the field currently being completed
			}

			if key, ok := kvExpr.Key.(*ast.Ident); ok {
				if used, ok := c.pkg.TypesInfo().Uses[key]; ok {
					if usedVar, ok := used.(*types.Var); ok {
						addedFields[usedVar] = true
					}
				}
			}
		}
	}

	// Add struct fields.
	if t, ok := aliases.Unalias(clInfo.clType).(*types.Struct); ok {
		// deltaScore preserves declaration order among field candidates.
		const deltaScore = 0.0001
		for i := 0; i < t.NumFields(); i++ {
			field := t.Field(i)
			if !addedFields[field] {
				c.deepState.enqueue(candidate{
					obj:   field,
					score: highScore - float64(i)*deltaScore,
				})
			}
		}

		// Fall through and add lexical completions if we aren't
		// certain we are in the key part of a key-value pair.
		if !clInfo.maybeInFieldName {
			return nil
		}
	}

	return c.lexical(ctx)
}

// enclosingCompositeLiteral returns information about the composite literal enclosing the
// position.
func enclosingCompositeLiteral(path []ast.Node, pos token.Pos, info *types.Info) *compLitInfo {
	for _, n := range path {
		switch n := n.(type) {
		case *ast.CompositeLit:
			// The enclosing node will be a composite literal if the user has just
			// opened the curly brace (e.g. &x{<>) or the completion request is triggered
			// from an already completed composite literal expression (e.g. &x{foo: 1, <>})
			//
			// The position is not part of the composite literal unless it falls within the
			// curly braces (e.g. "foo.Foo<>Struct{}").
			if !(n.Lbrace < pos && pos <= n.Rbrace) {
				// Keep searching since we may yet be inside a composite literal.
				// For example "Foo{B: Ba<>{}}".
				break
			}

			tv, ok := info.Types[n]
			if !ok {
				return nil
			}

			clInfo := compLitInfo{
				cl:     n,
				clType: typesinternal.Unpointer(tv.Type).Underlying(),
			}

			var (
				expr    ast.Expr
				hasKeys bool
			)
			for _, el := range n.Elts {
				// Remember the expression that the position falls in, if any.
				if el.Pos() <= pos && pos <= el.End() {
					expr = el
				}

				if kv, ok := el.(*ast.KeyValueExpr); ok {
					hasKeys = true
					// If expr == el then we know the position falls in this expression,
					// so also record kv as the enclosing *ast.KeyValueExpr.
					if expr == el {
						clInfo.kv = kv
						break
					}
				}
			}

			if clInfo.kv != nil {
				// If in a *ast.KeyValueExpr, we know we are in the key if the position
				// is to the left of the colon (e.g. "Foo{F<>: V}".
				clInfo.inKey = pos <= clInfo.kv.Colon
			} else if hasKeys {
				// If we aren't in a *ast.KeyValueExpr but the composite literal has
				// other *ast.KeyValueExprs, we must be on the key side of a new
				// *ast.KeyValueExpr (e.g. "Foo{F: V, <>}").
				clInfo.inKey = true
			} else {
				switch clInfo.clType.(type) {
				case *types.Struct:
					if len(n.Elts) == 0 {
						// If the struct literal is empty, next could be a struct field
						// name or an expression (e.g. "Foo{<>}" could become "Foo{F:}"
						// or "Foo{someVar}").
						clInfo.maybeInFieldName = true
					} else if len(n.Elts) == 1 {
						// If there is one expression and the position is in that expression
						// and the expression is an identifier, we may be writing a field
						// name or an expression (e.g. "Foo{F<>}").
						_, clInfo.maybeInFieldName = expr.(*ast.Ident)
					}
				case *types.Map:
					// If we aren't in a *ast.KeyValueExpr we must be adding a new key
					// to the map.
					clInfo.inKey = true
				}
			}

			return &clInfo
		default:
			if breaksExpectedTypeInference(n, pos) {
				return nil
			}
		}
	}

	return nil
}

// enclosingFunction returns the signature and body of the function
// enclosing the given position.
func enclosingFunction(path []ast.Node, info *types.Info) *funcInfo {
	for _, node := range path {
		switch t := node.(type) {
		case *ast.FuncDecl:
			if obj, ok := info.Defs[t.Name]; ok {
				return &funcInfo{
					sig:  obj.Type().(*types.Signature),
					body: t.Body,
				}
			}
		case *ast.FuncLit:
			if typ, ok := info.Types[t]; ok {
				if sig, _ := typ.Type.(*types.Signature); sig == nil {
					// golang/go#49397: it should not be possible, but we somehow arrived
					// here with a non-signature type, most likely due to AST mangling
					// such that node.Type is not a FuncType.
					return nil
				}
				return &funcInfo{
					sig:  typ.Type.(*types.Signature),
					body: t.Body,
				}
			}
		}
	}
	return nil
}

// expectedCompositeLiteralType returns the expected type of the
// composite-literal element at the completion position, or nil if it
// cannot be determined.
func (c *completer) expectedCompositeLiteralType() types.Type {
	clInfo := c.enclosingCompositeLiteral
	switch t := clInfo.clType.(type) {
	case *types.Slice:
		if clInfo.inKey {
			return types.Typ[types.UntypedInt]
		}
		return t.Elem()
	case *types.Array:
		if clInfo.inKey {
			return types.Typ[types.UntypedInt]
		}
		return t.Elem()
	case *types.Map:
		if clInfo.inKey {
			return t.Key()
		}
		return t.Elem()
	case *types.Struct:
		// If we are completing a key (i.e. field name), there is no expected type.
		if clInfo.inKey {
			return nil
		}

		// If we are in a key-value pair, but not in the key, then we must be on the
		// value side. The expected type of the value will be determined from the key.
		if clInfo.kv != nil {
			if key, ok := clInfo.kv.Key.(*ast.Ident); ok {
				for i := 0; i < t.NumFields(); i++ {
					if field := t.Field(i); field.Name() == key.Name {
						return field.Type()
					}
				}
			}
		} else {
			// If we aren't in a key-value pair and aren't in the key, we must be using
			// implicit field names.

			// The order of the literal fields must match the order in the struct definition.
			// Find the element that the position belongs to and suggest that field's type.
			if i := exprAtPos(c.pos, clInfo.cl.Elts); i < t.NumFields() {
				return t.Field(i).Type()
			}
		}
	}
	return nil
}

// typeMod represents an operator that changes the expected type.
type typeMod struct {
	mod      typeModKind
	arrayLen int64
}

type typeModKind int

const (
	dereference   typeModKind = iota // pointer indirection: "*"
	reference                        // adds level of pointer: "&" for values, "*" for type names
	chanRead                         // channel read operator: "<-"
	sliceType                        // make a slice type: "[]" in "[]int"
	arrayType                        // make an array type: "[2]" in "[2]int"
	invoke                           // make a function call: "()" in "foo()"
	takeSlice                        // take slice of array: "[:]" in "foo[:]"
	takeDotDotDot                    // turn slice into variadic args: "..." in "foo..."
	index                            // index into slice/array: "[0]" in "foo[0]"
)

// objKind is a bitmask of expected kinds of types.
type objKind int

const (
	kindAny objKind = 0
	// NOTE: iota is 1 on the next line, so kindArray == 1<<1 and the
	// 1<<0 bit is unused.
	kindArray objKind = 1 << iota
	kindSlice
	kindChan
	kindMap
	kindStruct
	kindString
	kindInt
	kindBool
	kindBytes
	kindPtr
	kindFloat
	kindComplex
	kindError
	kindStringer
	kindFunc
)

// penalizedObj represents an object that should be disfavored as a
// completion candidate.
type penalizedObj struct {
	// objChain is the full "chain", e.g. "foo.bar().baz" becomes
	// []types.Object{foo, bar, baz}.
	objChain []types.Object
	// penalty is score penalty in the range (0, 1).
	penalty float64
}

// candidateInference holds information we have inferred about a type that can be
// used at the current position.
type candidateInference struct {
	// objType is the desired type of an object used at the query position.
	objType types.Type

	// objKind is a mask of expected kinds of types such as "map", "slice", etc.
	objKind objKind

	// variadic is true if we are completing the initial variadic
	// parameter.
	// For example:
	//     append([]T{}, <>)      // objType=T variadic=true
	//     append([]T{}, T{}, <>) // objType=T variadic=false
	variadic bool

	// modifiers are prefixes such as "*", "&" or "<-" that influence how
	// a candidate type relates to the expected type.
	modifiers []typeMod

	// convertibleTo is a type our candidate type must be convertible to.
	convertibleTo types.Type

	// typeName holds information about the expected type name at
	// position, if any.
	typeName typeNameInference

	// assignees are the types that would receive a function call's
	// results at the position. For example:
	//
	// foo := 123
	// foo, bar := <>
	//
	// at "<>", the assignees are [int, ].
	assignees []types.Type

	// variadicAssignees is true if we could be completing an inner
	// function call that fills out an outer function call's variadic
	// params. For example:
	//
	// func foo(int, ...string) {}
	//
	// foo(<>)         // variadicAssignees=true
	// foo(bar<>)      // variadicAssignees=true
	// foo(bar, baz<>) // variadicAssignees=false
	variadicAssignees bool

	// penalized holds expressions that should be disfavored as
	// candidates. For example, it tracks expressions already used in a
	// switch statement's other cases. Each expression is tracked using
	// its entire object "chain" allowing differentiation between
	// "a.foo" and "b.foo" when "a" and "b" are the same type.
	penalized []penalizedObj

	// objChain contains the chain of objects representing the
	// surrounding *ast.SelectorExpr. For example, if we are completing
	// "foo.bar.ba<>", objChain will contain []types.Object{foo, bar}.
	objChain []types.Object
}

// typeNameInference holds information about the expected type name at
// position.
type typeNameInference struct {
	// wantTypeName is true if we expect the name of a type.
	wantTypeName bool

	// modifiers are prefixes such as "*", "&" or "<-" that influence how
	// a candidate type relates to the expected type.
	modifiers []typeMod

	// assertableFrom is a type that must be assertable to our candidate type.
	assertableFrom types.Type

	// wantComparable is true if we want a comparable type.
	wantComparable bool

	// seenTypeSwitchCases tracks types that have already been used by
	// the containing type switch.
	seenTypeSwitchCases []types.Type

	// compLitType is true if we are completing a composite literal type
	// name, e.g "foo<>{}".
	compLitType bool

	// isTypeParam is true if we are completing a type instantiation parameter.
	isTypeParam bool
}

// expectedCandidate returns information about the expected candidate
// for an expression at the query position.
func expectedCandidate(ctx context.Context, c *completer) (inf candidateInference) {
	inf.typeName = expectTypeName(c)

	if c.enclosingCompositeLiteral != nil {
		inf.objType = c.expectedCompositeLiteralType()
	}

	// Walk up the enclosing-node path, innermost first, until a node
	// determines (or rules out) an expected type.
Nodes:
	for i, node := range c.path {
		switch node := node.(type) {
		case *ast.BinaryExpr:
			// Determine if query position comes from left or right of op.
			e := node.X
			if c.pos < node.OpPos {
				e = node.Y
			}
			if tv, ok := c.pkg.TypesInfo().Types[e]; ok {
				switch node.Op {
				case token.LAND, token.LOR:
					// Don't infer "bool" type for "&&" or "||". Often you want
					// to compose a boolean expression from non-boolean
					// candidates.
				default:
					inf.objType = tv.Type
				}
				break Nodes
			}
		case *ast.AssignStmt:
			// Only rank completions if you are on the right side of the token.
			if c.pos > node.TokPos {
				i := exprAtPos(c.pos, node.Rhs)
				if i >= len(node.Lhs) {
					i = len(node.Lhs) - 1
				}
				if tv, ok := c.pkg.TypesInfo().Types[node.Lhs[i]]; ok {
					inf.objType = tv.Type
				}

				// If we have a single expression on the RHS, record the LHS
				// assignees so we can favor multi-return function calls with
				// matching result values.
				if len(node.Rhs) <= 1 {
					for _, lhs := range node.Lhs {
						inf.assignees = append(inf.assignees, c.pkg.TypesInfo().TypeOf(lhs))
					}
				} else {
					// Otherwise, record our single assignee, even if its type is
					// not available. We use this info to downrank functions
					// with the wrong number of result values.
					inf.assignees = append(inf.assignees, c.pkg.TypesInfo().TypeOf(node.Lhs[i]))
				}
			}
			return inf
		case *ast.ValueSpec:
			// var x T = <> — the declared type is the expected type.
			if node.Type != nil && c.pos > node.Type.End() {
				inf.objType = c.pkg.TypesInfo().TypeOf(node.Type)
			}
			return inf
		case *ast.CallExpr:
			// Only consider CallExpr args if position falls between parens.
			if node.Lparen < c.pos && c.pos <= node.Rparen {
				// For type conversions like "int64(foo)" we can only infer our
				// desired type is convertible to int64.
				if typ := typeConversion(node, c.pkg.TypesInfo()); typ != nil {
					inf.convertibleTo = typ
					break Nodes
				}

				sig, _ := c.pkg.TypesInfo().Types[node.Fun].Type.(*types.Signature)

				if sig != nil && sig.TypeParams().Len() > 0 {
					// If we are completing a generic func call, re-check the call expression.
					// This allows type param inference to work in cases like:
					//
					// func foo[T any](T) {}
					// foo[int](<>) // <- get "int" completions instead of "T"
					//
					// TODO: remove this after https://go.dev/issue/52503
					info := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)}
					types.CheckExpr(c.pkg.FileSet(), c.pkg.Types(), node.Fun.Pos(), node.Fun, info)
					sig, _ = info.Types[node.Fun].Type.(*types.Signature)
				}

				if sig != nil {
					inf = c.expectedCallParamType(inf, node, sig)
				}

				if funIdent, ok := node.Fun.(*ast.Ident); ok {
					obj := c.pkg.TypesInfo().ObjectOf(funIdent)

					if obj != nil && obj.Parent() == types.Universe {
						// Defer call to builtinArgType so we can provide it the
						// inferred type from its parent node.
						defer func() {
							inf = c.builtinArgType(obj, node, inf)
							inf.objKind = c.builtinArgKind(ctx, obj, node)
						}()

						// The expected type of builtin arguments like append() is
						// the expected type of the builtin call itself. For
						// example:
						//
						// var foo []int = append(<>)
						//
						// To find the expected type at <> we "skip" the append()
						// node and get the expected type one level up, which is
						// []int.
						continue Nodes
					}
				}

				return inf
			}
		case *ast.ReturnStmt:
			if c.enclosingFunc != nil {
				sig := c.enclosingFunc.sig
				// Find signature result that corresponds to our return statement.
				if resultIdx := exprAtPos(c.pos, node.Results); resultIdx < len(node.Results) {
					if resultIdx < sig.Results().Len() {
						inf.objType = sig.Results().At(resultIdx).Type()
					}
				}
			}
			return inf
		case *ast.CaseClause:
			if swtch, ok := findSwitchStmt(c.path[i+1:], c.pos, node).(*ast.SwitchStmt); ok {
				if tv, ok := c.pkg.TypesInfo().Types[swtch.Tag]; ok {
					inf.objType = tv.Type

					// Record which objects have already been used in the case
					// statements so we don't suggest them again.
					for _, cc := range swtch.Body.List {
						for _, caseExpr := range cc.(*ast.CaseClause).List {
							// Don't record the expression we are currently completing.
							if caseExpr.Pos() < c.pos && c.pos <= caseExpr.End() {
								continue
							}

							if objs := objChain(c.pkg.TypesInfo(), caseExpr); len(objs) > 0 {
								inf.penalized = append(inf.penalized, penalizedObj{objChain: objs, penalty: 0.1})
							}
						}
					}
				}
			}
			return inf
		case *ast.SliceExpr:
			// Make sure position falls within the brackets (e.g. "foo[a:<>]").
			if node.Lbrack < c.pos && c.pos <= node.Rbrack {
				inf.objType = types.Typ[types.UntypedInt]
			}
			return inf
		case *ast.IndexExpr:
			// Make sure position falls within the brackets (e.g. "foo[<>]").
			if node.Lbrack < c.pos && c.pos <= node.Rbrack {
				if tv, ok := c.pkg.TypesInfo().Types[node.X]; ok {
					switch t := tv.Type.Underlying().(type) {
					case *types.Map:
						inf.objType = t.Key()
					case *types.Slice, *types.Array:
						inf.objType = types.Typ[types.UntypedInt]
					}

					// The index may also be a type-instantiation parameter
					// (e.g. "Foo[<>]") — prefer the constraint if so.
					if ct := expectedConstraint(tv.Type, 0); ct != nil {
						inf.objType = ct
						inf.typeName.wantTypeName = true
						inf.typeName.isTypeParam = true
					}
				}
			}
			return inf
		case *ast.IndexListExpr:
			if node.Lbrack < c.pos && c.pos <= node.Rbrack {
				if tv, ok := c.pkg.TypesInfo().Types[node.X]; ok {
					if ct := expectedConstraint(tv.Type, exprAtPos(c.pos, node.Indices)); ct != nil {
						inf.objType = ct
						inf.typeName.wantTypeName = true
						inf.typeName.isTypeParam = true
					}
				}
			}
			return inf
		case *ast.SendStmt:
			// Make sure we are on right side of arrow (e.g. "foo <- <>").
			if c.pos > node.Arrow+1 {
				if tv, ok := c.pkg.TypesInfo().Types[node.Chan]; ok {
					if ch, ok := tv.Type.Underlying().(*types.Chan); ok {
						inf.objType = ch.Elem()
					}
				}
			}
			return inf
		case *ast.RangeStmt:
			if goplsastutil.NodeContains(node.X, c.pos) {
				inf.objKind |= kindSlice | kindArray | kindMap | kindString
				if node.Value == nil {
					inf.objKind |= kindChan
				}
			}
			return inf
		case *ast.StarExpr:
			inf.modifiers = append(inf.modifiers, typeMod{mod: dereference})
		case *ast.UnaryExpr:
			switch node.Op {
			case token.AND:
				inf.modifiers = append(inf.modifiers, typeMod{mod: reference})
			case token.ARROW:
				inf.modifiers = append(inf.modifiers, typeMod{mod: chanRead})
			}
		case *ast.DeferStmt, *ast.GoStmt:
			inf.objKind |= kindFunc
			return inf
		default:
			if breaksExpectedTypeInference(node, c.pos) {
				return inf
			}
		}
	}

	return inf
}

// expectedCallParamType refines inf with the expected type of the
// argument at the completion position within call expression node,
// whose callee has signature sig.
func (c *completer) expectedCallParamType(inf candidateInference, node *ast.CallExpr, sig *types.Signature) candidateInference {
	numParams := sig.Params().Len()
	if numParams == 0 {
		return inf
	}

	exprIdx := exprAtPos(c.pos, node.Args)

	// If we have
one or zero arg expressions, we may be + // completing to a function call that returns multiple + // values, in turn getting passed in to the surrounding + // call. Record the assignees so we can favor function + // calls that return matching values. + if len(node.Args) <= 1 && exprIdx == 0 { + for i := 0; i < sig.Params().Len(); i++ { + inf.assignees = append(inf.assignees, sig.Params().At(i).Type()) + } + + // Record that we may be completing into variadic parameters. + inf.variadicAssignees = sig.Variadic() + } + + // Make sure not to run past the end of expected parameters. + if exprIdx >= numParams { + inf.objType = sig.Params().At(numParams - 1).Type() + } else { + inf.objType = sig.Params().At(exprIdx).Type() + } + + if sig.Variadic() && exprIdx >= (numParams-1) { + // If we are completing a variadic param, deslice the variadic type. + inf.objType = deslice(inf.objType) + // Record whether we are completing the initial variadic param. + inf.variadic = exprIdx == numParams-1 && len(node.Args) <= numParams + + // Check if we can infer object kind from printf verb. + inf.objKind |= printfArgKind(c.pkg.TypesInfo(), node, exprIdx) + } + + // If our expected type is an uninstantiated generic type param, + // swap to the constraint which will do a decent job filtering + // candidates. + if tp, _ := inf.objType.(*types.TypeParam); tp != nil { + inf.objType = tp.Constraint() + } + + return inf +} + +func expectedConstraint(t types.Type, idx int) types.Type { + var tp *types.TypeParamList + if named, _ := t.(*types.Named); named != nil { + tp = named.TypeParams() + } else if sig, _ := t.Underlying().(*types.Signature); sig != nil { + tp = sig.TypeParams() + } + if tp == nil || idx >= tp.Len() { + return nil + } + return tp.At(idx).Constraint() +} + +// objChain decomposes e into a chain of objects if possible. For +// example, "foo.bar().baz" will yield []types.Object{foo, bar, baz}. +// If any part can't be turned into an object, return nil. 
+func objChain(info *types.Info, e ast.Expr) []types.Object { + var objs []types.Object + + for e != nil { + switch n := e.(type) { + case *ast.Ident: + obj := info.ObjectOf(n) + if obj == nil { + return nil + } + objs = append(objs, obj) + e = nil + case *ast.SelectorExpr: + obj := info.ObjectOf(n.Sel) + if obj == nil { + return nil + } + objs = append(objs, obj) + e = n.X + case *ast.CallExpr: + if len(n.Args) > 0 { + return nil + } + e = n.Fun + default: + return nil + } + } + + // Reverse order so the layout matches the syntactic order. + for i := 0; i < len(objs)/2; i++ { + objs[i], objs[len(objs)-1-i] = objs[len(objs)-1-i], objs[i] + } + + return objs +} + +// applyTypeModifiers applies the list of type modifiers to a type. +// It returns nil if the modifiers could not be applied. +func (ci candidateInference) applyTypeModifiers(typ types.Type, addressable bool) types.Type { + for _, mod := range ci.modifiers { + switch mod.mod { + case dereference: + // For every "*" indirection operator, remove a pointer layer + // from candidate type. + if ptr, ok := typ.Underlying().(*types.Pointer); ok { + typ = ptr.Elem() + } else { + return nil + } + case reference: + // For every "&" address operator, add another pointer layer to + // candidate type, if the candidate is addressable. + if addressable { + typ = types.NewPointer(typ) + } else { + return nil + } + case chanRead: + // For every "<-" operator, remove a layer of channelness. + if ch, ok := typ.(*types.Chan); ok { + typ = ch.Elem() + } else { + return nil + } + } + } + + return typ +} + +// applyTypeNameModifiers applies the list of type modifiers to a type name. 
+func (ci candidateInference) applyTypeNameModifiers(typ types.Type) types.Type { + for _, mod := range ci.typeName.modifiers { + switch mod.mod { + case reference: + typ = types.NewPointer(typ) + case arrayType: + typ = types.NewArray(typ, mod.arrayLen) + case sliceType: + typ = types.NewSlice(typ) + } + } + return typ +} + +// matchesVariadic returns true if we are completing a variadic +// parameter and candType is a compatible slice type. +func (ci candidateInference) matchesVariadic(candType types.Type) bool { + return ci.variadic && ci.objType != nil && assignableTo(candType, types.NewSlice(ci.objType)) +} + +// findSwitchStmt returns an *ast.CaseClause's corresponding *ast.SwitchStmt or +// *ast.TypeSwitchStmt. path should start from the case clause's first ancestor. +func findSwitchStmt(path []ast.Node, pos token.Pos, c *ast.CaseClause) ast.Stmt { + // Make sure position falls within a "case <>:" clause. + if exprAtPos(pos, c.List) >= len(c.List) { + return nil + } + // A case clause is always nested within a block statement in a switch statement. + if len(path) < 2 { + return nil + } + if _, ok := path[0].(*ast.BlockStmt); !ok { + return nil + } + switch s := path[1].(type) { + case *ast.SwitchStmt: + return s + case *ast.TypeSwitchStmt: + return s + default: + return nil + } +} + +// breaksExpectedTypeInference reports if an expression node's type is unrelated +// to its child expression node types. For example, "Foo{Bar: x.Baz(<>)}" should +// expect a function argument, not a composite literal value. +func breaksExpectedTypeInference(n ast.Node, pos token.Pos) bool { + switch n := n.(type) { + case *ast.CompositeLit: + // Doesn't break inference if pos is in type name. + // For example: "Foo<>{Bar: 123}" + return n.Type == nil || !goplsastutil.NodeContains(n.Type, pos) + case *ast.CallExpr: + // Doesn't break inference if pos is in func name. 
+ // For example: "Foo<>(123)" + return !goplsastutil.NodeContains(n.Fun, pos) + case *ast.FuncLit, *ast.IndexExpr, *ast.SliceExpr: + return true + default: + return false + } +} + +// expectTypeName returns information about the expected type name at position. +func expectTypeName(c *completer) typeNameInference { + var inf typeNameInference + +Nodes: + for i, p := range c.path { + switch n := p.(type) { + case *ast.FieldList: + // Expect a type name if pos is in a FieldList. This applies to + // FuncType params/results, FuncDecl receiver, StructType, and + // InterfaceType. We don't need to worry about the field name + // because completion bails out early if pos is in an *ast.Ident + // that defines an object. + inf.wantTypeName = true + break Nodes + case *ast.CaseClause: + // Expect type names in type switch case clauses. + if swtch, ok := findSwitchStmt(c.path[i+1:], c.pos, n).(*ast.TypeSwitchStmt); ok { + // The case clause types must be assertable from the type switch parameter. + ast.Inspect(swtch.Assign, func(n ast.Node) bool { + if ta, ok := n.(*ast.TypeAssertExpr); ok { + inf.assertableFrom = c.pkg.TypesInfo().TypeOf(ta.X) + return false + } + return true + }) + inf.wantTypeName = true + + // Track the types that have already been used in this + // switch's case statements so we don't recommend them. + for _, e := range swtch.Body.List { + for _, typeExpr := range e.(*ast.CaseClause).List { + // Skip if type expression contains pos. We don't want to + // count it as already used if the user is completing it. + if typeExpr.Pos() < c.pos && c.pos <= typeExpr.End() { + continue + } + + if t := c.pkg.TypesInfo().TypeOf(typeExpr); t != nil { + inf.seenTypeSwitchCases = append(inf.seenTypeSwitchCases, t) + } + } + } + + break Nodes + } + return typeNameInference{} + case *ast.TypeAssertExpr: + // Expect type names in type assert expressions. + if n.Lparen < c.pos && c.pos <= n.Rparen { + // The type in parens must be assertable from the expression type. 
+ inf.assertableFrom = c.pkg.TypesInfo().TypeOf(n.X) + inf.wantTypeName = true + break Nodes + } + return typeNameInference{} + case *ast.StarExpr: + inf.modifiers = append(inf.modifiers, typeMod{mod: reference}) + case *ast.CompositeLit: + // We want a type name if position is in the "Type" part of a + // composite literal (e.g. "Foo<>{}"). + if n.Type != nil && n.Type.Pos() <= c.pos && c.pos <= n.Type.End() { + inf.wantTypeName = true + inf.compLitType = true + + if i < len(c.path)-1 { + // Track preceding "&" operator. Technically it applies to + // the composite literal and not the type name, but if + // affects our type completion nonetheless. + if u, ok := c.path[i+1].(*ast.UnaryExpr); ok && u.Op == token.AND { + inf.modifiers = append(inf.modifiers, typeMod{mod: reference}) + } + } + } + break Nodes + case *ast.ArrayType: + // If we are inside the "Elt" part of an array type, we want a type name. + if n.Elt.Pos() <= c.pos && c.pos <= n.Elt.End() { + inf.wantTypeName = true + if n.Len == nil { + // No "Len" expression means a slice type. + inf.modifiers = append(inf.modifiers, typeMod{mod: sliceType}) + } else { + // Try to get the array type using the constant value of "Len". + tv, ok := c.pkg.TypesInfo().Types[n.Len] + if ok && tv.Value != nil && tv.Value.Kind() == constant.Int { + if arrayLen, ok := constant.Int64Val(tv.Value); ok { + inf.modifiers = append(inf.modifiers, typeMod{mod: arrayType, arrayLen: arrayLen}) + } + } + } + + // ArrayTypes can be nested, so keep going if our parent is an + // ArrayType. + if i < len(c.path)-1 { + if _, ok := c.path[i+1].(*ast.ArrayType); ok { + continue Nodes + } + } + + break Nodes + } + case *ast.MapType: + inf.wantTypeName = true + if n.Key != nil { + inf.wantComparable = goplsastutil.NodeContains(n.Key, c.pos) + } else { + // If the key is empty, assume we are completing the key if + // pos is directly after the "map[". 
+ inf.wantComparable = c.pos == n.Pos()+token.Pos(len("map[")) + } + break Nodes + case *ast.ValueSpec: + inf.wantTypeName = n.Type != nil && goplsastutil.NodeContains(n.Type, c.pos) + break Nodes + case *ast.TypeSpec: + inf.wantTypeName = goplsastutil.NodeContains(n.Type, c.pos) + default: + if breaksExpectedTypeInference(p, c.pos) { + return typeNameInference{} + } + } + } + + return inf +} + +func (c *completer) fakeObj(T types.Type) *types.Var { + return types.NewVar(token.NoPos, c.pkg.Types(), "", T) +} + +// derivableTypes iterates types you can derive from t. For example, +// from "foo" we might derive "&foo", and "foo()". +func derivableTypes(t types.Type, addressable bool, f func(t types.Type, addressable bool, mod typeModKind) bool) bool { + switch t := t.Underlying().(type) { + case *types.Signature: + // If t is a func type with a single result, offer the result type. + if t.Results().Len() == 1 && f(t.Results().At(0).Type(), false, invoke) { + return true + } + case *types.Array: + if f(t.Elem(), true, index) { + return true + } + // Try converting array to slice. + if f(types.NewSlice(t.Elem()), false, takeSlice) { + return true + } + case *types.Pointer: + if f(t.Elem(), false, dereference) { + return true + } + case *types.Slice: + if f(t.Elem(), true, index) { + return true + } + case *types.Map: + if f(t.Elem(), false, index) { + return true + } + case *types.Chan: + if f(t.Elem(), false, chanRead) { + return true + } + } + + // Check if c is addressable and a pointer to c matches our type inference. + if addressable && f(types.NewPointer(t), false, reference) { + return true + } + + return false +} + +// anyCandType reports whether f returns true for any candidate type +// derivable from c. It searches up to three levels of type +// modification. For example, given "foo" we could discover "***foo" +// or "*foo()". 
+func (c *candidate) anyCandType(f func(t types.Type, addressable bool) bool) bool { + if c.obj == nil || c.obj.Type() == nil { + return false + } + + const maxDepth = 3 + + var searchTypes func(t types.Type, addressable bool, mods []typeModKind) bool + searchTypes = func(t types.Type, addressable bool, mods []typeModKind) bool { + if f(t, addressable) { + if len(mods) > 0 { + newMods := make([]typeModKind, len(mods)+len(c.mods)) + copy(newMods, mods) + copy(newMods[len(mods):], c.mods) + c.mods = newMods + } + return true + } + + if len(mods) == maxDepth { + return false + } + + return derivableTypes(t, addressable, func(t types.Type, addressable bool, mod typeModKind) bool { + return searchTypes(t, addressable, append(mods, mod)) + }) + } + + return searchTypes(c.obj.Type(), c.addressable, make([]typeModKind, 0, maxDepth)) +} + +// matchingCandidate reports whether cand matches our type inferences. +// It mutates cand's score in certain cases. +func (c *completer) matchingCandidate(cand *candidate) bool { + if c.completionContext.commentCompletion { + return false + } + + // Bail out early if we are completing a field name in a composite literal. + if v, ok := cand.obj.(*types.Var); ok && v.IsField() && c.wantStructFieldCompletions() { + return true + } + + if isTypeName(cand.obj) { + return c.matchingTypeName(cand) + } else if c.wantTypeName() { + // If we want a type, a non-type object never matches. + return false + } + + if c.inference.candTypeMatches(cand) { + return true + } + + candType := cand.obj.Type() + if candType == nil { + return false + } + + if sig, ok := candType.Underlying().(*types.Signature); ok { + if c.inference.assigneesMatch(cand, sig) { + // Invoke the candidate if its results are multi-assignable. + cand.mods = append(cand.mods, invoke) + return true + } + } + + // Default to invoking *types.Func candidates. This is so function + // completions in an empty statement (or other cases with no expected type) + // are invoked by default. 
+ if isFunc(cand.obj) { + cand.mods = append(cand.mods, invoke) + } + + return false +} + +// candTypeMatches reports whether cand makes a good completion +// candidate given the candidate inference. cand's score may be +// mutated to downrank the candidate in certain situations. +func (ci *candidateInference) candTypeMatches(cand *candidate) bool { + var ( + expTypes = make([]types.Type, 0, 2) + variadicType types.Type + ) + if ci.objType != nil { + expTypes = append(expTypes, ci.objType) + + if ci.variadic { + variadicType = types.NewSlice(ci.objType) + expTypes = append(expTypes, variadicType) + } + } + + return cand.anyCandType(func(candType types.Type, addressable bool) bool { + // Take into account any type modifiers on the expected type. + candType = ci.applyTypeModifiers(candType, addressable) + if candType == nil { + return false + } + + if ci.convertibleTo != nil && convertibleTo(candType, ci.convertibleTo) { + return true + } + + for _, expType := range expTypes { + if isEmptyInterface(expType) { + continue + } + + matches := ci.typeMatches(expType, candType) + if !matches { + // If candType doesn't otherwise match, consider if we can + // convert candType directly to expType. + if considerTypeConversion(candType, expType, cand.path) { + cand.convertTo = expType + // Give a major score penalty so we always prefer directly + // assignable candidates, all else equal. + cand.score *= 0.5 + return true + } + + continue + } + + if expType == variadicType { + cand.mods = append(cand.mods, takeDotDotDot) + } + + // Lower candidate score for untyped conversions. This avoids + // ranking untyped constants above candidates with an exact type + // match. Don't lower score of builtin constants, e.g. "true". + if isUntyped(candType) && !types.Identical(candType, expType) && cand.obj.Parent() != types.Universe { + // Bigger penalty for deep completions into other packages to + // avoid random constants from other packages popping up all + // the time. 
+ if len(cand.path) > 0 && isPkgName(cand.path[0]) { + cand.score *= 0.5 + } else { + cand.score *= 0.75 + } + } + + return true + } + + // If we don't have a specific expected type, fall back to coarser + // object kind checks. + if ci.objType == nil || isEmptyInterface(ci.objType) { + // If we were able to apply type modifiers to our candidate type, + // count that as a match. For example: + // + // var foo chan int + // <-fo<> + // + // We were able to apply the "<-" type modifier to "foo", so "foo" + // matches. + if len(ci.modifiers) > 0 { + return true + } + + // If we didn't have an exact type match, check if our object kind + // matches. + if ci.kindMatches(candType) { + if ci.objKind == kindFunc { + cand.mods = append(cand.mods, invoke) + } + return true + } + } + + return false + }) +} + +// considerTypeConversion returns true if we should offer a completion +// automatically converting "from" to "to". +func considerTypeConversion(from, to types.Type, path []types.Object) bool { + // Don't offer to convert deep completions from other packages. + // Otherwise there are many random package level consts/vars that + // pop up as candidates all the time. + if len(path) > 0 && isPkgName(path[0]) { + return false + } + + if _, ok := from.(*types.TypeParam); ok { + return false + } + + if !convertibleTo(from, to) { + return false + } + + // Don't offer to convert ints to strings since that probably + // doesn't do what the user wants. + if isBasicKind(from, types.IsInteger) && isBasicKind(to, types.IsString) { + return false + } + + return true +} + +// typeMatches reports whether an object of candType makes a good +// completion candidate given the expected type expType. +func (ci *candidateInference) typeMatches(expType, candType types.Type) bool { + // Handle untyped values specially since AssignableTo gives false negatives + // for them (see https://golang.org/issue/32146). 
+ if candBasic, ok := candType.Underlying().(*types.Basic); ok { + if expBasic, ok := expType.Underlying().(*types.Basic); ok { + // Note that the candidate and/or the expected can be untyped. + // In "fo<> == 100" the expected type is untyped, and the + // candidate could also be an untyped constant. + + // Sort by is_untyped and then by is_int to simplify below logic. + a, b := candBasic.Info(), expBasic.Info() + if a&types.IsUntyped == 0 || (b&types.IsInteger > 0 && b&types.IsUntyped > 0) { + a, b = b, a + } + + // If at least one is untyped... + if a&types.IsUntyped > 0 { + switch { + // Untyped integers are compatible with floats. + case a&types.IsInteger > 0 && b&types.IsFloat > 0: + return true + + // Check if their constant kind (bool|int|float|complex|string) matches. + // This doesn't take into account the constant value, so there will be some + // false positives due to integer sign and overflow. + case a&types.IsConstType == b&types.IsConstType: + return true + } + } + } + } + + // AssignableTo covers the case where the types are equal, but also handles + // cases like assigning a concrete type to an interface type. + return assignableTo(candType, expType) +} + +// kindMatches reports whether candType's kind matches our expected +// kind (e.g. slice, map, etc.). +func (ci *candidateInference) kindMatches(candType types.Type) bool { + return ci.objKind > 0 && ci.objKind&candKind(candType) > 0 +} + +// assigneesMatch reports whether an invocation of sig matches the +// number and type of any assignees. +func (ci *candidateInference) assigneesMatch(cand *candidate, sig *types.Signature) bool { + if len(ci.assignees) == 0 { + return false + } + + // Uniresult functions are always usable and are handled by the + // normal, non-assignees type matching logic. + if sig.Results().Len() == 1 { + return false + } + + // Don't prefer completing into func(...interface{}) calls since all + // functions would match. 
+ if ci.variadicAssignees && len(ci.assignees) == 1 && isEmptyInterface(deslice(ci.assignees[0])) { + return false + } + + var numberOfResultsCouldMatch bool + if ci.variadicAssignees { + numberOfResultsCouldMatch = sig.Results().Len() >= len(ci.assignees)-1 + } else { + numberOfResultsCouldMatch = sig.Results().Len() == len(ci.assignees) + } + + // If our signature doesn't return the right number of values, it's + // not a match, so downrank it. For example: + // + // var foo func() (int, int) + // a, b, c := <> // downrank "foo()" since it only returns two values + if !numberOfResultsCouldMatch { + cand.score /= 2 + return false + } + + // If at least one assignee has a valid type, and all valid + // assignees match the corresponding sig result value, the signature + // is a match. + allMatch := false + for i := 0; i < sig.Results().Len(); i++ { + var assignee types.Type + + // If we are completing into variadic parameters, deslice the + // expected variadic type. + if ci.variadicAssignees && i >= len(ci.assignees)-1 { + assignee = ci.assignees[len(ci.assignees)-1] + if elem := deslice(assignee); elem != nil { + assignee = elem + } + } else { + assignee = ci.assignees[i] + } + + if assignee == nil || assignee == types.Typ[types.Invalid] { + continue + } + + allMatch = ci.typeMatches(assignee, sig.Results().At(i).Type()) + if !allMatch { + break + } + } + return allMatch +} + +func (c *completer) matchingTypeName(cand *candidate) bool { + if !c.wantTypeName() { + return false + } + + typeMatches := func(candType types.Type) bool { + // Take into account any type name modifier prefixes. + candType = c.inference.applyTypeNameModifiers(candType) + + if from := c.inference.typeName.assertableFrom; from != nil { + // Don't suggest the starting type in type assertions. For example, + // if "foo" is an io.Writer, don't suggest "foo.(io.Writer)". 
+ if types.Identical(from, candType) { + return false + } + + if intf, ok := from.Underlying().(*types.Interface); ok { + if !types.AssertableTo(intf, candType) { + return false + } + } + } + + if c.inference.typeName.wantComparable && !types.Comparable(candType) { + return false + } + + // Skip this type if it has already been used in another type + // switch case. + for _, seen := range c.inference.typeName.seenTypeSwitchCases { + if types.Identical(candType, seen) { + return false + } + } + + // We can expect a type name and have an expected type in cases like: + // + // var foo []int + // foo = []i<> + // + // Where our expected type is "[]int", and we expect a type name. + if c.inference.objType != nil { + return assignableTo(candType, c.inference.objType) + } + + // Default to saying any type name is a match. + return true + } + + t := cand.obj.Type() + + if typeMatches(t) { + return true + } + + if !types.IsInterface(t) && typeMatches(types.NewPointer(t)) { + if c.inference.typeName.compLitType { + // If we are completing a composite literal type as in + // "foo<>{}", to make a pointer we must prepend "&". + cand.mods = append(cand.mods, reference) + } else { + // If we are completing a normal type name such as "foo<>", to + // make a pointer we must prepend "*". + cand.mods = append(cand.mods, dereference) + } + return true + } + + return false +} + +var ( + // "interface { Error() string }" (i.e. error) + errorIntf = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) + + // "interface { String() string }" (i.e. fmt.Stringer) + stringerIntf = types.NewInterfaceType([]*types.Func{ + types.NewFunc(token.NoPos, nil, "String", types.NewSignature( + nil, + nil, + types.NewTuple(types.NewParam(token.NoPos, nil, "", types.Typ[types.String])), + false, + )), + }, nil).Complete() + + byteType = types.Universe.Lookup("byte").Type() +) + +// candKind returns the objKind of candType, if any. 
+func candKind(candType types.Type) objKind { + var kind objKind + + switch t := candType.Underlying().(type) { + case *types.Array: + kind |= kindArray + if t.Elem() == byteType { + kind |= kindBytes + } + case *types.Slice: + kind |= kindSlice + if t.Elem() == byteType { + kind |= kindBytes + } + case *types.Chan: + kind |= kindChan + case *types.Map: + kind |= kindMap + case *types.Pointer: + kind |= kindPtr + + // Some builtins handle array pointers as arrays, so just report a pointer + // to an array as an array. + if _, isArray := t.Elem().Underlying().(*types.Array); isArray { + kind |= kindArray + } + case *types.Basic: + switch info := t.Info(); { + case info&types.IsString > 0: + kind |= kindString + case info&types.IsInteger > 0: + kind |= kindInt + case info&types.IsFloat > 0: + kind |= kindFloat + case info&types.IsComplex > 0: + kind |= kindComplex + case info&types.IsBoolean > 0: + kind |= kindBool + } + case *types.Signature: + return kindFunc + } + + if types.Implements(candType, errorIntf) { + kind |= kindError + } + + if types.Implements(candType, stringerIntf) { + kind |= kindStringer + } + + return kind +} + +// innermostScope returns the innermost scope for c.pos. +func (c *completer) innermostScope() *types.Scope { + for _, s := range c.scopes { + if s != nil { + return s + } + } + return nil +} + +// isSlice reports whether the object's underlying type is a slice. +func isSlice(obj types.Object) bool { + if obj != nil && obj.Type() != nil { + if _, ok := obj.Type().Underlying().(*types.Slice); ok { + return true + } + } + return false +} + +// forEachPackageMember calls f(tok, id, fn) for each package-level +// TYPE/VAR/CONST/FUNC declaration in the Go source file, based on a +// quick partial parse. fn is non-nil only for function declarations. +// The AST position information is garbage. 
+func forEachPackageMember(content []byte, f func(tok token.Token, id *ast.Ident, fn *ast.FuncDecl)) { + purged := goplsastutil.PurgeFuncBodies(content) + file, _ := parser.ParseFile(token.NewFileSet(), "", purged, 0) + for _, decl := range file.Decls { + switch decl := decl.(type) { + case *ast.GenDecl: + for _, spec := range decl.Specs { + switch spec := spec.(type) { + case *ast.ValueSpec: // var/const + for _, id := range spec.Names { + f(decl.Tok, id, nil) + } + case *ast.TypeSpec: + f(decl.Tok, spec.Name, nil) + } + } + case *ast.FuncDecl: + if decl.Recv == nil { + f(token.FUNC, decl.Name, decl) + } + } + } +} + +func is[T any](x any) bool { + _, ok := x.(T) + return ok +} diff --git a/gopls/internal/lsp/source/completion/deep_completion.go b/gopls/internal/golang/completion/deep_completion.go similarity index 97% rename from gopls/internal/lsp/source/completion/deep_completion.go rename to gopls/internal/golang/completion/deep_completion.go index fac11bf4117..053ece8219e 100644 --- a/gopls/internal/lsp/source/completion/deep_completion.go +++ b/gopls/internal/golang/completion/deep_completion.go @@ -46,13 +46,6 @@ func (s *deepCompletionState) enqueue(cand candidate) { s.nextQueue = append(s.nextQueue, cand) } -// dequeue removes and returns the leftmost element from the search queue. -func (s *deepCompletionState) dequeue() *candidate { - var cand *candidate - cand, s.thisQueue = &s.thisQueue[len(s.thisQueue)-1], s.thisQueue[:len(s.thisQueue)-1] - return cand -} - // scorePenalty computes a deep candidate score penalty. A candidate is // penalized based on depth to favor shallower candidates. We also give a // slight bonus to unexported objects and a slight additional penalty to @@ -162,7 +155,7 @@ func (c *completer) deepSearch(ctx context.Context, minDepth int, deadline *time // not exported, don't treat it as a completion candidate unless it's // a package completion candidate. 
if !c.completionContext.packageCompletion && - obj.Pkg() != nil && obj.Pkg() != c.pkg.GetTypes() && !obj.Exported() { + obj.Pkg() != nil && obj.Pkg() != c.pkg.Types() && !obj.Exported() { continue } diff --git a/gopls/internal/lsp/source/completion/deep_completion_test.go b/gopls/internal/golang/completion/deep_completion_test.go similarity index 100% rename from gopls/internal/lsp/source/completion/deep_completion_test.go rename to gopls/internal/golang/completion/deep_completion_test.go diff --git a/gopls/internal/golang/completion/definition.go b/gopls/internal/golang/completion/definition.go new file mode 100644 index 00000000000..fc8b0ae5c69 --- /dev/null +++ b/gopls/internal/golang/completion/definition.go @@ -0,0 +1,160 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package completion + +import ( + "go/ast" + "go/types" + "strings" + "unicode" + "unicode/utf8" + + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/golang/completion/snippet" + "golang.org/x/tools/gopls/internal/protocol" +) + +// some function definitions in test files can be completed +// So far, TestFoo(t *testing.T), TestMain(m *testing.M) +// BenchmarkFoo(b *testing.B), FuzzFoo(f *testing.F) + +// path[0] is known to be *ast.Ident +func definition(path []ast.Node, obj types.Object, pgf *parsego.File) ([]CompletionItem, *Selection) { + if _, ok := obj.(*types.Func); !ok { + return nil, nil // not a function at all + } + if !strings.HasSuffix(pgf.URI.Path(), "_test.go") { + return nil, nil // not a test file + } + + name := path[0].(*ast.Ident).Name + if len(name) == 0 { + // can't happen + return nil, nil + } + start := path[0].Pos() + end := path[0].End() + sel := &Selection{ + content: "", + cursor: start, + tokFile: pgf.Tok, + start: start, + end: end, + mapper: pgf.Mapper, + } + var ans []CompletionItem + var hasParens bool + n, ok 
:= path[1].(*ast.FuncDecl) + if !ok { + return nil, nil // can't happen + } + if n.Recv != nil { + return nil, nil // a method, not a function + } + t := n.Type.Params + if t.Closing != t.Opening { + hasParens = true + } + + // Always suggest TestMain, if possible + if strings.HasPrefix("TestMain", name) { + if hasParens { + ans = append(ans, defItem("TestMain", obj)) + } else { + ans = append(ans, defItem("TestMain(m *testing.M)", obj)) + } + } + + // If a snippet is possible, suggest it + if strings.HasPrefix("Test", name) { + if hasParens { + ans = append(ans, defItem("Test", obj)) + } else { + ans = append(ans, defSnippet("Test", "(t *testing.T)", obj)) + } + return ans, sel + } else if strings.HasPrefix("Benchmark", name) { + if hasParens { + ans = append(ans, defItem("Benchmark", obj)) + } else { + ans = append(ans, defSnippet("Benchmark", "(b *testing.B)", obj)) + } + return ans, sel + } else if strings.HasPrefix("Fuzz", name) { + if hasParens { + ans = append(ans, defItem("Fuzz", obj)) + } else { + ans = append(ans, defSnippet("Fuzz", "(f *testing.F)", obj)) + } + return ans, sel + } + + // Fill in the argument for what the user has already typed + if got := defMatches(name, "Test", path, "(t *testing.T)"); got != "" { + ans = append(ans, defItem(got, obj)) + } else if got := defMatches(name, "Benchmark", path, "(b *testing.B)"); got != "" { + ans = append(ans, defItem(got, obj)) + } else if got := defMatches(name, "Fuzz", path, "(f *testing.F)"); got != "" { + ans = append(ans, defItem(got, obj)) + } + return ans, sel +} + +// defMatches returns text for defItem, never for defSnippet +func defMatches(name, pat string, path []ast.Node, arg string) string { + if !strings.HasPrefix(name, pat) { + return "" + } + c, _ := utf8.DecodeRuneInString(name[len(pat):]) + if unicode.IsLower(c) { + return "" + } + fd, ok := path[1].(*ast.FuncDecl) + if !ok { + // we don't know what's going on + return "" + } + fp := fd.Type.Params + if len(fp.List) > 0 { + // signature 
already there, nothing to suggest + return "" + } + if fp.Opening != fp.Closing { + // nothing: completion works on words, not easy to insert arg + return "" + } + // suggesting signature too + return name + arg +} + +func defSnippet(prefix, suffix string, obj types.Object) CompletionItem { + var sn snippet.Builder + sn.WriteText(prefix) + sn.WritePlaceholder(func(b *snippet.Builder) { b.WriteText("Xxx") }) + sn.WriteText(suffix + " {\n\t") + sn.WriteFinalTabstop() + sn.WriteText("\n}") + return CompletionItem{ + Label: prefix + "Xxx" + suffix, + Detail: "tab, type the rest of the name, then tab", + Kind: protocol.FunctionCompletion, + Depth: 0, + Score: 10, + snippet: &sn, + Documentation: prefix + " test function", + isSlice: isSlice(obj), + } +} +func defItem(val string, obj types.Object) CompletionItem { + return CompletionItem{ + Label: val, + InsertText: val, + Kind: protocol.FunctionCompletion, + Depth: 0, + Score: 9, // prefer the snippets when available + Documentation: "complete the function name", + isSlice: isSlice(obj), + } +} diff --git a/gopls/internal/golang/completion/format.go b/gopls/internal/golang/completion/format.go new file mode 100644 index 00000000000..dbc57c18082 --- /dev/null +++ b/gopls/internal/golang/completion/format.go @@ -0,0 +1,444 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package completion + +import ( + "context" + "errors" + "fmt" + "go/ast" + "go/doc" + "go/types" + "strings" + + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/golang/completion/snippet" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/imports" +) + +var ( + errNoMatch = errors.New("not a surrounding match") + errLowScore = errors.New("not a high scoring candidate") +) + +// item formats a candidate to a CompletionItem. +func (c *completer) item(ctx context.Context, cand candidate) (CompletionItem, error) { + obj := cand.obj + + // if the object isn't a valid match against the surrounding, return early. + matchScore := c.matcher.Score(cand.name) + if matchScore <= 0 { + return CompletionItem{}, errNoMatch + } + cand.score *= float64(matchScore) + + // Ignore deep candidates that won't be in the MaxDeepCompletions anyway. + if len(cand.path) != 0 && !c.deepState.isHighScore(cand.score) { + return CompletionItem{}, errLowScore + } + + // Handle builtin types separately. 
+ if obj.Parent() == types.Universe { + return c.formatBuiltin(ctx, cand) + } + + var ( + label = cand.name + detail = types.TypeString(obj.Type(), c.qf) + insert = label + kind = protocol.TextCompletion + snip snippet.Builder + protocolEdits []protocol.TextEdit + ) + if obj.Type() == nil { + detail = "" + } + if isTypeName(obj) && c.wantTypeParams() { + x := cand.obj.(*types.TypeName) + if named, ok := aliases.Unalias(x.Type()).(*types.Named); ok { + tp := named.TypeParams() + label += golang.FormatTypeParams(tp) + insert = label // maintain invariant above (label == insert) + } + } + + snip.WriteText(insert) + + switch obj := obj.(type) { + case *types.TypeName: + detail, kind = golang.FormatType(obj.Type(), c.qf) + case *types.Const: + kind = protocol.ConstantCompletion + case *types.Var: + if _, ok := obj.Type().(*types.Struct); ok { + detail = "struct{...}" // for anonymous unaliased struct types + } else if obj.IsField() { + var err error + detail, err = golang.FormatVarType(ctx, c.snapshot, c.pkg, obj, c.qf, c.mq) + if err != nil { + return CompletionItem{}, err + } + } + if obj.IsField() { + kind = protocol.FieldCompletion + c.structFieldSnippet(cand, detail, &snip) + } else { + kind = protocol.VariableCompletion + } + if obj.Type() == nil { + break + } + case *types.Func: + if obj.Type().(*types.Signature).Recv() == nil { + kind = protocol.FunctionCompletion + } else { + kind = protocol.MethodCompletion + } + case *types.PkgName: + kind = protocol.ModuleCompletion + detail = fmt.Sprintf("%q", obj.Imported().Path()) + case *types.Label: + kind = protocol.ConstantCompletion + detail = "label" + } + + var prefix string + for _, mod := range cand.mods { + switch mod { + case reference: + prefix = "&" + prefix + case dereference: + prefix = "*" + prefix + case chanRead: + prefix = "<-" + prefix + } + } + + var ( + suffix string + funcType = obj.Type() + ) +Suffixes: + for _, mod := range cand.mods { + switch mod { + case invoke: + if sig, ok := 
funcType.Underlying().(*types.Signature); ok { + s, err := golang.NewSignature(ctx, c.snapshot, c.pkg, sig, nil, c.qf, c.mq) + if err != nil { + return CompletionItem{}, err + } + + tparams := s.TypeParams() + if len(tparams) > 0 { + // Eliminate the suffix of type parameters that are + // likely redundant because they can probably be + // inferred from the argument types (#51783). + // + // We don't bother doing the reverse inference from + // result types as result-only type parameters are + // quite unusual. + free := inferableTypeParams(sig) + for i := sig.TypeParams().Len() - 1; i >= 0; i-- { + tparam := sig.TypeParams().At(i) + if !free[tparam] { + break + } + tparams = tparams[:i] // eliminate + } + } + + c.functionCallSnippet("", tparams, s.Params(), &snip) + if sig.Results().Len() == 1 { + funcType = sig.Results().At(0).Type() + } + detail = "func" + s.Format() + } + + if !c.opts.snippets { + // Without snippets the candidate will not include "()". Don't + // add further suffixes since they will be invalid. For + // example, with snippets "foo()..." would become "foo..." + // without snippets if we added the dotDotDot. + break Suffixes + } + case takeSlice: + suffix += "[:]" + case takeDotDotDot: + suffix += "..." + case index: + snip.WriteText("[") + snip.WritePlaceholder(nil) + snip.WriteText("]") + } + } + + // If this candidate needs an additional import statement, + // add the additional text edits needed. + if cand.imp != nil { + addlEdits, err := c.importEdits(cand.imp) + + if err != nil { + return CompletionItem{}, err + } + + protocolEdits = append(protocolEdits, addlEdits...) + if kind != protocol.ModuleCompletion { + if detail != "" { + detail += " " + } + detail += fmt.Sprintf("(from %q)", cand.imp.importPath) + } + } + + if cand.convertTo != nil { + typeName := types.TypeString(cand.convertTo, c.qf) + + switch t := cand.convertTo.(type) { + // We need extra parens when casting to these types. 
For example, + // we need "(*int)(foo)", not "*int(foo)". + case *types.Pointer, *types.Signature: + typeName = "(" + typeName + ")" + case *types.Basic: + // If the types are incompatible (as determined by typeMatches), then we + // must need a conversion here. However, if the target type is untyped, + // don't suggest converting to e.g. "untyped float" (golang/go#62141). + if t.Info()&types.IsUntyped != 0 { + typeName = types.TypeString(types.Default(cand.convertTo), c.qf) + } + } + + prefix = typeName + "(" + prefix + suffix = ")" + } + + if prefix != "" { + // If we are in a selector, add an edit to place prefix before selector. + if sel := enclosingSelector(c.path, c.pos); sel != nil { + edits, err := c.editText(sel.Pos(), sel.Pos(), prefix) + if err != nil { + return CompletionItem{}, err + } + protocolEdits = append(protocolEdits, edits...) + } else { + // If there is no selector, just stick the prefix at the start. + insert = prefix + insert + snip.PrependText(prefix) + } + } + + if suffix != "" { + insert += suffix + snip.WriteText(suffix) + } + + detail = strings.TrimPrefix(detail, "untyped ") + // override computed detail with provided detail, if something is provided. + if cand.detail != "" { + detail = cand.detail + } + item := CompletionItem{ + Label: label, + InsertText: insert, + AdditionalTextEdits: protocolEdits, + Detail: detail, + Kind: kind, + Score: cand.score, + Depth: len(cand.path), + snippet: &snip, + isSlice: isSlice(obj), + } + // If the user doesn't want documentation for completion items. + if !c.opts.documentation { + return item, nil + } + pos := safetoken.StartPosition(c.pkg.FileSet(), obj.Pos()) + + // We ignore errors here, because some types, like "unsafe" or "error", + // may not have valid positions that we can use to get documentation. 
+ if !pos.IsValid() { + return item, nil + } + + comment, err := golang.HoverDocForObject(ctx, c.snapshot, c.pkg.FileSet(), obj) + if err != nil { + event.Error(ctx, fmt.Sprintf("failed to find Hover for %q", obj.Name()), err) + return item, nil + } + if c.opts.fullDocumentation { + item.Documentation = comment.Text() + } else { + item.Documentation = doc.Synopsis(comment.Text()) + } + // The desired pattern is `^// Deprecated`, but the prefix has been removed + // TODO(rfindley): It doesn't look like this does the right thing for + // multi-line comments. + if strings.HasPrefix(comment.Text(), "Deprecated") { + if c.snapshot.Options().CompletionTags { + item.Tags = []protocol.CompletionItemTag{protocol.ComplDeprecated} + } else if c.snapshot.Options().CompletionDeprecated { + item.Deprecated = true + } + } + + return item, nil +} + +// importEdits produces the text edits necessary to add the given import to the current file. +func (c *completer) importEdits(imp *importInfo) ([]protocol.TextEdit, error) { + if imp == nil { + return nil, nil + } + + pgf, err := c.pkg.File(protocol.URIFromPath(c.filename)) + if err != nil { + return nil, err + } + + return golang.ComputeOneImportFixEdits(c.snapshot, pgf, &imports.ImportFix{ + StmtInfo: imports.ImportInfo{ + ImportPath: imp.importPath, + Name: imp.name, + }, + // IdentName is unused on this path and is difficult to get. 
+ FixType: imports.AddImport, + }) +} + +func (c *completer) formatBuiltin(ctx context.Context, cand candidate) (CompletionItem, error) { + obj := cand.obj + item := CompletionItem{ + Label: obj.Name(), + InsertText: obj.Name(), + Score: cand.score, + } + switch obj.(type) { + case *types.Const: + item.Kind = protocol.ConstantCompletion + case *types.Builtin: + item.Kind = protocol.FunctionCompletion + sig, err := golang.NewBuiltinSignature(ctx, c.snapshot, obj.Name()) + if err != nil { + return CompletionItem{}, err + } + item.Detail = "func" + sig.Format() + item.snippet = &snippet.Builder{} + // The signature inferred for a built-in is instantiated, so TypeParams=∅. + c.functionCallSnippet(obj.Name(), sig.TypeParams(), sig.Params(), item.snippet) + case *types.TypeName: + if types.IsInterface(obj.Type()) { + item.Kind = protocol.InterfaceCompletion + } else { + item.Kind = protocol.ClassCompletion + } + case *types.Nil: + item.Kind = protocol.VariableCompletion + } + return item, nil +} + +// decide if the type params (if any) should be part of the completion +// which is only possible for types.Named and types.Signature +// (so far, only in receivers, e.g., func (s *GENERIC[K, V])..., which is a types.Named) +func (c *completer) wantTypeParams() bool { + // Need to be lexically in a receiver, and a child of an IndexListExpr + // (but IndexListExpr only exists with go1.18) + start := c.path[0].Pos() + for i, nd := range c.path { + if fd, ok := nd.(*ast.FuncDecl); ok { + if i > 0 && fd.Recv != nil && start < fd.Recv.End() { + return true + } else { + return false + } + } + } + return false +} + +// inferableTypeParams returns the set of type parameters +// of sig that are constrained by (inferred from) the argument types. +func inferableTypeParams(sig *types.Signature) map[*types.TypeParam]bool { + free := make(map[*types.TypeParam]bool) + + // visit adds to free all the free type parameters of t. 
+ var visit func(t types.Type) + visit = func(t types.Type) { + switch t := t.(type) { + case *types.Array: + visit(t.Elem()) + case *types.Chan: + visit(t.Elem()) + case *types.Map: + visit(t.Key()) + visit(t.Elem()) + case *types.Pointer: + visit(t.Elem()) + case *types.Slice: + visit(t.Elem()) + case *types.Interface: + for i := 0; i < t.NumExplicitMethods(); i++ { + visit(t.ExplicitMethod(i).Type()) + } + for i := 0; i < t.NumEmbeddeds(); i++ { + visit(t.EmbeddedType(i)) + } + case *types.Union: + for i := 0; i < t.Len(); i++ { + visit(t.Term(i).Type()) + } + case *types.Signature: + if tp := t.TypeParams(); tp != nil { + // Generic signatures only appear as the type of generic + // function declarations, so this isn't really reachable. + for i := 0; i < tp.Len(); i++ { + visit(tp.At(i).Constraint()) + } + } + visit(t.Params()) + visit(t.Results()) + case *types.Tuple: + for i := 0; i < t.Len(); i++ { + visit(t.At(i).Type()) + } + case *types.Struct: + for i := 0; i < t.NumFields(); i++ { + visit(t.Field(i).Type()) + } + case *types.TypeParam: + free[t] = true + case *aliases.Alias: + visit(aliases.Unalias(t)) + case *types.Named: + targs := t.TypeArgs() + for i := 0; i < targs.Len(); i++ { + visit(targs.At(i)) + } + case *types.Basic: + // nop + default: + panic(t) + } + } + + visit(sig.Params()) + + // Perform induction through constraints. 
+restart: + for i := 0; i < sig.TypeParams().Len(); i++ { + tp := sig.TypeParams().At(i) + if free[tp] { + n := len(free) + visit(tp.Constraint()) + if len(free) > n { + goto restart // iterate until fixed point + } + } + } + return free +} diff --git a/gopls/internal/lsp/source/completion/fuzz.go b/gopls/internal/golang/completion/fuzz.go similarity index 92% rename from gopls/internal/lsp/source/completion/fuzz.go rename to gopls/internal/golang/completion/fuzz.go index 08e7654c7ed..313e7f7b391 100644 --- a/gopls/internal/lsp/source/completion/fuzz.go +++ b/gopls/internal/golang/completion/fuzz.go @@ -7,11 +7,10 @@ package completion import ( "fmt" "go/ast" - "go/token" "go/types" "strings" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" ) // golang/go#51089 @@ -21,7 +20,7 @@ import ( // PJW: are there other packages where we can deduce usage constraints? // if we find fuzz completions, then return true, as those are the only completions to offer -func (c *completer) fuzz(typ types.Type, mset *types.MethodSet, imp *importInfo, cb func(candidate), fset *token.FileSet) bool { +func (c *completer) fuzz(mset *types.MethodSet, imp *importInfo, cb func(candidate)) bool { // 1. inside f.Fuzz? (only f.Failed and f.Name) // 2. possible completing f.Fuzz? // [Ident,SelectorExpr,Callexpr,ExprStmt,BlockiStmt,FuncDecl(Fuzz...)] @@ -82,7 +81,7 @@ Loop: return true } // Sel.X should be of type *testing.F - got := c.pkg.GetTypesInfo().Types[s.X] + got := c.pkg.TypesInfo().Types[s.X] if got.Type.String() == "*testing.F" { add = call } @@ -107,7 +106,7 @@ Loop: lbl := "Fuzz(func(t *testing.T" for i, a := range add.Args { - info := c.pkg.GetTypesInfo().TypeOf(a) + info := c.pkg.TypesInfo().TypeOf(a) if info == nil { return false // How could this happen, but better safe than panic. 
} diff --git a/gopls/internal/lsp/source/completion/keywords.go b/gopls/internal/golang/completion/keywords.go similarity index 94% rename from gopls/internal/lsp/source/completion/keywords.go rename to gopls/internal/golang/completion/keywords.go index a068ca2d57c..3f2f5ac78cd 100644 --- a/gopls/internal/lsp/source/completion/keywords.go +++ b/gopls/internal/golang/completion/keywords.go @@ -7,8 +7,8 @@ package completion import ( "go/ast" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/astutil" ) const ( @@ -63,7 +63,7 @@ func (c *completer) addKeywordCompletions() { // If we are at the file scope, only offer decl keywords. We don't // get *ast.Idents at the file scope because non-keyword identifiers // turn into *ast.BadDecl, not *ast.Ident. - if len(c.path) == 1 || isASTFile(c.path[1]) { + if len(c.path) == 1 || is[*ast.File](c.path[1]) { c.addKeywordItems(seen, stdScore, TYPE, CONST, VAR, FUNC, IMPORT) return } else if _, ok := c.path[0].(*ast.Ident); !ok { @@ -74,7 +74,7 @@ func (c *completer) addKeywordCompletions() { if len(c.path) > 2 { // Offer "range" if we are in ast.ForStmt.Init. This is what the // AST looks like before "range" is typed, e.g. "for i := r<>". 
- if loop, ok := c.path[2].(*ast.ForStmt); ok && source.NodeContains(loop.Init, c.pos) { + if loop, ok := c.path[2].(*ast.ForStmt); ok && loop.Init != nil && astutil.NodeContains(loop.Init, c.pos) { c.addKeywordItems(seen, stdScore, RANGE) } } diff --git a/gopls/internal/lsp/source/completion/labels.go b/gopls/internal/golang/completion/labels.go similarity index 98% rename from gopls/internal/lsp/source/completion/labels.go rename to gopls/internal/golang/completion/labels.go index e4fd961e319..f0e5f42a67a 100644 --- a/gopls/internal/lsp/source/completion/labels.go +++ b/gopls/internal/golang/completion/labels.go @@ -55,7 +55,7 @@ func (c *completer) labels(lt labelType) { } addLabel := func(score float64, l *ast.LabeledStmt) { - labelObj := c.pkg.GetTypesInfo().ObjectOf(l.Label) + labelObj := c.pkg.TypesInfo().ObjectOf(l.Label) if labelObj != nil { c.deepState.enqueue(candidate{obj: labelObj, score: score}) } diff --git a/gopls/internal/lsp/source/completion/literal.go b/gopls/internal/golang/completion/literal.go similarity index 89% rename from gopls/internal/lsp/source/completion/literal.go rename to gopls/internal/golang/completion/literal.go index 06ed559beb1..62398f064c2 100644 --- a/gopls/internal/lsp/source/completion/literal.go +++ b/gopls/internal/golang/completion/literal.go @@ -11,17 +11,18 @@ import ( "strings" "unicode" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/snippet" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/golang/completion/snippet" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" ) // literal generates composite literal, function literal, and make() // completion items. 
func (c *completer) literal(ctx context.Context, literalType types.Type, imp *importInfo) { - if !c.opts.literal { + if !c.opts.snippets { return } @@ -50,10 +51,15 @@ func (c *completer) literal(ctx context.Context, literalType types.Type, imp *im // // don't offer "mySlice{}" since we have already added a candidate // of "[]int{}". - if _, named := literalType.(*types.Named); named && expType != nil { - if _, named := source.Deref(expType).(*types.Named); !named { - return - } + + // TODO(adonovan): think about aliases: + // they should probably be treated more like Named. + // Should this use Deref not Unpointer? + if is[*types.Named](aliases.Unalias(literalType)) && + expType != nil && + !is[*types.Named](aliases.Unalias(typesinternal.Unpointer(expType))) { + + return } // Check if an object of type literalType would match our expected type. @@ -194,7 +200,7 @@ func (c *completer) functionLiteral(ctx context.Context, sig *types.Signature, m name = p.Name() ) - if tp, _ := p.Type().(*typeparams.TypeParam); tp != nil && !c.typeParamInScope(tp) { + if tp, _ := aliases.Unalias(p.Type()).(*types.TypeParam); tp != nil && !c.typeParamInScope(tp) { hasTypeParams = true } @@ -202,9 +208,9 @@ func (c *completer) functionLiteral(ctx context.Context, sig *types.Signature, m // If the param has no name in the signature, guess a name based // on the type. Use an empty qualifier to ignore the package. // For example, we want to name "http.Request" "r", not "hr". - typeName, err := source.FormatVarType(ctx, c.snapshot, c.pkg, p, + typeName, err := golang.FormatVarType(ctx, c.snapshot, c.pkg, p, func(p *types.Package) string { return "" }, - func(source.PackageName, source.ImportPath, source.PackagePath) string { return "" }) + func(golang.PackageName, golang.ImportPath, golang.PackagePath) string { return "" }) if err != nil { // In general, the only error we should encounter while formatting is // context cancellation. 
@@ -272,7 +278,7 @@ func (c *completer) functionLiteral(ctx context.Context, sig *types.Signature, m // of "i int, j int". if i == sig.Params().Len()-1 || !types.Identical(p.Type(), sig.Params().At(i+1).Type()) { snip.WriteText(" ") - typeStr, err := source.FormatVarType(ctx, c.snapshot, c.pkg, p, c.qf, c.mq) + typeStr, err := golang.FormatVarType(ctx, c.snapshot, c.pkg, p, c.qf, c.mq) if err != nil { // In general, the only error we should encounter while formatting is // context cancellation. @@ -285,7 +291,7 @@ func (c *completer) functionLiteral(ctx context.Context, sig *types.Signature, m typeStr = strings.Replace(typeStr, "[]", "...", 1) } - if tp, _ := p.Type().(*typeparams.TypeParam); tp != nil && !c.typeParamInScope(tp) { + if tp, ok := aliases.Unalias(p.Type()).(*types.TypeParam); ok && !c.typeParamInScope(tp) { snip.WritePlaceholder(func(snip *snippet.Builder) { snip.WriteText(typeStr) }) @@ -306,7 +312,7 @@ func (c *completer) functionLiteral(ctx context.Context, sig *types.Signature, m var resultHasTypeParams bool for i := 0; i < results.Len(); i++ { - if tp, _ := results.At(i).Type().(*typeparams.TypeParam); tp != nil && !c.typeParamInScope(tp) { + if tp, ok := aliases.Unalias(results.At(i).Type()).(*types.TypeParam); ok && !c.typeParamInScope(tp) { resultHasTypeParams = true } } @@ -330,7 +336,7 @@ func (c *completer) functionLiteral(ctx context.Context, sig *types.Signature, m snip.WriteText(name + " ") } - text, err := source.FormatVarType(ctx, c.snapshot, c.pkg, r, c.qf, c.mq) + text, err := golang.FormatVarType(ctx, c.snapshot, c.pkg, r, c.qf, c.mq) if err != nil { // In general, the only error we should encounter while formatting is // context cancellation. 
@@ -339,7 +345,7 @@ func (c *completer) functionLiteral(ctx context.Context, sig *types.Signature, m } return } - if tp, _ := r.Type().(*typeparams.TypeParam); tp != nil && !c.typeParamInScope(tp) { + if tp, ok := aliases.Unalias(r.Type()).(*types.TypeParam); ok && !c.typeParamInScope(tp) { snip.WritePlaceholder(func(snip *snippet.Builder) { snip.WriteText(text) }) @@ -426,11 +432,12 @@ func abbreviateTypeName(s string) string { } // compositeLiteral adds a composite literal completion item for the given typeName. +// T is an (unnamed, unaliased) struct, array, slice, or map type. func (c *completer) compositeLiteral(T types.Type, snip *snippet.Builder, typeName string, matchScore float64, edits []protocol.TextEdit) { snip.WriteText("{") // Don't put the tab stop inside the composite literal curlies "{}" // for structs that have no accessible fields. - if strct, ok := T.(*types.Struct); !ok || fieldsAccessible(strct, c.pkg.GetTypes()) { + if strct, ok := T.(*types.Struct); !ok || fieldsAccessible(strct, c.pkg.Types()) { snip.WriteFinalTabstop() } snip.WriteText("}") @@ -510,10 +517,12 @@ func (c *completer) typeNameSnippet(literalType types.Type, qf types.Qualifier) var ( snip snippet.Builder typeName string - named, _ = literalType.(*types.Named) + // TODO(adonovan): think more about aliases. + // They should probably be treated more like Named. + named, _ = aliases.Unalias(literalType).(*types.Named) ) - if named != nil && named.Obj() != nil && typeparams.ForNamed(named).Len() > 0 && !c.fullyInstantiated(named) { + if named != nil && named.Obj() != nil && named.TypeParams().Len() > 0 && !c.fullyInstantiated(named) { // We are not "fully instantiated" meaning we have type params that must be specified. if pkg := qf(named.Obj().Pkg()); pkg != "" { typeName = pkg + "." 
@@ -524,12 +533,12 @@ func (c *completer) typeNameSnippet(literalType types.Type, qf types.Qualifier) snip.WriteText(typeName + "[") if c.opts.placeholders { - for i := 0; i < typeparams.ForNamed(named).Len(); i++ { + for i := 0; i < named.TypeParams().Len(); i++ { if i > 0 { snip.WriteText(", ") } snip.WritePlaceholder(func(snip *snippet.Builder) { - snip.WriteText(types.TypeString(typeparams.ForNamed(named).At(i), qf)) + snip.WriteText(types.TypeString(named.TypeParams().At(i), qf)) }) } } else { @@ -549,16 +558,17 @@ func (c *completer) typeNameSnippet(literalType types.Type, qf types.Qualifier) // fullyInstantiated reports whether all of t's type params have // specified type args. func (c *completer) fullyInstantiated(t *types.Named) bool { - tps := typeparams.ForNamed(t) - tas := typeparams.NamedTypeArgs(t) + tps := t.TypeParams() + tas := t.TypeArgs() if tps.Len() != tas.Len() { return false } for i := 0; i < tas.Len(); i++ { - switch ta := tas.At(i).(type) { - case *typeparams.TypeParam: + // TODO(adonovan) think about generic aliases. + switch ta := aliases.Unalias(tas.At(i)).(type) { + case *types.TypeParam: // A *TypeParam only counts as specified if it is currently in // scope (i.e. we are in a generic definition). if !c.typeParamInScope(ta) { @@ -576,7 +586,7 @@ func (c *completer) fullyInstantiated(t *types.Named) bool { // typeParamInScope returns whether tp's object is in scope at c.pos. // This tells you whether you are in a generic definition and can // assume tp has been specified. -func (c *completer) typeParamInScope(tp *typeparams.TypeParam) bool { +func (c *completer) typeParamInScope(tp *types.TypeParam) bool { obj := tp.Obj() if obj == nil { return false diff --git a/gopls/internal/golang/completion/package.go b/gopls/internal/golang/completion/package.go new file mode 100644 index 00000000000..12d4ff0be36 --- /dev/null +++ b/gopls/internal/golang/completion/package.go @@ -0,0 +1,353 @@ +// Copyright 2020 The Go Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package completion + +import ( + "bytes" + "context" + "errors" + "fmt" + "go/ast" + "go/parser" + "go/scanner" + "go/token" + "go/types" + "path/filepath" + "strings" + "unicode" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/fuzzy" +) + +// packageClauseCompletions offers completions for a package declaration when +// one is not present in the given file. +func packageClauseCompletions(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]CompletionItem, *Selection, error) { + // We know that the AST for this file will be empty due to the missing + // package declaration, but parse it anyway to get a mapper. + // TODO(adonovan): opt: there's no need to parse just to get a mapper. 
+ pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return nil, nil, err + } + + offset, err := pgf.Mapper.PositionOffset(position) + if err != nil { + return nil, nil, err + } + surrounding, err := packageCompletionSurrounding(pgf, offset) + if err != nil { + return nil, nil, fmt.Errorf("invalid position for package completion: %w", err) + } + + packageSuggestions, err := packageSuggestions(ctx, snapshot, fh.URI(), "") + if err != nil { + return nil, nil, err + } + + var items []CompletionItem + for _, pkg := range packageSuggestions { + insertText := fmt.Sprintf("package %s", pkg.name) + items = append(items, CompletionItem{ + Label: insertText, + Kind: protocol.ModuleCompletion, + InsertText: insertText, + Score: pkg.score, + }) + } + + return items, surrounding, nil +} + +// packageCompletionSurrounding returns surrounding for package completion if a +// package completion can be suggested at a given cursor offset. A valid location +// for package completion is above any declarations or import statements. +func packageCompletionSurrounding(pgf *parsego.File, offset int) (*Selection, error) { + m := pgf.Mapper + // If the file lacks a package declaration, the parser will return an empty + // AST. As a work-around, try to parse an expression from the file contents. + fset := token.NewFileSet() + expr, _ := parser.ParseExprFrom(fset, m.URI.Path(), pgf.Src, parser.Mode(0)) + if expr == nil { + return nil, fmt.Errorf("unparseable file (%s)", m.URI) + } + tok := fset.File(expr.Pos()) + cursor := tok.Pos(offset) + + // If we were able to parse out an identifier as the first expression from + // the file, it may be the beginning of a package declaration ("pack "). + // We can offer package completions if the cursor is in the identifier. 
+ if name, ok := expr.(*ast.Ident); ok { + if cursor >= name.Pos() && cursor <= name.End() { + if !strings.HasPrefix(PACKAGE, name.Name) { + return nil, fmt.Errorf("cursor in non-matching ident") + } + return &Selection{ + content: name.Name, + cursor: cursor, + tokFile: tok, + start: name.Pos(), + end: name.End(), + mapper: m, + }, nil + } + } + + // The file is invalid, but it contains an expression that we were able to + // parse. We will use this expression to construct the cursor's + // "surrounding". + + // First, consider the possibility that we have a valid "package" keyword + // with an empty package name ("package "). "package" is parsed as an + // *ast.BadDecl since it is a keyword. This logic would allow "package" to + // appear on any line of the file as long as it's the first code expression + // in the file. + lines := strings.Split(string(pgf.Src), "\n") + cursorLine := safetoken.Line(tok, cursor) + if cursorLine <= 0 || cursorLine > len(lines) { + return nil, fmt.Errorf("invalid line number") + } + if safetoken.StartPosition(fset, expr.Pos()).Line == cursorLine { + words := strings.Fields(lines[cursorLine-1]) + if len(words) > 0 && words[0] == PACKAGE { + content := PACKAGE + // Account for spaces if there are any. + if len(words) > 1 { + content += " " + } + + start := expr.Pos() + end := token.Pos(int(expr.Pos()) + len(content) + 1) + // We have verified that we have a valid 'package' keyword as our + // first expression. Ensure that cursor is in this keyword or + // otherwise fallback to the general case. + if cursor >= start && cursor <= end { + return &Selection{ + content: content, + cursor: cursor, + tokFile: tok, + start: start, + end: end, + mapper: m, + }, nil + } + } + } + + // If the cursor is after the start of the expression, no package + // declaration will be valid. + if cursor > expr.Pos() { + return nil, fmt.Errorf("cursor after expression") + } + + // If the cursor is in a comment, don't offer any completions. 
+ if cursorInComment(tok, cursor, m.Content) { + return nil, fmt.Errorf("cursor in comment") + } + + // The surrounding range in this case is the cursor. + return &Selection{ + content: "", + tokFile: tok, + start: cursor, + end: cursor, + cursor: cursor, + mapper: m, + }, nil +} + +func cursorInComment(file *token.File, cursor token.Pos, src []byte) bool { + var s scanner.Scanner + s.Init(file, src, func(_ token.Position, _ string) {}, scanner.ScanComments) + for { + pos, tok, lit := s.Scan() + if pos <= cursor && cursor <= token.Pos(int(pos)+len(lit)) { + return tok == token.COMMENT + } + if tok == token.EOF { + break + } + } + return false +} + +// packageNameCompletions returns name completions for a package clause using +// the current name as prefix. +func (c *completer) packageNameCompletions(ctx context.Context, fileURI protocol.DocumentURI, name *ast.Ident) error { + cursor := int(c.pos - name.NamePos) + if cursor < 0 || cursor > len(name.Name) { + return errors.New("cursor is not in package name identifier") + } + + c.completionContext.packageCompletion = true + + prefix := name.Name[:cursor] + packageSuggestions, err := packageSuggestions(ctx, c.snapshot, fileURI, prefix) + if err != nil { + return err + } + + for _, pkg := range packageSuggestions { + c.deepState.enqueue(pkg) + } + return nil +} + +// packageSuggestions returns a list of packages from workspace packages that +// have the given prefix and are used in the same directory as the given +// file. This also includes test packages for these packages (_test) and +// the directory name itself. 
+func packageSuggestions(ctx context.Context, snapshot *cache.Snapshot, fileURI protocol.DocumentURI, prefix string) (packages []candidate, err error) { + active, err := snapshot.WorkspaceMetadata(ctx) + if err != nil { + return nil, err + } + + toCandidate := func(name string, score float64) candidate { + obj := types.NewPkgName(0, nil, name, types.NewPackage("", name)) + return candidate{obj: obj, name: name, detail: name, score: score} + } + + matcher := fuzzy.NewMatcher(prefix) + + // Always try to suggest a main package + defer func() { + if score := float64(matcher.Score("main")); score > 0 { + packages = append(packages, toCandidate("main", score*lowScore)) + } + }() + + dirPath := filepath.Dir(fileURI.Path()) + dirName := filepath.Base(dirPath) + if !isValidDirName(dirName) { + return packages, nil + } + pkgName := convertDirNameToPkgName(dirName) + + seenPkgs := make(map[golang.PackageName]struct{}) + + // The `go` command by default only allows one package per directory but we + // support multiple package suggestions since gopls is build system agnostic. + for _, mp := range active { + if mp.Name == "main" || mp.Name == "" { + continue + } + if _, ok := seenPkgs[mp.Name]; ok { + continue + } + + // Only add packages that are previously used in the current directory. + var relevantPkg bool + for _, uri := range mp.CompiledGoFiles { + if filepath.Dir(uri.Path()) == dirPath { + relevantPkg = true + break + } + } + if !relevantPkg { + continue + } + + // Add a found package used in current directory as a high relevance + // suggestion and the test package for it as a medium relevance + // suggestion. 
+ if score := float64(matcher.Score(string(mp.Name))); score > 0 { + packages = append(packages, toCandidate(string(mp.Name), score*highScore)) + } + seenPkgs[mp.Name] = struct{}{} + + testPkgName := mp.Name + "_test" + if _, ok := seenPkgs[testPkgName]; ok || strings.HasSuffix(string(mp.Name), "_test") { + continue + } + if score := float64(matcher.Score(string(testPkgName))); score > 0 { + packages = append(packages, toCandidate(string(testPkgName), score*stdScore)) + } + seenPkgs[testPkgName] = struct{}{} + } + + // Add current directory name as a low relevance suggestion. + if _, ok := seenPkgs[pkgName]; !ok { + if score := float64(matcher.Score(string(pkgName))); score > 0 { + packages = append(packages, toCandidate(string(pkgName), score*lowScore)) + } + + testPkgName := pkgName + "_test" + if score := float64(matcher.Score(string(testPkgName))); score > 0 { + packages = append(packages, toCandidate(string(testPkgName), score*lowScore)) + } + } + + return packages, nil +} + +// isValidDirName checks whether the passed directory name can be used in +// a package path. Requirements for a package path can be found here: +// https://golang.org/ref/mod#go-mod-file-ident. +func isValidDirName(dirName string) bool { + if dirName == "" { + return false + } + + for i, ch := range dirName { + if isLetter(ch) || isDigit(ch) { + continue + } + if i == 0 { + // Directory name can start only with '_'. '.' is not allowed in module paths. + // '-' and '~' are not allowed because elements of package paths must be + // safe command-line arguments. + if ch == '_' { + continue + } + } else { + // Modules path elements can't end with '.' + if isAllowedPunctuation(ch) && (i != len(dirName)-1 || ch != '.') { + continue + } + } + + return false + } + return true +} + +// convertDirNameToPkgName converts a valid directory name to a valid package name. +// It leaves only letters and digits. All letters are mapped to lower case. 
+func convertDirNameToPkgName(dirName string) golang.PackageName { + var buf bytes.Buffer + for _, ch := range dirName { + switch { + case isLetter(ch): + buf.WriteRune(unicode.ToLower(ch)) + + case buf.Len() != 0 && isDigit(ch): + buf.WriteRune(ch) + } + } + return golang.PackageName(buf.String()) +} + +// isLetter and isDigit allow only ASCII characters because +// "Each path element is a non-empty string made up of ASCII letters, +// ASCII digits, and limited ASCII punctuation" +// (see https://golang.org/ref/mod#go-mod-file-ident). + +func isLetter(ch rune) bool { + return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' +} + +func isDigit(ch rune) bool { + return '0' <= ch && ch <= '9' +} + +func isAllowedPunctuation(ch rune) bool { + return ch == '_' || ch == '-' || ch == '~' || ch == '.' +} diff --git a/gopls/internal/lsp/source/completion/package_test.go b/gopls/internal/golang/completion/package_test.go similarity index 96% rename from gopls/internal/lsp/source/completion/package_test.go rename to gopls/internal/golang/completion/package_test.go index 614359fa5dc..dc4058fa651 100644 --- a/gopls/internal/lsp/source/completion/package_test.go +++ b/gopls/internal/golang/completion/package_test.go @@ -7,7 +7,7 @@ package completion import ( "testing" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/golang" ) func TestIsValidDirName(t *testing.T) { @@ -55,7 +55,7 @@ func TestConvertDirNameToPkgName(t *testing.T) { tests := []struct { dirName string - pkgName source.PackageName + pkgName golang.PackageName }{ {dirName: "a", pkgName: "a"}, {dirName: "abcdef", pkgName: "abcdef"}, diff --git a/gopls/internal/golang/completion/postfix_snippets.go b/gopls/internal/golang/completion/postfix_snippets.go new file mode 100644 index 00000000000..641fe8746eb --- /dev/null +++ b/gopls/internal/golang/completion/postfix_snippets.go @@ -0,0 +1,682 @@ +// Copyright 2020 The Go Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package completion + +import ( + "context" + "fmt" + "go/ast" + "go/token" + "go/types" + "log" + "reflect" + "strings" + "sync" + "text/template" + + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/golang/completion/snippet" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/imports" + "golang.org/x/tools/internal/typesinternal" +) + +// Postfix snippets are artificial methods that allow the user to +// compose common operations in an "argument oriented" fashion. For +// example, instead of "sort.Slice(someSlice, ...)" a user can expand +// "someSlice.sort!". + +// postfixTmpl represents a postfix snippet completion candidate. +type postfixTmpl struct { + // label is the completion candidate's label presented to the user. + label string + + // details is passed along to the client as the candidate's details. + details string + + // body is the template text. See postfixTmplArgs for details on the + // facilities available to the template. + body string + + tmpl *template.Template +} + +// postfixTmplArgs are the template execution arguments available to +// the postfix snippet templates. +type postfixTmplArgs struct { + // StmtOK is true if it is valid to replace the selector with a + // statement. For example: + // + // func foo() { + // bar.sort! // statement okay + // + // someMethod(bar.sort!) // statement not okay + // } + StmtOK bool + + // X is the textual SelectorExpr.X. For example, when completing + // "foo.bar.print!", "X" is "foo.bar". + X string + + // Obj is the types.Object of SelectorExpr.X, if any. + Obj types.Object + + // Type is the type of "foo.bar" in "foo.bar.print!". 
+ Type types.Type + + // FuncResult are results of the enclosed function + FuncResults []*types.Var + + sel *ast.SelectorExpr + scope *types.Scope + snip snippet.Builder + importIfNeeded func(pkgPath string, scope *types.Scope) (name string, edits []protocol.TextEdit, err error) + edits []protocol.TextEdit + qf types.Qualifier + varNames map[string]bool + placeholders bool + currentTabStop int +} + +var postfixTmpls = []postfixTmpl{{ + label: "sort", + details: "sort.Slice()", + body: `{{if and (eq .Kind "slice") .StmtOK -}} +{{.Import "sort"}}.Slice({{.X}}, func({{.VarName nil "i"}}, {{.VarName nil "j"}} int) bool { + {{.Cursor}} +}) +{{- end}}`, +}, { + label: "last", + details: "s[len(s)-1]", + body: `{{if and (eq .Kind "slice") .Obj -}} +{{.X}}[len({{.X}})-1] +{{- end}}`, +}, { + label: "reverse", + details: "reverse slice", + body: `{{if and (eq .Kind "slice") .StmtOK -}} +{{.Import "slices"}}.Reverse({{.X}}) +{{- end}}`, +}, { + label: "range", + details: "range over slice", + body: `{{if and (eq .Kind "slice") .StmtOK -}} +for {{.VarName nil "i" | .Placeholder }}, {{.VarName .ElemType "v" | .Placeholder}} := range {{.X}} { + {{.Cursor}} +} +{{- end}}`, +}, { + label: "for", + details: "range over slice by index", + body: `{{if and (eq .Kind "slice") .StmtOK -}} +for {{ .VarName nil "i" | .Placeholder }} := range {{.X}} { + {{.Cursor}} +} +{{- end}}`, +}, { + label: "forr", + details: "range over slice by index and value", + body: `{{if and (eq .Kind "slice") .StmtOK -}} +for {{.VarName nil "i" | .Placeholder }}, {{.VarName .ElemType "v" | .Placeholder }} := range {{.X}} { + {{.Cursor}} +} +{{- end}}`, +}, { + label: "append", + details: "append and re-assign slice", + body: `{{if and (eq .Kind "slice") .StmtOK .Obj -}} +{{.X}} = append({{.X}}, {{.Cursor}}) +{{- end}}`, +}, { + label: "append", + details: "append to slice", + body: `{{if and (eq .Kind "slice") (not .StmtOK) -}} +append({{.X}}, {{.Cursor}}) +{{- end}}`, +}, { + label: "copy", + details: 
"duplicate slice", + body: `{{if and (eq .Kind "slice") .StmtOK .Obj -}} +{{$v := (.VarName nil (printf "%sCopy" .X))}}{{$v}} := make([]{{.TypeName .ElemType}}, len({{.X}})) +copy({{$v}}, {{.X}}) +{{end}}`, +}, { + label: "range", + details: "range over map", + body: `{{if and (eq .Kind "map") .StmtOK -}} +for {{.VarName .KeyType "k" | .Placeholder}}, {{.VarName .ElemType "v" | .Placeholder}} := range {{.X}} { + {{.Cursor}} +} +{{- end}}`, +}, { + label: "for", + details: "range over map by key", + body: `{{if and (eq .Kind "map") .StmtOK -}} +for {{.VarName .KeyType "k" | .Placeholder}} := range {{.X}} { + {{.Cursor}} +} +{{- end}}`, +}, { + label: "forr", + details: "range over map by key and value", + body: `{{if and (eq .Kind "map") .StmtOK -}} +for {{.VarName .KeyType "k" | .Placeholder}}, {{.VarName .ElemType "v" | .Placeholder}} := range {{.X}} { + {{.Cursor}} +} +{{- end}}`, +}, { + label: "clear", + details: "clear map contents", + body: `{{if and (eq .Kind "map") .StmtOK -}} +{{$k := (.VarName .KeyType "k")}}for {{$k}} := range {{.X}} { + delete({{.X}}, {{$k}}) +} +{{end}}`, +}, { + label: "keys", + details: "create slice of keys", + body: `{{if and (eq .Kind "map") .StmtOK -}} +{{$keysVar := (.VarName nil "keys")}}{{$keysVar}} := make([]{{.TypeName .KeyType}}, 0, len({{.X}})) +{{$k := (.VarName .KeyType "k")}}for {{$k}} := range {{.X}} { + {{$keysVar}} = append({{$keysVar}}, {{$k}}) +} +{{end}}`, +}, { + label: "range", + details: "range over channel", + body: `{{if and (eq .Kind "chan") .StmtOK -}} +for {{.VarName .ElemType "e" | .Placeholder}} := range {{.X}} { + {{.Cursor}} +} +{{- end}}`, +}, { + label: "for", + details: "range over channel", + body: `{{if and (eq .Kind "chan") .StmtOK -}} +for {{.VarName .ElemType "e" | .Placeholder}} := range {{.X}} { + {{.Cursor}} +} +{{- end}}`, +}, { + label: "var", + details: "assign to variables", + body: `{{if and (eq .Kind "tuple") .StmtOK -}} +{{$a := .}}{{range $i, $v := .Tuple}}{{if $i}}, 
{{end}}{{$a.VarName $v.Type $v.Name | $a.Placeholder }}{{end}} := {{.X}} +{{- end}}`, +}, { + label: "var", + details: "assign to variable", + body: `{{if and (ne .Kind "tuple") .StmtOK -}} +{{.VarName .Type "" | .Placeholder }} := {{.X}} +{{- end}}`, +}, { + label: "print", + details: "print to stdout", + body: `{{if and (ne .Kind "tuple") .StmtOK -}} +{{.Import "fmt"}}.Printf("{{.EscapeQuotes .X}}: %v\n", {{.X}}) +{{- end}}`, +}, { + label: "print", + details: "print to stdout", + body: `{{if and (eq .Kind "tuple") .StmtOK -}} +{{.Import "fmt"}}.Println({{.X}}) +{{- end}}`, +}, { + label: "split", + details: "split string", + body: `{{if (eq (.TypeName .Type) "string") -}} +{{.Import "strings"}}.Split({{.X}}, "{{.Cursor}}") +{{- end}}`, +}, { + label: "join", + details: "join string slice", + body: `{{if and (eq .Kind "slice") (eq (.TypeName .ElemType) "string") -}} +{{.Import "strings"}}.Join({{.X}}, "{{.Cursor}}") +{{- end}}`, +}, { + label: "ifnotnil", + details: "if expr != nil", + body: `{{if and (or (eq .Kind "pointer") (eq .Kind "chan") (eq .Kind "signature") (eq .Kind "interface") (eq .Kind "map") (eq .Kind "slice")) .StmtOK -}} +if {{.X}} != nil { + {{.Cursor}} +} +{{- end}}`, +}, { + label: "len", + details: "len(s)", + body: `{{if (eq .Kind "slice" "map" "array" "chan") -}} +len({{.X}}) +{{- end}}`, +}, { + label: "iferr", + details: "check error and return", + body: `{{if and .StmtOK (eq (.TypeName .Type) "error") -}} +{{- $errName := (or (and .IsIdent .X) "err") -}} +if {{if not .IsIdent}}err := {{.X}}; {{end}}{{$errName}} != nil { + return {{$a := .}}{{range $i, $v := .FuncResults}} + {{- if $i}}, {{end -}} + {{- if eq ($a.TypeName $v.Type) "error" -}} + {{$a.Placeholder $errName}} + {{- else -}} + {{$a.Zero $v.Type}} + {{- end -}} + {{end}} +} +{{end}}`, +}, { + label: "iferr", + details: "check error and return", + body: `{{if and .StmtOK (eq .Kind "tuple") (len .Tuple) (eq (.TypeName .TupleLast.Type) "error") -}} +{{- $a := . 
-}} +if {{range $i, $v := .Tuple}}{{if $i}}, {{end}}{{if and (eq ($a.TypeName $v.Type) "error") (eq (inc $i) (len $a.Tuple))}}err{{else}}_{{end}}{{end}} := {{.X -}} +; err != nil { + return {{range $i, $v := .FuncResults}} + {{- if $i}}, {{end -}} + {{- if eq ($a.TypeName $v.Type) "error" -}} + {{$a.Placeholder "err"}} + {{- else -}} + {{$a.Zero $v.Type}} + {{- end -}} + {{end}} +} +{{end}}`, +}, { + // variferr snippets use nested placeholders, as described in + // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#snippet_syntax, + // so that users can wrap the returned error without modifying the error + // variable name. + label: "variferr", + details: "assign variables and check error", + body: `{{if and .StmtOK (eq .Kind "tuple") (len .Tuple) (eq (.TypeName .TupleLast.Type) "error") -}} +{{- $a := . -}} +{{- $errName := "err" -}} +{{- range $i, $v := .Tuple -}} + {{- if $i}}, {{end -}} + {{- if and (eq ($a.TypeName $v.Type) "error") (eq (inc $i) (len $a.Tuple)) -}} + {{$errName | $a.SpecifiedPlaceholder (len $a.Tuple)}} + {{- else -}} + {{$a.VarName $v.Type $v.Name | $a.Placeholder}} + {{- end -}} +{{- end}} := {{.X}} +if {{$errName | $a.SpecifiedPlaceholder (len $a.Tuple)}} != nil { + return {{range $i, $v := .FuncResults}} + {{- if $i}}, {{end -}} + {{- if eq ($a.TypeName $v.Type) "error" -}} + {{$errName | $a.SpecifiedPlaceholder (len $a.Tuple) | + $a.SpecifiedPlaceholder (inc (len $a.Tuple))}} + {{- else -}} + {{$a.Zero $v.Type}} + {{- end -}} + {{end}} +} +{{end}}`, +}, { + label: "variferr", + details: "assign variables and check error", + body: `{{if and .StmtOK (eq (.TypeName .Type) "error") -}} +{{- $a := . 
-}} +{{- $errName := .VarName nil "err" -}} +{{$errName | $a.SpecifiedPlaceholder 1}} := {{.X}} +if {{$errName | $a.SpecifiedPlaceholder 1}} != nil { + return {{range $i, $v := .FuncResults}} + {{- if $i}}, {{end -}} + {{- if eq ($a.TypeName $v.Type) "error" -}} + {{$errName | $a.SpecifiedPlaceholder 1 | $a.SpecifiedPlaceholder 2}} + {{- else -}} + {{$a.Zero $v.Type}} + {{- end -}} + {{end}} +} +{{end}}`, +}} + +// Cursor indicates where the client's cursor should end up after the +// snippet is done. +func (a *postfixTmplArgs) Cursor() string { + return "$0" +} + +// Placeholder indicates a tab stop with the placeholder string; the order +// of tab stops is the same as the order of invocation +func (a *postfixTmplArgs) Placeholder(placeholder string) string { + if !a.placeholders { + placeholder = "" + } + return fmt.Sprintf("${%d:%s}", a.nextTabStop(), placeholder) +} + +// nextTabStop returns the next tab stop index for a new placeholder. +func (a *postfixTmplArgs) nextTabStop() int { + // Tab stops start from 1, so increment before returning. + a.currentTabStop++ + return a.currentTabStop +} + +// SpecifiedPlaceholder indicates a specified tab stop with the placeholder string. +// Sometimes the same tab stop appears in multiple places and their numbers +// need to be specified. e.g. variferr +func (a *postfixTmplArgs) SpecifiedPlaceholder(tabStop int, placeholder string) string { + if !a.placeholders { + placeholder = "" + } + return fmt.Sprintf("${%d:%s}", tabStop, placeholder) +} + +// Import makes sure the package corresponding to path is imported, +// returning the identifier to use to refer to the package. +func (a *postfixTmplArgs) Import(path string) (string, error) { + name, edits, err := a.importIfNeeded(path, a.scope) + if err != nil { + return "", fmt.Errorf("couldn't import %q: %w", path, err) + } + a.edits = append(a.edits, edits...) 
+ + return name, nil +} + +func (a *postfixTmplArgs) EscapeQuotes(v string) string { + return strings.ReplaceAll(v, `"`, `\\"`) +} + +// ElemType returns the Elem() type of xType, if applicable. +func (a *postfixTmplArgs) ElemType() types.Type { + type hasElem interface{ Elem() types.Type } // Array, Chan, Map, Pointer, Slice + if e, ok := a.Type.Underlying().(hasElem); ok { + return e.Elem() + } + return nil +} + +// Kind returns the underlying kind of type, e.g. "slice", "struct", +// etc. +func (a *postfixTmplArgs) Kind() string { + t := reflect.TypeOf(a.Type.Underlying()) + return strings.ToLower(strings.TrimPrefix(t.String(), "*types.")) +} + +// KeyType returns the type of X's key. KeyType panics if X is not a +// map. +func (a *postfixTmplArgs) KeyType() types.Type { + return a.Type.Underlying().(*types.Map).Key() +} + +// Tuple returns the tuple result vars if the type of X is tuple. +func (a *postfixTmplArgs) Tuple() []*types.Var { + tuple, _ := a.Type.(*types.Tuple) + if tuple == nil { + return nil + } + + typs := make([]*types.Var, 0, tuple.Len()) + for i := 0; i < tuple.Len(); i++ { + typs = append(typs, tuple.At(i)) + } + return typs +} + +// TupleLast returns the last tuple result vars if the type of X is tuple. +func (a *postfixTmplArgs) TupleLast() *types.Var { + tuple, _ := a.Type.(*types.Tuple) + if tuple == nil { + return nil + } + if tuple.Len() == 0 { + return nil + } + return tuple.At(tuple.Len() - 1) +} + +// TypeName returns the textual representation of type t. 
+func (a *postfixTmplArgs) TypeName(t types.Type) (string, error) { + if t == nil || t == types.Typ[types.Invalid] { + return "", fmt.Errorf("invalid type: %v", t) + } + return types.TypeString(t, a.qf), nil +} + +// Zero returns the zero value representation of type t. +func (a *postfixTmplArgs) Zero(t types.Type) string { + return formatZeroValue(t, a.qf) +} + +func (a *postfixTmplArgs) IsIdent() bool { + _, ok := a.sel.X.(*ast.Ident) + return ok +} + +// VarName returns a suitable variable name for the type t. If t +// implements the error interface, "err" is used. If t is not a named +// type then nonNamedDefault is used. Otherwise a name is made by +// abbreviating the type name. If the resultant name is already in +// scope, an integer is appended to make a unique name. +func (a *postfixTmplArgs) VarName(t types.Type, nonNamedDefault string) string { + if t == nil { + t = types.Typ[types.Invalid] + } + + var name string + // go/types predicates are undefined on types.Typ[types.Invalid]. 
+ if !types.Identical(t, types.Typ[types.Invalid]) && types.Implements(t, errorIntf) { + name = "err" + } else if !is[*types.Named](aliases.Unalias(typesinternal.Unpointer(t))) { + name = nonNamedDefault + } + + if name == "" { + name = types.TypeString(t, func(p *types.Package) string { + return "" + }) + name = abbreviateTypeName(name) + } + + if dot := strings.LastIndex(name, "."); dot > -1 { + name = name[dot+1:] + } + + uniqueName := name + for i := 2; ; i++ { + if s, _ := a.scope.LookupParent(uniqueName, token.NoPos); s == nil && !a.varNames[uniqueName] { + break + } + uniqueName = fmt.Sprintf("%s%d", name, i) + } + + a.varNames[uniqueName] = true + + return uniqueName +} + +func (c *completer) addPostfixSnippetCandidates(ctx context.Context, sel *ast.SelectorExpr) { + if !c.opts.postfix { + return + } + + initPostfixRules() + + if sel == nil || sel.Sel == nil { + return + } + + selType := c.pkg.TypesInfo().TypeOf(sel.X) + if selType == nil { + return + } + + // Skip empty tuples since there is no value to operate on. + if tuple, ok := selType.(*types.Tuple); ok && tuple == nil { + return + } + + tokFile := c.pkg.FileSet().File(c.pos) + + // Only replace sel with a statement if sel is already a statement. + var stmtOK bool + for i, n := range c.path { + if n == sel && i < len(c.path)-1 { + switch p := c.path[i+1].(type) { + case *ast.ExprStmt: + stmtOK = true + case *ast.AssignStmt: + // In cases like: + // + // foo.<> + // bar = 123 + // + // detect that "foo." makes up the entire statement since the + // apparent selector spans lines. 
+ stmtOK = safetoken.Line(tokFile, c.pos) < safetoken.Line(tokFile, p.TokPos) + } + break + } + } + + var funcResults []*types.Var + if c.enclosingFunc != nil { + results := c.enclosingFunc.sig.Results() + if results != nil { + funcResults = make([]*types.Var, results.Len()) + for i := 0; i < results.Len(); i++ { + funcResults[i] = results.At(i) + } + } + } + + scope := c.pkg.Types().Scope().Innermost(c.pos) + if scope == nil { + return + } + + // afterDot is the position after selector dot, e.g. "|" in + // "foo.|print". + afterDot := sel.Sel.Pos() + + // We must detect dangling selectors such as: + // + // foo.<> + // bar + // + // and adjust afterDot so that we don't mistakenly delete the + // newline thinking "bar" is part of our selector. + if startLine := safetoken.Line(tokFile, sel.Pos()); startLine != safetoken.Line(tokFile, afterDot) { + if safetoken.Line(tokFile, c.pos) != startLine { + return + } + afterDot = c.pos + } + + for _, rule := range postfixTmpls { + // When completing foo.print<>, "print" is naturally overwritten, + // but we need to also remove "foo." so the snippet has a clean + // slate. + edits, err := c.editText(sel.Pos(), afterDot, "") + if err != nil { + event.Error(ctx, "error calculating postfix edits", err) + return + } + + tmplArgs := postfixTmplArgs{ + X: golang.FormatNode(c.pkg.FileSet(), sel.X), + StmtOK: stmtOK, + Obj: exprObj(c.pkg.TypesInfo(), sel.X), + Type: selType, + FuncResults: funcResults, + sel: sel, + qf: c.qf, + importIfNeeded: c.importIfNeeded, + scope: scope, + varNames: make(map[string]bool), + placeholders: c.opts.placeholders, + } + + // Feed the template straight into the snippet builder. This + // allows templates to build snippets as they are executed. 
+ err = rule.tmpl.Execute(&tmplArgs.snip, &tmplArgs) + if err != nil { + event.Error(ctx, "error executing postfix template", err) + continue + } + + if strings.TrimSpace(tmplArgs.snip.String()) == "" { + continue + } + + score := c.matcher.Score(rule.label) + if score <= 0 { + continue + } + + c.items = append(c.items, CompletionItem{ + Label: rule.label + "!", + Detail: rule.details, + Score: float64(score) * 0.01, + Kind: protocol.SnippetCompletion, + snippet: &tmplArgs.snip, + AdditionalTextEdits: append(edits, tmplArgs.edits...), + }) + } +} + +var postfixRulesOnce sync.Once + +func initPostfixRules() { + postfixRulesOnce.Do(func() { + var idx int + for _, rule := range postfixTmpls { + var err error + rule.tmpl, err = template.New("postfix_snippet").Funcs(template.FuncMap{ + "inc": inc, + }).Parse(rule.body) + if err != nil { + log.Panicf("error parsing postfix snippet template: %v", err) + } + postfixTmpls[idx] = rule + idx++ + } + postfixTmpls = postfixTmpls[:idx] + }) +} + +func inc(i int) int { + return i + 1 +} + +// importIfNeeded returns the package identifier and any necessary +// edits to import package pkgPath. +func (c *completer) importIfNeeded(pkgPath string, scope *types.Scope) (string, []protocol.TextEdit, error) { + defaultName := imports.ImportPathToAssumedName(pkgPath) + + // Check if file already imports pkgPath. + for _, s := range c.file.Imports { + // TODO(adonovan): what if pkgPath has a vendor/ suffix? + // This may be the cause of go.dev/issue/56291. + if string(metadata.UnquoteImportPath(s)) == pkgPath { + if s.Name == nil { + return defaultName, nil, nil + } + if s.Name.Name != "_" { + return s.Name.Name, nil, nil + } + } + } + + // Give up if the package's name is already in use by another object. 
+ if _, obj := scope.LookupParent(defaultName, token.NoPos); obj != nil { + return "", nil, fmt.Errorf("import name %q of %q already in use", defaultName, pkgPath) + } + + edits, err := c.importEdits(&importInfo{ + importPath: pkgPath, + }) + if err != nil { + return "", nil, err + } + + return defaultName, edits, nil +} diff --git a/gopls/internal/lsp/source/completion/printf.go b/gopls/internal/golang/completion/printf.go similarity index 98% rename from gopls/internal/lsp/source/completion/printf.go rename to gopls/internal/golang/completion/printf.go index 43201175542..a7413bc57b0 100644 --- a/gopls/internal/lsp/source/completion/printf.go +++ b/gopls/internal/golang/completion/printf.go @@ -23,7 +23,7 @@ func printfArgKind(info *types.Info, call *ast.CallExpr, argIdx int) objKind { return kindAny } - sig, _ := fn.Type().(*types.Signature) + sig, _ := fn.Type().Underlying().(*types.Signature) if sig == nil { return kindAny } diff --git a/gopls/internal/lsp/source/completion/printf_test.go b/gopls/internal/golang/completion/printf_test.go similarity index 100% rename from gopls/internal/lsp/source/completion/printf_test.go rename to gopls/internal/golang/completion/printf_test.go diff --git a/gopls/internal/lsp/source/completion/snippet.go b/gopls/internal/golang/completion/snippet.go similarity index 89% rename from gopls/internal/lsp/source/completion/snippet.go rename to gopls/internal/golang/completion/snippet.go index 2be485f6d85..8df81f87672 100644 --- a/gopls/internal/lsp/source/completion/snippet.go +++ b/gopls/internal/golang/completion/snippet.go @@ -7,8 +7,8 @@ package completion import ( "go/ast" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/snippet" + "golang.org/x/tools/gopls/internal/golang/completion/snippet" + "golang.org/x/tools/gopls/internal/util/safetoken" ) // structFieldSnippet calculates the snippet for struct literal field names. 
@@ -50,6 +50,11 @@ func (c *completer) structFieldSnippet(cand candidate, detail string, snip *snip } // functionCallSnippet calculates the snippet for function calls. +// +// Callers should omit the suffix of type parameters that are +// constrained by the argument types, to avoid offering completions +// that contain instantiations that are redundant because of type +// inference, such as f[int](1) for func f[T any](x T). func (c *completer) functionCallSnippet(name string, tparams, params []string, snip *snippet.Builder) { if !c.opts.completeFunctionCalls { snip.WriteText(name) diff --git a/gopls/internal/lsp/snippet/snippet_builder.go b/gopls/internal/golang/completion/snippet/snippet_builder.go similarity index 100% rename from gopls/internal/lsp/snippet/snippet_builder.go rename to gopls/internal/golang/completion/snippet/snippet_builder.go diff --git a/gopls/internal/lsp/snippet/snippet_builder_test.go b/gopls/internal/golang/completion/snippet/snippet_builder_test.go similarity index 100% rename from gopls/internal/lsp/snippet/snippet_builder_test.go rename to gopls/internal/golang/completion/snippet/snippet_builder_test.go diff --git a/gopls/internal/lsp/source/completion/statements.go b/gopls/internal/golang/completion/statements.go similarity index 81% rename from gopls/internal/lsp/source/completion/statements.go rename to gopls/internal/golang/completion/statements.go index a801a09570b..3ac130c4e21 100644 --- a/gopls/internal/lsp/source/completion/statements.go +++ b/gopls/internal/golang/completion/statements.go @@ -9,10 +9,12 @@ import ( "go/ast" "go/token" "go/types" + "strings" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/snippet" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/golang/completion/snippet" + "golang.org/x/tools/gopls/internal/protocol" ) // addStatementCandidates 
adds full statement completion candidates @@ -20,6 +22,7 @@ import ( func (c *completer) addStatementCandidates() { c.addErrCheck() c.addAssignAppend() + c.addReturnZeroValues() } // addAssignAppend offers a completion candidate of the form: @@ -67,7 +70,7 @@ func (c *completer) addAssignAppend() { return } - lhsType := c.pkg.GetTypesInfo().TypeOf(n.Lhs[exprIdx]) + lhsType := c.pkg.TypesInfo().TypeOf(n.Lhs[exprIdx]) if lhsType == nil { return } @@ -78,7 +81,7 @@ func (c *completer) addAssignAppend() { } // The name or our slice is whatever's in the LHS expression. - sliceText = source.FormatNode(fset, n.Lhs[exprIdx]) + sliceText = golang.FormatNode(fset, n.Lhs[exprIdx]) case *ast.SelectorExpr: // Make sure we are a selector at the beginning of a statement. if _, parentIsExprtStmt := c.path[2].(*ast.ExprStmt); !parentIsExprtStmt { @@ -88,7 +91,7 @@ func (c *completer) addAssignAppend() { // So far we only know the first part of our slice name. For // example in "s.a<>" we only know our slice begins with "s." // since the user could still be typing. - sliceText = source.FormatNode(fset, n.X) + "." + sliceText = golang.FormatNode(fset, n.X) + "." needsLHS = true case *ast.ExprStmt: needsLHS = true @@ -203,13 +206,13 @@ func (c *completer) addErrCheck() { lastAssignee := assign.Lhs[len(assign.Lhs)-1] // Make sure the final assignee is an error. - if !types.Identical(c.pkg.GetTypesInfo().TypeOf(lastAssignee), errorType) { + if !types.Identical(c.pkg.TypesInfo().TypeOf(lastAssignee), errorType) { return } var ( // errVar is e.g. "err" in "foo, err := bar()". - errVar = source.FormatNode(c.pkg.FileSet(), lastAssignee) + errVar = golang.FormatNode(c.pkg.FileSet(), lastAssignee) // Whether we need to include the "if" keyword in our candidate. needsIf = true @@ -320,13 +323,13 @@ func (c *completer) addErrCheck() { // returns "b" etc. 
An empty string indicates that the function signature // does not take a testing.TB parameter or does so but is ignored such // as func someFunc(*testing.T). -func getTestVar(enclosingFunc *funcInfo, pkg source.Package) string { +func getTestVar(enclosingFunc *funcInfo, pkg *cache.Package) string { if enclosingFunc == nil || enclosingFunc.sig == nil { return "" } var testingPkg *types.Package - for _, p := range pkg.GetTypes().Imports() { + for _, p := range pkg.Types().Imports() { if p.Path() == "testing" { testingPkg = p break @@ -358,3 +361,60 @@ func getTestVar(enclosingFunc *funcInfo, pkg source.Package) string { return "" } + +// addReturnZeroValues offers a snippet candidate on the form: +// +// return 0, "", nil +// +// Requires a partially or fully written return keyword at position. +// Requires current position to be in a function with more than +// zero return parameters. +func (c *completer) addReturnZeroValues() { + if len(c.path) < 2 || c.enclosingFunc == nil || !c.opts.placeholders { + return + } + result := c.enclosingFunc.sig.Results() + if result.Len() == 0 { + return + } + + // Offer just less than we expect from return as a keyword. + var score = stdScore - 0.01 + switch c.path[0].(type) { + case *ast.ReturnStmt, *ast.Ident: + f := c.matcher.Score("return") + if f <= 0 { + return + } + score *= float64(f) + default: + return + } + + // The snippet will have a placeholder over each return value. + // The label will not. 
+ var snip snippet.Builder + var label strings.Builder + snip.WriteText("return ") + fmt.Fprintf(&label, "return ") + + for i := 0; i < result.Len(); i++ { + if i > 0 { + snip.WriteText(", ") + fmt.Fprintf(&label, ", ") + } + + zero := formatZeroValue(result.At(i).Type(), c.qf) + snip.WritePlaceholder(func(b *snippet.Builder) { + b.WriteText(zero) + }) + fmt.Fprintf(&label, zero) + } + + c.items = append(c.items, CompletionItem{ + Label: label.String(), + Kind: protocol.SnippetCompletion, + Score: score, + snippet: &snip, + }) +} diff --git a/gopls/internal/golang/completion/util.go b/gopls/internal/golang/completion/util.go new file mode 100644 index 00000000000..1261d417080 --- /dev/null +++ b/gopls/internal/golang/completion/util.go @@ -0,0 +1,334 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package completion + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/typeparams" +) + +// exprAtPos returns the index of the expression containing pos. +func exprAtPos(pos token.Pos, args []ast.Expr) int { + for i, expr := range args { + if expr.Pos() <= pos && pos <= expr.End() { + return i + } + } + return len(args) +} + +// eachField invokes fn for each field that can be selected from a +// value of type T. +func eachField(T types.Type, fn func(*types.Var)) { + // TODO(adonovan): this algorithm doesn't exclude ambiguous + // selections that match more than one field/method. + // types.NewSelectionSet should do that for us. 
+ + // for termination on recursive types + var seen typeutil.Map + + var visit func(T types.Type) + visit = func(T types.Type) { + // T may be a Struct, optionally Named, with an optional + // Pointer (with optional Aliases at every step!): + // Consider: type T *struct{ f int }; _ = T(nil).f + if T, ok := typeparams.Deref(T).Underlying().(*types.Struct); ok { + if seen.At(T) != nil { + return + } + + for i := 0; i < T.NumFields(); i++ { + f := T.Field(i) + fn(f) + if f.Anonymous() { + seen.Set(T, true) + visit(f.Type()) + } + } + } + } + visit(T) +} + +// typeIsValid reports whether typ doesn't contain any Invalid types. +func typeIsValid(typ types.Type) bool { + // Check named types separately, because we don't want + // to call Underlying() on them to avoid problems with recursive types. + if _, ok := aliases.Unalias(typ).(*types.Named); ok { + return true + } + + switch typ := typ.Underlying().(type) { + case *types.Basic: + return typ.Kind() != types.Invalid + case *types.Array: + return typeIsValid(typ.Elem()) + case *types.Slice: + return typeIsValid(typ.Elem()) + case *types.Pointer: + return typeIsValid(typ.Elem()) + case *types.Map: + return typeIsValid(typ.Key()) && typeIsValid(typ.Elem()) + case *types.Chan: + return typeIsValid(typ.Elem()) + case *types.Signature: + return typeIsValid(typ.Params()) && typeIsValid(typ.Results()) + case *types.Tuple: + for i := 0; i < typ.Len(); i++ { + if !typeIsValid(typ.At(i).Type()) { + return false + } + } + return true + case *types.Struct, *types.Interface: + // Don't bother checking structs, interfaces for validity. + return true + default: + return false + } +} + +// resolveInvalid traverses the node of the AST that defines the scope +// containing the declaration of obj, and attempts to find a user-friendly +// name for its invalid type. The resulting Object and its Type are fake. 
+func resolveInvalid(fset *token.FileSet, obj types.Object, node ast.Node, info *types.Info) types.Object { + var resultExpr ast.Expr + ast.Inspect(node, func(node ast.Node) bool { + switch n := node.(type) { + case *ast.ValueSpec: + for _, name := range n.Names { + if info.Defs[name] == obj { + resultExpr = n.Type + } + } + return false + case *ast.Field: // This case handles parameters and results of a FuncDecl or FuncLit. + for _, name := range n.Names { + if info.Defs[name] == obj { + resultExpr = n.Type + } + } + return false + default: + return true + } + }) + // Construct a fake type for the object and return a fake object with this type. + typename := golang.FormatNode(fset, resultExpr) + typ := types.NewNamed(types.NewTypeName(token.NoPos, obj.Pkg(), typename, nil), types.Typ[types.Invalid], nil) + return types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), typ) +} + +// TODO(adonovan): inline these. +func isVar(obj types.Object) bool { return is[*types.Var](obj) } +func isTypeName(obj types.Object) bool { return is[*types.TypeName](obj) } +func isFunc(obj types.Object) bool { return is[*types.Func](obj) } +func isPkgName(obj types.Object) bool { return is[*types.PkgName](obj) } + +// isPointer reports whether T is a Pointer, or an alias of one. +// It returns false for a Named type whose Underlying is a Pointer. +// +// TODO(adonovan): shouldn't this use CoreType(T)? +func isPointer(T types.Type) bool { return is[*types.Pointer](aliases.Unalias(T)) } + +func isEmptyInterface(T types.Type) bool { + // TODO(adonovan): shouldn't this use Underlying? 
+ intf, _ := T.(*types.Interface) + return intf != nil && intf.NumMethods() == 0 && intf.IsMethodSet() +} + +func isUntyped(T types.Type) bool { + if basic, ok := aliases.Unalias(T).(*types.Basic); ok { + return basic.Info()&types.IsUntyped > 0 + } + return false +} + +func deslice(T types.Type) types.Type { + if slice, ok := T.Underlying().(*types.Slice); ok { + return slice.Elem() + } + return nil +} + +// isSelector returns the enclosing *ast.SelectorExpr when pos is in the +// selector. +func enclosingSelector(path []ast.Node, pos token.Pos) *ast.SelectorExpr { + if len(path) == 0 { + return nil + } + + if sel, ok := path[0].(*ast.SelectorExpr); ok { + return sel + } + + // TODO(adonovan): consider ast.ParenExpr (e.g. (x).name) + if _, ok := path[0].(*ast.Ident); ok && len(path) > 1 { + if sel, ok := path[1].(*ast.SelectorExpr); ok && pos >= sel.Sel.Pos() { + return sel + } + } + + return nil +} + +// enclosingDeclLHS returns LHS idents from containing value spec or +// assign statement. +func enclosingDeclLHS(path []ast.Node) []*ast.Ident { + for _, n := range path { + switch n := n.(type) { + case *ast.ValueSpec: + return n.Names + case *ast.AssignStmt: + ids := make([]*ast.Ident, 0, len(n.Lhs)) + for _, e := range n.Lhs { + if id, ok := e.(*ast.Ident); ok { + ids = append(ids, id) + } + } + return ids + } + } + + return nil +} + +// exprObj returns the types.Object associated with the *ast.Ident or +// *ast.SelectorExpr e. +func exprObj(info *types.Info, e ast.Expr) types.Object { + var ident *ast.Ident + switch expr := e.(type) { + case *ast.Ident: + ident = expr + case *ast.SelectorExpr: + ident = expr.Sel + default: + return nil + } + + return info.ObjectOf(ident) +} + +// typeConversion returns the type being converted to if call is a type +// conversion expression. +func typeConversion(call *ast.CallExpr, info *types.Info) types.Type { + // Type conversion (e.g. "float64(foo)"). 
+ if fun, _ := exprObj(info, call.Fun).(*types.TypeName); fun != nil { + return fun.Type() + } + + return nil +} + +// fieldsAccessible returns whether s has at least one field accessible by p. +func fieldsAccessible(s *types.Struct, p *types.Package) bool { + for i := 0; i < s.NumFields(); i++ { + f := s.Field(i) + if f.Exported() || f.Pkg() == p { + return true + } + } + return false +} + +// prevStmt returns the statement that precedes the statement containing pos. +// For example: +// +// foo := 1 +// bar(1 + 2<>) +// +// If "<>" is pos, prevStmt returns "foo := 1" +func prevStmt(pos token.Pos, path []ast.Node) ast.Stmt { + var blockLines []ast.Stmt + for i := 0; i < len(path) && blockLines == nil; i++ { + switch n := path[i].(type) { + case *ast.BlockStmt: + blockLines = n.List + case *ast.CommClause: + blockLines = n.Body + case *ast.CaseClause: + blockLines = n.Body + } + } + + for i := len(blockLines) - 1; i >= 0; i-- { + if blockLines[i].End() < pos { + return blockLines[i] + } + } + + return nil +} + +// formatZeroValue produces Go code representing the zero value of T. It +// returns the empty string if T is invalid. +func formatZeroValue(T types.Type, qf types.Qualifier) string { + switch u := T.Underlying().(type) { + case *types.Basic: + switch { + case u.Info()&types.IsNumeric > 0: + return "0" + case u.Info()&types.IsString > 0: + return `""` + case u.Info()&types.IsBoolean > 0: + return "false" + default: + return "" + } + case *types.Pointer, *types.Interface, *types.Chan, *types.Map, *types.Slice, *types.Signature: + return "nil" + default: + return types.TypeString(T, qf) + "{}" + } +} + +// isBasicKind returns whether t is a basic type of kind k. 
+func isBasicKind(t types.Type, k types.BasicInfo) bool { + b, _ := t.Underlying().(*types.Basic) + return b != nil && b.Info()&k > 0 +} + +func (c *completer) editText(from, to token.Pos, newText string) ([]protocol.TextEdit, error) { + start, end, err := safetoken.Offsets(c.tokFile, from, to) + if err != nil { + return nil, err // can't happen: from/to came from c + } + return protocol.EditsFromDiffEdits(c.mapper, []diff.Edit{{ + Start: start, + End: end, + New: newText, + }}) +} + +// assignableTo is like types.AssignableTo, but returns false if +// either type is invalid. +func assignableTo(x, to types.Type) bool { + if aliases.Unalias(x) == types.Typ[types.Invalid] || + aliases.Unalias(to) == types.Typ[types.Invalid] { + return false + } + + return types.AssignableTo(x, to) +} + +// convertibleTo is like types.ConvertibleTo, but returns false if +// either type is invalid. +func convertibleTo(x, to types.Type) bool { + if aliases.Unalias(x) == types.Typ[types.Invalid] || + aliases.Unalias(to) == types.Typ[types.Invalid] { + return false + } + + return types.ConvertibleTo(x, to) +} diff --git a/gopls/internal/lsp/source/completion/util_test.go b/gopls/internal/golang/completion/util_test.go similarity index 100% rename from gopls/internal/lsp/source/completion/util_test.go rename to gopls/internal/golang/completion/util_test.go diff --git a/gopls/internal/golang/definition.go b/gopls/internal/golang/definition.go new file mode 100644 index 00000000000..e689c806b0c --- /dev/null +++ b/gopls/internal/golang/definition.go @@ -0,0 +1,312 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package golang + +import ( + "context" + "errors" + "fmt" + "go/ast" + "go/parser" + "go/token" + "go/types" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/internal/event" +) + +// Definition handles the textDocument/definition request for Go files. +func Definition(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]protocol.Location, error) { + ctx, done := event.Start(ctx, "golang.Definition") + defer done() + + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + pos, err := pgf.PositionPos(position) + if err != nil { + return nil, err + } + + // Handle the case where the cursor is in an import. + importLocations, err := importDefinition(ctx, snapshot, pkg, pgf, pos) + if err != nil { + return nil, err + } + if len(importLocations) > 0 { + return importLocations, nil + } + + // Handle the case where the cursor is in the package name. + // We use "<= End" to accept a query immediately after the package name. + if pgf.File != nil && pgf.File.Name.Pos() <= pos && pos <= pgf.File.Name.End() { + // If there's no package documentation, just use current file. + declFile := pgf + for _, pgf := range pkg.CompiledGoFiles() { + if pgf.File.Name != nil && pgf.File.Doc != nil { + declFile = pgf + break + } + } + loc, err := declFile.NodeLocation(declFile.File.Name) + if err != nil { + return nil, err + } + return []protocol.Location{loc}, nil + } + + // Handle the case where the cursor is in a linkname directive. + locations, err := LinknameDefinition(ctx, snapshot, pgf.Mapper, position) + if !errors.Is(err, ErrNoLinkname) { + return locations, err + } + + // Handle the case where the cursor is in an embed directive. 
+ locations, err = EmbedDefinition(pgf.Mapper, position) + if !errors.Is(err, ErrNoEmbed) { + return locations, err + } + + // The general case: the cursor is on an identifier. + _, obj, _ := referencedObject(pkg, pgf, pos) + if obj == nil { + return nil, nil + } + + // Handle objects with no position: builtin, unsafe. + if !obj.Pos().IsValid() { + return builtinDefinition(ctx, snapshot, obj) + } + + // Finally, map the object position. + loc, err := mapPosition(ctx, pkg.FileSet(), snapshot, obj.Pos(), adjustedObjEnd(obj)) + if err != nil { + return nil, err + } + return []protocol.Location{loc}, nil +} + +// builtinDefinition returns the location of the fake source +// declaration of a built-in in {builtin,unsafe}.go. +func builtinDefinition(ctx context.Context, snapshot *cache.Snapshot, obj types.Object) ([]protocol.Location, error) { + pgf, decl, err := builtinDecl(ctx, snapshot, obj) + if err != nil { + return nil, err + } + + loc, err := pgf.PosLocation(decl.Pos(), decl.Pos()+token.Pos(len(obj.Name()))) + if err != nil { + return nil, err + } + return []protocol.Location{loc}, nil +} + +// builtinDecl returns the parsed Go file and node corresponding to a builtin +// object, which may be a universe object or part of types.Unsafe. +func builtinDecl(ctx context.Context, snapshot *cache.Snapshot, obj types.Object) (*parsego.File, ast.Node, error) { + // getDecl returns the file-level declaration of name + // using legacy (go/ast) object resolution. + getDecl := func(file *ast.File, name string) (ast.Node, error) { + astObj := file.Scope.Lookup(name) + if astObj == nil { + // Every built-in should have documentation syntax. + // However, it is possible to reach this statement by + // commenting out declarations in {builtin,unsafe}.go. 
+ return nil, fmt.Errorf("internal error: no object for %s", name) + } + decl, ok := astObj.Decl.(ast.Node) + if !ok { + return nil, bug.Errorf("internal error: no declaration for %s", obj.Name()) + } + return decl, nil + } + + var ( + pgf *parsego.File + decl ast.Node + err error + ) + if obj.Pkg() == types.Unsafe { + // package "unsafe": + // parse $GOROOT/src/unsafe/unsafe.go + unsafe := snapshot.Metadata("unsafe") + if unsafe == nil { + // If the type checker somehow resolved 'unsafe', we must have metadata + // for it. + return nil, nil, bug.Errorf("no metadata for package 'unsafe'") + } + uri := unsafe.GoFiles[0] + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return nil, nil, err + } + pgf, err = snapshot.ParseGo(ctx, fh, parsego.Full&^parser.SkipObjectResolution) + if err != nil { + return nil, nil, err + } + decl, err = getDecl(pgf.File, obj.Name()) + if err != nil { + return nil, nil, err + } + } else { + // pseudo-package "builtin": + // use parsed $GOROOT/src/builtin/builtin.go + pgf, err = snapshot.BuiltinFile(ctx) + if err != nil { + return nil, nil, err + } + + if obj.Parent() == types.Universe { + // built-in function or type + decl, err = getDecl(pgf.File, obj.Name()) + if err != nil { + return nil, nil, err + } + } else if obj.Name() == "Error" { + // error.Error method + decl, err = getDecl(pgf.File, "error") + if err != nil { + return nil, nil, err + } + decl = decl.(*ast.TypeSpec).Type.(*ast.InterfaceType).Methods.List[0] + + } else { + return nil, nil, bug.Errorf("unknown built-in %v", obj) + } + } + return pgf, decl, nil +} + +// referencedObject returns the identifier and object referenced at the +// specified position, which must be within the file pgf, for the purposes of +// definition/hover/call hierarchy operations. It returns a nil object if no +// object was found at the given position. +// +// If the returned identifier is a type-switch implicit (i.e. 
the x in x := +// e.(type)), the third result will be the type of the expression being +// switched on (the type of e in the example). This facilitates workarounds for +// limitations of the go/types API, which does not report an object for the +// identifier x. +// +// For embedded fields, referencedObject returns the type name object rather +// than the var (field) object. +// +// TODO(rfindley): this function exists to preserve the pre-existing behavior +// of golang.Identifier. Eliminate this helper in favor of sharing +// functionality with objectsAt, after choosing suitable primitives. +func referencedObject(pkg *cache.Package, pgf *parsego.File, pos token.Pos) (*ast.Ident, types.Object, types.Type) { + path := pathEnclosingObjNode(pgf.File, pos) + if len(path) == 0 { + return nil, nil, nil + } + var obj types.Object + info := pkg.TypesInfo() + switch n := path[0].(type) { + case *ast.Ident: + obj = info.ObjectOf(n) + // If n is the var's declaring ident in a type switch + // [i.e. the x in x := foo.(type)], it will not have an object. In this + // case, set obj to the first implicit object (if any), and return the type + // of the expression being switched on. + // + // The type switch may have no case clauses and thus no + // implicit objects; this is a type error ("unused x"), + if obj == nil { + if implicits, typ := typeSwitchImplicits(info, path); len(implicits) > 0 { + return n, implicits[0], typ + } + } + + // If the original position was an embedded field, we want to jump + // to the field's type definition, not the field's definition. + if v, ok := obj.(*types.Var); ok && v.Embedded() { + // types.Info.Uses contains the embedded field's *types.TypeName. + if typeName := info.Uses[n]; typeName != nil { + obj = typeName + } + } + return n, obj, nil + } + return nil, nil, nil +} + +// importDefinition returns locations defining a package referenced by the +// import spec containing pos. +// +// If pos is not inside an import spec, it returns nil, nil. 
+func importDefinition(ctx context.Context, s *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, pos token.Pos) ([]protocol.Location, error) { + var imp *ast.ImportSpec + for _, spec := range pgf.File.Imports { + // We use "<= End" to accept a query immediately after an ImportSpec. + if spec.Path.Pos() <= pos && pos <= spec.Path.End() { + imp = spec + } + } + if imp == nil { + return nil, nil + } + + importPath := metadata.UnquoteImportPath(imp) + impID := pkg.Metadata().DepsByImpPath[importPath] + if impID == "" { + return nil, fmt.Errorf("failed to resolve import %q", importPath) + } + impMetadata := s.Metadata(impID) + if impMetadata == nil { + return nil, fmt.Errorf("missing information for package %q", impID) + } + + var locs []protocol.Location + for _, f := range impMetadata.CompiledGoFiles { + fh, err := s.ReadFile(ctx, f) + if err != nil { + if ctx.Err() != nil { + return nil, ctx.Err() + } + continue + } + pgf, err := s.ParseGo(ctx, fh, parsego.Header) + if err != nil { + if ctx.Err() != nil { + return nil, ctx.Err() + } + continue + } + loc, err := pgf.NodeLocation(pgf.File) + if err != nil { + return nil, err + } + locs = append(locs, loc) + } + + if len(locs) == 0 { + return nil, fmt.Errorf("package %q has no readable files", impID) // incl. unsafe + } + + return locs, nil +} + +// TODO(rfindley): avoid the duplicate column mapping here, by associating a +// column mapper with each file handle. 
+func mapPosition(ctx context.Context, fset *token.FileSet, s file.Source, start, end token.Pos) (protocol.Location, error) { + file := fset.File(start) + uri := protocol.URIFromPath(file.Name()) + fh, err := s.ReadFile(ctx, uri) + if err != nil { + return protocol.Location{}, err + } + content, err := fh.Content() + if err != nil { + return protocol.Location{}, err + } + m := protocol.NewMapper(fh.URI(), content) + return m.PosLocation(file, start, end) +} diff --git a/gopls/internal/golang/diagnostics.go b/gopls/internal/golang/diagnostics.go new file mode 100644 index 00000000000..b0fa8daf83c --- /dev/null +++ b/gopls/internal/golang/diagnostics.go @@ -0,0 +1,48 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "context" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/progress" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/maps" +) + +// Analyze reports go/analysis-framework diagnostics in the specified package. +// +// If the provided tracker is non-nil, it may be used to provide notifications +// of the ongoing analysis pass. +func Analyze(ctx context.Context, snapshot *cache.Snapshot, pkgIDs map[PackageID]*metadata.Package, tracker *progress.Tracker) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { + // Exit early if the context has been canceled. This also protects us + // from a race on Options, see golang/go#36699. 
+ if ctx.Err() != nil { + return nil, ctx.Err() + } + + options := snapshot.Options() + categories := []map[string]*settings.Analyzer{ + options.DefaultAnalyzers, + options.StaticcheckAnalyzers, + } + + var analyzers []*settings.Analyzer + for _, cat := range categories { + for _, a := range cat { + analyzers = append(analyzers, a) + } + } + + analysisDiagnostics, err := snapshot.Analyze(ctx, pkgIDs, analyzers, tracker) + if err != nil { + return nil, err + } + byURI := func(d *cache.Diagnostic) protocol.DocumentURI { return d.URI } + return maps.Group(analysisDiagnostics, byURI), nil +} diff --git a/gopls/internal/golang/embeddirective.go b/gopls/internal/golang/embeddirective.go new file mode 100644 index 00000000000..485da5c7a2d --- /dev/null +++ b/gopls/internal/golang/embeddirective.go @@ -0,0 +1,195 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "errors" + "fmt" + "io/fs" + "path/filepath" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "golang.org/x/tools/gopls/internal/protocol" +) + +// ErrNoEmbed is returned by EmbedDefinition when no embed +// directive is found at a particular position. +// As such it indicates that other definitions could be worth checking. +var ErrNoEmbed = errors.New("no embed directive found") + +var errStopWalk = errors.New("stop walk") + +// EmbedDefinition finds a file matching the embed directive at pos in the mapped file. +// If there is no embed directive at pos, returns ErrNoEmbed. +// If multiple files match the embed pattern, one is picked at random. +func EmbedDefinition(m *protocol.Mapper, pos protocol.Position) ([]protocol.Location, error) { + pattern, _ := parseEmbedDirective(m, pos) + if pattern == "" { + return nil, ErrNoEmbed + } + + // Find the first matching file. 
+ var match string + dir := filepath.Dir(m.URI.Path()) + err := filepath.WalkDir(dir, func(abs string, d fs.DirEntry, e error) error { + if e != nil { + return e + } + rel, err := filepath.Rel(dir, abs) + if err != nil { + return err + } + ok, err := filepath.Match(pattern, rel) + if err != nil { + return err + } + if ok && !d.IsDir() { + match = abs + return errStopWalk + } + return nil + }) + if err != nil && !errors.Is(err, errStopWalk) { + return nil, err + } + if match == "" { + return nil, fmt.Errorf("%q does not match any files in %q", pattern, dir) + } + + loc := protocol.Location{ + URI: protocol.URIFromPath(match), + Range: protocol.Range{ + Start: protocol.Position{Line: 0, Character: 0}, + }, + } + return []protocol.Location{loc}, nil +} + +// parseEmbedDirective attempts to parse a go:embed directive argument at pos. +// If successful it return the directive argument and its range, else zero values are returned. +func parseEmbedDirective(m *protocol.Mapper, pos protocol.Position) (string, protocol.Range) { + lineStart, err := m.PositionOffset(protocol.Position{Line: pos.Line, Character: 0}) + if err != nil { + return "", protocol.Range{} + } + lineEnd, err := m.PositionOffset(protocol.Position{Line: pos.Line + 1, Character: 0}) + if err != nil { + return "", protocol.Range{} + } + + text := string(m.Content[lineStart:lineEnd]) + if !strings.HasPrefix(text, "//go:embed") { + return "", protocol.Range{} + } + text = text[len("//go:embed"):] + offset := lineStart + len("//go:embed") + + // Find the first pattern in text that covers the offset of the pos we are looking for. + findOffset, err := m.PositionOffset(pos) + if err != nil { + return "", protocol.Range{} + } + patterns, err := parseGoEmbed(text, offset) + if err != nil { + return "", protocol.Range{} + } + for _, p := range patterns { + if p.startOffset <= findOffset && findOffset <= p.endOffset { + // Found our match. 
// fileEmbed records one //go:embed pattern together with the byte-offset
// span (including quotes, if present) at which it appears.
type fileEmbed struct {
	pattern     string
	startOffset int
	endOffset   int
}

// parseGoEmbed parses the patterns that come after the directive.
//
// Copied and adapted from go/build/read.go; token.Position has been
// replaced by start/end offsets (which include quotes, if present).
func parseGoEmbed(args string, offset int) ([]fileEmbed, error) {
	// advance consumes n bytes of args, keeping offset in sync.
	advance := func(n int) {
		offset += n
		args = args[n:]
	}
	skipSpace := func() {
		trimmed := strings.TrimLeftFunc(args, unicode.IsSpace)
		advance(len(args) - len(trimmed))
	}

	var embeds []fileEmbed
	for skipSpace(); args != ""; skipSpace() {
		var path string
		pathOffset := offset
	Switch:
		switch args[0] {
		default: // bare pattern: runs to the next whitespace (or end)
			end := strings.IndexFunc(args, unicode.IsSpace)
			if end < 0 {
				end = len(args)
			}
			path = args[:end]
			advance(end)

		case '`': // raw-quoted pattern
			var ok bool
			path, _, ok = strings.Cut(args[1:], "`")
			if !ok {
				return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
			}
			advance(1 + len(path) + 1)

		case '"': // interpreted-quoted pattern
			i := 1
			for ; i < len(args); i++ {
				if args[i] == '\\' {
					i++ // skip the escaped byte
					continue
				}
				if args[i] == '"' {
					q, err := strconv.Unquote(args[:i+1])
					if err != nil {
						return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args[:i+1])
					}
					path = q
					advance(i + 1)
					break Switch
				}
			}
			if i >= len(args) { // no closing quote
				return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
			}
		}

		// Each pattern must be followed by whitespace (or end of input).
		if args != "" {
			r, _ := utf8.DecodeRuneInString(args)
			if !unicode.IsSpace(r) {
				return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
			}
		}
		embeds = append(embeds, fileEmbed{
			pattern:     path,
			startOffset: pathOffset,
			endOffset:   offset,
		})
	}
	return embeds, nil
}
gopls/internal/lsp/source/extract.go rename to gopls/internal/golang/extract.go index 0e062bd9eae..c07faec1b7a 100644 --- a/gopls/internal/lsp/source/extract.go +++ b/gopls/internal/golang/extract.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package source +package golang import ( "bytes" @@ -18,16 +18,16 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/safetoken" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/analysisinternal" ) -func extractVariable(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) { +func extractVariable(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { tokFile := fset.File(file.Pos()) expr, path, ok, err := CanExtractVariable(start, end, file) if !ok { - return nil, fmt.Errorf("extractVariable: cannot extract %s: %v", safetoken.StartPosition(fset, start), err) + return nil, nil, fmt.Errorf("extractVariable: cannot extract %s: %v", safetoken.StartPosition(fset, start), err) } // Create new AST node for extracted code. 
@@ -55,16 +55,16 @@ func extractVariable(fset *token.FileSet, start, end token.Pos, src []byte, file lhsNames = append(lhsNames, lhsName) } default: - return nil, fmt.Errorf("cannot extract %T", expr) + return nil, nil, fmt.Errorf("cannot extract %T", expr) } insertBeforeStmt := analysisinternal.StmtToInsertVarBefore(path) if insertBeforeStmt == nil { - return nil, fmt.Errorf("cannot find location to insert extraction") + return nil, nil, fmt.Errorf("cannot find location to insert extraction") } indent, err := calculateIndentation(src, tokFile, insertBeforeStmt) if err != nil { - return nil, err + return nil, nil, err } newLineIndent := "\n" + indent @@ -76,11 +76,11 @@ func extractVariable(fset *token.FileSet, start, end token.Pos, src []byte, file } var buf bytes.Buffer if err := format.Node(&buf, fset, assignStmt); err != nil { - return nil, err + return nil, nil, err } assignment := strings.ReplaceAll(buf.String(), "\n", newLineIndent) + newLineIndent - return &analysis.SuggestedFix{ + return fset, &analysis.SuggestedFix{ TextEdits: []analysis.TextEdit{ { Pos: insertBeforeStmt.Pos(), @@ -182,12 +182,12 @@ type returnVariable struct { } // extractMethod refactors the selected block of code into a new method. -func extractMethod(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) { +func extractMethod(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { return extractFunctionMethod(fset, start, end, src, file, pkg, info, true) } // extractFunction refactors the selected block of code into a new function. 
-func extractFunction(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) { +func extractFunction(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { return extractFunctionMethod(fset, start, end, src, file, pkg, info, false) } @@ -199,7 +199,7 @@ func extractFunction(fset *token.FileSet, start, end token.Pos, src []byte, file // and return values of the extracted function/method. Lastly, we construct the call // of the function/method and insert this call as well as the extracted function/method into // their proper locations. -func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info, isMethod bool) (*analysis.SuggestedFix, error) { +func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info, isMethod bool) (*token.FileSet, *analysis.SuggestedFix, error) { errorPrefix := "extractFunction" if isMethod { errorPrefix = "extractMethod" @@ -207,21 +207,21 @@ func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte tok := fset.File(file.Pos()) if tok == nil { - return nil, bug.Errorf("no file for position") + return nil, nil, bug.Errorf("no file for position") } p, ok, methodOk, err := CanExtractFunction(tok, start, end, src, file) if (!ok && !isMethod) || (!methodOk && isMethod) { - return nil, fmt.Errorf("%s: cannot extract %s: %v", errorPrefix, + return nil, nil, fmt.Errorf("%s: cannot extract %s: %v", errorPrefix, safetoken.StartPosition(fset, start), err) } tok, path, start, end, outer, node := p.tok, p.path, p.start, p.end, p.outer, p.node fileScope := info.Scopes[file] if fileScope == nil { - return nil, fmt.Errorf("%s: file scope is empty", errorPrefix) + return nil, nil, fmt.Errorf("%s: file 
scope is empty", errorPrefix) } pkgScope := fileScope.Parent() if pkgScope == nil { - return nil, fmt.Errorf("%s: package scope is empty", errorPrefix) + return nil, nil, fmt.Errorf("%s: package scope is empty", errorPrefix) } // A return statement is non-nested if its parent node is equal to the parent node @@ -255,7 +255,7 @@ func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte // the appropriate parameters and return values. variables, err := collectFreeVars(info, file, fileScope, pkgScope, start, end, path[0]) if err != nil { - return nil, err + return nil, nil, err } var ( @@ -266,11 +266,11 @@ func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte ) if isMethod { if outer == nil || outer.Recv == nil || len(outer.Recv.List) == 0 { - return nil, fmt.Errorf("%s: cannot extract need method receiver", errorPrefix) + return nil, nil, fmt.Errorf("%s: cannot extract need method receiver", errorPrefix) } receiver = outer.Recv.List[0] if len(receiver.Names) == 0 || receiver.Names[0] == nil { - return nil, fmt.Errorf("%s: cannot extract need method receiver name", errorPrefix) + return nil, nil, fmt.Errorf("%s: cannot extract need method receiver name", errorPrefix) } recvName := receiver.Names[0] receiverName = recvName.Name @@ -324,7 +324,7 @@ func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte } typ := analysisinternal.TypeExpr(file, pkg, v.obj.Type()) if typ == nil { - return nil, fmt.Errorf("nil AST expression for type: %v", v.obj.Name()) + return nil, nil, fmt.Errorf("nil AST expression for type: %v", v.obj.Name()) } seenVars[v.obj] = typ identifier := ast.NewIdent(v.obj.Name()) @@ -333,17 +333,27 @@ func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte // the selection (isAssigned), (2) it must be used at least once after the // selection (isUsed), and (3) its first use after the selection // cannot be its own reassignment or redefinition (objOverriden). 
- if v.obj.Parent() == nil { - return nil, fmt.Errorf("parent nil") + vscope := v.obj.Parent() + if vscope == nil { + return nil, nil, fmt.Errorf("parent nil") } - isUsed, firstUseAfter := objUsed(info, end, v.obj.Parent().End(), v.obj) + isUsed, firstUseAfter := objUsed(info, end, vscope.End(), v.obj) if v.assigned && isUsed && !varOverridden(info, firstUseAfter, v.obj, v.free, outer) { returnTypes = append(returnTypes, &ast.Field{Type: typ}) returns = append(returns, identifier) if !v.free { uninitialized = append(uninitialized, v.obj) - } else if v.obj.Parent().Pos() == startParent.Pos() { - canRedefineCount++ + + } else { + // In go1.22, Scope.Pos for function scopes changed (#60752): + // it used to start at the body ('{'), now it starts at "func". + // + // The second condition below handles the case when + // v's block is the FuncDecl.Body itself. + if vscope.Pos() == startParent.Pos() || + startParent == outer.Body && vscope == info.Scopes[outer.Type] { + canRedefineCount++ + } } } // An identifier must meet two conditions to become a parameter of the @@ -397,12 +407,12 @@ func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte // the extracted selection without modifying the original AST. startOffset, endOffset, err := safetoken.Offsets(tok, start, end) if err != nil { - return nil, err + return nil, nil, err } selection := src[startOffset:endOffset] extractedBlock, err := parseBlockStmt(fset, selection) if err != nil { - return nil, err + return nil, nil, err } // We need to account for return statements in the selected block, as they will complicate @@ -486,7 +496,7 @@ func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte // the return statements in the extracted function to reflect this change in // signature. 
if err := adjustReturnStatements(returnTypes, seenVars, file, pkg, extractedBlock); err != nil { - return nil, err + return nil, nil, err } } // Collect the additional return values and types needed to accommodate return @@ -495,7 +505,7 @@ func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte // function. retVars, ifReturn, err = generateReturnInfo(enclosing, pkg, path, file, info, start, hasNonNestedReturn) if err != nil { - return nil, err + return nil, nil, err } } @@ -565,18 +575,18 @@ func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte var declBuf, replaceBuf, newFuncBuf, ifBuf, commentBuf bytes.Buffer if err := format.Node(&declBuf, fset, declarations); err != nil { - return nil, err + return nil, nil, err } if err := format.Node(&replaceBuf, fset, extractedFunCall); err != nil { - return nil, err + return nil, nil, err } if ifReturn != nil { if err := format.Node(&ifBuf, fset, ifReturn); err != nil { - return nil, err + return nil, nil, err } } if err := format.Node(&newFuncBuf, fset, newFunc); err != nil { - return nil, err + return nil, nil, err } // Find all the comments within the range and print them to be put somewhere. // TODO(suzmue): print these in the extracted function at the correct place. @@ -592,13 +602,13 @@ func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte // so preserve the text before and after the selected block. 
outerStart, outerEnd, err := safetoken.Offsets(tok, outer.Pos(), outer.End()) if err != nil { - return nil, err + return nil, nil, err } before := src[outerStart:startOffset] after := src[endOffset:outerEnd] indent, err := calculateIndentation(src, tok, node) if err != nil { - return nil, err + return nil, nil, err } newLineIndent := "\n" + indent @@ -623,7 +633,7 @@ func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte fullReplacement.WriteString("\n\n") // add newlines after the enclosing function fullReplacement.Write(newFuncBuf.Bytes()) // insert the extracted function - return &analysis.SuggestedFix{ + return fset, &analysis.SuggestedFix{ TextEdits: []analysis.TextEdit{{ Pos: outer.Pos(), End: outer.End(), diff --git a/gopls/internal/golang/fix.go b/gopls/internal/golang/fix.go new file mode 100644 index 00000000000..2215da9b65e --- /dev/null +++ b/gopls/internal/golang/fix.go @@ -0,0 +1,227 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "context" + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/gopls/internal/analysis/embeddirective" + "golang.org/x/tools/gopls/internal/analysis/fillstruct" + "golang.org/x/tools/gopls/internal/analysis/stubmethods" + "golang.org/x/tools/gopls/internal/analysis/undeclaredname" + "golang.org/x/tools/gopls/internal/analysis/unusedparams" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/internal/imports" +) + +// A fixer is a function that suggests a fix for a diagnostic produced +// by the analysis framework. 
This is done outside of the analyzer Run +// function so that the construction of expensive fixes can be +// deferred until they are requested by the user. +// +// The actual diagnostic is not provided; only its position, as the +// triple (pgf, start, end); the resulting SuggestedFix implicitly +// relates to that file. +// +// The supplied token positions (start, end) must belong to +// pkg.FileSet(), and the returned positions +// (SuggestedFix.TextEdits[*].{Pos,End}) must belong to the returned +// FileSet. +// +// A fixer may return (nil, nil) if no fix is available. +type fixer func(ctx context.Context, s *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) + +// A singleFileFixer is a Fixer that inspects only a single file, +// and does not depend on data types from the cache package. +// +// TODO(adonovan): move fillstruct and undeclaredname into this +// package, so we can remove the import restriction and push +// the singleFile wrapper down into each singleFileFixer? +type singleFileFixer func(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) + +// singleFile adapts a single-file fixer to a Fixer. +func singleFile(fixer1 singleFileFixer) fixer { + return func(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { + return fixer1(pkg.FileSet(), start, end, pgf.Src, pgf.File, pkg.Types(), pkg.TypesInfo()) + } +} + +// Names of ApplyFix.Fix created directly by the CodeAction handler. 
+const ( + fixExtractVariable = "extract_variable" + fixExtractFunction = "extract_function" + fixExtractMethod = "extract_method" + fixInlineCall = "inline_call" + fixInvertIfCondition = "invert_if_condition" + fixSplitLines = "split_lines" + fixJoinLines = "join_lines" +) + +// ApplyFix applies the specified kind of suggested fix to the given +// file and range, returning the resulting edits. +// +// A fix kind is either the Category of an analysis.Diagnostic that +// had a SuggestedFix with no edits; or the name of a fix agreed upon +// by [CodeActions] and this function. +// Fix kinds identify fixes in the command protocol. +// +// TODO(adonovan): come up with a better mechanism for registering the +// connection between analyzers, code actions, and fixers. A flaw of +// the current approach is that the same Category could in theory +// apply to a Diagnostic with several lazy fixes, making them +// impossible to distinguish. It would more precise if there was a +// SuggestedFix.Category field, or some other way to squirrel metadata +// in the fix. +func ApplyFix(ctx context.Context, fix string, snapshot *cache.Snapshot, fh file.Handle, rng protocol.Range) ([]protocol.TextDocumentEdit, error) { + // This can't be expressed as an entry in the fixer table below + // because it operates in the protocol (not go/{token,ast}) domain. + // (Sigh; perhaps it was a mistake to factor out the + // NarrowestPackageForFile/RangePos/suggestedFixToEdits + // steps.) + if fix == unusedparams.FixCategory { + changes, err := RemoveUnusedParameter(ctx, fh, rng, snapshot) + if err != nil { + return nil, err + } + // Unwrap TextDocumentEdits again! + var edits []protocol.TextDocumentEdit + for _, change := range changes { + edits = append(edits, *change.TextDocumentEdit) + } + return edits, nil + } + + fixers := map[string]fixer{ + // Fixes for analyzer-provided diagnostics. + // These match the Diagnostic.Category. 
+ embeddirective.FixCategory: addEmbedImport, + fillstruct.FixCategory: singleFile(fillstruct.SuggestedFix), + stubmethods.FixCategory: stubMethodsFixer, + undeclaredname.FixCategory: singleFile(undeclaredname.SuggestedFix), + + // Ad-hoc fixers: these are used when the command is + // constructed directly by logic in server/code_action. + fixExtractFunction: singleFile(extractFunction), + fixExtractMethod: singleFile(extractMethod), + fixExtractVariable: singleFile(extractVariable), + fixInlineCall: inlineCall, + fixInvertIfCondition: singleFile(invertIfCondition), + fixSplitLines: singleFile(splitLines), + fixJoinLines: singleFile(joinLines), + } + fixer, ok := fixers[fix] + if !ok { + return nil, fmt.Errorf("no suggested fix function for %s", fix) + } + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + start, end, err := pgf.RangePos(rng) + if err != nil { + return nil, err + } + fixFset, suggestion, err := fixer(ctx, snapshot, pkg, pgf, start, end) + if err != nil { + return nil, err + } + if suggestion == nil { + return nil, nil + } + return suggestedFixToEdits(ctx, snapshot, fixFset, suggestion) +} + +// suggestedFixToEdits converts the suggestion's edits from analysis form into protocol form. 
+func suggestedFixToEdits(ctx context.Context, snapshot *cache.Snapshot, fset *token.FileSet, suggestion *analysis.SuggestedFix) ([]protocol.TextDocumentEdit, error) { + editsPerFile := map[protocol.DocumentURI]*protocol.TextDocumentEdit{} + for _, edit := range suggestion.TextEdits { + tokFile := fset.File(edit.Pos) + if tokFile == nil { + return nil, bug.Errorf("no file for edit position") + } + end := edit.End + if !end.IsValid() { + end = edit.Pos + } + fh, err := snapshot.ReadFile(ctx, protocol.URIFromPath(tokFile.Name())) + if err != nil { + return nil, err + } + te, ok := editsPerFile[fh.URI()] + if !ok { + te = &protocol.TextDocumentEdit{ + TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{ + Version: fh.Version(), + TextDocumentIdentifier: protocol.TextDocumentIdentifier{ + URI: fh.URI(), + }, + }, + } + editsPerFile[fh.URI()] = te + } + content, err := fh.Content() + if err != nil { + return nil, err + } + m := protocol.NewMapper(fh.URI(), content) // TODO(adonovan): opt: memoize in map + rng, err := m.PosRange(tokFile, edit.Pos, end) + if err != nil { + return nil, err + } + te.Edits = append(te.Edits, protocol.Or_TextDocumentEdit_edits_Elem{ + Value: protocol.TextEdit{ + Range: rng, + NewText: string(edit.NewText), + }, + }) + } + var edits []protocol.TextDocumentEdit + for _, edit := range editsPerFile { + edits = append(edits, *edit) + } + return edits, nil +} + +// addEmbedImport adds a missing embed "embed" import with blank name. +func addEmbedImport(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, _, _ token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { + // Like golang.AddImport, but with _ as Name and using our pgf. 
+ protoEdits, err := ComputeOneImportFixEdits(snapshot, pgf, &imports.ImportFix{ + StmtInfo: imports.ImportInfo{ + ImportPath: "embed", + Name: "_", + }, + FixType: imports.AddImport, + }) + if err != nil { + return nil, nil, fmt.Errorf("compute edits: %w", err) + } + + var edits []analysis.TextEdit + for _, e := range protoEdits { + start, end, err := pgf.RangePos(e.Range) + if err != nil { + return nil, nil, err // e.g. invalid range + } + edits = append(edits, analysis.TextEdit{ + Pos: start, + End: end, + NewText: []byte(e.NewText), + }) + } + + return pkg.FileSet(), &analysis.SuggestedFix{ + Message: "Add embed import", + TextEdits: edits, + }, nil +} diff --git a/gopls/internal/golang/folding_range.go b/gopls/internal/golang/folding_range.go new file mode 100644 index 00000000000..85faea5e31a --- /dev/null +++ b/gopls/internal/golang/folding_range.go @@ -0,0 +1,197 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "context" + "go/ast" + "go/token" + "sort" + "strings" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/safetoken" +) + +// FoldingRangeInfo holds range and kind info of folding for an ast.Node +type FoldingRangeInfo struct { + MappedRange protocol.MappedRange + Kind protocol.FoldingRangeKind +} + +// FoldingRange gets all of the folding range for f. +func FoldingRange(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, lineFoldingOnly bool) (ranges []*FoldingRangeInfo, err error) { + // TODO(suzmue): consider limiting the number of folding ranges returned, and + // implement a way to prioritize folding ranges in that case. 
+ pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return nil, err + } + + // With parse errors, we wouldn't be able to produce accurate folding info. + // LSP protocol (3.16) currently does not have a way to handle this case + // (https://github.com/microsoft/language-server-protocol/issues/1200). + // We cannot return an error either because we are afraid some editors + // may not handle errors nicely. As a workaround, we now return an empty + // result and let the client handle this case by double check the file + // contents (i.e. if the file is not empty and the folding range result + // is empty, raise an internal error). + if pgf.ParseErr != nil { + return nil, nil + } + + // Get folding ranges for comments separately as they are not walked by ast.Inspect. + ranges = append(ranges, commentsFoldingRange(pgf)...) + + visit := func(n ast.Node) bool { + rng := foldingRangeFunc(pgf, n, lineFoldingOnly) + if rng != nil { + ranges = append(ranges, rng) + } + return true + } + // Walk the ast and collect folding ranges. + ast.Inspect(pgf.File, visit) + + sort.Slice(ranges, func(i, j int) bool { + irng := ranges[i].MappedRange.Range() + jrng := ranges[j].MappedRange.Range() + return protocol.CompareRange(irng, jrng) < 0 + }) + + return ranges, nil +} + +// foldingRangeFunc calculates the line folding range for ast.Node n +func foldingRangeFunc(pgf *parsego.File, n ast.Node, lineFoldingOnly bool) *FoldingRangeInfo { + // TODO(suzmue): include trailing empty lines before the closing + // parenthesis/brace. + var kind protocol.FoldingRangeKind + var start, end token.Pos + switch n := n.(type) { + case *ast.BlockStmt: + // Fold between positions of or lines between "{" and "}". 
+ var startList, endList token.Pos + if num := len(n.List); num != 0 { + startList, endList = n.List[0].Pos(), n.List[num-1].End() + } + start, end = validLineFoldingRange(pgf.Tok, n.Lbrace, n.Rbrace, startList, endList, lineFoldingOnly) + case *ast.CaseClause: + // Fold from position of ":" to end. + start, end = n.Colon+1, n.End() + case *ast.CommClause: + // Fold from position of ":" to end. + start, end = n.Colon+1, n.End() + case *ast.CallExpr: + // Fold from position of "(" to position of ")". + start, end = n.Lparen+1, n.Rparen + case *ast.FieldList: + // Fold between positions of or lines between opening parenthesis/brace and closing parenthesis/brace. + var startList, endList token.Pos + if num := len(n.List); num != 0 { + startList, endList = n.List[0].Pos(), n.List[num-1].End() + } + start, end = validLineFoldingRange(pgf.Tok, n.Opening, n.Closing, startList, endList, lineFoldingOnly) + case *ast.GenDecl: + // If this is an import declaration, set the kind to be protocol.Imports. + if n.Tok == token.IMPORT { + kind = protocol.Imports + } + // Fold between positions of or lines between "(" and ")". + var startSpecs, endSpecs token.Pos + if num := len(n.Specs); num != 0 { + startSpecs, endSpecs = n.Specs[0].Pos(), n.Specs[num-1].End() + } + start, end = validLineFoldingRange(pgf.Tok, n.Lparen, n.Rparen, startSpecs, endSpecs, lineFoldingOnly) + case *ast.BasicLit: + // Fold raw string literals from position of "`" to position of "`". + if n.Kind == token.STRING && len(n.Value) >= 2 && n.Value[0] == '`' && n.Value[len(n.Value)-1] == '`' { + start, end = n.Pos(), n.End() + } + case *ast.CompositeLit: + // Fold between positions of or lines between "{" and "}". + var startElts, endElts token.Pos + if num := len(n.Elts); num != 0 { + startElts, endElts = n.Elts[0].Pos(), n.Elts[num-1].End() + } + start, end = validLineFoldingRange(pgf.Tok, n.Lbrace, n.Rbrace, startElts, endElts, lineFoldingOnly) + } + + // Check that folding positions are valid. 
+ if !start.IsValid() || !end.IsValid() { + return nil + } + // in line folding mode, do not fold if the start and end lines are the same. + if lineFoldingOnly && safetoken.Line(pgf.Tok, start) == safetoken.Line(pgf.Tok, end) { + return nil + } + mrng, err := pgf.PosMappedRange(start, end) + if err != nil { + bug.Errorf("%w", err) // can't happen + } + return &FoldingRangeInfo{ + MappedRange: mrng, + Kind: kind, + } +} + +// validLineFoldingRange returns start and end token.Pos for folding range if the range is valid. +// returns token.NoPos otherwise, which fails token.IsValid check +func validLineFoldingRange(tokFile *token.File, open, close, start, end token.Pos, lineFoldingOnly bool) (token.Pos, token.Pos) { + if lineFoldingOnly { + if !open.IsValid() || !close.IsValid() { + return token.NoPos, token.NoPos + } + + // Don't want to fold if the start/end is on the same line as the open/close + // as an example, the example below should *not* fold: + // var x = [2]string{"d", + // "e" } + if safetoken.Line(tokFile, open) == safetoken.Line(tokFile, start) || + safetoken.Line(tokFile, close) == safetoken.Line(tokFile, end) { + return token.NoPos, token.NoPos + } + + return open + 1, end + } + return open + 1, close +} + +// commentsFoldingRange returns the folding ranges for all comment blocks in file. +// The folding range starts at the end of the first line of the comment block, and ends at the end of the +// comment block and has kind protocol.Comment. +func commentsFoldingRange(pgf *parsego.File) (comments []*FoldingRangeInfo) { + tokFile := pgf.Tok + for _, commentGrp := range pgf.File.Comments { + startGrpLine, endGrpLine := safetoken.Line(tokFile, commentGrp.Pos()), safetoken.Line(tokFile, commentGrp.End()) + if startGrpLine == endGrpLine { + // Don't fold single line comments. 
+ continue + } + + firstComment := commentGrp.List[0] + startPos, endLinePos := firstComment.Pos(), firstComment.End() + startCmmntLine, endCmmntLine := safetoken.Line(tokFile, startPos), safetoken.Line(tokFile, endLinePos) + if startCmmntLine != endCmmntLine { + // If the first comment spans multiple lines, then we want to have the + // folding range start at the end of the first line. + endLinePos = token.Pos(int(startPos) + len(strings.Split(firstComment.Text, "\n")[0])) + } + mrng, err := pgf.PosMappedRange(endLinePos, commentGrp.End()) + if err != nil { + bug.Errorf("%w", err) // can't happen + } + comments = append(comments, &FoldingRangeInfo{ + // Fold from the end of the first line comment to the end of the comment block. + MappedRange: mrng, + Kind: protocol.Comment, + }) + } + return comments +} diff --git a/gopls/internal/golang/format.go b/gopls/internal/golang/format.go new file mode 100644 index 00000000000..3e5668b32fe --- /dev/null +++ b/gopls/internal/golang/format.go @@ -0,0 +1,337 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package golang defines the LSP features for navigation, analysis, +// and refactoring of Go source code. +package golang + +import ( + "bytes" + "context" + "fmt" + "go/ast" + "go/format" + "go/parser" + "go/token" + "strings" + "text/scanner" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/imports" + "golang.org/x/tools/internal/tokeninternal" +) + +// Format formats a file with a given range. 
+func Format(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.TextEdit, error) { + ctx, done := event.Start(ctx, "golang.Format") + defer done() + + // Generated files shouldn't be edited. So, don't format them + if IsGenerated(ctx, snapshot, fh.URI()) { + return nil, fmt.Errorf("can't format %q: file is generated", fh.URI().Path()) + } + + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return nil, err + } + // Even if this file has parse errors, it might still be possible to format it. + // Using format.Node on an AST with errors may result in code being modified. + // Attempt to format the source of this file instead. + if pgf.ParseErr != nil { + formatted, err := formatSource(ctx, fh) + if err != nil { + return nil, err + } + return computeTextEdits(ctx, pgf, string(formatted)) + } + + // format.Node changes slightly from one release to another, so the version + // of Go used to build the LSP server will determine how it formats code. + // This should be acceptable for all users, who likely be prompted to rebuild + // the LSP server on each Go release. + buf := &bytes.Buffer{} + fset := tokeninternal.FileSetFor(pgf.Tok) + if err := format.Node(buf, fset, pgf.File); err != nil { + return nil, err + } + formatted := buf.String() + + // Apply additional formatting, if any is supported. Currently, the only + // supported additional formatter is gofumpt. + if format := snapshot.Options().GofumptFormat; snapshot.Options().Gofumpt && format != nil { + // gofumpt can customize formatting based on language version and module + // path, if available. + // + // Try to derive this information, but fall-back on the default behavior. + // + // TODO: under which circumstances can we fail to find module information? + // Can this, for example, result in inconsistent formatting across saves, + // due to pending calls to packages.Load? 
+ var langVersion, modulePath string + meta, err := NarrowestMetadataForFile(ctx, snapshot, fh.URI()) + if err == nil { + if mi := meta.Module; mi != nil { + langVersion = mi.GoVersion + modulePath = mi.Path + } + } + b, err := format(ctx, langVersion, modulePath, buf.Bytes()) + if err != nil { + return nil, err + } + formatted = string(b) + } + return computeTextEdits(ctx, pgf, formatted) +} + +func formatSource(ctx context.Context, fh file.Handle) ([]byte, error) { + _, done := event.Start(ctx, "golang.formatSource") + defer done() + + data, err := fh.Content() + if err != nil { + return nil, err + } + return format.Source(data) +} + +type importFix struct { + fix *imports.ImportFix + edits []protocol.TextEdit +} + +// allImportsFixes formats f for each possible fix to the imports. +// In addition to returning the result of applying all edits, +// it returns a list of fixes that could be applied to the file, with the +// corresponding TextEdits that would be needed to apply that fix. +func allImportsFixes(ctx context.Context, snapshot *cache.Snapshot, pgf *parsego.File) (allFixEdits []protocol.TextEdit, editsPerFix []*importFix, err error) { + ctx, done := event.Start(ctx, "golang.AllImportsFixes") + defer done() + + if err := snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error { + allFixEdits, editsPerFix, err = computeImportEdits(ctx, pgf, opts) + return err + }); err != nil { + return nil, nil, fmt.Errorf("AllImportsFixes: %v", err) + } + return allFixEdits, editsPerFix, nil +} + +// computeImportEdits computes a set of edits that perform one or all of the +// necessary import fixes. +func computeImportEdits(ctx context.Context, pgf *parsego.File, options *imports.Options) (allFixEdits []protocol.TextEdit, editsPerFix []*importFix, err error) { + filename := pgf.URI.Path() + + // Build up basic information about the original file. 
+ allFixes, err := imports.FixImports(ctx, filename, pgf.Src, options) + if err != nil { + return nil, nil, err + } + + allFixEdits, err = computeFixEdits(pgf, options, allFixes) + if err != nil { + return nil, nil, err + } + + // Apply all of the import fixes to the file. + // Add the edits for each fix to the result. + for _, fix := range allFixes { + edits, err := computeFixEdits(pgf, options, []*imports.ImportFix{fix}) + if err != nil { + return nil, nil, err + } + editsPerFix = append(editsPerFix, &importFix{ + fix: fix, + edits: edits, + }) + } + return allFixEdits, editsPerFix, nil +} + +// ComputeOneImportFixEdits returns text edits for a single import fix. +func ComputeOneImportFixEdits(snapshot *cache.Snapshot, pgf *parsego.File, fix *imports.ImportFix) ([]protocol.TextEdit, error) { + options := &imports.Options{ + LocalPrefix: snapshot.Options().Local, + // Defaults. + AllErrors: true, + Comments: true, + Fragment: true, + FormatOnly: false, + TabIndent: true, + TabWidth: 8, + } + return computeFixEdits(pgf, options, []*imports.ImportFix{fix}) +} + +func computeFixEdits(pgf *parsego.File, options *imports.Options, fixes []*imports.ImportFix) ([]protocol.TextEdit, error) { + // trim the original data to match fixedData + left, err := importPrefix(pgf.Src) + if err != nil { + return nil, err + } + extra := !strings.Contains(left, "\n") // one line may have more than imports + if extra { + left = string(pgf.Src) + } + if len(left) > 0 && left[len(left)-1] != '\n' { + left += "\n" + } + // Apply the fixes and re-parse the file so that we can locate the + // new imports. + flags := parser.ImportsOnly + if extra { + // used all of origData above, use all of it here too + flags = 0 + } + fixedData, err := imports.ApplyFixes(fixes, "", pgf.Src, options, flags) + if err != nil { + return nil, err + } + if fixedData == nil || fixedData[len(fixedData)-1] != '\n' { + fixedData = append(fixedData, '\n') // ApplyFixes may miss the newline, go figure. 
+ } + edits := diff.Strings(left, string(fixedData)) + return protocolEditsFromSource([]byte(left), edits) +} + +// importPrefix returns the prefix of the given file content through the final +// import statement. If there are no imports, the prefix is the package +// statement and any comment groups below it. +func importPrefix(src []byte) (string, error) { + fset := token.NewFileSet() + // do as little parsing as possible + f, err := parser.ParseFile(fset, "", src, parser.ImportsOnly|parser.ParseComments) + if err != nil { // This can happen if 'package' is misspelled + return "", fmt.Errorf("importPrefix: failed to parse: %s", err) + } + tok := fset.File(f.Pos()) + var importEnd int + for _, d := range f.Decls { + if x, ok := d.(*ast.GenDecl); ok && x.Tok == token.IMPORT { + if e, err := safetoken.Offset(tok, d.End()); err != nil { + return "", fmt.Errorf("importPrefix: %s", err) + } else if e > importEnd { + importEnd = e + } + } + } + + maybeAdjustToLineEnd := func(pos token.Pos, isCommentNode bool) int { + offset, err := safetoken.Offset(tok, pos) + if err != nil { + return -1 + } + + // Don't go past the end of the file. + if offset > len(src) { + offset = len(src) + } + // The go/ast package does not account for different line endings, and + // specifically, in the text of a comment, it will strip out \r\n line + // endings in favor of \n. To account for these differences, we try to + // return a position on the next line whenever possible. + switch line := safetoken.Line(tok, tok.Pos(offset)); { + case line < tok.LineCount(): + nextLineOffset, err := safetoken.Offset(tok, tok.LineStart(line+1)) + if err != nil { + return -1 + } + // If we found a position that is at the end of a line, move the + // offset to the start of the next line. 
+ if offset+1 == nextLineOffset { + offset = nextLineOffset + } + case isCommentNode, offset+1 == tok.Size(): + // If the last line of the file is a comment, or we are at the end + // of the file, the prefix is the entire file. + offset = len(src) + } + return offset + } + if importEnd == 0 { + pkgEnd := f.Name.End() + importEnd = maybeAdjustToLineEnd(pkgEnd, false) + } + for _, cgroup := range f.Comments { + for _, c := range cgroup.List { + if end, err := safetoken.Offset(tok, c.End()); err != nil { + return "", err + } else if end > importEnd { + startLine := safetoken.Position(tok, c.Pos()).Line + endLine := safetoken.Position(tok, c.End()).Line + + // Work around golang/go#41197 by checking if the comment might + // contain "\r", and if so, find the actual end position of the + // comment by scanning the content of the file. + startOffset, err := safetoken.Offset(tok, c.Pos()) + if err != nil { + return "", err + } + if startLine != endLine && bytes.Contains(src[startOffset:], []byte("\r")) { + if commentEnd := scanForCommentEnd(src[startOffset:]); commentEnd > 0 { + end = startOffset + commentEnd + } + } + importEnd = maybeAdjustToLineEnd(tok.Pos(end), true) + } + } + } + if importEnd > len(src) { + importEnd = len(src) + } + return string(src[:importEnd]), nil +} + +// scanForCommentEnd returns the offset of the end of the multi-line comment +// at the start of the given byte slice. 
+func scanForCommentEnd(src []byte) int { + var s scanner.Scanner + s.Init(bytes.NewReader(src)) + s.Mode ^= scanner.SkipComments + + t := s.Scan() + if t == scanner.Comment { + return s.Pos().Offset + } + return 0 +} + +func computeTextEdits(ctx context.Context, pgf *parsego.File, formatted string) ([]protocol.TextEdit, error) { + _, done := event.Start(ctx, "golang.computeTextEdits") + defer done() + + edits := diff.Strings(string(pgf.Src), formatted) + return protocol.EditsFromDiffEdits(pgf.Mapper, edits) +} + +// protocolEditsFromSource converts text edits to LSP edits using the original +// source. +func protocolEditsFromSource(src []byte, edits []diff.Edit) ([]protocol.TextEdit, error) { + m := protocol.NewMapper("", src) + var result []protocol.TextEdit + for _, edit := range edits { + rng, err := m.OffsetRange(edit.Start, edit.End) + if err != nil { + return nil, err + } + + if rng.Start == rng.End && edit.New == "" { + // Degenerate case, which may result from a diff tool wanting to delete + // '\r' in line endings. Filter it out. + continue + } + result = append(result, protocol.TextEdit{ + Range: rng, + NewText: edit.New, + }) + } + return result, nil +} diff --git a/gopls/internal/lsp/source/format_test.go b/gopls/internal/golang/format_test.go similarity index 96% rename from gopls/internal/lsp/source/format_test.go rename to gopls/internal/golang/format_test.go index fac80c3115b..4dbb4db71c0 100644 --- a/gopls/internal/lsp/source/format_test.go +++ b/gopls/internal/golang/format_test.go @@ -2,13 +2,13 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-package source +package golang import ( "strings" "testing" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" + "golang.org/x/tools/gopls/internal/test/compare" ) func TestImportPrefix(t *testing.T) { diff --git a/gopls/internal/lsp/source/gc_annotations.go b/gopls/internal/golang/gc_annotations.go similarity index 76% rename from gopls/internal/lsp/source/gc_annotations.go rename to gopls/internal/golang/gc_annotations.go index 2a21473aaf2..1ff866122ca 100644 --- a/gopls/internal/lsp/source/gc_annotations.go +++ b/gopls/internal/golang/gc_annotations.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package source +package golang import ( "bytes" @@ -13,32 +13,18 @@ import ( "path/filepath" "strings" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" "golang.org/x/tools/internal/gocommand" ) -type Annotation string - -const ( - // Nil controls nil checks. - Nil Annotation = "nil" - - // Escape controls diagnostics about escape choices. - Escape Annotation = "escape" - - // Inline controls diagnostics about inlining choices. - Inline Annotation = "inline" - - // Bounds controls bounds checking diagnostics. 
- Bounds Annotation = "bounds" -) - -func GCOptimizationDetails(ctx context.Context, snapshot Snapshot, m *Metadata) (map[span.URI][]*Diagnostic, error) { - if len(m.CompiledGoFiles) == 0 { +func GCOptimizationDetails(ctx context.Context, snapshot *cache.Snapshot, mp *metadata.Package) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { + if len(mp.CompiledGoFiles) == 0 { return nil, nil } - pkgDir := filepath.Dir(m.CompiledGoFiles[0].Filename()) + pkgDir := filepath.Dir(mp.CompiledGoFiles[0].Path()) outDir := filepath.Join(os.TempDir(), fmt.Sprintf("gopls-%d.details", os.Getpid())) if err := os.MkdirAll(outDir, 0700); err != nil { @@ -48,13 +34,14 @@ func GCOptimizationDetails(ctx context.Context, snapshot Snapshot, m *Metadata) if err != nil { return nil, err } + tmpFile.Close() // ignore error defer os.Remove(tmpFile.Name()) - outDirURI := span.URIFromPath(outDir) + outDirURI := protocol.URIFromPath(outDir) // GC details doesn't handle Windows URIs in the form of "file:///C:/...", // so rewrite them to "file://C:/...". See golang/go#41614. 
if !strings.HasPrefix(outDir, "/") { - outDirURI = span.URI(strings.Replace(string(outDirURI), "file:///", "file://", 1)) + outDirURI = protocol.DocumentURI(strings.Replace(string(outDirURI), "file:///", "file://", 1)) } inv := &gocommand.Invocation{ Verb: "build", @@ -65,7 +52,7 @@ func GCOptimizationDetails(ctx context.Context, snapshot Snapshot, m *Metadata) }, WorkingDir: pkgDir, } - _, err = snapshot.RunGoCommandDirect(ctx, Normal, inv) + _, err = snapshot.RunGoCommandDirect(ctx, cache.Normal, inv) if err != nil { return nil, err } @@ -73,7 +60,7 @@ func GCOptimizationDetails(ctx context.Context, snapshot Snapshot, m *Metadata) if err != nil { return nil, err } - reports := make(map[span.URI][]*Diagnostic) + reports := make(map[protocol.DocumentURI][]*cache.Diagnostic) opts := snapshot.Options() var parseError error for _, fn := range files { @@ -86,7 +73,7 @@ func GCOptimizationDetails(ctx context.Context, snapshot Snapshot, m *Metadata) if fh == nil { continue } - if pkgDir != filepath.Dir(fh.URI().Filename()) { + if pkgDir != filepath.Dir(fh.URI().Path()) { // https://github.com/golang/go/issues/42198 // sometimes the detail diagnostics generated for files // outside the package can never be taken back. 
@@ -97,15 +84,15 @@ func GCOptimizationDetails(ctx context.Context, snapshot Snapshot, m *Metadata) return reports, parseError } -func parseDetailsFile(filename string, options *Options) (span.URI, []*Diagnostic, error) { +func parseDetailsFile(filename string, options *settings.Options) (protocol.DocumentURI, []*cache.Diagnostic, error) { buf, err := os.ReadFile(filename) if err != nil { return "", nil, err } var ( - uri span.URI + uri protocol.DocumentURI i int - diagnostics []*Diagnostic + diagnostics []*cache.Diagnostic ) type metadata struct { File string `json:"file,omitempty"` @@ -121,7 +108,7 @@ func parseDetailsFile(filename string, options *Options) (span.URI, []*Diagnosti if !strings.HasSuffix(m.File, ".go") { continue // } - uri = span.URIFromPath(m.File) + uri = protocol.URIFromPath(m.File) continue } d := new(protocol.Diagnostic) @@ -151,12 +138,12 @@ func parseDetailsFile(filename string, options *Options) (span.URI, []*Diagnosti Message: ri.Message, }) } - diagnostic := &Diagnostic{ + diagnostic := &cache.Diagnostic{ URI: uri, Range: zeroIndexedRange(d.Range), Message: msg, Severity: d.Severity, - Source: OptimizationDetailsError, // d.Source is always "go compiler" as of 1.16, use our own + Source: cache.OptimizationDetailsError, // d.Source is always "go compiler" as of 1.16, use our own Tags: d.Tags, Related: related, } @@ -168,7 +155,7 @@ func parseDetailsFile(filename string, options *Options) (span.URI, []*Diagnosti // showDiagnostic reports whether a given diagnostic should be shown to the end // user, given the current options. 
-func showDiagnostic(msg, source string, o *Options) bool { +func showDiagnostic(msg, source string, o *settings.Options) bool { if source != "go compiler" { return false } @@ -179,14 +166,14 @@ func showDiagnostic(msg, source string, o *Options) bool { case strings.HasPrefix(msg, "canInline") || strings.HasPrefix(msg, "cannotInline") || strings.HasPrefix(msg, "inlineCall"): - return o.Annotations[Inline] + return o.Annotations[settings.Inline] case strings.HasPrefix(msg, "escape") || msg == "leak": - return o.Annotations[Escape] + return o.Annotations[settings.Escape] case strings.HasPrefix(msg, "nilcheck"): - return o.Annotations[Nil] + return o.Annotations[settings.Nil] case strings.HasPrefix(msg, "isInBounds") || strings.HasPrefix(msg, "isSliceInBounds"): - return o.Annotations[Bounds] + return o.Annotations[settings.Bounds] } return false } diff --git a/gopls/internal/golang/highlight.go b/gopls/internal/golang/highlight.go new file mode 100644 index 00000000000..ea8a493041e --- /dev/null +++ b/gopls/internal/golang/highlight.go @@ -0,0 +1,528 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "context" + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/typesutil" + "golang.org/x/tools/internal/event" +) + +func Highlight(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]protocol.Range, error) { + ctx, done := event.Start(ctx, "golang.Highlight") + defer done() + + // We always want fully parsed files for highlight, regardless + // of whether the file belongs to a workspace package. 
+ pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, fmt.Errorf("getting package for Highlight: %w", err) + } + + pos, err := pgf.PositionPos(position) + if err != nil { + return nil, err + } + path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos) + if len(path) == 0 { + return nil, fmt.Errorf("no enclosing position found for %v:%v", position.Line, position.Character) + } + // If start == end for astutil.PathEnclosingInterval, the 1-char interval + // following start is used instead. As a result, we might not get an exact + // match so we should check the 1-char interval to the left of the passed + // in position to see if that is an exact match. + if _, ok := path[0].(*ast.Ident); !ok { + if p, _ := astutil.PathEnclosingInterval(pgf.File, pos-1, pos-1); p != nil { + switch p[0].(type) { + case *ast.Ident, *ast.SelectorExpr: + path = p // use preceding ident/selector + } + } + } + result, err := highlightPath(path, pgf.File, pkg.TypesInfo()) + if err != nil { + return nil, err + } + var ranges []protocol.Range + for rng := range result { + rng, err := pgf.PosRange(rng.start, rng.end) + if err != nil { + return nil, err + } + ranges = append(ranges, rng) + } + return ranges, nil +} + +// highlightPath returns ranges to highlight for the given enclosing path, +// which should be the result of astutil.PathEnclosingInterval. +func highlightPath(path []ast.Node, file *ast.File, info *types.Info) (map[posRange]struct{}, error) { + result := make(map[posRange]struct{}) + switch node := path[0].(type) { + case *ast.BasicLit: + // Import path string literal? + if len(path) > 1 { + if imp, ok := path[1].(*ast.ImportSpec); ok { + highlight := func(n ast.Node) { + result[posRange{start: n.Pos(), end: n.End()}] = struct{}{} + } + + // Highlight the import itself... + highlight(imp) + + // ...and all references to it in the file. 
+ if pkgname, ok := typesutil.ImportedPkgName(info, imp); ok { + ast.Inspect(file, func(n ast.Node) bool { + if id, ok := n.(*ast.Ident); ok && + info.Uses[id] == pkgname { + highlight(id) + } + return true + }) + } + return result, nil + } + } + highlightFuncControlFlow(path, result) + case *ast.ReturnStmt, *ast.FuncDecl, *ast.FuncType: + highlightFuncControlFlow(path, result) + case *ast.Ident: + // Check if ident is inside return or func decl. + highlightFuncControlFlow(path, result) + highlightIdentifier(node, file, info, result) + case *ast.ForStmt, *ast.RangeStmt: + highlightLoopControlFlow(path, info, result) + case *ast.SwitchStmt, *ast.TypeSwitchStmt: + highlightSwitchFlow(path, info, result) + case *ast.BranchStmt: + // BREAK can exit a loop, switch or select, while CONTINUE exits a loop, so + // these need to be handled separately. They can also be embedded in any + // other loop/switch/select if they have a label. TODO: add support for + // GOTO and FALLTHROUGH as well. + switch node.Tok { + case token.BREAK: + if node.Label != nil { + highlightLabeledFlow(path, info, node, result) + } else { + highlightUnlabeledBreakFlow(path, info, result) + } + case token.CONTINUE: + if node.Label != nil { + highlightLabeledFlow(path, info, node, result) + } else { + highlightLoopControlFlow(path, info, result) + } + } + default: + // If the cursor is in an unidentified area, return empty results. + return nil, nil + } + return result, nil +} + +type posRange struct { + start, end token.Pos +} + +// highlightFuncControlFlow adds highlight ranges to the result map to +// associate results and result parameters. +// +// Specifically, if the cursor is in a result or result parameter, all +// results and result parameters with the same index are highlighted. If the +// cursor is in a 'func' or 'return' keyword, the func keyword as well as all +// returns from that func are highlighted. 
+// +// As a special case, if the cursor is within a complicated expression, control +// flow highlighting is disabled, as it would highlight too much. +func highlightFuncControlFlow(path []ast.Node, result map[posRange]unit) { + + var ( + funcType *ast.FuncType // type of enclosing func, or nil + funcBody *ast.BlockStmt // body of enclosing func, or nil + returnStmt *ast.ReturnStmt // enclosing ReturnStmt within the func, or nil + ) + +findEnclosingFunc: + for i, n := range path { + switch n := n.(type) { + // TODO(rfindley, low priority): these pre-existing cases for KeyValueExpr + // and CallExpr appear to avoid highlighting when the cursor is in a + // complicated expression. However, the basis for this heuristic is + // unclear. Can we formalize a rationale? + case *ast.KeyValueExpr: + // If cursor is in a key: value expr, we don't want control flow highlighting. + return + + case *ast.CallExpr: + // If cursor is an arg in a callExpr, we don't want control flow highlighting. + if i > 0 { + for _, arg := range n.Args { + if arg == path[i-1] { + return + } + } + } + + case *ast.FuncLit: + funcType = n.Type + funcBody = n.Body + break findEnclosingFunc + + case *ast.FuncDecl: + funcType = n.Type + funcBody = n.Body + break findEnclosingFunc + + case *ast.ReturnStmt: + returnStmt = n + } + } + + if funcType == nil { + return // cursor is not in a function + } + + // Helper functions for inspecting the current location. + var ( + pos = path[0].Pos() + inSpan = func(start, end token.Pos) bool { return start <= pos && pos < end } + inNode = func(n ast.Node) bool { return inSpan(n.Pos(), n.End()) } + ) + + inResults := funcType.Results != nil && inNode(funcType.Results) + + // If the cursor is on a "return" or "func" keyword, but not highlighting any + // specific field or expression, we should highlight all of the exit points + // of the function, including the "return" and "func" keywords. 
+ funcEnd := funcType.Func + token.Pos(len("func")) + highlightAll := path[0] == returnStmt || inSpan(funcType.Func, funcEnd) + var highlightIndexes map[int]bool + + if highlightAll { + // Add the "func" part of the func declaration. + result[posRange{ + start: funcType.Func, + end: funcEnd, + }] = unit{} + } else if returnStmt == nil && !inResults { + return // nothing to highlight + } else { + // If we're not highlighting the entire return statement, we need to collect + // specific result indexes to highlight. This may be more than one index if + // the cursor is on a multi-name result field, but not in any specific name. + if !highlightAll { + highlightIndexes = make(map[int]bool) + if returnStmt != nil { + for i, n := range returnStmt.Results { + if inNode(n) { + highlightIndexes[i] = true + break + } + } + } + + if funcType.Results != nil { + // Scan fields, either adding highlights according to the highlightIndexes + // computed above, or accounting for the cursor position within the result + // list. + // (We do both at once to avoid repeating the cumbersome field traversal.) + i := 0 + findField: + for _, field := range funcType.Results.List { + for j, name := range field.Names { + if inNode(name) || highlightIndexes[i+j] { + result[posRange{name.Pos(), name.End()}] = unit{} + highlightIndexes[i+j] = true + break findField // found/highlighted the specific name + } + } + // If the cursor is in a field but not in a name (e.g. in the space, or + // the type), highlight the whole field. + // + // Note that this may not be ideal if we're at e.g. + // + // (x,‸y int, z int8) + // + // ...where it would make more sense to highlight only y. But we don't + // reach this function if not in a func, return, ident, or basiclit. 
+ if inNode(field) || highlightIndexes[i] { + result[posRange{field.Pos(), field.End()}] = unit{} + highlightIndexes[i] = true + if inNode(field) { + for j := range field.Names { + highlightIndexes[i+j] = true + } + } + break findField // found/highlighted the field + } + + n := len(field.Names) + if n == 0 { + n = 1 + } + i += n + } + } + } + } + + if funcBody != nil { + ast.Inspect(funcBody, func(n ast.Node) bool { + switch n := n.(type) { + case *ast.FuncDecl, *ast.FuncLit: + // Don't traverse into any functions other than enclosingFunc. + return false + case *ast.ReturnStmt: + if highlightAll { + // Add the entire return statement. + result[posRange{n.Pos(), n.End()}] = unit{} + } else { + // Add the highlighted indexes. + for i, expr := range n.Results { + if highlightIndexes[i] { + result[posRange{expr.Pos(), expr.End()}] = unit{} + } + } + } + return false + + } + return true + }) + } +} + +// highlightUnlabeledBreakFlow highlights the innermost enclosing for/range/switch or select +func highlightUnlabeledBreakFlow(path []ast.Node, info *types.Info, result map[posRange]struct{}) { + // Reverse walk the path until we find closest loop, select, or switch. + for _, n := range path { + switch n.(type) { + case *ast.ForStmt, *ast.RangeStmt: + highlightLoopControlFlow(path, info, result) + return // only highlight the innermost statement + case *ast.SwitchStmt, *ast.TypeSwitchStmt: + highlightSwitchFlow(path, info, result) + return + case *ast.SelectStmt: + // TODO: add highlight when breaking a select. + return + } + } +} + +// highlightLabeledFlow highlights the enclosing labeled for, range, +// or switch statement denoted by a labeled break or continue stmt. 
+func highlightLabeledFlow(path []ast.Node, info *types.Info, stmt *ast.BranchStmt, result map[posRange]struct{}) { + use := info.Uses[stmt.Label] + if use == nil { + return + } + for _, n := range path { + if label, ok := n.(*ast.LabeledStmt); ok && info.Defs[label.Label] == use { + switch label.Stmt.(type) { + case *ast.ForStmt, *ast.RangeStmt: + highlightLoopControlFlow([]ast.Node{label.Stmt, label}, info, result) + case *ast.SwitchStmt, *ast.TypeSwitchStmt: + highlightSwitchFlow([]ast.Node{label.Stmt, label}, info, result) + } + return + } + } +} + +func labelFor(path []ast.Node) *ast.Ident { + if len(path) > 1 { + if n, ok := path[1].(*ast.LabeledStmt); ok { + return n.Label + } + } + return nil +} + +func highlightLoopControlFlow(path []ast.Node, info *types.Info, result map[posRange]struct{}) { + var loop ast.Node + var loopLabel *ast.Ident + stmtLabel := labelFor(path) +Outer: + // Reverse walk the path till we get to the for loop. + for i := range path { + switch n := path[i].(type) { + case *ast.ForStmt, *ast.RangeStmt: + loopLabel = labelFor(path[i:]) + + if stmtLabel == nil || loopLabel == stmtLabel { + loop = n + break Outer + } + } + } + if loop == nil { + return + } + + // Add the for statement. + rng := posRange{ + start: loop.Pos(), + end: loop.Pos() + token.Pos(len("for")), + } + result[rng] = struct{}{} + + // Traverse AST to find branch statements within the same for-loop. + ast.Inspect(loop, func(n ast.Node) bool { + switch n.(type) { + case *ast.ForStmt, *ast.RangeStmt: + return loop == n + case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: + return false + } + b, ok := n.(*ast.BranchStmt) + if !ok { + return true + } + if b.Label == nil || info.Uses[b.Label] == info.Defs[loopLabel] { + result[posRange{start: b.Pos(), end: b.End()}] = struct{}{} + } + return true + }) + + // Find continue statements in the same loop or switches/selects. 
+ ast.Inspect(loop, func(n ast.Node) bool { + switch n.(type) { + case *ast.ForStmt, *ast.RangeStmt: + return loop == n + } + + if n, ok := n.(*ast.BranchStmt); ok && n.Tok == token.CONTINUE { + result[posRange{start: n.Pos(), end: n.End()}] = struct{}{} + } + return true + }) + + // We don't need to check other for loops if we aren't looking for labeled statements. + if loopLabel == nil { + return + } + + // Find labeled branch statements in any loop. + ast.Inspect(loop, func(n ast.Node) bool { + b, ok := n.(*ast.BranchStmt) + if !ok { + return true + } + // statement with labels that matches the loop + if b.Label != nil && info.Uses[b.Label] == info.Defs[loopLabel] { + result[posRange{start: b.Pos(), end: b.End()}] = struct{}{} + } + return true + }) +} + +func highlightSwitchFlow(path []ast.Node, info *types.Info, result map[posRange]struct{}) { + var switchNode ast.Node + var switchNodeLabel *ast.Ident + stmtLabel := labelFor(path) +Outer: + // Reverse walk the path till we get to the switch statement. + for i := range path { + switch n := path[i].(type) { + case *ast.SwitchStmt, *ast.TypeSwitchStmt: + switchNodeLabel = labelFor(path[i:]) + if stmtLabel == nil || switchNodeLabel == stmtLabel { + switchNode = n + break Outer + } + } + } + // Cursor is not in a switch statement + if switchNode == nil { + return + } + + // Add the switch statement. + rng := posRange{ + start: switchNode.Pos(), + end: switchNode.Pos() + token.Pos(len("switch")), + } + result[rng] = struct{}{} + + // Traverse AST to find break statements within the same switch. 
+ ast.Inspect(switchNode, func(n ast.Node) bool { + switch n.(type) { + case *ast.SwitchStmt, *ast.TypeSwitchStmt: + return switchNode == n + case *ast.ForStmt, *ast.RangeStmt, *ast.SelectStmt: + return false + } + + b, ok := n.(*ast.BranchStmt) + if !ok || b.Tok != token.BREAK { + return true + } + + if b.Label == nil || info.Uses[b.Label] == info.Defs[switchNodeLabel] { + result[posRange{start: b.Pos(), end: b.End()}] = struct{}{} + } + return true + }) + + // We don't need to check other switches if we aren't looking for labeled statements. + if switchNodeLabel == nil { + return + } + + // Find labeled break statements in any switch + ast.Inspect(switchNode, func(n ast.Node) bool { + b, ok := n.(*ast.BranchStmt) + if !ok || b.Tok != token.BREAK { + return true + } + + if b.Label != nil && info.Uses[b.Label] == info.Defs[switchNodeLabel] { + result[posRange{start: b.Pos(), end: b.End()}] = struct{}{} + } + + return true + }) +} + +func highlightIdentifier(id *ast.Ident, file *ast.File, info *types.Info, result map[posRange]struct{}) { + highlight := func(n ast.Node) { + result[posRange{start: n.Pos(), end: n.End()}] = struct{}{} + } + + // obj may be nil if the Ident is undefined. + // In this case, the behavior expected by tests is + // to match other undefined Idents of the same name. + obj := info.ObjectOf(id) + + ast.Inspect(file, func(n ast.Node) bool { + switch n := n.(type) { + case *ast.Ident: + if n.Name == id.Name && info.ObjectOf(n) == obj { + highlight(n) + } + + case *ast.ImportSpec: + pkgname, ok := typesutil.ImportedPkgName(info, n) + if ok && pkgname == obj { + if n.Name != nil { + highlight(n.Name) + } else { + highlight(n) + } + } + } + return true + }) +} diff --git a/gopls/internal/golang/hover.go b/gopls/internal/golang/hover.go new file mode 100644 index 00000000000..296434b3270 --- /dev/null +++ b/gopls/internal/golang/hover.go @@ -0,0 +1,1411 @@ +// Copyright 2019 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "go/ast" + "go/constant" + "go/doc" + "go/format" + "go/token" + "go/types" + "io/fs" + "path/filepath" + "sort" + "strconv" + "strings" + "text/tabwriter" + "time" + "unicode/utf8" + + "golang.org/x/text/unicode/runenames" + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/gopls/internal/util/slices" + "golang.org/x/tools/gopls/internal/util/typesutil" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/tokeninternal" + "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" +) + +// hoverJSON contains the structured result of a hover query. It is +// formatted in one of several formats as determined by the HoverKind +// setting, one of which is JSON. +// +// We believe this is used only by govim. +// TODO(adonovan): see if we can wean all clients of this interface. +type hoverJSON struct { + // Synopsis is a single sentence synopsis of the symbol's documentation. + Synopsis string `json:"synopsis"` + + // FullDocumentation is the symbol's full documentation. + FullDocumentation string `json:"fullDocumentation"` + + // Signature is the symbol's signature. + Signature string `json:"signature"` + + // SingleLine is a single line describing the symbol. + // This is recommended only for use in clients that show a single line for hover. 
+ SingleLine string `json:"singleLine"` + + // SymbolName is the human-readable name to use for the symbol in links. + SymbolName string `json:"symbolName"` + + // LinkPath is the pkg.go.dev link for the given symbol. + // For example, the "go/ast" part of "pkg.go.dev/go/ast#Node". + LinkPath string `json:"linkPath"` + + // LinkAnchor is the pkg.go.dev link anchor for the given symbol. + // For example, the "Node" part of "pkg.go.dev/go/ast#Node". + LinkAnchor string `json:"linkAnchor"` + + // New fields go below, and are unexported. The existing + // exported fields are underspecified and have already + // constrained our movements too much. A detailed JSON + // interface might be nice, but it needs a design and a + // precise specification. + + // typeDecl is the declaration syntax for a type, + // or "" for a non-type. + typeDecl string + + // methods is the list of descriptions of methods of a type, + // omitting any that are obvious from typeDecl. + // It is "" for a non-type. + methods string + + // promotedFields is the list of descriptions of accessible + // fields of a (struct) type that were promoted through an + // embedded field. + promotedFields string +} + +// Hover implements the "textDocument/hover" RPC for Go files. +func Hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.Hover, error) { + ctx, done := event.Start(ctx, "golang.Hover") + defer done() + + rng, h, err := hover(ctx, snapshot, fh, position) + if err != nil { + return nil, err + } + if h == nil { + return nil, nil + } + hover, err := formatHover(h, snapshot.Options()) + if err != nil { + return nil, err + } + return &protocol.Hover{ + Contents: protocol.MarkupContent{ + Kind: snapshot.Options().PreferredContentFormat, + Value: hover, + }, + Range: rng, + }, nil +} + +// hover computes hover information at the given position. 
If we do not support +// hovering at the position, it returns _, nil, nil: an error is only returned +// if the position is valid but we fail to compute hover information. +func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) (protocol.Range, *hoverJSON, error) { + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return protocol.Range{}, nil, err + } + pos, err := pgf.PositionPos(pp) + if err != nil { + return protocol.Range{}, nil, err + } + + // Handle hovering over import paths, which do not have an associated + // identifier. + for _, spec := range pgf.File.Imports { + // We are inclusive of the end point here to allow hovering when the cursor + // is just after the import path. + if spec.Path.Pos() <= pos && pos <= spec.Path.End() { + return hoverImport(ctx, snapshot, pkg, pgf, spec) + } + } + + // Handle hovering over the package name, which does not have an associated + // object. + // As with import paths, we allow hovering just after the package name. + if pgf.File.Name != nil && pgf.File.Name.Pos() <= pos && pos <= pgf.File.Name.Pos() { + return hoverPackageName(pkg, pgf) + } + + // Handle hovering over (non-import-path) literals. + if path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos); len(path) > 0 { + if lit, _ := path[0].(*ast.BasicLit); lit != nil { + return hoverLit(pgf, lit, pos) + } + } + + // Handle hovering over embed directive argument. + pattern, embedRng := parseEmbedDirective(pgf.Mapper, pp) + if pattern != "" { + return hoverEmbed(fh, embedRng, pattern) + } + + // Handle linkname directive by overriding what to look for. + var linkedRange *protocol.Range // range referenced by linkname directive, or nil + if pkgPath, name, offset := parseLinkname(pgf.Mapper, pp); pkgPath != "" && name != "" { + // rng covering 2nd linkname argument: pkgPath.name. 
+ rng, err := pgf.PosRange(pgf.Tok.Pos(offset), pgf.Tok.Pos(offset+len(pkgPath)+len(".")+len(name))) + if err != nil { + return protocol.Range{}, nil, fmt.Errorf("range over linkname arg: %w", err) + } + linkedRange = &rng + + pkg, pgf, pos, err = findLinkname(ctx, snapshot, PackagePath(pkgPath), name) + if err != nil { + return protocol.Range{}, nil, fmt.Errorf("find linkname: %w", err) + } + } + + // The general case: compute hover information for the object referenced by + // the identifier at pos. + ident, obj, selectedType := referencedObject(pkg, pgf, pos) + if obj == nil || ident == nil { + return protocol.Range{}, nil, nil // no object to hover + } + + // Unless otherwise specified, rng covers the ident being hovered. + var rng protocol.Range + if linkedRange != nil { + rng = *linkedRange + } else { + rng, err = pgf.NodeRange(ident) + if err != nil { + return protocol.Range{}, nil, err + } + } + + // By convention, we qualify hover information relative to the package + // from which the request originated. + qf := typesutil.FileQualifier(pgf.File, pkg.Types(), pkg.TypesInfo()) + + // Handle type switch identifiers as a special case, since they don't have an + // object. + // + // There's not much useful information to provide. + if selectedType != nil { + fakeObj := types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), selectedType) + signature := types.ObjectString(fakeObj, qf) + return rng, &hoverJSON{ + Signature: signature, + SingleLine: signature, + SymbolName: fakeObj.Name(), + }, nil + } + + // Handle builtins, which don't have a package or position. + if !obj.Pos().IsValid() { + h, err := hoverBuiltin(ctx, snapshot, obj) + return rng, h, err + } + + // For all other objects, consider the full syntax of their declaration in + // order to correctly compute their documentation, signature, and link. + // + // Beware: decl{PGF,Pos} are not necessarily associated with pkg.FileSet(). 
+ declPGF, declPos, err := parseFull(ctx, snapshot, pkg.FileSet(), obj.Pos()) + if err != nil { + return protocol.Range{}, nil, fmt.Errorf("re-parsing declaration of %s: %v", obj.Name(), err) + } + decl, spec, field := findDeclInfo([]*ast.File{declPGF.File}, declPos) // may be nil^3 + comment := chooseDocComment(decl, spec, field) + docText := comment.Text() + + // By default, types.ObjectString provides a reasonable signature. + signature := objectString(obj, qf, declPos, declPGF.Tok, spec) + singleLineSignature := signature + + // TODO(rfindley): we could do much better for inferred signatures. + // TODO(adonovan): fuse the two calls below. + if inferred := inferredSignature(pkg.TypesInfo(), ident); inferred != nil { + if s := inferredSignatureString(obj, qf, inferred); s != "" { + signature = s + } + } + + // Compute size information for types, + // and (size, offset) for struct fields. + // + // Also, if a struct type's field ordering is significantly + // wasteful of space, report its optimal size. + // + // This information is useful when debugging crashes or + // optimizing layout. To reduce distraction, we show it only + // when hovering over the declaring identifier, + // but not referring identifiers. + // + // Size and alignment vary across OS/ARCH. + // Gopls will select the appropriate build configuration when + // viewing a type declaration in a build-tagged file, but will + // use the default build config for all other types, even + // if they embed platform-variant types. + // + var sizeOffset string // optional size/offset description + if def, ok := pkg.TypesInfo().Defs[ident]; ok && ident.Pos() == def.Pos() { + // This is the declaring identifier. + // (We can't simply use ident.Pos() == obj.Pos() because + // referencedObject prefers the TypeName for an embedded field). + + // format returns the decimal and hex representation of x. 
+ format := func(x int64) string { + if x < 10 { + return fmt.Sprintf("%d", x) + } + return fmt.Sprintf("%[1]d (%#[1]x)", x) + } + + path := pathEnclosingObjNode(pgf.File, pos) + + // Build string of form "size=... (X% wasted), offset=...". + size, wasted, offset := computeSizeOffsetInfo(pkg, path, obj) + var buf strings.Builder + if size >= 0 { + fmt.Fprintf(&buf, "size=%s", format(size)) + if wasted >= 20 { // >=20% wasted + fmt.Fprintf(&buf, " (%d%% wasted)", wasted) + } + } + if offset >= 0 { + if buf.Len() > 0 { + buf.WriteString(", ") + } + fmt.Fprintf(&buf, "offset=%s", format(offset)) + } + sizeOffset = buf.String() + } + + var typeDecl, methods, fields string + + // For "objects defined by a type spec", the signature produced by + // objectString is insufficient: + // (1) large structs are formatted poorly, with no newlines + // (2) we lose inline comments + // Furthermore, we include a summary of their method set. + _, isTypeName := obj.(*types.TypeName) + _, isTypeParam := aliases.Unalias(obj.Type()).(*types.TypeParam) + if isTypeName && !isTypeParam { + spec, ok := spec.(*ast.TypeSpec) + if !ok { + // We cannot find a TypeSpec for this type or alias declaration + // (that is not a type parameter or a built-in). + // This should be impossible even for ill-formed trees; + // we suspect that AST repair may be creating inconsistent + // positions. Don't report a bug in that case. (#64241) + errorf := fmt.Errorf + if !declPGF.Fixed() { + errorf = bug.Errorf + } + return protocol.Range{}, nil, errorf("type name %q without type spec", obj.Name()) + } + + // Format the type's declaration syntax. + { + // Don't duplicate comments. + spec2 := *spec + spec2.Doc = nil + spec2.Comment = nil + + var b strings.Builder + b.WriteString("type ") + fset := tokeninternal.FileSetFor(declPGF.Tok) + // TODO(adonovan): use a smarter formatter that omits + // inaccessible fields (non-exported ones from other packages). 
+ if err := format.Node(&b, fset, &spec2); err != nil { + return protocol.Range{}, nil, err + } + typeDecl = b.String() + + // Splice in size/offset at end of first line. + // "type T struct { // size=..." + if sizeOffset != "" { + nl := strings.IndexByte(typeDecl, '\n') + if nl < 0 { + nl = len(typeDecl) + } + typeDecl = typeDecl[:nl] + " // " + sizeOffset + typeDecl[nl:] + } + } + + // Promoted fields + // + // Show a table of accessible fields of the (struct) + // type that may not be visible in the syntax (above) + // due to promotion through embedded fields. + // + // Example: + // + // // Embedded fields: + // foo int // through x.y + // z string // through x.y + if prom := promotedFields(obj.Type(), pkg.Types()); len(prom) > 0 { + var b strings.Builder + b.WriteString("// Embedded fields:\n") + w := tabwriter.NewWriter(&b, 0, 8, 1, ' ', 0) + for _, f := range prom { + fmt.Fprintf(w, "%s\t%s\t// through %s\t\n", + f.field.Name(), + types.TypeString(f.field.Type(), qf), + f.path) + } + w.Flush() + b.WriteByte('\n') + fields = b.String() + } + + // -- methods -- + + // For an interface type, explicit methods will have + // already been displayed when the node was formatted + // above. Don't list these again. + var skip map[string]bool + if iface, ok := spec.Type.(*ast.InterfaceType); ok { + if iface.Methods.List != nil { + for _, m := range iface.Methods.List { + if len(m.Names) == 1 { + if skip == nil { + skip = make(map[string]bool) + } + skip[m.Names[0].Name] = true + } + } + } + } + + // Display all the type's accessible methods, + // including those that require a pointer receiver, + // and those promoted from embedded struct fields or + // embedded interfaces. 
+ var b strings.Builder + for _, m := range typeutil.IntuitiveMethodSet(obj.Type(), nil) { + if !accessibleTo(m.Obj(), pkg.Types()) { + continue // inaccessible + } + if skip[m.Obj().Name()] { + continue // redundant with format.Node above + } + if b.Len() > 0 { + b.WriteByte('\n') + } + + // Use objectString for its prettier rendering of method receivers. + b.WriteString(objectString(m.Obj(), qf, token.NoPos, nil, nil)) + } + methods = b.String() + + signature = typeDecl + "\n" + methods + } else { + // Non-types + if sizeOffset != "" { + signature += " // " + sizeOffset + } + } + + // Compute link data (on pkg.go.dev or other documentation host). + // + // If linkPath is empty, the symbol is not linkable. + var ( + linkName string // => link title, always non-empty + linkPath string // => link path + anchor string // link anchor + linkMeta *metadata.Package // metadata for the linked package + ) + { + linkMeta = findFileInDeps(snapshot, pkg.Metadata(), declPGF.URI) + if linkMeta == nil { + return protocol.Range{}, nil, bug.Errorf("no package data for %s", declPGF.URI) + } + + // For package names, we simply link to their imported package. + if pkgName, ok := obj.(*types.PkgName); ok { + linkName = pkgName.Name() + linkPath = pkgName.Imported().Path() + impID := linkMeta.DepsByPkgPath[PackagePath(pkgName.Imported().Path())] + linkMeta = snapshot.Metadata(impID) + if linkMeta == nil { + // Broken imports have fake package paths, so it is not a bug if we + // don't have metadata. As of writing, there is no way to distinguish + // broken imports from a true bug where expected metadata is missing. + return protocol.Range{}, nil, fmt.Errorf("no package data for %s", declPGF.URI) + } + } else { + // For all others, check whether the object is in the package scope, or + // an exported field or method of an object in the package scope. + // + // We try to match pkgsite's heuristics for what is linkable, and what is + // not. 
+ var recv types.Object + switch obj := obj.(type) { + case *types.Func: + sig := obj.Type().(*types.Signature) + if sig.Recv() != nil { + tname := typeToObject(sig.Recv().Type()) + if tname != nil { // beware typed nil + recv = tname + } + } + case *types.Var: + if obj.IsField() { + if spec, ok := spec.(*ast.TypeSpec); ok { + typeName := spec.Name + scopeObj, _ := obj.Pkg().Scope().Lookup(typeName.Name).(*types.TypeName) + if scopeObj != nil { + if st, _ := scopeObj.Type().Underlying().(*types.Struct); st != nil { + for i := 0; i < st.NumFields(); i++ { + if obj == st.Field(i) { + recv = scopeObj + } + } + } + } + } + } + } + + // Even if the object is not available in package documentation, it may + // be embedded in a documented receiver. Detect this by searching + // enclosing selector expressions. + // + // TODO(rfindley): pkgsite doesn't document fields from embedding, just + // methods. + if recv == nil || !recv.Exported() { + path := pathEnclosingObjNode(pgf.File, pos) + if enclosing := searchForEnclosing(pkg.TypesInfo(), path); enclosing != nil { + recv = enclosing + } else { + recv = nil // note: just recv = ... could result in a typed nil. 
+ } + } + + pkg := obj.Pkg() + if recv != nil { + linkName = fmt.Sprintf("(%s.%s).%s", pkg.Name(), recv.Name(), obj.Name()) + if obj.Exported() && recv.Exported() && pkg.Scope().Lookup(recv.Name()) == recv { + linkPath = pkg.Path() + anchor = fmt.Sprintf("%s.%s", recv.Name(), obj.Name()) + } + } else { + linkName = fmt.Sprintf("%s.%s", pkg.Name(), obj.Name()) + if obj.Exported() && pkg.Scope().Lookup(obj.Name()) == obj { + linkPath = pkg.Path() + anchor = obj.Name() + } + } + } + } + + if snapshot.IsGoPrivatePath(linkPath) || linkMeta.ForTest != "" { + linkPath = "" + } else if linkMeta.Module != nil && linkMeta.Module.Version != "" { + mod := linkMeta.Module + linkPath = strings.Replace(linkPath, mod.Path, mod.Path+"@"+mod.Version, 1) + } + + return rng, &hoverJSON{ + Synopsis: doc.Synopsis(docText), + FullDocumentation: docText, + SingleLine: singleLineSignature, + SymbolName: linkName, + Signature: signature, + LinkPath: linkPath, + LinkAnchor: anchor, + typeDecl: typeDecl, + methods: methods, + promotedFields: fields, + }, nil +} + +// hoverBuiltin computes hover information when hovering over a builtin +// identifier. +func hoverBuiltin(ctx context.Context, snapshot *cache.Snapshot, obj types.Object) (*hoverJSON, error) { + // Special handling for error.Error, which is the only builtin method. + // + // TODO(rfindley): can this be unified with the handling below? + if obj.Name() == "Error" { + signature := obj.String() + return &hoverJSON{ + Signature: signature, + SingleLine: signature, + // TODO(rfindley): these are better than the current behavior. 
+ // SymbolName: "(error).Error", + // LinkPath: "builtin", + // LinkAnchor: "error.Error", + }, nil + } + + pgf, node, err := builtinDecl(ctx, snapshot, obj) + if err != nil { + return nil, err + } + + var comment *ast.CommentGroup + path, _ := astutil.PathEnclosingInterval(pgf.File, node.Pos(), node.End()) + for _, n := range path { + switch n := n.(type) { + case *ast.GenDecl: + // Separate documentation and signature. + comment = n.Doc + node2 := *n + node2.Doc = nil + node = &node2 + case *ast.FuncDecl: + // Ditto. + comment = n.Doc + node2 := *n + node2.Doc = nil + node = &node2 + } + } + + signature := FormatNodeFile(pgf.Tok, node) + // Replace fake types with their common equivalent. + // TODO(rfindley): we should instead use obj.Type(), which would have the + // *actual* types of the builtin call. + signature = replacer.Replace(signature) + + docText := comment.Text() + return &hoverJSON{ + Synopsis: doc.Synopsis(docText), + FullDocumentation: docText, + Signature: signature, + SingleLine: obj.String(), + SymbolName: obj.Name(), + LinkPath: "builtin", + LinkAnchor: obj.Name(), + }, nil +} + +// hoverImport computes hover information when hovering over the import path of +// imp in the file pgf of pkg. 
+// +// If we do not have metadata for the hovered import, it returns _ +func hoverImport(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, imp *ast.ImportSpec) (protocol.Range, *hoverJSON, error) { + rng, err := pgf.NodeRange(imp.Path) + if err != nil { + return protocol.Range{}, nil, err + } + + importPath := metadata.UnquoteImportPath(imp) + if importPath == "" { + return protocol.Range{}, nil, fmt.Errorf("invalid import path") + } + impID := pkg.Metadata().DepsByImpPath[importPath] + if impID == "" { + return protocol.Range{}, nil, fmt.Errorf("no package data for import %q", importPath) + } + impMetadata := snapshot.Metadata(impID) + if impMetadata == nil { + return protocol.Range{}, nil, bug.Errorf("failed to resolve import ID %q", impID) + } + + // Find the first file with a package doc comment. + var comment *ast.CommentGroup + for _, f := range impMetadata.CompiledGoFiles { + fh, err := snapshot.ReadFile(ctx, f) + if err != nil { + if ctx.Err() != nil { + return protocol.Range{}, nil, ctx.Err() + } + continue + } + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Header) + if err != nil { + if ctx.Err() != nil { + return protocol.Range{}, nil, ctx.Err() + } + continue + } + if pgf.File.Doc != nil { + comment = pgf.File.Doc + break + } + } + + docText := comment.Text() + return rng, &hoverJSON{ + Synopsis: doc.Synopsis(docText), + FullDocumentation: docText, + }, nil +} + +// hoverPackageName computes hover information for the package name of the file +// pgf in pkg. 
+func hoverPackageName(pkg *cache.Package, pgf *parsego.File) (protocol.Range, *hoverJSON, error) { + var comment *ast.CommentGroup + for _, pgf := range pkg.CompiledGoFiles() { + if pgf.File.Doc != nil { + comment = pgf.File.Doc + break + } + } + rng, err := pgf.NodeRange(pgf.File.Name) + if err != nil { + return protocol.Range{}, nil, err + } + docText := comment.Text() + return rng, &hoverJSON{ + Synopsis: doc.Synopsis(docText), + FullDocumentation: docText, + // Note: including a signature is redundant, since the cursor is already on the + // package name. + }, nil +} + +// hoverLit computes hover information when hovering over the basic literal lit +// in the file pgf. The provided pos must be the exact position of the cursor, +// as it is used to extract the hovered rune in strings. +// +// For example, hovering over "\u2211" in "foo \u2211 bar" yields: +// +// '∑', U+2211, N-ARY SUMMATION +func hoverLit(pgf *parsego.File, lit *ast.BasicLit, pos token.Pos) (protocol.Range, *hoverJSON, error) { + var ( + value string // if non-empty, a constant value to format in hover + r rune // if non-zero, format a description of this rune in hover + start, end token.Pos // hover span + ) + // Extract a rune from the current position. + // 'Ω', "...Ω...", or 0x03A9 => 'Ω', U+03A9, GREEK CAPITAL LETTER OMEGA + switch lit.Kind { + case token.CHAR: + s, err := strconv.Unquote(lit.Value) + if err != nil { + // If the conversion fails, it's because of an invalid syntax, therefore + // there is no rune to be found. + return protocol.Range{}, nil, nil + } + r, _ = utf8.DecodeRuneInString(s) + if r == utf8.RuneError { + return protocol.Range{}, nil, fmt.Errorf("rune error") + } + start, end = lit.Pos(), lit.End() + + case token.INT: + // Short literals (e.g. 99 decimal, 07 octal) are uninteresting. 
+ if len(lit.Value) < 3 { + return protocol.Range{}, nil, nil + } + + v := constant.MakeFromLiteral(lit.Value, lit.Kind, 0) + if v.Kind() != constant.Int { + return protocol.Range{}, nil, nil + } + + switch lit.Value[:2] { + case "0x", "0X": + // As a special case, try to recognize hexadecimal literals as runes if + // they are within the range of valid unicode values. + if v, ok := constant.Int64Val(v); ok && v > 0 && v <= utf8.MaxRune && utf8.ValidRune(rune(v)) { + r = rune(v) + } + fallthrough + case "0o", "0O", "0b", "0B": + // Format the decimal value of non-decimal literals. + value = v.ExactString() + start, end = lit.Pos(), lit.End() + default: + return protocol.Range{}, nil, nil + } + + case token.STRING: + // It's a string, scan only if it contains a unicode escape sequence under or before the + // current cursor position. + litOffset, err := safetoken.Offset(pgf.Tok, lit.Pos()) + if err != nil { + return protocol.Range{}, nil, err + } + offset, err := safetoken.Offset(pgf.Tok, pos) + if err != nil { + return protocol.Range{}, nil, err + } + for i := offset - litOffset; i > 0; i-- { + // Start at the cursor position and search backward for the beginning of a rune escape sequence. + rr, _ := utf8.DecodeRuneInString(lit.Value[i:]) + if rr == utf8.RuneError { + return protocol.Range{}, nil, fmt.Errorf("rune error") + } + if rr == '\\' { + // Got the beginning, decode it. + var tail string + r, _, tail, err = strconv.UnquoteChar(lit.Value[i:], '"') + if err != nil { + // If the conversion fails, it's because of an invalid syntax, + // therefore is no rune to be found. + return protocol.Range{}, nil, nil + } + // Only the rune escape sequence part of the string has to be highlighted, recompute the range. 
+ runeLen := len(lit.Value) - (i + len(tail)) + start = token.Pos(int(lit.Pos()) + i) + end = token.Pos(int(start) + runeLen) + break + } + } + } + + if value == "" && r == 0 { // nothing to format + return protocol.Range{}, nil, nil + } + + rng, err := pgf.PosRange(start, end) + if err != nil { + return protocol.Range{}, nil, err + } + + var b strings.Builder + if value != "" { + b.WriteString(value) + } + if r != 0 { + runeName := runenames.Name(r) + if len(runeName) > 0 && runeName[0] == '<' { + // Check if the rune looks like an HTML tag. If so, trim the surrounding <> + // characters to work around https://github.com/microsoft/vscode/issues/124042. + runeName = strings.TrimRight(runeName[1:], ">") + } + if b.Len() > 0 { + b.WriteString(", ") + } + if strconv.IsPrint(r) { + fmt.Fprintf(&b, "'%c', ", r) + } + fmt.Fprintf(&b, "U+%04X, %s", r, runeName) + } + hover := b.String() + return rng, &hoverJSON{ + Synopsis: hover, + FullDocumentation: hover, + }, nil +} + +// hoverEmbed computes hover information for a filepath.Match pattern. +// Assumes that the pattern is relative to the location of fh. +func hoverEmbed(fh file.Handle, rng protocol.Range, pattern string) (protocol.Range, *hoverJSON, error) { + s := &strings.Builder{} + + dir := filepath.Dir(fh.URI().Path()) + var matches []string + err := filepath.WalkDir(dir, func(abs string, d fs.DirEntry, e error) error { + if e != nil { + return e + } + rel, err := filepath.Rel(dir, abs) + if err != nil { + return err + } + ok, err := filepath.Match(pattern, rel) + if err != nil { + return err + } + if ok && !d.IsDir() { + matches = append(matches, rel) + } + return nil + }) + if err != nil { + return protocol.Range{}, nil, err + } + + for _, m := range matches { + // TODO: Renders each file as separate markdown paragraphs. + // If forcing (a single) newline is possible it might be more clear. 
+ fmt.Fprintf(s, "%s\n\n", m) + } + + json := &hoverJSON{ + Signature: fmt.Sprintf("Embedding %q", pattern), + Synopsis: s.String(), + FullDocumentation: s.String(), + } + return rng, json, nil +} + +// inferredSignatureString is a wrapper around the types.ObjectString function +// that adds more information to inferred signatures. It will return an empty string +// if the passed types.Object is not a signature. +func inferredSignatureString(obj types.Object, qf types.Qualifier, inferred *types.Signature) string { + // If the signature type was inferred, prefer the inferred signature with a + // comment showing the generic signature. + if sig, _ := obj.Type().Underlying().(*types.Signature); sig != nil && sig.TypeParams().Len() > 0 && inferred != nil { + obj2 := types.NewFunc(obj.Pos(), obj.Pkg(), obj.Name(), inferred) + str := types.ObjectString(obj2, qf) + // Try to avoid overly long lines. + if len(str) > 60 { + str += "\n" + } else { + str += " " + } + str += "// " + types.TypeString(sig, qf) + return str + } + return "" +} + +// objectString is a wrapper around the types.ObjectString function. +// It handles adding more information to the object string. +// If spec is non-nil, it may be used to format additional declaration +// syntax, and file must be the token.File describing its positions. +// +// Precondition: obj is not a built-in function or method. +func objectString(obj types.Object, qf types.Qualifier, declPos token.Pos, file *token.File, spec ast.Spec) string { + str := types.ObjectString(obj, qf) + + switch obj := obj.(type) { + case *types.Func: + // We fork ObjectString to improve its rendering of methods: + // specifically, we show the receiver name, + // and replace the period in (T).f by a space (#62190). 
+ + sig := obj.Type().(*types.Signature) + + var buf bytes.Buffer + buf.WriteString("func ") + if recv := sig.Recv(); recv != nil { + buf.WriteByte('(') + if _, ok := recv.Type().(*types.Interface); ok { + // gcimporter creates abstract methods of + // named interfaces using the interface type + // (not the named type) as the receiver. + // Don't print it in full. + buf.WriteString("interface") + } else { + // Show receiver name (go/types does not). + name := recv.Name() + if name != "" && name != "_" { + buf.WriteString(name) + buf.WriteString(" ") + } + types.WriteType(&buf, recv.Type(), qf) + } + buf.WriteByte(')') + buf.WriteByte(' ') // space (go/types uses a period) + } else if s := qf(obj.Pkg()); s != "" { + buf.WriteString(s) + buf.WriteString(".") + } + buf.WriteString(obj.Name()) + types.WriteSignature(&buf, sig, qf) + str = buf.String() + + case *types.Const: + // Show value of a constant. + var ( + declaration = obj.Val().String() // default formatted declaration + comment = "" // if non-empty, a clarifying comment + ) + + // Try to use the original declaration. + switch obj.Val().Kind() { + case constant.String: + // Usually the original declaration of a string doesn't carry much information. + // Also strings can be very long. So, just use the constant's value. + + default: + if spec, _ := spec.(*ast.ValueSpec); spec != nil { + for i, name := range spec.Names { + if declPos == name.Pos() { + if i < len(spec.Values) { + originalDeclaration := FormatNodeFile(file, spec.Values[i]) + if originalDeclaration != declaration { + comment = declaration + declaration = originalDeclaration + } + } + break + } + } + } + } + + // Special formatting cases. + switch typ := aliases.Unalias(obj.Type()).(type) { + case *types.Named: + // Try to add a formatted duration as an inline comment. 
+ pkg := typ.Obj().Pkg() + if pkg.Path() == "time" && typ.Obj().Name() == "Duration" && obj.Val().Kind() == constant.Int { + if d, ok := constant.Int64Val(obj.Val()); ok { + comment = time.Duration(d).String() + } + } + } + if comment == declaration { + comment = "" + } + + str += " = " + declaration + if comment != "" { + str += " // " + comment + } + } + return str +} + +// HoverDocForObject returns the best doc comment for obj (for which +// fset provides file/line information). +// +// TODO(rfindley): there appears to be zero(!) tests for this functionality. +func HoverDocForObject(ctx context.Context, snapshot *cache.Snapshot, fset *token.FileSet, obj types.Object) (*ast.CommentGroup, error) { + if is[*types.TypeName](obj) && is[*types.TypeParam](obj.Type()) { + return nil, nil + } + + pgf, pos, err := parseFull(ctx, snapshot, fset, obj.Pos()) + if err != nil { + return nil, fmt.Errorf("re-parsing: %v", err) + } + + decl, spec, field := findDeclInfo([]*ast.File{pgf.File}, pos) + return chooseDocComment(decl, spec, field), nil +} + +func chooseDocComment(decl ast.Decl, spec ast.Spec, field *ast.Field) *ast.CommentGroup { + if field != nil { + if field.Doc != nil { + return field.Doc + } + if field.Comment != nil { + return field.Comment + } + return nil + } + switch decl := decl.(type) { + case *ast.FuncDecl: + return decl.Doc + case *ast.GenDecl: + switch spec := spec.(type) { + case *ast.ValueSpec: + if spec.Doc != nil { + return spec.Doc + } + if decl.Doc != nil { + return decl.Doc + } + return spec.Comment + case *ast.TypeSpec: + if spec.Doc != nil { + return spec.Doc + } + if decl.Doc != nil { + return decl.Doc + } + return spec.Comment + } + } + return nil +} + +// parseFull fully parses the file corresponding to position pos (for +// which fset provides file/line information). +// +// It returns the resulting parsego.File as well as new pos contained +// in the parsed file. 
+// +// BEWARE: the provided FileSet is used only to interpret the provided +// pos; the resulting File and Pos may belong to the same or a +// different FileSet, such as one synthesized by the parser cache, if +// parse-caching is enabled. +func parseFull(ctx context.Context, snapshot *cache.Snapshot, fset *token.FileSet, pos token.Pos) (*parsego.File, token.Pos, error) { + f := fset.File(pos) + if f == nil { + return nil, 0, bug.Errorf("internal error: no file for position %d", pos) + } + + uri := protocol.URIFromPath(f.Name()) + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return nil, 0, err + } + + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return nil, 0, err + } + + offset, err := safetoken.Offset(f, pos) + if err != nil { + return nil, 0, bug.Errorf("offset out of bounds in %q", uri) + } + + fullPos, err := safetoken.Pos(pgf.Tok, offset) + if err != nil { + return nil, 0, err + } + + return pgf, fullPos, nil +} + +func formatHover(h *hoverJSON, options *settings.Options) (string, error) { + maybeMarkdown := func(s string) string { + if s != "" && options.PreferredContentFormat == protocol.Markdown { + s = fmt.Sprintf("```go\n%s\n```", strings.Trim(s, "\n")) + } + return s + } + + switch options.HoverKind { + case settings.SingleLine: + return h.SingleLine, nil + + case settings.NoDocumentation: + return maybeMarkdown(h.Signature), nil + + case settings.Structured: + b, err := json.Marshal(h) + if err != nil { + return "", err + } + return string(b), nil + + case settings.SynopsisDocumentation, + settings.FullDocumentation: + // For types, we display TypeDecl and Methods, + // but not Signature, which is redundant (= TypeDecl + "\n" + Methods). + // For all other symbols, we display Signature; + // TypeDecl and Methods are empty. + // (This awkwardness is to preserve JSON compatibility.) 
+ parts := []string{ + maybeMarkdown(h.Signature), + maybeMarkdown(h.typeDecl), + formatDoc(h, options), + maybeMarkdown(h.promotedFields), + maybeMarkdown(h.methods), + formatLink(h, options), + } + if h.typeDecl != "" { + parts[0] = "" // type: suppress redundant Signature + } + parts = slices.Remove(parts, "") + + var b strings.Builder + for i, part := range parts { + if i > 0 { + if options.PreferredContentFormat == protocol.Markdown { + b.WriteString("\n\n") + } else { + b.WriteByte('\n') + } + } + b.WriteString(part) + } + return b.String(), nil + + default: + return "", fmt.Errorf("invalid HoverKind: %v", options.HoverKind) + } +} + +func formatLink(h *hoverJSON, options *settings.Options) string { + if !options.LinksInHover || options.LinkTarget == "" || h.LinkPath == "" { + return "" + } + plainLink := cache.BuildLink(options.LinkTarget, h.LinkPath, h.LinkAnchor) + switch options.PreferredContentFormat { + case protocol.Markdown: + return fmt.Sprintf("[`%s` on %s](%s)", h.SymbolName, options.LinkTarget, plainLink) + case protocol.PlainText: + return "" + default: + return plainLink + } +} + +func formatDoc(h *hoverJSON, options *settings.Options) string { + var doc string + switch options.HoverKind { + case settings.SynopsisDocumentation: + doc = h.Synopsis + case settings.FullDocumentation: + doc = h.FullDocumentation + } + if options.PreferredContentFormat == protocol.Markdown { + return CommentToMarkdown(doc, options) + } + return doc +} + +// findDeclInfo returns the syntax nodes involved in the declaration of the +// types.Object with position pos, searching the given list of file syntax +// trees. +// +// Pos may be the position of the name-defining identifier in a FuncDecl, +// ValueSpec, TypeSpec, Field, or as a special case the position of +// Ellipsis.Elt in an ellipsis field. +// +// If found, the resulting decl, spec, and field will be the inner-most +// instance of each node type surrounding pos. 
+// +// If field is non-nil, pos is the position of a field Var. If field is nil and +// spec is non-nil, pos is the position of a Var, Const, or TypeName object. If +// both field and spec are nil and decl is non-nil, pos is the position of a +// Func object. +// +// It returns a nil decl if no object-defining node is found at pos. +// +// TODO(rfindley): this function has tricky semantics, and may be worth unit +// testing and/or refactoring. +func findDeclInfo(files []*ast.File, pos token.Pos) (decl ast.Decl, spec ast.Spec, field *ast.Field) { + found := false + + // Visit the files in search of the node at pos. + stack := make([]ast.Node, 0, 20) + + // Allocate the closure once, outside the loop. + f := func(n ast.Node) bool { + if found { + return false + } + if n != nil { + stack = append(stack, n) // push + } else { + stack = stack[:len(stack)-1] // pop + return false + } + + // Skip subtrees (incl. files) that don't contain the search point. + if !(n.Pos() <= pos && pos < n.End()) { + return false + } + + switch n := n.(type) { + case *ast.Field: + findEnclosingDeclAndSpec := func() { + for i := len(stack) - 1; i >= 0; i-- { + switch n := stack[i].(type) { + case ast.Spec: + spec = n + case ast.Decl: + decl = n + return + } + } + } + + // Check each field name since you can have + // multiple names for the same type expression. + for _, id := range n.Names { + if id.Pos() == pos { + field = n + findEnclosingDeclAndSpec() + found = true + return false + } + } + + // Check *ast.Field itself. This handles embedded + // fields which have no associated *ast.Ident name. + if n.Pos() == pos { + field = n + findEnclosingDeclAndSpec() + found = true + return false + } + + // Also check "X" in "...X". This makes it easy to format variadic + // signature params properly. + // + // TODO(rfindley): I don't understand this comment. How does finding the + // field in this case make it easier to format variadic signature params? 
+ if ell, ok := n.Type.(*ast.Ellipsis); ok && ell.Elt != nil && ell.Elt.Pos() == pos { + field = n + findEnclosingDeclAndSpec() + found = true + return false + } + + case *ast.FuncDecl: + if n.Name.Pos() == pos { + decl = n + found = true + return false + } + + case *ast.GenDecl: + for _, s := range n.Specs { + switch s := s.(type) { + case *ast.TypeSpec: + if s.Name.Pos() == pos { + decl = n + spec = s + found = true + return false + } + case *ast.ValueSpec: + for _, id := range s.Names { + if id.Pos() == pos { + decl = n + spec = s + found = true + return false + } + } + } + } + } + return true + } + for _, file := range files { + ast.Inspect(file, f) + if found { + return decl, spec, field + } + } + + return nil, nil, nil +} + +type promotedField struct { + path string // path (e.g. "x.y" through embedded fields) + field *types.Var +} + +// promotedFields returns the list of accessible promoted fields of a struct type t. +// (Logic plundered from x/tools/cmd/guru/describe.go.) +func promotedFields(t types.Type, from *types.Package) []promotedField { + wantField := func(f *types.Var) bool { + if !accessibleTo(f, from) { + return false + } + // Check that the field is not shadowed. + obj, _, _ := types.LookupFieldOrMethod(t, true, f.Pkg(), f.Name()) + return obj == f + } + + var fields []promotedField + var visit func(t types.Type, stack []*types.Named) + visit = func(t types.Type, stack []*types.Named) { + tStruct, ok := typesinternal.Unpointer(t).Underlying().(*types.Struct) + if !ok { + return + } + fieldloop: + for i := 0; i < tStruct.NumFields(); i++ { + f := tStruct.Field(i) + + // Handle recursion through anonymous fields. + if f.Anonymous() { + if _, named := typesinternal.ReceiverNamed(f); named != nil { + // If we've already visited this named type + // on this path, break the cycle. 
+ for _, x := range stack { + if x.Origin() == named.Origin() { + continue fieldloop + } + } + visit(f.Type(), append(stack, named)) + } + } + + // Save accessible promoted fields. + if len(stack) > 0 && wantField(f) { + var path strings.Builder + for i, t := range stack { + if i > 0 { + path.WriteByte('.') + } + path.WriteString(t.Obj().Name()) + } + fields = append(fields, promotedField{ + path: path.String(), + field: f, + }) + } + } + } + visit(t, nil) + + return fields +} + +func accessibleTo(obj types.Object, pkg *types.Package) bool { + return obj.Exported() || obj.Pkg() == pkg +} + +// computeSizeOffsetInfo reports the size of obj (if a type or struct +// field), its wasted space percentage (if a struct type), and its +// offset (if a struct field). It returns -1 for undefined components. +func computeSizeOffsetInfo(pkg *cache.Package, path []ast.Node, obj types.Object) (size, wasted, offset int64) { + size, wasted, offset = -1, -1, -1 + + var free typeparams.Free + sizes := pkg.TypesSizes() + + // size (types and fields) + if v, ok := obj.(*types.Var); ok && v.IsField() || is[*types.TypeName](obj) { + // If the field's type has free type parameters, + // its size cannot be computed. + if !free.Has(obj.Type()) { + size = sizes.Sizeof(obj.Type()) + } + + // wasted space (struct types) + if tStruct, ok := obj.Type().Underlying().(*types.Struct); ok && is[*types.TypeName](obj) && size > 0 { + var fields []*types.Var + for i := 0; i < tStruct.NumFields(); i++ { + fields = append(fields, tStruct.Field(i)) + } + if len(fields) > 0 { + // Sort into descending (most compact) order + // and recompute size of entire struct. 
+ sort.Slice(fields, func(i, j int) bool { + return sizes.Sizeof(fields[i].Type()) > + sizes.Sizeof(fields[j].Type()) + }) + offsets := sizes.Offsetsof(fields) + compactSize := offsets[len(offsets)-1] + sizes.Sizeof(fields[len(fields)-1].Type()) + wasted = 100 * (size - compactSize) / size + } + } + } + + // offset (fields) + if v, ok := obj.(*types.Var); ok && v.IsField() { + // Find enclosing struct type. + var tStruct *types.Struct + for _, n := range path { + if n, ok := n.(*ast.StructType); ok { + tStruct = pkg.TypesInfo().TypeOf(n).(*types.Struct) + break + } + } + if tStruct != nil { + var fields []*types.Var + for i := 0; i < tStruct.NumFields(); i++ { + f := tStruct.Field(i) + // If any preceding field's type has free type parameters, + // its offset cannot be computed. + if free.Has(f.Type()) { + break + } + fields = append(fields, f) + if f == v { + offsets := sizes.Offsetsof(fields) + offset = offsets[len(offsets)-1] + break + } + } + } + } + + return +} diff --git a/gopls/internal/lsp/source/identifier.go b/gopls/internal/golang/identifier.go similarity index 85% rename from gopls/internal/lsp/source/identifier.go rename to gopls/internal/golang/identifier.go index 57001af930b..30a83d3a05a 100644 --- a/gopls/internal/lsp/source/identifier.go +++ b/gopls/internal/golang/identifier.go @@ -2,14 +2,15 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package source +package golang import ( "errors" "go/ast" "go/types" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/typesinternal" ) // ErrNoIdentFound is error returned when no identifier is found at a particular position @@ -20,29 +21,35 @@ var ErrNoIdentFound = errors.New("no identifier found") // // If no such signature exists, it returns nil. 
func inferredSignature(info *types.Info, id *ast.Ident) *types.Signature { - inst := typeparams.GetInstances(info)[id] - sig, _ := inst.Type.(*types.Signature) + inst := info.Instances[id] + sig, _ := aliases.Unalias(inst.Type).(*types.Signature) return sig } +// searchForEnclosing returns, given the AST path to a SelectorExpr, +// the exported named type of the innermost implicit field selection. +// +// For example, given "new(A).d" where this is (due to embedding) a +// shorthand for "new(A).b.c.d", it returns the named type of c, +// if it is exported, otherwise the type of b, or A. func searchForEnclosing(info *types.Info, path []ast.Node) *types.TypeName { for _, n := range path { switch n := n.(type) { case *ast.SelectorExpr: if sel, ok := info.Selections[n]; ok { - recv := Deref(sel.Recv()) + recv := typesinternal.Unpointer(sel.Recv()) // Keep track of the last exported type seen. var exported *types.TypeName - if named, ok := recv.(*types.Named); ok && named.Obj().Exported() { + if named, ok := aliases.Unalias(recv).(*types.Named); ok && named.Obj().Exported() { exported = named.Obj() } // We don't want the last element, as that's the field or // method itself. for _, index := range sel.Index()[:len(sel.Index())-1] { if r, ok := recv.Underlying().(*types.Struct); ok { - recv = Deref(r.Field(index).Type()) - if named, ok := recv.(*types.Named); ok && named.Obj().Exported() { + recv = typesinternal.Unpointer(r.Field(index).Type()) + if named, ok := aliases.Unalias(recv).(*types.Named); ok && named.Obj().Exported() { exported = named.Obj() } } @@ -59,6 +66,8 @@ func searchForEnclosing(info *types.Info, path []ast.Node) *types.TypeName { // a single non-error result, and ignoring built-in named types. func typeToObject(typ types.Type) *types.TypeName { switch typ := typ.(type) { + case *aliases.Alias: + return typ.Obj() case *types.Named: // TODO(rfindley): this should use typeparams.NamedTypeOrigin. 
return typ.Obj() diff --git a/gopls/internal/lsp/source/identifier_test.go b/gopls/internal/golang/identifier_test.go similarity index 95% rename from gopls/internal/lsp/source/identifier_test.go rename to gopls/internal/golang/identifier_test.go index 7756fe4025b..b1e6d5a75a2 100644 --- a/gopls/internal/lsp/source/identifier_test.go +++ b/gopls/internal/golang/identifier_test.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package source +package golang import ( "bytes" @@ -11,6 +11,8 @@ import ( "go/token" "go/types" "testing" + + "golang.org/x/tools/internal/versions" ) func TestSearchForEnclosing(t *testing.T) { @@ -92,7 +94,7 @@ func posAt(line, column int, fset *token.FileSet, fname string) token.Pos { // newInfo returns a types.Info with all maps populated. func newInfo() *types.Info { - return &types.Info{ + info := &types.Info{ Types: make(map[ast.Expr]types.TypeAndValue), Defs: make(map[*ast.Ident]types.Object), Uses: make(map[*ast.Ident]types.Object), @@ -100,4 +102,6 @@ func newInfo() *types.Info { Selections: make(map[*ast.SelectorExpr]*types.Selection), Scopes: make(map[ast.Node]*types.Scope), } + versions.InitFileVersions(info) + return info } diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go new file mode 100644 index 00000000000..df2f4705130 --- /dev/null +++ b/gopls/internal/golang/implementation.go @@ -0,0 +1,497 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package golang + +import ( + "context" + "errors" + "fmt" + "go/ast" + "go/token" + "go/types" + "reflect" + "sort" + "strings" + "sync" + + "golang.org/x/sync/errgroup" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/methodsets" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/event" +) + +// This file defines the new implementation of the 'implementation' +// operator that does not require type-checker data structures for an +// unbounded number of packages. +// +// TODO(adonovan): +// - Audit to ensure robustness in face of type errors. +// - Eliminate false positives due to 'tricky' cases of the global algorithm. +// - Ensure we have test coverage of: +// type aliases +// nil, PkgName, Builtin (all errors) +// any (empty result) +// method of unnamed interface type (e.g. var x interface { f() }) +// (the global algorithm may find implementations of this type +// but will not include it in the index.) + +// Implementation returns a new sorted array of locations of +// declarations of types that implement (or are implemented by) the +// type referred to at the given position. +// +// If the position denotes a method, the computation is applied to its +// receiver type and then its corresponding methods are returned. +func Implementation(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position) ([]protocol.Location, error) { + ctx, done := event.Start(ctx, "golang.Implementation") + defer done() + + locs, err := implementations(ctx, snapshot, f, pp) + if err != nil { + return nil, err + } + + // Sort and de-duplicate locations. 
+ sort.Slice(locs, func(i, j int) bool { + return protocol.CompareLocation(locs[i], locs[j]) < 0 + }) + out := locs[:0] + for _, loc := range locs { + if len(out) == 0 || out[len(out)-1] != loc { + out = append(out, loc) + } + } + locs = out + + return locs, nil +} + +func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) ([]protocol.Location, error) { + obj, pkg, err := implementsObj(ctx, snapshot, fh.URI(), pp) + if err != nil { + return nil, err + } + + var localPkgs []*cache.Package + if obj.Pos().IsValid() { // no local package for error or error.Error + declPosn := safetoken.StartPosition(pkg.FileSet(), obj.Pos()) + // Type-check the declaring package (incl. variants) for use + // by the "local" search, which uses type information to + // enumerate all types within the package that satisfy the + // query type, even those defined local to a function. + declURI := protocol.URIFromPath(declPosn.Filename) + declMPs, err := snapshot.MetadataForFile(ctx, declURI) + if err != nil { + return nil, err + } + metadata.RemoveIntermediateTestVariants(&declMPs) + if len(declMPs) == 0 { + return nil, fmt.Errorf("no packages for file %s", declURI) + } + ids := make([]PackageID, len(declMPs)) + for i, mp := range declMPs { + ids[i] = mp.ID + } + localPkgs, err = snapshot.TypeCheck(ctx, ids...) + if err != nil { + return nil, err + } + } + + // Is the selected identifier a type name or method? + // (For methods, report the corresponding method names.) + var queryType types.Type + var queryMethodID string + switch obj := obj.(type) { + case *types.TypeName: + queryType = obj.Type() + case *types.Func: + // For methods, use the receiver type, which may be anonymous. 
+ if recv := obj.Type().(*types.Signature).Recv(); recv != nil { + queryType = recv.Type() + queryMethodID = obj.Id() + } + } + if queryType == nil { + return nil, bug.Errorf("%s is not a type or method", obj.Name()) // should have been handled by implementsObj + } + + // Compute the method-set fingerprint used as a key to the global search. + key, hasMethods := methodsets.KeyOf(queryType) + if !hasMethods { + // A type with no methods yields an empty result. + // (No point reporting that every type satisfies 'any'.) + return nil, nil + } + + // The global search needs to look at every package in the + // forward transitive closure of the workspace; see package + // ./methodsets. + // + // For now we do all the type checking before beginning the search. + // TODO(adonovan): opt: search in parallel topological order + // so that we can overlap index lookup with typechecking. + // I suspect a number of algorithms on the result of TypeCheck could + // be optimized by being applied as soon as each package is available. + globalMetas, err := snapshot.AllMetadata(ctx) + if err != nil { + return nil, err + } + metadata.RemoveIntermediateTestVariants(&globalMetas) + globalIDs := make([]PackageID, 0, len(globalMetas)) + + var pkgPath PackagePath + if obj.Pkg() != nil { // nil for error + pkgPath = PackagePath(obj.Pkg().Path()) + } + for _, mp := range globalMetas { + if mp.PkgPath == pkgPath { + continue // declaring package is handled by local implementation + } + globalIDs = append(globalIDs, mp.ID) + } + indexes, err := snapshot.MethodSets(ctx, globalIDs...) + if err != nil { + return nil, fmt.Errorf("querying method sets: %v", err) + } + + // Search local and global packages in parallel. 
+ var ( + group errgroup.Group + locsMu sync.Mutex + locs []protocol.Location + ) + // local search + for _, localPkg := range localPkgs { + localPkg := localPkg + group.Go(func() error { + localLocs, err := localImplementations(ctx, snapshot, localPkg, queryType, queryMethodID) + if err != nil { + return err + } + locsMu.Lock() + locs = append(locs, localLocs...) + locsMu.Unlock() + return nil + }) + } + // global search + for _, index := range indexes { + index := index + group.Go(func() error { + for _, res := range index.Search(key, queryMethodID) { + loc := res.Location + // Map offsets to protocol.Locations in parallel (may involve I/O). + group.Go(func() error { + ploc, err := offsetToLocation(ctx, snapshot, loc.Filename, loc.Start, loc.End) + if err != nil { + return err + } + locsMu.Lock() + locs = append(locs, ploc) + locsMu.Unlock() + return nil + }) + } + return nil + }) + } + if err := group.Wait(); err != nil { + return nil, err + } + + return locs, nil +} + +// offsetToLocation converts an offset-based position to a protocol.Location, +// which requires reading the file. +func offsetToLocation(ctx context.Context, snapshot *cache.Snapshot, filename string, start, end int) (protocol.Location, error) { + uri := protocol.URIFromPath(filename) + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return protocol.Location{}, err // cancelled, perhaps + } + content, err := fh.Content() + if err != nil { + return protocol.Location{}, err // nonexistent or deleted ("can't happen") + } + m := protocol.NewMapper(uri, content) + return m.OffsetLocation(start, end) +} + +// implementsObj returns the object to query for implementations, which is a +// type name or method. +// +// The returned Package is the narrowest package containing ppos, which is the +// package using the resulting obj but not necessarily the declaring package. 
+func implementsObj(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, ppos protocol.Position) (types.Object, *cache.Package, error) { + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, uri) + if err != nil { + return nil, nil, err + } + pos, err := pgf.PositionPos(ppos) + if err != nil { + return nil, nil, err + } + + // This function inherits the limitation of its predecessor in + // requiring the selection to be an identifier (of a type or + // method). But there's no fundamental reason why one could + // not pose this query about any selected piece of syntax that + // has a type and thus a method set. + // (If LSP was more thorough about passing text selections as + // intervals to queries, you could ask about the method set of a + // subexpression such as x.f().) + + // TODO(adonovan): simplify: use objectsAt? + path := pathEnclosingObjNode(pgf.File, pos) + if path == nil { + return nil, nil, ErrNoIdentFound + } + id, ok := path[0].(*ast.Ident) + if !ok { + return nil, nil, ErrNoIdentFound + } + + // Is the object a type or method? Reject other kinds. + obj := pkg.TypesInfo().Uses[id] + if obj == nil { + // Check uses first (unlike ObjectOf) so that T in + // struct{T} is treated as a reference to a type, + // not a declaration of a field. + obj = pkg.TypesInfo().Defs[id] + } + switch obj := obj.(type) { + case *types.TypeName: + // ok + case *types.Func: + if obj.Type().(*types.Signature).Recv() == nil { + return nil, nil, fmt.Errorf("%s is a function, not a method", id.Name) + } + case nil: + return nil, nil, fmt.Errorf("%s denotes unknown object", id.Name) + default: + // e.g. *types.Var -> "var". 
+ kind := strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types.")) + return nil, nil, fmt.Errorf("%s is a %s, not a type", id.Name, kind) + } + + return obj, pkg, nil +} + +// localImplementations searches within pkg for declarations of all +// types that are assignable to/from the query type, and returns a new +// unordered array of their locations. +// +// If methodID is non-empty, the function instead returns the location +// of each type's method (if any) of that ID. +// +// ("Local" refers to the search within the same package, but this +// function's results may include type declarations that are local to +// a function body. The global search index excludes such types +// because reliably naming such types is hard.) +func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, queryType types.Type, methodID string) ([]protocol.Location, error) { + queryType = methodsets.EnsurePointer(queryType) + + // Scan through all type declarations in the syntax. + var locs []protocol.Location + var methodLocs []methodsets.Location + for _, pgf := range pkg.CompiledGoFiles() { + ast.Inspect(pgf.File, func(n ast.Node) bool { + spec, ok := n.(*ast.TypeSpec) + if !ok { + return true // not a type declaration + } + def := pkg.TypesInfo().Defs[spec.Name] + if def == nil { + return true // "can't happen" for types + } + if def.(*types.TypeName).IsAlias() { + return true // skip type aliases to avoid duplicate reporting + } + candidateType := methodsets.EnsurePointer(def.Type()) + + // The historical behavior enshrined by this + // function rejects cases where both are + // (nontrivial) interface types? + // That seems like useful information. + // TODO(adonovan): UX: report I/I pairs too? + // The same question appears in the global algorithm (methodsets). + if !concreteImplementsIntf(candidateType, queryType) { + return true // not assignable + } + + // Ignore types with empty method sets. 
+ // (No point reporting that every type satisfies 'any'.) + mset := types.NewMethodSet(candidateType) + if mset.Len() == 0 { + return true + } + + if methodID == "" { + // Found matching type. + locs = append(locs, mustLocation(pgf, spec.Name)) + return true + } + + // Find corresponding method. + // + // We can't use LookupFieldOrMethod because it requires + // the methodID's types.Package, which we don't know. + // We could recursively search pkg.Imports for it, + // but it's easier to walk the method set. + for i := 0; i < mset.Len(); i++ { + method := mset.At(i).Obj() + if method.Id() == methodID { + posn := safetoken.StartPosition(pkg.FileSet(), method.Pos()) + methodLocs = append(methodLocs, methodsets.Location{ + Filename: posn.Filename, + Start: posn.Offset, + End: posn.Offset + len(method.Name()), + }) + break + } + } + return true + }) + } + + // Finally convert method positions to protocol form by reading the files. + for _, mloc := range methodLocs { + loc, err := offsetToLocation(ctx, snapshot, mloc.Filename, mloc.Start, mloc.End) + if err != nil { + return nil, err + } + locs = append(locs, loc) + } + + // Special case: for types that satisfy error, report builtin.go (see #59527). + if types.Implements(queryType, errorInterfaceType) { + loc, err := errorLocation(ctx, snapshot) + if err != nil { + return nil, err + } + locs = append(locs, loc) + } + + return locs, nil +} + +var errorInterfaceType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) + +// errorLocation returns the location of the 'error' type in builtin.go. 
+func errorLocation(ctx context.Context, snapshot *cache.Snapshot) (protocol.Location, error) { + pgf, err := snapshot.BuiltinFile(ctx) + if err != nil { + return protocol.Location{}, err + } + for _, decl := range pgf.File.Decls { + if decl, ok := decl.(*ast.GenDecl); ok { + for _, spec := range decl.Specs { + if spec, ok := spec.(*ast.TypeSpec); ok && spec.Name.Name == "error" { + return pgf.NodeLocation(spec.Name) + } + } + } + } + return protocol.Location{}, fmt.Errorf("built-in error type not found") +} + +// concreteImplementsIntf returns true if a is an interface type implemented by +// concrete type b, or vice versa. +func concreteImplementsIntf(a, b types.Type) bool { + aIsIntf, bIsIntf := types.IsInterface(a), types.IsInterface(b) + + // Make sure exactly one is an interface type. + if aIsIntf == bIsIntf { + return false + } + + // Rearrange if needed so "a" is the concrete type. + if aIsIntf { + a, b = b, a + } + + // TODO(adonovan): this should really use GenericAssignableTo + // to report (e.g.) "ArrayList[T] implements List[T]", but + // GenericAssignableTo doesn't work correctly on pointers to + // generic named types. Thus the legacy implementation and the + // "local" part of implementations fail to report generics. + // The global algorithm based on subsets does the right thing. + return types.AssignableTo(a, b) +} + +var ( + // TODO(adonovan): why do various RPC handlers related to + // IncomingCalls return (nil, nil) on the protocol in response + // to this error? That seems like a violation of the protocol. + // Is it perhaps a workaround for VSCode behavior? + errNoObjectFound = errors.New("no object found") +) + +// pathEnclosingObjNode returns the AST path to the object-defining +// node associated with pos. "Object-defining" means either an +// *ast.Ident mapped directly to a types.Object or an ast.Node mapped +// implicitly to a types.Object. 
+func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node { + var ( + path []ast.Node + found bool + ) + + ast.Inspect(f, func(n ast.Node) bool { + if found { + return false + } + + if n == nil { + path = path[:len(path)-1] + return false + } + + path = append(path, n) + + switch n := n.(type) { + case *ast.Ident: + // Include the position directly after identifier. This handles + // the common case where the cursor is right after the + // identifier the user is currently typing. Previously we + // handled this by calling astutil.PathEnclosingInterval twice, + // once for "pos" and once for "pos-1". + found = n.Pos() <= pos && pos <= n.End() + case *ast.ImportSpec: + if n.Path.Pos() <= pos && pos < n.Path.End() { + found = true + // If import spec has a name, add name to path even though + // position isn't in the name. + if n.Name != nil { + path = append(path, n.Name) + } + } + case *ast.StarExpr: + // Follow star expressions to the inner identifier. + if pos == n.Star { + pos = n.X.Pos() + } + } + + return !found + }) + + if len(path) == 0 { + return nil + } + + // Reverse path so leaf is first element. + for i := 0; i < len(path)/2; i++ { + path[i], path[len(path)-1-i] = path[len(path)-1-i], path[i] + } + + return path +} diff --git a/gopls/internal/golang/inlay_hint.go b/gopls/internal/golang/inlay_hint.go new file mode 100644 index 00000000000..245a9822b08 --- /dev/null +++ b/gopls/internal/golang/inlay_hint.go @@ -0,0 +1,396 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package golang + +import ( + "context" + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "strings" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/typesutil" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" +) + +const ( + maxLabelLength = 28 +) + +type InlayHintFunc func(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint + +type Hint struct { + Name string + Doc string + Run InlayHintFunc +} + +const ( + ParameterNames = "parameterNames" + AssignVariableTypes = "assignVariableTypes" + ConstantValues = "constantValues" + RangeVariableTypes = "rangeVariableTypes" + CompositeLiteralTypes = "compositeLiteralTypes" + CompositeLiteralFieldNames = "compositeLiteralFields" + FunctionTypeParameters = "functionTypeParameters" +) + +var AllInlayHints = map[string]*Hint{ + AssignVariableTypes: { + Name: AssignVariableTypes, + Doc: "Enable/disable inlay hints for variable types in assign statements:\n```go\n\ti/* int*/, j/* int*/ := 0, len(r)-1\n```", + Run: assignVariableTypes, + }, + ParameterNames: { + Name: ParameterNames, + Doc: "Enable/disable inlay hints for parameter names:\n```go\n\tparseInt(/* str: */ \"123\", /* radix: */ 8)\n```", + Run: parameterNames, + }, + ConstantValues: { + Name: ConstantValues, + Doc: "Enable/disable inlay hints for constant values:\n```go\n\tconst (\n\t\tKindNone Kind = iota/* = 0*/\n\t\tKindPrint/* = 1*/\n\t\tKindPrintf/* = 2*/\n\t\tKindErrorf/* = 3*/\n\t)\n```", + Run: constantValues, + }, + RangeVariableTypes: { + Name: RangeVariableTypes, + Doc: "Enable/disable inlay hints for variable types in range statements:\n```go\n\tfor k/* int*/, v/* string*/ := range []string{} {\n\t\tfmt.Println(k, v)\n\t}\n```", + Run: rangeVariableTypes, + }, + 
CompositeLiteralTypes: { + Name: CompositeLiteralTypes, + Doc: "Enable/disable inlay hints for composite literal types:\n```go\n\tfor _, c := range []struct {\n\t\tin, want string\n\t}{\n\t\t/*struct{ in string; want string }*/{\"Hello, world\", \"dlrow ,olleH\"},\n\t}\n```", + Run: compositeLiteralTypes, + }, + CompositeLiteralFieldNames: { + Name: CompositeLiteralFieldNames, + Doc: "Enable/disable inlay hints for composite literal field names:\n```go\n\t{/*in: */\"Hello, world\", /*want: */\"dlrow ,olleH\"}\n```", + Run: compositeLiteralFields, + }, + FunctionTypeParameters: { + Name: FunctionTypeParameters, + Doc: "Enable/disable inlay hints for implicit type parameters on generic functions:\n```go\n\tmyFoo/*[int, string]*/(1, \"hello\")\n```", + Run: funcTypeParams, + }, +} + +func InlayHint(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pRng protocol.Range) ([]protocol.InlayHint, error) { + ctx, done := event.Start(ctx, "golang.InlayHint") + defer done() + + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, fmt.Errorf("getting file for InlayHint: %w", err) + } + + // Collect a list of the inlay hints that are enabled. + inlayHintOptions := snapshot.Options().InlayHintOptions + var enabledHints []InlayHintFunc + for hint, enabled := range inlayHintOptions.Hints { + if !enabled { + continue + } + if h, ok := AllInlayHints[hint]; ok { + enabledHints = append(enabledHints, h.Run) + } + } + if len(enabledHints) == 0 { + return nil, nil + } + + info := pkg.TypesInfo() + q := typesutil.FileQualifier(pgf.File, pkg.Types(), info) + + // Set the range to the full file if the range is not valid. + start, end := pgf.File.Pos(), pgf.File.End() + if pRng.Start.Line < pRng.End.Line || pRng.Start.Character < pRng.End.Character { + // Adjust start and end for the specified range. 
+ var err error + start, end, err = pgf.RangePos(pRng) + if err != nil { + return nil, err + } + } + + var hints []protocol.InlayHint + ast.Inspect(pgf.File, func(node ast.Node) bool { + // If not in range, we can stop looking. + if node == nil || node.End() < start || node.Pos() > end { + return false + } + for _, fn := range enabledHints { + hints = append(hints, fn(node, pgf.Mapper, pgf.Tok, info, &q)...) + } + return true + }) + return hints, nil +} + +func parameterNames(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { + callExpr, ok := node.(*ast.CallExpr) + if !ok { + return nil + } + signature, ok := typeparams.CoreType(info.TypeOf(callExpr.Fun)).(*types.Signature) + if !ok { + return nil + } + + var hints []protocol.InlayHint + for i, v := range callExpr.Args { + start, err := m.PosPosition(tf, v.Pos()) + if err != nil { + continue + } + params := signature.Params() + // When a function has variadic params, we skip args after + // params.Len(). + if i > params.Len()-1 { + break + } + param := params.At(i) + // param.Name is empty for built-ins like append + if param.Name() == "" { + continue + } + // Skip the parameter name hint if the arg matches + // the parameter name. + if i, ok := v.(*ast.Ident); ok && i.Name == param.Name() { + continue + } + + label := param.Name() + if signature.Variadic() && i == params.Len()-1 { + label = label + "..." 
+ } + hints = append(hints, protocol.InlayHint{ + Position: start, + Label: buildLabel(label + ":"), + Kind: protocol.Parameter, + PaddingRight: true, + }) + } + return hints +} + +func funcTypeParams(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { + ce, ok := node.(*ast.CallExpr) + if !ok { + return nil + } + id, ok := ce.Fun.(*ast.Ident) + if !ok { + return nil + } + inst := info.Instances[id] + if inst.TypeArgs == nil { + return nil + } + start, err := m.PosPosition(tf, id.End()) + if err != nil { + return nil + } + var args []string + for i := 0; i < inst.TypeArgs.Len(); i++ { + args = append(args, inst.TypeArgs.At(i).String()) + } + if len(args) == 0 { + return nil + } + return []protocol.InlayHint{{ + Position: start, + Label: buildLabel("[" + strings.Join(args, ", ") + "]"), + Kind: protocol.Type, + }} +} + +func assignVariableTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint { + stmt, ok := node.(*ast.AssignStmt) + if !ok || stmt.Tok != token.DEFINE { + return nil + } + + var hints []protocol.InlayHint + for _, v := range stmt.Lhs { + if h := variableType(v, m, tf, info, q); h != nil { + hints = append(hints, *h) + } + } + return hints +} + +func rangeVariableTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint { + rStmt, ok := node.(*ast.RangeStmt) + if !ok { + return nil + } + var hints []protocol.InlayHint + if h := variableType(rStmt.Key, m, tf, info, q); h != nil { + hints = append(hints, *h) + } + if h := variableType(rStmt.Value, m, tf, info, q); h != nil { + hints = append(hints, *h) + } + return hints +} + +func variableType(e ast.Expr, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) *protocol.InlayHint { + typ := info.TypeOf(e) + if typ == nil { + return nil + } + end, err := m.PosPosition(tf, e.End()) + if err != nil { + return nil 
+ } + return &protocol.InlayHint{ + Position: end, + Label: buildLabel(types.TypeString(typ, *q)), + Kind: protocol.Type, + PaddingLeft: true, + } +} + +func constantValues(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { + genDecl, ok := node.(*ast.GenDecl) + if !ok || genDecl.Tok != token.CONST { + return nil + } + + var hints []protocol.InlayHint + for _, v := range genDecl.Specs { + spec, ok := v.(*ast.ValueSpec) + if !ok { + continue + } + end, err := m.PosPosition(tf, v.End()) + if err != nil { + continue + } + // Show hints when values are missing or at least one value is not + // a basic literal. + showHints := len(spec.Values) == 0 + checkValues := len(spec.Names) == len(spec.Values) + var values []string + for i, w := range spec.Names { + obj, ok := info.ObjectOf(w).(*types.Const) + if !ok || obj.Val().Kind() == constant.Unknown { + return nil + } + if checkValues { + switch spec.Values[i].(type) { + case *ast.BadExpr: + return nil + case *ast.BasicLit: + default: + if obj.Val().Kind() != constant.Bool { + showHints = true + } + } + } + values = append(values, fmt.Sprintf("%v", obj.Val())) + } + if !showHints || len(values) == 0 { + continue + } + hints = append(hints, protocol.InlayHint{ + Position: end, + Label: buildLabel("= " + strings.Join(values, ", ")), + PaddingLeft: true, + }) + } + return hints +} + +func compositeLiteralFields(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { + compLit, ok := node.(*ast.CompositeLit) + if !ok { + return nil + } + typ := info.TypeOf(compLit) + if typ == nil { + return nil + } + typ = typesinternal.Unpointer(typ) + strct, ok := typeparams.CoreType(typ).(*types.Struct) + if !ok { + return nil + } + + var hints []protocol.InlayHint + var allEdits []protocol.TextEdit + for i, v := range compLit.Elts { + if _, ok := v.(*ast.KeyValueExpr); !ok { + start, err := m.PosPosition(tf, v.Pos()) + if 
err != nil { + continue + } + if i > strct.NumFields()-1 { + break + } + hints = append(hints, protocol.InlayHint{ + Position: start, + Label: buildLabel(strct.Field(i).Name() + ":"), + Kind: protocol.Parameter, + PaddingRight: true, + }) + allEdits = append(allEdits, protocol.TextEdit{ + Range: protocol.Range{Start: start, End: start}, + NewText: strct.Field(i).Name() + ": ", + }) + } + } + // It is not allowed to have a mix of keyed and unkeyed fields, so + // have the text edits add keys to all fields. + for i := range hints { + hints[i].TextEdits = allEdits + } + return hints +} + +func compositeLiteralTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint { + compLit, ok := node.(*ast.CompositeLit) + if !ok { + return nil + } + typ := info.TypeOf(compLit) + if typ == nil { + return nil + } + if compLit.Type != nil { + return nil + } + prefix := "" + if t, ok := typeparams.CoreType(typ).(*types.Pointer); ok { + typ = t.Elem() + prefix = "&" + } + // The type for this composite literal is implicit, add an inlay hint. + start, err := m.PosPosition(tf, compLit.Lbrace) + if err != nil { + return nil + } + return []protocol.InlayHint{{ + Position: start, + Label: buildLabel(fmt.Sprintf("%s%s", prefix, types.TypeString(typ, *q))), + Kind: protocol.Type, + }} +} + +func buildLabel(s string) []protocol.InlayHintLabelPart { + label := protocol.InlayHintLabelPart{ + Value: s, + } + if len(s) > maxLabelLength+len("...") { + label.Value = s[:maxLabelLength] + "..." + } + return []protocol.InlayHintLabelPart{label} +} diff --git a/gopls/internal/golang/inline.go b/gopls/internal/golang/inline.go new file mode 100644 index 00000000000..f3e213c644b --- /dev/null +++ b/gopls/internal/golang/inline.go @@ -0,0 +1,136 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package golang + +// This file defines the refactor.inline code action. + +import ( + "context" + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/refactor/inline" +) + +// EnclosingStaticCall returns the innermost function call enclosing +// the selected range, along with the callee. +func EnclosingStaticCall(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*ast.CallExpr, *types.Func, error) { + path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) + + var call *ast.CallExpr +loop: + for _, n := range path { + switch n := n.(type) { + case *ast.FuncLit: + break loop + case *ast.CallExpr: + call = n + break loop + } + } + if call == nil { + return nil, nil, fmt.Errorf("no enclosing call") + } + if safetoken.Line(pgf.Tok, call.Lparen) != safetoken.Line(pgf.Tok, start) { + return nil, nil, fmt.Errorf("enclosing call is not on this line") + } + fn := typeutil.StaticCallee(pkg.TypesInfo(), call) + if fn == nil { + return nil, nil, fmt.Errorf("not a static call to a Go function") + } + return call, fn, nil +} + +func inlineCall(ctx context.Context, snapshot *cache.Snapshot, callerPkg *cache.Package, callerPGF *parsego.File, start, end token.Pos) (_ *token.FileSet, _ *analysis.SuggestedFix, err error) { + // Find enclosing static call. + call, fn, err := EnclosingStaticCall(callerPkg, callerPGF, start, end) + if err != nil { + return nil, nil, err + } + + // Locate callee by file/line and analyze it. 
+ calleePosn := safetoken.StartPosition(callerPkg.FileSet(), fn.Pos()) + calleePkg, calleePGF, err := NarrowestPackageForFile(ctx, snapshot, protocol.URIFromPath(calleePosn.Filename)) + if err != nil { + return nil, nil, err + } + var calleeDecl *ast.FuncDecl + for _, decl := range calleePGF.File.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok { + posn := safetoken.StartPosition(calleePkg.FileSet(), decl.Name.Pos()) + if posn.Line == calleePosn.Line && posn.Column == calleePosn.Column { + calleeDecl = decl + break + } + } + } + if calleeDecl == nil { + return nil, nil, fmt.Errorf("can't find callee") + } + + // The inliner assumes that input is well-typed, + // but that is frequently not the case within gopls. + // Until we are able to harden the inliner, + // report panics as errors to avoid crashing the server. + bad := func(p *cache.Package) bool { return len(p.ParseErrors())+len(p.TypeErrors()) > 0 } + if bad(calleePkg) || bad(callerPkg) { + defer func() { + if x := recover(); x != nil { + err = fmt.Errorf("inlining failed (%q), likely because inputs were ill-typed", x) + } + }() + } + + // Users can consult the gopls event log to see + // why a particular inlining strategy was chosen. + logf := logger(ctx, "inliner", snapshot.Options().VerboseOutput) + + callee, err := inline.AnalyzeCallee(logf, calleePkg.FileSet(), calleePkg.Types(), calleePkg.TypesInfo(), calleeDecl, calleePGF.Src) + if err != nil { + return nil, nil, err + } + + // Inline the call. 
+ caller := &inline.Caller{ + Fset: callerPkg.FileSet(), + Types: callerPkg.Types(), + Info: callerPkg.TypesInfo(), + File: callerPGF.File, + Call: call, + Content: callerPGF.Src, + } + + got, err := inline.Inline(logf, caller, callee) + if err != nil { + return nil, nil, err + } + + return callerPkg.FileSet(), &analysis.SuggestedFix{ + Message: fmt.Sprintf("inline call of %v", callee), + TextEdits: diffToTextEdits(callerPGF.Tok, diff.Bytes(callerPGF.Src, got)), + }, nil +} + +// TODO(adonovan): change the inliner to instead accept an io.Writer. +func logger(ctx context.Context, name string, verbose bool) func(format string, args ...any) { + if verbose { + return func(format string, args ...any) { + event.Log(ctx, name+": "+fmt.Sprintf(format, args...)) + } + } else { + return func(string, ...any) {} + } +} diff --git a/gopls/internal/lsp/source/inline_all.go b/gopls/internal/golang/inline_all.go similarity index 88% rename from gopls/internal/lsp/source/inline_all.go rename to gopls/internal/golang/inline_all.go index 848b5f7cc08..b6439d83191 100644 --- a/gopls/internal/lsp/source/inline_all.go +++ b/gopls/internal/golang/inline_all.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-package source +package golang import ( "context" @@ -13,9 +13,10 @@ import ( "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/internal/refactor/inline" ) @@ -43,7 +44,7 @@ import ( // // The code below notes where are assumptions are made that only hold true in // the case of parameter removal (annotated with 'Assumption:') -func inlineAllCalls(ctx context.Context, logf func(string, ...any), snapshot Snapshot, pkg Package, pgf *ParsedGoFile, origDecl *ast.FuncDecl, callee *inline.Callee, post func([]byte) []byte) (map[span.URI][]byte, error) { +func inlineAllCalls(ctx context.Context, logf func(string, ...any), snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, origDecl *ast.FuncDecl, callee *inline.Callee, post func([]byte) []byte) (map[protocol.DocumentURI][]byte, error) { // Collect references. var refs []protocol.Location { @@ -66,12 +67,12 @@ func inlineAllCalls(ctx context.Context, logf func(string, ...any), snapshot Sna // parallel and to reduce peak memory for this operation. var ( pkgForRef = make(map[protocol.Location]PackageID) - pkgs = make(map[PackageID]Package) + pkgs = make(map[PackageID]*cache.Package) ) { needPkgs := make(map[PackageID]struct{}) for _, ref := range refs { - md, err := NarrowestMetadataForFile(ctx, snapshot, ref.URI.SpanURI()) + md, err := NarrowestMetadataForFile(ctx, snapshot, ref.URI) if err != nil { return nil, fmt.Errorf("finding ref metadata: %v", err) } @@ -99,21 +100,21 @@ func inlineAllCalls(ctx context.Context, logf func(string, ...any), snapshot Sna // declaration, we must re-type check. 
type fileCalls struct { - pkg Package - pgf *ParsedGoFile + pkg *cache.Package + pgf *parsego.File calls []*ast.CallExpr } - refsByFile := make(map[span.URI]*fileCalls) + refsByFile := make(map[protocol.DocumentURI]*fileCalls) for _, ref := range refs { refpkg := pkgs[pkgForRef[ref]] - pgf, err := refpkg.File(ref.URI.SpanURI()) + pgf, err := refpkg.File(ref.URI) if err != nil { return nil, bug.Errorf("finding %s in %s: %v", ref.URI, refpkg.Metadata().ID, err) } start, end, err := pgf.RangePos(ref.Range) if err != nil { - return nil, bug.Errorf("RangePos(ref): %v", err) + return nil, err // e.g. invalid range } // Look for the surrounding call expression. @@ -137,20 +138,20 @@ func inlineAllCalls(ctx context.Context, logf func(string, ...any), snapshot Sna return nil, fmt.Errorf("cannot inline: found non-call function reference %v", ref) } // Sanity check. - if obj := refpkg.GetTypesInfo().ObjectOf(name); obj == nil || + if obj := refpkg.TypesInfo().ObjectOf(name); obj == nil || obj.Name() != origDecl.Name.Name || obj.Pkg() == nil || obj.Pkg().Path() != string(pkg.Metadata().PkgPath) { return nil, bug.Errorf("cannot inline: corrupted reference %v", ref) } - callInfo, ok := refsByFile[ref.URI.SpanURI()] + callInfo, ok := refsByFile[ref.URI] if !ok { callInfo = &fileCalls{ pkg: refpkg, pgf: pgf, } - refsByFile[ref.URI.SpanURI()] = callInfo + refsByFile[ref.URI] = callInfo } callInfo.calls = append(callInfo.calls, call) } @@ -161,13 +162,13 @@ func inlineAllCalls(ctx context.Context, logf func(string, ...any), snapshot Sna // // Assumption: inlining does not affect the package scope, so we can operate // on separate files independently. 
- result := make(map[span.URI][]byte) + result := make(map[protocol.DocumentURI][]byte) for uri, callInfo := range refsByFile { var ( calls = callInfo.calls fset = callInfo.pkg.FileSet() - tpkg = callInfo.pkg.GetTypes() - tinfo = callInfo.pkg.GetTypesInfo() + tpkg = callInfo.pkg.Types() + tinfo = callInfo.pkg.TypesInfo() file = callInfo.pgf.File content = callInfo.pgf.Src ) @@ -215,7 +216,7 @@ func inlineAllCalls(ctx context.Context, logf func(string, ...any), snapshot Sna // feels sufficiently complicated that, to be safe, this optimization is // deferred until later. - file, err = parser.ParseFile(fset, uri.Filename(), content, parser.ParseComments|parser.SkipObjectResolution) + file, err = parser.ParseFile(fset, uri.Path(), content, parser.ParseComments|parser.SkipObjectResolution) if err != nil { return nil, bug.Errorf("inlined file failed to parse: %v", err) } @@ -229,7 +230,7 @@ func inlineAllCalls(ctx context.Context, logf func(string, ...any), snapshot Sna // anything in the surrounding scope. // // TODO(rfindley): improve this. - tpkg, tinfo, err = reTypeCheck(logf, callInfo.pkg, map[span.URI]*ast.File{uri: file}, true) + tpkg, tinfo, err = reTypeCheck(logf, callInfo.pkg, map[protocol.DocumentURI]*ast.File{uri: file}, true) if err != nil { return nil, bug.Errorf("type checking after inlining failed: %v", err) } diff --git a/gopls/internal/lsp/source/invertifcondition.go b/gopls/internal/golang/invertifcondition.go similarity index 95% rename from gopls/internal/lsp/source/invertifcondition.go rename to gopls/internal/golang/invertifcondition.go index 2b11485c367..377e1ce6186 100644 --- a/gopls/internal/lsp/source/invertifcondition.go +++ b/gopls/internal/golang/invertifcondition.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-package source +package golang import ( "fmt" @@ -13,22 +13,21 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/gopls/internal/util/safetoken" ) // invertIfCondition is a singleFileFixFunc that inverts an if/else statement -func invertIfCondition(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, _ *types.Package, _ *types.Info) (*analysis.SuggestedFix, error) { +func invertIfCondition(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, _ *types.Package, _ *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { ifStatement, _, err := CanInvertIfCondition(file, start, end) if err != nil { - return nil, err + return nil, nil, err } var replaceElse analysis.TextEdit endsWithReturn, err := endsWithReturn(ifStatement.Else) if err != nil { - return nil, err + return nil, nil, err } if endsWithReturn { @@ -72,7 +71,7 @@ func invertIfCondition(fset *token.FileSet, start, end token.Pos, src []byte, fi // Replace the if condition with its inverse inverseCondition, err := invertCondition(fset, ifStatement.Cond, src) if err != nil { - return nil, err + return nil, nil, err } replaceConditionWithInverse := analysis.TextEdit{ Pos: ifStatement.Cond.Pos(), @@ -81,7 +80,7 @@ func invertIfCondition(fset *token.FileSet, start, end token.Pos, src []byte, fi } // Return a SuggestedFix with just that TextEdit in there - return &analysis.SuggestedFix{ + return fset, &analysis.SuggestedFix{ TextEdits: []analysis.TextEdit{ replaceConditionWithInverse, replaceBodyWithElse, @@ -93,7 +92,7 @@ func invertIfCondition(fset *token.FileSet, start, end token.Pos, src []byte, fi func endsWithReturn(elseBranch ast.Stmt) (bool, error) { elseBlock, isBlockStatement := elseBranch.(*ast.BlockStmt) if !isBlockStatement { - return false, fmt.Errorf("Unable to figure out whether this ends with return: %T", 
elseBranch) + return false, fmt.Errorf("unable to figure out whether this ends with return: %T", elseBranch) } if len(elseBlock.List) == 0 { @@ -131,7 +130,7 @@ func invertCondition(fset *token.FileSet, cond ast.Expr, src []byte) ([]byte, er oldText := string(src[condStart.Offset:condEnd.Offset]) switch expr := cond.(type) { - case *ast.Ident, *ast.ParenExpr, *ast.CallExpr, *ast.StarExpr, *ast.IndexExpr, *typeparams.IndexListExpr, *ast.SelectorExpr: + case *ast.Ident, *ast.ParenExpr, *ast.CallExpr, *ast.StarExpr, *ast.IndexExpr, *ast.IndexListExpr, *ast.SelectorExpr: newText := "!" + oldText if oldText == "true" { newText = "false" diff --git a/gopls/internal/lsp/source/known_packages.go b/gopls/internal/golang/known_packages.go similarity index 83% rename from gopls/internal/lsp/source/known_packages.go rename to gopls/internal/golang/known_packages.go index 11134037f14..60a89ca0285 100644 --- a/gopls/internal/lsp/source/known_packages.go +++ b/gopls/internal/golang/known_packages.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package source +package golang import ( "context" @@ -13,6 +13,9 @@ import ( "sync" "time" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/imports" ) @@ -23,7 +26,7 @@ import ( // all dot-free paths (standard packages) appear before dotful ones. // // It is part of the gopls.list_known_packages command. -func KnownPackagePaths(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]PackagePath, error) { +func KnownPackagePaths(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]PackagePath, error) { // This algorithm is expressed in terms of Metadata, not Packages, // so it doesn't cause or wait for type checking. 
@@ -38,15 +41,15 @@ func KnownPackagePaths(ctx context.Context, snapshot Snapshot, fh FileHandle) ([ if err != nil { return nil, err } - file, err := parser.ParseFile(token.NewFileSet(), fh.URI().Filename(), src, parser.ImportsOnly) + file, err := parser.ParseFile(token.NewFileSet(), fh.URI().Path(), src, parser.ImportsOnly) if err != nil { return nil, err } imported := make(map[PackagePath]bool) for _, imp := range file.Imports { - if id := current.DepsByImpPath[UnquoteImportPath(imp)]; id != "" { - if m := snapshot.Metadata(id); m != nil { - imported[m.PkgPath] = true + if id := current.DepsByImpPath[metadata.UnquoteImportPath(imp)]; id != "" { + if mp := snapshot.Metadata(id); mp != nil { + imported[mp.PkgPath] = true } } } @@ -73,7 +76,7 @@ func KnownPackagePaths(ctx context.Context, snapshot Snapshot, fh FileHandle) ([ continue } // make sure internal packages are importable by the file - if !IsValidImport(current.PkgPath, knownPkg.PkgPath) { + if !metadata.IsValidImport(current.PkgPath, knownPkg.PkgPath) { continue } // naive check on cyclical imports @@ -95,7 +98,7 @@ func KnownPackagePaths(ctx context.Context, snapshot Snapshot, fh FileHandle) ([ // TODO(adonovan): what if the actual package path has a vendor/ prefix? seen[PackagePath(ifix.StmtInfo.ImportPath)] = true } - return imports.GetAllCandidates(ctx, wrapped, "", fh.URI().Filename(), string(current.Name), o.Env) + return imports.GetAllCandidates(ctx, wrapped, "", fh.URI().Path(), string(current.Name), o.Env) }); err != nil { // If goimports failed, proceed with just the candidates from the metadata. event.Error(ctx, "imports.GetAllCandidates", err) @@ -128,7 +131,7 @@ func KnownPackagePaths(ctx context.Context, snapshot Snapshot, fh FileHandle) ([ // TODO(adonovan): ensure that metadata graph is always cyclic! // Many algorithms will get confused or even stuck in the // presence of cycles. Then replace this function by 'false'. 
-func isDirectlyCyclical(pkg, imported *Metadata) bool { +func isDirectlyCyclical(pkg, imported *metadata.Package) bool { _, ok := imported.DepsByPkgPath[pkg.PkgPath] return ok } diff --git a/gopls/internal/golang/lines.go b/gopls/internal/golang/lines.go new file mode 100644 index 00000000000..1c4b562280d --- /dev/null +++ b/gopls/internal/golang/lines.go @@ -0,0 +1,261 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +// This file defines refactorings for splitting lists of elements +// (arguments, literals, etc) across multiple lines, and joining +// them into a single line. + +import ( + "bytes" + "go/ast" + "go/token" + "go/types" + "sort" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/gopls/internal/util/slices" +) + +// CanSplitLines checks whether we can split lists of elements inside an enclosing curly bracket/parens into separate +// lines. +func CanSplitLines(file *ast.File, fset *token.FileSet, start, end token.Pos) (string, bool, error) { + itemType, items, comments, _, _, _ := findSplitJoinTarget(fset, file, nil, start, end) + if itemType == "" { + return "", false, nil + } + + if !canSplitJoinLines(items, comments) { + return "", false, nil + } + + for i := 1; i < len(items); i++ { + prevLine := safetoken.EndPosition(fset, items[i-1].End()).Line + curLine := safetoken.StartPosition(fset, items[i].Pos()).Line + if prevLine == curLine { + return "Split " + itemType + " into separate lines", true, nil + } + } + + return "", false, nil +} + +// CanJoinLines checks whether we can join lists of elements inside an enclosing curly bracket/parens into a single line. 
+func CanJoinLines(file *ast.File, fset *token.FileSet, start, end token.Pos) (string, bool, error) { + itemType, items, comments, _, _, _ := findSplitJoinTarget(fset, file, nil, start, end) + if itemType == "" { + return "", false, nil + } + + if !canSplitJoinLines(items, comments) { + return "", false, nil + } + + for i := 1; i < len(items); i++ { + prevLine := safetoken.EndPosition(fset, items[i-1].End()).Line + curLine := safetoken.StartPosition(fset, items[i].Pos()).Line + if prevLine != curLine { + return "Join " + itemType + " into one line", true, nil + } + } + + return "", false, nil +} + +// canSplitJoinLines determines whether we should split/join the lines or not. +func canSplitJoinLines(items []ast.Node, comments []*ast.CommentGroup) bool { + if len(items) <= 1 { + return false + } + + for _, cg := range comments { + if !strings.HasPrefix(cg.List[0].Text, "/*") { + return false // can't split/join lists containing "//" comments + } + } + + return true +} + +// splitLines is a singleFile fixer. +func splitLines(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, _ *types.Package, _ *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { + itemType, items, comments, indent, braceOpen, braceClose := findSplitJoinTarget(fset, file, src, start, end) + if itemType == "" { + return nil, nil, nil // no fix available + } + + return fset, processLines(fset, items, comments, src, braceOpen, braceClose, ",\n", "\n", ",\n"+indent, indent+"\t"), nil +} + +// joinLines is a singleFile fixer. 
+func joinLines(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, _ *types.Package, _ *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { + itemType, items, comments, _, braceOpen, braceClose := findSplitJoinTarget(fset, file, src, start, end) + if itemType == "" { + return nil, nil, nil // no fix available + } + + return fset, processLines(fset, items, comments, src, braceOpen, braceClose, ", ", "", "", ""), nil +} + +// processLines is the common operation for both split and join lines because this split/join operation is +// essentially a transformation of the separating whitespace. +func processLines(fset *token.FileSet, items []ast.Node, comments []*ast.CommentGroup, src []byte, braceOpen, braceClose token.Pos, sep, prefix, suffix, indent string) *analysis.SuggestedFix { + nodes := slices.Clone(items) + + // box *ast.CommentGroup to ast.Node for easier processing later. + for _, cg := range comments { + nodes = append(nodes, cg) + } + + // Sort to interleave comments and nodes. + sort.Slice(nodes, func(i, j int) bool { + return nodes[i].Pos() < nodes[j].Pos() + }) + + edits := []analysis.TextEdit{ + { + Pos: token.Pos(int(braceOpen) + len("{")), + End: nodes[0].Pos(), + NewText: []byte(prefix + indent), + }, + { + Pos: nodes[len(nodes)-1].End(), + End: braceClose, + NewText: []byte(suffix), + }, + } + + for i := 1; i < len(nodes); i++ { + pos, end := nodes[i-1].End(), nodes[i].Pos() + if pos > end { + // this will happen if we have a /*-style comment inside of a Field + // e.g. `a /*comment here */ int` + // + // we will ignore as we only care about finding the field delimiter. + continue + } + + // at this point, the `,` token in between 2 nodes here must be the field delimiter. 
+ posOffset := safetoken.EndPosition(fset, pos).Offset + endOffset := safetoken.StartPosition(fset, end).Offset + if bytes.IndexByte(src[posOffset:endOffset], ',') == -1 { + // nodes[i] or nodes[i-1] is a comment hence no delimiter in between + // in such case, do nothing. + continue + } + + edits = append(edits, analysis.TextEdit{Pos: pos, End: end, NewText: []byte(sep + indent)}) + } + + return &analysis.SuggestedFix{TextEdits: edits} +} + +// findSplitJoinTarget returns the first curly bracket/parens that encloses the current cursor. +func findSplitJoinTarget(fset *token.FileSet, file *ast.File, src []byte, start, end token.Pos) (itemType string, items []ast.Node, comments []*ast.CommentGroup, indent string, open, close token.Pos) { + isCursorInside := func(nodePos, nodeEnd token.Pos) bool { + return nodePos < start && end < nodeEnd + } + + findTarget := func() (targetType string, target ast.Node, open, close token.Pos) { + path, _ := astutil.PathEnclosingInterval(file, start, end) + for _, node := range path { + switch node := node.(type) { + case *ast.FuncDecl: + // target struct method declarations. + // function (...) someMethod(a int, b int, c int) (d int, e, int) {} + params := node.Type.Params + if isCursorInside(params.Opening, params.Closing) { + return "parameters", params, params.Opening, params.Closing + } + + results := node.Type.Results + if results != nil && isCursorInside(results.Opening, results.Closing) { + return "return values", results, results.Opening, results.Closing + } + case *ast.FuncType: + // target function signature args and result. 
+ // type someFunc func (a int, b int, c int) (d int, e int) + params := node.Params + if isCursorInside(params.Opening, params.Closing) { + return "parameters", params, params.Opening, params.Closing + } + + results := node.Results + if results != nil && isCursorInside(results.Opening, results.Closing) { + return "return values", results, results.Opening, results.Closing + } + case *ast.CallExpr: + // target function calls. + // someFunction(a, b, c) + if isCursorInside(node.Lparen, node.Rparen) { + return "parameters", node, node.Lparen, node.Rparen + } + case *ast.CompositeLit: + // target composite lit instantiation (structs, maps, arrays). + // A{b: 1, c: 2, d: 3} + if isCursorInside(node.Lbrace, node.Rbrace) { + return "elements", node, node.Lbrace, node.Rbrace + } + } + } + + return "", nil, 0, 0 + } + + targetType, targetNode, open, close := findTarget() + if targetType == "" { + return "", nil, nil, "", 0, 0 + } + + switch node := targetNode.(type) { + case *ast.FieldList: + for _, field := range node.List { + items = append(items, field) + } + case *ast.CallExpr: + for _, arg := range node.Args { + items = append(items, arg) + } + case *ast.CompositeLit: + for _, arg := range node.Elts { + items = append(items, arg) + } + } + + // preserve comments separately as it's not part of the targetNode AST. + for _, cg := range file.Comments { + if open <= cg.Pos() && cg.Pos() < close { + comments = append(comments, cg) + } + } + + // indent is the leading whitespace before the opening curly bracket/paren. + // + // in case where we don't have access to src yet i.e. src == nil + // it's fine to return incorrect indent because we don't need it yet. 
+ indent = "" + if len(src) > 0 { + var pos token.Pos + switch node := targetNode.(type) { + case *ast.FieldList: + pos = node.Opening + case *ast.CallExpr: + pos = node.Lparen + case *ast.CompositeLit: + pos = node.Lbrace + } + + split := bytes.Split(src, []byte("\n")) + targetLineNumber := safetoken.StartPosition(fset, pos).Line + firstLine := string(split[targetLineNumber-1]) + trimmed := strings.TrimSpace(string(firstLine)) + indent = firstLine[:strings.Index(firstLine, trimmed)] + } + + return targetType, items, comments, indent, open, close +} diff --git a/gopls/internal/lsp/source/linkname.go b/gopls/internal/golang/linkname.go similarity index 84% rename from gopls/internal/lsp/source/linkname.go rename to gopls/internal/golang/linkname.go index 5a727e5c194..7bc25098580 100644 --- a/gopls/internal/lsp/source/linkname.go +++ b/gopls/internal/golang/linkname.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package source +package golang import ( "context" @@ -11,9 +11,11 @@ import ( "go/token" "strings" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/safetoken" ) // ErrNoLinkname is returned by LinknameDefinition when no linkname @@ -23,7 +25,7 @@ var ErrNoLinkname = errors.New("no linkname directive found") // LinknameDefinition finds the definition of the linkname directive in m at pos. // If there is no linkname directive at pos, returns ErrNoLinkname. 
-func LinknameDefinition(ctx context.Context, snapshot Snapshot, m *protocol.Mapper, from protocol.Position) ([]protocol.Location, error) { +func LinknameDefinition(ctx context.Context, snapshot *cache.Snapshot, m *protocol.Mapper, from protocol.Position) ([]protocol.Location, error) { pkgPath, name, _ := parseLinkname(m, from) if pkgPath == "" { return nil, ErrNoLinkname @@ -101,16 +103,16 @@ func parseLinkname(m *protocol.Mapper, pos protocol.Position) (pkgPath, name str // findLinkname searches dependencies of packages containing fh for an object // with linker name matching the given package path and name. -func findLinkname(ctx context.Context, snapshot Snapshot, pkgPath PackagePath, name string) (Package, *ParsedGoFile, token.Pos, error) { +func findLinkname(ctx context.Context, snapshot *cache.Snapshot, pkgPath PackagePath, name string) (*cache.Package, *parsego.File, token.Pos, error) { // Typically the linkname refers to a forward dependency // or a reverse dependency, but in general it may refer // to any package that is linked with this one. 
- var pkgMeta *Metadata + var pkgMeta *metadata.Package metas, err := snapshot.AllMetadata(ctx) if err != nil { return nil, nil, token.NoPos, err } - RemoveIntermediateTestVariants(&metas) + metadata.RemoveIntermediateTestVariants(&metas) for _, meta := range metas { if meta.PkgPath == pkgPath { pkgMeta = meta @@ -128,13 +130,13 @@ func findLinkname(ctx context.Context, snapshot Snapshot, pkgPath PackagePath, n } pkg := pkgs[0] - obj := pkg.GetTypes().Scope().Lookup(name) + obj := pkg.Types().Scope().Lookup(name) if obj == nil { return nil, nil, token.NoPos, fmt.Errorf("package %q does not define %s", pkgPath, name) } objURI := safetoken.StartPosition(pkg.FileSet(), obj.Pos()) - pgf, err := pkg.File(span.URIFromPath(objURI.Filename)) + pgf, err := pkg.File(protocol.URIFromPath(objURI.Filename)) if err != nil { return nil, nil, token.NoPos, err } diff --git a/gopls/internal/golang/origin.go b/gopls/internal/golang/origin.go new file mode 100644 index 00000000000..aa77a9b3aa4 --- /dev/null +++ b/gopls/internal/golang/origin.go @@ -0,0 +1,30 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import "go/types" + +// containsOrigin reports whether the provided object set contains an object +// with the same origin as the provided obj (which may be a synthetic object +// created during instantiation). 
+func containsOrigin(objSet map[types.Object]bool, obj types.Object) bool { + objOrigin := origin(obj) + for target := range objSet { + if origin(target) == objOrigin { + return true + } + } + return false +} + +func origin(obj types.Object) types.Object { + switch obj := obj.(type) { + case *types.Var: + return obj.Origin() + case *types.Func: + return obj.Origin() + } + return obj +} diff --git a/gopls/internal/golang/pkgdoc.go b/gopls/internal/golang/pkgdoc.go new file mode 100644 index 00000000000..852b93e7426 --- /dev/null +++ b/gopls/internal/golang/pkgdoc.go @@ -0,0 +1,633 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +// This file defines a simple HTML rendering of package documentation +// in imitation of the style of pkg.go.dev. +// +// The current implementation is just a starting point and a +// placeholder for a more sophisticated one. +// +// TODO(adonovan): +// - rewrite using html/template. +// Or factor with golang.org/x/pkgsite/internal/godoc/dochtml. +// - emit breadcrumbs for parent + sibling packages. +// - list promoted methods---we have type information! +// - gather Example tests, following go/doc and pkgsite. +// - add option for doc.AllDecls: show non-exported symbols too. +// - abbreviate long signatures by replacing parameters 4 onwards with "...". +// - style the
  • bullets in the index as invisible. +// - add push notifications such as didChange -> reload. +// - there appears to be a maximum file size beyond which the +// "source.doc" code action is not offered. Remove that. +// - modify JS httpGET function to give a transient visual indication +// when clicking a source link that the editor is being navigated +// (in case it doesn't raise itself, like VS Code). + +import ( + "bytes" + "fmt" + "go/ast" + "go/doc" + "go/doc/comment" + "go/format" + "go/token" + "go/types" + "html" + "log" + "path/filepath" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/astutil" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/gopls/internal/util/slices" + "golang.org/x/tools/gopls/internal/util/typesutil" + "golang.org/x/tools/internal/typesinternal" +) + +// RenderPackageDoc formats the package documentation page. +// +// The posURL function returns a URL that when visited, has the side +// effect of causing gopls to direct the client editor to navigate to +// the specified file/line/column position, in UTF-8 coordinates. +// +// The pkgURL function returns a URL for the documentation of the +// specified package and symbol. +func RenderPackageDoc(pkg *cache.Package, posURL func(filename string, line, col8 int) protocol.URI, pkgURL func(path PackagePath, fragment string) protocol.URI) ([]byte, error) { + // We can't use doc.NewFromFiles (even with doc.PreserveAST + // mode) as it calls ast.NewPackage which assumes that each + // ast.File has an ast.Scope and resolves identifiers to + // (deprecated) ast.Objects. (This is golang/go#66290.) + // But doc.New only requires pkg.{Name,Files}, + // so we just boil it down. + // + // The only loss is doc.classifyExamples. + // TODO(adonovan): simulate that too. 
+ fileMap := make(map[string]*ast.File) + for _, f := range pkg.Syntax() { + fileMap[pkg.FileSet().File(f.Pos()).Name()] = f + } + astpkg := &ast.Package{ + Name: pkg.Types().Name(), + Files: fileMap, + } + // PreserveAST mode only half works (golang/go#66449): it still + // mutates ASTs when filtering out non-exported symbols. + // As a workaround, enable AllDecls to suppress filtering, + // and do it ourselves. + mode := doc.PreserveAST | doc.AllDecls + docpkg := doc.New(astpkg, pkg.Types().Path(), mode) + + // Discard non-exported symbols. + // TODO(adonovan): do this conditionally, and expose option in UI. + const showUnexported = false + if !showUnexported { + var ( + unexported = func(name string) bool { return !token.IsExported(name) } + filterValues = func(slice *[]*doc.Value) { + delValue := func(v *doc.Value) bool { + v.Names = slices.DeleteFunc(v.Names, unexported) + return len(v.Names) == 0 + } + *slice = slices.DeleteFunc(*slice, delValue) + } + filterFuncs = func(funcs *[]*doc.Func) { + *funcs = slices.DeleteFunc(*funcs, func(v *doc.Func) bool { + return unexported(v.Name) + }) + } + ) + filterValues(&docpkg.Consts) + filterValues(&docpkg.Vars) + filterFuncs(&docpkg.Funcs) + docpkg.Types = slices.DeleteFunc(docpkg.Types, func(t *doc.Type) bool { + filterValues(&t.Consts) + filterValues(&t.Vars) + filterFuncs(&t.Funcs) + filterFuncs(&t.Methods) + return unexported(t.Name) + }) + } + + var docHTML func(comment string) []byte + { + // Adapt doc comment parser and printer + // to our representation of Go packages + // so that doc links (e.g. "[fmt.Println]") + // become valid links. + + printer := docpkg.Printer() + printer.DocLinkURL = func(link *comment.DocLink) string { + path := pkg.Metadata().PkgPath + if link.ImportPath != "" { + path = PackagePath(link.ImportPath) + } + fragment := link.Name + if link.Recv != "" { + fragment = link.Recv + "." 
+ link.Name + } + return pkgURL(path, fragment) + } + parser := docpkg.Parser() + parser.LookupPackage = func(name string) (importPath string, ok bool) { + // Ambiguous: different files in the same + // package may have different import mappings, + // but the hook doesn't provide the file context. + // TODO(adonovan): conspire with docHTML to + // pass the doc comment's enclosing file through + // a shared variable, so that we can compute + // the correct per-file mapping. + // + // TODO(adonovan): check for PkgName.Name + // matches, but also check for + // PkgName.Imported.Namer matches, since some + // packages are typically imported under a + // non-default name (e.g. pathpkg "path") but + // may be referred to in doc links using their + // canonical name. + for _, f := range pkg.Syntax() { + for _, imp := range f.Imports { + pkgName, ok := typesutil.ImportedPkgName(pkg.TypesInfo(), imp) + if ok && pkgName.Name() == name { + return pkgName.Imported().Path(), true + } + } + } + return "", false + } + parser.LookupSym = func(recv, name string) (ok bool) { + defer func() { + log.Printf("LookupSym %q %q = %t ", recv, name, ok) + }() + // package-level decl? + if recv == "" { + return pkg.Types().Scope().Lookup(name) != nil + } + + // method? + tname, ok := pkg.Types().Scope().Lookup(recv).(*types.TypeName) + if !ok { + return false + } + m, _, _ := types.LookupFieldOrMethod(tname.Type(), true, pkg.Types(), name) + return is[*types.Func](m) + } + docHTML = func(comment string) []byte { + return printer.HTML(parser.Parse(comment)) + } + } + + var buf bytes.Buffer + buf.WriteString(` + + + + + + + +
    Gopls server has terminated. Page is inactive.
    +`) + + escape := html.EscapeString + + // sourceLink returns HTML for a link to open a file in the client editor. + sourceLink := func(text, url string) string { + // The /open URL returns nothing but has the side effect + // of causing the LSP client to open the requested file. + // So we use onclick to prevent the browser from navigating. + // We keep the href attribute as it causes the to render + // as a link: blue, underlined, with URL hover information. + return fmt.Sprintf(`%[2]s`, + escape(url), text) + } + + // objHTML returns HTML for obj.Name(), possibly as a link. + objHTML := func(obj types.Object) string { + text := obj.Name() + if posn := safetoken.StartPosition(pkg.FileSet(), obj.Pos()); posn.IsValid() { + return sourceLink(text, posURL(posn.Filename, posn.Line, posn.Column)) + } + return text + } + + // nodeHTML returns HTML markup for a syntax tree. + // It replaces referring identifiers with links, + // and adds style spans for strings and comments. + nodeHTML := func(n ast.Node) string { + + // linkify returns the appropriate URL (if any) for an identifier. + linkify := func(id *ast.Ident) protocol.URI { + if obj, ok := pkg.TypesInfo().Uses[id]; ok && obj.Pkg() != nil { + // imported package name? + if pkgname, ok := obj.(*types.PkgName); ok { + // TODO(adonovan): do this for Defs of PkgName too. + return pkgURL(PackagePath(pkgname.Imported().Path()), "") + } + + // package-level symbol? + if obj.Parent() == obj.Pkg().Scope() { + if obj.Pkg() == pkg.Types() { + return "#" + obj.Name() // intra-package ref + } else { + return pkgURL(PackagePath(obj.Pkg().Path()), obj.Name()) + } + } + + // method of package-level named type? + if fn, ok := obj.(*types.Func); ok { + sig := fn.Type().(*types.Signature) + if sig.Recv() != nil { + _, named := typesinternal.ReceiverNamed(sig.Recv()) + if named != nil { + fragment := named.Obj().Name() + "." 
+ fn.Name() + return pkgURL(PackagePath(fn.Pkg().Path()), fragment) + } + } + return "" + } + + // TODO(adonovan): field of package-level named struct type. + // (Requires an index, since there's no way to + // get from Var to Named.) + } + return "" + } + + // Splice spans into HTML-escaped segments of the + // original source buffer (which is usually but not + // necessarily formatted). + // + // (For expedience we don't use the more sophisticated + // approach taken by cmd/godoc and pkgsite's render + // package, which emit the text, spans, and comments + // in one traversal of the syntax tree.) + // + // TODO(adonovan): splice styled spans around comments too. + // + // TODO(adonovan): pkgsite prints specs from grouped + // type decls like "type ( T1; T2 )" to make them + // appear as separate decls. We should too. + var buf bytes.Buffer + for _, file := range pkg.CompiledGoFiles() { + if astutil.NodeContains(file.File, n.Pos()) { + pos := n.Pos() + + // emit emits source in the interval [pos:to] and updates pos. + emit := func(to token.Pos) { + // Ident and BasicLit always have a valid pos. + // (Failure means the AST has been corrupted.) + if !to.IsValid() { + bug.Reportf("invalid Pos") + } + start, err := safetoken.Offset(file.Tok, pos) + if err != nil { + bug.Reportf("invalid start Pos: %v", err) + } + end, err := safetoken.Offset(file.Tok, to) + if err != nil { + bug.Reportf("invalid end Pos: %v", err) + } + buf.WriteString(escape(string(file.Src[start:end]))) + pos = to + } + ast.Inspect(n, func(n ast.Node) bool { + switch n := n.(type) { + case *ast.Ident: + emit(n.Pos()) + pos = n.End() + if url := linkify(n); url != "" { + fmt.Fprintf(&buf, "%s", url, escape(n.Name)) + } else { + buf.WriteString(escape(n.Name)) // plain + } + + case *ast.BasicLit: + emit(n.Pos()) + pos = n.End() + fmt.Fprintf(&buf, "%s", escape(n.Value)) + } + return true + }) + emit(n.End()) + return buf.String() + } + } + + // Original source not found. 
+ // Format the node without adornments. + if err := format.Node(&buf, pkg.FileSet(), n); err != nil { + // e.g. BadDecl? + buf.Reset() + fmt.Fprintf(&buf, "formatting error: %v", err) + } + return escape(buf.String()) + } + + // pkgRelative qualifies types by package name alone + pkgRelative := func(other *types.Package) string { + if pkg.Types() == other { + return "" // same package; unqualified + } + return other.Name() + } + + // package name + fmt.Fprintf(&buf, "

    Package %s

    \n", pkg.Types().Name()) + + // import path + fmt.Fprintf(&buf, "
    import %q
    \n", pkg.Types().Path()) + + // link to same package in pkg.go.dev + fmt.Fprintf(&buf, "
    \n", + "https://pkg.go.dev/"+string(pkg.Types().Path())) + + // package doc + fmt.Fprintf(&buf, "
    %s
    \n", docHTML(docpkg.Doc)) + + // symbol index + fmt.Fprintf(&buf, "

    Index

    \n") + fmt.Fprintf(&buf, "
      \n") + if len(docpkg.Consts) > 0 { + fmt.Fprintf(&buf, "
    • Constants
    • \n") + } + if len(docpkg.Vars) > 0 { + fmt.Fprintf(&buf, "
    • Variables
    • \n") + } + scope := pkg.Types().Scope() + for _, fn := range docpkg.Funcs { + obj := scope.Lookup(fn.Name).(*types.Func) + fmt.Fprintf(&buf, "
    • %s
    • \n", + obj.Name(), + escape(types.ObjectString(obj, pkgRelative))) + } + for _, doctype := range docpkg.Types { + tname := scope.Lookup(doctype.Name).(*types.TypeName) + fmt.Fprintf(&buf, "
    • type %[1]s
    • \n", + tname.Name()) + + if len(doctype.Funcs)+len(doctype.Methods) > 0 { + fmt.Fprintf(&buf, "
        \n") + + // constructors + for _, docfn := range doctype.Funcs { + obj := scope.Lookup(docfn.Name).(*types.Func) + fmt.Fprintf(&buf, "
      • %s
      • \n", + docfn.Name, + escape(types.ObjectString(obj, pkgRelative))) + } + // methods + for _, docmethod := range doctype.Methods { + method, _, _ := types.LookupFieldOrMethod(tname.Type(), true, tname.Pkg(), docmethod.Name) + // TODO(adonovan): style: change the . into a space in + // ObjectString's "func (T).M()", and hide unexported + // embedded types. + fmt.Fprintf(&buf, "
      • %s
      • \n", + doctype.Name, + docmethod.Name, + escape(types.ObjectString(method, pkgRelative))) + } + fmt.Fprintf(&buf, "
      \n") + } + } + // TODO(adonovan): add index of Examples here. + fmt.Fprintf(&buf, "
    \n") + + // constants and variables + values := func(vals []*doc.Value) { + for _, v := range vals { + // anchors + for _, name := range v.Names { + fmt.Fprintf(&buf, "\n", escape(name)) + } + + // declaration + decl2 := *v.Decl // shallow copy + decl2.Doc = nil + fmt.Fprintf(&buf, "
    %s
    \n", nodeHTML(&decl2)) + + // comment (if any) + fmt.Fprintf(&buf, "
    %s
    \n", docHTML(v.Doc)) + } + } + fmt.Fprintf(&buf, "

    Constants

    \n") + if len(docpkg.Consts) == 0 { + fmt.Fprintf(&buf, "
    (no constants)
    \n") + } else { + values(docpkg.Consts) + } + fmt.Fprintf(&buf, "

    Variables

    \n") + if len(docpkg.Vars) == 0 { + fmt.Fprintf(&buf, "
    (no variables)
    \n") + } else { + values(docpkg.Vars) + } + + // package-level functions + fmt.Fprintf(&buf, "

    Functions

    \n") + // funcs emits a list of package-level functions, + // possibly organized beneath the type they construct. + funcs := func(funcs []*doc.Func) { + for _, docfn := range funcs { + obj := scope.Lookup(docfn.Name).(*types.Func) + fmt.Fprintf(&buf, "

    func %s

    \n", + docfn.Name, objHTML(obj)) + + // decl: func F(params) results + fmt.Fprintf(&buf, "
    %s
    \n", + nodeHTML(docfn.Decl.Type)) + + // comment (if any) + fmt.Fprintf(&buf, "
    %s
    \n", docHTML(docfn.Doc)) + } + } + funcs(docpkg.Funcs) + + // types and their subelements + fmt.Fprintf(&buf, "

    Types

    \n") + for _, doctype := range docpkg.Types { + tname := scope.Lookup(doctype.Name).(*types.TypeName) + + // title and source link + fmt.Fprintf(&buf, "

    type %s

    \n", doctype.Name, objHTML(tname)) + + // declaration + // TODO(adonovan): excise non-exported struct fields somehow. + decl2 := *doctype.Decl // shallow copy + decl2.Doc = nil + fmt.Fprintf(&buf, "
    %s
    \n", nodeHTML(&decl2)) + + // comment (if any) + fmt.Fprintf(&buf, "
    %s
    \n", docHTML(doctype.Doc)) + + // subelements + values(doctype.Consts) // constants of type T + values(doctype.Vars) // vars of type T + funcs(doctype.Funcs) // constructors of T + + // methods on T + for _, docmethod := range doctype.Methods { + method, _, _ := types.LookupFieldOrMethod(tname.Type(), true, tname.Pkg(), docmethod.Name) + fmt.Fprintf(&buf, "

    func (%s) %s

    \n", + doctype.Name, docmethod.Name, + doctype.Name, objHTML(method)) + + // decl: func (x T) M(params) results + fmt.Fprintf(&buf, "
    %s
    \n", + nodeHTML(docmethod.Decl.Type)) + + // comment (if any) + fmt.Fprintf(&buf, "
    %s
    \n", + docHTML(docmethod.Doc)) + } + } + + // source files + fmt.Fprintf(&buf, "

    Source files

    \n") + for _, filename := range docpkg.Filenames { + fmt.Fprintf(&buf, "
    %s
    \n", + sourceLink(filepath.Base(filename), posURL(filename, 1, 1))) + } + + return buf.Bytes(), nil +} + +// (partly taken from pkgsite's typography.css) +const pkgDocStyle = ` +body { + font-family: Helvetica, Arial, sans-serif; + font-size: 1rem; + line-height: normal; +} + +h1 { + font-size: 1.5rem; +} + +h2 { + font-size: 1.375rem; +} + +h3 { + font-size: 1.25rem; +} + +h4 { + font-size: 1.125rem; +} + +h5 { + font-size: 1rem; +} + +h6 { + font-size: 0.875rem; +} + +h1, +h2, +h3, +h4 { + font-weight: 600; + line-height: 1.25em; + word-break: break-word; +} + +h5, +h6 { + font-weight: 500; + line-height: 1.3em; + word-break: break-word; +} + +p { + font-size: 1rem; + line-height: 1.5rem; + max-width: 60rem; +} + +strong { + font-weight: 600; +} + +code, +pre, +textarea.code { + font-family: Consolas, 'Liberation Mono', Menlo, monospace; + font-size: 0.875rem; + line-height: 1.5em; +} + +pre, +textarea.code { + background-color: #eee; + border: 3px; + border-radius: 3px + color: black; + overflow-x: auto; + padding: 0.625rem; + tab-size: 4; + white-space: pre; +} + +button, +input, +select, +textarea { + font: inherit; +} + +a, +a:link, +a:visited { + color: rgb(0, 125, 156); + text-decoration: none; +} + +a:hover, +a:focus { + color: rgb(0, 125, 156); + text-decoration: underline; +} + +a:hover > * { + text-decoration: underline; +} + +.lit { color: darkgreen; } + +#pkgsite { height: 1.5em; } + +#disconnected { + position: fixed; + top: 1em; + left: 1em; + display: none; /* initially */ + background-color: white; + border: thick solid red; + padding: 2em; +} +` diff --git a/gopls/internal/golang/references.go b/gopls/internal/golang/references.go new file mode 100644 index 00000000000..e5d9f2a4581 --- /dev/null +++ b/gopls/internal/golang/references.go @@ -0,0 +1,698 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package golang + +// This file defines the 'references' query based on a serializable +// index constructed during type checking, thus avoiding the need to +// type-check packages at search time. +// +// See the ./xrefs/ subpackage for the index construction and lookup. +// +// This implementation does not intermingle objects from distinct +// calls to TypeCheck. + +import ( + "context" + "fmt" + "go/ast" + "go/token" + "go/types" + "sort" + "strings" + "sync" + + "golang.org/x/sync/errgroup" + "golang.org/x/tools/go/types/objectpath" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/methodsets" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/event" +) + +// References returns a list of all references (sorted with +// definitions before uses) to the object denoted by the identifier at +// the given file/position, searching the entire workspace. +func References(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position, includeDeclaration bool) ([]protocol.Location, error) { + references, err := references(ctx, snapshot, fh, pp, includeDeclaration) + if err != nil { + return nil, err + } + locations := make([]protocol.Location, len(references)) + for i, ref := range references { + locations[i] = ref.location + } + return locations, nil +} + +// A reference describes an identifier that refers to the same +// object as the subject of a References query. 
+type reference struct { + isDeclaration bool + location protocol.Location + pkgPath PackagePath // of declaring package (same for all elements of the slice) +} + +// references returns a list of all references (sorted with +// definitions before uses) to the object denoted by the identifier at +// the given file/position, searching the entire workspace. +func references(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position, includeDeclaration bool) ([]reference, error) { + ctx, done := event.Start(ctx, "golang.references") + defer done() + + // Is the cursor within the package name declaration? + _, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp) + if err != nil { + return nil, err + } + + var refs []reference + if inPackageName { + refs, err = packageReferences(ctx, snapshot, f.URI()) + } else { + refs, err = ordinaryReferences(ctx, snapshot, f.URI(), pp) + } + if err != nil { + return nil, err + } + + sort.Slice(refs, func(i, j int) bool { + x, y := refs[i], refs[j] + if x.isDeclaration != y.isDeclaration { + return x.isDeclaration // decls < refs + } + return protocol.CompareLocation(x.location, y.location) < 0 + }) + + // De-duplicate by location, and optionally remove declarations. + out := refs[:0] + for _, ref := range refs { + if !includeDeclaration && ref.isDeclaration { + continue + } + if len(out) == 0 || out[len(out)-1].location != ref.location { + out = append(out, ref) + } + } + refs = out + + return refs, nil +} + +// packageReferences returns a list of references to the package +// declaration of the specified name and uri by searching among the +// import declarations of all packages that directly import the target +// package. 
+func packageReferences(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) ([]reference, error) { + metas, err := snapshot.MetadataForFile(ctx, uri) + if err != nil { + return nil, err + } + if len(metas) == 0 { + return nil, fmt.Errorf("found no package containing %s", uri) + } + + var refs []reference + + // Find external references to the package declaration + // from each direct import of the package. + // + // The narrowest package is the most broadly imported, + // so we choose it for the external references. + // + // But if the file ends with _test.go then we need to + // find the package it is testing; there's no direct way + // to do that, so pick a file from the same package that + // doesn't end in _test.go and start over. + narrowest := metas[0] + if narrowest.ForTest != "" && strings.HasSuffix(string(uri), "_test.go") { + for _, f := range narrowest.CompiledGoFiles { + if !strings.HasSuffix(string(f), "_test.go") { + return packageReferences(ctx, snapshot, f) + } + } + // This package has no non-test files. + // Skip the search for external references. + // (Conceivably one could blank-import an empty package, but why?) + } else { + rdeps, err := snapshot.ReverseDependencies(ctx, narrowest.ID, false) // direct + if err != nil { + return nil, err + } + + // Restrict search to workspace packages. 
+ workspace, err := snapshot.WorkspaceMetadata(ctx) + if err != nil { + return nil, err + } + workspaceMap := make(map[PackageID]*metadata.Package, len(workspace)) + for _, mp := range workspace { + workspaceMap[mp.ID] = mp + } + + for _, rdep := range rdeps { + if _, ok := workspaceMap[rdep.ID]; !ok { + continue + } + for _, uri := range rdep.CompiledGoFiles { + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + f, err := snapshot.ParseGo(ctx, fh, parsego.Header) + if err != nil { + return nil, err + } + for _, imp := range f.File.Imports { + if rdep.DepsByImpPath[metadata.UnquoteImportPath(imp)] == narrowest.ID { + refs = append(refs, reference{ + isDeclaration: false, + location: mustLocation(f, imp), + pkgPath: narrowest.PkgPath, + }) + } + } + } + } + } + + // Find internal "references" to the package from + // of each package declaration in the target package itself. + // + // The widest package (possibly a test variant) has the + // greatest number of files and thus we choose it for the + // "internal" references. + widest := metas[len(metas)-1] // may include _test.go files + for _, uri := range widest.CompiledGoFiles { + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + f, err := snapshot.ParseGo(ctx, fh, parsego.Header) + if err != nil { + return nil, err + } + // golang/go#66250: don't crash if the package file lacks a name. + if f.File.Name.Pos().IsValid() { + refs = append(refs, reference{ + isDeclaration: true, // (one of many) + location: mustLocation(f, f.File.Name), + pkgPath: widest.PkgPath, + }) + } + } + + return refs, nil +} + +// ordinaryReferences computes references for all ordinary objects (not package declarations). +func ordinaryReferences(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, pp protocol.Position) ([]reference, error) { + // Strategy: use the reference information computed by the + // type checker to find the declaration. 
First type-check this + // package to find the declaration, then type check the + // declaring package (which may be different), plus variants, + // to find local (in-package) references. + // Global references are satisfied by the index. + + // Strictly speaking, a wider package could provide a different + // declaration (e.g. because the _test.go files can change the + // meaning of a field or method selection), but the narrower + // package reports the more broadly referenced object. + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, uri) + if err != nil { + return nil, err + } + + // Find the selected object (declaration or reference). + // For struct{T}, we choose the field (Def) over the type (Use). + pos, err := pgf.PositionPos(pp) + if err != nil { + return nil, err + } + candidates, _, err := objectsAt(pkg.TypesInfo(), pgf.File, pos) + if err != nil { + return nil, err + } + + // Pick first object arbitrarily. + // The case variables of a type switch have different + // types but that difference is immaterial here. + var obj types.Object + for obj = range candidates { + break + } + if obj == nil { + return nil, ErrNoIdentFound // can't happen + } + + // nil, error, error.Error, iota, or other built-in? + if obj.Pkg() == nil { + return nil, fmt.Errorf("references to builtin %q are not supported", obj.Name()) + } + if !obj.Pos().IsValid() { + if obj.Pkg().Path() != "unsafe" { + bug.Reportf("references: object %v has no position", obj) + } + return nil, fmt.Errorf("references to unsafe.%s are not supported", obj.Name()) + } + + // Find metadata of all packages containing the object's defining file. + // This may include the query pkg, and possibly other variants. 
+ declPosn := safetoken.StartPosition(pkg.FileSet(), obj.Pos()) + declURI := protocol.URIFromPath(declPosn.Filename) + variants, err := snapshot.MetadataForFile(ctx, declURI) + if err != nil { + return nil, err + } + if len(variants) == 0 { + return nil, fmt.Errorf("no packages for file %q", declURI) // can't happen + } + // (variants must include ITVs for reverse dependency computation below.) + + // Is object exported? + // If so, compute scope and targets of the global search. + var ( + globalScope = make(map[PackageID]*metadata.Package) // (excludes ITVs) + globalTargets map[PackagePath]map[objectpath.Path]unit + expansions = make(map[PackageID]unit) // packages that caused search expansion + ) + // TODO(adonovan): what about generic functions? Need to consider both + // uninstantiated and instantiated. The latter have no objectpath. Use Origin? + if path, err := objectpath.For(obj); err == nil && obj.Exported() { + pkgPath := variants[0].PkgPath // (all variants have same package path) + globalTargets = map[PackagePath]map[objectpath.Path]unit{ + pkgPath: {path: {}}, // primary target + } + + // Compute set of (non-ITV) workspace packages. + // We restrict references to this subset. + workspace, err := snapshot.WorkspaceMetadata(ctx) + if err != nil { + return nil, err + } + workspaceMap := make(map[PackageID]*metadata.Package, len(workspace)) + workspaceIDs := make([]PackageID, 0, len(workspace)) + for _, mp := range workspace { + workspaceMap[mp.ID] = mp + workspaceIDs = append(workspaceIDs, mp.ID) + } + + // addRdeps expands the global scope to include the + // reverse dependencies of the specified package. + addRdeps := func(id PackageID, transitive bool) error { + rdeps, err := snapshot.ReverseDependencies(ctx, id, transitive) + if err != nil { + return err + } + for rdepID, rdep := range rdeps { + // Skip non-workspace packages. 
+ // + // This means we also skip any expansion of the + // search that might be caused by a non-workspace + // package, possibly causing us to miss references + // to the expanded target set from workspace packages. + // + // TODO(adonovan): don't skip those expansions. + // The challenge is how to so without type-checking + // a lot of non-workspace packages not covered by + // the initial workspace load. + if _, ok := workspaceMap[rdepID]; !ok { + continue + } + + globalScope[rdepID] = rdep + } + return nil + } + + // How far need we search? + // For package-level objects, we need only search the direct importers. + // For fields and methods, we must search transitively. + transitive := obj.Pkg().Scope().Lookup(obj.Name()) != obj + + // The scope is the union of rdeps of each variant. + // (Each set is disjoint so there's no benefit to + // combining the metadata graph traversals.) + for _, mp := range variants { + if err := addRdeps(mp.ID, transitive); err != nil { + return nil, err + } + } + + // Is object a method? + // + // If so, expand the search so that the targets include + // all methods that correspond to it through interface + // satisfaction, and the scope includes the rdeps of + // the package that declares each corresponding type. + // + // 'expansions' records the packages that declared + // such types. + if recv := effectiveReceiver(obj); recv != nil { + if err := expandMethodSearch(ctx, snapshot, workspaceIDs, obj.(*types.Func), recv, addRdeps, globalTargets, expansions); err != nil { + return nil, err + } + } + } + + // The search functions will call report(loc) for each hit. 
+ var ( + refsMu sync.Mutex + refs []reference + ) + report := func(loc protocol.Location, isDecl bool) { + ref := reference{ + isDeclaration: isDecl, + location: loc, + pkgPath: pkg.Metadata().PkgPath, + } + refsMu.Lock() + refs = append(refs, ref) + refsMu.Unlock() + } + + // Loop over the variants of the declaring package, + // and perform both the local (in-package) and global + // (cross-package) searches, in parallel. + // + // TODO(adonovan): opt: support LSP reference streaming. See: + // - https://github.com/microsoft/vscode-languageserver-node/pull/164 + // - https://github.com/microsoft/language-server-protocol/pull/182 + // + // Careful: this goroutine must not return before group.Wait. + var group errgroup.Group + + // Compute local references for each variant. + // The target objects are identified by (URI, offset). + for _, mp := range variants { + // We want the ordinary importable package, + // plus any test-augmented variants, since + // declarations in _test.go files may change + // the reference of a selection, or even a + // field into a method or vice versa. + // + // But we don't need intermediate test variants, + // as their local references will be covered + // already by other variants. + if mp.IsIntermediateTestVariant() { + continue + } + mp := mp + group.Go(func() error { + // TODO(adonovan): opt: batch these TypeChecks. + pkgs, err := snapshot.TypeCheck(ctx, mp.ID) + if err != nil { + return err + } + pkg := pkgs[0] + + // Find the declaration of the corresponding + // object in this package based on (URI, offset). + pgf, err := pkg.File(declURI) + if err != nil { + return err + } + pos, err := safetoken.Pos(pgf.Tok, declPosn.Offset) + if err != nil { + return err + } + objects, _, err := objectsAt(pkg.TypesInfo(), pgf.File, pos) + if err != nil { + return err // unreachable? (probably caught earlier) + } + + // Report the locations of the declaration(s). + // TODO(adonovan): what about for corresponding methods? Add tests. 
+ for _, node := range objects { + report(mustLocation(pgf, node), true) + } + + // Convert targets map to set. + targets := make(map[types.Object]bool) + for obj := range objects { + targets[obj] = true + } + + return localReferences(pkg, targets, true, report) + }) + } + + // Also compute local references within packages that declare + // corresponding methods (see above), which expand the global search. + // The target objects are identified by (PkgPath, objectpath). + for id := range expansions { + id := id + group.Go(func() error { + // TODO(adonovan): opt: batch these TypeChecks. + pkgs, err := snapshot.TypeCheck(ctx, id) + if err != nil { + return err + } + pkg := pkgs[0] + + targets := make(map[types.Object]bool) + for objpath := range globalTargets[pkg.Metadata().PkgPath] { + obj, err := objectpath.Object(pkg.Types(), objpath) + if err != nil { + // No such object, because it was + // declared only in the test variant. + continue + } + targets[obj] = true + } + + // Don't include corresponding types or methods + // since expansions did that already, and we don't + // want (e.g.) concrete -> interface -> concrete. + const correspond = false + return localReferences(pkg, targets, correspond, report) + }) + } + + // Compute global references for selected reverse dependencies. + group.Go(func() error { + var globalIDs []PackageID + for id := range globalScope { + globalIDs = append(globalIDs, id) + } + indexes, err := snapshot.References(ctx, globalIDs...) + if err != nil { + return err + } + for _, index := range indexes { + for _, loc := range index.Lookup(globalTargets) { + report(loc, false) + } + } + return nil + }) + + if err := group.Wait(); err != nil { + return nil, err + } + return refs, nil +} + +// expandMethodSearch expands the scope and targets of a global search +// for an exported method to include all methods in the workspace +// that correspond to it through interface satisfaction. 
+// +// Each package that declares a corresponding type is added to +// expansions so that we can also find local references to the type +// within the package, which of course requires type checking. +// +// The scope is expanded by a sequence of calls (not concurrent) to addRdeps. +// +// recv is the method's effective receiver type, for method-set computations. +func expandMethodSearch(ctx context.Context, snapshot *cache.Snapshot, workspaceIDs []PackageID, method *types.Func, recv types.Type, addRdeps func(id PackageID, transitive bool) error, targets map[PackagePath]map[objectpath.Path]unit, expansions map[PackageID]unit) error { + // Compute the method-set fingerprint used as a key to the global search. + key, hasMethods := methodsets.KeyOf(recv) + if !hasMethods { + return bug.Errorf("KeyOf(%s)={} yet %s is a method", recv, method) + } + // Search the methodset index of each package in the workspace. + indexes, err := snapshot.MethodSets(ctx, workspaceIDs...) + if err != nil { + return err + } + var mu sync.Mutex // guards addRdeps, targets, expansions + var group errgroup.Group + for i, index := range indexes { + i := i + index := index + group.Go(func() error { + // Consult index for matching methods. + results := index.Search(key, method.Name()) + if len(results) == 0 { + return nil + } + + // We have discovered one or more corresponding types. + id := workspaceIDs[i] + + mu.Lock() + defer mu.Unlock() + + // Expand global search scope to include rdeps of this pkg. + if err := addRdeps(id, true); err != nil { + return err + } + + // Mark this package so that we search within it for + // local references to the additional types/methods. + expansions[id] = unit{} + + // Add each corresponding method to the set of global search targets. 
+ for _, res := range results { + methodPkg := PackagePath(res.PkgPath) + opaths, ok := targets[methodPkg] + if !ok { + opaths = make(map[objectpath.Path]unit) + targets[methodPkg] = opaths + } + opaths[res.ObjectPath] = unit{} + } + return nil + }) + } + return group.Wait() +} + +// localReferences traverses syntax and reports each reference to one +// of the target objects, or (if correspond is set) an object that +// corresponds to one of them via interface satisfaction. +func localReferences(pkg *cache.Package, targets map[types.Object]bool, correspond bool, report func(loc protocol.Location, isDecl bool)) error { + // If we're searching for references to a method optionally + // broaden the search to include references to corresponding + // methods of mutually assignable receiver types. + // (We use a slice, but objectsAt never returns >1 methods.) + var methodRecvs []types.Type + var methodName string // name of an arbitrary target, iff a method + if correspond { + for obj := range targets { + if t := effectiveReceiver(obj); t != nil { + methodRecvs = append(methodRecvs, t) + methodName = obj.Name() + } + } + } + + // matches reports whether obj either is or corresponds to a target. + // (Correspondence is defined as usual for interface methods.) + matches := func(obj types.Object) bool { + if containsOrigin(targets, obj) { + return true + } + if methodRecvs != nil && obj.Name() == methodName { + if orecv := effectiveReceiver(obj); orecv != nil { + for _, mrecv := range methodRecvs { + if concreteImplementsIntf(orecv, mrecv) { + return true + } + } + } + } + return false + } + + // Scan through syntax looking for uses of one of the target objects. 
+ for _, pgf := range pkg.CompiledGoFiles() { + ast.Inspect(pgf.File, func(n ast.Node) bool { + if id, ok := n.(*ast.Ident); ok { + if obj, ok := pkg.TypesInfo().Uses[id]; ok && matches(obj) { + report(mustLocation(pgf, id), false) + } + } + return true + }) + } + return nil +} + +// effectiveReceiver returns the effective receiver type for method-set +// comparisons for obj, if it is a method, or nil otherwise. +func effectiveReceiver(obj types.Object) types.Type { + if fn, ok := obj.(*types.Func); ok { + if recv := fn.Type().(*types.Signature).Recv(); recv != nil { + return methodsets.EnsurePointer(recv.Type()) + } + } + return nil +} + +// objectsAt returns the non-empty set of objects denoted (def or use) +// by the specified position within a file syntax tree, or an error if +// none were found. +// +// The result may contain more than one element because all case +// variables of a type switch appear to be declared at the same +// position. +// +// Each object is mapped to the syntax node that was treated as an +// identifier, which is not always an ast.Ident. The second component +// of the result is the innermost node enclosing pos. +// +// TODO(adonovan): factor in common with referencedObject. +func objectsAt(info *types.Info, file *ast.File, pos token.Pos) (map[types.Object]ast.Node, ast.Node, error) { + path := pathEnclosingObjNode(file, pos) + if path == nil { + return nil, nil, ErrNoIdentFound + } + + targets := make(map[types.Object]ast.Node) + + switch leaf := path[0].(type) { + case *ast.Ident: + // If leaf represents an implicit type switch object or the type + // switch "assign" variable, expand to all of the type switch's + // implicit objects. 
+ if implicits, _ := typeSwitchImplicits(info, path); len(implicits) > 0 { + for _, obj := range implicits { + targets[obj] = leaf + } + } else { + // Note: prior to go1.21, go/types issue #60372 causes the position + // a field Var T created for struct{*p.T} to be recorded at the + // start of the field type ("*") not the location of the T. + // This affects references and other gopls operations (issue #60369). + // TODO(adonovan): delete this comment when we drop support for go1.20. + + // For struct{T}, we prefer the defined field Var over the used TypeName. + obj := info.ObjectOf(leaf) + if obj == nil { + return nil, nil, fmt.Errorf("%w for %q", errNoObjectFound, leaf.Name) + } + targets[obj] = leaf + } + case *ast.ImportSpec: + // Look up the implicit *types.PkgName. + obj := info.Implicits[leaf] + if obj == nil { + return nil, nil, fmt.Errorf("%w for import %s", errNoObjectFound, metadata.UnquoteImportPath(leaf)) + } + targets[obj] = leaf + } + + if len(targets) == 0 { + return nil, nil, fmt.Errorf("objectAt: internal error: no targets") // can't happen + } + return targets, path[0], nil +} + +// mustLocation reports the location interval a syntax node, +// which must belong to m.File. +// +// Safe for use only by references and implementations. +func mustLocation(pgf *parsego.File, n ast.Node) protocol.Location { + loc, err := pgf.NodeLocation(n) + if err != nil { + panic(err) // can't happen in references or implementations + } + return loc +} diff --git a/gopls/internal/golang/rename.go b/gopls/internal/golang/rename.go new file mode 100644 index 00000000000..4251a0f83da --- /dev/null +++ b/gopls/internal/golang/rename.go @@ -0,0 +1,1438 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +// TODO(adonovan): +// +// - method of generic concrete type -> arbitrary instances of same +// +// - make satisfy work across packages. 
+// +// - tests, tests, tests: +// - play with renamings in the k8s tree. +// - generics +// - error cases (e.g. conflicts) +// - renaming a symbol declared in the module cache +// (currently proceeds with half of the renaming!) +// - make sure all tests have both a local and a cross-package analogue. +// - look at coverage +// - special cases: embedded fields, interfaces, test variants, +// function-local things with uppercase names; +// packages with type errors (currently 'satisfy' rejects them), +// package with missing imports; +// +// - measure performance in k8s. +// +// - The original gorename tool assumed well-typedness, but the gopls feature +// does no such check (which actually makes it much more useful). +// Audit to ensure it is safe on ill-typed code. +// +// - Generics support was no doubt buggy before but incrementalization +// may have exacerbated it. If the problem were just about objects, +// defs and uses it would be fairly simple, but type assignability +// comes into play in the 'satisfy' check for method renamings. +// De-instantiating Vector[int] to Vector[T] changes its type. +// We need to come up with a theory for the satisfy check that +// works with generics, and across packages. We currently have no +// simple way to pass types between packages (think: objectpath for +// types), though presumably exportdata could be pressed into service. +// +// - FileID-based de-duplication of edits to different URIs for the same file. 
+ +import ( + "context" + "errors" + "fmt" + "go/ast" + "go/token" + "go/types" + "path" + "path/filepath" + "regexp" + "sort" + "strconv" + "strings" + + "golang.org/x/mod/modfile" + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/types/objectpath" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/refactor/satisfy" +) + +// A renamer holds state of a single call to renameObj, which renames +// an object (or several coupled objects) within a single type-checked +// syntax package. +type renamer struct { + pkg *cache.Package // the syntax package in which the renaming is applied + objsToUpdate map[types.Object]bool // records progress of calls to check + conflicts []string + from, to string + satisfyConstraints map[satisfy.Constraint]bool + msets typeutil.MethodSetCache + changeMethods bool +} + +// A PrepareItem holds the result of a "prepare rename" operation: +// the source range and value of a selected identifier. +type PrepareItem struct { + Range protocol.Range + Text string +} + +// PrepareRename searches for a valid renaming at position pp. +// +// The returned usererr is intended to be displayed to the user to explain why +// the prepare fails. Probably we could eliminate the redundancy in returning +// two errors, but for now this is done defensively. 
+func PrepareRename(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position) (_ *PrepareItem, usererr, err error) { + ctx, done := event.Start(ctx, "golang.PrepareRename") + defer done() + + // Is the cursor within the package name declaration? + if pgf, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp); err != nil { + return nil, err, err + } else if inPackageName { + item, err := prepareRenamePackageName(ctx, snapshot, pgf) + return item, err, err + } + + // Ordinary (non-package) renaming. + // + // Type-check the current package, locate the reference at the position, + // validate the object, and report its name and range. + // + // TODO(adonovan): in all cases below, we return usererr=nil, + // which means we return (nil, nil) at the protocol + // layer. This seems like a bug, or at best an exploitation of + // knowledge of VSCode-specific behavior. Can we avoid that? + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, f.URI()) + if err != nil { + return nil, nil, err + } + pos, err := pgf.PositionPos(pp) + if err != nil { + return nil, nil, err + } + targets, node, err := objectsAt(pkg.TypesInfo(), pgf.File, pos) + if err != nil { + return nil, nil, err + } + var obj types.Object + for obj = range targets { + break // pick one arbitrarily + } + if err := checkRenamable(obj); err != nil { + return nil, nil, err + } + rng, err := pgf.NodeRange(node) + if err != nil { + return nil, nil, err + } + if _, isImport := node.(*ast.ImportSpec); isImport { + // We're not really renaming the import path. + rng.End = rng.Start + } + return &PrepareItem{ + Range: rng, + Text: obj.Name(), + }, nil, nil +} + +func prepareRenamePackageName(ctx context.Context, snapshot *cache.Snapshot, pgf *parsego.File) (*PrepareItem, error) { + // Does the client support file renaming? 
+ fileRenameSupported := false + for _, op := range snapshot.Options().SupportedResourceOperations { + if op == protocol.Rename { + fileRenameSupported = true + break + } + } + if !fileRenameSupported { + return nil, errors.New("can't rename package: LSP client does not support file renaming") + } + + // Check validity of the metadata for the file's containing package. + meta, err := NarrowestMetadataForFile(ctx, snapshot, pgf.URI) + if err != nil { + return nil, err + } + if meta.Name == "main" { + return nil, fmt.Errorf("can't rename package \"main\"") + } + if strings.HasSuffix(string(meta.Name), "_test") { + return nil, fmt.Errorf("can't rename x_test packages") + } + if meta.Module == nil { + return nil, fmt.Errorf("can't rename package: missing module information for package %q", meta.PkgPath) + } + if meta.Module.Path == string(meta.PkgPath) { + return nil, fmt.Errorf("can't rename package: package path %q is the same as module path %q", meta.PkgPath, meta.Module.Path) + } + + // Return the location of the package declaration. + rng, err := pgf.NodeRange(pgf.File.Name) + if err != nil { + return nil, err + } + return &PrepareItem{ + Range: rng, + Text: string(meta.Name), + }, nil +} + +func checkRenamable(obj types.Object) error { + switch obj := obj.(type) { + case *types.Var: + if obj.Embedded() { + return fmt.Errorf("can't rename embedded fields: rename the type directly or name the field") + } + case *types.Builtin, *types.Nil: + return fmt.Errorf("%s is built in and cannot be renamed", obj.Name()) + } + if obj.Pkg() == nil || obj.Pkg().Path() == "unsafe" { + // e.g. error.Error, unsafe.Pointer + return fmt.Errorf("%s is built in and cannot be renamed", obj.Name()) + } + if obj.Name() == "_" { + return errors.New("can't rename \"_\"") + } + return nil +} + +// Rename returns a map of TextEdits for each file modified when renaming a +// given identifier within a package and a boolean value of true for renaming +// package and false otherwise. 
+func Rename(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position, newName string) (map[protocol.DocumentURI][]protocol.TextEdit, bool, error) { + ctx, done := event.Start(ctx, "golang.Rename") + defer done() + + if !isValidIdentifier(newName) { + return nil, false, fmt.Errorf("invalid identifier to rename: %q", newName) + } + + // Cursor within package name declaration? + _, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp) + if err != nil { + return nil, false, err + } + + var editMap map[protocol.DocumentURI][]diff.Edit + if inPackageName { + editMap, err = renamePackageName(ctx, snapshot, f, PackageName(newName)) + } else { + editMap, err = renameOrdinary(ctx, snapshot, f, pp, newName) + } + if err != nil { + return nil, false, err + } + + // Convert edits to protocol form. + result := make(map[protocol.DocumentURI][]protocol.TextEdit) + for uri, edits := range editMap { + // Sort and de-duplicate edits. + // + // Overlapping edits may arise in local renamings (due + // to type switch implicits) and globals ones (due to + // processing multiple package variants). + // + // We assume renaming produces diffs that are all + // replacements (no adjacent insertions that might + // become reordered) and that are either identical or + // non-overlapping. + diff.SortEdits(edits) + filtered := edits[:0] + for i, edit := range edits { + if i == 0 || edit != filtered[len(filtered)-1] { + filtered = append(filtered, edit) + } + } + edits = filtered + + // TODO(adonovan): the logic above handles repeat edits to the + // same file URI (e.g. as a member of package p and p_test) but + // is not sufficient to handle file-system level aliasing arising + // from symbolic or hard links. For that, we should use a + // robustio-FileID-keyed map. + // See https://go.dev/cl/457615 for example. + // This really occurs in practice, e.g. kubernetes has + // vendor/k8s.io/kubectl -> ../../staging/src/k8s.io/kubectl. 
+ fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return nil, false, err + } + data, err := fh.Content() + if err != nil { + return nil, false, err + } + m := protocol.NewMapper(uri, data) + protocolEdits, err := protocol.EditsFromDiffEdits(m, edits) + if err != nil { + return nil, false, err + } + result[uri] = protocolEdits + } + + return result, inPackageName, nil +} + +// renameOrdinary renames an ordinary (non-package) name throughout the workspace. +func renameOrdinary(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position, newName string) (map[protocol.DocumentURI][]diff.Edit, error) { + // Type-check the referring package and locate the object(s). + // + // Unlike NarrowestPackageForFile, this operation prefers the + // widest variant as, for non-exported identifiers, it is the + // only package we need. (In case you're wondering why + // 'references' doesn't also want the widest variant: it + // computes the union across all variants.) + var targets map[types.Object]ast.Node + var pkg *cache.Package + { + mps, err := snapshot.MetadataForFile(ctx, f.URI()) + if err != nil { + return nil, err + } + metadata.RemoveIntermediateTestVariants(&mps) + if len(mps) == 0 { + return nil, fmt.Errorf("no package metadata for file %s", f.URI()) + } + widest := mps[len(mps)-1] // widest variant may include _test.go files + pkgs, err := snapshot.TypeCheck(ctx, widest.ID) + if err != nil { + return nil, err + } + pkg = pkgs[0] + pgf, err := pkg.File(f.URI()) + if err != nil { + return nil, err // "can't happen" + } + pos, err := pgf.PositionPos(pp) + if err != nil { + return nil, err + } + objects, _, err := objectsAt(pkg.TypesInfo(), pgf.File, pos) + if err != nil { + return nil, err + } + targets = objects + } + + // Pick a representative object arbitrarily. + // (All share the same name, pos, and kind.) 
+ var obj types.Object + for obj = range targets { + break + } + if obj.Name() == newName { + return nil, fmt.Errorf("old and new names are the same: %s", newName) + } + if err := checkRenamable(obj); err != nil { + return nil, err + } + + // Find objectpath, if object is exported ("" otherwise). + var declObjPath objectpath.Path + if obj.Exported() { + // objectpath.For requires the origin of a generic function or type, not an + // instantiation (a bug?). + // + // Note that unlike Funcs, TypeNames are always canonical (they are "left" + // of the type parameters, unlike methods). + switch obj.(type) { // avoid "obj :=" since cases reassign the var + case *types.TypeName: + if _, ok := aliases.Unalias(obj.Type()).(*types.TypeParam); ok { + // As with capitalized function parameters below, type parameters are + // local. + goto skipObjectPath + } + case *types.Func: + obj = obj.(*types.Func).Origin() + case *types.Var: + // TODO(adonovan): do vars need the origin treatment too? (issue #58462) + + // Function parameter and result vars that are (unusually) + // capitalized are technically exported, even though they + // cannot be referenced, because they may affect downstream + // error messages. But we can safely treat them as local. + // + // This is not merely an optimization: the renameExported + // operation gets confused by such vars. It finds them from + // objectpath, the classifies them as local vars, but as + // they came from export data they lack syntax and the + // correct scope tree (issue #61294). + if !obj.(*types.Var).IsField() && !isPackageLevel(obj) { + goto skipObjectPath + } + } + if path, err := objectpath.For(obj); err == nil { + declObjPath = path + } + skipObjectPath: + } + + // Nonexported? Search locally. + if declObjPath == "" { + var objects []types.Object + for obj := range targets { + objects = append(objects, obj) + } + editMap, _, err := renameObjects(newName, pkg, objects...) 
+ return editMap, err + } + + // Exported: search globally. + // + // For exported package-level var/const/func/type objects, the + // search scope is just the direct importers. + // + // For exported fields and methods, the scope is the + // transitive rdeps. (The exportedness of the field's struct + // or method's receiver is irrelevant.) + transitive := false + switch obj := obj.(type) { + case *types.TypeName: + // Renaming an exported package-level type + // requires us to inspect all transitive rdeps + // in the event that the type is embedded. + // + // TODO(adonovan): opt: this is conservative + // but inefficient. Instead, expand the scope + // of the search only if we actually encounter + // an embedding of the type, and only then to + // the rdeps of the embedding package. + if obj.Parent() == obj.Pkg().Scope() { + transitive = true + } + + case *types.Var: + if obj.IsField() { + transitive = true // field + } + + // TODO(adonovan): opt: process only packages that + // contain a reference (xrefs) to the target field. + + case *types.Func: + if obj.Type().(*types.Signature).Recv() != nil { + transitive = true // method + } + + // It's tempting to optimize by skipping + // packages that don't contain a reference to + // the method in the xrefs index, but we still + // need to apply the satisfy check to those + // packages to find assignment statements that + // might expand the scope of the renaming. + } + + // Type-check all the packages to inspect. + declURI := protocol.URIFromPath(pkg.FileSet().File(obj.Pos()).Name()) + pkgs, err := typeCheckReverseDependencies(ctx, snapshot, declURI, transitive) + if err != nil { + return nil, err + } + + // Apply the renaming to the (initial) object. + declPkgPath := PackagePath(obj.Pkg().Path()) + return renameExported(pkgs, declPkgPath, declObjPath, newName) +} + +// typeCheckReverseDependencies returns the type-checked packages for +// the reverse dependencies of all package variants containing +// file declURI. 
The packages are in some topological order. +// +// It includes all variants (even intermediate test variants) for the +// purposes of computing reverse dependencies, but discards ITVs for +// the actual renaming work. +// +// (This neglects obscure edge cases where a _test.go file changes the +// selectors used only in an ITV, but life is short. Also sin must be +// punished.) +func typeCheckReverseDependencies(ctx context.Context, snapshot *cache.Snapshot, declURI protocol.DocumentURI, transitive bool) ([]*cache.Package, error) { + variants, err := snapshot.MetadataForFile(ctx, declURI) + if err != nil { + return nil, err + } + // variants must include ITVs for the reverse dependency + // computation, but they are filtered out before we typecheck. + allRdeps := make(map[PackageID]*metadata.Package) + for _, variant := range variants { + rdeps, err := snapshot.ReverseDependencies(ctx, variant.ID, transitive) + if err != nil { + return nil, err + } + allRdeps[variant.ID] = variant // include self + for id, meta := range rdeps { + allRdeps[id] = meta + } + } + var ids []PackageID + for id, meta := range allRdeps { + if meta.IsIntermediateTestVariant() { + continue + } + ids = append(ids, id) + } + + // Sort the packages into some topological order of the + // (unfiltered) metadata graph. + metadata.SortPostOrder(snapshot, ids) + + // Dependencies must be visited first since they can expand + // the search set. Ideally we would process the (filtered) set + // of packages in the parallel postorder of the snapshot's + // (unfiltered) metadata graph, but this is quite tricky + // without a good graph abstraction. + // + // For now, we visit packages sequentially in order of + // ascending height, like an inverted breadth-first search. + // + // Type checking is by far the dominant cost, so + // overlapping it with renaming may not be worthwhile. + return snapshot.TypeCheck(ctx, ids...) 
+} + +// renameExported renames the object denoted by (pkgPath, objPath) +// within the specified packages, along with any other objects that +// must be renamed as a consequence. The slice of packages must be +// topologically ordered. +func renameExported(pkgs []*cache.Package, declPkgPath PackagePath, declObjPath objectpath.Path, newName string) (map[protocol.DocumentURI][]diff.Edit, error) { + + // A target is a name for an object that is stable across types.Packages. + type target struct { + pkg PackagePath + obj objectpath.Path + } + + // Populate the initial set of target objects. + // This set may grow as we discover the consequences of each renaming. + // + // TODO(adonovan): strictly, each cone of reverse dependencies + // of a single variant should have its own target map that + // monotonically expands as we go up the import graph, because + // declarations in test files can alter the set of + // package-level names and change the meaning of field and + // method selectors. So if we parallelize the graph + // visitation (see above), we should also compute the targets + // as a union of dependencies. + // + // Or we could decide that the logic below is fast enough not + // to need parallelism. In small measurements so far the + // type-checking step is about 95% and the renaming only 5%. + targets := map[target]bool{{declPkgPath, declObjPath}: true} + + // Apply the renaming operation to each package. + allEdits := make(map[protocol.DocumentURI][]diff.Edit) + for _, pkg := range pkgs { + + // Resolved target objects within package pkg. + var objects []types.Object + for t := range targets { + p := pkg.DependencyTypes(t.pkg) + if p == nil { + continue // indirect dependency of no consequence + } + obj, err := objectpath.Object(p, t.obj) + if err != nil { + // Possibly a method or an unexported type + // that is not reachable through export data? + // See https://github.com/golang/go/issues/60789. 
+ // + // TODO(adonovan): it seems unsatisfactory that Object + // should return an error for a "valid" path. Perhaps + // we should define such paths as invalid and make + // objectpath.For compute reachability? + // Would that be a compatible change? + continue + } + objects = append(objects, obj) + } + if len(objects) == 0 { + continue // no targets of consequence to this package + } + + // Apply the renaming. + editMap, moreObjects, err := renameObjects(newName, pkg, objects...) + if err != nil { + return nil, err + } + + // It is safe to concatenate the edits as they are non-overlapping + // (or identical, in which case they will be de-duped by Rename). + for uri, edits := range editMap { + allEdits[uri] = append(allEdits[uri], edits...) + } + + // Expand the search set? + for obj := range moreObjects { + objpath, err := objectpath.For(obj) + if err != nil { + continue // not exported + } + target := target{PackagePath(obj.Pkg().Path()), objpath} + targets[target] = true + + // TODO(adonovan): methods requires dynamic + // programming of the product targets x + // packages as any package might add a new + // target (from a forward dep) as a + // consequence, and any target might imply a + // new set of rdeps. See golang/go#58461. + } + } + + return allEdits, nil +} + +// renamePackageName renames package declarations, imports, and go.mod files. +func renamePackageName(ctx context.Context, s *cache.Snapshot, f file.Handle, newName PackageName) (map[protocol.DocumentURI][]diff.Edit, error) { + // Rename the package decl and all imports. + renamingEdits, err := renamePackage(ctx, s, f, newName) + if err != nil { + return nil, err + } + + // Update the last component of the file's enclosing directory. + oldBase := filepath.Dir(f.URI().Path()) + newPkgDir := filepath.Join(filepath.Dir(oldBase), string(newName)) + + // Update any affected replace directives in go.mod files. + // TODO(adonovan): extract into its own function. + // + // Get all workspace modules. 
+ // TODO(adonovan): should this operate on all go.mod files, + // irrespective of whether they are included in the workspace? + modFiles := s.View().ModFiles() + for _, m := range modFiles { + fh, err := s.ReadFile(ctx, m) + if err != nil { + return nil, err + } + pm, err := s.ParseMod(ctx, fh) + if err != nil { + return nil, err + } + + modFileDir := filepath.Dir(pm.URI.Path()) + affectedReplaces := []*modfile.Replace{} + + // Check if any replace directives need to be fixed + for _, r := range pm.File.Replace { + if !strings.HasPrefix(r.New.Path, "/") && !strings.HasPrefix(r.New.Path, "./") && !strings.HasPrefix(r.New.Path, "../") { + continue + } + + replacedPath := r.New.Path + if strings.HasPrefix(r.New.Path, "./") || strings.HasPrefix(r.New.Path, "../") { + replacedPath = filepath.Join(modFileDir, r.New.Path) + } + + // TODO: Is there a risk of converting a '\' delimited replacement to a '/' delimited replacement? + if !strings.HasPrefix(filepath.ToSlash(replacedPath)+"/", filepath.ToSlash(oldBase)+"/") { + continue // not affected by the package renaming + } + + affectedReplaces = append(affectedReplaces, r) + } + + if len(affectedReplaces) == 0 { + continue + } + copied, err := modfile.Parse("", pm.Mapper.Content, nil) + if err != nil { + return nil, err + } + + for _, r := range affectedReplaces { + replacedPath := r.New.Path + if strings.HasPrefix(r.New.Path, "./") || strings.HasPrefix(r.New.Path, "../") { + replacedPath = filepath.Join(modFileDir, r.New.Path) + } + + suffix := strings.TrimPrefix(replacedPath, oldBase) + + newReplacedPath, err := filepath.Rel(modFileDir, newPkgDir+suffix) + if err != nil { + return nil, err + } + + newReplacedPath = filepath.ToSlash(newReplacedPath) + + if !strings.HasPrefix(newReplacedPath, "/") && !strings.HasPrefix(newReplacedPath, "../") { + newReplacedPath = "./" + newReplacedPath + } + + if err := copied.AddReplace(r.Old.Path, "", newReplacedPath, ""); err != nil { + return nil, err + } + } + + copied.Cleanup() + 
newContent, err := copied.Format() + if err != nil { + return nil, err + } + + // Calculate the edits to be made due to the change. + edits := diff.Bytes(pm.Mapper.Content, newContent) + renamingEdits[pm.URI] = append(renamingEdits[pm.URI], edits...) + } + + return renamingEdits, nil +} + +// renamePackage computes all workspace edits required to rename the package +// described by the given metadata, to newName, by renaming its package +// directory. +// +// It updates package clauses and import paths for the renamed package as well +// as any other packages affected by the directory renaming among all packages +// known to the snapshot. +func renamePackage(ctx context.Context, s *cache.Snapshot, f file.Handle, newName PackageName) (map[protocol.DocumentURI][]diff.Edit, error) { + if strings.HasSuffix(string(newName), "_test") { + return nil, fmt.Errorf("cannot rename to _test package") + } + + // We need metadata for the relevant package and module paths. + // These should be the same for all packages containing the file. + meta, err := NarrowestMetadataForFile(ctx, s, f.URI()) + if err != nil { + return nil, err + } + + oldPkgPath := meta.PkgPath + if meta.Module == nil { + return nil, fmt.Errorf("cannot rename package: missing module information for package %q", meta.PkgPath) + } + modulePath := PackagePath(meta.Module.Path) + if modulePath == oldPkgPath { + return nil, fmt.Errorf("cannot rename package: module path %q is the same as the package path, so renaming the package directory would have no effect", modulePath) + } + + newPathPrefix := path.Join(path.Dir(string(oldPkgPath)), string(newName)) + + // We must inspect all packages, not just direct importers, + // because we also rename subpackages, which may be unrelated. + // (If the renamed package imports a subpackage it may require + // edits to both its package and import decls.) 
+ allMetadata, err := s.AllMetadata(ctx) + if err != nil { + return nil, err + } + + // Rename package and import declarations in all relevant packages. + edits := make(map[protocol.DocumentURI][]diff.Edit) + for _, mp := range allMetadata { + // Special case: x_test packages for the renamed package will not have the + // package path as a dir prefix, but still need their package clauses + // renamed. + if mp.PkgPath == oldPkgPath+"_test" { + if err := renamePackageClause(ctx, mp, s, newName+"_test", edits); err != nil { + return nil, err + } + continue + } + + // Subtle: check this condition before checking for valid module info + // below, because we should not fail this operation if unrelated packages + // lack module info. + if !strings.HasPrefix(string(mp.PkgPath)+"/", string(oldPkgPath)+"/") { + continue // not affected by the package renaming + } + + if mp.Module == nil { + // This check will always fail under Bazel. + return nil, fmt.Errorf("cannot rename package: missing module information for package %q", mp.PkgPath) + } + + if modulePath != PackagePath(mp.Module.Path) { + continue // don't edit imports if nested package and renaming package have different module paths + } + + // Renaming a package consists of changing its import path and package name. + suffix := strings.TrimPrefix(string(mp.PkgPath), string(oldPkgPath)) + newPath := newPathPrefix + suffix + + pkgName := mp.Name + if mp.PkgPath == oldPkgPath { + pkgName = newName + + if err := renamePackageClause(ctx, mp, s, newName, edits); err != nil { + return nil, err + } + } + + imp := ImportPath(newPath) // TODO(adonovan): what if newPath has vendor/ prefix? + if err := renameImports(ctx, s, mp, imp, pkgName, edits); err != nil { + return nil, err + } + } + + return edits, nil +} + +// renamePackageClause computes edits renaming the package clause of files in +// the package described by the given metadata, to newName. +// +// Edits are written into the edits map. 
+func renamePackageClause(ctx context.Context, mp *metadata.Package, snapshot *cache.Snapshot, newName PackageName, edits map[protocol.DocumentURI][]diff.Edit) error { + // Rename internal references to the package in the renaming package. + for _, uri := range mp.CompiledGoFiles { + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return err + } + f, err := snapshot.ParseGo(ctx, fh, parsego.Header) + if err != nil { + return err + } + if f.File.Name == nil { + continue // no package declaration + } + + edit, err := posEdit(f.Tok, f.File.Name.Pos(), f.File.Name.End(), string(newName)) + if err != nil { + return err + } + edits[f.URI] = append(edits[f.URI], edit) + } + + return nil +} + +// renameImports computes the set of edits to imports resulting from renaming +// the package described by the given metadata, to a package with import path +// newPath and name newName. +// +// Edits are written into the edits map. +func renameImports(ctx context.Context, snapshot *cache.Snapshot, mp *metadata.Package, newPath ImportPath, newName PackageName, allEdits map[protocol.DocumentURI][]diff.Edit) error { + rdeps, err := snapshot.ReverseDependencies(ctx, mp.ID, false) // find direct importers + if err != nil { + return err + } + + // Pass 1: rename import paths in import declarations. 
+ needsTypeCheck := make(map[PackageID][]protocol.DocumentURI) + for _, rdep := range rdeps { + if rdep.IsIntermediateTestVariant() { + continue // for renaming, these variants are redundant + } + + for _, uri := range rdep.CompiledGoFiles { + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return err + } + f, err := snapshot.ParseGo(ctx, fh, parsego.Header) + if err != nil { + return err + } + if f.File.Name == nil { + continue // no package declaration + } + for _, imp := range f.File.Imports { + if rdep.DepsByImpPath[metadata.UnquoteImportPath(imp)] != mp.ID { + continue // not the import we're looking for + } + + // If the import does not explicitly specify + // a local name, then we need to invoke the + // type checker to locate references to update. + // + // TODO(adonovan): is this actually true? + // Renaming an import with a local name can still + // cause conflicts: shadowing of built-ins, or of + // package-level decls in the same or another file. + if imp.Name == nil { + needsTypeCheck[rdep.ID] = append(needsTypeCheck[rdep.ID], uri) + } + + // Create text edit for the import path (string literal). + edit, err := posEdit(f.Tok, imp.Path.Pos(), imp.Path.End(), strconv.Quote(string(newPath))) + if err != nil { + return err + } + allEdits[uri] = append(allEdits[uri], edit) + } + } + } + + // If the imported package's name hasn't changed, + // we don't need to rename references within each file. + if newName == mp.Name { + return nil + } + + // Pass 2: rename local name (types.PkgName) of imported + // package throughout one or more files of the package. + ids := make([]PackageID, 0, len(needsTypeCheck)) + for id := range needsTypeCheck { + ids = append(ids, id) + } + pkgs, err := snapshot.TypeCheck(ctx, ids...) 
+ if err != nil { + return err + } + for i, id := range ids { + pkg := pkgs[i] + for _, uri := range needsTypeCheck[id] { + f, err := pkg.File(uri) + if err != nil { + return err + } + for _, imp := range f.File.Imports { + if imp.Name != nil { + continue // has explicit local name + } + if rdeps[id].DepsByImpPath[metadata.UnquoteImportPath(imp)] != mp.ID { + continue // not the import we're looking for + } + + pkgname := pkg.TypesInfo().Implicits[imp].(*types.PkgName) + + pkgScope := pkg.Types().Scope() + fileScope := pkg.TypesInfo().Scopes[f.File] + + localName := string(newName) + try := 0 + + // Keep trying with fresh names until one succeeds. + // + // TODO(adonovan): fix: this loop is not sufficient to choose a name + // that is guaranteed to be conflict-free; renameObj may still fail. + // So the retry loop should be around renameObj, and we shouldn't + // bother with scopes here. + for fileScope.Lookup(localName) != nil || pkgScope.Lookup(localName) != nil { + try++ + localName = fmt.Sprintf("%s%d", newName, try) + } + + // renameObj detects various conflicts, including: + // - new name conflicts with a package-level decl in this file; + // - new name hides a package-level decl in another file that + // is actually referenced in this file; + // - new name hides a built-in that is actually referenced + // in this file; + // - a reference in this file to the old package name would + // become shadowed by an intervening declaration that + // uses the new name. + // It returns the edits if no conflict was detected. + editMap, _, err := renameObjects(localName, pkg, pkgname) + if err != nil { + return err + } + + // If the chosen local package name matches the package's + // new name, delete the change that would have inserted + // an explicit local name, which is always the lexically + // first change. 
+ if localName == string(newName) { + edits, ok := editMap[uri] + if !ok { + return fmt.Errorf("internal error: no changes for %s", uri) + } + diff.SortEdits(edits) + editMap[uri] = edits[1:] + } + for uri, edits := range editMap { + allEdits[uri] = append(allEdits[uri], edits...) + } + } + } + } + return nil +} + +// renameObjects computes the edits to the type-checked syntax package pkg +// required to rename a set of target objects to newName. +// +// It also returns the set of objects that were found (due to +// corresponding methods and embedded fields) to require renaming as a +// consequence of the requested renamings. +// +// It returns an error if the renaming would cause a conflict. +func renameObjects(newName string, pkg *cache.Package, targets ...types.Object) (map[protocol.DocumentURI][]diff.Edit, map[types.Object]bool, error) { + r := renamer{ + pkg: pkg, + objsToUpdate: make(map[types.Object]bool), + from: targets[0].Name(), + to: newName, + } + + // A renaming initiated at an interface method indicates the + // intention to rename abstract and concrete methods as needed + // to preserve assignability. + // TODO(adonovan): pull this into the caller. + for _, obj := range targets { + if obj, ok := obj.(*types.Func); ok { + recv := obj.Type().(*types.Signature).Recv() + if recv != nil && types.IsInterface(recv.Type().Underlying()) { + r.changeMethods = true + break + } + } + } + + // Check that the renaming of the identifier is ok. + for _, obj := range targets { + r.check(obj) + if len(r.conflicts) > 0 { + // Stop at first error. + return nil, nil, fmt.Errorf("%s", strings.Join(r.conflicts, "\n")) + } + } + + editMap, err := r.update() + if err != nil { + return nil, nil, err + } + + // Remove initial targets so that only 'consequences' remain. + for _, obj := range targets { + delete(r.objsToUpdate, obj) + } + return editMap, r.objsToUpdate, nil +} + +// Rename all references to the target objects. 
+func (r *renamer) update() (map[protocol.DocumentURI][]diff.Edit, error) { + result := make(map[protocol.DocumentURI][]diff.Edit) + + // shouldUpdate reports whether obj is one of (or an + // instantiation of one of) the target objects. + shouldUpdate := func(obj types.Object) bool { + return containsOrigin(r.objsToUpdate, obj) + } + + // Find all identifiers in the package that define or use a + // renamed object. We iterate over info as it is more efficient + // than calling ast.Inspect for each of r.pkg.CompiledGoFiles(). + type item struct { + node ast.Node // Ident, ImportSpec (obj=PkgName), or CaseClause (obj=Var) + obj types.Object + isDef bool + } + var items []item + info := r.pkg.TypesInfo() + for id, obj := range info.Uses { + if shouldUpdate(obj) { + items = append(items, item{id, obj, false}) + } + } + for id, obj := range info.Defs { + if shouldUpdate(obj) { + items = append(items, item{id, obj, true}) + } + } + for node, obj := range info.Implicits { + if shouldUpdate(obj) { + switch node.(type) { + case *ast.ImportSpec, *ast.CaseClause: + items = append(items, item{node, obj, true}) + } + } + } + sort.Slice(items, func(i, j int) bool { + return items[i].node.Pos() < items[j].node.Pos() + }) + + // Update each identifier, and its doc comment if it is a declaration. + for _, item := range items { + pgf, ok := enclosingFile(r.pkg, item.node.Pos()) + if !ok { + bug.Reportf("edit does not belong to syntax of package %q", r.pkg) + continue + } + + // Renaming a types.PkgName may result in the addition or removal of an identifier, + // so we deal with this separately. + if pkgName, ok := item.obj.(*types.PkgName); ok && item.isDef { + edit, err := r.updatePkgName(pgf, pkgName) + if err != nil { + return nil, err + } + result[pgf.URI] = append(result[pgf.URI], edit) + continue + } + + // Workaround the unfortunate lack of a Var object + // for x in "switch x := expr.(type) {}" by adjusting + // the case clause to the switch ident. 
+ // This may result in duplicate edits, but we de-dup later. + if _, ok := item.node.(*ast.CaseClause); ok { + path, _ := astutil.PathEnclosingInterval(pgf.File, item.obj.Pos(), item.obj.Pos()) + item.node = path[0].(*ast.Ident) + } + + // Replace the identifier with r.to. + edit, err := posEdit(pgf.Tok, item.node.Pos(), item.node.End(), r.to) + if err != nil { + return nil, err + } + + result[pgf.URI] = append(result[pgf.URI], edit) + + if !item.isDef { // uses do not have doc comments to update. + continue + } + + doc := docComment(pgf, item.node.(*ast.Ident)) + if doc == nil { + continue + } + + // Perform the rename in doc comments declared in the original package. + // go/parser strips out \r\n returns from the comment text, so go + // line-by-line through the comment text to get the correct positions. + docRegexp := regexp.MustCompile(`\b` + r.from + `\b`) // valid identifier => valid regexp + for _, comment := range doc.List { + if isDirective(comment.Text) { + continue + } + // TODO(adonovan): why are we looping over lines? + // Just run the loop body once over the entire multiline comment. + lines := strings.Split(comment.Text, "\n") + tokFile := pgf.Tok + commentLine := safetoken.Line(tokFile, comment.Pos()) + uri := protocol.URIFromPath(tokFile.Name()) + for i, line := range lines { + lineStart := comment.Pos() + if i > 0 { + lineStart = tokFile.LineStart(commentLine + i) + } + for _, locs := range docRegexp.FindAllIndex([]byte(line), -1) { + edit, err := posEdit(tokFile, lineStart+token.Pos(locs[0]), lineStart+token.Pos(locs[1]), r.to) + if err != nil { + return nil, err // can't happen + } + result[uri] = append(result[uri], edit) + } + } + } + } + + docLinkEdits, err := r.updateCommentDocLinks() + if err != nil { + return nil, err + } + for uri, edits := range docLinkEdits { + result[uri] = append(result[uri], edits...) 
+ } + + return result, nil +} + +// updateCommentDocLinks updates each doc comment in the package +// that refers to one of the renamed objects using a doc link +// (https://golang.org/doc/comment#doclinks) such as "[pkg.Type.Method]". +func (r *renamer) updateCommentDocLinks() (map[protocol.DocumentURI][]diff.Edit, error) { + result := make(map[protocol.DocumentURI][]diff.Edit) + var docRenamers []*docLinkRenamer + for obj := range r.objsToUpdate { + if _, ok := obj.(*types.PkgName); ok { + // The dot package name will not be referenced + if obj.Name() == "." { + continue + } + + docRenamers = append(docRenamers, &docLinkRenamer{ + isDep: false, + isPkgOrType: true, + file: r.pkg.FileSet().File(obj.Pos()), + regexp: docLinkPattern("", "", obj.Name(), true), + to: r.to, + }) + continue + } + if !obj.Exported() { + continue + } + recvName := "" + // Doc links can reference only exported package-level objects + // and methods of exported package-level named types. + if !isPackageLevel(obj) { + obj, isFunc := obj.(*types.Func) + if !isFunc { + continue + } + recv := obj.Type().(*types.Signature).Recv() + if recv == nil { + continue + } + _, named := typesinternal.ReceiverNamed(recv) + if named == nil { + continue + } + // Doc links can't reference interface methods. + if types.IsInterface(named.Underlying()) { + continue + } + name := named.Origin().Obj() + if !name.Exported() || !isPackageLevel(name) { + continue + } + recvName = name.Name() + } + + // Qualify objects from other packages. 
+ pkgName := "" + if r.pkg.Types() != obj.Pkg() { + pkgName = obj.Pkg().Name() + } + _, isTypeName := obj.(*types.TypeName) + docRenamers = append(docRenamers, &docLinkRenamer{ + isDep: r.pkg.Types() != obj.Pkg(), + isPkgOrType: isTypeName, + packagePath: obj.Pkg().Path(), + packageName: pkgName, + recvName: recvName, + objName: obj.Name(), + regexp: docLinkPattern(pkgName, recvName, obj.Name(), isTypeName), + to: r.to, + }) + } + for _, pgf := range r.pkg.CompiledGoFiles() { + for _, d := range docRenamers { + edits, err := d.update(pgf) + if err != nil { + return nil, err + } + if len(edits) > 0 { + result[pgf.URI] = append(result[pgf.URI], edits...) + } + } + } + return result, nil +} + +// docLinkPattern returns a regular expression that matches doclinks in comments. +// It has one submatch that indicates the symbol to be updated. +func docLinkPattern(pkgName, recvName, objName string, isPkgOrType bool) *regexp.Regexp { + // The doc link may contain a leading star, e.g. [*bytes.Buffer]. + pattern := `\[\*?` + if pkgName != "" { + pattern += pkgName + `\.` + } + if recvName != "" { + pattern += recvName + `\.` + } + // The first submatch is object name. + pattern += `(` + objName + `)` + // If the object is a *types.TypeName or *types.PkgName, also need + // match the objects referenced by them, so add `(\.\w+)*`. + if isPkgOrType { + pattern += `(?:\.\w+)*` + } + // There are two type of link in comments: + // 1. url link. e.g. [text]: url + // 2. doc link. e.g. [pkg.Name] + // in order to only match the doc link, add `([^:]|$)` in the end. 
+ pattern += `\](?:[^:]|$)` + + return regexp.MustCompile(pattern) +} + +// A docLinkRenamer renames doc links of forms such as these: +// +// [Func] +// [pkg.Func] +// [RecvType.Method] +// [*Type] +// [*pkg.Type] +// [*pkg.RecvType.Method] +type docLinkRenamer struct { + isDep bool // object is from a dependency package + isPkgOrType bool // object is *types.PkgName or *types.TypeName + packagePath string + packageName string // e.g. "pkg" + recvName string // e.g. "RecvType" + objName string // e.g. "Func", "Type", "Method" + to string // new name + regexp *regexp.Regexp + + file *token.File // enclosing file, if renaming *types.PkgName +} + +// update updates doc links in the package level comments. +func (r *docLinkRenamer) update(pgf *parsego.File) (result []diff.Edit, err error) { + if r.file != nil && r.file != pgf.Tok { + return nil, nil + } + pattern := r.regexp + // If the object is in dependency package, + // the imported name in the file may be different from the original package name + if r.isDep { + for _, spec := range pgf.File.Imports { + importPath, _ := strconv.Unquote(spec.Path.Value) + if importPath == r.packagePath { + // Ignore blank imports + if spec.Name == nil || spec.Name.Name == "_" || spec.Name.Name == "." { + continue + } + if spec.Name.Name != r.packageName { + pattern = docLinkPattern(spec.Name.Name, r.recvName, r.objName, r.isPkgOrType) + } + break + } + } + } + + var edits []diff.Edit + updateDocLinks := func(doc *ast.CommentGroup) error { + if doc != nil { + for _, c := range doc.List { + for _, locs := range pattern.FindAllStringSubmatchIndex(c.Text, -1) { + // The first submatch is the object name, so the locs[2:4] is the index of object name. + edit, err := posEdit(pgf.Tok, c.Pos()+token.Pos(locs[2]), c.Pos()+token.Pos(locs[3]), r.to) + if err != nil { + return err + } + edits = append(edits, edit) + } + } + } + return nil + } + + // Update package doc comments. 
+ err = updateDocLinks(pgf.File.Doc) + if err != nil { + return nil, err + } + for _, decl := range pgf.File.Decls { + var doc *ast.CommentGroup + switch decl := decl.(type) { + case *ast.GenDecl: + doc = decl.Doc + case *ast.FuncDecl: + doc = decl.Doc + } + err = updateDocLinks(doc) + if err != nil { + return nil, err + } + } + return edits, nil +} + +// docComment returns the doc for an identifier within the specified file. +func docComment(pgf *parsego.File, id *ast.Ident) *ast.CommentGroup { + nodes, _ := astutil.PathEnclosingInterval(pgf.File, id.Pos(), id.End()) + for _, node := range nodes { + switch decl := node.(type) { + case *ast.FuncDecl: + return decl.Doc + case *ast.Field: + return decl.Doc + case *ast.GenDecl: + return decl.Doc + // For {Type,Value}Spec, if the doc on the spec is absent, + // search for the enclosing GenDecl + case *ast.TypeSpec: + if decl.Doc != nil { + return decl.Doc + } + case *ast.ValueSpec: + if decl.Doc != nil { + return decl.Doc + } + case *ast.Ident: + case *ast.AssignStmt: + // *ast.AssignStmt doesn't have an associated comment group. + // So, we try to find a comment just before the identifier. + + // Try to find a comment group only for short variable declarations (:=). + if decl.Tok != token.DEFINE { + return nil + } + + identLine := safetoken.Line(pgf.Tok, id.Pos()) + for _, comment := range nodes[len(nodes)-1].(*ast.File).Comments { + if comment.Pos() > id.Pos() { + // Comment is after the identifier. + continue + } + + lastCommentLine := safetoken.Line(pgf.Tok, comment.End()) + if lastCommentLine+1 == identLine { + return comment + } + } + default: + return nil + } + } + return nil +} + +// updatePkgName returns the updates to rename a pkgName in the import spec by +// only modifying the package name portion of the import declaration. +func (r *renamer) updatePkgName(pgf *parsego.File, pkgName *types.PkgName) (diff.Edit, error) { + // Modify ImportSpec syntax to add or remove the Name as needed. 
+ path, _ := astutil.PathEnclosingInterval(pgf.File, pkgName.Pos(), pkgName.Pos()) + if len(path) < 2 { + return diff.Edit{}, fmt.Errorf("no path enclosing interval for %s", pkgName.Name()) + } + spec, ok := path[1].(*ast.ImportSpec) + if !ok { + return diff.Edit{}, fmt.Errorf("failed to update PkgName for %s", pkgName.Name()) + } + + newText := "" + if pkgName.Imported().Name() != r.to { + newText = r.to + " " + } + + // Replace the portion (possibly empty) of the spec before the path: + // local "path" or "path" + // -> <- -><- + return posEdit(pgf.Tok, spec.Pos(), spec.Path.Pos(), newText) +} + +// parsePackageNameDecl is a convenience function that parses and +// returns the package name declaration of file fh, and reports +// whether the position ppos lies within it. +// +// Note: also used by references. +func parsePackageNameDecl(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, ppos protocol.Position) (*parsego.File, bool, error) { + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Header) + if err != nil { + return nil, false, err + } + // Careful: because we used parsego.Header, + // pgf.Pos(ppos) may be beyond EOF => (0, err). + pos, _ := pgf.PositionPos(ppos) + return pgf, pgf.File.Name.Pos() <= pos && pos <= pgf.File.Name.End(), nil +} + +// enclosingFile returns the CompiledGoFile of pkg that contains the specified position. +func enclosingFile(pkg *cache.Package, pos token.Pos) (*parsego.File, bool) { + for _, pgf := range pkg.CompiledGoFiles() { + if pgf.File.Pos() <= pos && pos <= pgf.File.End() { + return pgf, true + } + } + return nil, false +} + +// posEdit returns an edit to replace the (start, end) range of tf with 'new'. 
+func posEdit(tf *token.File, start, end token.Pos, new string) (diff.Edit, error) { + startOffset, endOffset, err := safetoken.Offsets(tf, start, end) + if err != nil { + return diff.Edit{}, err + } + return diff.Edit{Start: startOffset, End: endOffset, New: new}, nil +} diff --git a/gopls/internal/lsp/source/rename_check.go b/gopls/internal/golang/rename_check.go similarity index 88% rename from gopls/internal/lsp/source/rename_check.go rename to gopls/internal/golang/rename_check.go index 53341748793..e63b745c1ad 100644 --- a/gopls/internal/lsp/source/rename_check.go +++ b/gopls/internal/golang/rename_check.go @@ -4,7 +4,7 @@ // // Taken from golang.org/x/tools/refactor/rename. -package source +package golang // This file defines the conflict-checking portion of the rename operation. // @@ -43,7 +43,11 @@ import ( "unicode" "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/gopls/internal/lsp/safetoken" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" "golang.org/x/tools/refactor/satisfy" ) @@ -137,8 +141,8 @@ func (r *renamer) checkInFileBlock(from *types.PkgName) { func (r *renamer) checkInPackageBlock(from types.Object) { // Check that there are no references to the name from another // package if the renaming would make it unexported. - if typ := r.pkg.GetTypes(); typ != from.Pkg() && ast.IsExported(r.from) && !ast.IsExported(r.to) { - if id := someUse(r.pkg.GetTypesInfo(), from); id != nil { + if typ := r.pkg.Types(); typ != from.Pkg() && ast.IsExported(r.from) && !ast.IsExported(r.to) { + if id := someUse(r.pkg.TypesInfo(), from); id != nil { r.checkExport(id, typ, from) } } @@ -148,7 +152,7 @@ func (r *renamer) checkInPackageBlock(from types.Object) { kind := objectKind(from) if kind == "func" { // Reject if intra-package references to it exist. 
- for id, obj := range r.pkg.GetTypesInfo().Uses { + for id, obj := range r.pkg.TypesInfo().Uses { if obj == from { r.errorf(from.Pos(), "renaming this func %q to %q would make it a package initializer", @@ -164,8 +168,8 @@ func (r *renamer) checkInPackageBlock(from types.Object) { } // Check for conflicts between package block and all file blocks. - for _, f := range r.pkg.GetSyntax() { - fileScope := r.pkg.GetTypesInfo().Scopes[f] + for _, f := range r.pkg.Syntax() { + fileScope := r.pkg.TypesInfo().Scopes[f] b, prev := fileScope.LookupParent(r.to, token.NoPos) if b == fileScope { r.errorf(from.Pos(), "renaming this %s %q to %q would conflict", objectKind(from), from.Name(), r.to) @@ -273,9 +277,9 @@ func (r *renamer) checkInLexicalScope(from types.Object) { // var s struct {T} // print(s.T) // ...this must change too if _, ok := from.(*types.TypeName); ok { - for id, obj := range r.pkg.GetTypesInfo().Uses { + for id, obj := range r.pkg.TypesInfo().Uses { if obj == from { - if field := r.pkg.GetTypesInfo().Defs[id]; field != nil { + if field := r.pkg.TypesInfo().Defs[id]; field != nil { r.check(field) } } @@ -294,11 +298,45 @@ func deeper(x, y *types.Scope) bool { } } +// Scope and Position +// +// Consider a function f declared as: +// +// func f[T *U, U *T](p, q T) (r, s U) { var ( v T; w = v ); type (t *t; u t) } +// ^ ^ ^ ^ ^ ^ +/// {T,U} {p,q,r,s} v w t u +// +// All objects {T, U, p, q, r, s, local} belong to the same lexical +// block, the function scope, which is found in types.Info.Scopes +// for f's FuncType. (A function body's BlockStmt does not have +// an associated scope; only nested BlockStmts do.) +// +// The effective scope of each object is different: +// +// - The type parameters T and U, whose constraints may refer to each +// other, all have a scope that starts at the beginning of the +// FuncDecl.Type.Func token. 
+// +// - The parameter and result variables {p,q,r,s} can reference the +// type parameters but not each other, so their scopes all start at +// the end of the FuncType. +// (Prior to go1.22 it was--incorrectly--unset; see #64295). +// Beware also that Scope.Innermost does not currently work correctly for +// type parameters: it returns the scope of the package, not the function. +// +// - Each const or var {v,w} declared within the function body has a +// scope that begins at the end of its ValueSpec, or after the +// AssignStmt for a var declared by ":=". +// +// - Each type {t,u} in the body has a scope that that begins at +// the start of the TypeSpec, so they can be self-recursive +// but--unlike package-level types--not mutually recursive. + // forEachLexicalRef calls fn(id, block) for each identifier id in package // pkg that is a reference to obj in lexical scope. block is the // lexical block enclosing the reference. If fn returns false the // iteration is terminated and findLexicalRefs returns false. -func forEachLexicalRef(pkg Package, obj types.Object, fn func(id *ast.Ident, block *types.Scope) bool) bool { +func forEachLexicalRef(pkg *cache.Package, obj types.Object, fn func(id *ast.Ident, block *types.Scope) bool) bool { ok := true var stack []ast.Node @@ -315,8 +353,8 @@ func forEachLexicalRef(pkg Package, obj types.Object, fn func(id *ast.Ident, blo stack = append(stack, n) // push switch n := n.(type) { case *ast.Ident: - if pkg.GetTypesInfo().Uses[n] == obj { - block := enclosingBlock(pkg.GetTypesInfo(), stack) + if pkg.TypesInfo().Uses[n] == obj { + block := enclosingBlock(pkg.TypesInfo(), stack) if !fn(n, block) { ok = false } @@ -331,11 +369,11 @@ func forEachLexicalRef(pkg Package, obj types.Object, fn func(id *ast.Ident, blo case *ast.CompositeLit: // Handle recursion ourselves for struct literals // so we don't visit field identifiers. 
- tv, ok := pkg.GetTypesInfo().Types[n] + tv, ok := pkg.TypesInfo().Types[n] if !ok { return visit(nil) // pop stack, don't descend } - if _, ok := Deref(tv.Type).Underlying().(*types.Struct); ok { + if is[*types.Struct](typeparams.CoreType(typeparams.Deref(tv.Type))) { if n.Type != nil { ast.Inspect(n.Type, visit) } @@ -352,7 +390,7 @@ func forEachLexicalRef(pkg Package, obj types.Object, fn func(id *ast.Ident, blo return true } - for _, f := range pkg.GetSyntax() { + for _, f := range pkg.Syntax() { ast.Inspect(f, visit) if len(stack) != 0 { panic(stack) @@ -364,15 +402,15 @@ func forEachLexicalRef(pkg Package, obj types.Object, fn func(id *ast.Ident, blo return ok } -// enclosingBlock returns the innermost block enclosing the specified -// AST node, specified in the form of a path from the root of the file, -// [file...n]. +// enclosingBlock returns the innermost block logically enclosing the +// specified AST node (an ast.Ident), specified in the form of a path +// from the root of the file, [file...n]. func enclosingBlock(info *types.Info, stack []ast.Node) *types.Scope { for i := range stack { n := stack[len(stack)-1-i] // For some reason, go/types always associates a // function's scope with its FuncType. - // TODO(adonovan): feature or a bug? + // See comments about scope above. switch f := n.(type) { case *ast.FuncDecl: n = f.Type @@ -434,8 +472,8 @@ func (r *renamer) checkStructField(from *types.Var) { // This struct is also a named type. // We must check for direct (non-promoted) field/field // and method/field conflicts. 
- named := r.pkg.GetTypesInfo().Defs[spec.Name].Type() - prev, indices, _ := types.LookupFieldOrMethod(named, true, r.pkg.GetTypes(), r.to) + named := r.pkg.TypesInfo().Defs[spec.Name].Type() + prev, indices, _ := types.LookupFieldOrMethod(named, true, r.pkg.Types(), r.to) if len(indices) == 1 { r.errorf(from.Pos(), "renaming this field %q to %q", from.Name(), r.to) @@ -446,7 +484,7 @@ func (r *renamer) checkStructField(from *types.Var) { } else { // This struct is not a named type. // We need only check for direct (non-promoted) field/field conflicts. - T := r.pkg.GetTypesInfo().Types[tStruct].Type.Underlying().(*types.Struct) + T := r.pkg.TypesInfo().Types[tStruct].Type.Underlying().(*types.Struct) for i := 0; i < T.NumFields(); i++ { if prev := T.Field(i); prev.Name() == r.to { r.errorf(from.Pos(), "renaming this field %q to %q", @@ -465,7 +503,7 @@ func (r *renamer) checkStructField(from *types.Var) { if from.Anonymous() { if named, ok := from.Type().(*types.Named); ok { r.check(named.Obj()) - } else if named, ok := Deref(from.Type()).(*types.Named); ok { + } else if named, ok := aliases.Unalias(typesinternal.Unpointer(from.Type())).(*types.Named); ok { r.check(named.Obj()) } } @@ -478,15 +516,15 @@ func (r *renamer) checkStructField(from *types.Var) { // the specified object would continue to do so after the renaming. func (r *renamer) checkSelections(from types.Object) { pkg := r.pkg - typ := pkg.GetTypes() + typ := pkg.Types() { - if id := someUse(pkg.GetTypesInfo(), from); id != nil { + if id := someUse(pkg.TypesInfo(), from); id != nil { if !r.checkExport(id, typ, from) { return } } - for syntax, sel := range pkg.GetTypesInfo().Selections { + for syntax, sel := range pkg.TypesInfo().Selections { // There may be extant selections of only the old // name or only the new name, so we must check both. // (If neither, the renaming is sound.) @@ -604,7 +642,7 @@ func (r *renamer) checkMethod(from *types.Func) { // declaration conflicts too. 
{ // Start with named interface types (better errors) - for _, obj := range r.pkg.GetTypesInfo().Defs { + for _, obj := range r.pkg.TypesInfo().Defs { if obj, ok := obj.(*types.TypeName); ok && types.IsInterface(obj.Type()) { f, _, _ := types.LookupFieldOrMethod( obj.Type(), false, from.Pkg(), from.Name()) @@ -624,7 +662,7 @@ func (r *renamer) checkMethod(from *types.Func) { } // Now look at all literal interface types (includes named ones again). - for e, tv := range r.pkg.GetTypesInfo().Types { + for e, tv := range r.pkg.TypesInfo().Types { if e, ok := e.(*ast.InterfaceType); ok { _ = e _ = tv.Type.(*types.Interface) @@ -774,7 +812,7 @@ func (r *renamer) checkMethod(from *types.Func) { var iface string I := recv(imeth).Type() - if named, ok := I.(*types.Named); ok { + if named, ok := aliases.Unalias(I).(*types.Named); ok { pos = named.Obj().Pos() iface = "interface " + named.Obj().Name() } else { @@ -827,13 +865,13 @@ func (r *renamer) satisfy() map[satisfy.Constraint]bool { // type-checker. // // Only proceed if all packages have no errors. - if len(pkg.GetParseErrors()) > 0 || len(pkg.GetTypeErrors()) > 0 { + if len(pkg.ParseErrors()) > 0 || len(pkg.TypeErrors()) > 0 { r.errorf(token.NoPos, // we don't have a position for this error. "renaming %q to %q not possible because %q has errors", r.from, r.to, pkg.Metadata().PkgPath) return nil } - f.Find(pkg.GetTypesInfo(), pkg.GetSyntax()) + f.Find(pkg.TypesInfo(), pkg.Syntax()) } r.satisfyConstraints = f.Result } @@ -871,7 +909,7 @@ func objectKind(obj types.Object) string { return "field" } case *types.Func: - if obj.Type().(*types.Signature).Recv() != nil { + if recv(obj) != nil { return "method" } } diff --git a/gopls/internal/golang/semtok.go b/gopls/internal/golang/semtok.go new file mode 100644 index 00000000000..956f19c50ce --- /dev/null +++ b/gopls/internal/golang/semtok.go @@ -0,0 +1,956 @@ +// Copyright 2024 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +// This file defines the Semantic Tokens operation for Go source. + +import ( + "bytes" + "context" + "errors" + "fmt" + "go/ast" + "go/token" + "go/types" + "log" + "path/filepath" + "regexp" + "strings" + "time" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/semtok" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/gopls/internal/util/typesutil" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/event" +) + +// semDebug enables comprehensive logging of decisions +// (gopls semtok foo.go > /dev/null shows log output). +// It should never be true in checked-in code. +const semDebug = false + +func SemanticTokens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, rng *protocol.Range) (*protocol.SemanticTokens, error) { + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + + // Select range. + var start, end token.Pos + if rng != nil { + var err error + start, end, err = pgf.RangePos(*rng) + if err != nil { + return nil, err // e.g. invalid range + } + } else { + tok := pgf.Tok + start, end = tok.Pos(0), tok.Pos(tok.Size()) // entire file + } + + // Reject full semantic token requests for large files. + // + // The LSP says that errors for the semantic token requests + // should only be returned for exceptions (a word not + // otherwise defined). This code treats a too-large file as an + // exception. On parse errors, the code does what it can. 
+ const maxFullFileSize = 100000 + if int(end-start) > maxFullFileSize { + return nil, fmt.Errorf("semantic tokens: range %s too large (%d > %d)", + fh.URI().Path(), end-start, maxFullFileSize) + } + + tv := tokenVisitor{ + ctx: ctx, + metadataSource: snapshot, + metadata: pkg.Metadata(), + info: pkg.TypesInfo(), + fset: pkg.FileSet(), + pkg: pkg, + pgf: pgf, + start: start, + end: end, + } + tv.visit() + return &protocol.SemanticTokens{ + Data: semtok.Encode( + tv.tokens, + snapshot.Options().NoSemanticString, + snapshot.Options().NoSemanticNumber, + snapshot.Options().SemanticTypes, + snapshot.Options().SemanticMods), + ResultID: time.Now().String(), // for delta requests, but we've never seen any + }, nil +} + +type tokenVisitor struct { + // inputs + ctx context.Context // for event logging + metadataSource metadata.Source // used to resolve imports + metadata *metadata.Package + info *types.Info + fset *token.FileSet + pkg *cache.Package + pgf *parsego.File + start, end token.Pos // range of interest + + // working state + stack []ast.Node // path from root of the syntax tree + tokens []semtok.Token // computed sequence of semantic tokens +} + +func (tv *tokenVisitor) visit() { + f := tv.pgf.File + // may not be in range, but harmless + tv.token(f.Package, len("package"), semtok.TokKeyword, nil) + if f.Name != nil { + tv.token(f.Name.NamePos, len(f.Name.Name), semtok.TokNamespace, nil) + } + for _, decl := range f.Decls { + // Only look at the decls that overlap the range. + if decl.End() <= tv.start || decl.Pos() >= tv.end { + continue + } + ast.Inspect(decl, tv.inspect) + } + + // Scan all files for imported pkgs, ignore the ambiguous pkg. + // This is to be consistent with the behavior in [go/doc]: https://pkg.go.dev/pkg/go/doc. 
+ importByName := make(map[string]*types.PkgName) + for _, pgf := range tv.pkg.CompiledGoFiles() { + for _, imp := range pgf.File.Imports { + if obj, _ := typesutil.ImportedPkgName(tv.pkg.TypesInfo(), imp); obj != nil { + if old, ok := importByName[obj.Name()]; ok { + if old != nil && old.Imported() != obj.Imported() { + importByName[obj.Name()] = nil // nil => ambiguous across files + } + continue + } + importByName[obj.Name()] = obj + } + } + } + + for _, cg := range f.Comments { + for _, c := range cg.List { + tv.comment(c, importByName) + } + } +} + +// Matches (for example) "[F]", "[*p.T]", "[p.T.M]" +// unless followed by a colon (exclude url link, e.g. "[go]: https://go.dev"). +// The first group is reference name. e.g. The first group of "[*p.T.M]" is "p.T.M". +var docLinkRegex = regexp.MustCompile(`\[\*?([\pL_][\pL_0-9]*(\.[\pL_][\pL_0-9]*){0,2})](?:[^:]|$)`) + +// comment emits semantic tokens for a comment. +// If the comment contains doc links or "go:" directives, +// it emits a separate token for each link or directive and +// each comment portion between them. +func (tv *tokenVisitor) comment(c *ast.Comment, importByName map[string]*types.PkgName) { + if strings.HasPrefix(c.Text, "//go:") { + tv.godirective(c) + return + } + + pkgScope := tv.pkg.Types().Scope() + // lookupObjects interprets the name in various forms + // (X, p.T, p.T.M, etc) and return the list of symbols + // denoted by each identifier in the dotted list. 
+ lookupObjects := func(name string) (objs []types.Object) { + scope := pkgScope + if pkg, suffix, ok := strings.Cut(name, "."); ok { + if obj, _ := importByName[pkg]; obj != nil { + objs = append(objs, obj) + scope = obj.Imported().Scope() + name = suffix + } + } + + if recv, method, ok := strings.Cut(name, "."); ok { + obj, ok := scope.Lookup(recv).(*types.TypeName) + if !ok { + return nil + } + objs = append(objs, obj) + t, ok := obj.Type().(*types.Named) + if !ok { + return nil + } + m, _, _ := types.LookupFieldOrMethod(t, true, tv.pkg.Types(), method) + if m == nil { + return nil + } + objs = append(objs, m) + return objs + } else { + obj := scope.Lookup(name) + if obj == nil { + return nil + } + if _, ok := obj.(*types.PkgName); !ok && !obj.Exported() { + return nil + } + objs = append(objs, obj) + return objs + + } + } + + tokenTypeByObject := func(obj types.Object) semtok.TokenType { + switch obj.(type) { + case *types.PkgName: + return semtok.TokNamespace + case *types.Func: + return semtok.TokFunction + case *types.TypeName: + return semtok.TokType + case *types.Const, *types.Var: + return semtok.TokVariable + default: + return semtok.TokComment + } + } + + pos := c.Pos() + for _, line := range strings.Split(c.Text, "\n") { + last := 0 + + for _, idx := range docLinkRegex.FindAllStringSubmatchIndex(line, -1) { + // The first group is the reference name. e.g. "X", "p.T", "p.T.M". 
+ name := line[idx[2]:idx[3]] + if objs := lookupObjects(name); len(objs) > 0 { + if last < idx[2] { + tv.token(pos+token.Pos(last), idx[2]-last, semtok.TokComment, nil) + } + offset := pos + token.Pos(idx[2]) + for i, obj := range objs { + if i > 0 { + tv.token(offset, len("."), semtok.TokComment, nil) + offset += token.Pos(len(".")) + } + id, rest, _ := strings.Cut(name, ".") + name = rest + tv.token(offset, len(id), tokenTypeByObject(obj), nil) + offset += token.Pos(len(id)) + } + last = idx[3] + } + } + if last != len(c.Text) { + tv.token(pos+token.Pos(last), len(line)-last, semtok.TokComment, nil) + } + pos += token.Pos(len(line) + 1) + } +} + +// token emits a token of the specified extent and semantics. +func (tv *tokenVisitor) token(start token.Pos, length int, typ semtok.TokenType, modifiers []string) { + if length <= 0 { + return // vscode doesn't like 0-length Tokens + } + if !start.IsValid() { + // This is not worth reporting. TODO(pjw): does it still happen? + return + } + end := start + token.Pos(length) + if start >= tv.end || end <= tv.start { + return + } + // want a line and column from start (in LSP coordinates). Ignore line directives. + rng, err := tv.pgf.PosRange(start, end) + if err != nil { + event.Error(tv.ctx, "failed to convert to range", err) + return + } + if rng.End.Line != rng.Start.Line { + // this happens if users are typing at the end of the file, but report nothing + return + } + tv.tokens = append(tv.tokens, semtok.Token{ + Line: rng.Start.Line, + Start: rng.Start.Character, + Len: rng.End.Character - rng.Start.Character, // (on same line) + Type: typ, + Modifiers: modifiers, + }) +} + +// strStack converts the stack to a string, for debugging and error messages. 
+func (tv *tokenVisitor) strStack() string { + msg := []string{"["} + for i := len(tv.stack) - 1; i >= 0; i-- { + n := tv.stack[i] + msg = append(msg, strings.TrimPrefix(fmt.Sprintf("%T", n), "*ast.")) + } + if len(tv.stack) > 0 { + pos := tv.stack[len(tv.stack)-1].Pos() + if _, err := safetoken.Offset(tv.pgf.Tok, pos); err != nil { + msg = append(msg, fmt.Sprintf("invalid position %v for %s", pos, tv.pgf.URI)) + } else { + posn := safetoken.Position(tv.pgf.Tok, pos) + msg = append(msg, fmt.Sprintf("(%s:%d,col:%d)", + filepath.Base(posn.Filename), posn.Line, posn.Column)) + } + } + msg = append(msg, "]") + return strings.Join(msg, " ") +} + +// srcLine returns the source text for n (truncated at first newline). +func (tv *tokenVisitor) srcLine(n ast.Node) string { + file := tv.pgf.Tok + line := safetoken.Line(file, n.Pos()) + start, err := safetoken.Offset(file, file.LineStart(line)) + if err != nil { + return "" + } + end := start + for ; end < len(tv.pgf.Src) && tv.pgf.Src[end] != '\n'; end++ { + + } + return string(tv.pgf.Src[start:end]) +} + +func (tv *tokenVisitor) inspect(n ast.Node) (descend bool) { + if n == nil { + tv.stack = tv.stack[:len(tv.stack)-1] // pop + return true + } + tv.stack = append(tv.stack, n) // push + defer func() { + if !descend { + tv.stack = tv.stack[:len(tv.stack)-1] // pop + } + }() + + switch n := n.(type) { + case *ast.ArrayType: + case *ast.AssignStmt: + tv.token(n.TokPos, len(n.Tok.String()), semtok.TokOperator, nil) + case *ast.BasicLit: + if strings.Contains(n.Value, "\n") { + // has to be a string. 
+ tv.multiline(n.Pos(), n.End(), semtok.TokString) + break + } + what := semtok.TokNumber + if n.Kind == token.STRING { + what = semtok.TokString + } + tv.token(n.Pos(), len(n.Value), what, nil) + case *ast.BinaryExpr: + tv.token(n.OpPos, len(n.Op.String()), semtok.TokOperator, nil) + case *ast.BlockStmt: + case *ast.BranchStmt: + tv.token(n.TokPos, len(n.Tok.String()), semtok.TokKeyword, nil) + if n.Label != nil { + tv.token(n.Label.Pos(), len(n.Label.Name), semtok.TokLabel, nil) + } + case *ast.CallExpr: + if n.Ellipsis.IsValid() { + tv.token(n.Ellipsis, len("..."), semtok.TokOperator, nil) + } + case *ast.CaseClause: + iam := "case" + if n.List == nil { + iam = "default" + } + tv.token(n.Case, len(iam), semtok.TokKeyword, nil) + case *ast.ChanType: + // chan | chan <- | <- chan + switch { + case n.Arrow == token.NoPos: + tv.token(n.Begin, len("chan"), semtok.TokKeyword, nil) + case n.Arrow == n.Begin: + tv.token(n.Arrow, 2, semtok.TokOperator, nil) + pos := tv.findKeyword("chan", n.Begin+2, n.Value.Pos()) + tv.token(pos, len("chan"), semtok.TokKeyword, nil) + case n.Arrow != n.Begin: + tv.token(n.Begin, len("chan"), semtok.TokKeyword, nil) + tv.token(n.Arrow, 2, semtok.TokOperator, nil) + } + case *ast.CommClause: + length := len("case") + if n.Comm == nil { + length = len("default") + } + tv.token(n.Case, length, semtok.TokKeyword, nil) + case *ast.CompositeLit: + case *ast.DeclStmt: + case *ast.DeferStmt: + tv.token(n.Defer, len("defer"), semtok.TokKeyword, nil) + case *ast.Ellipsis: + tv.token(n.Ellipsis, len("..."), semtok.TokOperator, nil) + case *ast.EmptyStmt: + case *ast.ExprStmt: + case *ast.Field: + case *ast.FieldList: + case *ast.ForStmt: + tv.token(n.For, len("for"), semtok.TokKeyword, nil) + case *ast.FuncDecl: + case *ast.FuncLit: + case *ast.FuncType: + if n.Func != token.NoPos { + tv.token(n.Func, len("func"), semtok.TokKeyword, nil) + } + case *ast.GenDecl: + tv.token(n.TokPos, len(n.Tok.String()), semtok.TokKeyword, nil) + case *ast.GoStmt: + 
tv.token(n.Go, len("go"), semtok.TokKeyword, nil) + case *ast.Ident: + tv.ident(n) + case *ast.IfStmt: + tv.token(n.If, len("if"), semtok.TokKeyword, nil) + if n.Else != nil { + // x.Body.End() or x.Body.End()+1, not that it matters + pos := tv.findKeyword("else", n.Body.End(), n.Else.Pos()) + tv.token(pos, len("else"), semtok.TokKeyword, nil) + } + case *ast.ImportSpec: + tv.importSpec(n) + return false + case *ast.IncDecStmt: + tv.token(n.TokPos, len(n.Tok.String()), semtok.TokOperator, nil) + case *ast.IndexExpr: + case *ast.IndexListExpr: + case *ast.InterfaceType: + tv.token(n.Interface, len("interface"), semtok.TokKeyword, nil) + case *ast.KeyValueExpr: + case *ast.LabeledStmt: + tv.token(n.Label.Pos(), len(n.Label.Name), semtok.TokLabel, []string{"definition"}) + case *ast.MapType: + tv.token(n.Map, len("map"), semtok.TokKeyword, nil) + case *ast.ParenExpr: + case *ast.RangeStmt: + tv.token(n.For, len("for"), semtok.TokKeyword, nil) + // x.TokPos == token.NoPos is legal (for range foo {}) + offset := n.TokPos + if offset == token.NoPos { + offset = n.For + } + pos := tv.findKeyword("range", offset, n.X.Pos()) + tv.token(pos, len("range"), semtok.TokKeyword, nil) + case *ast.ReturnStmt: + tv.token(n.Return, len("return"), semtok.TokKeyword, nil) + case *ast.SelectStmt: + tv.token(n.Select, len("select"), semtok.TokKeyword, nil) + case *ast.SelectorExpr: + case *ast.SendStmt: + tv.token(n.Arrow, len("<-"), semtok.TokOperator, nil) + case *ast.SliceExpr: + case *ast.StarExpr: + tv.token(n.Star, len("*"), semtok.TokOperator, nil) + case *ast.StructType: + tv.token(n.Struct, len("struct"), semtok.TokKeyword, nil) + case *ast.SwitchStmt: + tv.token(n.Switch, len("switch"), semtok.TokKeyword, nil) + case *ast.TypeAssertExpr: + if n.Type == nil { + pos := tv.findKeyword("type", n.Lparen, n.Rparen) + tv.token(pos, len("type"), semtok.TokKeyword, nil) + } + case *ast.TypeSpec: + case *ast.TypeSwitchStmt: + tv.token(n.Switch, len("switch"), semtok.TokKeyword, nil) + 
case *ast.UnaryExpr: + tv.token(n.OpPos, len(n.Op.String()), semtok.TokOperator, nil) + case *ast.ValueSpec: + // things only seen with parsing or type errors, so ignore them + case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt: + return false + // not going to see these + case *ast.File, *ast.Package: + tv.errorf("implement %T %s", n, safetoken.Position(tv.pgf.Tok, n.Pos())) + // other things we knowingly ignore + case *ast.Comment, *ast.CommentGroup: + return false + default: + tv.errorf("failed to implement %T", n) + } + return true +} + +func (tv *tokenVisitor) ident(id *ast.Ident) { + var obj types.Object + + // emit emits a token for the identifier's extent. + emit := func(tok semtok.TokenType, modifiers ...string) { + tv.token(id.Pos(), len(id.Name), tok, modifiers) + if semDebug { + q := "nil" + if obj != nil { + q = fmt.Sprintf("%T", obj.Type()) // e.g. "*types.Map" + } + log.Printf(" use %s/%T/%s got %s %v (%s)", + id.Name, obj, q, tok, modifiers, tv.strStack()) + } + } + + // definition? + obj = tv.info.Defs[id] + if obj != nil { + if tok, modifiers := tv.definitionFor(id, obj); tok != "" { + emit(tok, modifiers...) + } else if semDebug { + log.Printf(" for %s/%T/%T got '' %v (%s)", + id.Name, obj, obj.Type(), modifiers, tv.strStack()) + } + return + } + + // use? + obj = tv.info.Uses[id] + switch obj := obj.(type) { + case *types.Builtin: + emit(semtok.TokFunction, "defaultLibrary") + case *types.Const: + if is[*types.Named](obj.Type()) && + (id.Name == "iota" || id.Name == "true" || id.Name == "false") { + emit(semtok.TokVariable, "readonly", "defaultLibrary") + } else { + emit(semtok.TokVariable, "readonly") + } + case *types.Func: + emit(semtok.TokFunction) + case *types.Label: + // Labels are reliably covered by the syntax traversal. 
+ case *types.Nil: + // nil is a predeclared identifier + emit(semtok.TokVariable, "readonly", "defaultLibrary") + case *types.PkgName: + emit(semtok.TokNamespace) + case *types.TypeName: // could be a TypeParam + if is[*types.TypeParam](aliases.Unalias(obj.Type())) { + emit(semtok.TokTypeParam) + } else if is[*types.Basic](obj.Type()) { + emit(semtok.TokType, "defaultLibrary") + } else { + emit(semtok.TokType) + } + case *types.Var: + if is[*types.Signature](aliases.Unalias(obj.Type())) { + emit(semtok.TokFunction) + } else if tv.isParam(obj.Pos()) { + // variable, unless use.pos is the pos of a Field in an ancestor FuncDecl + // or FuncLit and then it's a parameter + emit(semtok.TokParameter) + } else { + emit(semtok.TokVariable) + } + case nil: + if tok, modifiers := tv.unkIdent(id); tok != "" { + emit(tok, modifiers...) + } + default: + panic(obj) + } +} + +// isParam reports whether the position is that of a parameter name of +// an enclosing function. +func (tv *tokenVisitor) isParam(pos token.Pos) bool { + for i := len(tv.stack) - 1; i >= 0; i-- { + switch n := tv.stack[i].(type) { + case *ast.FuncDecl: + for _, f := range n.Type.Params.List { + for _, id := range f.Names { + if id.Pos() == pos { + return true + } + } + } + case *ast.FuncLit: + for _, f := range n.Type.Params.List { + for _, id := range f.Names { + if id.Pos() == pos { + return true + } + } + } + } + } + return false +} + +// unkIdent handles identifiers with no types.Object (neither use nor +// def), use the parse stack. +// A lot of these only happen when the package doesn't compile, +// but in that case it is all best-effort from the parse tree. +func (tv *tokenVisitor) unkIdent(id *ast.Ident) (semtok.TokenType, []string) { + def := []string{"definition"} + n := len(tv.stack) - 2 // parent of Ident; stack is [File ... 
Ident] + if n < 0 { + tv.errorf("no stack") // can't happen + return "", nil + } + switch parent := tv.stack[n].(type) { + case *ast.BinaryExpr, *ast.UnaryExpr, *ast.ParenExpr, *ast.StarExpr, + *ast.IncDecStmt, *ast.SliceExpr, *ast.ExprStmt, *ast.IndexExpr, + *ast.ReturnStmt, *ast.ChanType, *ast.SendStmt, + *ast.ForStmt, // possibly incomplete + *ast.IfStmt, /* condition */ + *ast.KeyValueExpr, // either key or value + *ast.IndexListExpr: + return semtok.TokVariable, nil + case *ast.Ellipsis: + return semtok.TokType, nil + case *ast.CaseClause: + if n-2 >= 0 && is[ast.TypeSwitchStmt](tv.stack[n-2]) { + return semtok.TokType, nil + } + return semtok.TokVariable, nil + case *ast.ArrayType: + if id == parent.Len { + // or maybe a Type Param, but we can't just from the parse tree + return semtok.TokVariable, nil + } else { + return semtok.TokType, nil + } + case *ast.MapType: + return semtok.TokType, nil + case *ast.CallExpr: + if id == parent.Fun { + return semtok.TokFunction, nil + } + return semtok.TokVariable, nil + case *ast.SwitchStmt: + return semtok.TokVariable, nil + case *ast.TypeAssertExpr: + if id == parent.X { + return semtok.TokVariable, nil + } else if id == parent.Type { + return semtok.TokType, nil + } + case *ast.ValueSpec: + for _, p := range parent.Names { + if p == id { + return semtok.TokVariable, def + } + } + for _, p := range parent.Values { + if p == id { + return semtok.TokVariable, nil + } + } + return semtok.TokType, nil + case *ast.SelectorExpr: // e.ti.Selections[nd] is nil, so no help + if n-1 >= 0 { + if ce, ok := tv.stack[n-1].(*ast.CallExpr); ok { + // ... CallExpr SelectorExpr Ident (_.x()) + if ce.Fun == parent && parent.Sel == id { + return semtok.TokFunction, nil + } + } + } + return semtok.TokVariable, nil + case *ast.AssignStmt: + for _, p := range parent.Lhs { + // x := ..., or x = ... + if p == id { + if parent.Tok != token.DEFINE { + def = nil + } + return semtok.TokVariable, def // '_' in _ = ... 
+ } + } + // RHS, = x + return semtok.TokVariable, nil + case *ast.TypeSpec: // it's a type if it is either the Name or the Type + if id == parent.Type { + def = nil + } + return semtok.TokType, def + case *ast.Field: + // ident could be type in a field, or a method in an interface type, or a variable + if id == parent.Type { + return semtok.TokType, nil + } + if n > 2 && + is[*ast.InterfaceType](tv.stack[n-2]) && + is[*ast.FieldList](tv.stack[n-1]) { + + return semtok.TokMethod, def + } + return semtok.TokVariable, nil + case *ast.LabeledStmt: + if id == parent.Label { + return semtok.TokLabel, def + } + case *ast.BranchStmt: + if id == parent.Label { + return semtok.TokLabel, nil + } + case *ast.CompositeLit: + if parent.Type == id { + return semtok.TokType, nil + } + return semtok.TokVariable, nil + case *ast.RangeStmt: + if parent.Tok != token.DEFINE { + def = nil + } + return semtok.TokVariable, def + case *ast.FuncDecl: + return semtok.TokFunction, def + default: + tv.errorf("%T unexpected: %s %s%q", parent, id.Name, tv.strStack(), tv.srcLine(id)) + } + return "", nil +} + +func isDeprecated(n *ast.CommentGroup) bool { + if n != nil { + for _, c := range n.List { + if strings.HasPrefix(c.Text, "// Deprecated") { + return true + } + } + } + return false +} + +// definitionFor handles a defining identifier. +func (tv *tokenVisitor) definitionFor(id *ast.Ident, obj types.Object) (semtok.TokenType, []string) { + // The definition of a types.Label cannot be found by + // ascending the syntax tree, and doing so will reach the + // FuncDecl, causing us to misinterpret the label as a + // parameter (#65494). + // + // However, labels are reliably covered by the syntax + // traversal, so we don't need to use type information. 
+ if is[*types.Label](obj) { + return "", nil + } + + // PJW: look into replacing these syntactic tests with types more generally + modifiers := []string{"definition"} + for i := len(tv.stack) - 1; i >= 0; i-- { + switch ancestor := tv.stack[i].(type) { + case *ast.AssignStmt, *ast.RangeStmt: + if id.Name == "_" { + return "", nil // not really a variable + } + return semtok.TokVariable, modifiers + case *ast.GenDecl: + if isDeprecated(ancestor.Doc) { + modifiers = append(modifiers, "deprecated") + } + if ancestor.Tok == token.CONST { + modifiers = append(modifiers, "readonly") + } + return semtok.TokVariable, modifiers + case *ast.FuncDecl: + // If x is immediately under a FuncDecl, it is a function or method + if i == len(tv.stack)-2 { + if isDeprecated(ancestor.Doc) { + modifiers = append(modifiers, "deprecated") + } + if ancestor.Recv != nil { + return semtok.TokMethod, modifiers + } + return semtok.TokFunction, modifiers + } + // if x < ... < FieldList < FuncDecl, this is the receiver, a variable + // PJW: maybe not. it might be a typeparameter in the type of the receiver + if is[*ast.FieldList](tv.stack[i+1]) { + if is[*types.TypeName](obj) { + return semtok.TokTypeParam, modifiers + } + return semtok.TokVariable, nil + } + // if x < ... < FieldList < FuncType < FuncDecl, this is a param + return semtok.TokParameter, modifiers + case *ast.FuncType: + if isTypeParam(id, ancestor) { + return semtok.TokTypeParam, modifiers + } + return semtok.TokParameter, modifiers + case *ast.InterfaceType: + return semtok.TokMethod, modifiers + case *ast.TypeSpec: + // GenDecl/Typespec/FuncType/FieldList/Field/Ident + // (type A func(b uint64)) (err error) + // b and err should not be semtok.TokType, but semtok.TokVariable + // and in GenDecl/TpeSpec/StructType/FieldList/Field/Ident + // (type A struct{b uint64} + // but on type B struct{C}), C is a type, but is not being defined. 
+ // GenDecl/TypeSpec/FieldList/Field/Ident is a typeParam + if is[*ast.FieldList](tv.stack[i+1]) { + return semtok.TokTypeParam, modifiers + } + fldm := tv.stack[len(tv.stack)-2] + if fld, ok := fldm.(*ast.Field); ok { + // if len(fld.names) == 0 this is a semtok.TokType, being used + if len(fld.Names) == 0 { + return semtok.TokType, nil + } + return semtok.TokVariable, modifiers + } + return semtok.TokType, modifiers + } + } + // can't happen + tv.errorf("failed to find the decl for %s", safetoken.Position(tv.pgf.Tok, id.Pos())) + return "", nil +} + +func isTypeParam(id *ast.Ident, t *ast.FuncType) bool { + if tp := t.TypeParams; tp != nil { + for _, p := range tp.List { + for _, n := range p.Names { + if id == n { + return true + } + } + } + } + return false +} + +// multiline emits a multiline token (`string` or /*comment*/). +func (tv *tokenVisitor) multiline(start, end token.Pos, tok semtok.TokenType) { + // TODO(adonovan): test with non-ASCII. + + f := tv.fset.File(start) + // the hard part is finding the lengths of lines. include the \n + length := func(line int) int { + n := f.LineStart(line) + if line >= f.LineCount() { + return f.Size() - int(n) + } + return int(f.LineStart(line+1) - n) + } + spos := safetoken.StartPosition(tv.fset, start) + epos := safetoken.EndPosition(tv.fset, end) + sline := spos.Line + eline := epos.Line + // first line is from spos.Column to end + tv.token(start, length(sline)-spos.Column, tok, nil) // leng(sline)-1 - (spos.Column-1) + for i := sline + 1; i < eline; i++ { + // intermediate lines are from 1 to end + tv.token(f.LineStart(i), length(i)-1, tok, nil) // avoid the newline + } + // last line is from 1 to epos.Column + tv.token(f.LineStart(eline), epos.Column-1, tok, nil) // columns are 1-based +} + +// findKeyword returns the position of a keyword by searching within +// the specified range, for when its cannot be exactly known from the AST. 
+func (tv *tokenVisitor) findKeyword(keyword string, start, end token.Pos) token.Pos { + // TODO(adonovan): use safetoken.Offset. + offset := int(start) - tv.pgf.Tok.Base() + last := int(end) - tv.pgf.Tok.Base() + buf := tv.pgf.Src + idx := bytes.Index(buf[offset:last], []byte(keyword)) + if idx != -1 { + return start + token.Pos(idx) + } + //(in unparsable programs: type _ <-<-chan int) + tv.errorf("not found:%s %v", keyword, safetoken.StartPosition(tv.fset, start)) + return token.NoPos +} + +func (tv *tokenVisitor) importSpec(spec *ast.ImportSpec) { + // a local package name or the last component of the Path + if spec.Name != nil { + name := spec.Name.String() + if name != "_" && name != "." { + tv.token(spec.Name.Pos(), len(name), semtok.TokNamespace, nil) + } + return // don't mark anything for . or _ + } + importPath := metadata.UnquoteImportPath(spec) + if importPath == "" { + return + } + // Import strings are implementation defined. Try to match with parse information. + depID := tv.metadata.DepsByImpPath[importPath] + if depID == "" { + return + } + depMD := tv.metadataSource.Metadata(depID) + if depMD == nil { + // unexpected, but impact is that maybe some import is not colored + return + } + // Check whether the original literal contains the package's declared name. + j := strings.LastIndex(spec.Path.Value, string(depMD.Name)) + if j < 0 { + // Package name does not match import path, so there is nothing to report. + return + } + // Report virtual declaration at the position of the substring. + start := spec.Path.Pos() + token.Pos(j) + tv.token(start, len(depMD.Name), semtok.TokNamespace, nil) +} + +// errorf logs an error and reports a bug. +func (tv *tokenVisitor) errorf(format string, args ...any) { + msg := fmt.Sprintf(format, args...) 
+ bug.Report(msg) + event.Error(tv.ctx, tv.strStack(), errors.New(msg)) +} + +var godirectives = map[string]struct{}{ + // https://pkg.go.dev/cmd/compile + "noescape": {}, + "uintptrescapes": {}, + "noinline": {}, + "norace": {}, + "nosplit": {}, + "linkname": {}, + + // https://pkg.go.dev/go/build + "build": {}, + "binary-only-package": {}, + "embed": {}, +} + +// Tokenize godirective at the start of the comment c, if any, and the surrounding comment. +// If there is any failure, emits the entire comment as a TokComment token. +// Directives are highlighted as-is, even if used incorrectly. Typically there are +// dedicated analyzers that will warn about misuse. +func (tv *tokenVisitor) godirective(c *ast.Comment) { + // First check if '//go:directive args...' is a valid directive. + directive, args, _ := strings.Cut(c.Text, " ") + kind, _ := stringsCutPrefix(directive, "//go:") + if _, ok := godirectives[kind]; !ok { + // Unknown 'go:' directive. + tv.token(c.Pos(), len(c.Text), semtok.TokComment, nil) + return + } + + // Make the 'go:directive' part stand out, the rest is comments. + tv.token(c.Pos(), len("//"), semtok.TokComment, nil) + + directiveStart := c.Pos() + token.Pos(len("//")) + tv.token(directiveStart, len(directive[len("//"):]), semtok.TokNamespace, nil) + + if len(args) > 0 { + tailStart := c.Pos() + token.Pos(len(directive)+len(" ")) + tv.token(tailStart, len(args), semtok.TokComment, nil) + } +} + +// Go 1.20 strings.CutPrefix. +func stringsCutPrefix(s, prefix string) (after string, found bool) { + if !strings.HasPrefix(s, prefix) { + return s, false + } + return s[len(prefix):], true +} + +func is[T any](x any) bool { + _, ok := x.(T) + return ok +} diff --git a/gopls/internal/golang/signature_help.go b/gopls/internal/golang/signature_help.go new file mode 100644 index 00000000000..2f13cd0afdc --- /dev/null +++ b/gopls/internal/golang/signature_help.go @@ -0,0 +1,204 @@ +// Copyright 2018 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "context" + "fmt" + "go/ast" + "go/token" + "go/types" + "strings" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/typesutil" + "golang.org/x/tools/internal/event" +) + +func SignatureHelp(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.SignatureInformation, int, error) { + ctx, done := event.Start(ctx, "golang.SignatureHelp") + defer done() + + // We need full type-checking here, as we must type-check function bodies in + // order to provide signature help at the requested position. + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, 0, fmt.Errorf("getting file for SignatureHelp: %w", err) + } + pos, err := pgf.PositionPos(position) + if err != nil { + return nil, 0, err + } + // Find a call expression surrounding the query position. + var callExpr *ast.CallExpr + path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos) + if path == nil { + return nil, 0, fmt.Errorf("cannot find node enclosing position") + } +FindCall: + for _, node := range path { + switch node := node.(type) { + case *ast.CallExpr: + if pos >= node.Lparen && pos <= node.Rparen { + callExpr = node + break FindCall + } + case *ast.FuncLit, *ast.FuncType: + // The user is within an anonymous function, + // which may be the parameter to the *ast.CallExpr. + // Don't show signature help in this case. 
+ return nil, 0, fmt.Errorf("no signature help within a function declaration") + case *ast.BasicLit: + if node.Kind == token.STRING { + return nil, 0, fmt.Errorf("no signature help within a string literal") + } + } + + } + if callExpr == nil || callExpr.Fun == nil { + return nil, 0, fmt.Errorf("cannot find an enclosing function") + } + + info := pkg.TypesInfo() + + // Get the type information for the function being called. + var sig *types.Signature + if tv, ok := info.Types[callExpr.Fun]; !ok { + return nil, 0, fmt.Errorf("cannot get type for Fun %[1]T (%[1]v)", callExpr.Fun) + } else if tv.IsType() { + return nil, 0, fmt.Errorf("this is a conversion to %s, not a call", tv.Type) + } else if sig, ok = tv.Type.Underlying().(*types.Signature); !ok { + return nil, 0, fmt.Errorf("cannot find signature for Fun %[1]T (%[1]v)", callExpr.Fun) + } + // Inv: sig != nil + + qf := typesutil.FileQualifier(pgf.File, pkg.Types(), info) + + // Get the object representing the function, if available. + // There is no object in certain cases such as calling a function returned by + // a function (e.g. "foo()()"). + var obj types.Object + switch t := callExpr.Fun.(type) { + case *ast.Ident: + obj = info.ObjectOf(t) + case *ast.SelectorExpr: + obj = info.ObjectOf(t.Sel) + } + + // Call to built-in? + if obj != nil && !obj.Pos().IsValid() { + // function? + if obj, ok := obj.(*types.Builtin); ok { + return builtinSignature(ctx, snapshot, callExpr, obj.Name(), pos) + } + + // method (only error.Error)? 
+ if fn, ok := obj.(*types.Func); ok && fn.Name() == "Error" { + return &protocol.SignatureInformation{ + Label: "Error()", + Documentation: stringToSigInfoDocumentation("Error returns the error message.", snapshot.Options()), + }, 0, nil + } + + return nil, 0, bug.Errorf("call to unexpected built-in %v (%T)", obj, obj) + } + + activeParam := activeParameter(callExpr, sig.Params().Len(), sig.Variadic(), pos) + + var ( + name string + comment *ast.CommentGroup + ) + if obj != nil { + d, err := HoverDocForObject(ctx, snapshot, pkg.FileSet(), obj) + if err != nil { + return nil, 0, err + } + name = obj.Name() + comment = d + } else { + name = "func" + } + mq := MetadataQualifierForFile(snapshot, pgf.File, pkg.Metadata()) + s, err := NewSignature(ctx, snapshot, pkg, sig, comment, qf, mq) + if err != nil { + return nil, 0, err + } + paramInfo := make([]protocol.ParameterInformation, 0, len(s.params)) + for _, p := range s.params { + paramInfo = append(paramInfo, protocol.ParameterInformation{Label: p}) + } + return &protocol.SignatureInformation{ + Label: name + s.Format(), + Documentation: stringToSigInfoDocumentation(s.doc, snapshot.Options()), + Parameters: paramInfo, + }, activeParam, nil +} + +func builtinSignature(ctx context.Context, snapshot *cache.Snapshot, callExpr *ast.CallExpr, name string, pos token.Pos) (*protocol.SignatureInformation, int, error) { + sig, err := NewBuiltinSignature(ctx, snapshot, name) + if err != nil { + return nil, 0, err + } + paramInfo := make([]protocol.ParameterInformation, 0, len(sig.params)) + for _, p := range sig.params { + paramInfo = append(paramInfo, protocol.ParameterInformation{Label: p}) + } + activeParam := activeParameter(callExpr, len(sig.params), sig.variadic, pos) + return &protocol.SignatureInformation{ + Label: sig.name + sig.Format(), + Documentation: stringToSigInfoDocumentation(sig.doc, snapshot.Options()), + Parameters: paramInfo, + }, activeParam, nil +} + +func activeParameter(callExpr *ast.CallExpr, numParams 
int, variadic bool, pos token.Pos) (activeParam int) { + if len(callExpr.Args) == 0 { + return 0 + } + // First, check if the position is even in the range of the arguments. + start, end := callExpr.Lparen, callExpr.Rparen + if !(start <= pos && pos <= end) { + return 0 + } + for _, expr := range callExpr.Args { + if start == token.NoPos { + start = expr.Pos() + } + end = expr.End() + if start <= pos && pos <= end { + break + } + // Don't advance the active parameter for the last parameter of a variadic function. + if !variadic || activeParam < numParams-1 { + activeParam++ + } + start = expr.Pos() + 1 // to account for commas + } + return activeParam +} + +func stringToSigInfoDocumentation(s string, options *settings.Options) *protocol.Or_SignatureInformation_documentation { + v := s + k := protocol.PlainText + if options.PreferredContentFormat == protocol.Markdown { + v = CommentToMarkdown(s, options) + // whether or not content is newline terminated may not matter for LSP clients, + // but our tests expect trailing newlines to be stripped. + v = strings.TrimSuffix(v, "\n") // TODO(pjw): change the golden files + k = protocol.Markdown + } + return &protocol.Or_SignatureInformation_documentation{ + Value: protocol.MarkupContent{ + Kind: k, + Value: v, + }, + } +} diff --git a/gopls/internal/golang/snapshot.go b/gopls/internal/golang/snapshot.go new file mode 100644 index 00000000000..c381c962d08 --- /dev/null +++ b/gopls/internal/golang/snapshot.go @@ -0,0 +1,98 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package golang + +import ( + "context" + "fmt" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/protocol" +) + +// NarrowestMetadataForFile returns metadata for the narrowest package +// (the one with the fewest files) that encloses the specified file. +// The result may be a test variant, but never an intermediate test variant. +func NarrowestMetadataForFile(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) (*metadata.Package, error) { + mps, err := snapshot.MetadataForFile(ctx, uri) + if err != nil { + return nil, err + } + metadata.RemoveIntermediateTestVariants(&mps) + if len(mps) == 0 { + return nil, fmt.Errorf("no package metadata for file %s", uri) + } + return mps[0], nil +} + +// NarrowestPackageForFile is a convenience function that selects the narrowest +// non-ITV package to which this file belongs, type-checks it in the requested +// mode (full or workspace), and returns it, along with the parse tree of that +// file. +// +// The "narrowest" package is the one with the fewest number of files that +// includes the given file. This solves the problem of test variants, as the +// test will have more files than the non-test package. +// +// An intermediate test variant (ITV) package has identical source to a regular +// package but resolves imports differently. gopls should never need to +// type-check them. +// +// Type-checking is expensive. Call snapshot.ParseGo if all you need is a parse +// tree, or snapshot.MetadataForFile if you only need metadata. 
+func NarrowestPackageForFile(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) (*cache.Package, *parsego.File, error) { + return selectPackageForFile(ctx, snapshot, uri, func(metas []*metadata.Package) *metadata.Package { return metas[0] }) +} + +// WidestPackageForFile is a convenience function that selects the widest +// non-ITV package to which this file belongs, type-checks it in the requested +// mode (full or workspace), and returns it, along with the parse tree of that +// file. +// +// The "widest" package is the one with the most number of files that includes +// the given file. Which is the test variant if one exists. +// +// An intermediate test variant (ITV) package has identical source to a regular +// package but resolves imports differently. gopls should never need to +// type-check them. +// +// Type-checking is expensive. Call snapshot.ParseGo if all you need is a parse +// tree, or snapshot.MetadataForFile if you only need metadata. +func WidestPackageForFile(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) (*cache.Package, *parsego.File, error) { + return selectPackageForFile(ctx, snapshot, uri, func(metas []*metadata.Package) *metadata.Package { return metas[len(metas)-1] }) +} + +func selectPackageForFile(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, selector func([]*metadata.Package) *metadata.Package) (*cache.Package, *parsego.File, error) { + mps, err := snapshot.MetadataForFile(ctx, uri) + if err != nil { + return nil, nil, err + } + metadata.RemoveIntermediateTestVariants(&mps) + if len(mps) == 0 { + return nil, nil, fmt.Errorf("no package metadata for file %s", uri) + } + mp := selector(mps) + pkgs, err := snapshot.TypeCheck(ctx, mp.ID) + if err != nil { + return nil, nil, err + } + pkg := pkgs[0] + pgf, err := pkg.File(uri) + if err != nil { + return nil, nil, err // "can't happen" + } + return pkg, pgf, err +} + +type ( + PackageID = metadata.PackageID + 
// stubMethodsFixer returns a suggested fix to declare the missing
// methods of the concrete type that is assigned to an interface type
// at the cursor position.
//
// The returned FileSet covers the file that declares the concrete type
// (which may differ from pgf); the SuggestedFix edits are expressed
// against that file.
func stubMethodsFixer(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) {
	nodes, _ := astutil.PathEnclosingInterval(pgf.File, start, end)
	si := stubmethods.GetStubInfo(pkg.FileSet(), pkg.TypesInfo(), nodes, start)
	if si == nil {
		return nil, nil, fmt.Errorf("nil interface request")
	}

	// A function-local type cannot be stubbed
	// since there's nowhere to put the methods.
	conc := si.Concrete.Obj()
	if conc.Parent() != conc.Pkg().Scope() {
		return nil, nil, fmt.Errorf("local type %q cannot be stubbed", conc.Name())
	}

	// Parse the file declaring the concrete type.
	//
	// Beware: declPGF is not necessarily covered by pkg.FileSet() or si.Fset.
	declPGF, _, err := parseFull(ctx, snapshot, si.Fset, conc.Pos())
	if err != nil {
		return nil, nil, fmt.Errorf("failed to parse file %q declaring implementation type: %w", declPGF.URI, err)
	}
	if declPGF.Fixed() {
		return nil, nil, fmt.Errorf("file contains parse errors: %s", declPGF.URI)
	}

	// Find metadata for the concrete type's declaring package
	// as we'll need its import mapping.
	declMeta := findFileInDeps(snapshot, pkg.Metadata(), declPGF.URI)
	if declMeta == nil {
		return nil, nil, bug.Errorf("can't find metadata for file %s among dependencies of %s", declPGF.URI, pkg)
	}

	// Record all direct methods of the current object
	concreteFuncs := make(map[string]struct{})
	for i := 0; i < si.Concrete.NumMethods(); i++ {
		concreteFuncs[si.Concrete.Method(i).Name()] = struct{}{}
	}

	// Find subset of interface methods that the concrete type lacks.
	ifaceType := si.Interface.Type().Underlying().(*types.Interface)

	// missingFn describes one interface method the concrete type must gain.
	type missingFn struct {
		fn *types.Func
		// needSubtle, if non-empty, is a warning comment emitted above the
		// generated method when it shadows a promoted method of a field.
		needSubtle string
	}

	var (
		missing                  []missingFn
		concreteStruct, isStruct = si.Concrete.Origin().Underlying().(*types.Struct)
	)

	for i := 0; i < ifaceType.NumMethods(); i++ {
		imethod := ifaceType.Method(i)
		cmethod, index, _ := types.LookupFieldOrMethod(si.Concrete, si.Pointer, imethod.Pkg(), imethod.Name())
		if cmethod == nil {
			missing = append(missing, missingFn{fn: imethod})
			continue
		}

		if _, ok := cmethod.(*types.Var); ok {
			// len(LookupFieldOrMethod.index) = 1 => conflict, >1 => shadow.
			return nil, nil, fmt.Errorf("adding method %s.%s would conflict with (or shadow) existing field",
				conc.Name(), imethod.Name())
		}

		if _, exist := concreteFuncs[imethod.Name()]; exist {
			if !types.Identical(cmethod.Type(), imethod.Type()) {
				return nil, nil, fmt.Errorf("method %s.%s already exists but has the wrong type: got %s, want %s",
					conc.Name(), imethod.Name(), cmethod.Type(), imethod.Type())
			}
			continue
		}

		mf := missingFn{fn: imethod}
		if isStruct && len(index) > 0 {
			// The method was found on an embedded field; the new method
			// will shadow the promoted one, so warn in a comment.
			field := concreteStruct.Field(index[0])

			fn := field.Name()
			if is[*types.Pointer](field.Type()) {
				fn = "*" + fn
			}

			mf.needSubtle = fmt.Sprintf("// Subtle: this method shadows the method (%s).%s of %s.%s.\n", fn, imethod.Name(), si.Concrete.Obj().Name(), field.Name())
		}

		missing = append(missing, mf)
	}
	if len(missing) == 0 {
		return nil, nil, fmt.Errorf("no missing methods found")
	}

	// Build import environment for the declaring file.
	// (typesutil.FileQualifier works only for complete
	// import mappings, and requires types.)
	importEnv := make(map[ImportPath]string) // value is local name
	for _, imp := range declPGF.File.Imports {
		importPath := metadata.UnquoteImportPath(imp)
		var name string
		if imp.Name != nil {
			name = imp.Name.Name
			if name == "_" {
				continue
			} else if name == "." {
				name = "" // see types.Qualifier
			}
		} else {
			// Use the correct name from the metadata of the imported
			// package---not a guess based on the import path.
			mp := snapshot.Metadata(declMeta.DepsByImpPath[importPath])
			if mp == nil {
				continue // can't happen?
			}
			name = string(mp.Name)
		}
		importEnv[importPath] = name // latest alias wins
	}

	// Create a package name qualifier that uses the
	// locally appropriate imported package name.
	// It records any needed new imports.
	// TODO(adonovan): factor with golang.FormatVarType?
	//
	// Prior to CL 469155 this logic preserved any renaming
	// imports from the file that declares the interface
	// method--ostensibly the preferred name for imports of
	// frequently renamed packages such as protobufs.
	// Now we use the package's declared name. If this turns out
	// to be a mistake, then use parseHeader(si.iface.Pos()).
	//
	type newImport struct{ name, importPath string }
	var newImports []newImport // for AddNamedImport
	qual := func(pkg *types.Package) string {
		// TODO(adonovan): don't ignore vendor prefix.
		//
		// Ignore the current package import.
		if pkg.Path() == conc.Pkg().Path() {
			return ""
		}

		importPath := ImportPath(pkg.Path())
		name, ok := importEnv[importPath]
		if !ok {
			// Insert new import using package's declared name.
			//
			// TODO(adonovan): resolve conflict between declared
			// name and existing file-level (declPGF.File.Imports)
			// or package-level (si.Concrete.Pkg.Scope) decls by
			// generating a fresh name.
			name = pkg.Name()
			importEnv[importPath] = name
			new := newImport{importPath: string(importPath)}
			// For clarity, use a renaming import whenever the
			// local name does not match the path's last segment.
			if name != pathpkg.Base(trimVersionSuffix(new.importPath)) {
				new.name = name
			}
			newImports = append(newImports, new)
		}
		return name
	}

	// Format interface name (used only in a comment).
	iface := si.Interface.Name()
	if ipkg := si.Interface.Pkg(); ipkg != nil && ipkg != conc.Pkg() {
		iface = ipkg.Name() + "." + iface
	}

	// Pointer receiver?
	var star string
	if si.Pointer {
		star = "*"
	}

	// If there are any that have named receiver, choose the first one.
	// Otherwise, use lowercase for the first letter of the object.
	rn := strings.ToLower(si.Concrete.Obj().Name()[0:1])
	for i := 0; i < si.Concrete.NumMethods(); i++ {
		if recv := si.Concrete.Method(i).Type().(*types.Signature).Recv(); recv.Name() != "" {
			rn = recv.Name()
			break
		}
	}

	// Check for receiver name conflicts
	checkRecvName := func(tuple *types.Tuple) bool {
		for i := 0; i < tuple.Len(); i++ {
			if rn == tuple.At(i).Name() {
				return true
			}
		}
		return false
	}

	// Format the new methods.
	var newMethods bytes.Buffer

	for index := range missing {
		mrn := rn + " "
		sig := missing[index].fn.Type().(*types.Signature)
		// Omit the receiver name when it would collide with a
		// parameter or result name of this method.
		if checkRecvName(sig.Params()) || checkRecvName(sig.Results()) {
			mrn = ""
		}

		fmt.Fprintf(&newMethods, `// %s implements %s.
%sfunc (%s%s%s%s) %s%s {
	panic("unimplemented")
}
`,
			missing[index].fn.Name(),
			iface,
			missing[index].needSubtle,
			mrn,
			star,
			si.Concrete.Obj().Name(),
			FormatTypeParams(si.Concrete.TypeParams()),
			missing[index].fn.Name(),
			strings.TrimPrefix(types.TypeString(missing[index].fn.Type(), qual), "func"))
	}

	// Compute insertion point for new methods:
	// after the top-level declaration enclosing the (package-level) type.
	insertOffset, err := safetoken.Offset(declPGF.Tok, declPGF.File.End())
	if err != nil {
		return nil, nil, bug.Errorf("internal error: end position outside file bounds: %v", err)
	}
	concOffset, err := safetoken.Offset(si.Fset.File(conc.Pos()), conc.Pos())
	if err != nil {
		return nil, nil, bug.Errorf("internal error: finding type decl offset: %v", err)
	}
	for _, decl := range declPGF.File.Decls {
		declEndOffset, err := safetoken.Offset(declPGF.Tok, decl.End())
		if err != nil {
			return nil, nil, bug.Errorf("internal error: finding decl offset: %v", err)
		}
		if declEndOffset > concOffset {
			insertOffset = declEndOffset
			break
		}
	}

	// Splice the new methods into the file content.
	var buf bytes.Buffer
	input := declPGF.Mapper.Content // unfixed content of file
	buf.Write(input[:insertOffset])
	buf.WriteByte('\n')
	io.Copy(&buf, &newMethods)
	buf.Write(input[insertOffset:])

	// Re-parse the file.
	fset := token.NewFileSet()
	newF, err := parser.ParseFile(fset, declPGF.URI.Path(), buf.Bytes(), parser.ParseComments)
	if err != nil {
		return nil, nil, fmt.Errorf("could not reparse file: %w", err)
	}

	// Splice the new imports into the syntax tree.
	for _, imp := range newImports {
		astutil.AddNamedImport(fset, newF, imp.name, imp.importPath)
	}

	// Pretty-print.
	var output bytes.Buffer
	if err := format.Node(&output, fset, newF); err != nil {
		return nil, nil, fmt.Errorf("format.Node: %w", err)
	}

	// Report the diff.
	diffs := diff.Bytes(input, output.Bytes())
	return tokeninternal.FileSetFor(declPGF.Tok), // edits use declPGF.Tok
		&analysis.SuggestedFix{TextEdits: diffToTextEdits(declPGF.Tok, diffs)},
		nil
}

// diffToTextEdits converts diff (offset-based) edits to analysis (token.Pos) form.
func diffToTextEdits(tok *token.File, diffs []diff.Edit) []analysis.TextEdit {
	edits := make([]analysis.TextEdit, 0, len(diffs))
	for _, edit := range diffs {
		edits = append(edits, analysis.TextEdit{
			Pos:     tok.Pos(edit.Start),
			End:     tok.Pos(edit.End),
			NewText: []byte(edit.New),
		})
	}
	return edits
}

// trimVersionSuffix removes a trailing "/v2" (etc) suffix from a module path.
//
// This is only a heuristic as to the package's declared name, and
// should only be used for stylistic decisions, such as whether it
// would be clearer to use an explicit local name in the import
// because the declared name differs from the result of this function.
// When the name matters for correctness, look up the imported
// package's Metadata.Name.
func trimVersionSuffix(path string) string {
	dir, base := pathpkg.Split(path)
	// A final path element of the form "vN" (v followed only by digits)
	// is treated as a major-version suffix and dropped.
	if len(base) > 1 && base[0] == 'v' && strings.Trim(base[1:], "0123456789") == "" {
		return dir // sans "/v2"
	}
	return path
}

// DocumentSymbols returns the hierarchical symbol outline
// (textDocument/documentSymbol) for the Go file fh, derived purely from
// its syntax tree.
func DocumentSymbols(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.DocumentSymbol, error) {
	ctx, done := event.Start(ctx, "golang.DocumentSymbols")
	defer done()

	pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full)
	if err != nil {
		return nil, fmt.Errorf("getting file for DocumentSymbols: %w", err)
	}

	// Build symbols for file declarations. When encountering a declaration with
	// errors (typically because positions are invalid), we skip the declaration
	// entirely. VS Code fails to show any symbols if one of the top-level
	// symbols is missing position information.
	var symbols []protocol.DocumentSymbol
	for _, decl := range pgf.File.Decls {
		switch decl := decl.(type) {
		case *ast.FuncDecl:
			if decl.Name.Name == "_" {
				continue // blank-named declarations are unreferenceable; omit
			}
			fs, err := funcSymbol(pgf.Mapper, pgf.Tok, decl)
			if err == nil {
				// If function is a method, prepend the type of the method.
				if decl.Recv != nil && len(decl.Recv.List) > 0 {
					fs.Name = fmt.Sprintf("(%s).%s", types.ExprString(decl.Recv.List[0].Type), fs.Name)
				}
				symbols = append(symbols, fs)
			}
		case *ast.GenDecl:
			for _, spec := range decl.Specs {
				switch spec := spec.(type) {
				case *ast.TypeSpec:
					if spec.Name.Name == "_" {
						continue
					}
					ts, err := typeSymbol(pgf.Mapper, pgf.Tok, spec)
					if err == nil {
						symbols = append(symbols, ts)
					}
				case *ast.ValueSpec:
					for _, name := range spec.Names {
						if name.Name == "_" {
							continue
						}
						vs, err := varSymbol(pgf.Mapper, pgf.Tok, spec, name, decl.Tok == token.CONST)
						if err == nil {
							symbols = append(symbols, vs)
						}
					}
				}
			}
		}
	}
	return symbols, nil
}

// funcSymbol builds the DocumentSymbol for a function or method
// declaration. The symbol's range covers the whole declaration; the
// selection range covers just the name.
func funcSymbol(m *protocol.Mapper, tf *token.File, decl *ast.FuncDecl) (protocol.DocumentSymbol, error) {
	s := protocol.DocumentSymbol{
		Name: decl.Name.Name,
		Kind: protocol.Function,
	}
	if decl.Recv != nil {
		s.Kind = protocol.Method
	}
	var err error
	s.Range, err = m.NodeRange(tf, decl)
	if err != nil {
		return protocol.DocumentSymbol{}, err
	}
	s.SelectionRange, err = m.NodeRange(tf, decl.Name)
	if err != nil {
		return protocol.DocumentSymbol{}, err
	}
	s.Detail = types.ExprString(decl.Type)
	return s, nil
}

// typeSymbol builds the DocumentSymbol for a type declaration,
// delegating kind/detail/children to typeDetails.
func typeSymbol(m *protocol.Mapper, tf *token.File, spec *ast.TypeSpec) (protocol.DocumentSymbol, error) {
	s := protocol.DocumentSymbol{
		Name: spec.Name.Name,
	}
	var err error
	s.Range, err = m.NodeRange(tf, spec)
	if err != nil {
		return protocol.DocumentSymbol{}, err
	}
	s.SelectionRange, err = m.NodeRange(tf, spec.Name)
	if err != nil {
		return protocol.DocumentSymbol{}, err
	}
	s.Kind, s.Detail, s.Children = typeDetails(m, tf, spec.Type)
	return s, nil
}

// typeDetails maps a type expression to a symbol kind, a short detail
// string, and child symbols (struct fields or interface methods).
func typeDetails(m *protocol.Mapper, tf *token.File, typExpr ast.Expr) (kind protocol.SymbolKind, detail string, children []protocol.DocumentSymbol) {
	switch typExpr := typExpr.(type) {
	case *ast.StructType:
		kind = protocol.Struct
		children = fieldListSymbols(m, tf, typExpr.Fields, protocol.Field)
		if len(children) > 0 {
			detail = "struct{...}"
		} else {
			detail = "struct{}"
		}

	// Find interface methods and embedded types.
	case *ast.InterfaceType:
		kind = protocol.Interface
		children = fieldListSymbols(m, tf, typExpr.Methods, protocol.Method)
		if len(children) > 0 {
			detail = "interface{...}"
		} else {
			detail = "interface{}"
		}

	case *ast.FuncType:
		kind = protocol.Function
		detail = types.ExprString(typExpr)

	default:
		kind = protocol.Class // catch-all, for cases where we don't know the kind syntactically
		detail = types.ExprString(typExpr)
	}
	return
}

// fieldListSymbols builds child symbols for the entries of a struct
// field list or interface method list. fieldKind is the kind assigned
// to named, non-embedded entries.
func fieldListSymbols(m *protocol.Mapper, tf *token.File, fields *ast.FieldList, fieldKind protocol.SymbolKind) []protocol.DocumentSymbol {
	if fields == nil {
		return nil
	}

	var symbols []protocol.DocumentSymbol
	for _, field := range fields.List {
		detail, children := "", []protocol.DocumentSymbol(nil)
		if field.Type != nil {
			_, detail, children = typeDetails(m, tf, field.Type)
		}
		if len(field.Names) == 0 { // embedded interface or struct field
			// By default, use the formatted type details as the name of this field.
			// This handles potentially invalid syntax, as well as type embeddings in
			// interfaces.
			child := protocol.DocumentSymbol{
				Name:     detail,
				Kind:     protocol.Field, // consider all embeddings to be fields
				Children: children,
			}

			// If the field is a valid embedding, promote the type name to field
			// name.
			selection := field.Type
			if id := embeddedIdent(field.Type); id != nil {
				child.Name = id.Name
				child.Detail = detail
				selection = id
			}

			if rng, err := m.NodeRange(tf, field.Type); err == nil {
				child.Range = rng
			}
			if rng, err := m.NodeRange(tf, selection); err == nil {
				child.SelectionRange = rng
			}

			symbols = append(symbols, child)
		} else {
			for _, name := range field.Names {
				child := protocol.DocumentSymbol{
					Name:     name.Name,
					Kind:     fieldKind,
					Detail:   detail,
					Children: children,
				}

				if rng, err := m.NodeRange(tf, field); err == nil {
					child.Range = rng
				}
				if rng, err := m.NodeRange(tf, name); err == nil {
					child.SelectionRange = rng
				}

				symbols = append(symbols, child)
			}
		}

	}
	return symbols
}

// varSymbol builds the DocumentSymbol for one name of a var or const
// declaration; isConst selects the Constant kind.
func varSymbol(m *protocol.Mapper, tf *token.File, spec *ast.ValueSpec, name *ast.Ident, isConst bool) (protocol.DocumentSymbol, error) {
	s := protocol.DocumentSymbol{
		Name: name.Name,
		Kind: protocol.Variable,
	}
	if isConst {
		s.Kind = protocol.Constant
	}
	var err error
	s.Range, err = m.NodeRange(tf, spec)
	if err != nil {
		return protocol.DocumentSymbol{}, err
	}
	s.SelectionRange, err = m.NodeRange(tf, name)
	if err != nil {
		return protocol.DocumentSymbol{}, err
	}
	if spec.Type != nil { // type may be missing from the syntax
		_, s.Detail, s.Children = typeDetails(m, tf, spec.Type)
	}
	return s, nil
}
-package source +package golang import ( "context" "fmt" "go/token" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/internal/event" ) // TypeDefinition handles the textDocument/typeDefinition request for Go files. -func TypeDefinition(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) ([]protocol.Location, error) { - ctx, done := event.Start(ctx, "source.TypeDefinition") +func TypeDefinition(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]protocol.Location, error) { + ctx, done := event.Start(ctx, "golang.TypeDefinition") defer done() pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) diff --git a/gopls/internal/lsp/source/types_format.go b/gopls/internal/golang/types_format.go similarity index 92% rename from gopls/internal/lsp/source/types_format.go rename to gopls/internal/golang/types_format.go index 8ea81f5db27..8cc98a98bb1 100644 --- a/gopls/internal/lsp/source/types_format.go +++ b/gopls/internal/golang/types_format.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package source +package golang import ( "bytes" @@ -15,8 +15,10 @@ import ( "go/types" "strings" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/event/tag" "golang.org/x/tools/internal/tokeninternal" @@ -25,14 +27,13 @@ import ( // FormatType returns the detail and kind for a types.Type. 
func FormatType(typ types.Type, qf types.Qualifier) (detail string, kind protocol.CompletionItemKind) { + typ = typ.Underlying() if types.IsInterface(typ) { detail = "interface{...}" kind = protocol.InterfaceCompletion } else if _, ok := typ.(*types.Struct); ok { detail = "struct{...}" kind = protocol.StructCompletion - } else if typ != typ.Underlying() { - detail, kind = FormatType(typ.Underlying(), qf) } else { detail = types.TypeString(typ, qf) kind = protocol.ClassCompletion @@ -87,7 +88,7 @@ func (s *signature) Params() []string { // NewBuiltinSignature returns signature for the builtin object with a given // name, if a builtin object with the name exists. -func NewBuiltinSignature(ctx context.Context, s Snapshot, name string) (*signature, error) { +func NewBuiltinSignature(ctx context.Context, s *cache.Snapshot, name string) (*signature, error) { builtin, err := s.BuiltinFile(ctx) if err != nil { return nil, err @@ -116,9 +117,9 @@ func NewBuiltinSignature(ctx context.Context, s Snapshot, name string) (*signatu results, needResultParens := formatFieldList(ctx, fset, decl.Type.Results, false) d := decl.Doc.Text() switch s.Options().HoverKind { - case SynopsisDocumentation: + case settings.SynopsisDocumentation: d = doc.Synopsis(d) - case NoDocumentation: + case settings.NoDocumentation: d = "" } return &signature{ @@ -180,7 +181,7 @@ func formatFieldList(ctx context.Context, fset *token.FileSet, list *ast.FieldLi // FormatTypeParams turns TypeParamList into its Go representation, such as: // [T, Y]. Note that it does not print constraints as this is mainly used for // formatting type params in method receivers. -func FormatTypeParams(tparams *typeparams.TypeParamList) string { +func FormatTypeParams(tparams *types.TypeParamList) string { if tparams == nil || tparams.Len() == 0 { return "" } @@ -197,9 +198,9 @@ func FormatTypeParams(tparams *typeparams.TypeParamList) string { } // NewSignature returns formatted signature for a types.Signature struct. 
-func NewSignature(ctx context.Context, s Snapshot, pkg Package, sig *types.Signature, comment *ast.CommentGroup, qf types.Qualifier, mq MetadataQualifier) (*signature, error) { +func NewSignature(ctx context.Context, s *cache.Snapshot, pkg *cache.Package, sig *types.Signature, comment *ast.CommentGroup, qf types.Qualifier, mq MetadataQualifier) (*signature, error) { var tparams []string - tpList := typeparams.ForSignature(sig) + tpList := sig.TypeParams() for i := 0; i < tpList.Len(); i++ { tparam := tpList.At(i) // TODO: is it possible to reuse the logic from FormatVarType here? @@ -246,9 +247,9 @@ func NewSignature(ctx context.Context, s Snapshot, pkg Package, sig *types.Signa d = comment.Text() } switch s.Options().HoverKind { - case SynopsisDocumentation: + case settings.SynopsisDocumentation: d = doc.Synopsis(d) - case NoDocumentation: + case settings.NoDocumentation: d = "" } return &signature{ @@ -267,7 +268,7 @@ func NewSignature(ctx context.Context, s Snapshot, pkg Package, sig *types.Signa // // TODO(rfindley): this function could return the actual name used in syntax, // for better parameter names. -func FormatVarType(ctx context.Context, snapshot Snapshot, srcpkg Package, obj *types.Var, qf types.Qualifier, mq MetadataQualifier) (string, error) { +func FormatVarType(ctx context.Context, snapshot *cache.Snapshot, srcpkg *cache.Package, obj *types.Var, qf types.Qualifier, mq MetadataQualifier) (string, error) { // TODO(rfindley): This looks wrong. The previous comment said: // "If the given expr refers to a type parameter, then use the // object's Type instead of the type parameter declaration. This helps @@ -308,7 +309,7 @@ func FormatVarType(ctx context.Context, snapshot Snapshot, srcpkg Package, obj * // We can't handle type parameters correctly, so we fall back on TypeString // for parameterized decls. 
if decl, _ := decl.(*ast.FuncDecl); decl != nil { - if typeparams.ForFuncType(decl.Type).NumFields() > 0 { + if decl.Type.TypeParams.NumFields() > 0 { return types.TypeString(obj.Type(), qf), nil // in generic function } if decl.Recv != nil && len(decl.Recv.List) > 0 { @@ -321,7 +322,7 @@ func FormatVarType(ctx context.Context, snapshot Snapshot, srcpkg Package, obj * } } } - if spec, _ := spec.(*ast.TypeSpec); spec != nil && typeparams.ForTypeSpec(spec).NumFields() > 0 { + if spec, _ := spec.(*ast.TypeSpec); spec != nil && spec.TypeParams.NumFields() > 0 { return types.TypeString(obj.Type(), qf), nil // in generic type decl } @@ -428,12 +429,12 @@ func qualifyTypeExpr(expr ast.Expr, qf func(string) string) ast.Expr { Rbrack: expr.Rbrack, } - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: indices := make([]ast.Expr, len(expr.Indices)) for i, idx := range expr.Indices { indices[i] = qualifyTypeExpr(idx, qf) } - return &typeparams.IndexListExpr{ + return &ast.IndexListExpr{ X: qualifyTypeExpr(expr.X, qf), Lbrack: expr.Lbrack, Indices: indices, diff --git a/gopls/internal/golang/util.go b/gopls/internal/golang/util.go new file mode 100644 index 00000000000..c2f5d50d608 --- /dev/null +++ b/gopls/internal/golang/util.go @@ -0,0 +1,366 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package golang + +import ( + "context" + "go/ast" + "go/printer" + "go/token" + "go/types" + "regexp" + "strings" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/astutil" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/tokeninternal" +) + +// IsGenerated gets and reads the file denoted by uri and reports +// whether it contains a "generated file" comment as described at +// https://golang.org/s/generatedcode. +// +// TODO(adonovan): opt: this function does too much. +// Move snapshot.ReadFile into the caller (most of which have already done it). +func IsGenerated(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) bool { + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return false + } + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Header) + if err != nil { + return false + } + for _, commentGroup := range pgf.File.Comments { + for _, comment := range commentGroup.List { + if matched := generatedRx.MatchString(comment.Text); matched { + // Check if comment is at the beginning of the line in source. + if safetoken.Position(pgf.Tok, comment.Slash).Column == 1 { + return true + } + } + } + } + return false +} + +// adjustedObjEnd returns the end position of obj, possibly modified for +// package names. +// +// TODO(rfindley): eliminate this function, by inlining it at callsites where +// it makes sense. +func adjustedObjEnd(obj types.Object) token.Pos { + nameLen := len(obj.Name()) + if pkgName, ok := obj.(*types.PkgName); ok { + // An imported Go package has a package-local, unqualified name. + // When the name matches the imported package name, there is no + // identifier in the import spec with the local package name. 
+ // + // For example: + // import "go/ast" // name "ast" matches package name + // import a "go/ast" // name "a" does not match package name + // + // When the identifier does not appear in the source, have the range + // of the object be the import path, including quotes. + if pkgName.Imported().Name() == pkgName.Name() { + nameLen = len(pkgName.Imported().Path()) + len(`""`) + } + } + return obj.Pos() + token.Pos(nameLen) +} + +// Matches cgo generated comment as well as the proposed standard: +// +// https://golang.org/s/generatedcode +var generatedRx = regexp.MustCompile(`// .*DO NOT EDIT\.?`) + +// nodeAtPos returns the index and the node whose position is contained inside +// the node list. +func nodeAtPos(nodes []ast.Node, pos token.Pos) (ast.Node, int) { + if nodes == nil { + return nil, -1 + } + for i, node := range nodes { + if node.Pos() <= pos && pos <= node.End() { + return node, i + } + } + return nil, -1 +} + +// FormatNode returns the "pretty-print" output for an ast node. +func FormatNode(fset *token.FileSet, n ast.Node) string { + var buf strings.Builder + if err := printer.Fprint(&buf, fset, n); err != nil { + // TODO(rfindley): we should use bug.Reportf here. + // We encounter this during completion.resolveInvalid. + return "" + } + return buf.String() +} + +// FormatNodeFile is like FormatNode, but requires only the token.File for the +// syntax containing the given ast node. +func FormatNodeFile(file *token.File, n ast.Node) string { + fset := tokeninternal.FileSetFor(file) + return FormatNode(fset, n) +} + +// findFileInDeps finds package metadata containing URI in the transitive +// dependencies of m. When using the Go command, the answer is unique. 
+func findFileInDeps(s metadata.Source, mp *metadata.Package, uri protocol.DocumentURI) *metadata.Package { + seen := make(map[PackageID]bool) + var search func(*metadata.Package) *metadata.Package + search = func(mp *metadata.Package) *metadata.Package { + if seen[mp.ID] { + return nil + } + seen[mp.ID] = true + for _, cgf := range mp.CompiledGoFiles { + if cgf == uri { + return mp + } + } + for _, dep := range mp.DepsByPkgPath { + mp := s.Metadata(dep) + if mp == nil { + bug.Reportf("nil metadata for %q", dep) + continue + } + if found := search(mp); found != nil { + return found + } + } + return nil + } + return search(mp) +} + +// CollectScopes returns all scopes in an ast path, ordered as innermost scope +// first. +func CollectScopes(info *types.Info, path []ast.Node, pos token.Pos) []*types.Scope { + // scopes[i], where i import path mapping. + inverseDeps := make(map[PackageID]PackagePath) + for path, id := range mp.DepsByPkgPath { + inverseDeps[id] = path + } + importsByPkgPath := make(map[PackagePath]ImportPath) // best import paths by pkgPath + for impPath, id := range mp.DepsByImpPath { + if id == "" { + continue + } + pkgPath := inverseDeps[id] + _, hasPath := importsByPkgPath[pkgPath] + _, hasImp := localNames[impPath] + // In rare cases, there may be multiple import paths with the same package + // path. In such scenarios, prefer an import path that already exists in + // the file. + if !hasPath || hasImp { + importsByPkgPath[pkgPath] = impPath + } + } + + return func(pkgName PackageName, impPath ImportPath, pkgPath PackagePath) string { + // If supplied, translate the package path to an import path in the source + // package. 
+ if pkgPath != "" { + if srcImp := importsByPkgPath[pkgPath]; srcImp != "" { + impPath = srcImp + } + if pkgPath == mp.PkgPath { + return "" + } + } + if localName, ok := localNames[impPath]; ok && impPath != "" { + return localName + } + if pkgName != "" { + return string(pkgName) + } + idx := strings.LastIndexByte(string(impPath), '/') + return string(impPath[idx+1:]) + } +} + +// importInfo collects information about the import specified by imp, +// extracting its file-local name, package name, import path, and package path. +// +// If metadata is missing for the import, the resulting package name and +// package path may be empty, and the file local name may be guessed based on +// the import path. +// +// Note: previous versions of this helper used a PackageID->PackagePath map +// extracted from m, for extracting package path even in the case where +// metadata for a dep was missing. This should not be necessary, as we should +// always have metadata for IDs contained in DepsByPkgPath. +func importInfo(s metadata.Source, imp *ast.ImportSpec, mp *metadata.Package) (string, PackageName, ImportPath, PackagePath) { + var ( + name string // local name + pkgName PackageName + impPath = metadata.UnquoteImportPath(imp) + pkgPath PackagePath + ) + + // If the import has a local name, use it. + if imp.Name != nil { + name = imp.Name.Name + } + + // Try to find metadata for the import. If successful and there is no local + // name, the package name is the local name. + if depID := mp.DepsByImpPath[impPath]; depID != "" { + if depMP := s.Metadata(depID); depMP != nil { + if name == "" { + name = string(depMP.Name) + } + pkgName = depMP.Name + pkgPath = depMP.PkgPath + } + } + + // If the local name is still unknown, guess it based on the import path. + if name == "" { + idx := strings.LastIndexByte(string(impPath), '/') + name = string(impPath[idx+1:]) + } + return name, pkgName, impPath, pkgPath +} + +// isDirective reports whether c is a comment directive. 
+// +// Copied and adapted from go/src/go/ast/ast.go. +func isDirective(c string) bool { + if len(c) < 3 { + return false + } + if c[1] != '/' { + return false + } + //-style comment (no newline at the end) + c = c[2:] + if len(c) == 0 { + // empty line + return false + } + // "//line " is a line directive. + // (The // has been removed.) + if strings.HasPrefix(c, "line ") { + return true + } + + // "//[a-z0-9]+:[a-z0-9]" + // (The // has been removed.) + colon := strings.Index(c, ":") + if colon <= 0 || colon+1 >= len(c) { + return false + } + for i := 0; i <= colon+1; i++ { + if i == colon { + continue + } + b := c[i] + if !('a' <= b && b <= 'z' || '0' <= b && b <= '9') { + return false + } + } + return true +} + +// embeddedIdent returns the type name identifier for an embedding x, if x in a +// valid embedding. Otherwise, it returns nil. +// +// Spec: An embedded field must be specified as a type name T or as a pointer +// to a non-interface type name *T +func embeddedIdent(x ast.Expr) *ast.Ident { + if star, ok := x.(*ast.StarExpr); ok { + x = star.X + } + switch ix := x.(type) { // check for instantiated receivers + case *ast.IndexExpr: + x = ix.X + case *ast.IndexListExpr: + x = ix.X + } + switch x := x.(type) { + case *ast.Ident: + return x + case *ast.SelectorExpr: + if _, ok := x.X.(*ast.Ident); ok { + return x.Sel + } + } + return nil +} + +// An importFunc is an implementation of the single-method +// types.Importer interface based on a function value. +type ImporterFunc func(path string) (*types.Package, error) + +func (f ImporterFunc) Import(path string) (*types.Package, error) { return f(path) } diff --git a/gopls/internal/golang/workspace_symbol.go b/gopls/internal/golang/workspace_symbol.go new file mode 100644 index 00000000000..4ab5a21b8a5 --- /dev/null +++ b/gopls/internal/golang/workspace_symbol.go @@ -0,0 +1,526 @@ +// Copyright 2020 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "context" + "fmt" + "path/filepath" + "runtime" + "sort" + "strings" + "unicode" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/fuzzy" +) + +// maxSymbols defines the maximum number of symbol results that should ever be +// sent in response to a client. +const maxSymbols = 100 + +// WorkspaceSymbols matches symbols across all views using the given query, +// according to the match semantics parameterized by matcherType and style. +// +// The workspace symbol method is defined in the spec as follows: +// +// The workspace symbol request is sent from the client to the server to +// list project-wide symbols matching the query string. +// +// It is unclear what "project-wide" means here, but given the parameters of +// workspace/symbol do not include any workspace identifier, then it has to be +// assumed that "project-wide" means "across all workspaces". Hence why +// WorkspaceSymbols receives the views []View. +// +// However, it then becomes unclear what it would mean to call WorkspaceSymbols +// with a different configured SymbolMatcher per View. Therefore we assume that +// Session level configuration will define the SymbolMatcher to be used for the +// WorkspaceSymbols method. 
+func WorkspaceSymbols(ctx context.Context, matcher settings.SymbolMatcher, style settings.SymbolStyle, snapshots []*cache.Snapshot, query string) ([]protocol.SymbolInformation, error) { + ctx, done := event.Start(ctx, "golang.WorkspaceSymbols") + defer done() + if query == "" { + return nil, nil + } + + var s symbolizer + switch style { + case settings.DynamicSymbols: + s = dynamicSymbolMatch + case settings.FullyQualifiedSymbols: + s = fullyQualifiedSymbolMatch + case settings.PackageQualifiedSymbols: + s = packageSymbolMatch + default: + panic(fmt.Errorf("unknown symbol style: %v", style)) + } + + return collectSymbols(ctx, snapshots, matcher, s, query) +} + +// A matcherFunc returns the index and score of a symbol match. +// +// See the comment for symbolCollector for more information. +type matcherFunc func(chunks []string) (int, float64) + +// A symbolizer returns the best symbol match for a name with pkg, according to +// some heuristic. The symbol name is passed as the slice nameParts of logical +// name pieces. For example, for myType.field the caller can pass either +// []string{"myType.field"} or []string{"myType.", "field"}. +// +// See the comment for symbolCollector for more information. +// +// The space argument is an empty slice with spare capacity that may be used +// to allocate the result. 
+type symbolizer func(space []string, name string, pkg *metadata.Package, m matcherFunc) ([]string, float64) + +func fullyQualifiedSymbolMatch(space []string, name string, pkg *metadata.Package, matcher matcherFunc) ([]string, float64) { + if _, score := dynamicSymbolMatch(space, name, pkg, matcher); score > 0 { + return append(space, string(pkg.PkgPath), ".", name), score + } + return nil, 0 +} + +func dynamicSymbolMatch(space []string, name string, pkg *metadata.Package, matcher matcherFunc) ([]string, float64) { + if metadata.IsCommandLineArguments(pkg.ID) { + // command-line-arguments packages have a non-sensical package path, so + // just use their package name. + return packageSymbolMatch(space, name, pkg, matcher) + } + + var score float64 + + endsInPkgName := strings.HasSuffix(string(pkg.PkgPath), string(pkg.Name)) + + // If the package path does not end in the package name, we need to check the + // package-qualified symbol as an extra pass first. + if !endsInPkgName { + pkgQualified := append(space, string(pkg.Name), ".", name) + idx, score := matcher(pkgQualified) + nameStart := len(pkg.Name) + 1 + if score > 0 { + // If our match is contained entirely within the unqualified portion, + // just return that. + if idx >= nameStart { + return append(space, name), score + } + // Lower the score for matches that include the package name. + return pkgQualified, score * 0.8 + } + } + + // Now try matching the fully qualified symbol. + fullyQualified := append(space, string(pkg.PkgPath), ".", name) + idx, score := matcher(fullyQualified) + + // As above, check if we matched just the unqualified symbol name. + nameStart := len(pkg.PkgPath) + 1 + if idx >= nameStart { + return append(space, name), score + } + + // If our package path ends in the package name, we'll have skipped the + // initial pass above, so check if we matched just the package-qualified + // name. 
+ if endsInPkgName && idx >= 0 { + pkgStart := len(pkg.PkgPath) - len(pkg.Name) + if idx >= pkgStart { + return append(space, string(pkg.Name), ".", name), score + } + } + + // Our match was not contained within the unqualified or package qualified + // symbol. Return the fully qualified symbol but discount the score. + return fullyQualified, score * 0.6 +} + +func packageSymbolMatch(space []string, name string, pkg *metadata.Package, matcher matcherFunc) ([]string, float64) { + qualified := append(space, string(pkg.Name), ".", name) + if _, s := matcher(qualified); s > 0 { + return qualified, s + } + return nil, 0 +} + +func buildMatcher(matcher settings.SymbolMatcher, query string) matcherFunc { + switch matcher { + case settings.SymbolFuzzy: + return parseQuery(query, newFuzzyMatcher) + case settings.SymbolFastFuzzy: + return parseQuery(query, func(query string) matcherFunc { + return fuzzy.NewSymbolMatcher(query).Match + }) + case settings.SymbolCaseSensitive: + return matchExact(query) + case settings.SymbolCaseInsensitive: + q := strings.ToLower(query) + exact := matchExact(q) + wrapper := []string{""} + return func(chunks []string) (int, float64) { + s := strings.Join(chunks, "") + wrapper[0] = strings.ToLower(s) + return exact(wrapper) + } + } + panic(fmt.Errorf("unknown symbol matcher: %v", matcher)) +} + +func newFuzzyMatcher(query string) matcherFunc { + fm := fuzzy.NewMatcher(query) + return func(chunks []string) (int, float64) { + score := float64(fm.ScoreChunks(chunks)) + ranges := fm.MatchedRanges() + if len(ranges) > 0 { + return ranges[0], score + } + return -1, score + } +} + +// parseQuery parses a field-separated symbol query, extracting the special +// characters listed below, and returns a matcherFunc corresponding to the AND +// of all field queries. 
+// +// Special characters: +// +// ^ match exact prefix +// $ match exact suffix +// ' match exact +// +// In all three of these special queries, matches are 'smart-cased', meaning +// they are case sensitive if the symbol query contains any upper-case +// characters, and case insensitive otherwise. +func parseQuery(q string, newMatcher func(string) matcherFunc) matcherFunc { + fields := strings.Fields(q) + if len(fields) == 0 { + return func([]string) (int, float64) { return -1, 0 } + } + var funcs []matcherFunc + for _, field := range fields { + var f matcherFunc + switch { + case strings.HasPrefix(field, "^"): + prefix := field[1:] + f = smartCase(prefix, func(chunks []string) (int, float64) { + s := strings.Join(chunks, "") + if strings.HasPrefix(s, prefix) { + return 0, 1 + } + return -1, 0 + }) + case strings.HasPrefix(field, "'"): + exact := field[1:] + f = smartCase(exact, matchExact(exact)) + case strings.HasSuffix(field, "$"): + suffix := field[0 : len(field)-1] + f = smartCase(suffix, func(chunks []string) (int, float64) { + s := strings.Join(chunks, "") + if strings.HasSuffix(s, suffix) { + return len(s) - len(suffix), 1 + } + return -1, 0 + }) + default: + f = newMatcher(field) + } + funcs = append(funcs, f) + } + if len(funcs) == 1 { + return funcs[0] + } + return comboMatcher(funcs).match +} + +func matchExact(exact string) matcherFunc { + return func(chunks []string) (int, float64) { + s := strings.Join(chunks, "") + if idx := strings.LastIndex(s, exact); idx >= 0 { + return idx, 1 + } + return -1, 0 + } +} + +// smartCase returns a matcherFunc that is case-sensitive if q contains any +// upper-case characters, and case-insensitive otherwise. 
+func smartCase(q string, m matcherFunc) matcherFunc { + insensitive := strings.ToLower(q) == q + wrapper := []string{""} + return func(chunks []string) (int, float64) { + s := strings.Join(chunks, "") + if insensitive { + s = strings.ToLower(s) + } + wrapper[0] = s + return m(wrapper) + } +} + +type comboMatcher []matcherFunc + +func (c comboMatcher) match(chunks []string) (int, float64) { + score := 1.0 + first := 0 + for _, f := range c { + idx, s := f(chunks) + if idx < first { + first = idx + } + score *= s + } + return first, score +} + +// collectSymbols calls snapshot.Symbols to walk the syntax trees of +// all files in the views' current snapshots, and returns a sorted, +// scored list of symbols that best match the parameters. +// +// How it matches symbols is parameterized by two interfaces: +// - A matcherFunc determines how well a string symbol matches a query. It +// returns a non-negative score indicating the quality of the match. A score +// of zero indicates no match. +// - A symbolizer determines how we extract the symbol for an object. This +// enables the 'symbolStyle' configuration option. +func collectSymbols(ctx context.Context, snapshots []*cache.Snapshot, matcherType settings.SymbolMatcher, symbolizer symbolizer, query string) ([]protocol.SymbolInformation, error) { + // Extract symbols from all files. + var work []symbolFile + var roots []string + seen := make(map[protocol.DocumentURI]bool) + // TODO(adonovan): opt: parallelize this loop? How often is len > 1? + for _, snapshot := range snapshots { + // Use the root view URIs for determining (lexically) + // whether a URI is in any open workspace. 
+ folderURI := snapshot.Folder() + roots = append(roots, strings.TrimRight(string(folderURI), "/")) + + filters := snapshot.Options().DirectoryFilters + filterer := cache.NewFilterer(filters) + folder := filepath.ToSlash(folderURI.Path()) + + workspaceOnly := true + if snapshot.Options().SymbolScope == settings.AllSymbolScope { + workspaceOnly = false + } + symbols, err := snapshot.Symbols(ctx, workspaceOnly) + if err != nil { + return nil, err + } + + for uri, syms := range symbols { + norm := filepath.ToSlash(uri.Path()) + nm := strings.TrimPrefix(norm, folder) + if filterer.Disallow(nm) { + continue + } + // Only scan each file once. + if seen[uri] { + continue + } + meta, err := NarrowestMetadataForFile(ctx, snapshot, uri) + if err != nil { + event.Error(ctx, fmt.Sprintf("missing metadata for %q", uri), err) + continue + } + seen[uri] = true + work = append(work, symbolFile{uri, meta, syms}) + } + } + + // Match symbols in parallel. + // Each worker has its own symbolStore, + // which we merge at the end. + nmatchers := runtime.GOMAXPROCS(-1) // matching is CPU bound + results := make(chan *symbolStore) + for i := 0; i < nmatchers; i++ { + go func(i int) { + matcher := buildMatcher(matcherType, query) + store := new(symbolStore) + // Assign files to workers in round-robin fashion. + for j := i; j < len(work); j += nmatchers { + matchFile(store, symbolizer, matcher, roots, work[j]) + } + results <- store + }(i) + } + + // Gather and merge results as they arrive. + var unified symbolStore + for i := 0; i < nmatchers; i++ { + store := <-results + for _, syms := range store.res { + unified.store(syms) + } + } + return unified.results(), nil +} + +// symbolFile holds symbol information for a single file. +type symbolFile struct { + uri protocol.DocumentURI + mp *metadata.Package + syms []cache.Symbol +} + +// matchFile scans a symbol file and adds matching symbols to the store. 
+func matchFile(store *symbolStore, symbolizer symbolizer, matcher matcherFunc, roots []string, i symbolFile) { + space := make([]string, 0, 3) + for _, sym := range i.syms { + symbolParts, score := symbolizer(space, sym.Name, i.mp, matcher) + + // Check if the score is too low before applying any downranking. + if store.tooLow(score) { + continue + } + + // Factors to apply to the match score for the purpose of downranking + // results. + // + // These numbers were crudely calibrated based on trial-and-error using a + // small number of sample queries. Adjust as necessary. + // + // All factors are multiplicative, meaning if more than one applies they are + // multiplied together. + const ( + // nonWorkspaceFactor is applied to symbols outside the workspace. + // Developers are less likely to want to jump to code that they + // are not actively working on. + nonWorkspaceFactor = 0.5 + // nonWorkspaceUnexportedFactor is applied to unexported symbols outside + // the workspace. Since one wouldn't usually jump to unexported + // symbols to understand a package API, they are particularly irrelevant. + nonWorkspaceUnexportedFactor = 0.5 + // every field or method nesting level to access the field decreases + // the score by a factor of 1.0 - depth*depthFactor, up to a depth of + // 3. + // + // Use a small constant here, as this exists mostly to break ties + // (e.g. given a type Foo and a field x.Foo, prefer Foo). + depthFactor = 0.01 + ) + + startWord := true + exported := true + depth := 0.0 + for _, r := range sym.Name { + if startWord && !unicode.IsUpper(r) { + exported = false + } + if r == '.' { + startWord = true + depth++ + } else { + startWord = false + } + } + + // TODO(rfindley): use metadata to determine if the file is in a workspace + // package, rather than this heuristic. 
+ inWorkspace := false + for _, root := range roots { + if strings.HasPrefix(string(i.uri), root) { + inWorkspace = true + break + } + } + + // Apply downranking based on workspace position. + if !inWorkspace { + score *= nonWorkspaceFactor + if !exported { + score *= nonWorkspaceUnexportedFactor + } + } + + // Apply downranking based on symbol depth. + if depth > 3 { + depth = 3 + } + score *= 1.0 - depth*depthFactor + + if store.tooLow(score) { + continue + } + + si := symbolInformation{ + score: score, + symbol: strings.Join(symbolParts, ""), + kind: sym.Kind, + uri: i.uri, + rng: sym.Range, + container: string(i.mp.PkgPath), + } + store.store(si) + } +} + +type symbolStore struct { + res [maxSymbols]symbolInformation +} + +// store inserts si into the sorted results, if si has a high enough score. +func (sc *symbolStore) store(si symbolInformation) { + if sc.tooLow(si.score) { + return + } + insertAt := sort.Search(len(sc.res), func(i int) bool { + // Sort by score, then symbol length, and finally lexically. + if sc.res[i].score != si.score { + return sc.res[i].score < si.score + } + if len(sc.res[i].symbol) != len(si.symbol) { + return len(sc.res[i].symbol) > len(si.symbol) + } + return sc.res[i].symbol > si.symbol + }) + if insertAt < len(sc.res)-1 { + copy(sc.res[insertAt+1:], sc.res[insertAt:len(sc.res)-1]) + } + sc.res[insertAt] = si +} + +func (sc *symbolStore) tooLow(score float64) bool { + return score <= sc.res[len(sc.res)-1].score +} + +func (sc *symbolStore) results() []protocol.SymbolInformation { + var res []protocol.SymbolInformation + for _, si := range sc.res { + if si.score <= 0 { + return res + } + res = append(res, si.asProtocolSymbolInformation()) + } + return res +} + +// symbolInformation is a cut-down version of protocol.SymbolInformation that +// allows struct values of this type to be used as map keys. 
+type symbolInformation struct { + score float64 + symbol string + container string + kind protocol.SymbolKind + uri protocol.DocumentURI + rng protocol.Range +} + +// asProtocolSymbolInformation converts s to a protocol.SymbolInformation value. +// +// TODO: work out how to handle tags if/when they are needed. +func (s symbolInformation) asProtocolSymbolInformation() protocol.SymbolInformation { + return protocol.SymbolInformation{ + Name: s.symbol, + Kind: s.kind, + Location: protocol.Location{ + URI: s.uri, + Range: s.rng, + }, + ContainerName: s.container, + } +} diff --git a/gopls/internal/golang/workspace_symbol_test.go b/gopls/internal/golang/workspace_symbol_test.go new file mode 100644 index 00000000000..4982b767754 --- /dev/null +++ b/gopls/internal/golang/workspace_symbol_test.go @@ -0,0 +1,138 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "testing" + + "golang.org/x/tools/gopls/internal/cache" +) + +func TestParseQuery(t *testing.T) { + tests := []struct { + query, s string + wantMatch bool + }{ + {"", "anything", false}, + {"any", "anything", true}, + {"any$", "anything", false}, + {"ing$", "anything", true}, + {"ing$", "anythinG", true}, + {"inG$", "anything", false}, + {"^any", "anything", true}, + {"^any", "Anything", true}, + {"^Any", "anything", false}, + {"at", "anything", true}, + // TODO: this appears to be a bug in the fuzzy matching algorithm. 'At' + // should cause a case-sensitive match. 
+ // {"At", "anything", false}, + {"At", "Anything", true}, + {"'yth", "Anything", true}, + {"'yti", "Anything", false}, + {"'any 'thing", "Anything", true}, + {"anythn nythg", "Anything", true}, + {"ntx", "Anything", false}, + {"anythn", "anything", true}, + {"ing", "anything", true}, + {"anythn nythgx", "anything", false}, + } + + for _, test := range tests { + matcher := parseQuery(test.query, newFuzzyMatcher) + if _, score := matcher([]string{test.s}); score > 0 != test.wantMatch { + t.Errorf("parseQuery(%q) match for %q: %.2g, want match: %t", test.query, test.s, score, test.wantMatch) + } + } +} + +func TestFiltererDisallow(t *testing.T) { + tests := []struct { + filters []string + included []string + excluded []string + }{ + { + []string{"+**/c.go"}, + []string{"a/c.go", "a/b/c.go"}, + []string{}, + }, + { + []string{"+a/**/c.go"}, + []string{"a/b/c.go", "a/b/d/c.go", "a/c.go"}, + []string{}, + }, + { + []string{"-a/c.go", "+a/**"}, + []string{"a/c.go"}, + []string{}, + }, + { + []string{"+a/**/c.go", "-**/c.go"}, + []string{}, + []string{"a/b/c.go"}, + }, + { + []string{"+a/**/c.go", "-a/**"}, + []string{}, + []string{"a/b/c.go"}, + }, + { + []string{"+**/c.go", "-a/**/c.go"}, + []string{}, + []string{"a/b/c.go"}, + }, + { + []string{"+foobar", "-foo"}, + []string{"foobar", "foobar/a"}, + []string{"foo", "foo/a"}, + }, + { + []string{"+", "-"}, + []string{}, + []string{"foobar", "foobar/a", "foo", "foo/a"}, + }, + { + []string{"-", "+"}, + []string{"foobar", "foobar/a", "foo", "foo/a"}, + []string{}, + }, + { + []string{"-a/**/b/**/c.go"}, + []string{}, + []string{"a/x/y/z/b/f/g/h/c.go"}, + }, + // tests for unsupported glob operators + { + []string{"+**/c.go", "-a/*/c.go"}, + []string{"a/b/c.go"}, + []string{}, + }, + { + []string{"+**/c.go", "-a/?/c.go"}, + []string{"a/b/c.go"}, + []string{}, + }, + { + []string{"-b"}, // should only filter paths prefixed with the "b" directory + []string{"a/b/c.go", "bb"}, + []string{"b/c/d.go", "b"}, + }, + } + + for _, 
test := range tests { + filterer := cache.NewFilterer(test.filters) + for _, inc := range test.included { + if filterer.Disallow(inc) { + t.Errorf("Filters %v excluded %v, wanted included", test.filters, inc) + } + } + + for _, exc := range test.excluded { + if !filterer.Disallow(exc) { + t.Errorf("Filters %v included %v, wanted excluded", test.filters, exc) + } + } + } +} diff --git a/gopls/internal/hooks/analysis_116.go b/gopls/internal/hooks/analysis_116.go deleted file mode 100644 index de58632ba95..00000000000 --- a/gopls/internal/hooks/analysis_116.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.19 -// +build !go1.19 - -package hooks - -import "golang.org/x/tools/gopls/internal/lsp/source" - -func updateAnalyzers(options *source.Options) { - options.StaticcheckSupported = false -} diff --git a/gopls/internal/hooks/analysis_119.go b/gopls/internal/hooks/analysis_119.go index 1f81d7be6c8..8fc7b461a73 100644 --- a/gopls/internal/hooks/analysis_119.go +++ b/gopls/internal/hooks/analysis_119.go @@ -1,62 +1,14 @@ -// Copyright 2019 The Go Authors. All rights reserved. +// Copyright 2021 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build go1.19 -// +build go1.19 +//go:build !go1.20 +// +build !go1.20 package hooks -import ( - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "honnef.co/go/tools/analysis/lint" - "honnef.co/go/tools/quickfix" - "honnef.co/go/tools/simple" - "honnef.co/go/tools/staticcheck" - "honnef.co/go/tools/stylecheck" -) +import "golang.org/x/tools/gopls/internal/settings" -func updateAnalyzers(options *source.Options) { - options.StaticcheckSupported = true - - mapSeverity := func(severity lint.Severity) protocol.DiagnosticSeverity { - switch severity { - case lint.SeverityError: - return protocol.SeverityError - case lint.SeverityDeprecated: - // TODO(dh): in LSP, deprecated is a tag, not a severity. - // We'll want to support this once we enable SA5011. - return protocol.SeverityWarning - case lint.SeverityWarning: - return protocol.SeverityWarning - case lint.SeverityInfo: - return protocol.SeverityInformation - case lint.SeverityHint: - return protocol.SeverityHint - default: - return protocol.SeverityWarning - } - } - add := func(analyzers []*lint.Analyzer, skip map[string]struct{}) { - for _, a := range analyzers { - if _, ok := skip[a.Analyzer.Name]; ok { - continue - } - - enabled := !a.Doc.NonDefault - options.AddStaticcheckAnalyzer(a.Analyzer, enabled, mapSeverity(a.Doc.Severity)) - } - } - - add(simple.Analyzers, nil) - add(staticcheck.Analyzers, map[string]struct{}{ - // This check conflicts with the vet printf check (golang/go#34494). - "SA5009": {}, - // This check relies on facts from dependencies, which - // we don't currently compute. 
- "SA5011": {}, - }) - add(stylecheck.Analyzers, nil) - add(quickfix.Analyzers, nil) +func updateAnalyzers(options *settings.Options) { + options.StaticcheckSupported = false } diff --git a/gopls/internal/hooks/analysis_120.go b/gopls/internal/hooks/analysis_120.go new file mode 100644 index 00000000000..cded05eb4a6 --- /dev/null +++ b/gopls/internal/hooks/analysis_120.go @@ -0,0 +1,62 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.20 +// +build go1.20 + +package hooks + +import ( + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "honnef.co/go/tools/analysis/lint" + "honnef.co/go/tools/quickfix" + "honnef.co/go/tools/simple" + "honnef.co/go/tools/staticcheck" + "honnef.co/go/tools/stylecheck" +) + +func updateAnalyzers(options *settings.Options) { + options.StaticcheckSupported = true + + mapSeverity := func(severity lint.Severity) protocol.DiagnosticSeverity { + switch severity { + case lint.SeverityError: + return protocol.SeverityError + case lint.SeverityDeprecated: + // TODO(dh): in LSP, deprecated is a tag, not a severity. + // We'll want to support this once we enable SA5011. + return protocol.SeverityWarning + case lint.SeverityWarning: + return protocol.SeverityWarning + case lint.SeverityInfo: + return protocol.SeverityInformation + case lint.SeverityHint: + return protocol.SeverityHint + default: + return protocol.SeverityWarning + } + } + add := func(analyzers []*lint.Analyzer, skip map[string]struct{}) { + for _, a := range analyzers { + if _, ok := skip[a.Analyzer.Name]; ok { + continue + } + + enabled := !a.Doc.NonDefault + options.AddStaticcheckAnalyzer(a.Analyzer, enabled, mapSeverity(a.Doc.Severity)) + } + } + + add(simple.Analyzers, nil) + add(staticcheck.Analyzers, map[string]struct{}{ + // This check conflicts with the vet printf check (golang/go#34494). 
+ "SA5009": {}, + // This check relies on facts from dependencies, which + // we don't currently compute. + "SA5011": {}, + }) + add(stylecheck.Analyzers, nil) + add(quickfix.Analyzers, nil) +} diff --git a/gopls/internal/hooks/diff.go b/gopls/internal/hooks/diff.go deleted file mode 100644 index 53dc4975a36..00000000000 --- a/gopls/internal/hooks/diff.go +++ /dev/null @@ -1,168 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package hooks - -import ( - "encoding/json" - "fmt" - "log" - "os" - "path/filepath" - "runtime" - "sync" - "time" - - "github.com/sergi/go-diff/diffmatchpatch" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/internal/diff" -) - -// structure for saving information about diffs -// while the new code is being rolled out -type diffstat struct { - Before, After int - Oldedits, Newedits int - Oldtime, Newtime time.Duration - Stack string - Msg string `json:",omitempty"` // for errors - Ignored int `json:",omitempty"` // numbr of skipped records with 0 edits -} - -var ( - ignoredMu sync.Mutex - ignored int // counter of diff requests on equal strings - - diffStatsOnce sync.Once - diffStats *os.File // never closed -) - -// save writes a JSON record of statistics about diff requests to a temporary file. -func (s *diffstat) save() { - diffStatsOnce.Do(func() { - f, err := os.CreateTemp("", "gopls-diff-stats-*") - if err != nil { - log.Printf("can't create diff stats temp file: %v", err) // e.g. disk full - return - } - diffStats = f - }) - if diffStats == nil { - return - } - - // diff is frequently called with equal strings, - // so we count repeated instances but only print every 15th. 
- ignoredMu.Lock() - if s.Oldedits == 0 && s.Newedits == 0 { - ignored++ - if ignored < 15 { - ignoredMu.Unlock() - return - } - } - s.Ignored = ignored - ignored = 0 - ignoredMu.Unlock() - - // Record the name of the file in which diff was called. - // There aren't many calls, so only the base name is needed. - if _, file, line, ok := runtime.Caller(2); ok { - s.Stack = fmt.Sprintf("%s:%d", filepath.Base(file), line) - } - x, err := json.Marshal(s) - if err != nil { - log.Fatalf("internal error marshalling JSON: %v", err) - } - fmt.Fprintf(diffStats, "%s\n", x) -} - -// disaster is called when the diff algorithm panics or produces a -// diff that cannot be applied. It saves the broken input in a -// new temporary file and logs the file name, which is returned. -func disaster(before, after string) string { - // We use the pid to salt the name, not os.TempFile, - // so that each process creates at most one file. - // One is sufficient for a bug report. - filename := fmt.Sprintf("%s/gopls-diff-bug-%x", os.TempDir(), os.Getpid()) - - // We use NUL as a separator: it should never appear in Go source. - data := before + "\x00" + after - - if err := os.WriteFile(filename, []byte(data), 0600); err != nil { - log.Printf("failed to write diff bug report: %v", err) - return "" - } - - bug.Reportf("Bug detected in diff algorithm! Please send file %s to the maintainers of gopls if you are comfortable sharing its contents.", filename) - - return filename -} - -// BothDiffs edits calls both the new and old diffs, checks that the new diffs -// change before into after, and attempts to preserve some statistics. -func BothDiffs(before, after string) (edits []diff.Edit) { - // The new diff code contains a lot of internal checks that panic when they - // fail. This code catches the panics, or other failures, tries to save - // the failing example (and it would ask the user to send it back to us, and - // changes options.newDiff to 'old', if only we could figure out how.) 
- stat := diffstat{Before: len(before), After: len(after)} - now := time.Now() - oldedits := ComputeEdits(before, after) - stat.Oldedits = len(oldedits) - stat.Oldtime = time.Since(now) - defer func() { - if r := recover(); r != nil { - disaster(before, after) - edits = oldedits - } - }() - now = time.Now() - newedits := diff.Strings(before, after) - stat.Newedits = len(newedits) - stat.Newtime = time.Now().Sub(now) - got, err := diff.Apply(before, newedits) - if err != nil || got != after { - stat.Msg += "FAIL" - disaster(before, after) - stat.save() - return oldedits - } - stat.save() - return newedits -} - -// ComputeEdits computes a diff using the github.com/sergi/go-diff implementation. -func ComputeEdits(before, after string) (edits []diff.Edit) { - // The go-diff library has an unresolved panic (see golang/go#278774). - // TODO(rstambler): Remove the recover once the issue has been fixed - // upstream. - defer func() { - if r := recover(); r != nil { - bug.Reportf("unable to compute edits: %s", r) - // Report one big edit for the whole file. - edits = []diff.Edit{{ - Start: 0, - End: len(before), - New: after, - }} - } - }() - diffs := diffmatchpatch.New().DiffMain(before, after, true) - edits = make([]diff.Edit, 0, len(diffs)) - offset := 0 - for _, d := range diffs { - start := offset - switch d.Type { - case diffmatchpatch.DiffDelete: - offset += len(d.Text) - edits = append(edits, diff.Edit{Start: start, End: offset}) - case diffmatchpatch.DiffEqual: - offset += len(d.Text) - case diffmatchpatch.DiffInsert: - edits = append(edits, diff.Edit{Start: start, End: start, New: d.Text}) - } - } - return edits -} diff --git a/gopls/internal/hooks/diff_test.go b/gopls/internal/hooks/diff_test.go deleted file mode 100644 index 0a809589892..00000000000 --- a/gopls/internal/hooks/diff_test.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package hooks - -import ( - "os" - "testing" - - "golang.org/x/tools/internal/diff/difftest" -) - -func TestDiff(t *testing.T) { - difftest.DiffTest(t, ComputeEdits) -} - -func TestDisaster(t *testing.T) { - a := "This is a string,(\u0995) just for basic\nfunctionality" - b := "This is another string, (\u0996) to see if disaster will store stuff correctly" - fname := disaster(a, b) - buf, err := os.ReadFile(fname) - if err != nil { - t.Fatal(err) - } - if string(buf) != a+"\x00"+b { - t.Error("failed to record original strings") - } - if err := os.Remove(fname); err != nil { - t.Error(err) - } -} diff --git a/gopls/internal/hooks/gofumpt_117.go b/gopls/internal/hooks/gofumpt_117.go deleted file mode 100644 index 71886357704..00000000000 --- a/gopls/internal/hooks/gofumpt_117.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.18 -// +build !go1.18 - -package hooks - -import "golang.org/x/tools/gopls/internal/lsp/source" - -func updateGofumpt(options *source.Options) { -} diff --git a/gopls/internal/hooks/gofumpt_118.go b/gopls/internal/hooks/gofumpt_118.go deleted file mode 100644 index bf0ba41e744..00000000000 --- a/gopls/internal/hooks/gofumpt_118.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build go1.18 -// +build go1.18 - -package hooks - -import ( - "context" - "fmt" - - "golang.org/x/tools/gopls/internal/lsp/source" - "mvdan.cc/gofumpt/format" -) - -func updateGofumpt(options *source.Options) { - options.GofumptFormat = func(ctx context.Context, langVersion, modulePath string, src []byte) ([]byte, error) { - fixedVersion, err := fixLangVersion(langVersion) - if err != nil { - return nil, err - } - return format.Source(src, format.Options{ - LangVersion: fixedVersion, - ModulePath: modulePath, - }) - } -} - -// fixLangVersion function cleans the input so that gofumpt doesn't panic. It is -// rather permissive, and accepts version strings that aren't technically valid -// in a go.mod file. -// -// More specifically, it looks for an optional 'v' followed by 1-3 -// '.'-separated numbers. The resulting string is stripped of any suffix beyond -// this expected version number pattern. -// -// See also golang/go#61692: gofumpt does not accept the new language versions -// appearing in go.mod files (e.g. go1.21rc3). -func fixLangVersion(input string) (string, error) { - bad := func() (string, error) { - return "", fmt.Errorf("invalid language version syntax %q", input) - } - if input == "" { - return input, nil - } - i := 0 - if input[0] == 'v' { // be flexible about 'v' - i++ - } - // takeDigits consumes ascii numerals 0-9 and reports if at least one was - // consumed. - takeDigits := func() bool { - found := false - for ; i < len(input) && '0' <= input[i] && input[i] <= '9'; i++ { - found = true - } - return found - } - if !takeDigits() { // versions must start with at least one number - return bad() - } - - // Accept optional minor and patch versions. - for n := 0; n < 2; n++ { - if i < len(input) && input[i] == '.' { - // Look for minor/patch version. - i++ - if !takeDigits() { - i-- - break - } - } - } - // Accept any suffix. 
- return input[:i], nil -} diff --git a/gopls/internal/hooks/gofumpt_118_test.go b/gopls/internal/hooks/gofumpt_118_test.go deleted file mode 100644 index 838ce73176c..00000000000 --- a/gopls/internal/hooks/gofumpt_118_test.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.18 -// +build go1.18 - -package hooks - -import "testing" - -func TestFixLangVersion(t *testing.T) { - tests := []struct { - input, want string - wantErr bool - }{ - {"", "", false}, - {"1.18", "1.18", false}, - {"v1.18", "v1.18", false}, - {"1.21", "1.21", false}, - {"1.21rc3", "1.21", false}, - {"1.21.0", "1.21.0", false}, - {"1.21.1", "1.21.1", false}, - {"v1.21.1", "v1.21.1", false}, - {"v1.21.0rc1", "v1.21.0", false}, // not technically valid, but we're flexible - {"v1.21.0.0", "v1.21.0", false}, // also technically invalid - {"1.1", "1.1", false}, - {"v1", "v1", false}, - {"1", "1", false}, - {"v1.21.", "v1.21", false}, // also invalid - {"1.21.", "1.21", false}, - - // Error cases. - {"rc1", "", true}, - {"x1.2.3", "", true}, - } - - for _, test := range tests { - got, err := fixLangVersion(test.input) - if test.wantErr { - if err == nil { - t.Errorf("fixLangVersion(%q) succeeded unexpectedly", test.input) - } - continue - } - if err != nil { - t.Fatalf("fixLangVersion(%q) failed: %v", test.input, err) - } - if got != test.want { - t.Errorf("fixLangVersion(%q) = %s, want %s", test.input, got, test.want) - } - } -} diff --git a/gopls/internal/hooks/gofumpt_119.go b/gopls/internal/hooks/gofumpt_119.go new file mode 100644 index 00000000000..d5bc98794f6 --- /dev/null +++ b/gopls/internal/hooks/gofumpt_119.go @@ -0,0 +1,13 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build !go1.20 +// +build !go1.20 + +package hooks + +import "golang.org/x/tools/gopls/internal/settings" + +func updateGofumpt(options *settings.Options) { +} diff --git a/gopls/internal/hooks/gofumpt_120.go b/gopls/internal/hooks/gofumpt_120.go new file mode 100644 index 00000000000..9ac2465efda --- /dev/null +++ b/gopls/internal/hooks/gofumpt_120.go @@ -0,0 +1,78 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.20 +// +build go1.20 + +package hooks + +import ( + "context" + "fmt" + + "golang.org/x/tools/gopls/internal/settings" + "mvdan.cc/gofumpt/format" +) + +func updateGofumpt(options *settings.Options) { + options.GofumptFormat = func(ctx context.Context, langVersion, modulePath string, src []byte) ([]byte, error) { + fixedVersion, err := fixLangVersion(langVersion) + if err != nil { + return nil, err + } + return format.Source(src, format.Options{ + LangVersion: fixedVersion, + ModulePath: modulePath, + }) + } +} + +// fixLangVersion function cleans the input so that gofumpt doesn't panic. It is +// rather permissive, and accepts version strings that aren't technically valid +// in a go.mod file. +// +// More specifically, it looks for an optional 'v' followed by 1-3 +// '.'-separated numbers. The resulting string is stripped of any suffix beyond +// this expected version number pattern. +// +// See also golang/go#61692: gofumpt does not accept the new language versions +// appearing in go.mod files (e.g. go1.21rc3). +func fixLangVersion(input string) (string, error) { + bad := func() (string, error) { + return "", fmt.Errorf("invalid language version syntax %q", input) + } + if input == "" { + return input, nil + } + i := 0 + if input[0] == 'v' { // be flexible about 'v' + i++ + } + // takeDigits consumes ascii numerals 0-9 and reports if at least one was + // consumed. 
+ takeDigits := func() bool { + found := false + for ; i < len(input) && '0' <= input[i] && input[i] <= '9'; i++ { + found = true + } + return found + } + if !takeDigits() { // versions must start with at least one number + return bad() + } + + // Accept optional minor and patch versions. + for n := 0; n < 2; n++ { + if i < len(input) && input[i] == '.' { + // Look for minor/patch version. + i++ + if !takeDigits() { + i-- + break + } + } + } + // Accept any suffix. + return input[:i], nil +} diff --git a/gopls/internal/hooks/gofumpt_120_test.go b/gopls/internal/hooks/gofumpt_120_test.go new file mode 100644 index 00000000000..bb674980e1b --- /dev/null +++ b/gopls/internal/hooks/gofumpt_120_test.go @@ -0,0 +1,53 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.20 +// +build go1.20 + +package hooks + +import "testing" + +func TestFixLangVersion(t *testing.T) { + tests := []struct { + input, want string + wantErr bool + }{ + {"", "", false}, + {"1.18", "1.18", false}, + {"v1.18", "v1.18", false}, + {"1.21", "1.21", false}, + {"1.21rc3", "1.21", false}, + {"1.21.0", "1.21.0", false}, + {"1.21.1", "1.21.1", false}, + {"v1.21.1", "v1.21.1", false}, + {"v1.21.0rc1", "v1.21.0", false}, // not technically valid, but we're flexible + {"v1.21.0.0", "v1.21.0", false}, // also technically invalid + {"1.1", "1.1", false}, + {"v1", "v1", false}, + {"1", "1", false}, + {"v1.21.", "v1.21", false}, // also invalid + {"1.21.", "1.21", false}, + + // Error cases. 
+ {"rc1", "", true}, + {"x1.2.3", "", true}, + } + + for _, test := range tests { + got, err := fixLangVersion(test.input) + if test.wantErr { + if err == nil { + t.Errorf("fixLangVersion(%q) succeeded unexpectedly", test.input) + } + continue + } + if err != nil { + t.Fatalf("fixLangVersion(%q) failed: %v", test.input, err) + } + if got != test.want { + t.Errorf("fixLangVersion(%q) = %s, want %s", test.input, got, test.want) + } + } +} diff --git a/gopls/internal/hooks/hooks.go b/gopls/internal/hooks/hooks.go index 5624a5eb386..0168615fec9 100644 --- a/gopls/internal/hooks/hooks.go +++ b/gopls/internal/hooks/hooks.go @@ -8,23 +8,12 @@ package hooks // import "golang.org/x/tools/gopls/internal/hooks" import ( - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/diff" + "golang.org/x/tools/gopls/internal/settings" "mvdan.cc/xurls/v2" ) -func Options(options *source.Options) { +func Options(options *settings.Options) { options.LicensesText = licensesText - if options.GoDiff { - switch options.NewDiff { - case "old": - options.ComputeEdits = ComputeEdits - case "new": - options.ComputeEdits = diff.Strings - default: - options.ComputeEdits = BothDiffs - } - } options.URLRegexp = xurls.Relaxed() updateAnalyzers(options) updateGofumpt(options) diff --git a/gopls/internal/hooks/licenses.go b/gopls/internal/hooks/licenses.go index a1594654730..6dad4e16df8 100644 --- a/gopls/internal/hooks/licenses.go +++ b/gopls/internal/hooks/licenses.go @@ -60,29 +60,6 @@ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. --- github.com/sergi/go-diff LICENSE -- - -Copyright (c) 2012-2016 The go-diff Authors. All rights reserved. 
- -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. - - -- honnef.co/go/tools LICENSE -- Copyright (c) 2016 Dominik Honnef diff --git a/gopls/internal/lsp/README.md b/gopls/internal/lsp/README.md deleted file mode 100644 index 34a142cbbe3..00000000000 --- a/gopls/internal/lsp/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# lsp - -internal/lsp provides much of the Language Server Protocol (lsp) implementation -for gopls. - -Documentation for users and contributors can be found in the -[`gopls/doc`](../../gopls/doc) directory. diff --git a/gopls/internal/lsp/analysis/embeddirective/embeddirective.go b/gopls/internal/lsp/analysis/embeddirective/embeddirective.go deleted file mode 100644 index 33af72b9f65..00000000000 --- a/gopls/internal/lsp/analysis/embeddirective/embeddirective.go +++ /dev/null @@ -1,162 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// Package embeddirective defines an Analyzer that validates //go:embed directives. -// The analyzer defers fixes to its parent source.Analyzer. -package embeddirective - -import ( - "go/ast" - "go/token" - "go/types" - "strings" - - "golang.org/x/tools/go/analysis" -) - -const Doc = `check //go:embed directive usage - -This analyzer checks that the embed package is imported if //go:embed -directives are present, providing a suggested fix to add the import if -it is missing. - -This analyzer also checks that //go:embed directives precede the -declaration of a single variable.` - -var Analyzer = &analysis.Analyzer{ - Name: "embed", - Doc: Doc, - Requires: []*analysis.Analyzer{}, - Run: run, - RunDespiteErrors: true, -} - -// source.fixedByImportingEmbed relies on this message to filter -// out fixable diagnostics from this Analyzer. -const MissingImportMessage = `must import "embed" when using go:embed directives` - -func run(pass *analysis.Pass) (interface{}, error) { - for _, f := range pass.Files { - comments := embedDirectiveComments(f) - if len(comments) == 0 { - continue // nothing to check - } - - hasEmbedImport := false - for _, imp := range f.Imports { - if imp.Path.Value == `"embed"` { - hasEmbedImport = true - break - } - } - - for _, c := range comments { - report := func(msg string) { - pass.Report(analysis.Diagnostic{ - Pos: c.Pos(), - End: c.Pos() + token.Pos(len("//go:embed")), - Message: msg, - }) - } - - if !hasEmbedImport { - report(MissingImportMessage) - } - - spec := nextVarSpec(c, f) - switch { - case spec == nil: - report(`go:embed directives must precede a "var" declaration`) - case len(spec.Names) != 1: - report("declarations following go:embed directives must define a single variable") - case len(spec.Values) > 0: - report("declarations following go:embed directives must not specify a value") - case !embeddableType(pass.TypesInfo.Defs[spec.Names[0]]): - report("declarations following go:embed directives must be of type string, []byte or 
embed.FS") - } - } - } - return nil, nil -} - -// embedDirectiveComments returns all comments in f that contains a //go:embed directive. -func embedDirectiveComments(f *ast.File) []*ast.Comment { - comments := []*ast.Comment{} - for _, cg := range f.Comments { - for _, c := range cg.List { - if strings.HasPrefix(c.Text, "//go:embed ") { - comments = append(comments, c) - } - } - } - return comments -} - -// nextVarSpec returns the ValueSpec for the variable declaration immediately following -// the go:embed comment, or nil if the next declaration is not a variable declaration. -func nextVarSpec(com *ast.Comment, f *ast.File) *ast.ValueSpec { - // Embed directives must be followed by a declaration of one variable with no value. - // There may be comments and empty lines between the directive and the declaration. - var nextDecl ast.Decl - for _, d := range f.Decls { - if com.End() < d.End() { - nextDecl = d - break - } - } - if nextDecl == nil || nextDecl.Pos() == token.NoPos { - return nil - } - decl, ok := nextDecl.(*ast.GenDecl) - if !ok { - return nil - } - if decl.Tok != token.VAR { - return nil - } - - // var declarations can be both freestanding and blocks (with parenthesis). - // Only the first variable spec following the directive is interesting. - var nextSpec ast.Spec - for _, s := range decl.Specs { - if com.End() < s.End() { - nextSpec = s - break - } - } - if nextSpec == nil { - return nil - } - spec, ok := nextSpec.(*ast.ValueSpec) - if !ok { - // Invalid AST, but keep going. - return nil - } - return spec -} - -// embeddableType in go:embed directives are string, []byte or embed.FS. -func embeddableType(o types.Object) bool { - if o == nil { - return false - } - - // For embed.FS the underlying type is an implementation detail. - // As long as the named type resolves to embed.FS, it is OK. 
- if named, ok := o.Type().(*types.Named); ok { - obj := named.Obj() - if obj.Pkg() != nil && obj.Pkg().Path() == "embed" && obj.Name() == "FS" { - return true - } - } - - switch v := o.Type().Underlying().(type) { - case *types.Basic: - return types.Identical(v, types.Typ[types.String]) - case *types.Slice: - return types.Identical(v.Elem(), types.Typ[types.Byte]) - } - - return false -} diff --git a/gopls/internal/lsp/analysis/embeddirective/embeddirective_test.go b/gopls/internal/lsp/analysis/embeddirective/embeddirective_test.go deleted file mode 100644 index 1165c0bf6e1..00000000000 --- a/gopls/internal/lsp/analysis/embeddirective/embeddirective_test.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package embeddirective - -import ( - "testing" - - "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/internal/typeparams" -) - -func Test(t *testing.T) { - testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests) - } - - analysistest.RunWithSuggestedFixes(t, testdata, Analyzer, tests...) -} diff --git a/gopls/internal/lsp/analysis/fillreturns/fillreturns_test.go b/gopls/internal/lsp/analysis/fillreturns/fillreturns_test.go deleted file mode 100644 index 1f7627551a0..00000000000 --- a/gopls/internal/lsp/analysis/fillreturns/fillreturns_test.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package fillreturns_test - -import ( - "testing" - - "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/lsp/analysis/fillreturns" - "golang.org/x/tools/internal/typeparams" -) - -func Test(t *testing.T) { - testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.RunWithSuggestedFixes(t, testdata, fillreturns.Analyzer, tests...) -} diff --git a/gopls/internal/lsp/analysis/fillstruct/fillstruct_test.go b/gopls/internal/lsp/analysis/fillstruct/fillstruct_test.go deleted file mode 100644 index 66642b7ab59..00000000000 --- a/gopls/internal/lsp/analysis/fillstruct/fillstruct_test.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package fillstruct_test - -import ( - "testing" - - "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/lsp/analysis/fillstruct" - "golang.org/x/tools/internal/typeparams" -) - -func Test(t *testing.T) { - testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.Run(t, testdata, fillstruct.Analyzer, tests...) -} diff --git a/gopls/internal/lsp/analysis/fillstruct/testdata/src/a/a.go b/gopls/internal/lsp/analysis/fillstruct/testdata/src/a/a.go deleted file mode 100644 index 9ee3860fcae..00000000000 --- a/gopls/internal/lsp/analysis/fillstruct/testdata/src/a/a.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package fillstruct - -import ( - data "b" - "go/ast" - "go/token" - "unsafe" -) - -type emptyStruct struct{} - -var _ = emptyStruct{} - -type basicStruct struct { - foo int -} - -var _ = basicStruct{} // want `Fill basicStruct` - -type twoArgStruct struct { - foo int - bar string -} - -var _ = twoArgStruct{} // want `Fill twoArgStruct` - -var _ = twoArgStruct{ // want `Fill twoArgStruct` - bar: "bar", -} - -type nestedStruct struct { - bar string - basic basicStruct -} - -var _ = nestedStruct{} // want `Fill nestedStruct` - -var _ = data.B{} // want `Fill b.B` - -type typedStruct struct { - m map[string]int - s []int - c chan int - c1 <-chan int - a [2]string -} - -var _ = typedStruct{} // want `Fill typedStruct` - -type funStruct struct { - fn func(i int) int -} - -var _ = funStruct{} // want `Fill funStruct` - -type funStructComplex struct { - fn func(i int, s string) (string, int) -} - -var _ = funStructComplex{} // want `Fill funStructComplex` - -type funStructEmpty struct { - fn func() -} - -var _ = funStructEmpty{} // want `Fill funStructEmpty` - -type Foo struct { - A int -} - -type Bar struct { - X *Foo - Y *Foo -} - -var _ = Bar{} // want `Fill Bar` - -type importedStruct struct { - m map[*ast.CompositeLit]ast.Field - s []ast.BadExpr - a [3]token.Token - c chan ast.EmptyStmt - fn func(ast_decl ast.DeclStmt) ast.Ellipsis - st ast.CompositeLit -} - -var _ = importedStruct{} // want `Fill importedStruct` - -type pointerBuiltinStruct struct { - b *bool - s *string - i *int -} - -var _ = pointerBuiltinStruct{} // want `Fill pointerBuiltinStruct` - -var _ = []ast.BasicLit{ - {}, // want `Fill go/ast.BasicLit` -} - -var _ = []ast.BasicLit{{}, // want "go/ast.BasicLit" -} - -type unsafeStruct struct { - foo unsafe.Pointer -} - -var _ = unsafeStruct{} // want `Fill unsafeStruct` diff --git a/gopls/internal/lsp/analysis/fillstruct/testdata/src/typeparams/typeparams.go b/gopls/internal/lsp/analysis/fillstruct/testdata/src/typeparams/typeparams.go deleted file mode 
100644 index 46bb8ae4027..00000000000 --- a/gopls/internal/lsp/analysis/fillstruct/testdata/src/typeparams/typeparams.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package fillstruct - -type emptyStruct[A any] struct{} - -var _ = emptyStruct[int]{} - -type basicStruct[T any] struct { - foo T -} - -var _ = basicStruct[int]{} // want `Fill basicStruct\[int\]` - -type twoArgStruct[F, B any] struct { - foo F - bar B -} - -var _ = twoArgStruct[string, int]{} // want `Fill twoArgStruct\[string, int\]` - -var _ = twoArgStruct[int, string]{ // want `Fill twoArgStruct\[int, string\]` - bar: "bar", -} - -type nestedStruct struct { - bar string - basic basicStruct[int] -} - -var _ = nestedStruct{} // want "Fill nestedStruct" - -func _[T any]() { - type S struct{ t T } - x := S{} // want "Fill S" - _ = x -} - -func Test() { - var tests = []struct { - a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p string - }{ - {}, // want "Fill anonymous struct { a: string, b: string, c: string, ... }" - } - for _, test := range tests { - _ = test - } -} diff --git a/gopls/internal/lsp/analysis/infertypeargs/infertypeargs.go b/gopls/internal/lsp/analysis/infertypeargs/infertypeargs.go deleted file mode 100644 index 9cc9cb9f075..00000000000 --- a/gopls/internal/lsp/analysis/infertypeargs/infertypeargs.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package infertypeargs defines an analyzer that checks for explicit function -// arguments that could be inferred. 
-package infertypeargs - -import ( - "go/token" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const Doc = `check for unnecessary type arguments in call expressions - -Explicit type arguments may be omitted from call expressions if they can be -inferred from function arguments, or from other type arguments: - - func f[T any](T) {} - - func _() { - f[string]("foo") // string could be inferred - } -` - -var Analyzer = &analysis.Analyzer{ - Name: "infertypeargs", - Doc: Doc, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -// TODO(rfindley): remove this thin wrapper around the infertypeargs refactoring, -// and eliminate the infertypeargs analyzer. -// -// Previous iterations used the analysis framework for computing refactorings, -// which proved inefficient. -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - for _, diag := range DiagnoseInferableTypeArgs(pass.Fset, inspect, token.NoPos, token.NoPos, pass.Pkg, pass.TypesInfo) { - pass.Report(diag) - } - return nil, nil -} diff --git a/gopls/internal/lsp/analysis/infertypeargs/infertypeargs_test.go b/gopls/internal/lsp/analysis/infertypeargs/infertypeargs_test.go deleted file mode 100644 index 70855e1ab3e..00000000000 --- a/gopls/internal/lsp/analysis/infertypeargs/infertypeargs_test.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package infertypeargs_test - -import ( - "testing" - - "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/lsp/analysis/infertypeargs" - "golang.org/x/tools/internal/typeparams" -) - -func Test(t *testing.T) { - if !typeparams.Enabled { - t.Skip("type params are not enabled") - } - testdata := analysistest.TestData() - analysistest.RunWithSuggestedFixes(t, testdata, infertypeargs.Analyzer, "a") -} diff --git a/gopls/internal/lsp/analysis/infertypeargs/run_go117.go b/gopls/internal/lsp/analysis/infertypeargs/run_go117.go deleted file mode 100644 index fdf831830dd..00000000000 --- a/gopls/internal/lsp/analysis/infertypeargs/run_go117.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.18 -// +build !go1.18 - -package infertypeargs - -import ( - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/inspector" -) - -// DiagnoseInferableTypeArgs returns an empty slice, as generics are not supported at -// this go version. -func DiagnoseInferableTypeArgs(fset *token.FileSet, inspect *inspector.Inspector, start, end token.Pos, pkg *types.Package, info *types.Info) []analysis.Diagnostic { - return nil -} diff --git a/gopls/internal/lsp/analysis/infertypeargs/run_go118.go b/gopls/internal/lsp/analysis/infertypeargs/run_go118.go deleted file mode 100644 index 66097ecb4f9..00000000000 --- a/gopls/internal/lsp/analysis/infertypeargs/run_go118.go +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build go1.18 -// +build go1.18 - -package infertypeargs - -import ( - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/typeparams" -) - -// DiagnoseInferableTypeArgs reports diagnostics describing simplifications to type -// arguments overlapping with the provided start and end position. -// -// If start or end is token.NoPos, the corresponding bound is not checked -// (i.e. if both start and end are NoPos, all call expressions are considered). -func DiagnoseInferableTypeArgs(fset *token.FileSet, inspect *inspector.Inspector, start, end token.Pos, pkg *types.Package, info *types.Info) []analysis.Diagnostic { - var diags []analysis.Diagnostic - - nodeFilter := []ast.Node{(*ast.CallExpr)(nil)} - inspect.Preorder(nodeFilter, func(node ast.Node) { - call := node.(*ast.CallExpr) - x, lbrack, indices, rbrack := typeparams.UnpackIndexExpr(call.Fun) - ident := calledIdent(x) - if ident == nil || len(indices) == 0 { - return // no explicit args, nothing to do - } - - if (start.IsValid() && call.End() < start) || (end.IsValid() && call.Pos() > end) { - return // non-overlapping - } - - // Confirm that instantiation actually occurred at this ident. - idata, ok := typeparams.GetInstances(info)[ident] - if !ok { - return // something went wrong, but fail open - } - instance := idata.Type - - // Start removing argument expressions from the right, and check if we can - // still infer the call expression. - required := len(indices) // number of type expressions that are required - for i := len(indices) - 1; i >= 0; i-- { - var fun ast.Expr - if i == 0 { - // No longer an index expression: just use the parameterized operand. 
- fun = x - } else { - fun = typeparams.PackIndexExpr(x, lbrack, indices[:i], indices[i-1].End()) - } - newCall := &ast.CallExpr{ - Fun: fun, - Lparen: call.Lparen, - Args: call.Args, - Ellipsis: call.Ellipsis, - Rparen: call.Rparen, - } - info := new(types.Info) - typeparams.InitInstanceInfo(info) - if err := types.CheckExpr(fset, pkg, call.Pos(), newCall, info); err != nil { - // Most likely inference failed. - break - } - newIData := typeparams.GetInstances(info)[ident] - newInstance := newIData.Type - if !types.Identical(instance, newInstance) { - // The inferred result type does not match the original result type, so - // this simplification is not valid. - break - } - required = i - } - if required < len(indices) { - var s, e token.Pos - var edit analysis.TextEdit - if required == 0 { - s, e = lbrack, rbrack+1 // erase the entire index - edit = analysis.TextEdit{Pos: s, End: e} - } else { - s = indices[required].Pos() - e = rbrack - // erase from end of last arg to include last comma & white-spaces - edit = analysis.TextEdit{Pos: indices[required-1].End(), End: e} - } - // Recheck that our (narrower) fixes overlap with the requested range. 
- if (start.IsValid() && e < start) || (end.IsValid() && s > end) { - return // non-overlapping - } - diags = append(diags, analysis.Diagnostic{ - Pos: s, - End: e, - Message: "unnecessary type arguments", - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "simplify type arguments", - TextEdits: []analysis.TextEdit{edit}, - }}, - }) - } - }) - - return diags -} - -func calledIdent(x ast.Expr) *ast.Ident { - switch x := x.(type) { - case *ast.Ident: - return x - case *ast.SelectorExpr: - return x.Sel - } - return nil -} diff --git a/gopls/internal/lsp/analysis/nonewvars/nonewvars_test.go b/gopls/internal/lsp/analysis/nonewvars/nonewvars_test.go deleted file mode 100644 index 8f6f0a51fb4..00000000000 --- a/gopls/internal/lsp/analysis/nonewvars/nonewvars_test.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package nonewvars_test - -import ( - "testing" - - "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/lsp/analysis/nonewvars" - "golang.org/x/tools/internal/typeparams" -) - -func Test(t *testing.T) { - testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.RunWithSuggestedFixes(t, testdata, nonewvars.Analyzer, tests...) -} diff --git a/gopls/internal/lsp/analysis/noresultvalues/noresultvalues.go b/gopls/internal/lsp/analysis/noresultvalues/noresultvalues.go deleted file mode 100644 index 41952a5479e..00000000000 --- a/gopls/internal/lsp/analysis/noresultvalues/noresultvalues.go +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// Package noresultvalues defines an Analyzer that applies suggested fixes -// to errors of the type "no result values expected". -package noresultvalues - -import ( - "bytes" - "go/ast" - "go/format" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/analysisinternal" -) - -const Doc = `suggested fixes for unexpected return values - -This checker provides suggested fixes for type errors of the -type "no result values expected" or "too many return values". -For example: - func z() { return nil } -will turn into - func z() { return } -` - -var Analyzer = &analysis.Analyzer{ - Name: "noresultvalues", - Doc: Doc, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - RunDespiteErrors: true, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - if len(pass.TypeErrors) == 0 { - return nil, nil - } - - nodeFilter := []ast.Node{(*ast.ReturnStmt)(nil)} - inspect.Preorder(nodeFilter, func(n ast.Node) { - retStmt, _ := n.(*ast.ReturnStmt) - - var file *ast.File - for _, f := range pass.Files { - if f.Pos() <= retStmt.Pos() && retStmt.Pos() < f.End() { - file = f - break - } - } - if file == nil { - return - } - - for _, err := range pass.TypeErrors { - if !FixesError(err.Msg) { - continue - } - if retStmt.Pos() >= err.Pos || err.Pos >= retStmt.End() { - continue - } - var buf bytes.Buffer - if err := format.Node(&buf, pass.Fset, file); err != nil { - continue - } - pass.Report(analysis.Diagnostic{ - Pos: err.Pos, - End: analysisinternal.TypeErrorEndPos(pass.Fset, buf.Bytes(), err.Pos), - Message: err.Msg, - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Delete return values", - TextEdits: []analysis.TextEdit{{ - Pos: retStmt.Pos(), - End: retStmt.End(), - NewText: []byte("return"), - }}, - }}, - }) - } - }) - return nil, nil -} - -func FixesError(msg string) bool 
{ - return msg == "no result values expected" || - strings.HasPrefix(msg, "too many return values") && strings.Contains(msg, "want ()") -} diff --git a/gopls/internal/lsp/analysis/noresultvalues/noresultvalues_test.go b/gopls/internal/lsp/analysis/noresultvalues/noresultvalues_test.go deleted file mode 100644 index 24ce39207ee..00000000000 --- a/gopls/internal/lsp/analysis/noresultvalues/noresultvalues_test.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package noresultvalues_test - -import ( - "testing" - - "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/lsp/analysis/noresultvalues" - "golang.org/x/tools/internal/typeparams" -) - -func Test(t *testing.T) { - testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.RunWithSuggestedFixes(t, testdata, noresultvalues.Analyzer, tests...) -} diff --git a/gopls/internal/lsp/analysis/simplifyslice/simplifyslice_test.go b/gopls/internal/lsp/analysis/simplifyslice/simplifyslice_test.go deleted file mode 100644 index 41914ba3170..00000000000 --- a/gopls/internal/lsp/analysis/simplifyslice/simplifyslice_test.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package simplifyslice_test - -import ( - "testing" - - "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/lsp/analysis/simplifyslice" - "golang.org/x/tools/internal/typeparams" -) - -func Test(t *testing.T) { - testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.RunWithSuggestedFixes(t, testdata, simplifyslice.Analyzer, tests...) 
-} diff --git a/gopls/internal/lsp/analysis/simplifyslice/testdata/src/typeparams/typeparams.go b/gopls/internal/lsp/analysis/simplifyslice/testdata/src/typeparams/typeparams.go deleted file mode 100644 index 69db3100a90..00000000000 --- a/gopls/internal/lsp/analysis/simplifyslice/testdata/src/typeparams/typeparams.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. -// -//go:build go1.18 -// +build go1.18 - -package testdata - -type List[E any] []E - -// TODO(suzmue): add a test for generic slice expressions when https://github.com/golang/go/issues/48618 is closed. -// type S interface{ ~[]int } - -var ( - a [10]byte - b [20]float32 - p List[int] - - _ = p[0:] - _ = p[1:10] - _ = p[2:len(p)] // want "unneeded: len\\(p\\)" - _ = p[3:(len(p))] - _ = p[len(a) : len(p)-1] - _ = p[0:len(b)] - _ = p[2:len(p):len(p)] - - _ = p[:] - _ = p[:10] - _ = p[:len(p)] // want "unneeded: len\\(p\\)" - _ = p[:(len(p))] - _ = p[:len(p)-1] - _ = p[:len(b)] - _ = p[:len(p):len(p)] -) - -func foo[E any](a List[E]) { - _ = a[0:len(a)] // want "unneeded: len\\(a\\)" -} diff --git a/gopls/internal/lsp/analysis/simplifyslice/testdata/src/typeparams/typeparams.go.golden b/gopls/internal/lsp/analysis/simplifyslice/testdata/src/typeparams/typeparams.go.golden deleted file mode 100644 index 99ca9e4474b..00000000000 --- a/gopls/internal/lsp/analysis/simplifyslice/testdata/src/typeparams/typeparams.go.golden +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. -// -//go:build go1.18 -// +build go1.18 - -package testdata - -type List[E any] []E - -// TODO(suzmue): add a test for generic slice expressions when https://github.com/golang/go/issues/48618 is closed. 
-// type S interface{ ~[]int } - -var ( - a [10]byte - b [20]float32 - p List[int] - - _ = p[0:] - _ = p[1:10] - _ = p[2:] // want "unneeded: len\\(p\\)" - _ = p[3:(len(p))] - _ = p[len(a) : len(p)-1] - _ = p[0:len(b)] - _ = p[2:len(p):len(p)] - - _ = p[:] - _ = p[:10] - _ = p[:] // want "unneeded: len\\(p\\)" - _ = p[:(len(p))] - _ = p[:len(p)-1] - _ = p[:len(b)] - _ = p[:len(p):len(p)] -) - -func foo[E any](a List[E]) { - _ = a[0:] // want "unneeded: len\\(a\\)" -} diff --git a/gopls/internal/lsp/analysis/stubmethods/stubmethods.go b/gopls/internal/lsp/analysis/stubmethods/stubmethods.go deleted file mode 100644 index f5b2ac55fd3..00000000000 --- a/gopls/internal/lsp/analysis/stubmethods/stubmethods.go +++ /dev/null @@ -1,449 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package stubmethods - -import ( - "bytes" - "fmt" - "go/ast" - "go/format" - "go/token" - "go/types" - "strconv" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/internal/analysisinternal" - "golang.org/x/tools/internal/typesinternal" -) - -const Doc = `stub methods analyzer - -This analyzer generates method stubs for concrete types -in order to implement a target interface` - -var Analyzer = &analysis.Analyzer{ - Name: "stubmethods", - Doc: Doc, - Run: run, - RunDespiteErrors: true, -} - -// TODO(rfindley): remove this thin wrapper around the stubmethods refactoring, -// and eliminate the stubmethods analyzer. -// -// Previous iterations used the analysis framework for computing refactorings, -// which proved inefficient. -func run(pass *analysis.Pass) (interface{}, error) { - for _, err := range pass.TypeErrors { - var file *ast.File - for _, f := range pass.Files { - if f.Pos() <= err.Pos && err.Pos < f.End() { - file = f - break - } - } - // Get the end position of the error. 
- _, _, end, ok := typesinternal.ReadGo116ErrorData(err) - if !ok { - var buf bytes.Buffer - if err := format.Node(&buf, pass.Fset, file); err != nil { - continue - } - end = analysisinternal.TypeErrorEndPos(pass.Fset, buf.Bytes(), err.Pos) - } - if diag, ok := DiagnosticForError(pass.Fset, file, err.Pos, end, err.Msg, pass.TypesInfo); ok { - pass.Report(diag) - } - } - - return nil, nil -} - -// MatchesMessage reports whether msg matches the error message sought after by -// the stubmethods fix. -func MatchesMessage(msg string) bool { - return strings.Contains(msg, "missing method") || strings.HasPrefix(msg, "cannot convert") -} - -// DiagnosticForError computes a diagnostic suggesting to implement an -// interface to fix the type checking error defined by (start, end, msg). -// -// If no such fix is possible, the second result is false. -// -// TODO(rfindley): simplify this signature once the stubmethods refactoring is -// no longer wedged into the analysis framework. -func DiagnosticForError(fset *token.FileSet, file *ast.File, start, end token.Pos, msg string, info *types.Info) (analysis.Diagnostic, bool) { - if !MatchesMessage(msg) { - return analysis.Diagnostic{}, false - } - - path, _ := astutil.PathEnclosingInterval(file, start, end) - si := GetStubInfo(fset, info, path, start) - if si == nil { - return analysis.Diagnostic{}, false - } - qf := RelativeToFiles(si.Concrete.Obj().Pkg(), file, nil, nil) - return analysis.Diagnostic{ - Pos: start, - End: end, - Message: fmt.Sprintf("Implement %s", types.TypeString(si.Interface.Type(), qf)), - }, true -} - -// StubInfo represents a concrete type -// that wants to stub out an interface type -type StubInfo struct { - // Interface is the interface that the client wants to implement. - // When the interface is defined, the underlying object will be a TypeName. 
- // Note that we keep track of types.Object instead of types.Type in order - // to keep a reference to the declaring object's package and the ast file - // in the case where the concrete type file requires a new import that happens to be renamed - // in the interface file. - // TODO(marwan-at-work): implement interface literals. - Fset *token.FileSet // the FileSet used to type-check the types below - Interface *types.TypeName - Concrete *types.Named - Pointer bool -} - -// GetStubInfo determines whether the "missing method error" -// can be used to deduced what the concrete and interface types are. -// -// TODO(adonovan): this function (and its following 5 helpers) tries -// to deduce a pair of (concrete, interface) types that are related by -// an assignment, either explicitly or through a return statement or -// function call. This is essentially what the refactor/satisfy does, -// more generally. Refactor to share logic, after auditing 'satisfy' -// for safety on ill-typed code. -func GetStubInfo(fset *token.FileSet, ti *types.Info, path []ast.Node, pos token.Pos) *StubInfo { - for _, n := range path { - switch n := n.(type) { - case *ast.ValueSpec: - return fromValueSpec(fset, ti, n, pos) - case *ast.ReturnStmt: - // An error here may not indicate a real error the user should know about, but it may. - // Therefore, it would be best to log it out for debugging/reporting purposes instead of ignoring - // it. However, event.Log takes a context which is not passed via the analysis package. - // TODO(marwan-at-work): properly log this error. - si, _ := fromReturnStmt(fset, ti, pos, path, n) - return si - case *ast.AssignStmt: - return fromAssignStmt(fset, ti, n, pos) - case *ast.CallExpr: - // Note that some call expressions don't carry the interface type - // because they don't point to a function or method declaration elsewhere. - // For eaxmple, "var Interface = (*Concrete)(nil)". 
In that case, continue - // this loop to encounter other possibilities such as *ast.ValueSpec or others. - si := fromCallExpr(fset, ti, pos, n) - if si != nil { - return si - } - } - } - return nil -} - -// fromCallExpr tries to find an *ast.CallExpr's function declaration and -// analyzes a function call's signature against the passed in parameter to deduce -// the concrete and interface types. -func fromCallExpr(fset *token.FileSet, ti *types.Info, pos token.Pos, ce *ast.CallExpr) *StubInfo { - paramIdx := -1 - for i, p := range ce.Args { - if pos >= p.Pos() && pos <= p.End() { - paramIdx = i - break - } - } - if paramIdx == -1 { - return nil - } - p := ce.Args[paramIdx] - concObj, pointer := concreteType(p, ti) - if concObj == nil || concObj.Obj().Pkg() == nil { - return nil - } - tv, ok := ti.Types[ce.Fun] - if !ok { - return nil - } - sig, ok := tv.Type.(*types.Signature) - if !ok { - return nil - } - var paramType types.Type - if sig.Variadic() && paramIdx >= sig.Params().Len()-1 { - v := sig.Params().At(sig.Params().Len() - 1) - if s, _ := v.Type().(*types.Slice); s != nil { - paramType = s.Elem() - } - } else if paramIdx < sig.Params().Len() { - paramType = sig.Params().At(paramIdx).Type() - } - if paramType == nil { - return nil // A type error prevents us from determining the param type. - } - iface := ifaceObjFromType(paramType) - if iface == nil { - return nil - } - return &StubInfo{ - Fset: fset, - Concrete: concObj, - Pointer: pointer, - Interface: iface, - } -} - -// fromReturnStmt analyzes a "return" statement to extract -// a concrete type that is trying to be returned as an interface type. -// -// For example, func() io.Writer { return myType{} } -// would return StubInfo with the interface being io.Writer and the concrete type being myType{}. 
-func fromReturnStmt(fset *token.FileSet, ti *types.Info, pos token.Pos, path []ast.Node, rs *ast.ReturnStmt) (*StubInfo, error) { - returnIdx := -1 - for i, r := range rs.Results { - if pos >= r.Pos() && pos <= r.End() { - returnIdx = i - } - } - if returnIdx == -1 { - return nil, fmt.Errorf("pos %d not within return statement bounds: [%d-%d]", pos, rs.Pos(), rs.End()) - } - concObj, pointer := concreteType(rs.Results[returnIdx], ti) - if concObj == nil || concObj.Obj().Pkg() == nil { - return nil, nil - } - ef := enclosingFunction(path, ti) - if ef == nil { - return nil, fmt.Errorf("could not find the enclosing function of the return statement") - } - iface := ifaceType(ef.Results.List[returnIdx].Type, ti) - if iface == nil { - return nil, nil - } - return &StubInfo{ - Fset: fset, - Concrete: concObj, - Pointer: pointer, - Interface: iface, - }, nil -} - -// fromValueSpec returns *StubInfo from a variable declaration such as -// var x io.Writer = &T{} -func fromValueSpec(fset *token.FileSet, ti *types.Info, vs *ast.ValueSpec, pos token.Pos) *StubInfo { - var idx int - for i, vs := range vs.Values { - if pos >= vs.Pos() && pos <= vs.End() { - idx = i - break - } - } - - valueNode := vs.Values[idx] - ifaceNode := vs.Type - callExp, ok := valueNode.(*ast.CallExpr) - // if the ValueSpec is `var _ = myInterface(...)` - // as opposed to `var _ myInterface = ...` - if ifaceNode == nil && ok && len(callExp.Args) == 1 { - ifaceNode = callExp.Fun - valueNode = callExp.Args[0] - } - concObj, pointer := concreteType(valueNode, ti) - if concObj == nil || concObj.Obj().Pkg() == nil { - return nil - } - ifaceObj := ifaceType(ifaceNode, ti) - if ifaceObj == nil { - return nil - } - return &StubInfo{ - Fset: fset, - Concrete: concObj, - Interface: ifaceObj, - Pointer: pointer, - } -} - -// fromAssignStmt returns *StubInfo from a variable re-assignment such as -// var x io.Writer -// x = &T{} -func fromAssignStmt(fset *token.FileSet, ti *types.Info, as *ast.AssignStmt, pos 
token.Pos) *StubInfo { - idx := -1 - var lhs, rhs ast.Expr - // Given a re-assignment interface conversion error, - // the compiler error shows up on the right hand side of the expression. - // For example, x = &T{} where x is io.Writer highlights the error - // under "&T{}" and not "x". - for i, hs := range as.Rhs { - if pos >= hs.Pos() && pos <= hs.End() { - idx = i - break - } - } - if idx == -1 { - return nil - } - // Technically, this should never happen as - // we would get a "cannot assign N values to M variables" - // before we get an interface conversion error. Nonetheless, - // guard against out of range index errors. - if idx >= len(as.Lhs) { - return nil - } - lhs, rhs = as.Lhs[idx], as.Rhs[idx] - ifaceObj := ifaceType(lhs, ti) - if ifaceObj == nil { - return nil - } - concType, pointer := concreteType(rhs, ti) - if concType == nil || concType.Obj().Pkg() == nil { - return nil - } - return &StubInfo{ - Fset: fset, - Concrete: concType, - Interface: ifaceObj, - Pointer: pointer, - } -} - -// RelativeToFiles returns a types.Qualifier that formats package -// names according to the import environments of the files that define -// the concrete type and the interface type. (Only the imports of the -// latter file are provided.) -// -// This is similar to types.RelativeTo except if a file imports the package with a different name, -// then it will use it. And if the file does import the package but it is ignored, -// then it will return the original name. It also prefers package names in importEnv in case -// an import is missing from concFile but is present among importEnv. -// -// Additionally, if missingImport is not nil, the function will be called whenever the concFile -// is presented with a package that is not imported. This is useful so that as types.TypeString is -// formatting a function signature, it is identifying packages that will need to be imported when -// stubbing an interface. 
-// -// TODO(rfindley): investigate if this can be merged with source.Qualifier. -func RelativeToFiles(concPkg *types.Package, concFile *ast.File, ifaceImports []*ast.ImportSpec, missingImport func(name, path string)) types.Qualifier { - return func(other *types.Package) string { - if other == concPkg { - return "" - } - - // Check if the concrete file already has the given import, - // if so return the default package name or the renamed import statement. - for _, imp := range concFile.Imports { - impPath, _ := strconv.Unquote(imp.Path.Value) - isIgnored := imp.Name != nil && (imp.Name.Name == "." || imp.Name.Name == "_") - // TODO(adonovan): this comparison disregards a vendor prefix in 'other'. - if impPath == other.Path() && !isIgnored { - importName := other.Name() - if imp.Name != nil { - importName = imp.Name.Name - } - return importName - } - } - - // If the concrete file does not have the import, check if the package - // is renamed in the interface file and prefer that. - var importName string - for _, imp := range ifaceImports { - impPath, _ := strconv.Unquote(imp.Path.Value) - isIgnored := imp.Name != nil && (imp.Name.Name == "." || imp.Name.Name == "_") - // TODO(adonovan): this comparison disregards a vendor prefix in 'other'. - if impPath == other.Path() && !isIgnored { - if imp.Name != nil && imp.Name.Name != concPkg.Name() { - importName = imp.Name.Name - } - break - } - } - - if missingImport != nil { - missingImport(importName, other.Path()) - } - - // Up until this point, importName must stay empty when calling missingImport, - // otherwise we'd end up with `import time "time"` which doesn't look idiomatic. - if importName == "" { - importName = other.Name() - } - return importName - } -} - -// ifaceType will try to extract the types.Object that defines -// the interface given the ast.Expr where the "missing method" -// or "conversion" errors happen. 
-func ifaceType(n ast.Expr, ti *types.Info) *types.TypeName { - tv, ok := ti.Types[n] - if !ok { - return nil - } - return ifaceObjFromType(tv.Type) -} - -func ifaceObjFromType(t types.Type) *types.TypeName { - named, ok := t.(*types.Named) - if !ok { - return nil - } - _, ok = named.Underlying().(*types.Interface) - if !ok { - return nil - } - // Interfaces defined in the "builtin" package return nil a Pkg(). - // But they are still real interfaces that we need to make a special case for. - // Therefore, protect gopls from panicking if a new interface type was added in the future. - if named.Obj().Pkg() == nil && named.Obj().Name() != "error" { - return nil - } - return named.Obj() -} - -// concreteType tries to extract the *types.Named that defines -// the concrete type given the ast.Expr where the "missing method" -// or "conversion" errors happened. If the concrete type is something -// that cannot have methods defined on it (such as basic types), this -// method will return a nil *types.Named. The second return parameter -// is a boolean that indicates whether the concreteType was defined as a -// pointer or value. -func concreteType(n ast.Expr, ti *types.Info) (*types.Named, bool) { - tv, ok := ti.Types[n] - if !ok { - return nil, false - } - typ := tv.Type - ptr, isPtr := typ.(*types.Pointer) - if isPtr { - typ = ptr.Elem() - } - named, ok := typ.(*types.Named) - if !ok { - return nil, false - } - return named, isPtr -} - -// enclosingFunction returns the signature and type of the function -// enclosing the given position. 
-func enclosingFunction(path []ast.Node, info *types.Info) *ast.FuncType { - for _, node := range path { - switch t := node.(type) { - case *ast.FuncDecl: - if _, ok := info.Defs[t.Name]; ok { - return t.Type - } - case *ast.FuncLit: - if _, ok := info.Types[t]; ok { - return t.Type - } - } - } - return nil -} diff --git a/gopls/internal/lsp/analysis/undeclaredname/undeclared.go b/gopls/internal/lsp/analysis/undeclaredname/undeclared.go deleted file mode 100644 index 043979408d0..00000000000 --- a/gopls/internal/lsp/analysis/undeclaredname/undeclared.go +++ /dev/null @@ -1,347 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package undeclaredname defines an Analyzer that applies suggested fixes -// to errors of the type "undeclared name: %s". -package undeclaredname - -import ( - "bytes" - "fmt" - "go/ast" - "go/format" - "go/token" - "go/types" - "strings" - "unicode" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/internal/analysisinternal" -) - -const Doc = `suggested fixes for "undeclared name: <>" - -This checker provides suggested fixes for type errors of the -type "undeclared name: <>". It will either insert a new statement, -such as: - -"<> := " - -or a new function declaration, such as: - -func <>(inferred parameters) { - panic("implement me!") -} -` - -var Analyzer = &analysis.Analyzer{ - Name: "undeclaredname", - Doc: Doc, - Requires: []*analysis.Analyzer{}, - Run: run, - RunDespiteErrors: true, -} - -// The prefix for this error message changed in Go 1.20. 
-var undeclaredNamePrefixes = []string{"undeclared name: ", "undefined: "} - -func run(pass *analysis.Pass) (interface{}, error) { - for _, err := range pass.TypeErrors { - runForError(pass, err) - } - return nil, nil -} - -func runForError(pass *analysis.Pass, err types.Error) { - var name string - for _, prefix := range undeclaredNamePrefixes { - if !strings.HasPrefix(err.Msg, prefix) { - continue - } - name = strings.TrimPrefix(err.Msg, prefix) - } - if name == "" { - return - } - var file *ast.File - for _, f := range pass.Files { - if f.Pos() <= err.Pos && err.Pos < f.End() { - file = f - break - } - } - if file == nil { - return - } - - // Get the path for the relevant range. - path, _ := astutil.PathEnclosingInterval(file, err.Pos, err.Pos) - if len(path) < 2 { - return - } - ident, ok := path[0].(*ast.Ident) - if !ok || ident.Name != name { - return - } - - // Undeclared quick fixes only work in function bodies. - inFunc := false - for i := range path { - if _, inFunc = path[i].(*ast.FuncDecl); inFunc { - if i == 0 { - return - } - if _, isBody := path[i-1].(*ast.BlockStmt); !isBody { - return - } - break - } - } - if !inFunc { - return - } - // Skip selector expressions because it might be too complex - // to try and provide a suggested fix for fields and methods. - if _, ok := path[1].(*ast.SelectorExpr); ok { - return - } - tok := pass.Fset.File(file.Pos()) - if tok == nil { - return - } - offset := safetoken.StartPosition(pass.Fset, err.Pos).Offset - end := tok.Pos(offset + len(name)) // TODO(adonovan): dubious! err.Pos + len(name)?? 
- pass.Report(analysis.Diagnostic{ - Pos: err.Pos, - End: end, - Message: err.Msg, - }) -} - -func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) { - pos := start // don't use the end - path, _ := astutil.PathEnclosingInterval(file, pos, pos) - if len(path) < 2 { - return nil, fmt.Errorf("no expression found") - } - ident, ok := path[0].(*ast.Ident) - if !ok { - return nil, fmt.Errorf("no identifier found") - } - - // Check for a possible call expression, in which case we should add a - // new function declaration. - if len(path) > 1 { - if _, ok := path[1].(*ast.CallExpr); ok { - return newFunctionDeclaration(path, file, pkg, info, fset) - } - } - - // Get the place to insert the new statement. - insertBeforeStmt := analysisinternal.StmtToInsertVarBefore(path) - if insertBeforeStmt == nil { - return nil, fmt.Errorf("could not locate insertion point") - } - - insertBefore := safetoken.StartPosition(fset, insertBeforeStmt.Pos()).Offset - - // Get the indent to add on the line after the new statement. - // Since this will have a parse error, we can not use format.Source(). - contentBeforeStmt, indent := content[:insertBefore], "\n" - if nl := bytes.LastIndex(contentBeforeStmt, []byte("\n")); nl != -1 { - indent = string(contentBeforeStmt[nl:]) - } - - // Create the new local variable statement. 
- newStmt := fmt.Sprintf("%s := %s", ident.Name, indent) - return &analysis.SuggestedFix{ - Message: fmt.Sprintf("Create variable \"%s\"", ident.Name), - TextEdits: []analysis.TextEdit{{ - Pos: insertBeforeStmt.Pos(), - End: insertBeforeStmt.Pos(), - NewText: []byte(newStmt), - }}, - }, nil -} - -func newFunctionDeclaration(path []ast.Node, file *ast.File, pkg *types.Package, info *types.Info, fset *token.FileSet) (*analysis.SuggestedFix, error) { - if len(path) < 3 { - return nil, fmt.Errorf("unexpected set of enclosing nodes: %v", path) - } - ident, ok := path[0].(*ast.Ident) - if !ok { - return nil, fmt.Errorf("no name for function declaration %v (%T)", path[0], path[0]) - } - call, ok := path[1].(*ast.CallExpr) - if !ok { - return nil, fmt.Errorf("no call expression found %v (%T)", path[1], path[1]) - } - - // Find the enclosing function, so that we can add the new declaration - // below. - var enclosing *ast.FuncDecl - for _, n := range path { - if n, ok := n.(*ast.FuncDecl); ok { - enclosing = n - break - } - } - // TODO(rstambler): Support the situation when there is no enclosing - // function. - if enclosing == nil { - return nil, fmt.Errorf("no enclosing function found: %v", path) - } - - pos := enclosing.End() - - var paramNames []string - var paramTypes []types.Type - // keep track of all param names to later ensure uniqueness - nameCounts := map[string]int{} - for _, arg := range call.Args { - typ := info.TypeOf(arg) - if typ == nil { - return nil, fmt.Errorf("unable to determine type for %s", arg) - } - - switch t := typ.(type) { - // this is the case where another function call returning multiple - // results is used as an argument - case *types.Tuple: - n := t.Len() - for i := 0; i < n; i++ { - name := typeToArgName(t.At(i).Type()) - nameCounts[name]++ - - paramNames = append(paramNames, name) - paramTypes = append(paramTypes, types.Default(t.At(i).Type())) - } - - default: - // does the argument have a name we can reuse? 
- // only happens in case of a *ast.Ident - var name string - if ident, ok := arg.(*ast.Ident); ok { - name = ident.Name - } - - if name == "" { - name = typeToArgName(typ) - } - - nameCounts[name]++ - - paramNames = append(paramNames, name) - paramTypes = append(paramTypes, types.Default(typ)) - } - } - - for n, c := range nameCounts { - // Any names we saw more than once will need a unique suffix added - // on. Reset the count to 1 to act as the suffix for the first - // occurrence of that name. - if c >= 2 { - nameCounts[n] = 1 - } else { - delete(nameCounts, n) - } - } - - params := &ast.FieldList{} - - for i, name := range paramNames { - if suffix, repeats := nameCounts[name]; repeats { - nameCounts[name]++ - name = fmt.Sprintf("%s%d", name, suffix) - } - - // only worth checking after previous param in the list - if i > 0 { - // if type of parameter at hand is the same as the previous one, - // add it to the previous param list of identifiers so to have: - // (s1, s2 string) - // and not - // (s1 string, s2 string) - if paramTypes[i] == paramTypes[i-1] { - params.List[len(params.List)-1].Names = append(params.List[len(params.List)-1].Names, ast.NewIdent(name)) - continue - } - } - - params.List = append(params.List, &ast.Field{ - Names: []*ast.Ident{ - ast.NewIdent(name), - }, - Type: analysisinternal.TypeExpr(file, pkg, paramTypes[i]), - }) - } - - decl := &ast.FuncDecl{ - Name: ast.NewIdent(ident.Name), - Type: &ast.FuncType{ - Params: params, - // TODO(rstambler): Also handle result parameters here. 
- }, - Body: &ast.BlockStmt{ - List: []ast.Stmt{ - &ast.ExprStmt{ - X: &ast.CallExpr{ - Fun: ast.NewIdent("panic"), - Args: []ast.Expr{ - &ast.BasicLit{ - Value: `"unimplemented"`, - }, - }, - }, - }, - }, - }, - } - - b := bytes.NewBufferString("\n\n") - if err := format.Node(b, fset, decl); err != nil { - return nil, err - } - return &analysis.SuggestedFix{ - Message: fmt.Sprintf("Create function \"%s\"", ident.Name), - TextEdits: []analysis.TextEdit{{ - Pos: pos, - End: pos, - NewText: b.Bytes(), - }}, - }, nil -} -func typeToArgName(ty types.Type) string { - s := types.Default(ty).String() - - switch t := ty.(type) { - case *types.Basic: - // use first letter in type name for basic types - return s[0:1] - case *types.Slice: - // use element type to decide var name for slices - return typeToArgName(t.Elem()) - case *types.Array: - // use element type to decide var name for arrays - return typeToArgName(t.Elem()) - case *types.Chan: - return "ch" - } - - s = strings.TrimFunc(s, func(r rune) bool { - return !unicode.IsLetter(r) - }) - - if s == "error" { - return "err" - } - - // remove package (if present) - // and make first letter lowercase - a := []rune(s[strings.LastIndexByte(s, '.')+1:]) - a[0] = unicode.ToLower(a[0]) - return string(a) -} diff --git a/gopls/internal/lsp/analysis/undeclaredname/undeclared_test.go b/gopls/internal/lsp/analysis/undeclaredname/undeclared_test.go deleted file mode 100644 index 306c3f03941..00000000000 --- a/gopls/internal/lsp/analysis/undeclaredname/undeclared_test.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package undeclaredname_test - -import ( - "testing" - - "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/lsp/analysis/undeclaredname" -) - -func Test(t *testing.T) { - testdata := analysistest.TestData() - analysistest.Run(t, testdata, undeclaredname.Analyzer, "a") -} diff --git a/gopls/internal/lsp/analysis/unusedparams/cmd/main.go b/gopls/internal/lsp/analysis/unusedparams/cmd/main.go deleted file mode 100644 index fafb126ffdf..00000000000 --- a/gopls/internal/lsp/analysis/unusedparams/cmd/main.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// The stringintconv command runs the stringintconv analyzer. -package main - -import ( - "golang.org/x/tools/go/analysis/singlechecker" - "golang.org/x/tools/gopls/internal/lsp/analysis/unusedparams" -) - -func main() { singlechecker.Main(unusedparams.Analyzer) } diff --git a/gopls/internal/lsp/analysis/unusedparams/testdata/src/a/a.go b/gopls/internal/lsp/analysis/unusedparams/testdata/src/a/a.go deleted file mode 100644 index 23e4122c4cc..00000000000 --- a/gopls/internal/lsp/analysis/unusedparams/testdata/src/a/a.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package a - -import ( - "bytes" - "fmt" - "net/http" -) - -type parent interface { - n(f bool) -} - -type yuh struct { - a int -} - -func (y *yuh) n(f bool) { - for i := 0; i < 10; i++ { - fmt.Println(i) - } -} - -func a(i1 int, i2 int, i3 int) int { // want "potentially unused parameter: 'i2'" - i3 += i1 - _ = func(z int) int { // want "potentially unused parameter: 'z'" - _ = 1 - return 1 - } - return i3 -} - -func b(c bytes.Buffer) { // want "potentially unused parameter: 'c'" - _ = 1 -} - -func z(h http.ResponseWriter, _ *http.Request) { // want "potentially unused parameter: 'h'" - fmt.Println("Before") -} - -func l(h http.Handler) http.Handler { - return http.HandlerFunc(z) -} - -func mult(a, b int) int { // want "potentially unused parameter: 'b'" - a += 1 - return a -} - -func y(a int) { - panic("yo") -} diff --git a/gopls/internal/lsp/analysis/unusedparams/testdata/src/a/a.go.golden b/gopls/internal/lsp/analysis/unusedparams/testdata/src/a/a.go.golden deleted file mode 100644 index e28a6bdeabe..00000000000 --- a/gopls/internal/lsp/analysis/unusedparams/testdata/src/a/a.go.golden +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package a - -import ( - "bytes" - "fmt" - "net/http" -) - -type parent interface { - n(f bool) -} - -type yuh struct { - a int -} - -func (y *yuh) n(f bool) { - for i := 0; i < 10; i++ { - fmt.Println(i) - } -} - -func a(i1 int, _ int, i3 int) int { // want "potentially unused parameter: 'i2'" - i3 += i1 - _ = func(_ int) int { // want "potentially unused parameter: 'z'" - _ = 1 - return 1 - } - return i3 -} - -func b(_ bytes.Buffer) { // want "potentially unused parameter: 'c'" - _ = 1 -} - -func z(_ http.ResponseWriter, _ *http.Request) { // want "potentially unused parameter: 'h'" - fmt.Println("Before") -} - -func l(h http.Handler) http.Handler { - return http.HandlerFunc(z) -} - -func mult(a, _ int) int { // want "potentially unused parameter: 'b'" - a += 1 - return a -} - -func y(a int) { - panic("yo") -} diff --git a/gopls/internal/lsp/analysis/unusedparams/testdata/src/typeparams/typeparams.go b/gopls/internal/lsp/analysis/unusedparams/testdata/src/typeparams/typeparams.go deleted file mode 100644 index 93af2681b94..00000000000 --- a/gopls/internal/lsp/analysis/unusedparams/testdata/src/typeparams/typeparams.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package typeparams - -import ( - "bytes" - "fmt" - "net/http" -) - -type parent[T any] interface { - n(f T) -} - -type yuh[T any] struct { - a T -} - -func (y *yuh[int]) n(f bool) { - for i := 0; i < 10; i++ { - fmt.Println(i) - } -} - -func a[T comparable](i1 int, i2 T, i3 int) int { // want "potentially unused parameter: 'i2'" - i3 += i1 - _ = func(z int) int { // want "potentially unused parameter: 'z'" - _ = 1 - return 1 - } - return i3 -} - -func b[T any](c bytes.Buffer) { // want "potentially unused parameter: 'c'" - _ = 1 -} - -func z[T http.ResponseWriter](h T, _ *http.Request) { // want "potentially unused parameter: 'h'" - fmt.Println("Before") -} - -func l(h http.Handler) http.Handler { - return http.HandlerFunc(z[http.ResponseWriter]) -} - -func mult(a, b int) int { // want "potentially unused parameter: 'b'" - a += 1 - return a -} - -func y[T any](a T) { - panic("yo") -} diff --git a/gopls/internal/lsp/analysis/unusedparams/testdata/src/typeparams/typeparams.go.golden b/gopls/internal/lsp/analysis/unusedparams/testdata/src/typeparams/typeparams.go.golden deleted file mode 100644 index c86bf289a3e..00000000000 --- a/gopls/internal/lsp/analysis/unusedparams/testdata/src/typeparams/typeparams.go.golden +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package typeparams - -import ( - "bytes" - "fmt" - "net/http" -) - -type parent[T any] interface { - n(f T) -} - -type yuh[T any] struct { - a T -} - -func (y *yuh[int]) n(f bool) { - for i := 0; i < 10; i++ { - fmt.Println(i) - } -} - -func a[T comparable](i1 int, _ T, i3 int) int { // want "potentially unused parameter: 'i2'" - i3 += i1 - _ = func(_ int) int { // want "potentially unused parameter: 'z'" - _ = 1 - return 1 - } - return i3 -} - -func b[T any](_ bytes.Buffer) { // want "potentially unused parameter: 'c'" - _ = 1 -} - -func z[T http.ResponseWriter](_ T, _ *http.Request) { // want "potentially unused parameter: 'h'" - fmt.Println("Before") -} - -func l(h http.Handler) http.Handler { - return http.HandlerFunc(z[http.ResponseWriter]) -} - -func mult(a, _ int) int { // want "potentially unused parameter: 'b'" - a += 1 - return a -} - -func y[T any](a T) { - panic("yo") -} diff --git a/gopls/internal/lsp/analysis/unusedparams/unusedparams.go b/gopls/internal/lsp/analysis/unusedparams/unusedparams.go deleted file mode 100644 index 64702b2f0a6..00000000000 --- a/gopls/internal/lsp/analysis/unusedparams/unusedparams.go +++ /dev/null @@ -1,166 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package unusedparams defines an analyzer that checks for unused -// parameters of functions. -package unusedparams - -import ( - "fmt" - "go/ast" - "go/types" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const Doc = `check for unused parameters of functions - -The unusedparams analyzer checks functions to see if there are -any parameters that are not being used. 
- -To reduce false positives it ignores: -- methods -- parameters that do not have a name or have the name '_' (the blank identifier) -- functions in test files -- functions with empty bodies or those with just a return stmt` - -var ( - Analyzer = &analysis.Analyzer{ - Name: "unusedparams", - Doc: Doc, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - } - inspectLits bool - inspectWrappers bool -) - -func init() { - Analyzer.Flags.BoolVar(&inspectLits, "lits", true, "inspect function literals") - Analyzer.Flags.BoolVar(&inspectWrappers, "wrappers", false, "inspect functions whose body consists of a single return statement") -} - -type paramData struct { - field *ast.Field - ident *ast.Ident - typObj types.Object -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - } - if inspectLits { - nodeFilter = append(nodeFilter, (*ast.FuncLit)(nil)) - } - - inspect.Preorder(nodeFilter, func(n ast.Node) { - var fieldList *ast.FieldList - var body *ast.BlockStmt - - // Get the fieldList and body from the function node. - switch f := n.(type) { - case *ast.FuncDecl: - fieldList, body = f.Type.Params, f.Body - // TODO(golang/go#36602): add better handling for methods, if we enable methods - // we will get false positives if a struct is potentially implementing - // an interface. - if f.Recv != nil { - return - } - - // Ignore functions in _test.go files to reduce false positives. - if file := pass.Fset.File(n.Pos()); file != nil && strings.HasSuffix(file.Name(), "_test.go") { - return - } - case *ast.FuncLit: - fieldList, body = f.Type.Params, f.Body - } - // If there are no arguments or the function is empty, then return. 
- if fieldList.NumFields() == 0 || body == nil || len(body.List) == 0 { - return - } - - switch expr := body.List[0].(type) { - case *ast.ReturnStmt: - if !inspectWrappers { - // Ignore functions that only contain a return statement to reduce false positives. - return - } - case *ast.ExprStmt: - callExpr, ok := expr.X.(*ast.CallExpr) - if !ok || len(body.List) > 1 { - break - } - // Ignore functions that only contain a panic statement to reduce false positives. - if fun, ok := callExpr.Fun.(*ast.Ident); ok && fun.Name == "panic" { - return - } - } - - // Get the useful data from each field. - params := make(map[string]*paramData) - unused := make(map[*paramData]bool) - for _, f := range fieldList.List { - for _, i := range f.Names { - if i.Name == "_" { - continue - } - params[i.Name] = ¶mData{ - field: f, - ident: i, - typObj: pass.TypesInfo.ObjectOf(i), - } - unused[params[i.Name]] = true - } - } - - // Traverse through the body of the function and - // check to see which parameters are unused. - ast.Inspect(body, func(node ast.Node) bool { - n, ok := node.(*ast.Ident) - if !ok { - return true - } - param, ok := params[n.Name] - if !ok { - return false - } - if nObj := pass.TypesInfo.ObjectOf(n); nObj != param.typObj { - return false - } - delete(unused, param) - return false - }) - - // Create the reports for the unused parameters. - for u := range unused { - start, end := u.field.Pos(), u.field.End() - if len(u.field.Names) > 1 { - start, end = u.ident.Pos(), u.ident.End() - } - // TODO(golang/go#36602): Add suggested fixes to automatically - // remove the unused parameter from every use of this - // function. 
- pass.Report(analysis.Diagnostic{ - Pos: start, - End: end, - Message: fmt.Sprintf("potentially unused parameter: '%s'", u.ident.Name), - SuggestedFixes: []analysis.SuggestedFix{{ - Message: `Replace with "_"`, - TextEdits: []analysis.TextEdit{{ - Pos: u.ident.Pos(), - End: u.ident.End(), - NewText: []byte("_"), - }}, - }}, - }) - } - }) - return nil, nil -} diff --git a/gopls/internal/lsp/analysis/unusedparams/unusedparams_test.go b/gopls/internal/lsp/analysis/unusedparams/unusedparams_test.go deleted file mode 100644 index fdd43b821fe..00000000000 --- a/gopls/internal/lsp/analysis/unusedparams/unusedparams_test.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package unusedparams_test - -import ( - "testing" - - "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/lsp/analysis/unusedparams" - "golang.org/x/tools/internal/typeparams" -) - -func Test(t *testing.T) { - testdata := analysistest.TestData() - tests := []string{"a"} - if typeparams.Enabled { - tests = append(tests, "typeparams") - } - analysistest.RunWithSuggestedFixes(t, testdata, unusedparams.Analyzer, tests...) -} diff --git a/gopls/internal/lsp/analysis/unusedvariable/testdata/src/assign/a.go b/gopls/internal/lsp/analysis/unusedvariable/testdata/src/assign/a.go deleted file mode 100644 index aa9f46e5b31..00000000000 --- a/gopls/internal/lsp/analysis/unusedvariable/testdata/src/assign/a.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package a - -import ( - "fmt" - "os" -) - -type A struct { - b int -} - -func singleAssignment() { - v := "s" // want `v declared (and|but) not used` - - s := []int{ // want `s declared (and|but) not used` - 1, - 2, - } - - a := func(s string) bool { // want `a declared (and|but) not used` - return false - } - - if 1 == 1 { - s := "v" // want `s declared (and|but) not used` - } - - panic("I should survive") -} - -func noOtherStmtsInBlock() { - v := "s" // want `v declared (and|but) not used` -} - -func partOfMultiAssignment() { - f, err := os.Open("file") // want `f declared (and|but) not used` - panic(err) -} - -func sideEffects(cBool chan bool, cInt chan int) { - b := <-c // want `b declared (and|but) not used` - s := fmt.Sprint("") // want `s declared (and|but) not used` - a := A{ // want `a declared (and|but) not used` - b: func() int { - return 1 - }(), - } - c := A{<-cInt} // want `c declared (and|but) not used` - d := fInt() + <-cInt // want `d declared (and|but) not used` - e := fBool() && <-cBool // want `e declared (and|but) not used` - f := map[int]int{ // want `f declared (and|but) not used` - fInt(): <-cInt, - } - g := []int{<-cInt} // want `g declared (and|but) not used` - h := func(s string) {} // want `h declared (and|but) not used` - i := func(s string) {}() // want `i declared (and|but) not used` -} - -func commentAbove() { - // v is a variable - v := "s" // want `v declared (and|but) not used` -} - -func fBool() bool { - return true -} - -func fInt() int { - return 1 -} diff --git a/gopls/internal/lsp/analysis/unusedvariable/testdata/src/assign/a.go.golden b/gopls/internal/lsp/analysis/unusedvariable/testdata/src/assign/a.go.golden deleted file mode 100644 index 18173ce0bf9..00000000000 --- a/gopls/internal/lsp/analysis/unusedvariable/testdata/src/assign/a.go.golden +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package a - -import ( - "fmt" - "os" -) - -type A struct { - b int -} - -func singleAssignment() { - if 1 == 1 { - } - - panic("I should survive") -} - -func noOtherStmtsInBlock() { -} - -func partOfMultiAssignment() { - _, err := os.Open("file") // want `f declared (and|but) not used` - panic(err) -} - -func sideEffects(cBool chan bool, cInt chan int) { - <-c // want `b declared (and|but) not used` - fmt.Sprint("") // want `s declared (and|but) not used` - A{ // want `a declared (and|but) not used` - b: func() int { - return 1 - }(), - } - A{<-cInt} // want `c declared (and|but) not used` - fInt() + <-cInt // want `d declared (and|but) not used` - fBool() && <-cBool // want `e declared (and|but) not used` - map[int]int{ // want `f declared (and|but) not used` - fInt(): <-cInt, - } - []int{<-cInt} // want `g declared (and|but) not used` - func(s string) {}() // want `i declared (and|but) not used` -} - -func commentAbove() { - // v is a variable -} - -func fBool() bool { - return true -} - -func fInt() int { - return 1 -} diff --git a/gopls/internal/lsp/analysis/unusedvariable/testdata/src/decl/a.go b/gopls/internal/lsp/analysis/unusedvariable/testdata/src/decl/a.go deleted file mode 100644 index 8e843024a54..00000000000 --- a/gopls/internal/lsp/analysis/unusedvariable/testdata/src/decl/a.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package decl - -func a() { - var b, c bool // want `b declared (and|but) not used` - panic(c) - - if 1 == 1 { - var s string // want `s declared (and|but) not used` - } -} - -func b() { - // b is a variable - var b bool // want `b declared (and|but) not used` -} - -func c() { - var ( - d string - - // some comment for c - c bool // want `c declared (and|but) not used` - ) - - panic(d) -} diff --git a/gopls/internal/lsp/analysis/unusedvariable/testdata/src/decl/a.go.golden b/gopls/internal/lsp/analysis/unusedvariable/testdata/src/decl/a.go.golden deleted file mode 100644 index 6ed97332eea..00000000000 --- a/gopls/internal/lsp/analysis/unusedvariable/testdata/src/decl/a.go.golden +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package decl - -func a() { - var c bool // want `b declared (and|but) not used` - panic(c) - - if 1 == 1 { - } -} - -func b() { - // b is a variable -} - -func c() { - var ( - d string - ) - panic(d) -} diff --git a/gopls/internal/lsp/analysis/useany/useany_test.go b/gopls/internal/lsp/analysis/useany/useany_test.go deleted file mode 100644 index 083c3d54fd4..00000000000 --- a/gopls/internal/lsp/analysis/useany/useany_test.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package useany_test - -import ( - "testing" - - "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/lsp/analysis/useany" - "golang.org/x/tools/internal/typeparams" -) - -func Test(t *testing.T) { - if !typeparams.Enabled { - t.Skip("type params are not enabled") - } - testdata := analysistest.TestData() - analysistest.RunWithSuggestedFixes(t, testdata, useany.Analyzer, "a") -} diff --git a/gopls/internal/lsp/cache/cache.go b/gopls/internal/lsp/cache/cache.go deleted file mode 100644 index b1cdfcef16b..00000000000 --- a/gopls/internal/lsp/cache/cache.go +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cache - -import ( - "context" - "reflect" - "strconv" - "sync/atomic" - "time" - - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/gocommand" - "golang.org/x/tools/internal/memoize" - "golang.org/x/tools/internal/robustio" -) - -// New Creates a new cache for gopls operation results, using the given file -// set, shared store, and session options. -// -// Both the fset and store may be nil, but if store is non-nil so must be fset -// (and they must always be used together), otherwise it may be possible to get -// cached data referencing token.Pos values not mapped by the FileSet. -func New(store *memoize.Store) *Cache { - index := atomic.AddInt64(&cacheIndex, 1) - - if store == nil { - store = &memoize.Store{} - } - - c := &Cache{ - id: strconv.FormatInt(index, 10), - store: store, - memoizedFS: &memoizedFS{filesByID: map[robustio.FileID][]*DiskFile{}}, - } - return c -} - -// A Cache holds caching stores that are bundled together for consistency. -// -// TODO(rfindley): once fset and store need not be bundled together, the Cache -// type can be eliminated. 
-type Cache struct { - id string - - store *memoize.Store - - *memoizedFS // implements source.FileSource -} - -// NewSession creates a new gopls session with the given cache and options overrides. -// -// The provided optionsOverrides may be nil. -// -// TODO(rfindley): move this to session.go. -func NewSession(ctx context.Context, c *Cache) *Session { - index := atomic.AddInt64(&sessionIndex, 1) - s := &Session{ - id: strconv.FormatInt(index, 10), - cache: c, - gocmdRunner: &gocommand.Runner{}, - overlayFS: newOverlayFS(c), - parseCache: newParseCache(1 * time.Minute), // keep recently parsed files for a minute, to optimize typing CPU - } - event.Log(ctx, "New session", KeyCreateSession.Of(s)) - return s -} - -var cacheIndex, sessionIndex, viewIndex int64 - -func (c *Cache) ID() string { return c.id } -func (c *Cache) MemStats() map[reflect.Type]int { return c.store.Stats() } - -// FileStats returns information about the set of files stored in the cache. -// It is intended for debugging only. -func (c *Cache) FileStats() (files, largest, errs int) { - return c.fileStats() -} diff --git a/gopls/internal/lsp/cache/check.go b/gopls/internal/lsp/cache/check.go deleted file mode 100644 index 93d6b086fde..00000000000 --- a/gopls/internal/lsp/cache/check.go +++ /dev/null @@ -1,1864 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package cache - -import ( - "context" - "crypto/sha256" - "fmt" - "go/ast" - "go/parser" - "go/token" - "go/types" - "regexp" - "runtime" - "sort" - "strings" - "sync" - "sync/atomic" - - "golang.org/x/mod/module" - "golang.org/x/sync/errgroup" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/filecache" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/source/typerefs" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" - "golang.org/x/tools/internal/gcimporter" - "golang.org/x/tools/internal/packagesinternal" - "golang.org/x/tools/internal/tokeninternal" - "golang.org/x/tools/internal/typeparams" - "golang.org/x/tools/internal/typesinternal" -) - -// Various optimizations that should not affect correctness. -const ( - preserveImportGraph = true // hold on to the import graph for open packages -) - -type unit = struct{} - -// A typeCheckBatch holds data for a logical type-checking operation, which may -// type-check many unrelated packages. -// -// It shares state such as parsed files and imports, to optimize type-checking -// for packages with overlapping dependency graphs. 
-type typeCheckBatch struct { - activePackageCache interface { - getActivePackage(id PackageID) *Package - setActivePackage(id PackageID, pkg *Package) - } - syntaxIndex map[PackageID]int // requested ID -> index in ids - pre preTypeCheck - post postTypeCheck - handles map[PackageID]*packageHandle - parseCache *parseCache - fset *token.FileSet // describes all parsed or imported files - cpulimit chan unit // concurrency limiter for CPU-bound operations - - mu sync.Mutex - syntaxPackages map[PackageID]*futurePackage // results of processing a requested package; may hold (nil, nil) - importPackages map[PackageID]*futurePackage // package results to use for importing -} - -// A futurePackage is a future result of type checking or importing a package, -// to be cached in a map. -// -// The goroutine that creates the futurePackage is responsible for evaluating -// its value, and closing the done channel. -type futurePackage struct { - done chan unit - v pkgOrErr -} - -type pkgOrErr struct { - pkg *types.Package - err error -} - -// TypeCheck type-checks the specified packages. -// -// The resulting packages slice always contains len(ids) entries, though some -// of them may be nil if (and only if) the resulting error is non-nil. -// -// An error is returned if any of the requested packages fail to type-check. -// This is different from having type-checking errors: a failure to type-check -// indicates context cancellation or otherwise significant failure to perform -// the type-checking operation. -func (s *snapshot) TypeCheck(ctx context.Context, ids ...PackageID) ([]source.Package, error) { - pkgs := make([]source.Package, len(ids)) - - var ( - needIDs []PackageID // ids to type-check - indexes []int // original index of requested ids - ) - - // Check for existing active packages, as any package will do. - // - // This is also done inside forEachPackage, but doing it here avoids - // unnecessary set up for type checking (e.g. assembling the package handle - // graph). 
- for i, id := range ids { - if pkg := s.getActivePackage(id); pkg != nil { - pkgs[i] = pkg - } else { - needIDs = append(needIDs, id) - indexes = append(indexes, i) - } - } - - post := func(i int, pkg *Package) { - pkgs[indexes[i]] = pkg - } - return pkgs, s.forEachPackage(ctx, needIDs, nil, post) -} - -// getImportGraph returns a shared import graph use for this snapshot, or nil. -// -// This is purely an optimization: holding on to more imports allows trading -// memory for CPU and latency. Currently, getImportGraph returns an import -// graph containing all packages imported by open packages, since these are -// highly likely to be needed when packages change. -// -// Furthermore, since we memoize active packages, including their imports in -// the shared import graph means we don't run the risk of pinning duplicate -// copies of common imports, if active packages are computed in separate type -// checking batches. -func (s *snapshot) getImportGraph(ctx context.Context) *importGraph { - if !preserveImportGraph { - return nil - } - s.mu.Lock() - - // Evaluate the shared import graph for the snapshot. There are three major - // codepaths here: - // - // 1. importGraphDone == nil, importGraph == nil: it is this goroutine's - // responsibility to type-check the shared import graph. - // 2. importGraphDone == nil, importGraph != nil: it is this goroutine's - // responsibility to resolve the import graph, which may result in - // type-checking only if the existing importGraph (carried over from the - // preceding snapshot) is invalid. - // 3. importGraphDone != nil: some other goroutine is doing (1) or (2), wait - // for the work to be done. 
- done := s.importGraphDone - if done == nil { - done = make(chan unit) - s.importGraphDone = done - release := s.Acquire() // must acquire to use the snapshot asynchronously - go func() { - defer release() - importGraph, err := s.resolveImportGraph() // may be nil - if err != nil { - if ctx.Err() == nil { - event.Error(ctx, "computing the shared import graph", err) - } - importGraph = nil - } - s.mu.Lock() - s.importGraph = importGraph - s.mu.Unlock() - close(done) - }() - } - s.mu.Unlock() - - select { - case <-done: - return s.importGraph - case <-ctx.Done(): - return nil - } -} - -// resolveImportGraph evaluates the shared import graph to use for -// type-checking in this snapshot. This may involve re-using the import graph -// of the previous snapshot (stored in s.importGraph), or computing a fresh -// import graph. -// -// resolveImportGraph should only be called from getImportGraph. -func (s *snapshot) resolveImportGraph() (*importGraph, error) { - ctx := s.backgroundCtx - ctx, done := event.Start(event.Detach(ctx), "cache.resolveImportGraph") - defer done() - - s.mu.Lock() - lastImportGraph := s.importGraph - s.mu.Unlock() - - openPackages := make(map[PackageID]bool) - for _, fh := range s.overlays() { - meta, err := s.MetadataForFile(ctx, fh.URI()) - if err != nil { - return nil, err - } - source.RemoveIntermediateTestVariants(&meta) - for _, m := range meta { - openPackages[m.ID] = true - } - } - - var openPackageIDs []source.PackageID - for id := range openPackages { - openPackageIDs = append(openPackageIDs, id) - } - - handles, err := s.getPackageHandles(ctx, openPackageIDs) - if err != nil { - return nil, err - } - - // Subtlety: we erase the upward cone of open packages from the shared import - // graph, to increase reusability. - // - // This is easiest to understand via an example: suppose A imports B, and B - // imports C. Now suppose A and B are open. If we preserve the entire set of - // shared deps by open packages, deps will be {B, C}. 
But this means that any - // change to the open package B will invalidate the shared import graph, - // meaning we will experience no benefit from sharing when B is edited. - // Consider that this will be a common scenario, when A is foo_test and B is - // foo. Better to just preserve the shared import C. - // - // With precise pruning, we may want to truncate this search based on - // reachability. - // - // TODO(rfindley): this logic could use a unit test. - volatileDeps := make(map[PackageID]bool) - var isVolatile func(*packageHandle) bool - isVolatile = func(ph *packageHandle) (volatile bool) { - if v, ok := volatileDeps[ph.m.ID]; ok { - return v - } - defer func() { - volatileDeps[ph.m.ID] = volatile - }() - if openPackages[ph.m.ID] { - return true - } - for _, dep := range ph.m.DepsByPkgPath { - if isVolatile(handles[dep]) { - return true - } - } - return false - } - for _, dep := range handles { - isVolatile(dep) - } - for id, volatile := range volatileDeps { - if volatile { - delete(handles, id) - } - } - - // We reuse the last import graph if and only if none of the dependencies - // have changed. Doing better would involve analyzing dependencies to find - // subgraphs that are still valid. Not worth it, especially when in the - // common case nothing has changed. 
- unchanged := lastImportGraph != nil && len(handles) == len(lastImportGraph.depKeys) - var ids []PackageID - depKeys := make(map[PackageID]source.Hash) - for id, ph := range handles { - ids = append(ids, id) - depKeys[id] = ph.key - if unchanged { - prevKey, ok := lastImportGraph.depKeys[id] - unchanged = ok && prevKey == ph.key - } - } - - if unchanged { - return lastImportGraph, nil - } - - b, err := s.forEachPackageInternal(ctx, nil, ids, nil, nil, nil, handles) - if err != nil { - return nil, err - } - - next := &importGraph{ - fset: b.fset, - depKeys: depKeys, - imports: make(map[PackageID]pkgOrErr), - } - for id, fut := range b.importPackages { - if fut.v.pkg == nil && fut.v.err == nil { - panic(fmt.Sprintf("internal error: import node %s is not evaluated", id)) - } - next.imports[id] = fut.v - } - return next, nil -} - -// An importGraph holds selected results of a type-checking pass, to be re-used -// by subsequent snapshots. -type importGraph struct { - fset *token.FileSet // fileset used for type checking imports - depKeys map[PackageID]source.Hash // hash of direct dependencies for this graph - imports map[PackageID]pkgOrErr // results of type checking -} - -// Package visiting functions used by forEachPackage; see the documentation of -// forEachPackage for details. -type ( - preTypeCheck = func(int, *packageHandle) bool // false => don't type check - postTypeCheck = func(int, *Package) -) - -// forEachPackage does a pre- and post- order traversal of the packages -// specified by ids using the provided pre and post functions. -// -// The pre func is optional. If set, pre is evaluated after the package -// handle has been constructed, but before type-checking. If pre returns false, -// type-checking is skipped for this package handle. -// -// post is called with a syntax package after type-checking completes -// successfully. It is only called if pre returned true. -// -// Both pre and post may be called concurrently. 
-func (s *snapshot) forEachPackage(ctx context.Context, ids []PackageID, pre preTypeCheck, post postTypeCheck) error { - ctx, done := event.Start(ctx, "cache.forEachPackage", tag.PackageCount.Of(len(ids))) - defer done() - - if len(ids) == 0 { - return nil // short cut: many call sites do not handle empty ids - } - - handles, err := s.getPackageHandles(ctx, ids) - if err != nil { - return err - } - - impGraph := s.getImportGraph(ctx) - _, err = s.forEachPackageInternal(ctx, impGraph, nil, ids, pre, post, handles) - return err -} - -// forEachPackageInternal is used by both forEachPackage and loadImportGraph to -// type-check a graph of packages. -// -// If a non-nil importGraph is provided, imports in this graph will be reused. -func (s *snapshot) forEachPackageInternal(ctx context.Context, importGraph *importGraph, importIDs, syntaxIDs []PackageID, pre preTypeCheck, post postTypeCheck, handles map[PackageID]*packageHandle) (*typeCheckBatch, error) { - b := &typeCheckBatch{ - activePackageCache: s, - pre: pre, - post: post, - handles: handles, - parseCache: s.view.parseCache, - fset: fileSetWithBase(reservedForParsing), - syntaxIndex: make(map[PackageID]int), - cpulimit: make(chan unit, runtime.GOMAXPROCS(0)), - syntaxPackages: make(map[PackageID]*futurePackage), - importPackages: make(map[PackageID]*futurePackage), - } - - if importGraph != nil { - // Clone the file set every time, to ensure we do not leak files. - b.fset = tokeninternal.CloneFileSet(importGraph.fset) - // Pre-populate future cache with 'done' futures. - done := make(chan unit) - close(done) - for id, res := range importGraph.imports { - b.importPackages[id] = &futurePackage{done, res} - } - } else { - b.fset = fileSetWithBase(reservedForParsing) - } - - for i, id := range syntaxIDs { - b.syntaxIndex[id] = i - } - - // Start a single goroutine for each requested package. - // - // Other packages are reached recursively, and will not be evaluated if they - // are not needed. 
- var g errgroup.Group - for _, id := range importIDs { - id := id - g.Go(func() error { - _, err := b.getImportPackage(ctx, id) - return err - }) - } - for i, id := range syntaxIDs { - i := i - id := id - g.Go(func() error { - _, err := b.handleSyntaxPackage(ctx, i, id) - return err - }) - } - return b, g.Wait() -} - -// TODO(rfindley): re-order the declarations below to read better from top-to-bottom. - -// getImportPackage returns the *types.Package to use for importing the -// package referenced by id. -// -// This may be the package produced by type-checking syntax (as in the case -// where id is in the set of requested IDs), a package loaded from export data, -// or a package type-checked for import only. -func (b *typeCheckBatch) getImportPackage(ctx context.Context, id PackageID) (pkg *types.Package, err error) { - b.mu.Lock() - f, ok := b.importPackages[id] - if ok { - b.mu.Unlock() - - select { - case <-ctx.Done(): - return nil, ctx.Err() - case <-f.done: - return f.v.pkg, f.v.err - } - } - - f = &futurePackage{done: make(chan unit)} - b.importPackages[id] = f - b.mu.Unlock() - - defer func() { - f.v = pkgOrErr{pkg, err} - close(f.done) - }() - - if index, ok := b.syntaxIndex[id]; ok { - pkg, err := b.handleSyntaxPackage(ctx, index, id) - if err != nil { - return nil, err - } - if pkg != nil { - return pkg, nil - } - // type-checking was short-circuited by the pre- func. - } - - // unsafe cannot be imported or type-checked. - if id == "unsafe" { - return types.Unsafe, nil - } - - ph := b.handles[id] - - // Do a second check for "unsafe" defensively, due to golang/go#60890. - if ph.m.PkgPath == "unsafe" { - bug.Reportf("encountered \"unsafe\" as %s (golang/go#60890)", id) - return types.Unsafe, nil - } - - data, err := filecache.Get(exportDataKind, ph.key) - if err == filecache.ErrNotFound { - // No cached export data: type-check as fast as possible. 
- return b.checkPackageForImport(ctx, ph) - } - if err != nil { - return nil, fmt.Errorf("failed to read cache data for %s: %v", ph.m.ID, err) - } - return b.importPackage(ctx, ph.m, data) -} - -// handleSyntaxPackage handles one package from the ids slice. -// -// If type checking occurred while handling the package, it returns the -// resulting types.Package so that it may be used for importing. -// -// handleSyntaxPackage returns (nil, nil) if pre returned false. -func (b *typeCheckBatch) handleSyntaxPackage(ctx context.Context, i int, id PackageID) (pkg *types.Package, err error) { - b.mu.Lock() - f, ok := b.syntaxPackages[id] - if ok { - b.mu.Unlock() - <-f.done - return f.v.pkg, f.v.err - } - - f = &futurePackage{done: make(chan unit)} - b.syntaxPackages[id] = f - b.mu.Unlock() - defer func() { - f.v = pkgOrErr{pkg, err} - close(f.done) - }() - - ph := b.handles[id] - if b.pre != nil && !b.pre(i, ph) { - return nil, nil // skip: export data only - } - - // Check for existing active packages. - // - // Since gopls can't depend on package identity, any instance of the - // requested package must be ok to return. - // - // This is an optimization to avoid redundant type-checking: following - // changes to an open package many LSP clients send several successive - // requests for package information for the modified package (semantic - // tokens, code lens, inlay hints, etc.) - if pkg := b.activePackageCache.getActivePackage(id); pkg != nil { - b.post(i, pkg) - return nil, nil // skip: not checked in this batch - } - - if err := b.awaitPredecessors(ctx, ph.m); err != nil { - // One failed precessesor should not fail the entire type checking - // operation. Errors related to imports will be reported as type checking - // diagnostics. - if ctx.Err() != nil { - return nil, ctx.Err() - } - } - - // Wait to acquire a CPU token. - // - // Note: it is important to acquire this token only after awaiting - // predecessors, to avoid starvation. 
- select { - case <-ctx.Done(): - return nil, ctx.Err() - case b.cpulimit <- unit{}: - defer func() { - <-b.cpulimit // release CPU token - }() - } - - // We need a syntax package. - syntaxPkg, err := b.checkPackage(ctx, ph) - if err != nil { - return nil, err - } - b.activePackageCache.setActivePackage(id, syntaxPkg) - b.post(i, syntaxPkg) - - return syntaxPkg.pkg.types, nil -} - -// importPackage loads the given package from its export data in p.exportData -// (which must already be populated). -func (b *typeCheckBatch) importPackage(ctx context.Context, m *source.Metadata, data []byte) (*types.Package, error) { - ctx, done := event.Start(ctx, "cache.typeCheckBatch.importPackage", tag.Package.Of(string(m.ID))) - defer done() - - impMap := b.importMap(m.ID) - - thisPackage := types.NewPackage(string(m.PkgPath), string(m.Name)) - getPackages := func(items []gcimporter.GetPackagesItem) error { - for i, item := range items { - var id PackageID - var pkg *types.Package - if item.Path == string(m.PkgPath) { - id = m.ID - pkg = thisPackage - } else { - id = impMap[item.Path] - var err error - pkg, err = b.getImportPackage(ctx, id) - if err != nil { - return err - } - } - items[i].Pkg = pkg - - // debugging issue #60904 - if pkg.Name() != item.Name { - return bug.Errorf("internal error: package name is %q, want %q (id=%q, path=%q) (see issue #60904)", - pkg.Name(), item.Name, id, item.Path) - } - } - return nil - } - - // Importing is potentially expensive, and might not encounter cancellations - // via dependencies (e.g. if they have already been evaluated). - if ctx.Err() != nil { - return nil, ctx.Err() - } - - // TODO(rfindley): collect "deep" hashes here using the getPackages - // callback, for precise pruning. 
- imported, err := gcimporter.IImportShallow(b.fset, getPackages, data, string(m.PkgPath), bug.Reportf) - if err != nil { - return nil, fmt.Errorf("import failed for %q: %v", m.ID, err) - } - return imported, nil -} - -// checkPackageForImport type checks, but skips function bodies and does not -// record syntax information. -func (b *typeCheckBatch) checkPackageForImport(ctx context.Context, ph *packageHandle) (*types.Package, error) { - ctx, done := event.Start(ctx, "cache.typeCheckBatch.checkPackageForImport", tag.Package.Of(string(ph.m.ID))) - defer done() - - onError := func(e error) { - // Ignore errors for exporting. - } - cfg := b.typesConfig(ctx, ph.localInputs, onError) - cfg.IgnoreFuncBodies = true - - // Parse the compiled go files, bypassing the parse cache as packages checked - // for import are unlikely to get cache hits. Additionally, we can optimize - // parsing slightly by not passing parser.ParseComments. - pgfs := make([]*source.ParsedGoFile, len(ph.localInputs.compiledGoFiles)) - { - var group errgroup.Group - // Set an arbitrary concurrency limit; we want some parallelism but don't - // need GOMAXPROCS, as there is already a lot of concurrency among calls to - // checkPackageForImport. - // - // TODO(rfindley): is there a better way to limit parallelism here? We could - // have a global limit on the type-check batch, but would have to be very - // careful to avoid starvation. - group.SetLimit(4) - for i, fh := range ph.localInputs.compiledGoFiles { - i, fh := i, fh - group.Go(func() error { - pgf, err := parseGoImpl(ctx, b.fset, fh, parser.SkipObjectResolution, false) - pgfs[i] = pgf - return err - }) - } - if err := group.Wait(); err != nil { - return nil, err // cancelled, or catastrophic error (e.g. 
missing file) - } - } - pkg := types.NewPackage(string(ph.localInputs.pkgPath), string(ph.localInputs.name)) - check := types.NewChecker(cfg, b.fset, pkg, nil) - - files := make([]*ast.File, len(pgfs)) - for i, pgf := range pgfs { - files[i] = pgf.File - } - - // Type checking is expensive, and we may not have ecountered cancellations - // via parsing (e.g. if we got nothing but cache hits for parsed files). - if ctx.Err() != nil { - return nil, ctx.Err() - } - - _ = check.Files(files) // ignore errors - - // If the context was cancelled, we may have returned a ton of transient - // errors to the type checker. Swallow them. - if ctx.Err() != nil { - return nil, ctx.Err() - } - - // Asynchronously record export data. - go func() { - exportData, err := gcimporter.IExportShallow(b.fset, pkg, bug.Reportf) - if err != nil { - bug.Reportf("exporting package %v: %v", ph.m.ID, err) - return - } - if err := filecache.Set(exportDataKind, ph.key, exportData); err != nil { - event.Error(ctx, fmt.Sprintf("storing export data for %s", ph.m.ID), err) - } - }() - return pkg, nil -} - -// checkPackage "fully type checks" to produce a syntax package. -func (b *typeCheckBatch) checkPackage(ctx context.Context, ph *packageHandle) (*Package, error) { - ctx, done := event.Start(ctx, "cache.typeCheckBatch.checkPackage", tag.Package.Of(string(ph.m.ID))) - defer done() - - // TODO(rfindley): refactor to inline typeCheckImpl here. There is no need - // for so many layers to build up the package - // (checkPackage->typeCheckImpl->doTypeCheck). - pkg, err := typeCheckImpl(ctx, b, ph.localInputs) - - if err == nil { - // Write package data to disk asynchronously. 
- go func() { - toCache := map[string][]byte{ - xrefsKind: pkg.xrefs(), - methodSetsKind: pkg.methodsets().Encode(), - diagnosticsKind: encodeDiagnostics(pkg.diagnostics), - } - - if ph.m.PkgPath != "unsafe" { // unsafe cannot be exported - exportData, err := gcimporter.IExportShallow(pkg.fset, pkg.types, bug.Reportf) - if err != nil { - bug.Reportf("exporting package %v: %v", ph.m.ID, err) - } else { - toCache[exportDataKind] = exportData - } - } else if ph.m.ID != "unsafe" { - // golang/go#60890: we should only ever see one variant of the "unsafe" - // package. - bug.Reportf("encountered \"unsafe\" as %s (golang/go#60890)", ph.m.ID) - } - - for kind, data := range toCache { - if err := filecache.Set(kind, ph.key, data); err != nil { - event.Error(ctx, fmt.Sprintf("storing %s data for %s", kind, ph.m.ID), err) - } - } - }() - } - - return &Package{ph.m, pkg}, err -} - -// awaitPredecessors awaits all packages for m.DepsByPkgPath, returning an -// error if awaiting failed due to context cancellation or if there was an -// unrecoverable error loading export data. -// -// TODO(rfindley): inline, now that this is only called in one place. -func (b *typeCheckBatch) awaitPredecessors(ctx context.Context, m *source.Metadata) error { - // await predecessors concurrently, as some of them may be non-syntax - // packages, and therefore will not have been started by the type-checking - // batch. - var g errgroup.Group - for _, depID := range m.DepsByPkgPath { - depID := depID - g.Go(func() error { - _, err := b.getImportPackage(ctx, depID) - return err - }) - } - return g.Wait() -} - -// importMap returns the map of package path -> package ID relative to the -// specified ID. 
-func (b *typeCheckBatch) importMap(id PackageID) map[string]source.PackageID { - impMap := make(map[string]source.PackageID) - var populateDeps func(m *source.Metadata) - populateDeps = func(parent *source.Metadata) { - for _, id := range parent.DepsByPkgPath { - m := b.handles[id].m - if _, ok := impMap[string(m.PkgPath)]; ok { - continue - } - impMap[string(m.PkgPath)] = m.ID - populateDeps(m) - } - } - m := b.handles[id].m - populateDeps(m) - return impMap -} - -// A packageHandle holds inputs required to compute a type-checked package, -// including inputs to type checking itself, and a key for looking up -// precomputed data. -// -// packageHandles may be invalid following an invalidation via snapshot.clone, -// but the handles returned by getPackageHandles will always be valid. -// -// packageHandles are critical for implementing "precise pruning" in gopls: -// packageHandle.key is a hash of a precise set of inputs, such as package -// files and "reachable" syntax, that may affect type checking. -// -// packageHandles also keep track of state that allows gopls to compute, and -// then quickly recompute, these keys. This state is split into two categories: -// - local state, which depends only on the package's local files and metadata -// - other state, which includes data derived from dependencies. -// -// Dividing the data in this way allows gopls to minimize invalidation when a -// package is modified. For example, any change to a package file fully -// invalidates the package handle. On the other hand, if that change was not -// metadata-affecting it may be the case that packages indirectly depending on -// the modified package are unaffected by the change. For that reason, we have -// two types of invalidation, corresponding to the two types of data above: -// - deletion of the handle, which occurs when the package itself changes -// - clearing of the validated field, which marks the package as possibly -// invalid. 
-// -// With the second type of invalidation, packageHandles are re-evaluated from the -// bottom up. If this process encounters a packageHandle whose deps have not -// changed (as detected by the depkeys field), then the packageHandle in -// question must also not have changed, and we need not re-evaluate its key. -type packageHandle struct { - m *source.Metadata - - // Local data: - - // localInputs holds all local type-checking localInputs, excluding - // dependencies. - localInputs typeCheckInputs - // localKey is a hash of localInputs. - localKey source.Hash - // refs is the result of syntactic dependency analysis produced by the - // typerefs package. - refs map[string][]typerefs.Symbol - - // Data derived from dependencies: - - // validated indicates whether the current packageHandle is known to have a - // valid key. Invalidated package handles are stored for packages whose - // type information may have changed. - validated bool - // depKeys records the key of each dependency that was used to calculate the - // key above. If the handle becomes invalid, we must re-check that each still - // matches. - depKeys map[PackageID]source.Hash - // key is the hashed key for the package. - // - // It includes the all bits of the transitive closure of - // dependencies's sources. - key source.Hash -} - -// clone returns a copy of the receiver with the validated bit set to the -// provided value. -func (ph *packageHandle) clone(validated bool) *packageHandle { - copy := *ph - copy.validated = validated - return © -} - -// getPackageHandles gets package handles for all given ids and their -// dependencies, recursively. -func (s *snapshot) getPackageHandles(ctx context.Context, ids []PackageID) (map[PackageID]*packageHandle, error) { - // perform a two-pass traversal. - // - // On the first pass, build up a bidirectional graph of handle nodes, and collect leaves. - // Then build package handles from bottom up. 
- - s.mu.Lock() // guard s.meta and s.packages below - b := &packageHandleBuilder{ - s: s, - transitiveRefs: make(map[typerefs.IndexID]*partialRefs), - nodes: make(map[typerefs.IndexID]*handleNode), - } - - var leaves []*handleNode - var makeNode func(*handleNode, PackageID) *handleNode - makeNode = func(from *handleNode, id PackageID) *handleNode { - idxID := b.s.pkgIndex.IndexID(id) - n, ok := b.nodes[idxID] - if !ok { - m := s.meta.metadata[id] - if m == nil { - panic(fmt.Sprintf("nil metadata for %q", id)) - } - n = &handleNode{ - m: m, - idxID: idxID, - unfinishedSuccs: int32(len(m.DepsByPkgPath)), - } - if entry, hit := b.s.packages.Get(m.ID); hit { - n.ph = entry - } - if n.unfinishedSuccs == 0 { - leaves = append(leaves, n) - } else { - n.succs = make(map[source.PackageID]*handleNode, n.unfinishedSuccs) - } - b.nodes[idxID] = n - for _, depID := range m.DepsByPkgPath { - n.succs[depID] = makeNode(n, depID) - } - } - // Add edge from predecessor. - if from != nil { - n.preds = append(n.preds, from) - } - return n - } - for _, id := range ids { - makeNode(nil, id) - } - s.mu.Unlock() - - g, ctx := errgroup.WithContext(ctx) - - // files are preloaded, so building package handles is CPU-bound. - // - // Note that we can't use g.SetLimit, as that could result in starvation: - // g.Go blocks until a slot is available, and so all existing goroutines - // could be blocked trying to enqueue a predecessor. - limiter := make(chan unit, runtime.GOMAXPROCS(0)) - - var enqueue func(*handleNode) - enqueue = func(n *handleNode) { - g.Go(func() error { - limiter <- unit{} - defer func() { <-limiter }() - - if ctx.Err() != nil { - return ctx.Err() - } - - b.buildPackageHandle(ctx, n) - - for _, pred := range n.preds { - if atomic.AddInt32(&pred.unfinishedSuccs, -1) == 0 { - enqueue(pred) - } - } - - return n.err - }) - } - for _, leaf := range leaves { - enqueue(leaf) - } - - if err := g.Wait(); err != nil { - return nil, err - } - - // Copy handles into the result map. 
- handles := make(map[PackageID]*packageHandle, len(b.nodes)) - for _, v := range b.nodes { - assert(v.ph != nil, "nil handle") - handles[v.m.ID] = v.ph - } - - return handles, nil -} - -// A packageHandleBuilder computes a batch of packageHandles concurrently, -// sharing computed transitive reachability sets used to compute package keys. -type packageHandleBuilder struct { - meta *metadataGraph - s *snapshot - - // nodes are assembled synchronously. - nodes map[typerefs.IndexID]*handleNode - - // transitiveRefs is incrementally evaluated as package handles are built. - transitiveRefsMu sync.Mutex - transitiveRefs map[typerefs.IndexID]*partialRefs // see getTransitiveRefs -} - -// A handleNode represents a to-be-computed packageHandle within a graph of -// predecessors and successors. -// -// It is used to implement a bottom-up construction of packageHandles. -type handleNode struct { - m *source.Metadata - idxID typerefs.IndexID - ph *packageHandle - err error - preds []*handleNode - succs map[PackageID]*handleNode - unfinishedSuccs int32 -} - -// partialRefs maps names declared by a given package to their set of -// transitive references. -// -// If complete is set, refs is known to be complete for the package in -// question. Otherwise, it may only map a subset of all names declared by the -// package. -type partialRefs struct { - refs map[string]*typerefs.PackageSet - complete bool -} - -// getTransitiveRefs gets or computes the set of transitively reachable -// packages for each exported name in the package specified by id. -// -// The operation may fail if building a predecessor failed. If and only if this -// occurs, the result will be nil. 
-func (b *packageHandleBuilder) getTransitiveRefs(pkgID PackageID) map[string]*typerefs.PackageSet { - b.transitiveRefsMu.Lock() - defer b.transitiveRefsMu.Unlock() - - idxID := b.s.pkgIndex.IndexID(pkgID) - trefs, ok := b.transitiveRefs[idxID] - if !ok { - trefs = &partialRefs{ - refs: make(map[string]*typerefs.PackageSet), - } - b.transitiveRefs[idxID] = trefs - } - - if !trefs.complete { - trefs.complete = true - ph := b.nodes[idxID].ph - for name := range ph.refs { - if ('A' <= name[0] && name[0] <= 'Z') || token.IsExported(name) { - if _, ok := trefs.refs[name]; !ok { - pkgs := b.s.pkgIndex.NewSet() - for _, sym := range ph.refs[name] { - pkgs.Add(sym.Package) - otherSet := b.getOneTransitiveRefLocked(sym) - pkgs.Union(otherSet) - } - trefs.refs[name] = pkgs - } - } - } - } - - return trefs.refs -} - -// getOneTransitiveRefLocked computes the full set packages transitively -// reachable through the given sym reference. -// -// It may return nil if the reference is invalid (i.e. the referenced name does -// not exist). -func (b *packageHandleBuilder) getOneTransitiveRefLocked(sym typerefs.Symbol) *typerefs.PackageSet { - assert(token.IsExported(sym.Name), "expected exported symbol") - - trefs := b.transitiveRefs[sym.Package] - if trefs == nil { - trefs = &partialRefs{ - refs: make(map[string]*typerefs.PackageSet), - complete: false, - } - b.transitiveRefs[sym.Package] = trefs - } - - pkgs, ok := trefs.refs[sym.Name] - if ok && pkgs == nil { - // See below, where refs is set to nil before recursing. - bug.Reportf("cycle detected to %q in reference graph", sym.Name) - } - - // Note that if (!ok && trefs.complete), the name does not exist in the - // referenced package, and we should not write to trefs as that may introduce - // a race. 
- if !ok && !trefs.complete { - n := b.nodes[sym.Package] - if n == nil { - // We should always have IndexID in our node set, because symbol references - // should only be recorded for packages that actually exist in the import graph. - // - // However, it is not easy to prove this (typerefs are serialized and - // deserialized), so make this code temporarily defensive while we are on a - // point release. - // - // TODO(rfindley): in the future, we should turn this into an assertion. - bug.Reportf("missing reference to package %s", b.s.pkgIndex.PackageID(sym.Package)) - return nil - } - - // Break cycles. This is perhaps overly defensive as cycles should not - // exist at this point: metadata cycles should have been broken at load - // time, and intra-package reference cycles should have been contracted by - // the typerefs algorithm. - // - // See the "cycle detected" bug report above. - trefs.refs[sym.Name] = nil - - pkgs := b.s.pkgIndex.NewSet() - for _, sym2 := range n.ph.refs[sym.Name] { - pkgs.Add(sym2.Package) - otherSet := b.getOneTransitiveRefLocked(sym2) - pkgs.Union(otherSet) - } - trefs.refs[sym.Name] = pkgs - } - - return pkgs -} - -// buildPackageHandle gets or builds a package handle for the given id, storing -// its result in the snapshot.packages map. -// -// buildPackageHandle must only be called from getPackageHandles. -func (b *packageHandleBuilder) buildPackageHandle(ctx context.Context, n *handleNode) { - var prevPH *packageHandle - if n.ph != nil { - // Existing package handle: if it is valid, return it. Otherwise, create a - // copy to update. - if n.ph.validated { - return - } - prevPH = n.ph - // Either prevPH is still valid, or we will update the key and depKeys of - // this copy. In either case, the result will be valid. - n.ph = prevPH.clone(true) - } else { - // No package handle: read and analyze the package syntax. 
- inputs, err := b.s.typeCheckInputs(ctx, n.m) - if err != nil { - n.err = err - return - } - refs, err := b.s.typerefs(ctx, n.m, inputs.compiledGoFiles) - if err != nil { - n.err = err - return - } - n.ph = &packageHandle{ - m: n.m, - localInputs: inputs, - localKey: localPackageKey(inputs), - refs: refs, - validated: true, - } - } - - // ph either did not exist, or was invalid. We must re-evaluate deps and key. - if err := b.evaluatePackageHandle(prevPH, n); err != nil { - n.err = err - return - } - - assert(n.ph.validated, "unvalidated handle") - - // Ensure the result (or an equivalent) is recorded in the snapshot. - b.s.mu.Lock() - defer b.s.mu.Unlock() - - // Check that the metadata has not changed - // (which should invalidate this handle). - // - // TODO(rfindley): eventually promote this to an assert. - // TODO(rfindley): move this to after building the package handle graph? - if b.s.meta.metadata[n.m.ID] != n.m { - bug.Reportf("stale metadata for %s", n.m.ID) - } - - // Check the packages map again in case another goroutine got there first. - if alt, ok := b.s.packages.Get(n.m.ID); ok && alt.validated { - if alt.m != n.m { - bug.Reportf("existing package handle does not match for %s", n.m.ID) - } - n.ph = alt - } else { - b.s.packages.Set(n.m.ID, n.ph, nil) - } -} - -// evaluatePackageHandle validates and/or computes the key of ph, setting key, -// depKeys, and the validated flag on ph. -// -// It uses prevPH to avoid recomputing keys that can't have changed, since -// their depKeys did not change. -// -// See the documentation for packageHandle for more details about packageHandle -// state, and see the documentation for the typerefs package for more details -// about precise reachability analysis. -func (b *packageHandleBuilder) evaluatePackageHandle(prevPH *packageHandle, n *handleNode) error { - // Opt: if no dep keys have changed, we need not re-evaluate the key. 
- if prevPH != nil { - depsChanged := false - assert(len(prevPH.depKeys) == len(n.succs), "mismatching dep count") - for id, succ := range n.succs { - oldKey, ok := prevPH.depKeys[id] - assert(ok, "missing dep") - if oldKey != succ.ph.key { - depsChanged = true - break - } - } - if !depsChanged { - return nil // key cannot have changed - } - } - - // Deps have changed, so we must re-evaluate the key. - n.ph.depKeys = make(map[PackageID]source.Hash) - - // See the typerefs package: the reachable set of packages is defined to be - // the set of packages containing syntax that is reachable through the - // exported symbols in the dependencies of n.ph. - reachable := b.s.pkgIndex.NewSet() - for depID, succ := range n.succs { - n.ph.depKeys[depID] = succ.ph.key - reachable.Add(succ.idxID) - trefs := b.getTransitiveRefs(succ.m.ID) - if trefs == nil { - // A predecessor failed to build due to e.g. context cancellation. - return fmt.Errorf("missing transitive refs for %s", succ.m.ID) - } - for _, set := range trefs { - reachable.Union(set) - } - } - - // Collect reachable handles. - var reachableHandles []*packageHandle - // In the presence of context cancellation, any package may be missing. - // We need all dependencies to produce a valid key. - missingReachablePackage := false - reachable.Elems(func(id typerefs.IndexID) { - dh := b.nodes[id] - if dh == nil { - missingReachablePackage = true - } else { - assert(dh.ph.validated, "unvalidated dependency") - reachableHandles = append(reachableHandles, dh.ph) - } - }) - if missingReachablePackage { - return fmt.Errorf("missing reachable package") - } - // Sort for stability. - sort.Slice(reachableHandles, func(i, j int) bool { - return reachableHandles[i].m.ID < reachableHandles[j].m.ID - }) - - // Key is the hash of the local key, and the local key of all reachable - // packages. 
- depHasher := sha256.New() - depHasher.Write(n.ph.localKey[:]) - for _, rph := range reachableHandles { - depHasher.Write(rph.localKey[:]) - } - depHasher.Sum(n.ph.key[:0]) - - return nil -} - -// typerefs returns typerefs for the package described by m and cgfs, after -// either computing it or loading it from the file cache. -func (s *snapshot) typerefs(ctx context.Context, m *source.Metadata, cgfs []source.FileHandle) (map[string][]typerefs.Symbol, error) { - imports := make(map[ImportPath]*source.Metadata) - for impPath, id := range m.DepsByImpPath { - if id != "" { - imports[impPath] = s.Metadata(id) - } - } - - data, err := s.typerefData(ctx, m.ID, imports, cgfs) - if err != nil { - return nil, err - } - classes := typerefs.Decode(s.pkgIndex, m.ID, data) - refs := make(map[string][]typerefs.Symbol) - for _, class := range classes { - for _, decl := range class.Decls { - refs[decl] = class.Refs - } - } - return refs, nil -} - -// typerefData retrieves encoded typeref data from the filecache, or computes it on -// a cache miss. -func (s *snapshot) typerefData(ctx context.Context, id PackageID, imports map[ImportPath]*source.Metadata, cgfs []source.FileHandle) ([]byte, error) { - key := typerefsKey(id, imports, cgfs) - if data, err := filecache.Get(typerefsKind, key); err == nil { - return data, nil - } else if err != filecache.ErrNotFound { - bug.Reportf("internal error reading typerefs data: %v", err) - } - - pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), source.ParseFull&^parser.ParseComments, true, cgfs...) - if err != nil { - return nil, err - } - data := typerefs.Encode(pgfs, id, imports) - - // Store the resulting data in the cache. - go func() { - if err := filecache.Set(typerefsKind, key, data); err != nil { - event.Error(ctx, fmt.Sprintf("storing typerefs data for %s", id), err) - } - }() - - return data, nil -} - -// typerefsKey produces a key for the reference information produced by the -// typerefs package. 
-func typerefsKey(id PackageID, imports map[ImportPath]*source.Metadata, compiledGoFiles []source.FileHandle) source.Hash { - hasher := sha256.New() - - fmt.Fprintf(hasher, "typerefs: %s\n", id) - - importPaths := make([]string, 0, len(imports)) - for impPath := range imports { - importPaths = append(importPaths, string(impPath)) - } - sort.Strings(importPaths) - for _, importPath := range importPaths { - imp := imports[ImportPath(importPath)] - // TODO(rfindley): strength reduce the typerefs.Export API to guarantee - // that it only depends on these attributes of dependencies. - fmt.Fprintf(hasher, "import %s %s %s", importPath, imp.ID, imp.Name) - } - - fmt.Fprintf(hasher, "compiledGoFiles: %d\n", len(compiledGoFiles)) - for _, fh := range compiledGoFiles { - fmt.Fprintln(hasher, fh.FileIdentity()) - } - - var hash [sha256.Size]byte - hasher.Sum(hash[:0]) - return hash -} - -// typeCheckInputs contains the inputs of a call to typeCheckImpl, which -// type-checks a package. -// -// Part of the purpose of this type is to keep type checking in-sync with the -// package handle key, by explicitly identifying the inputs to type checking. -type typeCheckInputs struct { - id PackageID - - // Used for type checking: - pkgPath PackagePath - name PackageName - goFiles, compiledGoFiles []source.FileHandle - sizes types.Sizes - depsByImpPath map[ImportPath]PackageID - goVersion string // packages.Module.GoVersion, e.g. "1.18" - - // Used for type check diagnostics: - relatedInformation bool - linkTarget string - moduleMode bool -} - -func (s *snapshot) typeCheckInputs(ctx context.Context, m *source.Metadata) (typeCheckInputs, error) { - // Read both lists of files of this package. - // - // Parallelism is not necessary here as the files will have already been - // pre-read at load time. - // - // goFiles aren't presented to the type checker--nor - // are they included in the key, unsoundly--but their - // syntax trees are available from (*pkg).File(URI). 
- // TODO(adonovan): consider parsing them on demand? - // The need should be rare. - goFiles, err := readFiles(ctx, s, m.GoFiles) - if err != nil { - return typeCheckInputs{}, err - } - compiledGoFiles, err := readFiles(ctx, s, m.CompiledGoFiles) - if err != nil { - return typeCheckInputs{}, err - } - - goVersion := "" - if m.Module != nil && m.Module.GoVersion != "" { - goVersion = m.Module.GoVersion - } - - return typeCheckInputs{ - id: m.ID, - pkgPath: m.PkgPath, - name: m.Name, - goFiles: goFiles, - compiledGoFiles: compiledGoFiles, - sizes: m.TypesSizes, - depsByImpPath: m.DepsByImpPath, - goVersion: goVersion, - - relatedInformation: s.Options().RelatedInformationSupported, - linkTarget: s.Options().LinkTarget, - moduleMode: s.view.moduleMode(), - }, nil -} - -// readFiles reads the content of each file URL from the source -// (e.g. snapshot or cache). -func readFiles(ctx context.Context, fs source.FileSource, uris []span.URI) (_ []source.FileHandle, err error) { - fhs := make([]source.FileHandle, len(uris)) - for i, uri := range uris { - fhs[i], err = fs.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - } - return fhs, nil -} - -// localPackageKey returns a key for local inputs into type-checking, excluding -// dependency information: files, metadata, and configuration. -func localPackageKey(inputs typeCheckInputs) source.Hash { - hasher := sha256.New() - - // In principle, a key must be the hash of an - // unambiguous encoding of all the relevant data. - // If it's ambiguous, we risk collisions. 
- - // package identifiers - fmt.Fprintf(hasher, "package: %s %s %s\n", inputs.id, inputs.name, inputs.pkgPath) - - // module Go version - fmt.Fprintf(hasher, "go %s\n", inputs.goVersion) - - // import map - importPaths := make([]string, 0, len(inputs.depsByImpPath)) - for impPath := range inputs.depsByImpPath { - importPaths = append(importPaths, string(impPath)) - } - sort.Strings(importPaths) - for _, impPath := range importPaths { - fmt.Fprintf(hasher, "import %s %s", impPath, string(inputs.depsByImpPath[ImportPath(impPath)])) - } - - // file names and contents - fmt.Fprintf(hasher, "compiledGoFiles: %d\n", len(inputs.compiledGoFiles)) - for _, fh := range inputs.compiledGoFiles { - fmt.Fprintln(hasher, fh.FileIdentity()) - } - fmt.Fprintf(hasher, "goFiles: %d\n", len(inputs.goFiles)) - for _, fh := range inputs.goFiles { - fmt.Fprintln(hasher, fh.FileIdentity()) - } - - // types sizes - wordSize := inputs.sizes.Sizeof(types.Typ[types.Int]) - maxAlign := inputs.sizes.Alignof(types.NewPointer(types.Typ[types.Int64])) - fmt.Fprintf(hasher, "sizes: %d %d\n", wordSize, maxAlign) - - fmt.Fprintf(hasher, "relatedInformation: %t\n", inputs.relatedInformation) - fmt.Fprintf(hasher, "linkTarget: %s\n", inputs.linkTarget) - fmt.Fprintf(hasher, "moduleMode: %t\n", inputs.moduleMode) - - var hash [sha256.Size]byte - hasher.Sum(hash[:0]) - return hash -} - -// typeCheckImpl type checks the parsed source files in compiledGoFiles. -// (The resulting pkg also holds the parsed but not type-checked goFiles.) -// deps holds the future results of type-checking the direct dependencies. 
-func typeCheckImpl(ctx context.Context, b *typeCheckBatch, inputs typeCheckInputs) (*syntaxPackage, error) { - ctx, done := event.Start(ctx, "cache.typeCheck", tag.Package.Of(string(inputs.id))) - defer done() - - pkg, err := doTypeCheck(ctx, b, inputs) - if err != nil { - return nil, err - } - - // Our heuristic for whether to show type checking errors is: - // + If any file was 'fixed', don't show type checking errors as we - // can't guarantee that they reference accurate locations in the source. - // + If there is a parse error _in the current file_, suppress type - // errors in that file. - // + Otherwise, show type errors even in the presence of parse errors in - // other package files. go/types attempts to suppress follow-on errors - // due to bad syntax, so on balance type checking errors still provide - // a decent signal/noise ratio as long as the file in question parses. - - // Track URIs with parse errors so that we can suppress type errors for these - // files. - unparseable := map[span.URI]bool{} - for _, e := range pkg.parseErrors { - diags, err := parseErrorDiagnostics(pkg, e) - if err != nil { - event.Error(ctx, "unable to compute positions for parse errors", err, tag.Package.Of(string(inputs.id))) - continue - } - for _, diag := range diags { - unparseable[diag.URI] = true - pkg.diagnostics = append(pkg.diagnostics, diag) - } - } - - if pkg.hasFixedFiles { - return pkg, nil - } - - unexpanded := pkg.typeErrors - pkg.typeErrors = nil - for _, e := range expandErrors(unexpanded, inputs.relatedInformation) { - diags, err := typeErrorDiagnostics(inputs.moduleMode, inputs.linkTarget, pkg, e) - if err != nil { - // If we fail here and there are no parse errors, it means we are hiding - // a valid type-checking error from the user. This must be a bug, with - // one exception: relocated primary errors may fail processing, because - // they reference locations outside of the package. 
- if len(pkg.parseErrors) == 0 && !e.relocated { - bug.Reportf("failed to compute position for type error %v: %v", e, err) - } - continue - } - pkg.typeErrors = append(pkg.typeErrors, e.primary) - for _, diag := range diags { - // If the file didn't parse cleanly, it is highly likely that type - // checking errors will be confusing or redundant. But otherwise, type - // checking usually provides a good enough signal to include. - if !unparseable[diag.URI] { - pkg.diagnostics = append(pkg.diagnostics, diag) - } - } - } - - // Work around golang/go#61561: interface instances aren't concurrency-safe - // as they are not completed by the type checker. - for _, inst := range typeparams.GetInstances(pkg.typesInfo) { - if iface, _ := inst.Type.Underlying().(*types.Interface); iface != nil { - iface.Complete() - } - } - - return pkg, nil -} - -// TODO(golang/go#63472): this looks wrong with the new Go version syntax. -var goVersionRx = regexp.MustCompile(`^go([1-9][0-9]*)\.(0|[1-9][0-9]*)$`) - -func doTypeCheck(ctx context.Context, b *typeCheckBatch, inputs typeCheckInputs) (*syntaxPackage, error) { - pkg := &syntaxPackage{ - id: inputs.id, - fset: b.fset, // must match parse call below - types: types.NewPackage(string(inputs.pkgPath), string(inputs.name)), - typesInfo: &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - Scopes: make(map[ast.Node]*types.Scope), - }, - } - typeparams.InitInstanceInfo(pkg.typesInfo) - - // Collect parsed files from the type check pass, capturing parse errors from - // compiled files. - var err error - pkg.goFiles, err = b.parseCache.parseFiles(ctx, b.fset, source.ParseFull, false, inputs.goFiles...) 
- if err != nil { - return nil, err - } - pkg.compiledGoFiles, err = b.parseCache.parseFiles(ctx, b.fset, source.ParseFull, false, inputs.compiledGoFiles...) - if err != nil { - return nil, err - } - for _, pgf := range pkg.compiledGoFiles { - if pgf.ParseErr != nil { - pkg.parseErrors = append(pkg.parseErrors, pgf.ParseErr) - } - } - - // Use the default type information for the unsafe package. - if inputs.pkgPath == "unsafe" { - // Don't type check Unsafe: it's unnecessary, and doing so exposes a data - // race to Unsafe.completed. - pkg.types = types.Unsafe - return pkg, nil - } - - if len(pkg.compiledGoFiles) == 0 { - // No files most likely means go/packages failed. - // - // TODO(rfindley): in the past, we would capture go list errors in this - // case, to present go list errors to the user. However we had no tests for - // this behavior. It is unclear if anything better can be done here. - return nil, fmt.Errorf("no parsed files for package %s", inputs.pkgPath) - } - - onError := func(e error) { - pkg.typeErrors = append(pkg.typeErrors, e.(types.Error)) - } - cfg := b.typesConfig(ctx, inputs, onError) - - check := types.NewChecker(cfg, pkg.fset, pkg.types, pkg.typesInfo) - - var files []*ast.File - for _, cgf := range pkg.compiledGoFiles { - files = append(files, cgf.File) - } - - // Type checking is expensive, and we may not have ecountered cancellations - // via parsing (e.g. if we got nothing but cache hits for parsed files). - if ctx.Err() != nil { - return nil, ctx.Err() - } - - // Type checking errors are handled via the config, so ignore them here. - _ = check.Files(files) // 50us-15ms, depending on size of package - - // If the context was cancelled, we may have returned a ton of transient - // errors to the type checker. Swallow them. - if ctx.Err() != nil { - return nil, ctx.Err() - } - - // Collect imports by package path for the DependencyTypes API. 
- pkg.importMap = make(map[PackagePath]*types.Package) - var collectDeps func(*types.Package) - collectDeps = func(p *types.Package) { - pkgPath := PackagePath(p.Path()) - if _, ok := pkg.importMap[pkgPath]; ok { - return - } - pkg.importMap[pkgPath] = p - for _, imp := range p.Imports() { - collectDeps(imp) - } - } - collectDeps(pkg.types) - - return pkg, nil -} - -func (b *typeCheckBatch) typesConfig(ctx context.Context, inputs typeCheckInputs, onError func(e error)) *types.Config { - cfg := &types.Config{ - Sizes: inputs.sizes, - Error: onError, - Importer: importerFunc(func(path string) (*types.Package, error) { - // While all of the import errors could be reported - // based on the metadata before we start type checking, - // reporting them via types.Importer places the errors - // at the correct source location. - id, ok := inputs.depsByImpPath[ImportPath(path)] - if !ok { - // If the import declaration is broken, - // go list may fail to report metadata about it. - // See TestFixImportDecl for an example. - return nil, fmt.Errorf("missing metadata for import of %q", path) - } - depPH := b.handles[id] - if depPH == nil { - // e.g. missing metadata for dependencies in buildPackageHandle - return nil, missingPkgError(inputs.id, path, inputs.moduleMode) - } - if !source.IsValidImport(inputs.pkgPath, depPH.m.PkgPath) { - return nil, fmt.Errorf("invalid use of internal package %q", path) - } - return b.getImportPackage(ctx, id) - }), - } - - if inputs.goVersion != "" { - goVersion := "go" + inputs.goVersion - // types.NewChecker panics if GoVersion is invalid. An unparsable mod - // file should probably stop us before we get here, but double check - // just in case. - if goVersionRx.MatchString(goVersion) { - typesinternal.SetGoVersion(cfg, goVersion) - } - } - - // We want to type check cgo code if go/types supports it. - // We passed typecheckCgo to go/packages when we Loaded. 
- typesinternal.SetUsesCgo(cfg) - return cfg -} - -// depsErrors creates diagnostics for each metadata error (e.g. import cycle). -// These may be attached to import declarations in the transitive source files -// of pkg, or to 'requires' declarations in the package's go.mod file. -// -// TODO(rfindley): move this to load.go -func depsErrors(ctx context.Context, m *source.Metadata, meta *metadataGraph, fs source.FileSource, workspacePackages map[PackageID]PackagePath) ([]*source.Diagnostic, error) { - // Select packages that can't be found, and were imported in non-workspace packages. - // Workspace packages already show their own errors. - var relevantErrors []*packagesinternal.PackageError - for _, depsError := range m.DepsErrors { - // Up to Go 1.15, the missing package was included in the stack, which - // was presumably a bug. We want the next one up. - directImporterIdx := len(depsError.ImportStack) - 1 - if directImporterIdx < 0 { - continue - } - - directImporter := depsError.ImportStack[directImporterIdx] - if _, ok := workspacePackages[PackageID(directImporter)]; ok { - continue - } - relevantErrors = append(relevantErrors, depsError) - } - - // Don't build the import index for nothing. - if len(relevantErrors) == 0 { - return nil, nil - } - - // Subsequent checks require Go files. - if len(m.CompiledGoFiles) == 0 { - return nil, nil - } - - // Build an index of all imports in the package. - type fileImport struct { - cgf *source.ParsedGoFile - imp *ast.ImportSpec - } - allImports := map[string][]fileImport{} - for _, uri := range m.CompiledGoFiles { - pgf, err := parseGoURI(ctx, fs, uri, source.ParseHeader) - if err != nil { - return nil, err - } - fset := tokeninternal.FileSetFor(pgf.Tok) - // TODO(adonovan): modify Imports() to accept a single token.File (cgf.Tok). 
- for _, group := range astutil.Imports(fset, pgf.File) { - for _, imp := range group { - if imp.Path == nil { - continue - } - path := strings.Trim(imp.Path.Value, `"`) - allImports[path] = append(allImports[path], fileImport{pgf, imp}) - } - } - } - - // Apply a diagnostic to any import involved in the error, stopping once - // we reach the workspace. - var errors []*source.Diagnostic - for _, depErr := range relevantErrors { - for i := len(depErr.ImportStack) - 1; i >= 0; i-- { - item := depErr.ImportStack[i] - if _, ok := workspacePackages[PackageID(item)]; ok { - break - } - - for _, imp := range allImports[item] { - rng, err := imp.cgf.NodeRange(imp.imp) - if err != nil { - return nil, err - } - fixes, err := goGetQuickFixes(m.Module != nil, imp.cgf.URI, item) - if err != nil { - return nil, err - } - diag := &source.Diagnostic{ - URI: imp.cgf.URI, - Range: rng, - Severity: protocol.SeverityError, - Source: source.TypeError, - Message: fmt.Sprintf("error while importing %v: %v", item, depErr.Err), - SuggestedFixes: fixes, - } - if !source.BundleQuickFixes(diag) { - bug.Reportf("failed to bundle fixes for diagnostic %q", diag.Message) - } - errors = append(errors, diag) - } - } - } - - modFile, err := nearestModFile(ctx, m.CompiledGoFiles[0], fs) - if err != nil { - return nil, err - } - pm, err := parseModURI(ctx, fs, modFile) - if err != nil { - return nil, err - } - - // Add a diagnostic to the module that contained the lowest-level import of - // the missing package. 
- for _, depErr := range relevantErrors { - for i := len(depErr.ImportStack) - 1; i >= 0; i-- { - item := depErr.ImportStack[i] - m := meta.metadata[PackageID(item)] - if m == nil || m.Module == nil { - continue - } - modVer := module.Version{Path: m.Module.Path, Version: m.Module.Version} - reference := findModuleReference(pm.File, modVer) - if reference == nil { - continue - } - rng, err := pm.Mapper.OffsetRange(reference.Start.Byte, reference.End.Byte) - if err != nil { - return nil, err - } - fixes, err := goGetQuickFixes(true, pm.URI, item) - if err != nil { - return nil, err - } - diag := &source.Diagnostic{ - URI: pm.URI, - Range: rng, - Severity: protocol.SeverityError, - Source: source.TypeError, - Message: fmt.Sprintf("error while importing %v: %v", item, depErr.Err), - SuggestedFixes: fixes, - } - if !source.BundleQuickFixes(diag) { - bug.Reportf("failed to bundle fixes for diagnostic %q", diag.Message) - } - errors = append(errors, diag) - break - } - } - return errors, nil -} - -// missingPkgError returns an error message for a missing package that varies -// based on the user's workspace mode. -func missingPkgError(from PackageID, pkgPath string, moduleMode bool) error { - // TODO(rfindley): improve this error. Previous versions of this error had - // access to the full snapshot, and could provide more information (such as - // the initialization error). - if moduleMode { - if source.IsCommandLineArguments(from) { - return fmt.Errorf("current file is not included in a workspace module") - } else { - // Previously, we would present the initialization error here. - return fmt.Errorf("no required module provides package %q", pkgPath) - } - } else { - // Previously, we would list the directories in GOROOT and GOPATH here. 
- return fmt.Errorf("cannot find package %q in GOROOT or GOPATH", pkgPath) - } -} - -type extendedError struct { - relocated bool // if set, this is a relocation of a primary error to a secondary location - primary types.Error - secondaries []types.Error -} - -func (e extendedError) Error() string { - return e.primary.Error() -} - -// expandErrors duplicates "secondary" errors by mapping them to their main -// error. Some errors returned by the type checker are followed by secondary -// errors which give more information about the error. These are errors in -// their own right, and they are marked by starting with \t. For instance, when -// there is a multiply-defined function, the secondary error points back to the -// definition first noticed. -// -// This function associates the secondary error with its primary error, which can -// then be used as RelatedInformation when the error becomes a diagnostic. -// -// If supportsRelatedInformation is false, the secondary is instead embedded as -// additional context in the primary error. -func expandErrors(errs []types.Error, supportsRelatedInformation bool) []extendedError { - var result []extendedError - for i := 0; i < len(errs); { - original := extendedError{ - primary: errs[i], - } - for i++; i < len(errs); i++ { - spl := errs[i] - if len(spl.Msg) == 0 || spl.Msg[0] != '\t' { - break - } - spl.Msg = spl.Msg[1:] - original.secondaries = append(original.secondaries, spl) - } - - // Clone the error to all its related locations -- VS Code, at least, - // doesn't do it for us. - result = append(result, original) - for i, mainSecondary := range original.secondaries { - // Create the new primary error, with a tweaked message, in the - // secondary's location. We need to start from the secondary to - // capture its unexported location fields. 
- relocatedSecondary := mainSecondary - if supportsRelatedInformation { - relocatedSecondary.Msg = fmt.Sprintf("%v (see details)", original.primary.Msg) - } else { - relocatedSecondary.Msg = fmt.Sprintf("%v (this error: %v)", original.primary.Msg, mainSecondary.Msg) - } - relocatedSecondary.Soft = original.primary.Soft - - // Copy over the secondary errors, noting the location of the - // current error we're cloning. - clonedError := extendedError{relocated: true, primary: relocatedSecondary, secondaries: []types.Error{original.primary}} - for j, secondary := range original.secondaries { - if i == j { - secondary.Msg += " (this error)" - } - clonedError.secondaries = append(clonedError.secondaries, secondary) - } - result = append(result, clonedError) - } - } - return result -} - -// An importFunc is an implementation of the single-method -// types.Importer interface based on a function value. -type importerFunc func(path string) (*types.Package, error) - -func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } diff --git a/gopls/internal/lsp/cache/constraints_test.go b/gopls/internal/lsp/cache/constraints_test.go deleted file mode 100644 index 9adf01e6cea..00000000000 --- a/gopls/internal/lsp/cache/constraints_test.go +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build go1.16 -// +build go1.16 - -package cache - -import ( - "testing" -) - -func TestIsStandaloneFile(t *testing.T) { - tests := []struct { - desc string - contents string - standaloneTags []string - want bool - }{ - { - "new syntax", - "//go:build ignore\n\npackage main\n", - []string{"ignore"}, - true, - }, - { - "legacy syntax", - "// +build ignore\n\npackage main\n", - []string{"ignore"}, - true, - }, - { - "multiple tags", - "//go:build ignore\n\npackage main\n", - []string{"exclude", "ignore"}, - true, - }, - { - "invalid tag", - "// +build ignore\n\npackage main\n", - []string{"script"}, - false, - }, - { - "non-main package", - "//go:build ignore\n\npackage p\n", - []string{"ignore"}, - false, - }, - { - "alternate tag", - "// +build script\n\npackage main\n", - []string{"script"}, - true, - }, - { - "both syntax", - "//go:build ignore\n// +build ignore\n\npackage main\n", - []string{"ignore"}, - true, - }, - { - "after comments", - "// A non-directive comment\n//go:build ignore\n\npackage main\n", - []string{"ignore"}, - true, - }, - { - "after package decl", - "package main //go:build ignore\n", - []string{"ignore"}, - false, - }, - { - "on line after package decl", - "package main\n\n//go:build ignore\n", - []string{"ignore"}, - false, - }, - { - "combined with other expressions", - "\n\n//go:build ignore || darwin\n\npackage main\n", - []string{"ignore"}, - false, - }, - } - - for _, test := range tests { - t.Run(test.desc, func(t *testing.T) { - if got := isStandaloneFile([]byte(test.contents), test.standaloneTags); got != test.want { - t.Errorf("isStandaloneFile(%q, %v) = %t, want %t", test.contents, test.standaloneTags, got, test.want) - } - }) - } -} diff --git a/gopls/internal/lsp/cache/cycle_test.go b/gopls/internal/lsp/cache/cycle_test.go deleted file mode 100644 index 25edbbfe338..00000000000 --- a/gopls/internal/lsp/cache/cycle_test.go +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cache - -import ( - "sort" - "strings" - "testing" - - "golang.org/x/tools/gopls/internal/lsp/source" -) - -// This is an internal test of the breakImportCycles logic. -func TestBreakImportCycles(t *testing.T) { - - type Graph = map[PackageID]*source.Metadata - - // cyclic returns a description of a cycle, - // if the graph is cyclic, otherwise "". - cyclic := func(graph Graph) string { - const ( - unvisited = 0 - visited = 1 - onstack = 2 - ) - color := make(map[PackageID]int) - var visit func(id PackageID) string - visit = func(id PackageID) string { - switch color[id] { - case unvisited: - color[id] = onstack - case onstack: - return string(id) // cycle! - case visited: - return "" - } - if m := graph[id]; m != nil { - for _, depID := range m.DepsByPkgPath { - if cycle := visit(depID); cycle != "" { - return string(id) + "->" + cycle - } - } - } - color[id] = visited - return "" - } - for id := range graph { - if cycle := visit(id); cycle != "" { - return cycle - } - } - return "" - } - - // parse parses an import dependency graph. - // The input is a semicolon-separated list of node descriptions. - // Each node description is a package ID, optionally followed by - // "->" and a comma-separated list of successor IDs. - // Thus "a->b;b->c,d;e" represents the set of nodes {a,b,e} - // and the set of edges {a->b, b->c, b->d}. 
- parse := func(s string) Graph { - m := make(Graph) - makeNode := func(name string) *source.Metadata { - id := PackageID(name) - n, ok := m[id] - if !ok { - n = &source.Metadata{ - ID: id, - DepsByPkgPath: make(map[PackagePath]PackageID), - } - m[id] = n - } - return n - } - if s != "" { - for _, item := range strings.Split(s, ";") { - nodeID, succIDs, ok := strings.Cut(item, "->") - node := makeNode(nodeID) - if ok { - for _, succID := range strings.Split(succIDs, ",") { - node.DepsByPkgPath[PackagePath(succID)] = PackageID(succID) - } - } - } - } - return m - } - - // Sanity check of cycle detector. - { - got := cyclic(parse("a->b;b->c;c->a,d")) - has := func(s string) bool { return strings.Contains(got, s) } - if !(has("a->b") && has("b->c") && has("c->a") && !has("d")) { - t.Fatalf("cyclic: got %q, want a->b->c->a or equivalent", got) - } - } - - // format formats an import graph, in lexicographic order, - // in the notation of parse, but with a "!" after the name - // of each node that has errors. - format := func(graph Graph) string { - var items []string - for _, m := range graph { - item := string(m.ID) - if len(m.Errors) > 0 { - item += "!" - } - var succs []string - for _, depID := range m.DepsByPkgPath { - succs = append(succs, string(depID)) - } - if succs != nil { - sort.Strings(succs) - item += "->" + strings.Join(succs, ",") - } - items = append(items, item) - } - sort.Strings(items) - return strings.Join(items, ";") - } - - // We needn't test self-cycles as they are eliminated at Metadata construction. - for _, test := range []struct { - metadata, updates, want string - }{ - // Simple 2-cycle. - {"a->b", "b->a", - "a->b;b!"}, // broke b->a - - {"a->b;b->c;c", "b->a,c", - "a->b;b!->c;c"}, // broke b->a - - // Reversing direction of p->s edge creates pqrs cycle. 
- {"a->p,q,r,s;p->q,s,z;q->r,z;r->s,z;s->z", "p->q,z;s->p,z", - "a->p,q,r,s;p!->z;q->r,z;r->s,z;s!->z"}, // broke p->q, s->p - - // We break all intra-SCC edges from updated nodes, - // which may be more than necessary (e.g. a->b). - {"a->b;b->c;c;d->a", "a->b,e;c->d", - "a!->e;b->c;c!;d->a"}, // broke a->b, c->d - } { - metadata := parse(test.metadata) - updates := parse(test.updates) - - if cycle := cyclic(metadata); cycle != "" { - t.Errorf("initial metadata %s has cycle %s: ", format(metadata), cycle) - continue - } - - t.Log("initial", format(metadata)) - - // Apply updates. - // (parse doesn't have a way to express node deletions, - // but they aren't very interesting.) - for id, m := range updates { - metadata[id] = m - } - - t.Log("updated", format(metadata)) - - // breakImportCycles accesses only these fields of Metadata: - // DepsByImpPath, ID - read - // DepsByPkgPath - read, updated - // Errors - updated - breakImportCycles(metadata, updates) - - t.Log("acyclic", format(metadata)) - - if cycle := cyclic(metadata); cycle != "" { - t.Errorf("resulting metadata %s has cycle %s: ", format(metadata), cycle) - } - - got := format(metadata) - if got != test.want { - t.Errorf("test.metadata=%s test.updates=%s: got=%s want=%s", - test.metadata, test.updates, got, test.want) - } - } -} diff --git a/gopls/internal/lsp/cache/errors.go b/gopls/internal/lsp/cache/errors.go deleted file mode 100644 index e252ee2930c..00000000000 --- a/gopls/internal/lsp/cache/errors.go +++ /dev/null @@ -1,545 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cache - -// This file defines routines to convert diagnostics from go list, go -// get, go/packages, parsing, type checking, and analysis into -// source.Diagnostic form, and suggesting quick fixes. 
- -import ( - "context" - "fmt" - "go/parser" - "go/scanner" - "go/token" - "go/types" - "log" - "regexp" - "strconv" - "strings" - - "golang.org/x/tools/go/packages" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/analysisinternal" - "golang.org/x/tools/internal/typesinternal" -) - -// goPackagesErrorDiagnostics translates the given go/packages Error into a -// diagnostic, using the provided metadata and filesource. -// -// The slice of diagnostics may be empty. -func goPackagesErrorDiagnostics(ctx context.Context, e packages.Error, m *source.Metadata, fs source.FileSource) ([]*source.Diagnostic, error) { - if diag, err := parseGoListImportCycleError(ctx, e, m, fs); err != nil { - return nil, err - } else if diag != nil { - return []*source.Diagnostic{diag}, nil - } - - var spn span.Span - if e.Pos == "" { - spn = parseGoListError(e.Msg, m.LoadDir) - // We may not have been able to parse a valid span. Apply the errors to all files. - if _, err := spanToRange(ctx, fs, spn); err != nil { - var diags []*source.Diagnostic - for _, uri := range m.CompiledGoFiles { - diags = append(diags, &source.Diagnostic{ - URI: uri, - Severity: protocol.SeverityError, - Source: source.ListError, - Message: e.Msg, - }) - } - return diags, nil - } - } else { - spn = span.ParseInDir(e.Pos, m.LoadDir) - } - - // TODO(rfindley): in some cases the go command outputs invalid spans, for - // example (from TestGoListErrors): - // - // package a - // import - // - // In this case, the go command will complain about a.go:2:8, which is after - // the trailing newline but still considered to be on the second line, most - // likely because *token.File lacks information about newline termination. 
- // - // We could do better here by handling that case. - rng, err := spanToRange(ctx, fs, spn) - if err != nil { - return nil, err - } - return []*source.Diagnostic{{ - URI: spn.URI(), - Range: rng, - Severity: protocol.SeverityError, - Source: source.ListError, - Message: e.Msg, - }}, nil -} - -func parseErrorDiagnostics(pkg *syntaxPackage, errList scanner.ErrorList) ([]*source.Diagnostic, error) { - // The first parser error is likely the root cause of the problem. - if errList.Len() <= 0 { - return nil, fmt.Errorf("no errors in %v", errList) - } - e := errList[0] - pgf, err := pkg.File(span.URIFromPath(e.Pos.Filename)) - if err != nil { - return nil, err - } - rng, err := pgf.Mapper.OffsetRange(e.Pos.Offset, e.Pos.Offset) - if err != nil { - return nil, err - } - return []*source.Diagnostic{{ - URI: pgf.URI, - Range: rng, - Severity: protocol.SeverityError, - Source: source.ParseError, - Message: e.Msg, - }}, nil -} - -var importErrorRe = regexp.MustCompile(`could not import ([^\s]+)`) -var unsupportedFeatureRe = regexp.MustCompile(`.*require.* go(\d+\.\d+) or later`) - -func typeErrorDiagnostics(moduleMode bool, linkTarget string, pkg *syntaxPackage, e extendedError) ([]*source.Diagnostic, error) { - code, loc, err := typeErrorData(pkg, e.primary) - if err != nil { - return nil, err - } - diag := &source.Diagnostic{ - URI: loc.URI.SpanURI(), - Range: loc.Range, - Severity: protocol.SeverityError, - Source: source.TypeError, - Message: e.primary.Msg, - } - if code != 0 { - diag.Code = code.String() - diag.CodeHref = typesCodeHref(linkTarget, code) - } - switch code { - case typesinternal.UnusedVar, typesinternal.UnusedImport: - diag.Tags = append(diag.Tags, protocol.Unnecessary) - } - - for _, secondary := range e.secondaries { - _, secondaryLoc, err := typeErrorData(pkg, secondary) - if err != nil { - // We may not be able to compute type error data in scenarios where the - // secondary position is outside of the current package. 
In this case, we - // don't want to ignore the diagnostic entirely. - // - // See golang/go#59005 for an example where gopls was missing diagnostics - // due to returning an error here. - continue - } - diag.Related = append(diag.Related, protocol.DiagnosticRelatedInformation{ - Location: secondaryLoc, - Message: secondary.Msg, - }) - } - - if match := importErrorRe.FindStringSubmatch(e.primary.Msg); match != nil { - diag.SuggestedFixes, err = goGetQuickFixes(moduleMode, loc.URI.SpanURI(), match[1]) - if err != nil { - return nil, err - } - } - if match := unsupportedFeatureRe.FindStringSubmatch(e.primary.Msg); match != nil { - diag.SuggestedFixes, err = editGoDirectiveQuickFix(moduleMode, loc.URI.SpanURI(), match[1]) - if err != nil { - return nil, err - } - } - return []*source.Diagnostic{diag}, nil -} - -func goGetQuickFixes(moduleMode bool, uri span.URI, pkg string) ([]source.SuggestedFix, error) { - // Go get only supports module mode for now. - if !moduleMode { - return nil, nil - } - title := fmt.Sprintf("go get package %v", pkg) - cmd, err := command.NewGoGetPackageCommand(title, command.GoGetPackageArgs{ - URI: protocol.URIFromSpanURI(uri), - AddRequire: true, - Pkg: pkg, - }) - if err != nil { - return nil, err - } - return []source.SuggestedFix{source.SuggestedFixFromCommand(cmd, protocol.QuickFix)}, nil -} - -func editGoDirectiveQuickFix(moduleMode bool, uri span.URI, version string) ([]source.SuggestedFix, error) { - // Go mod edit only supports module mode. - if !moduleMode { - return nil, nil - } - title := fmt.Sprintf("go mod edit -go=%s", version) - cmd, err := command.NewEditGoDirectiveCommand(title, command.EditGoDirectiveArgs{ - URI: protocol.URIFromSpanURI(uri), - Version: version, - }) - if err != nil { - return nil, err - } - return []source.SuggestedFix{source.SuggestedFixFromCommand(cmd, protocol.QuickFix)}, nil -} - -// encodeDiagnostics gob-encodes the given diagnostics. 
-func encodeDiagnostics(srcDiags []*source.Diagnostic) []byte { - var gobDiags []gobDiagnostic - for _, srcDiag := range srcDiags { - var gobFixes []gobSuggestedFix - for _, srcFix := range srcDiag.SuggestedFixes { - gobFix := gobSuggestedFix{ - Message: srcFix.Title, - ActionKind: srcFix.ActionKind, - } - for uri, srcEdits := range srcFix.Edits { - for _, srcEdit := range srcEdits { - gobFix.TextEdits = append(gobFix.TextEdits, gobTextEdit{ - Location: protocol.Location{ - URI: protocol.URIFromSpanURI(uri), - Range: srcEdit.Range, - }, - NewText: []byte(srcEdit.NewText), - }) - } - } - if srcCmd := srcFix.Command; srcCmd != nil { - gobFix.Command = &gobCommand{ - Title: srcCmd.Title, - Command: srcCmd.Command, - Arguments: srcCmd.Arguments, - } - } - gobFixes = append(gobFixes, gobFix) - } - var gobRelated []gobRelatedInformation - for _, srcRel := range srcDiag.Related { - gobRel := gobRelatedInformation(srcRel) - gobRelated = append(gobRelated, gobRel) - } - gobDiag := gobDiagnostic{ - Location: protocol.Location{ - URI: protocol.URIFromSpanURI(srcDiag.URI), - Range: srcDiag.Range, - }, - Severity: srcDiag.Severity, - Code: srcDiag.Code, - CodeHref: srcDiag.CodeHref, - Source: string(srcDiag.Source), - Message: srcDiag.Message, - SuggestedFixes: gobFixes, - Related: gobRelated, - Tags: srcDiag.Tags, - } - gobDiags = append(gobDiags, gobDiag) - } - return diagnosticsCodec.Encode(gobDiags) -} - -// decodeDiagnostics decodes the given gob-encoded diagnostics. 
-func decodeDiagnostics(data []byte) []*source.Diagnostic { - var gobDiags []gobDiagnostic - diagnosticsCodec.Decode(data, &gobDiags) - var srcDiags []*source.Diagnostic - for _, gobDiag := range gobDiags { - var srcFixes []source.SuggestedFix - for _, gobFix := range gobDiag.SuggestedFixes { - srcFix := source.SuggestedFix{ - Title: gobFix.Message, - ActionKind: gobFix.ActionKind, - } - for _, gobEdit := range gobFix.TextEdits { - if srcFix.Edits == nil { - srcFix.Edits = make(map[span.URI][]protocol.TextEdit) - } - srcEdit := protocol.TextEdit{ - Range: gobEdit.Location.Range, - NewText: string(gobEdit.NewText), - } - uri := gobEdit.Location.URI.SpanURI() - srcFix.Edits[uri] = append(srcFix.Edits[uri], srcEdit) - } - if gobCmd := gobFix.Command; gobCmd != nil { - srcFix.Command = &protocol.Command{ - Title: gobCmd.Title, - Command: gobCmd.Command, - Arguments: gobCmd.Arguments, - } - } - srcFixes = append(srcFixes, srcFix) - } - var srcRelated []protocol.DiagnosticRelatedInformation - for _, gobRel := range gobDiag.Related { - srcRel := protocol.DiagnosticRelatedInformation(gobRel) - srcRelated = append(srcRelated, srcRel) - } - srcDiag := &source.Diagnostic{ - URI: gobDiag.Location.URI.SpanURI(), - Range: gobDiag.Location.Range, - Severity: gobDiag.Severity, - Code: gobDiag.Code, - CodeHref: gobDiag.CodeHref, - Source: source.AnalyzerErrorKind(gobDiag.Source), - Message: gobDiag.Message, - Tags: gobDiag.Tags, - Related: srcRelated, - SuggestedFixes: srcFixes, - } - srcDiags = append(srcDiags, srcDiag) - } - return srcDiags -} - -// toSourceDiagnostic converts a gobDiagnostic to "source" form. 
-func toSourceDiagnostic(srcAnalyzer *source.Analyzer, gobDiag *gobDiagnostic) *source.Diagnostic { - var related []protocol.DiagnosticRelatedInformation - for _, gobRelated := range gobDiag.Related { - related = append(related, protocol.DiagnosticRelatedInformation(gobRelated)) - } - - kinds := srcAnalyzer.ActionKind - if len(srcAnalyzer.ActionKind) == 0 { - kinds = append(kinds, protocol.QuickFix) - } - fixes := suggestedAnalysisFixes(gobDiag, kinds) - if srcAnalyzer.Fix != "" { - cmd, err := command.NewApplyFixCommand(gobDiag.Message, command.ApplyFixArgs{ - URI: gobDiag.Location.URI, - Range: gobDiag.Location.Range, - Fix: srcAnalyzer.Fix, - }) - if err != nil { - // JSON marshalling of these argument values cannot fail. - log.Fatalf("internal error in NewApplyFixCommand: %v", err) - } - for _, kind := range kinds { - fixes = append(fixes, source.SuggestedFixFromCommand(cmd, kind)) - } - } - - severity := srcAnalyzer.Severity - if severity == 0 { - severity = protocol.SeverityWarning - } - - diag := &source.Diagnostic{ - URI: gobDiag.Location.URI.SpanURI(), - Range: gobDiag.Location.Range, - Severity: severity, - Code: gobDiag.Code, - CodeHref: gobDiag.CodeHref, - Source: source.AnalyzerErrorKind(gobDiag.Source), - Message: gobDiag.Message, - Related: related, - Tags: srcAnalyzer.Tag, - } - if srcAnalyzer.FixesDiagnostic(diag) { - diag.SuggestedFixes = fixes - } - - // If the fixes only delete code, assume that the diagnostic is reporting dead code. - if onlyDeletions(fixes) { - diag.Tags = append(diag.Tags, protocol.Unnecessary) - } - return diag -} - -// onlyDeletions returns true if all of the suggested fixes are deletions. 
-func onlyDeletions(fixes []source.SuggestedFix) bool { - for _, fix := range fixes { - if fix.Command != nil { - return false - } - for _, edits := range fix.Edits { - for _, edit := range edits { - if edit.NewText != "" { - return false - } - if protocol.ComparePosition(edit.Range.Start, edit.Range.End) == 0 { - return false - } - } - } - } - return len(fixes) > 0 -} - -func typesCodeHref(linkTarget string, code typesinternal.ErrorCode) string { - return source.BuildLink(linkTarget, "golang.org/x/tools/internal/typesinternal", code.String()) -} - -func suggestedAnalysisFixes(diag *gobDiagnostic, kinds []protocol.CodeActionKind) []source.SuggestedFix { - var fixes []source.SuggestedFix - for _, fix := range diag.SuggestedFixes { - edits := make(map[span.URI][]protocol.TextEdit) - for _, e := range fix.TextEdits { - uri := span.URI(e.Location.URI) - edits[uri] = append(edits[uri], protocol.TextEdit{ - Range: e.Location.Range, - NewText: string(e.NewText), - }) - } - for _, kind := range kinds { - fixes = append(fixes, source.SuggestedFix{ - Title: fix.Message, - Edits: edits, - ActionKind: kind, - }) - } - - } - return fixes -} - -func typeErrorData(pkg *syntaxPackage, terr types.Error) (typesinternal.ErrorCode, protocol.Location, error) { - ecode, start, end, ok := typesinternal.ReadGo116ErrorData(terr) - if !ok { - start, end = terr.Pos, terr.Pos - ecode = 0 - } - // go/types may return invalid positions in some cases, such as - // in errors on tokens missing from the syntax tree. - if !start.IsValid() { - return 0, protocol.Location{}, fmt.Errorf("type error (%q, code %d, go116=%t) without position", terr.Msg, ecode, ok) - } - // go/types errors retain their FileSet. - // Sanity-check that we're using the right one. 
- fset := pkg.fset - if fset != terr.Fset { - return 0, protocol.Location{}, bug.Errorf("wrong FileSet for type error") - } - posn := safetoken.StartPosition(fset, start) - if !posn.IsValid() { - return 0, protocol.Location{}, fmt.Errorf("position %d of type error %q (code %q) not found in FileSet", start, start, terr) - } - pgf, err := pkg.File(span.URIFromPath(posn.Filename)) - if err != nil { - return 0, protocol.Location{}, err - } - if !end.IsValid() || end == start { - end = analysisinternal.TypeErrorEndPos(fset, pgf.Src, start) - } - loc, err := pgf.Mapper.PosLocation(pgf.Tok, start, end) - return ecode, loc, err -} - -// spanToRange converts a span.Span to a protocol.Range, by mapping content -// contained in the provided FileSource. -func spanToRange(ctx context.Context, fs source.FileSource, spn span.Span) (protocol.Range, error) { - uri := spn.URI() - fh, err := fs.ReadFile(ctx, uri) - if err != nil { - return protocol.Range{}, err - } - content, err := fh.Content() - if err != nil { - return protocol.Range{}, err - } - mapper := protocol.NewMapper(uri, content) - return mapper.SpanRange(spn) -} - -// parseGoListError attempts to parse a standard `go list` error message -// by stripping off the trailing error message. -// -// It works only on errors whose message is prefixed by colon, -// followed by a space (": "). For example: -// -// attributes.go:13:1: expected 'package', found 'type' -func parseGoListError(input, wd string) span.Span { - input = strings.TrimSpace(input) - msgIndex := strings.Index(input, ": ") - if msgIndex < 0 { - return span.Parse(input) - } - return span.ParseInDir(input[:msgIndex], wd) -} - -// parseGoListImportCycleError attempts to parse the given go/packages error as -// an import cycle, returning a diagnostic if successful. -// -// If the error is not detected as an import cycle error, it returns nil, nil. 
-func parseGoListImportCycleError(ctx context.Context, e packages.Error, m *source.Metadata, fs source.FileSource) (*source.Diagnostic, error) { - re := regexp.MustCompile(`(.*): import stack: \[(.+)\]`) - matches := re.FindStringSubmatch(strings.TrimSpace(e.Msg)) - if len(matches) < 3 { - return nil, nil - } - msg := matches[1] - importList := strings.Split(matches[2], " ") - // Since the error is relative to the current package. The import that is causing - // the import cycle error is the second one in the list. - if len(importList) < 2 { - return nil, nil - } - // Imports have quotation marks around them. - circImp := strconv.Quote(importList[1]) - for _, uri := range m.CompiledGoFiles { - pgf, err := parseGoURI(ctx, fs, uri, source.ParseHeader) - if err != nil { - return nil, err - } - // Search file imports for the import that is causing the import cycle. - for _, imp := range pgf.File.Imports { - if imp.Path.Value == circImp { - rng, err := pgf.NodeMappedRange(imp) - if err != nil { - return nil, nil - } - - return &source.Diagnostic{ - URI: pgf.URI, - Range: rng.Range(), - Severity: protocol.SeverityError, - Source: source.ListError, - Message: msg, - }, nil - } - } - } - return nil, nil -} - -// parseGoURI is a helper to parse the Go file at the given URI from the file -// source fs. The resulting syntax and token.File belong to an ephemeral, -// encapsulated FileSet, so this file stands only on its own: it's not suitable -// to use in a list of file of a package, for example. -// -// It returns an error if the file could not be read. -// -// TODO(rfindley): eliminate this helper. -func parseGoURI(ctx context.Context, fs source.FileSource, uri span.URI, mode parser.Mode) (*source.ParsedGoFile, error) { - fh, err := fs.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - return parseGoImpl(ctx, token.NewFileSet(), fh, mode, false) -} - -// parseModURI is a helper to parse the Mod file at the given URI from the file -// source fs. 
-// -// It returns an error if the file could not be read. -func parseModURI(ctx context.Context, fs source.FileSource, uri span.URI) (*source.ParsedModule, error) { - fh, err := fs.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - return parseModImpl(ctx, fh) -} diff --git a/gopls/internal/lsp/cache/filemap.go b/gopls/internal/lsp/cache/filemap.go deleted file mode 100644 index 52b7a13ba95..00000000000 --- a/gopls/internal/lsp/cache/filemap.go +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cache - -import ( - "path/filepath" - - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/persistent" -) - -// A fileMap maps files in the snapshot, with some additional bookkeeping: -// It keeps track of overlays as well as directories containing any observed -// file. -type fileMap struct { - files *persistent.Map[span.URI, source.FileHandle] - overlays *persistent.Map[span.URI, *Overlay] // the subset of files that are overlays - dirs *persistent.Set[string] // all dirs containing files; if nil, dirs have not been initialized -} - -func newFileMap() *fileMap { - return &fileMap{ - files: new(persistent.Map[span.URI, source.FileHandle]), - overlays: new(persistent.Map[span.URI, *Overlay]), - dirs: new(persistent.Set[string]), - } -} - -// Clone creates a copy of the fileMap, incorporating the changes specified by -// the changes map. -func (m *fileMap) Clone(changes map[span.URI]source.FileHandle) *fileMap { - m2 := &fileMap{ - files: m.files.Clone(), - overlays: m.overlays.Clone(), - } - if m.dirs != nil { - m2.dirs = m.dirs.Clone() - } - - // Handle file changes. 
- // - // Note, we can't simply delete the file unconditionally and let it be - // re-read by the snapshot, as (1) the snapshot must always observe all - // overlays, and (2) deleting a file forces directories to be reevaluated, as - // it may be the last file in a directory. We want to avoid that work in the - // common case where a file has simply changed. - // - // For that reason, we also do this in two passes, processing deletions - // first, as a set before a deletion would result in pointless work. - for uri, fh := range changes { - if !fileExists(fh) { - m2.Delete(uri) - } - } - for uri, fh := range changes { - if fileExists(fh) { - m2.Set(uri, fh) - } - } - return m2 -} - -func (m *fileMap) Destroy() { - m.files.Destroy() - m.overlays.Destroy() - if m.dirs != nil { - m.dirs.Destroy() - } -} - -// Get returns the file handle mapped by the given key, or (nil, false) if the -// key is not present. -func (m *fileMap) Get(key span.URI) (source.FileHandle, bool) { - return m.files.Get(key) -} - -// Range calls f for each (uri, fh) in the map. -func (m *fileMap) Range(f func(uri span.URI, fh source.FileHandle)) { - m.files.Range(f) -} - -// Set stores the given file handle for key, updating overlays and directories -// accordingly. -func (m *fileMap) Set(key span.URI, fh source.FileHandle) { - m.files.Set(key, fh, nil) - - // update overlays - if o, ok := fh.(*Overlay); ok { - m.overlays.Set(key, o, nil) - } else { - // Setting a non-overlay must delete the corresponding overlay, to preserve - // the accuracy of the overlay set. - m.overlays.Delete(key) - } - - // update dirs, if they have been computed - if m.dirs != nil { - m.addDirs(key) - } -} - -// addDirs adds all directories containing u to the dirs set. 
-func (m *fileMap) addDirs(u span.URI) { - dir := filepath.Dir(u.Filename()) - for dir != "" && !m.dirs.Contains(dir) { - m.dirs.Add(dir) - dir = filepath.Dir(dir) - } -} - -// Delete removes a file from the map, and updates overlays and dirs -// accordingly. -func (m *fileMap) Delete(key span.URI) { - m.files.Delete(key) - m.overlays.Delete(key) - - // Deleting a file may cause the set of dirs to shrink; therefore we must - // re-evaluate the dir set. - // - // Do this lazily, to avoid work if there are multiple deletions in a row. - if m.dirs != nil { - m.dirs.Destroy() - m.dirs = nil - } -} - -// Overlays returns a new unordered array of overlay files. -func (m *fileMap) Overlays() []*Overlay { - var overlays []*Overlay - m.overlays.Range(func(_ span.URI, o *Overlay) { - overlays = append(overlays, o) - }) - return overlays -} - -// Dirs reports returns the set of dirs observed by the fileMap. -// -// This operation mutates the fileMap. -// The result must not be mutated by the caller. -func (m *fileMap) Dirs() *persistent.Set[string] { - if m.dirs == nil { - m.dirs = new(persistent.Set[string]) - m.files.Range(func(u span.URI, _ source.FileHandle) { - m.addDirs(u) - }) - } - return m.dirs -} diff --git a/gopls/internal/lsp/cache/fs_overlay.go b/gopls/internal/lsp/cache/fs_overlay.go deleted file mode 100644 index 6764adda063..00000000000 --- a/gopls/internal/lsp/cache/fs_overlay.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cache - -import ( - "context" - "sync" - - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" -) - -// An overlayFS is a source.FileSource that keeps track of overlays on top of a -// delegate FileSource. 
-type overlayFS struct { - delegate source.FileSource - - mu sync.Mutex - overlays map[span.URI]*Overlay -} - -func newOverlayFS(delegate source.FileSource) *overlayFS { - return &overlayFS{ - delegate: delegate, - overlays: make(map[span.URI]*Overlay), - } -} - -// Overlays returns a new unordered array of overlays. -func (fs *overlayFS) Overlays() []*Overlay { - fs.mu.Lock() - defer fs.mu.Unlock() - overlays := make([]*Overlay, 0, len(fs.overlays)) - for _, overlay := range fs.overlays { - overlays = append(overlays, overlay) - } - return overlays -} - -func (fs *overlayFS) ReadFile(ctx context.Context, uri span.URI) (source.FileHandle, error) { - fs.mu.Lock() - overlay, ok := fs.overlays[uri] - fs.mu.Unlock() - if ok { - return overlay, nil - } - return fs.delegate.ReadFile(ctx, uri) -} - -// An Overlay is a file open in the editor. It may have unsaved edits. -// It implements the source.FileHandle interface. -type Overlay struct { - uri span.URI - content []byte - hash source.Hash - version int32 - kind source.FileKind - - // saved is true if a file matches the state on disk, - // and therefore does not need to be part of the overlay sent to go/packages. - saved bool -} - -func (o *Overlay) URI() span.URI { return o.uri } - -func (o *Overlay) FileIdentity() source.FileIdentity { - return source.FileIdentity{ - URI: o.uri, - Hash: o.hash, - } -} - -func (o *Overlay) Content() ([]byte, error) { return o.content, nil } -func (o *Overlay) Version() int32 { return o.version } -func (o *Overlay) SameContentsOnDisk() bool { return o.saved } -func (o *Overlay) Kind() source.FileKind { return o.kind } diff --git a/gopls/internal/lsp/cache/graph.go b/gopls/internal/lsp/cache/graph.go deleted file mode 100644 index 684bdab957f..00000000000 --- a/gopls/internal/lsp/cache/graph.go +++ /dev/null @@ -1,364 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package cache - -import ( - "sort" - - "golang.org/x/tools/go/packages" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" -) - -// A metadataGraph is an immutable and transitively closed import -// graph of Go packages, as obtained from go/packages. -type metadataGraph struct { - // metadata maps package IDs to their associated metadata. - metadata map[PackageID]*source.Metadata - - // importedBy maps package IDs to the list of packages that import them. - importedBy map[PackageID][]PackageID - - // ids maps file URIs to package IDs, sorted by (!valid, cli, packageID). - // A single file may belong to multiple packages due to tests packages. - // - // Invariant: all IDs present in the ids map exist in the metadata map. - ids map[span.URI][]PackageID -} - -// Metadata implements the source.MetadataSource interface. -func (g *metadataGraph) Metadata(id PackageID) *source.Metadata { - return g.metadata[id] -} - -// Clone creates a new metadataGraph, applying the given updates to the -// receiver. A nil map value represents a deletion. -func (g *metadataGraph) Clone(updates map[PackageID]*source.Metadata) *metadataGraph { - if len(updates) == 0 { - // Optimization: since the graph is immutable, we can return the receiver. - return g - } - - // Copy metadata map then apply updates. - metadata := make(map[PackageID]*source.Metadata, len(g.metadata)) - for id, m := range g.metadata { - metadata[id] = m - } - for id, m := range updates { - if m == nil { - delete(metadata, id) - } else { - metadata[id] = m - } - } - - // Break import cycles involving updated nodes. - breakImportCycles(metadata, updates) - - return newMetadataGraph(metadata) -} - -// newMetadataGraph returns a new metadataGraph, -// deriving relations from the specified metadata. -func newMetadataGraph(metadata map[PackageID]*source.Metadata) *metadataGraph { - // Build the import graph. 
- importedBy := make(map[PackageID][]PackageID) - for id, m := range metadata { - for _, depID := range m.DepsByPkgPath { - importedBy[depID] = append(importedBy[depID], id) - } - } - - // Collect file associations. - uriIDs := make(map[span.URI][]PackageID) - for id, m := range metadata { - uris := map[span.URI]struct{}{} - for _, uri := range m.CompiledGoFiles { - uris[uri] = struct{}{} - } - for _, uri := range m.GoFiles { - uris[uri] = struct{}{} - } - for uri := range uris { - uriIDs[uri] = append(uriIDs[uri], id) - } - } - - // Sort and filter file associations. - for uri, ids := range uriIDs { - sort.Slice(ids, func(i, j int) bool { - cli := source.IsCommandLineArguments(ids[i]) - clj := source.IsCommandLineArguments(ids[j]) - if cli != clj { - return clj - } - - // 2. packages appear in name order. - return ids[i] < ids[j] - }) - - // Choose the best IDs for each URI, according to the following rules: - // - If there are any valid real packages, choose them. - // - Else, choose the first valid command-line-argument package, if it exists. - // - // TODO(rfindley): it might be better to track all IDs here, and exclude - // them later when type checking, but this is the existing behavior. - for i, id := range ids { - // If we've seen *anything* prior to command-line arguments package, take - // it. Note that ids[0] may itself be command-line-arguments. - if i > 0 && source.IsCommandLineArguments(id) { - uriIDs[uri] = ids[:i] - break - } - } - } - - return &metadataGraph{ - metadata: metadata, - importedBy: importedBy, - ids: uriIDs, - } -} - -// reverseReflexiveTransitiveClosure returns a new mapping containing the -// metadata for the specified packages along with any package that -// transitively imports one of them, keyed by ID, including all the initial packages. 
-func (g *metadataGraph) reverseReflexiveTransitiveClosure(ids ...PackageID) map[PackageID]*source.Metadata { - seen := make(map[PackageID]*source.Metadata) - var visitAll func([]PackageID) - visitAll = func(ids []PackageID) { - for _, id := range ids { - if seen[id] == nil { - if m := g.metadata[id]; m != nil { - seen[id] = m - visitAll(g.importedBy[id]) - } - } - } - } - visitAll(ids) - return seen -} - -// breakImportCycles breaks import cycles in the metadata by deleting -// Deps* edges. It modifies only metadata present in the 'updates' -// subset. This function has an internal test. -func breakImportCycles(metadata, updates map[PackageID]*source.Metadata) { - // 'go list' should never report a cycle without flagging it - // as such, but we're extra cautious since we're combining - // information from multiple runs of 'go list'. Also, Bazel - // may silently report cycles. - cycles := detectImportCycles(metadata, updates) - if len(cycles) > 0 { - // There were cycles (uncommon). Break them. - // - // The naive way to break cycles would be to perform a - // depth-first traversal and to detect and delete - // cycle-forming edges as we encounter them. - // However, we're not allowed to modify the existing - // Metadata records, so we can only break edges out of - // the 'updates' subset. - // - // Another possibility would be to delete not the - // cycle forming edge but the topmost edge on the - // stack whose tail is an updated node. - // However, this would require that we retroactively - // undo all the effects of the traversals that - // occurred since that edge was pushed on the stack. - // - // We use a simpler scheme: we compute the set of cycles. - // All cyclic paths necessarily involve at least one - // updated node, so it is sufficient to break all - // edges from each updated node to other members of - // the strong component. 
- // - // This may result in the deletion of dominating - // edges, causing some dependencies to appear - // spuriously unreachable. Consider A <-> B -> C - // where updates={A,B}. The cycle is {A,B} so the - // algorithm will break both A->B and B->A, causing - // A to no longer depend on B or C. - // - // But that's ok: any error in Metadata.Errors is - // conservatively assumed by snapshot.clone to be a - // potential import cycle error, and causes special - // invalidation so that if B later drops its - // cycle-forming import of A, both A and B will be - // invalidated. - for _, cycle := range cycles { - cyclic := make(map[PackageID]bool) - for _, m := range cycle { - cyclic[m.ID] = true - } - for id := range cyclic { - if m := updates[id]; m != nil { - for path, depID := range m.DepsByImpPath { - if cyclic[depID] { - delete(m.DepsByImpPath, path) - } - } - for path, depID := range m.DepsByPkgPath { - if cyclic[depID] { - delete(m.DepsByPkgPath, path) - } - } - - // Set m.Errors to enable special - // invalidation logic in snapshot.clone. - if len(m.Errors) == 0 { - m.Errors = []packages.Error{{ - Msg: "detected import cycle", - Kind: packages.ListError, - }} - } - } - } - } - - // double-check when debugging - if false { - if cycles := detectImportCycles(metadata, updates); len(cycles) > 0 { - bug.Reportf("unbroken cycle: %v", cycles) - } - } - } -} - -// detectImportCycles reports cycles in the metadata graph. It returns a new -// unordered array of all cycles (nontrivial strong components) in the -// metadata graph reachable from a non-nil 'updates' value. -func detectImportCycles(metadata, updates map[PackageID]*source.Metadata) [][]*source.Metadata { - // We use the depth-first algorithm of Tarjan. - // https://doi.org/10.1137/0201010 - // - // TODO(adonovan): when we can use generics, consider factoring - // in common with the other implementation of Tarjan (in typerefs), - // abstracting over the node and edge representation. 
- - // A node wraps a Metadata with its working state. - // (Unfortunately we can't intrude on shared Metadata.) - type node struct { - rep *node - m *source.Metadata - index, lowlink int32 - scc int8 // TODO(adonovan): opt: cram these 1.5 bits into previous word - } - nodes := make(map[PackageID]*node, len(metadata)) - nodeOf := func(id PackageID) *node { - n, ok := nodes[id] - if !ok { - m := metadata[id] - if m == nil { - // Dangling import edge. - // Not sure whether a go/packages driver ever - // emits this, but create a dummy node in case. - // Obviously it won't be part of any cycle. - m = &source.Metadata{ID: id} - } - n = &node{m: m} - n.rep = n - nodes[id] = n - } - return n - } - - // find returns the canonical node decl. - // (The nodes form a disjoint set forest.) - var find func(*node) *node - find = func(n *node) *node { - rep := n.rep - if rep != n { - rep = find(rep) - n.rep = rep // simple path compression (no union-by-rank) - } - return rep - } - - // global state - var ( - index int32 = 1 - stack []*node - sccs [][]*source.Metadata // set of nontrivial strongly connected components - ) - - // visit implements the depth-first search of Tarjan's SCC algorithm - // Precondition: x is canonical. - var visit func(*node) - visit = func(x *node) { - x.index = index - x.lowlink = index - index++ - - stack = append(stack, x) // push - x.scc = -1 - - for _, yid := range x.m.DepsByPkgPath { - y := nodeOf(yid) - // Loop invariant: x is canonical. - y = find(y) - if x == y { - continue // nodes already combined (self-edges are impossible) - } - - switch { - case y.scc > 0: - // y is already a collapsed SCC - - case y.scc < 0: - // y is on the stack, and thus in the current SCC. - if y.index < x.lowlink { - x.lowlink = y.index - } - - default: - // y is unvisited; visit it now. - visit(y) - // Note: x and y are now non-canonical. - x = find(x) - if y.lowlink < x.lowlink { - x.lowlink = y.lowlink - } - } - } - - // Is x the root of an SCC? 
- if x.lowlink == x.index { - // Gather all metadata in the SCC (if nontrivial). - var scc []*source.Metadata - for { - // Pop y from stack. - i := len(stack) - 1 - y := stack[i] - stack = stack[:i] - if x != y || scc != nil { - scc = append(scc, y.m) - } - if x == y { - break // complete - } - // x becomes y's canonical representative. - y.rep = x - } - if scc != nil { - sccs = append(sccs, scc) - } - x.scc = 1 - } - } - - // Visit only the updated nodes: - // the existing metadata graph has no cycles, - // so any new cycle must involve an updated node. - for id, m := range updates { - if m != nil { - if n := nodeOf(id); n.index == 0 { // unvisited - visit(n) - } - } - } - - return sccs -} diff --git a/gopls/internal/lsp/cache/imports.go b/gopls/internal/lsp/cache/imports.go deleted file mode 100644 index f820da4ab46..00000000000 --- a/gopls/internal/lsp/cache/imports.go +++ /dev/null @@ -1,182 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cache - -import ( - "context" - "fmt" - "reflect" - "strings" - "sync" - "time" - - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/keys" - "golang.org/x/tools/internal/gocommand" - "golang.org/x/tools/internal/imports" -) - -type importsState struct { - ctx context.Context - - mu sync.Mutex - processEnv *imports.ProcessEnv - cacheRefreshDuration time.Duration - cacheRefreshTimer *time.Timer - cachedModFileHash source.Hash - cachedBuildFlags []string - cachedDirectoryFilters []string -} - -func (s *importsState) runProcessEnvFunc(ctx context.Context, snapshot *snapshot, fn func(context.Context, *imports.Options) error) error { - ctx, done := event.Start(ctx, "cache.importsState.runProcessEnvFunc") - defer done() - - s.mu.Lock() - defer s.mu.Unlock() - - // Find the hash of active mod files, if any. 
Using the unsaved content - // is slightly wasteful, since we'll drop caches a little too often, but - // the mod file shouldn't be changing while people are autocompleting. - // - // TODO(rfindley): consider instead hashing on-disk modfiles here. - var modFileHash source.Hash - for m := range snapshot.workspaceModFiles { - fh, err := snapshot.ReadFile(ctx, m) - if err != nil { - return err - } - modFileHash.XORWith(fh.FileIdentity().Hash) - } - - // view.goEnv is immutable -- changes make a new view. Options can change. - // We can't compare build flags directly because we may add -modfile. - localPrefix := snapshot.Options().Local - currentBuildFlags := snapshot.Options().BuildFlags - currentDirectoryFilters := snapshot.Options().DirectoryFilters - changed := !reflect.DeepEqual(currentBuildFlags, s.cachedBuildFlags) || - snapshot.Options().VerboseOutput != (s.processEnv.Logf != nil) || - modFileHash != s.cachedModFileHash || - !reflect.DeepEqual(snapshot.Options().DirectoryFilters, s.cachedDirectoryFilters) - - // If anything relevant to imports has changed, clear caches and - // update the processEnv. Clearing caches blocks on any background - // scans. - if changed { - if err := populateProcessEnvFromSnapshot(ctx, s.processEnv, snapshot); err != nil { - return err - } - - if resolver, err := s.processEnv.GetResolver(); err == nil { - if modResolver, ok := resolver.(*imports.ModuleResolver); ok { - modResolver.ClearForNewMod() - } - } - - s.cachedModFileHash = modFileHash - s.cachedBuildFlags = currentBuildFlags - s.cachedDirectoryFilters = currentDirectoryFilters - } - - // Run the user function. - opts := &imports.Options{ - // Defaults. - AllErrors: true, - Comments: true, - Fragment: true, - FormatOnly: false, - TabIndent: true, - TabWidth: 8, - Env: s.processEnv, - LocalPrefix: localPrefix, - } - - if err := fn(ctx, opts); err != nil { - return err - } - - if s.cacheRefreshTimer == nil { - // Don't refresh more than twice per minute. 
- delay := 30 * time.Second - // Don't spend more than a couple percent of the time refreshing. - if adaptive := 50 * s.cacheRefreshDuration; adaptive > delay { - delay = adaptive - } - s.cacheRefreshTimer = time.AfterFunc(delay, s.refreshProcessEnv) - } - - return nil -} - -// populateProcessEnvFromSnapshot sets the dynamically configurable fields for -// the view's process environment. Assumes that the caller is holding the -// importsState mutex. -func populateProcessEnvFromSnapshot(ctx context.Context, pe *imports.ProcessEnv, snapshot *snapshot) error { - ctx, done := event.Start(ctx, "cache.populateProcessEnvFromSnapshot") - defer done() - - if snapshot.Options().VerboseOutput { - pe.Logf = func(format string, args ...interface{}) { - event.Log(ctx, fmt.Sprintf(format, args...)) - } - } else { - pe.Logf = nil - } - - // Extract invocation details from the snapshot to use with goimports. - // - // TODO(rfindley): refactor to extract the necessary invocation logic into - // separate functions. Using goCommandInvocation is unnecessarily indirect, - // and has led to memory leaks in the past, when the snapshot was - // unintentionally held past its lifetime. - _, inv, cleanupInvocation, err := snapshot.goCommandInvocation(ctx, source.LoadWorkspace, &gocommand.Invocation{ - WorkingDir: snapshot.view.goCommandDir.Filename(), - }) - if err != nil { - return err - } - - pe.BuildFlags = inv.BuildFlags - pe.ModFlag = "readonly" // processEnv operations should not mutate the modfile - pe.Env = map[string]string{} - for _, kv := range inv.Env { - split := strings.SplitN(kv, "=", 2) - if len(split) != 2 { - continue - } - pe.Env[split[0]] = split[1] - } - // We don't actually use the invocation, so clean it up now. - cleanupInvocation() - // TODO(rfindley): should this simply be inv.WorkingDir? 
- pe.WorkingDir = snapshot.view.goCommandDir.Filename() - return nil -} - -func (s *importsState) refreshProcessEnv() { - ctx, done := event.Start(s.ctx, "cache.importsState.refreshProcessEnv") - defer done() - - start := time.Now() - - s.mu.Lock() - env := s.processEnv - if resolver, err := s.processEnv.GetResolver(); err == nil { - resolver.ClearForNewScan() - } - s.mu.Unlock() - - event.Log(s.ctx, "background imports cache refresh starting") - if err := imports.PrimeCache(context.Background(), env); err == nil { - event.Log(ctx, fmt.Sprintf("background refresh finished after %v", time.Since(start))) - } else { - event.Log(ctx, fmt.Sprintf("background refresh finished after %v", time.Since(start)), keys.Err.Of(err)) - } - s.mu.Lock() - s.cacheRefreshDuration = time.Since(start) - s.cacheRefreshTimer = nil - s.mu.Unlock() -} diff --git a/gopls/internal/lsp/cache/load.go b/gopls/internal/lsp/cache/load.go deleted file mode 100644 index 7621f8198f4..00000000000 --- a/gopls/internal/lsp/cache/load.go +++ /dev/null @@ -1,766 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cache - -import ( - "bytes" - "context" - "errors" - "fmt" - "path/filepath" - "sort" - "strings" - "sync/atomic" - "time" - - "golang.org/x/tools/go/packages" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" - "golang.org/x/tools/internal/gocommand" - "golang.org/x/tools/internal/packagesinternal" -) - -var loadID uint64 // atomic identifier for loads - -// errNoPackages indicates that a load query matched no packages. 
-var errNoPackages = errors.New("no packages returned") - -// load calls packages.Load for the given scopes, updating package metadata, -// import graph, and mapped files with the result. -// -// The resulting error may wrap the moduleErrorMap error type, representing -// errors associated with specific modules. -// -// If scopes contains a file scope there must be exactly one scope. -func (s *snapshot) load(ctx context.Context, allowNetwork bool, scopes ...loadScope) (err error) { - id := atomic.AddUint64(&loadID, 1) - eventName := fmt.Sprintf("go/packages.Load #%d", id) // unique name for logging - - var query []string - var containsDir bool // for logging - var standalone bool // whether this is a load of a standalone file - - // Keep track of module query -> module path so that we can later correlate query - // errors with errors. - moduleQueries := make(map[string]string) - for _, scope := range scopes { - switch scope := scope.(type) { - case packageLoadScope: - // The only time we pass package paths is when we're doing a - // partial workspace load. In those cases, the paths came back from - // go list and should already be GOPATH-vendorized when appropriate. - query = append(query, string(scope)) - - case fileLoadScope: - // Given multiple scopes, the resulting load might contain inaccurate - // information. For example go/packages returns at most one command-line - // arguments package, and does not handle a combination of standalone - // files and packages. - uri := span.URI(scope) - if len(scopes) > 1 { - panic(fmt.Sprintf("internal error: load called with multiple scopes when a file scope is present (file: %s)", uri)) - } - fh := s.FindFile(uri) - if fh == nil || s.FileKind(fh) != source.Go { - // Don't try to load a file that doesn't exist, or isn't a go file. 
- continue - } - contents, err := fh.Content() - if err != nil { - continue - } - if isStandaloneFile(contents, s.Options().StandaloneTags) { - standalone = true - query = append(query, uri.Filename()) - } else { - query = append(query, fmt.Sprintf("file=%s", uri.Filename())) - } - - case moduleLoadScope: - modQuery := fmt.Sprintf("%s%c...", scope.dir, filepath.Separator) - query = append(query, modQuery) - moduleQueries[modQuery] = string(scope.modulePath) - - case viewLoadScope: - // If we are outside of GOPATH, a module, or some other known - // build system, don't load subdirectories. - if !s.validBuildConfiguration() { - query = append(query, "./") - } else { - query = append(query, "./...") - } - - default: - panic(fmt.Sprintf("unknown scope type %T", scope)) - } - switch scope.(type) { - case viewLoadScope, moduleLoadScope: - containsDir = true - } - } - if len(query) == 0 { - return nil - } - sort.Strings(query) // for determinism - - ctx, done := event.Start(ctx, "cache.snapshot.load", tag.Query.Of(query)) - defer done() - - flags := source.LoadWorkspace - if allowNetwork { - flags |= source.AllowNetwork - } - _, inv, cleanup, err := s.goCommandInvocation(ctx, flags, &gocommand.Invocation{ - WorkingDir: s.view.goCommandDir.Filename(), - }) - if err != nil { - return err - } - - // Set a last resort deadline on packages.Load since it calls the go - // command, which may hang indefinitely if it has a bug. golang/go#42132 - // and golang/go#42255 have more context. - ctx, cancel := context.WithTimeout(ctx, 10*time.Minute) - defer cancel() - - cfg := s.config(ctx, inv) - pkgs, err := packages.Load(cfg, query...) - cleanup() - - // If the context was canceled, return early. Otherwise, we might be - // type-checking an incomplete result. Check the context directly, - // because go/packages adds extra information to the error. - if ctx.Err() != nil { - return ctx.Err() - } - - // This log message is sought for by TestReloadOnlyOnce. 
- labels := append(source.SnapshotLabels(s), tag.Query.Of(query), tag.PackageCount.Of(len(pkgs))) - if err != nil { - event.Error(ctx, eventName, err, labels...) - } else { - event.Log(ctx, eventName, labels...) - } - - if len(pkgs) == 0 { - if err == nil { - err = errNoPackages - } - return fmt.Errorf("packages.Load error: %w", err) - } - - if standalone && len(pkgs) > 1 { - return bug.Errorf("internal error: go/packages returned multiple packages for standalone file") - } - - moduleErrs := make(map[string][]packages.Error) // module path -> errors - filterFunc := s.view.filterFunc() - newMetadata := make(map[PackageID]*source.Metadata) - for _, pkg := range pkgs { - // The Go command returns synthetic list results for module queries that - // encountered module errors. - // - // For example, given a module path a.mod, we'll query for "a.mod/..." and - // the go command will return a package named "a.mod/..." holding this - // error. Save it for later interpretation. - // - // See golang/go#50862 for more details. - if mod := moduleQueries[pkg.PkgPath]; mod != "" { // a synthetic result for the unloadable module - if len(pkg.Errors) > 0 { - moduleErrs[mod] = pkg.Errors - } - continue - } - - if !containsDir || s.Options().VerboseOutput { - event.Log(ctx, eventName, append( - source.SnapshotLabels(s), - tag.Package.Of(pkg.ID), - tag.Files.Of(pkg.CompiledGoFiles))...) - } - - // Ignore packages with no sources, since we will never be able to - // correctly invalidate that metadata. - if len(pkg.GoFiles) == 0 && len(pkg.CompiledGoFiles) == 0 { - continue - } - // Special case for the builtin package, as it has no dependencies. - if pkg.PkgPath == "builtin" { - if len(pkg.GoFiles) != 1 { - return fmt.Errorf("only expected 1 file for builtin, got %v", len(pkg.GoFiles)) - } - s.setBuiltin(pkg.GoFiles[0]) - continue - } - // Skip test main packages. - if isTestMain(pkg, s.view.gocache) { - continue - } - // Skip filtered packages. 
They may be added anyway if they're - // dependencies of non-filtered packages. - // - // TODO(rfindley): why exclude metadata arbitrarily here? It should be safe - // to capture all metadata. - // TODO(rfindley): what about compiled go files? - if allFilesExcluded(pkg.GoFiles, filterFunc) { - continue - } - buildMetadata(newMetadata, pkg, cfg.Dir, standalone) - } - - s.mu.Lock() - - // Assert the invariant s.packages.Get(id).m == s.meta.metadata[id]. - s.packages.Range(func(id PackageID, ph *packageHandle) { - if s.meta.metadata[id] != ph.m { - panic("inconsistent metadata") - } - }) - - // Compute the minimal metadata updates (for Clone) - // required to preserve the above invariant. - var files []span.URI // files to preload - seenFiles := make(map[span.URI]bool) - updates := make(map[PackageID]*source.Metadata) - for _, m := range newMetadata { - if existing := s.meta.metadata[m.ID]; existing == nil { - // Record any new files we should pre-load. - for _, uri := range m.CompiledGoFiles { - if !seenFiles[uri] { - seenFiles[uri] = true - files = append(files, uri) - } - } - updates[m.ID] = m - delete(s.shouldLoad, m.ID) - } - } - - event.Log(ctx, fmt.Sprintf("%s: updating metadata for %d packages", eventName, len(updates))) - - // Before mutating the snapshot, ensure that we compute load diagnostics - // successfully. This could fail if the context is cancelled, and we don't - // want to leave the snapshot metadata in a partial state. - meta := s.meta.Clone(updates) - workspacePackages := computeWorkspacePackagesLocked(s, meta) - for _, update := range updates { - computeLoadDiagnostics(ctx, update, meta, lockedSnapshot{s}, workspacePackages) - } - s.meta = meta - s.workspacePackages = workspacePackages - s.resetActivePackagesLocked() - - s.mu.Unlock() - - // Opt: preLoad files in parallel. - // - // Requesting files in batch optimizes the underlying filesystem reads. 
- // However, this is also currently necessary for correctness: populating all - // files in the snapshot is necessary for certain operations that rely on the - // completeness of the file map, e.g. computing the set of directories to - // watch. - // - // TODO(rfindley, golang/go#57558): determine the set of directories based on - // loaded packages, so that reading files here is not necessary for - // correctness. - s.preloadFiles(ctx, files) - - if len(moduleErrs) > 0 { - return &moduleErrorMap{moduleErrs} - } - - return nil -} - -type moduleErrorMap struct { - errs map[string][]packages.Error // module path -> errors -} - -func (m *moduleErrorMap) Error() string { - var paths []string // sort for stability - for path, errs := range m.errs { - if len(errs) > 0 { // should always be true, but be cautious - paths = append(paths, path) - } - } - sort.Strings(paths) - - var buf bytes.Buffer - fmt.Fprintf(&buf, "%d modules have errors:\n", len(paths)) - for _, path := range paths { - fmt.Fprintf(&buf, "\t%s:%s\n", path, m.errs[path][0].Msg) - } - - return buf.String() -} - -// workspaceLayoutError returns an error describing a misconfiguration of the -// workspace, along with related diagnostic. -// -// The unusual argument ordering of results is intentional: if the resulting -// error is nil, so must be the resulting diagnostics. -// -// If ctx is cancelled, it may return ctx.Err(), nil. -// -// TODO(rfindley): separate workspace diagnostics from critical workspace -// errors. -func (s *snapshot) workspaceLayoutError(ctx context.Context) (error, []*source.Diagnostic) { - // TODO(rfindley): both of the checks below should be delegated to the workspace. - - if s.view.effectiveGO111MODULE() == off { - return nil, nil - } - - // If the user is using a go.work file, we assume that they know what they - // are doing. - // - // TODO(golang/go#53880): improve orphaned file diagnostics when using go.work. 
- if s.view.gowork != "" { - return nil, nil - } - - // Apply diagnostics about the workspace configuration to relevant open - // files. - openFiles := s.overlays() - - // If the snapshot does not have a valid build configuration, it may be - // that the user has opened a directory that contains multiple modules. - // Check for that an warn about it. - if !s.validBuildConfiguration() { - var msg string - if s.view.goversion >= 18 { - msg = `gopls was not able to find modules in your workspace. -When outside of GOPATH, gopls needs to know which modules you are working on. -You can fix this by opening your workspace to a folder inside a Go module, or -by using a go.work file to specify multiple modules. -See the documentation for more information on setting up your workspace: -https://github.com/golang/tools/blob/master/gopls/doc/workspace.md.` - } else { - msg = `gopls requires a module at the root of your workspace. -You can work with multiple modules by upgrading to Go 1.18 or later, and using -go workspaces (go.work files). -See the documentation for more information on setting up your workspace: -https://github.com/golang/tools/blob/master/gopls/doc/workspace.md.` - } - return fmt.Errorf(msg), s.applyCriticalErrorToFiles(ctx, msg, openFiles) - } - - return nil, nil -} - -func (s *snapshot) applyCriticalErrorToFiles(ctx context.Context, msg string, files []*Overlay) []*source.Diagnostic { - var srcDiags []*source.Diagnostic - for _, fh := range files { - // Place the diagnostics on the package or module declarations. - var rng protocol.Range - switch s.FileKind(fh) { - case source.Go: - if pgf, err := s.ParseGo(ctx, fh, source.ParseHeader); err == nil { - // Check that we have a valid `package foo` range to use for positioning the error. 
- if pgf.File.Package.IsValid() && pgf.File.Name != nil && pgf.File.Name.End().IsValid() { - rng, _ = pgf.PosRange(pgf.File.Package, pgf.File.Name.End()) - } - } - case source.Mod: - if pmf, err := s.ParseMod(ctx, fh); err == nil { - if mod := pmf.File.Module; mod != nil && mod.Syntax != nil { - rng, _ = pmf.Mapper.OffsetRange(mod.Syntax.Start.Byte, mod.Syntax.End.Byte) - } - } - } - srcDiags = append(srcDiags, &source.Diagnostic{ - URI: fh.URI(), - Range: rng, - Severity: protocol.SeverityError, - Source: source.ListError, - Message: msg, - }) - } - return srcDiags -} - -// buildMetadata populates the updates map with metadata updates to -// apply, based on the given pkg. It recurs through pkg.Imports to ensure that -// metadata exists for all dependencies. -func buildMetadata(updates map[PackageID]*source.Metadata, pkg *packages.Package, loadDir string, standalone bool) { - // Allow for multiple ad-hoc packages in the workspace (see #47584). - pkgPath := PackagePath(pkg.PkgPath) - id := PackageID(pkg.ID) - - if source.IsCommandLineArguments(id) { - if len(pkg.CompiledGoFiles) != 1 { - bug.Reportf("unexpected files in command-line-arguments package: %v", pkg.CompiledGoFiles) - return - } - suffix := pkg.CompiledGoFiles[0] - id = PackageID(pkg.ID + suffix) - pkgPath = PackagePath(pkg.PkgPath + suffix) - } - - // Duplicate? - if _, ok := updates[id]; ok { - // A package was encountered twice due to shared - // subgraphs (common) or cycles (rare). Although "go - // list" usually breaks cycles, we don't rely on it. - // breakImportCycles in metadataGraph.Clone takes care - // of it later. - return - } - - if pkg.TypesSizes == nil { - panic(id + ".TypeSizes is nil") - } - - // Recreate the metadata rather than reusing it to avoid locking. 
- m := &source.Metadata{ - ID: id, - PkgPath: pkgPath, - Name: PackageName(pkg.Name), - ForTest: PackagePath(packagesinternal.GetForTest(pkg)), - TypesSizes: pkg.TypesSizes, - LoadDir: loadDir, - Module: pkg.Module, - Errors: pkg.Errors, - DepsErrors: packagesinternal.GetDepsErrors(pkg), - Standalone: standalone, - } - - updates[id] = m - - for _, filename := range pkg.CompiledGoFiles { - uri := span.URIFromPath(filename) - m.CompiledGoFiles = append(m.CompiledGoFiles, uri) - } - for _, filename := range pkg.GoFiles { - uri := span.URIFromPath(filename) - m.GoFiles = append(m.GoFiles, uri) - } - for _, filename := range pkg.IgnoredFiles { - uri := span.URIFromPath(filename) - m.IgnoredFiles = append(m.IgnoredFiles, uri) - } - - depsByImpPath := make(map[ImportPath]PackageID) - depsByPkgPath := make(map[PackagePath]PackageID) - for importPath, imported := range pkg.Imports { - importPath := ImportPath(importPath) - - // It is not an invariant that importPath == imported.PkgPath. - // For example, package "net" imports "golang.org/x/net/dns/dnsmessage" - // which refers to the package whose ID and PkgPath are both - // "vendor/golang.org/x/net/dns/dnsmessage". Notice the ImportMap, - // which maps ImportPaths to PackagePaths: - // - // $ go list -json net vendor/golang.org/x/net/dns/dnsmessage - // { - // "ImportPath": "net", - // "Name": "net", - // "Imports": [ - // "C", - // "vendor/golang.org/x/net/dns/dnsmessage", - // "vendor/golang.org/x/net/route", - // ... - // ], - // "ImportMap": { - // "golang.org/x/net/dns/dnsmessage": "vendor/golang.org/x/net/dns/dnsmessage", - // "golang.org/x/net/route": "vendor/golang.org/x/net/route" - // }, - // ... - // } - // { - // "ImportPath": "vendor/golang.org/x/net/dns/dnsmessage", - // "Name": "dnsmessage", - // ... - // } - // - // (Beware that, for historical reasons, go list uses - // the JSON field "ImportPath" for the package's - // path--effectively the linker symbol prefix.) 
- // - // The example above is slightly special to go list - // because it's in the std module. Otherwise, - // vendored modules are simply modules whose directory - // is vendor/ instead of GOMODCACHE, and the - // import path equals the package path. - // - // But in GOPATH (non-module) mode, it's possible for - // package vendoring to cause a non-identity ImportMap, - // as in this example: - // - // $ cd $HOME/src - // $ find . -type f - // ./b/b.go - // ./vendor/example.com/a/a.go - // $ cat ./b/b.go - // package b - // import _ "example.com/a" - // $ cat ./vendor/example.com/a/a.go - // package a - // $ GOPATH=$HOME GO111MODULE=off go list -json ./b | grep -A2 ImportMap - // "ImportMap": { - // "example.com/a": "vendor/example.com/a" - // }, - - // Don't remember any imports with significant errors. - // - // The len=0 condition is a heuristic check for imports of - // non-existent packages (for which go/packages will create - // an edge to a synthesized node). The heuristic is unsound - // because some valid packages have zero files, for example, - // a directory containing only the file p_test.go defines an - // empty package p. - // TODO(adonovan): clarify this. Perhaps go/packages should - // report which nodes were synthesized. - if importPath != "unsafe" && len(imported.CompiledGoFiles) == 0 { - depsByImpPath[importPath] = "" // missing - continue - } - - // Don't record self-import edges. - // (This simplifies metadataGraph's cycle check.) - if PackageID(imported.ID) == id { - if len(pkg.Errors) == 0 { - bug.Reportf("self-import without error in package %s", id) - } - continue - } - - buildMetadata(updates, imported, loadDir, false) // only top level packages can be standalone - - // Don't record edges to packages with no name, as they cause trouble for - // the importer (golang/go#60952). 
- // - // However, we do want to insert these packages into the update map - // (buildMetadata above), so that we get type-checking diagnostics for the - // invalid packages. - if imported.Name == "" { - depsByImpPath[importPath] = "" // missing - continue - } - - depsByImpPath[importPath] = PackageID(imported.ID) - depsByPkgPath[PackagePath(imported.PkgPath)] = PackageID(imported.ID) - } - m.DepsByImpPath = depsByImpPath - m.DepsByPkgPath = depsByPkgPath - - // m.Diagnostics is set later in the loading pass, using - // computeLoadDiagnostics. -} - -// computeLoadDiagnostics computes and sets m.Diagnostics for the given metadata m. -// -// It should only be called during metadata construction in snapshot.load. -func computeLoadDiagnostics(ctx context.Context, m *source.Metadata, meta *metadataGraph, fs source.FileSource, workspacePackages map[PackageID]PackagePath) { - for _, packagesErr := range m.Errors { - // Filter out parse errors from go list. We'll get them when we - // actually parse, and buggy overlay support may generate spurious - // errors. (See TestNewModule_Issue38207.) - if strings.Contains(packagesErr.Msg, "expected '") { - continue - } - pkgDiags, err := goPackagesErrorDiagnostics(ctx, packagesErr, m, fs) - if err != nil { - // There are certain cases where the go command returns invalid - // positions, so we cannot panic or even bug.Reportf here. - event.Error(ctx, "unable to compute positions for list errors", err, tag.Package.Of(string(m.ID))) - continue - } - m.Diagnostics = append(m.Diagnostics, pkgDiags...) - } - - // TODO(rfindley): this is buggy: an insignificant change to a modfile - // (or an unsaved modfile) could affect the position of deps errors, - // without invalidating the package. - depsDiags, err := depsErrors(ctx, m, meta, fs, workspacePackages) - if err != nil { - if ctx.Err() == nil { - // TODO(rfindley): consider making this a bug.Reportf. depsErrors should - // not normally fail. 
- event.Error(ctx, "unable to compute deps errors", err, tag.Package.Of(string(m.ID))) - } - } - m.Diagnostics = append(m.Diagnostics, depsDiags...) -} - -// containsPackageLocked reports whether p is a workspace package for the -// snapshot s. -// -// s.mu must be held while calling this function. -func containsPackageLocked(s *snapshot, m *source.Metadata) bool { - // In legacy workspace mode, or if a package does not have an associated - // module, a package is considered inside the workspace if any of its files - // are under the workspace root (and not excluded). - // - // Otherwise if the package has a module it must be an active module (as - // defined by the module root or go.work file) and at least one file must not - // be filtered out by directoryFilters. - // - // TODO(rfindley): revisit this function. We should not need to predicate on - // gowork != "". It should suffice to consider workspace mod files (also, we - // will hopefully eliminate the concept of a workspace package soon). - if m.Module != nil && s.view.gowork != "" { - modURI := span.URIFromPath(m.Module.GoMod) - _, ok := s.workspaceModFiles[modURI] - if !ok { - return false - } - - uris := map[span.URI]struct{}{} - for _, uri := range m.CompiledGoFiles { - uris[uri] = struct{}{} - } - for _, uri := range m.GoFiles { - uris[uri] = struct{}{} - } - - filterFunc := s.view.filterFunc() - for uri := range uris { - // Don't use view.contains here. go.work files may include modules - // outside of the workspace folder. - if !strings.Contains(string(uri), "/vendor/") && !filterFunc(uri) { - return true - } - } - return false - } - - return containsFileInWorkspaceLocked(s.view, m) -} - -// containsOpenFileLocked reports whether any file referenced by m is open in -// the snapshot s. -// -// s.mu must be held while calling this function. 
-func containsOpenFileLocked(s *snapshot, m *source.Metadata) bool { - uris := map[span.URI]struct{}{} - for _, uri := range m.CompiledGoFiles { - uris[uri] = struct{}{} - } - for _, uri := range m.GoFiles { - uris[uri] = struct{}{} - } - - for uri := range uris { - fh, _ := s.files.Get(uri) - if _, open := fh.(*Overlay); open { - return true - } - } - return false -} - -// containsFileInWorkspaceLocked reports whether m contains any file inside the -// workspace of the snapshot s. -// -// s.mu must be held while calling this function. -func containsFileInWorkspaceLocked(v *View, m *source.Metadata) bool { - uris := map[span.URI]struct{}{} - for _, uri := range m.CompiledGoFiles { - uris[uri] = struct{}{} - } - for _, uri := range m.GoFiles { - uris[uri] = struct{}{} - } - - for uri := range uris { - // In order for a package to be considered for the workspace, at least one - // file must be contained in the workspace and not vendored. - - // The package's files are in this view. It may be a workspace package. - // Vendored packages are not likely to be interesting to the user. - if !strings.Contains(string(uri), "/vendor/") && v.contains(uri) { - return true - } - } - return false -} - -// computeWorkspacePackagesLocked computes workspace packages in the -// snapshot s for the given metadata graph. The result does not -// contain intermediate test variants. -// -// s.mu must be held while calling this function. -func computeWorkspacePackagesLocked(s *snapshot, meta *metadataGraph) map[PackageID]PackagePath { - workspacePackages := make(map[PackageID]PackagePath) - for _, m := range meta.metadata { - if !containsPackageLocked(s, m) { - continue - } - - if source.IsCommandLineArguments(m.ID) { - // If all the files contained in m have a real package, we don't need to - // keep m as a workspace package. - if allFilesHaveRealPackages(meta, m) { - continue - } - - // We only care about command-line-arguments packages if they are still - // open. 
- if !containsOpenFileLocked(s, m) { - continue - } - } - - switch { - case m.ForTest == "": - // A normal package. - workspacePackages[m.ID] = m.PkgPath - case m.ForTest == m.PkgPath, m.ForTest+"_test" == m.PkgPath: - // The test variant of some workspace package or its x_test. - // To load it, we need to load the non-test variant with -test. - // - // Notably, this excludes intermediate test variants from workspace - // packages. - assert(!m.IsIntermediateTestVariant(), "unexpected ITV") - workspacePackages[m.ID] = m.ForTest - } - } - return workspacePackages -} - -// allFilesHaveRealPackages reports whether all files referenced by m are -// contained in a "real" package (not command-line-arguments). -// -// If m is valid but all "real" packages containing any file are invalid, this -// function returns false. -// -// If m is not a command-line-arguments package, this is trivially true. -func allFilesHaveRealPackages(g *metadataGraph, m *source.Metadata) bool { - n := len(m.CompiledGoFiles) -checkURIs: - for _, uri := range append(m.CompiledGoFiles[0:n:n], m.GoFiles...) { - for _, id := range g.ids[uri] { - if !source.IsCommandLineArguments(id) { - continue checkURIs - } - } - return false - } - return true -} - -func isTestMain(pkg *packages.Package, gocache string) bool { - // Test mains must have an import path that ends with ".test". - if !strings.HasSuffix(pkg.PkgPath, ".test") { - return false - } - // Test main packages are always named "main". - if pkg.Name != "main" { - return false - } - // Test mains always have exactly one GoFile that is in the build cache. - if len(pkg.GoFiles) > 1 { - return false - } - if !source.InDir(gocache, pkg.GoFiles[0]) { - return false - } - return true -} diff --git a/gopls/internal/lsp/cache/mod_vuln.go b/gopls/internal/lsp/cache/mod_vuln.go deleted file mode 100644 index 8c635c181bf..00000000000 --- a/gopls/internal/lsp/cache/mod_vuln.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2022 The Go Authors. 
All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cache - -import ( - "context" - - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/gopls/internal/vulncheck" - "golang.org/x/tools/gopls/internal/vulncheck/scan" - "golang.org/x/tools/internal/memoize" -) - -// ModVuln returns import vulnerability analysis for the given go.mod URI. -// Concurrent requests are combined into a single command. -func (s *snapshot) ModVuln(ctx context.Context, modURI span.URI) (*vulncheck.Result, error) { - s.mu.Lock() - entry, hit := s.modVulnHandles.Get(modURI) - s.mu.Unlock() - - type modVuln struct { - result *vulncheck.Result - err error - } - - // Cache miss? - if !hit { - handle := memoize.NewPromise("modVuln", func(ctx context.Context, arg interface{}) interface{} { - result, err := scan.VulnerablePackages(ctx, arg.(*snapshot)) - return modVuln{result, err} - }) - - entry = handle - s.mu.Lock() - s.modVulnHandles.Set(modURI, entry, nil) - s.mu.Unlock() - } - - // Await result. - v, err := s.awaitPromise(ctx, entry) - if err != nil { - return nil, err - } - res := v.(modVuln) - return res.result, res.err -} diff --git a/gopls/internal/lsp/cache/parse.go b/gopls/internal/lsp/cache/parse.go deleted file mode 100644 index 42353dfa468..00000000000 --- a/gopls/internal/lsp/cache/parse.go +++ /dev/null @@ -1,969 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package cache - -import ( - "bytes" - "context" - "fmt" - "go/ast" - "go/parser" - "go/scanner" - "go/token" - "path/filepath" - "reflect" - - goplsastutil "golang.org/x/tools/gopls/internal/astutil" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/diff" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -// ParseGo parses the file whose contents are provided by fh, using a cache. -// The resulting tree may have beeen fixed up. -func (s *snapshot) ParseGo(ctx context.Context, fh source.FileHandle, mode parser.Mode) (*source.ParsedGoFile, error) { - pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), mode, false, fh) - if err != nil { - return nil, err - } - return pgfs[0], nil -} - -// parseGoImpl parses the Go source file whose content is provided by fh. -func parseGoImpl(ctx context.Context, fset *token.FileSet, fh source.FileHandle, mode parser.Mode, purgeFuncBodies bool) (*source.ParsedGoFile, error) { - ext := filepath.Ext(fh.URI().Filename()) - if ext != ".go" && ext != "" { // files generated by cgo have no extension - return nil, fmt.Errorf("cannot parse non-Go file %s", fh.URI()) - } - content, err := fh.Content() - if err != nil { - return nil, err - } - // Check for context cancellation before actually doing the parse. - if ctx.Err() != nil { - return nil, ctx.Err() - } - pgf, _ := ParseGoSrc(ctx, fset, fh.URI(), content, mode, purgeFuncBodies) - return pgf, nil -} - -// ParseGoSrc parses a buffer of Go source, repairing the tree if necessary. -// -// The provided ctx is used only for logging. 
-func ParseGoSrc(ctx context.Context, fset *token.FileSet, uri span.URI, src []byte, mode parser.Mode, purgeFuncBodies bool) (res *source.ParsedGoFile, fixes []fixType) { - if purgeFuncBodies { - src = goplsastutil.PurgeFuncBodies(src) - } - ctx, done := event.Start(ctx, "cache.ParseGoSrc", tag.File.Of(uri.Filename())) - defer done() - - file, err := parser.ParseFile(fset, uri.Filename(), src, mode) - var parseErr scanner.ErrorList - if err != nil { - // We passed a byte slice, so the only possible error is a parse error. - parseErr = err.(scanner.ErrorList) - } - - tok := fset.File(file.Pos()) - if tok == nil { - // file.Pos is the location of the package declaration (issue #53202). If there was - // none, we can't find the token.File that ParseFile created, and we - // have no choice but to recreate it. - tok = fset.AddFile(uri.Filename(), -1, len(src)) - tok.SetLinesForContent(src) - } - - fixedSrc := false - fixedAST := false - // If there were parse errors, attempt to fix them up. - if parseErr != nil { - // Fix any badly parsed parts of the AST. - astFixes := fixAST(file, tok, src) - fixedAST = len(fixes) > 0 - if fixedAST { - fixes = append(fixes, astFixes...) - } - - for i := 0; i < 10; i++ { - // Fix certain syntax errors that render the file unparseable. - newSrc, srcFix := fixSrc(file, tok, src) - if newSrc == nil { - break - } - - // If we thought there was something to fix 10 times in a row, - // it is likely we got stuck in a loop somehow. Log out a diff - // of the last changes we made to aid in debugging. - if i == 9 { - unified := diff.Unified("before", "after", string(src), string(newSrc)) - event.Log(ctx, fmt.Sprintf("fixSrc loop - last diff:\n%v", unified), tag.File.Of(tok.Name())) - } - - newFile, newErr := parser.ParseFile(fset, uri.Filename(), newSrc, mode) - if newFile == nil { - break // no progress - } - - // Maintain the original parseError so we don't try formatting the - // doctored file. 
- file = newFile - src = newSrc - tok = fset.File(file.Pos()) - - // Only now that we accept the fix do we record the src fix from above. - fixes = append(fixes, srcFix) - fixedSrc = true - - if newErr == nil { - break // nothing to fix - } - - // Note that fixedAST is reset after we fix src. - astFixes = fixAST(file, tok, src) - fixedAST = len(astFixes) > 0 - if fixedAST { - fixes = append(fixes, astFixes...) - } - } - } - - return &source.ParsedGoFile{ - URI: uri, - Mode: mode, - Src: src, - FixedSrc: fixedSrc, - FixedAST: fixedAST, - File: file, - Tok: tok, - Mapper: protocol.NewMapper(uri, src), - ParseErr: parseErr, - }, fixes -} - -// fixAST inspects the AST and potentially modifies any *ast.BadStmts so that it can be -// type-checked more effectively. -// -// If fixAST returns true, the resulting AST is considered "fixed", meaning -// positions have been mangled, and type checker errors may not make sense. -func fixAST(n ast.Node, tok *token.File, src []byte) (fixes []fixType) { - var err error - walkASTWithParent(n, func(n, parent ast.Node) bool { - switch n := n.(type) { - case *ast.BadStmt: - if fixDeferOrGoStmt(n, parent, tok, src) { - fixes = append(fixes, fixedDeferOrGo) - // Recursively fix in our fixed node. - moreFixes := fixAST(parent, tok, src) - fixes = append(fixes, moreFixes...) - } else { - err = fmt.Errorf("unable to parse defer or go from *ast.BadStmt: %v", err) - } - return false - case *ast.BadExpr: - if fixArrayType(n, parent, tok, src) { - fixes = append(fixes, fixedArrayType) - // Recursively fix in our fixed node. - moreFixes := fixAST(parent, tok, src) - fixes = append(fixes, moreFixes...) - return false - } - - // Fix cases where parser interprets if/for/switch "init" - // statement as "cond" expression, e.g.: - // - // // "i := foo" is init statement, not condition. 
- // for i := foo - // - if fixInitStmt(n, parent, tok, src) { - fixes = append(fixes, fixedInit) - } - return false - case *ast.SelectorExpr: - // Fix cases where a keyword prefix results in a phantom "_" selector, e.g.: - // - // foo.var<> // want to complete to "foo.variance" - // - if fixPhantomSelector(n, tok, src) { - fixes = append(fixes, fixedPhantomSelector) - } - return true - - case *ast.BlockStmt: - switch parent.(type) { - case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: - // Adjust closing curly brace of empty switch/select - // statements so we can complete inside them. - if fixEmptySwitch(n, tok, src) { - fixes = append(fixes, fixedEmptySwitch) - } - } - - return true - default: - return true - } - }) - return fixes -} - -// walkASTWithParent walks the AST rooted at n. The semantics are -// similar to ast.Inspect except it does not call f(nil). -func walkASTWithParent(n ast.Node, f func(n ast.Node, parent ast.Node) bool) { - var ancestors []ast.Node - ast.Inspect(n, func(n ast.Node) (recurse bool) { - defer func() { - if recurse { - ancestors = append(ancestors, n) - } - }() - - if n == nil { - ancestors = ancestors[:len(ancestors)-1] - return false - } - - var parent ast.Node - if len(ancestors) > 0 { - parent = ancestors[len(ancestors)-1] - } - - return f(n, parent) - }) -} - -// TODO(rfindley): revert this intrumentation once we're certain the crash in -// #59097 is fixed. -type fixType int - -const ( - noFix fixType = iota - fixedCurlies - fixedDanglingSelector - fixedDeferOrGo - fixedArrayType - fixedInit - fixedPhantomSelector - fixedEmptySwitch -) - -// fixSrc attempts to modify the file's source code to fix certain -// syntax errors that leave the rest of the file unparsed. -// -// fixSrc returns a non-nil result if and only if a fix was applied. 
-func fixSrc(f *ast.File, tf *token.File, src []byte) (newSrc []byte, fix fixType) { - walkASTWithParent(f, func(n, parent ast.Node) bool { - if newSrc != nil { - return false - } - - switch n := n.(type) { - case *ast.BlockStmt: - newSrc = fixMissingCurlies(f, n, parent, tf, src) - if newSrc != nil { - fix = fixedCurlies - } - case *ast.SelectorExpr: - newSrc = fixDanglingSelector(n, tf, src) - if newSrc != nil { - fix = fixedDanglingSelector - } - } - - return newSrc == nil - }) - - return newSrc, fix -} - -// fixMissingCurlies adds in curly braces for block statements that -// are missing curly braces. For example: -// -// if foo -// -// becomes -// -// if foo {} -func fixMissingCurlies(f *ast.File, b *ast.BlockStmt, parent ast.Node, tok *token.File, src []byte) []byte { - // If the "{" is already in the source code, there isn't anything to - // fix since we aren't missing curlies. - if b.Lbrace.IsValid() { - braceOffset, err := safetoken.Offset(tok, b.Lbrace) - if err != nil { - return nil - } - if braceOffset < len(src) && src[braceOffset] == '{' { - return nil - } - } - - parentLine := safetoken.Line(tok, parent.Pos()) - - if parentLine >= tok.LineCount() { - // If we are the last line in the file, no need to fix anything. - return nil - } - - // Insert curlies at the end of parent's starting line. The parent - // is the statement that contains the block, e.g. *ast.IfStmt. The - // block's Pos()/End() can't be relied upon because they are based - // on the (missing) curly braces. We assume the statement is a - // single line for now and try sticking the curly braces at the end. - insertPos := tok.LineStart(parentLine+1) - 1 - - // Scootch position backwards until it's not in a comment. For example: - // - // if foo<> // some amazing comment | - // someOtherCode() - // - // insertPos will be located at "|", so we back it out of the comment. 
- didSomething := true - for didSomething { - didSomething = false - for _, c := range f.Comments { - if c.Pos() < insertPos && insertPos <= c.End() { - insertPos = c.Pos() - didSomething = true - } - } - } - - // Bail out if line doesn't end in an ident or ".". This is to avoid - // cases like below where we end up making things worse by adding - // curlies: - // - // if foo && - // bar<> - switch precedingToken(insertPos, tok, src) { - case token.IDENT, token.PERIOD: - // ok - default: - return nil - } - - var buf bytes.Buffer - buf.Grow(len(src) + 3) - offset, err := safetoken.Offset(tok, insertPos) - if err != nil { - return nil - } - buf.Write(src[:offset]) - - // Detect if we need to insert a semicolon to fix "for" loop situations like: - // - // for i := foo(); foo<> - // - // Just adding curlies is not sufficient to make things parse well. - if fs, ok := parent.(*ast.ForStmt); ok { - if _, ok := fs.Cond.(*ast.BadExpr); !ok { - if xs, ok := fs.Post.(*ast.ExprStmt); ok { - if _, ok := xs.X.(*ast.BadExpr); ok { - buf.WriteByte(';') - } - } - } - } - - // Insert "{}" at insertPos. - buf.WriteByte('{') - buf.WriteByte('}') - buf.Write(src[offset:]) - return buf.Bytes() -} - -// fixEmptySwitch moves empty switch/select statements' closing curly -// brace down one line. This allows us to properly detect incomplete -// "case" and "default" keywords as inside the switch statement. For -// example: -// -// switch { -// def<> -// } -// -// gets parsed like: -// -// switch { -// } -// -// Later we manually pull out the "def" token, but we need to detect -// that our "<>" position is inside the switch block. To do that we -// move the curly brace so it looks like: -// -// switch { -// -// } -// -// The resulting bool reports whether any fixing occurred. -func fixEmptySwitch(body *ast.BlockStmt, tok *token.File, src []byte) bool { - // We only care about empty switch statements. 
- if len(body.List) > 0 || !body.Rbrace.IsValid() { - return false - } - - // If the right brace is actually in the source code at the - // specified position, don't mess with it. - braceOffset, err := safetoken.Offset(tok, body.Rbrace) - if err != nil { - return false - } - if braceOffset < len(src) && src[braceOffset] == '}' { - return false - } - - braceLine := safetoken.Line(tok, body.Rbrace) - if braceLine >= tok.LineCount() { - // If we are the last line in the file, no need to fix anything. - return false - } - - // Move the right brace down one line. - body.Rbrace = tok.LineStart(braceLine + 1) - return true -} - -// fixDanglingSelector inserts real "_" selector expressions in place -// of phantom "_" selectors. For example: -// -// func _() { -// x.<> -// } -// -// var x struct { i int } -// -// To fix completion at "<>", we insert a real "_" after the "." so the -// following declaration of "x" can be parsed and type checked -// normally. -func fixDanglingSelector(s *ast.SelectorExpr, tf *token.File, src []byte) []byte { - if !isPhantomUnderscore(s.Sel, tf, src) { - return nil - } - - if !s.X.End().IsValid() { - return nil - } - - insertOffset, err := safetoken.Offset(tf, s.X.End()) - if err != nil { - return nil - } - // Insert directly after the selector's ".". - insertOffset++ - if src[insertOffset-1] != '.' { - return nil - } - - var buf bytes.Buffer - buf.Grow(len(src) + 1) - buf.Write(src[:insertOffset]) - buf.WriteByte('_') - buf.Write(src[insertOffset:]) - return buf.Bytes() -} - -// fixPhantomSelector tries to fix selector expressions with phantom -// "_" selectors. In particular, we check if the selector is a -// keyword, and if so we swap in an *ast.Ident with the keyword text. For example: -// -// foo.var -// -// yields a "_" selector instead of "var" since "var" is a keyword. -// -// TODO(rfindley): should this constitute an ast 'fix'? -// -// The resulting bool reports whether any fixing occurred. 
-func fixPhantomSelector(sel *ast.SelectorExpr, tf *token.File, src []byte) bool { - if !isPhantomUnderscore(sel.Sel, tf, src) { - return false - } - - // Only consider selectors directly abutting the selector ".". This - // avoids false positives in cases like: - // - // foo. // don't think "var" is our selector - // var bar = 123 - // - if sel.Sel.Pos() != sel.X.End()+1 { - return false - } - - maybeKeyword := readKeyword(sel.Sel.Pos(), tf, src) - if maybeKeyword == "" { - return false - } - - return replaceNode(sel, sel.Sel, &ast.Ident{ - Name: maybeKeyword, - NamePos: sel.Sel.Pos(), - }) -} - -// isPhantomUnderscore reports whether the given ident is a phantom -// underscore. The parser sometimes inserts phantom underscores when -// it encounters otherwise unparseable situations. -func isPhantomUnderscore(id *ast.Ident, tok *token.File, src []byte) bool { - if id == nil || id.Name != "_" { - return false - } - - // Phantom underscore means the underscore is not actually in the - // program text. - offset, err := safetoken.Offset(tok, id.Pos()) - if err != nil { - return false - } - return len(src) <= offset || src[offset] != '_' -} - -// fixInitStmt fixes cases where the parser misinterprets an -// if/for/switch "init" statement as the "cond" conditional. In cases -// like "if i := 0" the user hasn't typed the semicolon yet so the -// parser is looking for the conditional expression. However, "i := 0" -// are not valid expressions, so we get a BadExpr. -// -// The resulting bool reports whether any fixing occurred. -func fixInitStmt(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) bool { - if !bad.Pos().IsValid() || !bad.End().IsValid() { - return false - } - - // Try to extract a statement from the BadExpr. 
- start, end, err := safetoken.Offsets(tok, bad.Pos(), bad.End()-1) - if err != nil { - return false - } - stmtBytes := src[start : end+1] - stmt, err := parseStmt(bad.Pos(), stmtBytes) - if err != nil { - return false - } - - // If the parent statement doesn't already have an "init" statement, - // move the extracted statement into the "init" field and insert a - // dummy expression into the required "cond" field. - switch p := parent.(type) { - case *ast.IfStmt: - if p.Init != nil { - return false - } - p.Init = stmt - p.Cond = &ast.Ident{ - Name: "_", - NamePos: stmt.End(), - } - return true - case *ast.ForStmt: - if p.Init != nil { - return false - } - p.Init = stmt - p.Cond = &ast.Ident{ - Name: "_", - NamePos: stmt.End(), - } - return true - case *ast.SwitchStmt: - if p.Init != nil { - return false - } - p.Init = stmt - p.Tag = nil - return true - } - return false -} - -// readKeyword reads the keyword starting at pos, if any. -func readKeyword(pos token.Pos, tok *token.File, src []byte) string { - var kwBytes []byte - offset, err := safetoken.Offset(tok, pos) - if err != nil { - return "" - } - for i := offset; i < len(src); i++ { - // Use a simplified identifier check since keywords are always lowercase ASCII. - if src[i] < 'a' || src[i] > 'z' { - break - } - kwBytes = append(kwBytes, src[i]) - - // Stop search at arbitrarily chosen too-long-for-a-keyword length. - if len(kwBytes) > 15 { - return "" - } - } - - if kw := string(kwBytes); token.Lookup(kw).IsKeyword() { - return kw - } - - return "" -} - -// fixArrayType tries to parse an *ast.BadExpr into an *ast.ArrayType. -// go/parser often turns lone array types like "[]int" into BadExprs -// if it isn't expecting a type. -func fixArrayType(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) bool { - // Our expected input is a bad expression that looks like "[]someExpr". 
- - from := bad.Pos() - to := bad.End() - - if !from.IsValid() || !to.IsValid() { - return false - } - - exprBytes := make([]byte, 0, int(to-from)+3) - // Avoid doing tok.Offset(to) since that panics if badExpr ends at EOF. - // It also panics if the position is not in the range of the file, and - // badExprs may not necessarily have good positions, so check first. - fromOffset, toOffset, err := safetoken.Offsets(tok, from, to-1) - if err != nil { - return false - } - exprBytes = append(exprBytes, src[fromOffset:toOffset+1]...) - exprBytes = bytes.TrimSpace(exprBytes) - - // If our expression ends in "]" (e.g. "[]"), add a phantom selector - // so we can complete directly after the "[]". - if len(exprBytes) > 0 && exprBytes[len(exprBytes)-1] == ']' { - exprBytes = append(exprBytes, '_') - } - - // Add "{}" to turn our ArrayType into a CompositeLit. This is to - // handle the case of "[...]int" where we must make it a composite - // literal to be parseable. - exprBytes = append(exprBytes, '{', '}') - - expr, err := parseExpr(from, exprBytes) - if err != nil { - return false - } - - cl, _ := expr.(*ast.CompositeLit) - if cl == nil { - return false - } - - at, _ := cl.Type.(*ast.ArrayType) - if at == nil { - return false - } - - return replaceNode(parent, bad, at) -} - -// precedingToken scans src to find the token preceding pos. -func precedingToken(pos token.Pos, tok *token.File, src []byte) token.Token { - s := &scanner.Scanner{} - s.Init(tok, src, nil, 0) - - var lastTok token.Token - for { - p, t, _ := s.Scan() - if t == token.EOF || p >= pos { - break - } - - lastTok = t - } - return lastTok -} - -// fixDeferOrGoStmt tries to parse an *ast.BadStmt into a defer or a go statement. -// -// go/parser packages a statement of the form "defer x." as an *ast.BadStmt because -// it does not include a call expression. This means that go/types skips type-checking -// this statement entirely, and we can't use the type information when completing. 
-// Here, we try to generate a fake *ast.DeferStmt or *ast.GoStmt to put into the AST, -// instead of the *ast.BadStmt. -func fixDeferOrGoStmt(bad *ast.BadStmt, parent ast.Node, tok *token.File, src []byte) bool { - // Check if we have a bad statement containing either a "go" or "defer". - s := &scanner.Scanner{} - s.Init(tok, src, nil, 0) - - var ( - pos token.Pos - tkn token.Token - ) - for { - if tkn == token.EOF { - return false - } - if pos >= bad.From { - break - } - pos, tkn, _ = s.Scan() - } - - var stmt ast.Stmt - switch tkn { - case token.DEFER: - stmt = &ast.DeferStmt{ - Defer: pos, - } - case token.GO: - stmt = &ast.GoStmt{ - Go: pos, - } - default: - return false - } - - var ( - from, to, last token.Pos - lastToken token.Token - braceDepth int - phantomSelectors []token.Pos - ) -FindTo: - for { - to, tkn, _ = s.Scan() - - if from == token.NoPos { - from = to - } - - switch tkn { - case token.EOF: - break FindTo - case token.SEMICOLON: - // If we aren't in nested braces, end of statement means - // end of expression. - if braceDepth == 0 { - break FindTo - } - case token.LBRACE: - braceDepth++ - } - - // This handles the common dangling selector case. For example in - // - // defer fmt. - // y := 1 - // - // we notice the dangling period and end our expression. - // - // If the previous token was a "." and we are looking at a "}", - // the period is likely a dangling selector and needs a phantom - // "_". Likewise if the current token is on a different line than - // the period, the period is likely a dangling selector. - if lastToken == token.PERIOD && (tkn == token.RBRACE || safetoken.Line(tok, to) > safetoken.Line(tok, last)) { - // Insert phantom "_" selector after the dangling ".". - phantomSelectors = append(phantomSelectors, last+1) - // If we aren't in a block then end the expression after the ".". 
- if braceDepth == 0 { - to = last + 1 - break - } - } - - lastToken = tkn - last = to - - switch tkn { - case token.RBRACE: - braceDepth-- - if braceDepth <= 0 { - if braceDepth == 0 { - // +1 to include the "}" itself. - to += 1 - } - break FindTo - } - } - } - - fromOffset, toOffset, err := safetoken.Offsets(tok, from, to) - if err != nil { - return false - } - if !from.IsValid() || fromOffset >= len(src) { - return false - } - if !to.IsValid() || toOffset >= len(src) { - return false - } - - // Insert any phantom selectors needed to prevent dangling "." from messing - // up the AST. - exprBytes := make([]byte, 0, int(to-from)+len(phantomSelectors)) - for i, b := range src[fromOffset:toOffset] { - if len(phantomSelectors) > 0 && from+token.Pos(i) == phantomSelectors[0] { - exprBytes = append(exprBytes, '_') - phantomSelectors = phantomSelectors[1:] - } - exprBytes = append(exprBytes, b) - } - - if len(phantomSelectors) > 0 { - exprBytes = append(exprBytes, '_') - } - - expr, err := parseExpr(from, exprBytes) - if err != nil { - return false - } - - // Package the expression into a fake *ast.CallExpr and re-insert - // into the function. - call := &ast.CallExpr{ - Fun: expr, - Lparen: to, - Rparen: to, - } - - switch stmt := stmt.(type) { - case *ast.DeferStmt: - stmt.Call = call - case *ast.GoStmt: - stmt.Call = call - } - - return replaceNode(parent, bad, stmt) -} - -// parseStmt parses the statement in src and updates its position to -// start at pos. -func parseStmt(pos token.Pos, src []byte) (ast.Stmt, error) { - // Wrap our expression to make it a valid Go file we can pass to ParseFile. - fileSrc := bytes.Join([][]byte{ - []byte("package fake;func _(){"), - src, - []byte("}"), - }, nil) - - // Use ParseFile instead of ParseExpr because ParseFile has - // best-effort behavior, whereas ParseExpr fails hard on any error. 
- fakeFile, err := parser.ParseFile(token.NewFileSet(), "", fileSrc, 0) - if fakeFile == nil { - return nil, fmt.Errorf("error reading fake file source: %v", err) - } - - // Extract our expression node from inside the fake file. - if len(fakeFile.Decls) == 0 { - return nil, fmt.Errorf("error parsing fake file: %v", err) - } - - fakeDecl, _ := fakeFile.Decls[0].(*ast.FuncDecl) - if fakeDecl == nil || len(fakeDecl.Body.List) == 0 { - return nil, fmt.Errorf("no statement in %s: %v", src, err) - } - - stmt := fakeDecl.Body.List[0] - - // parser.ParseFile returns undefined positions. - // Adjust them for the current file. - offsetPositions(stmt, pos-1-(stmt.Pos()-1)) - - return stmt, nil -} - -// parseExpr parses the expression in src and updates its position to -// start at pos. -func parseExpr(pos token.Pos, src []byte) (ast.Expr, error) { - stmt, err := parseStmt(pos, src) - if err != nil { - return nil, err - } - - exprStmt, ok := stmt.(*ast.ExprStmt) - if !ok { - return nil, fmt.Errorf("no expr in %s: %v", src, err) - } - - return exprStmt.X, nil -} - -var tokenPosType = reflect.TypeOf(token.NoPos) - -// offsetPositions applies an offset to the positions in an ast.Node. -func offsetPositions(n ast.Node, offset token.Pos) { - ast.Inspect(n, func(n ast.Node) bool { - if n == nil { - return false - } - - v := reflect.ValueOf(n).Elem() - - switch v.Kind() { - case reflect.Struct: - for i := 0; i < v.NumField(); i++ { - f := v.Field(i) - if f.Type() != tokenPosType { - continue - } - - if !f.CanSet() { - continue - } - - // Don't offset invalid positions: they should stay invalid. - if !token.Pos(f.Int()).IsValid() { - continue - } - - f.SetInt(f.Int() + int64(offset)) - } - } - - return true - }) -} - -// replaceNode updates parent's child oldChild to be newChild. It -// returns whether it replaced successfully. 
-func replaceNode(parent, oldChild, newChild ast.Node) bool { - if parent == nil || oldChild == nil || newChild == nil { - return false - } - - parentVal := reflect.ValueOf(parent).Elem() - if parentVal.Kind() != reflect.Struct { - return false - } - - newChildVal := reflect.ValueOf(newChild) - - tryReplace := func(v reflect.Value) bool { - if !v.CanSet() || !v.CanInterface() { - return false - } - - // If the existing value is oldChild, we found our child. Make - // sure our newChild is assignable and then make the swap. - if v.Interface() == oldChild && newChildVal.Type().AssignableTo(v.Type()) { - v.Set(newChildVal) - return true - } - - return false - } - - // Loop over parent's struct fields. - for i := 0; i < parentVal.NumField(); i++ { - f := parentVal.Field(i) - - switch f.Kind() { - // Check interface and pointer fields. - case reflect.Interface, reflect.Ptr: - if tryReplace(f) { - return true - } - - // Search through any slice fields. - case reflect.Slice: - for i := 0; i < f.Len(); i++ { - if tryReplace(f.Index(i)) { - return true - } - } - } - } - - return false -} diff --git a/gopls/internal/lsp/cache/parse_cache_test.go b/gopls/internal/lsp/cache/parse_cache_test.go deleted file mode 100644 index cc6a0c1e24e..00000000000 --- a/gopls/internal/lsp/cache/parse_cache_test.go +++ /dev/null @@ -1,233 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package cache - -import ( - "context" - "fmt" - "go/token" - "math/bits" - "testing" - "time" - - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" -) - -func skipIfNoParseCache(t *testing.T) { - if bits.UintSize == 32 { - t.Skip("the parse cache is not supported on 32-bit systems") - } -} - -func TestParseCache(t *testing.T) { - skipIfNoParseCache(t) - - ctx := context.Background() - uri := span.URI("file:///myfile") - fh := makeFakeFileHandle(uri, []byte("package p\n\nconst _ = \"foo\"")) - fset := token.NewFileSet() - - cache := newParseCache(0) - pgfs1, err := cache.parseFiles(ctx, fset, source.ParseFull, false, fh) - if err != nil { - t.Fatal(err) - } - pgf1 := pgfs1[0] - pgfs2, err := cache.parseFiles(ctx, fset, source.ParseFull, false, fh) - pgf2 := pgfs2[0] - if err != nil { - t.Fatal(err) - } - if pgf1 != pgf2 { - t.Errorf("parseFiles(%q): unexpected cache miss on repeated call", uri) - } - - // Fill up the cache with other files, but don't evict the file above. - cache.gcOnce() - files := []source.FileHandle{fh} - files = append(files, dummyFileHandles(parseCacheMinFiles-1)...) - - pgfs3, err := cache.parseFiles(ctx, fset, source.ParseFull, false, files...) - if err != nil { - t.Fatal(err) - } - pgf3 := pgfs3[0] - if pgf3 != pgf1 { - t.Errorf("parseFiles(%q, ...): unexpected cache miss", uri) - } - if pgf3.Tok.Base() != pgf1.Tok.Base() || pgf3.Tok.Size() != pgf1.Tok.Size() { - t.Errorf("parseFiles(%q, ...): result.Tok has base: %d, size: %d, want (%d, %d)", uri, pgf3.Tok.Base(), pgf3.Tok.Size(), pgf1.Tok.Base(), pgf1.Tok.Size()) - } - if tok := fset.File(token.Pos(pgf3.Tok.Base())); tok != pgf3.Tok { - t.Errorf("parseFiles(%q, ...): result.Tok not contained in FileSet", uri) - } - - // Now overwrite the cache, after which we should get new results. - cache.gcOnce() - files = dummyFileHandles(parseCacheMinFiles) - _, err = cache.parseFiles(ctx, fset, source.ParseFull, false, files...) 
- if err != nil { - t.Fatal(err) - } - // force a GC, which should collect the recently parsed files - cache.gcOnce() - pgfs4, err := cache.parseFiles(ctx, fset, source.ParseFull, false, fh) - if err != nil { - t.Fatal(err) - } - if pgfs4[0] == pgf1 { - t.Errorf("parseFiles(%q): unexpected cache hit after overwriting cache", uri) - } -} - -func TestParseCache_Reparsing(t *testing.T) { - skipIfNoParseCache(t) - - defer func(padding int) { - parsePadding = padding - }(parsePadding) - parsePadding = 0 - - files := dummyFileHandles(parseCacheMinFiles) - danglingSelector := []byte("package p\nfunc _() {\n\tx.\n}") - files = append(files, makeFakeFileHandle("file:///bad1", danglingSelector)) - files = append(files, makeFakeFileHandle("file:///bad2", danglingSelector)) - - // Parsing should succeed even though we overflow the padding. - cache := newParseCache(0) - _, err := cache.parseFiles(context.Background(), token.NewFileSet(), source.ParseFull, false, files...) - if err != nil { - t.Fatal(err) - } -} - -// Re-parsing the first file should not panic. -func TestParseCache_Issue59097(t *testing.T) { - skipIfNoParseCache(t) - - defer func(padding int) { - parsePadding = padding - }(parsePadding) - parsePadding = 0 - - danglingSelector := []byte("package p\nfunc _() {\n\tx.\n}") - files := []source.FileHandle{makeFakeFileHandle("file:///bad", danglingSelector)} - - // Parsing should succeed even though we overflow the padding. - cache := newParseCache(0) - _, err := cache.parseFiles(context.Background(), token.NewFileSet(), source.ParseFull, false, files...) 
- if err != nil { - t.Fatal(err) - } -} - -func TestParseCache_TimeEviction(t *testing.T) { - skipIfNoParseCache(t) - - ctx := context.Background() - fset := token.NewFileSet() - uri := span.URI("file:///myfile") - fh := makeFakeFileHandle(uri, []byte("package p\n\nconst _ = \"foo\"")) - - const gcDuration = 10 * time.Millisecond - cache := newParseCache(gcDuration) - cache.stop() // we'll manage GC manually, for testing. - - pgfs0, err := cache.parseFiles(ctx, fset, source.ParseFull, false, fh, fh) - if err != nil { - t.Fatal(err) - } - - files := dummyFileHandles(parseCacheMinFiles) - _, err = cache.parseFiles(ctx, fset, source.ParseFull, false, files...) - if err != nil { - t.Fatal(err) - } - - // Even after filling up the 'min' files, we get a cache hit for our original file. - pgfs1, err := cache.parseFiles(ctx, fset, source.ParseFull, false, fh, fh) - if err != nil { - t.Fatal(err) - } - - if pgfs0[0] != pgfs1[0] { - t.Errorf("before GC, got unexpected cache miss") - } - - // But after GC, we get a cache miss. - _, err = cache.parseFiles(ctx, fset, source.ParseFull, false, files...) 
// mark dummy files as newer - if err != nil { - t.Fatal(err) - } - time.Sleep(gcDuration) - cache.gcOnce() - - pgfs2, err := cache.parseFiles(ctx, fset, source.ParseFull, false, fh, fh) - if err != nil { - t.Fatal(err) - } - - if pgfs0[0] == pgfs2[0] { - t.Errorf("after GC, got unexpected cache hit for %s", pgfs0[0].URI) - } -} - -func TestParseCache_Duplicates(t *testing.T) { - skipIfNoParseCache(t) - - ctx := context.Background() - uri := span.URI("file:///myfile") - fh := makeFakeFileHandle(uri, []byte("package p\n\nconst _ = \"foo\"")) - - cache := newParseCache(0) - pgfs, err := cache.parseFiles(ctx, token.NewFileSet(), source.ParseFull, false, fh, fh) - if err != nil { - t.Fatal(err) - } - if pgfs[0] != pgfs[1] { - t.Errorf("parseFiles(fh, fh): = [%p, %p], want duplicate files", pgfs[0].File, pgfs[1].File) - } -} - -func dummyFileHandles(n int) []source.FileHandle { - var fhs []source.FileHandle - for i := 0; i < n; i++ { - uri := span.URI(fmt.Sprintf("file:///_%d", i)) - src := []byte(fmt.Sprintf("package p\nvar _ = %d", i)) - fhs = append(fhs, makeFakeFileHandle(uri, src)) - } - return fhs -} - -func makeFakeFileHandle(uri span.URI, src []byte) fakeFileHandle { - return fakeFileHandle{ - uri: uri, - data: src, - hash: source.HashOf(src), - } -} - -type fakeFileHandle struct { - source.FileHandle - uri span.URI - data []byte - hash source.Hash -} - -func (h fakeFileHandle) URI() span.URI { - return h.uri -} - -func (h fakeFileHandle) Content() ([]byte, error) { - return h.data, nil -} - -func (h fakeFileHandle) FileIdentity() source.FileIdentity { - return source.FileIdentity{ - URI: h.uri, - Hash: h.hash, - } -} diff --git a/gopls/internal/lsp/cache/pkg.go b/gopls/internal/lsp/cache/pkg.go deleted file mode 100644 index 68ec38a4bef..00000000000 --- a/gopls/internal/lsp/cache/pkg.go +++ /dev/null @@ -1,177 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cache - -import ( - "context" - "fmt" - "go/ast" - "go/scanner" - "go/token" - "go/types" - "sync" - - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/source/methodsets" - "golang.org/x/tools/gopls/internal/lsp/source/xrefs" - "golang.org/x/tools/gopls/internal/span" -) - -// Convenient local aliases for typed strings. -type ( - PackageID = source.PackageID - PackagePath = source.PackagePath - PackageName = source.PackageName - ImportPath = source.ImportPath -) - -// A Package is the union of package metadata and type checking results. -// -// TODO(rfindley): for now, we do not persist the post-processing of -// loadDiagnostics, because the value of the snapshot.packages map is just the -// package handle. Fix this. -type Package struct { - m *source.Metadata - pkg *syntaxPackage -} - -// syntaxPackage contains parse trees and type information for a package. 
-type syntaxPackage struct { - // -- identifiers -- - id PackageID - - // -- outputs -- - fset *token.FileSet // for now, same as the snapshot's FileSet - goFiles []*source.ParsedGoFile - compiledGoFiles []*source.ParsedGoFile - diagnostics []*source.Diagnostic - parseErrors []scanner.ErrorList - typeErrors []types.Error - types *types.Package - typesInfo *types.Info - importMap map[PackagePath]*types.Package - hasFixedFiles bool // if true, AST was sufficiently mangled that we should hide type errors - - xrefsOnce sync.Once - _xrefs []byte // only used by the xrefs method - - methodsetsOnce sync.Once - _methodsets *methodsets.Index // only used by the methodsets method -} - -func (p *syntaxPackage) xrefs() []byte { - p.xrefsOnce.Do(func() { - p._xrefs = xrefs.Index(p.compiledGoFiles, p.types, p.typesInfo) - }) - return p._xrefs -} - -func (p *syntaxPackage) methodsets() *methodsets.Index { - p.methodsetsOnce.Do(func() { - p._methodsets = methodsets.NewIndex(p.fset, p.types) - }) - return p._methodsets -} - -func (p *Package) String() string { return string(p.m.ID) } - -func (p *Package) Metadata() *source.Metadata { return p.m } - -// A loadScope defines a package loading scope for use with go/packages. -// -// TODO(rfindley): move this to load.go. -type loadScope interface { - aScope() -} - -type ( - fileLoadScope span.URI // load packages containing a file (including command-line-arguments) - packageLoadScope string // load a specific package (the value is its PackageID) - moduleLoadScope struct { - dir string // dir containing the go.mod file - modulePath string // parsed module path - } - viewLoadScope span.URI // load the workspace -) - -// Implement the loadScope interface. 
-func (fileLoadScope) aScope() {} -func (packageLoadScope) aScope() {} -func (moduleLoadScope) aScope() {} -func (viewLoadScope) aScope() {} - -func (p *Package) CompiledGoFiles() []*source.ParsedGoFile { - return p.pkg.compiledGoFiles -} - -func (p *Package) File(uri span.URI) (*source.ParsedGoFile, error) { - return p.pkg.File(uri) -} - -func (pkg *syntaxPackage) File(uri span.URI) (*source.ParsedGoFile, error) { - for _, cgf := range pkg.compiledGoFiles { - if cgf.URI == uri { - return cgf, nil - } - } - for _, gf := range pkg.goFiles { - if gf.URI == uri { - return gf, nil - } - } - return nil, fmt.Errorf("no parsed file for %s in %v", uri, pkg.id) -} - -func (p *Package) GetSyntax() []*ast.File { - var syntax []*ast.File - for _, pgf := range p.pkg.compiledGoFiles { - syntax = append(syntax, pgf.File) - } - return syntax -} - -func (p *Package) FileSet() *token.FileSet { - return p.pkg.fset -} - -func (p *Package) GetTypes() *types.Package { - return p.pkg.types -} - -func (p *Package) GetTypesInfo() *types.Info { - return p.pkg.typesInfo -} - -// DependencyTypes returns the type checker's symbol for the specified -// package. It returns nil if path is not among the transitive -// dependencies of p, or if no symbols from that package were -// referenced during the type-checking of p. 
-func (p *Package) DependencyTypes(path source.PackagePath) *types.Package { - return p.pkg.importMap[path] -} - -func (p *Package) GetParseErrors() []scanner.ErrorList { - return p.pkg.parseErrors -} - -func (p *Package) GetTypeErrors() []types.Error { - return p.pkg.typeErrors -} - -func (p *Package) DiagnosticsForFile(ctx context.Context, s source.Snapshot, uri span.URI) ([]*source.Diagnostic, error) { - var diags []*source.Diagnostic - for _, diag := range p.m.Diagnostics { - if diag.URI == uri { - diags = append(diags, diag) - } - } - for _, diag := range p.pkg.diagnostics { - if diag.URI == uri { - diags = append(diags, diag) - } - } - - return diags, nil -} diff --git a/gopls/internal/lsp/cache/session.go b/gopls/internal/lsp/cache/session.go deleted file mode 100644 index e565f19e1a6..00000000000 --- a/gopls/internal/lsp/cache/session.go +++ /dev/null @@ -1,704 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cache - -import ( - "context" - "fmt" - "strconv" - "strings" - "sync" - "sync/atomic" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/source/typerefs" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/gopls/internal/vulncheck" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/gocommand" - "golang.org/x/tools/internal/imports" - "golang.org/x/tools/internal/memoize" - "golang.org/x/tools/internal/persistent" - "golang.org/x/tools/internal/xcontext" -) - -type Session struct { - // Unique identifier for this session. - id string - - // Immutable attributes shared across views. 
- cache *Cache // shared cache - gocmdRunner *gocommand.Runner // limits go command concurrency - - viewMu sync.Mutex - views []*View - viewMap map[span.URI]*View // file->best view - - parseCache *parseCache - - *overlayFS -} - -// ID returns the unique identifier for this session on this server. -func (s *Session) ID() string { return s.id } -func (s *Session) String() string { return s.id } - -// GoCommandRunner returns the gocommand Runner for this session. -func (s *Session) GoCommandRunner() *gocommand.Runner { - return s.gocmdRunner -} - -// Shutdown the session and all views it has created. -func (s *Session) Shutdown(ctx context.Context) { - var views []*View - s.viewMu.Lock() - views = append(views, s.views...) - s.views = nil - s.viewMap = nil - s.viewMu.Unlock() - for _, view := range views { - view.shutdown() - } - s.parseCache.stop() - event.Log(ctx, "Shutdown session", KeyShutdownSession.Of(s)) -} - -// Cache returns the cache that created this session, for debugging only. -func (s *Session) Cache() *Cache { - return s.cache -} - -// NewView creates a new View, returning it and its first snapshot. If a -// non-empty tempWorkspace directory is provided, the View will record a copy -// of its gopls workspace module in that directory, so that client tooling -// can execute in the same main module. On success it also returns a release -// function that must be called when the Snapshot is no longer needed. 
-func (s *Session) NewView(ctx context.Context, folder *Folder) (*View, source.Snapshot, func(), error) { - s.viewMu.Lock() - defer s.viewMu.Unlock() - for _, view := range s.views { - if span.SameExistingFile(view.folder.Dir, folder.Dir) { - return nil, nil, nil, source.ErrViewExists - } - } - info, err := getWorkspaceInformation(ctx, s.gocmdRunner, s, folder) - if err != nil { - return nil, nil, nil, err - } - view, snapshot, release, err := s.createView(ctx, info, folder, 0) - if err != nil { - return nil, nil, nil, err - } - s.views = append(s.views, view) - // we always need to drop the view map - s.viewMap = make(map[span.URI]*View) - return view, snapshot, release, nil -} - -// TODO(rfindley): clarify that createView can never be cancelled (with the -// possible exception of server shutdown). -// On success, the caller becomes responsible for calling the release function once. -func (s *Session) createView(ctx context.Context, info *workspaceInformation, folder *Folder, seqID uint64) (*View, *snapshot, func(), error) { - index := atomic.AddInt64(&viewIndex, 1) - - gowork, _ := info.GOWORK() - wsModFiles, wsModFilesErr := computeWorkspaceModFiles(ctx, info.gomod, gowork, info.effectiveGO111MODULE(), s) - - // We want a true background context and not a detached context here - // the spans need to be unrelated and no tag values should pollute it. 
- baseCtx := event.Detach(xcontext.Detach(ctx)) - backgroundCtx, cancel := context.WithCancel(baseCtx) - - v := &View{ - id: strconv.FormatInt(index, 10), - gocmdRunner: s.gocmdRunner, - folder: folder, - initialWorkspaceLoad: make(chan struct{}), - initializationSema: make(chan struct{}, 1), - moduleUpgrades: map[span.URI]map[string]string{}, - vulns: map[span.URI]*vulncheck.Result{}, - parseCache: s.parseCache, - fs: s.overlayFS, - workspaceInformation: info, - } - v.importsState = &importsState{ - ctx: baseCtx, - processEnv: &imports.ProcessEnv{ - GocmdRunner: s.gocmdRunner, - SkipPathInScan: func(dir string) bool { - prefix := strings.TrimSuffix(string(v.folder.Dir), "/") + "/" - uri := strings.TrimSuffix(string(span.URIFromPath(dir)), "/") - if !strings.HasPrefix(uri+"/", prefix) { - return false - } - filterer := source.NewFilterer(folder.Options.DirectoryFilters) - rel := strings.TrimPrefix(uri, prefix) - disallow := filterer.Disallow(rel) - return disallow - }, - }, - } - v.snapshot = &snapshot{ - sequenceID: seqID, - globalID: nextSnapshotID(), - view: v, - backgroundCtx: backgroundCtx, - cancel: cancel, - store: s.cache.store, - packages: new(persistent.Map[PackageID, *packageHandle]), - meta: new(metadataGraph), - files: newFileMap(), - activePackages: new(persistent.Map[PackageID, *Package]), - symbolizeHandles: new(persistent.Map[span.URI, *memoize.Promise]), - workspacePackages: make(map[PackageID]PackagePath), - unloadableFiles: new(persistent.Set[span.URI]), - parseModHandles: new(persistent.Map[span.URI, *memoize.Promise]), - parseWorkHandles: new(persistent.Map[span.URI, *memoize.Promise]), - modTidyHandles: new(persistent.Map[span.URI, *memoize.Promise]), - modVulnHandles: new(persistent.Map[span.URI, *memoize.Promise]), - modWhyHandles: new(persistent.Map[span.URI, *memoize.Promise]), - workspaceModFiles: wsModFiles, - workspaceModFilesErr: wsModFilesErr, - pkgIndex: typerefs.NewPackageIndex(), - } - // Save one reference in the view. 
- v.releaseSnapshot = v.snapshot.Acquire() - - // Record the environment of the newly created view in the log. - event.Log(ctx, viewEnv(v)) - - // Initialize the view without blocking. - initCtx, initCancel := context.WithCancel(xcontext.Detach(ctx)) - v.initCancelFirstAttempt = initCancel - snapshot := v.snapshot - - // Pass a second reference to the background goroutine. - bgRelease := snapshot.Acquire() - go func() { - defer bgRelease() - snapshot.initialize(initCtx, true) - }() - - // Return a third reference to the caller. - return v, snapshot, snapshot.Acquire(), nil -} - -// ViewByName returns a view with a matching name, if the session has one. -func (s *Session) ViewByName(name string) *View { - s.viewMu.Lock() - defer s.viewMu.Unlock() - for _, view := range s.views { - if view.Name() == name { - return view - } - } - return nil -} - -// View returns the view with a matching id, if present. -func (s *Session) View(id string) (*View, error) { - s.viewMu.Lock() - defer s.viewMu.Unlock() - for _, view := range s.views { - if view.ID() == id { - return view, nil - } - } - return nil, fmt.Errorf("no view with ID %q", id) -} - -// ViewOf returns a view corresponding to the given URI. -// If the file is not already associated with a view, pick one using some heuristics. -func (s *Session) ViewOf(uri span.URI) (*View, error) { - s.viewMu.Lock() - defer s.viewMu.Unlock() - return s.viewOfLocked(uri) -} - -// Precondition: caller holds s.viewMu lock. -func (s *Session) viewOfLocked(uri span.URI) (*View, error) { - // Check if we already know this file. - if v, found := s.viewMap[uri]; found { - return v, nil - } - // Pick the best view for this file and memoize the result. 
- if len(s.views) == 0 { - return nil, fmt.Errorf("no views in session") - } - s.viewMap[uri] = bestViewForURI(uri, s.views) - return s.viewMap[uri], nil -} - -func (s *Session) Views() []*View { - s.viewMu.Lock() - defer s.viewMu.Unlock() - result := make([]*View, len(s.views)) - copy(result, s.views) - return result -} - -// bestViewForURI returns the most closely matching view for the given URI -// out of the given set of views. -func bestViewForURI(uri span.URI, views []*View) *View { - // we need to find the best view for this file - var longest *View - for _, view := range views { - if longest != nil && len(longest.Folder()) > len(view.Folder()) { - continue - } - // TODO(rfindley): this should consider the workspace layout (i.e. - // go.work). - if view.contains(uri) { - longest = view - } - } - if longest != nil { - return longest - } - // Try our best to return a view that knows the file. - for _, view := range views { - if view.knownFile(uri) { - return view - } - } - // TODO: are there any more heuristics we can use? - return views[0] -} - -// RemoveView removes the view v from the session -func (s *Session) RemoveView(view *View) { - s.viewMu.Lock() - defer s.viewMu.Unlock() - - i := s.dropView(view) - if i == -1 { // error reported elsewhere - return - } - // delete this view... we don't care about order but we do want to make - // sure we can garbage collect the view - s.views = removeElement(s.views, i) -} - -// updateViewLocked recreates the view with the given options. -// -// If the resulting error is non-nil, the view may or may not have already been -// dropped from the session. -func (s *Session) updateViewLocked(ctx context.Context, view *View, info *workspaceInformation, folder *Folder) (*View, error) { - // Preserve the snapshot ID if we are recreating the view. 
- view.snapshotMu.Lock() - if view.snapshot == nil { - view.snapshotMu.Unlock() - panic("updateView called after View was already shut down") - } - // TODO(rfindley): we should probably increment the sequence ID here. - seqID := view.snapshot.sequenceID // Preserve sequence IDs when updating a view in place. - view.snapshotMu.Unlock() - - i := s.dropView(view) - if i == -1 { - return nil, fmt.Errorf("view %q not found", view.id) - } - - var ( - snapshot *snapshot - release func() - err error - ) - view, snapshot, release, err = s.createView(ctx, info, folder, seqID) - if err != nil { - // we have dropped the old view, but could not create the new one - // this should not happen and is very bad, but we still need to clean - // up the view array if it happens - s.views = removeElement(s.views, i) - return nil, err - } - defer release() - - // The new snapshot has lost the history of the previous view. As a result, - // it may not see open files that aren't in its build configuration (as it - // would have done via didOpen notifications). This can lead to inconsistent - // behavior when configuration is changed mid-session. - // - // Ensure the new snapshot observes all open files. - for _, o := range view.fs.Overlays() { - _, _ = snapshot.ReadFile(ctx, o.URI()) - } - - // substitute the new view into the array where the old view was - s.views[i] = view - return view, nil -} - -// removeElement removes the ith element from the slice replacing it with the last element. -// TODO(adonovan): generics, someday. -func removeElement(slice []*View, index int) []*View { - last := len(slice) - 1 - slice[index] = slice[last] - slice[last] = nil // aid GC - return slice[:last] -} - -// dropView removes v from the set of views for the receiver s and calls -// v.shutdown, returning the index of v in s.views (if found), or -1 if v was -// not found. s.viewMu must be held while calling this function. 
-func (s *Session) dropView(v *View) int { - // we always need to drop the view map - s.viewMap = make(map[span.URI]*View) - for i := range s.views { - if v == s.views[i] { - // we found the view, drop it and return the index it was found at - s.views[i] = nil - v.shutdown() - return i - } - } - // TODO(rfindley): it looks wrong that we don't shutdown v in this codepath. - // We should never get here. - bug.Reportf("tried to drop nonexistent view %q", v.id) - return -1 -} - -func (s *Session) ModifyFiles(ctx context.Context, changes []source.FileModification) error { - _, release, err := s.DidModifyFiles(ctx, changes) - release() - return err -} - -// ResetView resets the best view for the given URI. -func (s *Session) ResetView(ctx context.Context, uri span.URI) (*View, error) { - s.viewMu.Lock() - defer s.viewMu.Unlock() - v := bestViewForURI(uri, s.views) - return s.updateViewLocked(ctx, v, v.workspaceInformation, v.folder) -} - -// DidModifyFiles reports a file modification to the session. It returns -// the new snapshots after the modifications have been applied, paired with -// the affected file URIs for those snapshots. -// On success, it returns a release function that -// must be called when the snapshots are no longer needed. -// -// TODO(rfindley): what happens if this function fails? It must leave us in a -// broken state, which we should surface to the user, probably as a request to -// restart gopls. -func (s *Session) DidModifyFiles(ctx context.Context, changes []source.FileModification) (map[source.Snapshot][]span.URI, func(), error) { - s.viewMu.Lock() - defer s.viewMu.Unlock() - - // Update overlays. - // - // TODO(rfindley): I think we do this while holding viewMu to prevent views - // from seeing the updated file content before they have processed - // invalidations, which could lead to a partial view of the changes (i.e. - // spurious diagnostics). 
However, any such view would immediately be - // invalidated here, so it is possible that we could update overlays before - // acquiring viewMu. - if err := s.updateOverlays(ctx, changes); err != nil { - return nil, nil, err - } - - // Re-create views whose definition may have changed. - // - // checkViews controls whether to re-evaluate view definitions when - // collecting views below. Any addition or deletion of a go.mod or go.work - // file may have affected the definition of the view. - checkViews := false - - for _, c := range changes { - // TODO(rfindley): go.work files need not be named "go.work" -- we need to - // check each view's source. - if isGoMod(c.URI) || isGoWork(c.URI) { - // Change, InvalidateMetadata, and UnknownFileAction actions do not cause - // us to re-evaluate views. - redoViews := (c.Action != source.Change && - c.Action != source.UnknownFileAction) - - if redoViews { - checkViews = true - break - } - } - } - - if checkViews { - for _, view := range s.views { - // TODO(rfindley): can we avoid running the go command (go env) - // synchronously to change processing? Can we assume that the env did not - // change, and derive go.work using a combination of the configured - // GOWORK value and filesystem? - info, err := getWorkspaceInformation(ctx, s.gocmdRunner, s, view.folder) - if err != nil { - // Catastrophic failure, equivalent to a failure of session - // initialization and therefore should almost never happen. One - // scenario where this failure mode could occur is if some file - // permissions have changed preventing us from reading go.mod - // files. - // - // TODO(rfindley): consider surfacing this error more loudly. We - // could report a bug, but it's not really a bug. - event.Error(ctx, "fetching workspace information", err) - } else if *info != *view.workspaceInformation { - if _, err := s.updateViewLocked(ctx, view, info, view.folder); err != nil { - // More catastrophic failure. The view may or may not still exist. 
- // The best we can do is log and move on. - event.Error(ctx, "recreating view", err) - } - } - } - } - - // Collect information about views affected by these changes. - views := make(map[*View]map[span.URI]source.FileHandle) - affectedViews := map[span.URI][]*View{} - for _, c := range changes { - // Build the list of affected views. - var changedViews []*View - for _, view := range s.views { - // Don't propagate changes that are outside of the view's scope - // or knowledge. - if !view.relevantChange(c) { - continue - } - changedViews = append(changedViews, view) - } - // If the change is not relevant to any view, but the change is - // happening in the editor, assign it the most closely matching view. - if len(changedViews) == 0 { - if c.OnDisk { - continue - } - bestView, err := s.viewOfLocked(c.URI) - if err != nil { - return nil, nil, err - } - changedViews = append(changedViews, bestView) - } - affectedViews[c.URI] = changedViews - - // Apply the changes to all affected views. - fh := mustReadFile(ctx, s, c.URI) - for _, view := range changedViews { - // Make sure that the file is added to the view's seenFiles set. - view.markKnown(c.URI) - if _, ok := views[view]; !ok { - views[view] = make(map[span.URI]source.FileHandle) - } - views[view][c.URI] = fh - } - } - - var releases []func() - viewToSnapshot := map[*View]*snapshot{} - for view, changed := range views { - snapshot, release := view.invalidateContent(ctx, changed) - releases = append(releases, release) - viewToSnapshot[view] = snapshot - } - - // The release function is called when the - // returned URIs no longer need to be valid. - release := func() { - for _, release := range releases { - release() - } - } - - // We only want to diagnose each changed file once, in the view to which - // it "most" belongs. We do this by picking the best view for each URI, - // and then aggregating the set of snapshots and their URIs (to avoid - // diagnosing the same snapshot multiple times). 
- snapshotURIs := map[source.Snapshot][]span.URI{} - for _, mod := range changes { - viewSlice, ok := affectedViews[mod.URI] - if !ok || len(viewSlice) == 0 { - continue - } - view := bestViewForURI(mod.URI, viewSlice) - snapshot, ok := viewToSnapshot[view] - if !ok { - panic(fmt.Sprintf("no snapshot for view %s", view.Folder())) - } - snapshotURIs[snapshot] = append(snapshotURIs[snapshot], mod.URI) - } - - return snapshotURIs, release, nil -} - -// ExpandModificationsToDirectories returns the set of changes with the -// directory changes removed and expanded to include all of the files in -// the directory. -func (s *Session) ExpandModificationsToDirectories(ctx context.Context, changes []source.FileModification) []source.FileModification { - var snapshots []*snapshot - s.viewMu.Lock() - for _, v := range s.views { - snapshot, release, err := v.getSnapshot() - if err != nil { - continue // view is shut down; continue with others - } - defer release() - snapshots = append(snapshots, snapshot) - } - s.viewMu.Unlock() - - // Expand the modification to any file we could care about, which we define - // to be any file observed by any of the snapshots. - // - // There may be other files in the directory, but if we haven't read them yet - // we don't need to invalidate them. - var result []source.FileModification - for _, c := range changes { - expanded := make(map[span.URI]bool) - for _, snapshot := range snapshots { - for _, uri := range snapshot.filesInDir(c.URI) { - expanded[uri] = true - } - } - if len(expanded) == 0 { - result = append(result, c) - } else { - for uri := range expanded { - result = append(result, source.FileModification{ - URI: uri, - Action: c.Action, - LanguageID: "", - OnDisk: c.OnDisk, - // changes to directories cannot include text or versions - }) - } - } - } - return result -} - -// Precondition: caller holds s.viewMu lock. -// TODO(rfindley): move this to fs_overlay.go. 
-func (fs *overlayFS) updateOverlays(ctx context.Context, changes []source.FileModification) error { - fs.mu.Lock() - defer fs.mu.Unlock() - - for _, c := range changes { - o, ok := fs.overlays[c.URI] - - // If the file is not opened in an overlay and the change is on disk, - // there's no need to update an overlay. If there is an overlay, we - // may need to update the overlay's saved value. - if !ok && c.OnDisk { - continue - } - - // Determine the file kind on open, otherwise, assume it has been cached. - var kind source.FileKind - switch c.Action { - case source.Open: - kind = source.FileKindForLang(c.LanguageID) - default: - if !ok { - return fmt.Errorf("updateOverlays: modifying unopened overlay %v", c.URI) - } - kind = o.kind - } - - // Closing a file just deletes its overlay. - if c.Action == source.Close { - delete(fs.overlays, c.URI) - continue - } - - // If the file is on disk, check if its content is the same as in the - // overlay. Saves and on-disk file changes don't come with the file's - // content. - text := c.Text - if text == nil && (c.Action == source.Save || c.OnDisk) { - if !ok { - return fmt.Errorf("no known content for overlay for %s", c.Action) - } - text = o.content - } - // On-disk changes don't come with versions. - version := c.Version - if c.OnDisk || c.Action == source.Save { - version = o.version - } - hash := source.HashOf(text) - var sameContentOnDisk bool - switch c.Action { - case source.Delete: - // Do nothing. sameContentOnDisk should be false. - case source.Save: - // Make sure the version and content (if present) is the same. 
- if false && o.version != version { // Client no longer sends the version - return fmt.Errorf("updateOverlays: saving %s at version %v, currently at %v", c.URI, c.Version, o.version) - } - if c.Text != nil && o.hash != hash { - return fmt.Errorf("updateOverlays: overlay %s changed on save", c.URI) - } - sameContentOnDisk = true - default: - fh := mustReadFile(ctx, fs.delegate, c.URI) - _, readErr := fh.Content() - sameContentOnDisk = (readErr == nil && fh.FileIdentity().Hash == hash) - } - o = &Overlay{ - uri: c.URI, - version: version, - content: text, - kind: kind, - hash: hash, - saved: sameContentOnDisk, - } - - // NOTE: previous versions of this code checked here that the overlay had a - // view and file kind (but we don't know why). - - fs.overlays[c.URI] = o - } - - return nil -} - -func mustReadFile(ctx context.Context, fs source.FileSource, uri span.URI) source.FileHandle { - ctx = xcontext.Detach(ctx) - fh, err := fs.ReadFile(ctx, uri) - if err != nil { - // ReadFile cannot fail with an uncancellable context. - bug.Reportf("reading file failed unexpectedly: %v", err) - return brokenFile{uri, err} - } - return fh -} - -// A brokenFile represents an unexpected failure to read a file. -type brokenFile struct { - uri span.URI - err error -} - -func (b brokenFile) URI() span.URI { return b.uri } -func (b brokenFile) FileIdentity() source.FileIdentity { return source.FileIdentity{URI: b.uri} } -func (b brokenFile) SameContentsOnDisk() bool { return false } -func (b brokenFile) Version() int32 { return 0 } -func (b brokenFile) Content() ([]byte, error) { return nil, b.err } - -// FileWatchingGlobPatterns returns a new set of glob patterns to -// watch every directory known by the view. For views within a module, -// this is the module root, any directory in the module root, and any -// replace targets. 
-func (s *Session) FileWatchingGlobPatterns(ctx context.Context) map[string]struct{} { - s.viewMu.Lock() - defer s.viewMu.Unlock() - patterns := map[string]struct{}{} - for _, view := range s.views { - snapshot, release, err := view.getSnapshot() - if err != nil { - continue // view is shut down; continue with others - } - for k, v := range snapshot.fileWatchingGlobPatterns(ctx) { - patterns[k] = v - } - release() - } - return patterns -} diff --git a/gopls/internal/lsp/cache/snapshot.go b/gopls/internal/lsp/cache/snapshot.go deleted file mode 100644 index 47dd1ab714f..00000000000 --- a/gopls/internal/lsp/cache/snapshot.go +++ /dev/null @@ -1,2464 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cache - -import ( - "bytes" - "context" - "errors" - "fmt" - "go/ast" - "go/build/constraint" - "go/token" - "go/types" - "io" - "log" - "os" - "path/filepath" - "regexp" - "runtime" - "sort" - "strconv" - "strings" - "sync" - "sync/atomic" - "unsafe" - - "golang.org/x/sync/errgroup" - "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/types/objectpath" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/filecache" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/source/methodsets" - "golang.org/x/tools/gopls/internal/lsp/source/typerefs" - "golang.org/x/tools/gopls/internal/lsp/source/xrefs" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" - "golang.org/x/tools/internal/gocommand" - "golang.org/x/tools/internal/memoize" - "golang.org/x/tools/internal/packagesinternal" - "golang.org/x/tools/internal/persistent" - "golang.org/x/tools/internal/typesinternal" - 
"golang.org/x/tools/internal/xcontext" -) - -type snapshot struct { - sequenceID uint64 - globalID source.GlobalSnapshotID - - // TODO(rfindley): the snapshot holding a reference to the view poses - // lifecycle problems: a view may be shut down and waiting for work - // associated with this snapshot to complete. While most accesses of the view - // are benign (options or workspace information), this is not formalized and - // it is wrong for the snapshot to use a shutdown view. - // - // Fix this by passing options and workspace information to the snapshot, - // both of which should be immutable for the snapshot. - view *View - - cancel func() - backgroundCtx context.Context - - store *memoize.Store // cache of handles shared by all snapshots - - refcount sync.WaitGroup // number of references - destroyedBy *string // atomically set to non-nil in Destroy once refcount = 0 - - // initialized reports whether the snapshot has been initialized. Concurrent - // initialization is guarded by the view.initializationSema. Each snapshot is - // initialized at most once: concurrent initialization is guarded by - // view.initializationSema. - initialized bool - // initializedErr holds the last error resulting from initialization. If - // initialization fails, we only retry when the workspace modules change, - // to avoid too many go/packages calls. - initializedErr *source.CriticalError - - // mu guards all of the maps in the snapshot, as well as the builtin URI. - mu sync.Mutex - - // builtin is the location of builtin.go in GOROOT. - // - // TODO(rfindley): would it make more sense to eagerly parse builtin, and - // instead store a *ParsedGoFile here? - builtin span.URI - - // meta holds loaded metadata. - // - // meta is guarded by mu, but the metadataGraph itself is immutable. - // TODO(rfindley): in many places we hold mu while operating on meta, even - // though we only need to hold mu while reading the pointer. 
- meta *metadataGraph - - // files maps file URIs to their corresponding FileHandles. - // It may invalidated when a file's content changes. - files *fileMap - - // symbolizeHandles maps each file URI to a handle for the future - // result of computing the symbols declared in that file. - symbolizeHandles *persistent.Map[span.URI, *memoize.Promise] // *memoize.Promise[symbolizeResult] - - // packages maps a packageKey to a *packageHandle. - // It may be invalidated when a file's content changes. - // - // Invariants to preserve: - // - packages.Get(id).meta == meta.metadata[id] for all ids - // - if a package is in packages, then all of its dependencies should also - // be in packages, unless there is a missing import - packages *persistent.Map[PackageID, *packageHandle] - - // activePackages maps a package ID to a memoized active package, or nil if - // the package is known not to be open. - // - // IDs not contained in the map are not known to be open or not open. - activePackages *persistent.Map[PackageID, *Package] - - // workspacePackages contains the workspace's packages, which are loaded - // when the view is created. It contains no intermediate test variants. - // TODO(rfindley): use a persistent.Map. - workspacePackages map[PackageID]PackagePath - - // shouldLoad tracks packages that need to be reloaded, mapping a PackageID - // to the package paths that should be used to reload it - // - // When we try to load a package, we clear it from the shouldLoad map - // regardless of whether the load succeeded, to prevent endless loads. - shouldLoad map[PackageID][]PackagePath - - // unloadableFiles keeps track of files that we've failed to load. - unloadableFiles *persistent.Set[span.URI] - - // TODO(rfindley): rename the handles below to "promises". A promise is - // different from a handle (we mutate the package handle.) - - // parseModHandles keeps track of any parseModHandles for the snapshot. - // The handles need not refer to only the view's go.mod file. 
- parseModHandles *persistent.Map[span.URI, *memoize.Promise] // *memoize.Promise[parseModResult] - - // parseWorkHandles keeps track of any parseWorkHandles for the snapshot. - // The handles need not refer to only the view's go.work file. - parseWorkHandles *persistent.Map[span.URI, *memoize.Promise] // *memoize.Promise[parseWorkResult] - - // Preserve go.mod-related handles to avoid garbage-collecting the results - // of various calls to the go command. The handles need not refer to only - // the view's go.mod file. - modTidyHandles *persistent.Map[span.URI, *memoize.Promise] // *memoize.Promise[modTidyResult] - modWhyHandles *persistent.Map[span.URI, *memoize.Promise] // *memoize.Promise[modWhyResult] - modVulnHandles *persistent.Map[span.URI, *memoize.Promise] // *memoize.Promise[modVulnResult] - - // workspaceModFiles holds the set of mod files active in this snapshot. - // - // This is either empty, a single entry for the workspace go.mod file, or the - // set of mod files used by the workspace go.work file. - // - // This set is immutable inside the snapshot, and therefore is not guarded by mu. - workspaceModFiles map[span.URI]struct{} - workspaceModFilesErr error // error encountered computing workspaceModFiles - - // importGraph holds a shared import graph to use for type-checking. Adding - // more packages to this import graph can speed up type checking, at the - // expense of in-use memory. - // - // See getImportGraph for additional documentation. - importGraphDone chan struct{} // closed when importGraph is set; may be nil - importGraph *importGraph // copied from preceding snapshot and re-evaluated - - // pkgIndex is an index of package IDs, for efficient storage of typerefs. - pkgIndex *typerefs.PackageIndex - - // Only compute module prefixes once, as they are used with high frequency to - // detect ignored files. 
- ignoreFilterOnce sync.Once - ignoreFilter *ignoreFilter -} - -var globalSnapshotID uint64 - -func nextSnapshotID() source.GlobalSnapshotID { - return source.GlobalSnapshotID(atomic.AddUint64(&globalSnapshotID, 1)) -} - -var _ memoize.RefCounted = (*snapshot)(nil) // snapshots are reference-counted - -// Acquire prevents the snapshot from being destroyed until the returned function is called. -// -// (s.Acquire().release() could instead be expressed as a pair of -// method calls s.IncRef(); s.DecRef(). The latter has the advantage -// that the DecRefs are fungible and don't require holding anything in -// addition to the refcounted object s, but paradoxically that is also -// an advantage of the current approach, which forces the caller to -// consider the release function at every stage, making a reference -// leak more obvious.) -func (s *snapshot) Acquire() func() { - type uP = unsafe.Pointer - if destroyedBy := atomic.LoadPointer((*uP)(uP(&s.destroyedBy))); destroyedBy != nil { - log.Panicf("%d: acquire() after Destroy(%q)", s.globalID, *(*string)(destroyedBy)) - } - s.refcount.Add(1) - return s.refcount.Done -} - -func (s *snapshot) awaitPromise(ctx context.Context, p *memoize.Promise) (interface{}, error) { - return p.Get(ctx, s) -} - -// destroy waits for all leases on the snapshot to expire then releases -// any resources (reference counts and files) associated with it. -// Snapshots being destroyed can be awaited using v.destroyWG. -// -// TODO(adonovan): move this logic into the release function returned -// by Acquire when the reference count becomes zero. (This would cost -// us the destroyedBy debug info, unless we add it to the signature of -// memoize.RefCounted.Acquire.) -// -// The destroyedBy argument is used for debugging. -// -// v.snapshotMu must be held while calling this function, in order to preserve -// the invariants described by the docstring for v.snapshot. 
-func (v *View) destroy(s *snapshot, destroyedBy string) { - v.snapshotWG.Add(1) - go func() { - defer v.snapshotWG.Done() - s.destroy(destroyedBy) - }() -} - -func (s *snapshot) destroy(destroyedBy string) { - // Wait for all leases to end before commencing destruction. - s.refcount.Wait() - - // Report bad state as a debugging aid. - // Not foolproof: another thread could acquire() at this moment. - type uP = unsafe.Pointer // looking forward to generics... - if old := atomic.SwapPointer((*uP)(uP(&s.destroyedBy)), uP(&destroyedBy)); old != nil { - log.Panicf("%d: Destroy(%q) after Destroy(%q)", s.globalID, destroyedBy, *(*string)(old)) - } - - s.packages.Destroy() - s.activePackages.Destroy() - s.files.Destroy() - s.symbolizeHandles.Destroy() - s.parseModHandles.Destroy() - s.parseWorkHandles.Destroy() - s.modTidyHandles.Destroy() - s.modVulnHandles.Destroy() - s.modWhyHandles.Destroy() - s.unloadableFiles.Destroy() -} - -func (s *snapshot) SequenceID() uint64 { - return s.sequenceID -} - -func (s *snapshot) GlobalID() source.GlobalSnapshotID { - return s.globalID -} - -func (s *snapshot) View() source.View { - return s.view -} - -func (s *snapshot) FileKind(fh source.FileHandle) source.FileKind { - // The kind of an unsaved buffer comes from the - // TextDocumentItem.LanguageID field in the didChange event, - // not from the file name. They may differ. - if o, ok := fh.(*Overlay); ok { - if o.kind != source.UnknownKind { - return o.kind - } - } - - fext := filepath.Ext(fh.URI().Filename()) - switch fext { - case ".go": - return source.Go - case ".mod": - return source.Mod - case ".sum": - return source.Sum - case ".work": - return source.Work - } - exts := s.Options().TemplateExtensions - for _, ext := range exts { - if fext == ext || fext == "."+ext { - return source.Tmpl - } - } - // and now what? 
This should never happen, but it does for cgo before go1.15 - return source.Go -} - -func (s *snapshot) Options() *source.Options { - return s.view.folder.Options -} - -func (s *snapshot) BackgroundContext() context.Context { - return s.backgroundCtx -} - -func (s *snapshot) ModFiles() []span.URI { - var uris []span.URI - for modURI := range s.workspaceModFiles { - uris = append(uris, modURI) - } - return uris -} - -func (s *snapshot) WorkFile() span.URI { - gowork, _ := s.view.GOWORK() - return gowork -} - -func (s *snapshot) Templates() map[span.URI]source.FileHandle { - s.mu.Lock() - defer s.mu.Unlock() - - tmpls := map[span.URI]source.FileHandle{} - s.files.Range(func(k span.URI, fh source.FileHandle) { - if s.FileKind(fh) == source.Tmpl { - tmpls[k] = fh - } - }) - return tmpls -} - -func (s *snapshot) validBuildConfiguration() bool { - // Since we only really understand the `go` command, if the user has a - // different GOPACKAGESDRIVER, assume that their configuration is valid. - if s.view.hasGopackagesDriver { - return true - } - - // Check if the user is working within a module or if we have found - // multiple modules in the workspace. - if len(s.workspaceModFiles) > 0 { - return true - } - - // TODO(rfindley): this should probably be subject to "if GO111MODULES = off {...}". - if s.view.inGOPATH { - return true - } - - return false -} - -// workspaceMode describes the way in which the snapshot's workspace should -// be loaded. -// -// TODO(rfindley): remove this, in favor of specific methods. -func (s *snapshot) workspaceMode() workspaceMode { - var mode workspaceMode - - // If the view has an invalid configuration, don't build the workspace - // module. - validBuildConfiguration := s.validBuildConfiguration() - if !validBuildConfiguration { - return mode - } - // If the view is not in a module and contains no modules, but still has a - // valid workspace configuration, do not create the workspace module. 
- // It could be using GOPATH or a different build system entirely. - if len(s.workspaceModFiles) == 0 && validBuildConfiguration { - return mode - } - mode |= moduleMode - if s.Options().TempModfile { - mode |= tempModfile - } - return mode -} - -// config returns the configuration used for the snapshot's interaction with -// the go/packages API. It uses the given working directory. -// -// TODO(rstambler): go/packages requires that we do not provide overlays for -// multiple modules in on config, so buildOverlay needs to filter overlays by -// module. -func (s *snapshot) config(ctx context.Context, inv *gocommand.Invocation) *packages.Config { - - cfg := &packages.Config{ - Context: ctx, - Dir: inv.WorkingDir, - Env: inv.Env, - BuildFlags: inv.BuildFlags, - Mode: packages.NeedName | - packages.NeedFiles | - packages.NeedCompiledGoFiles | - packages.NeedImports | - packages.NeedDeps | - packages.NeedTypesSizes | - packages.NeedModule | - packages.NeedEmbedFiles | - packages.LoadMode(packagesinternal.DepsErrors) | - packages.LoadMode(packagesinternal.ForTest), - Fset: nil, // we do our own parsing - Overlay: s.buildOverlay(), - ParseFile: func(*token.FileSet, string, []byte) (*ast.File, error) { - panic("go/packages must not be used to parse files") - }, - Logf: func(format string, args ...interface{}) { - if s.Options().VerboseOutput { - event.Log(ctx, fmt.Sprintf(format, args...)) - } - }, - Tests: true, - } - packagesinternal.SetModFile(cfg, inv.ModFile) - packagesinternal.SetModFlag(cfg, inv.ModFlag) - // We want to type check cgo code if go/types supports it. 
- if typesinternal.SetUsesCgo(&types.Config{}) { - cfg.Mode |= packages.LoadMode(packagesinternal.TypecheckCgo) - } - packagesinternal.SetGoCmdRunner(cfg, s.view.gocmdRunner) - return cfg -} - -func (s *snapshot) RunGoCommandDirect(ctx context.Context, mode source.InvocationFlags, inv *gocommand.Invocation) (*bytes.Buffer, error) { - _, inv, cleanup, err := s.goCommandInvocation(ctx, mode, inv) - if err != nil { - return nil, err - } - defer cleanup() - - return s.view.gocmdRunner.Run(ctx, *inv) -} - -func (s *snapshot) RunGoCommandPiped(ctx context.Context, mode source.InvocationFlags, inv *gocommand.Invocation, stdout, stderr io.Writer) error { - _, inv, cleanup, err := s.goCommandInvocation(ctx, mode, inv) - if err != nil { - return err - } - defer cleanup() - return s.view.gocmdRunner.RunPiped(ctx, *inv, stdout, stderr) -} - -func (s *snapshot) RunGoCommands(ctx context.Context, allowNetwork bool, wd string, run func(invoke func(...string) (*bytes.Buffer, error)) error) (bool, []byte, []byte, error) { - var flags source.InvocationFlags - if s.workspaceMode()&tempModfile != 0 { - flags = source.WriteTemporaryModFile - } else { - flags = source.Normal - } - if allowNetwork { - flags |= source.AllowNetwork - } - tmpURI, inv, cleanup, err := s.goCommandInvocation(ctx, flags, &gocommand.Invocation{WorkingDir: wd}) - if err != nil { - return false, nil, nil, err - } - defer cleanup() - invoke := func(args ...string) (*bytes.Buffer, error) { - inv.Verb = args[0] - inv.Args = args[1:] - return s.view.gocmdRunner.Run(ctx, *inv) - } - if err := run(invoke); err != nil { - return false, nil, nil, err - } - if flags.Mode() != source.WriteTemporaryModFile { - return false, nil, nil, nil - } - var modBytes, sumBytes []byte - modBytes, err = os.ReadFile(tmpURI.Filename()) - if err != nil && !os.IsNotExist(err) { - return false, nil, nil, err - } - sumBytes, err = os.ReadFile(strings.TrimSuffix(tmpURI.Filename(), ".mod") + ".sum") - if err != nil && !os.IsNotExist(err) { - 
return false, nil, nil, err - } - return true, modBytes, sumBytes, nil -} - -// goCommandInvocation populates inv with configuration for running go commands on the snapshot. -// -// TODO(rfindley): refactor this function to compose the required configuration -// explicitly, rather than implicitly deriving it from flags and inv. -// -// TODO(adonovan): simplify cleanup mechanism. It's hard to see, but -// it used only after call to tempModFile. Clarify that it is only -// non-nil on success. -func (s *snapshot) goCommandInvocation(ctx context.Context, flags source.InvocationFlags, inv *gocommand.Invocation) (tmpURI span.URI, updatedInv *gocommand.Invocation, cleanup func(), err error) { - allowModfileModificationOption := s.Options().AllowModfileModifications - allowNetworkOption := s.Options().AllowImplicitNetworkAccess - - // TODO(rfindley): this is very hard to follow, and may not even be doing the - // right thing: should inv.Env really trample view.options? Do we ever invoke - // this with a non-empty inv.Env? - // - // We should refactor to make it clearer that the correct env is being used. - inv.Env = append(append(append(os.Environ(), s.Options().EnvSlice()...), inv.Env...), "GO111MODULE="+s.view.GO111MODULE()) - inv.BuildFlags = append([]string{}, s.Options().BuildFlags...) - cleanup = func() {} // fallback - - // All logic below is for module mode. - if s.workspaceMode()&moduleMode == 0 { - return "", inv, cleanup, nil - } - - mode, allowNetwork := flags.Mode(), flags.AllowNetwork() - if !allowNetwork && !allowNetworkOption { - inv.Env = append(inv.Env, "GOPROXY=off") - } - - // What follows is rather complicated logic for how to actually run the go - // command. A word of warning: this is the result of various incremental - // features added to gopls, and varying behavior of the Go command across Go - // versions. It can surely be cleaned up significantly, but tread carefully. 
- // - // Roughly speaking we need to resolve four things: - // - the working directory. - // - the -mod flag - // - the -modfile flag - // - // These are dependent on a number of factors: whether we need to run in a - // synthetic workspace, whether flags are supported at the current go - // version, and what we're actually trying to achieve (the - // source.InvocationFlags). - - var modURI span.URI - // Select the module context to use. - // If we're type checking, we need to use the workspace context, meaning - // the main (workspace) module. Otherwise, we should use the module for - // the passed-in working dir. - if mode == source.LoadWorkspace { - if gowork, _ := s.view.GOWORK(); gowork == "" && s.view.gomod != "" { - modURI = s.view.gomod - } - } else { - modURI = s.GoModForFile(span.URIFromPath(inv.WorkingDir)) - } - - var modContent []byte - if modURI != "" { - modFH, err := s.ReadFile(ctx, modURI) - if err != nil { - return "", nil, cleanup, err - } - modContent, err = modFH.Content() - if err != nil { - return "", nil, cleanup, err - } - } - - // TODO(rfindley): in the case of go.work mode, modURI is empty and we fall - // back on the default behavior of vendorEnabled with an empty modURI. Figure - // out what is correct here and implement it explicitly. - vendorEnabled, err := s.vendorEnabled(ctx, modURI, modContent) - if err != nil { - return "", nil, cleanup, err - } - - const mutableModFlag = "mod" - // If the mod flag isn't set, populate it based on the mode and workspace. 
- // TODO(rfindley): this doesn't make sense if we're not in module mode - if inv.ModFlag == "" { - switch mode { - case source.LoadWorkspace, source.Normal: - if vendorEnabled { - inv.ModFlag = "vendor" - } else if !allowModfileModificationOption { - inv.ModFlag = "readonly" - } else { - inv.ModFlag = mutableModFlag - } - case source.WriteTemporaryModFile: - inv.ModFlag = mutableModFlag - // -mod must be readonly when using go.work files - see issue #48941 - inv.Env = append(inv.Env, "GOWORK=off") - } - } - - // Only use a temp mod file if the modfile can actually be mutated. - needTempMod := inv.ModFlag == mutableModFlag - useTempMod := s.workspaceMode()&tempModfile != 0 - if needTempMod && !useTempMod { - return "", nil, cleanup, source.ErrTmpModfileUnsupported - } - - // We should use -modfile if: - // - the workspace mode supports it - // - we're using a go.work file on go1.18+, or we need a temp mod file (for - // example, if running go mod tidy in a go.work workspace) - // - // TODO(rfindley): this is very hard to follow. Refactor. - if !needTempMod && s.view.gowork != "" { - // Since we're running in the workspace root, the go command will resolve GOWORK automatically. - } else if useTempMod { - if modURI == "" { - return "", nil, cleanup, fmt.Errorf("no go.mod file found in %s", inv.WorkingDir) - } - modFH, err := s.ReadFile(ctx, modURI) - if err != nil { - return "", nil, cleanup, err - } - // Use the go.sum if it happens to be available. - gosum := s.goSum(ctx, modURI) - tmpURI, cleanup, err = tempModFile(modFH, gosum) - if err != nil { - return "", nil, cleanup, err - } - inv.ModFile = tmpURI.Filename() - } - - return tmpURI, inv, cleanup, nil -} - -func (s *snapshot) buildOverlay() map[string][]byte { - overlays := make(map[string][]byte) - for _, overlay := range s.overlays() { - if overlay.saved { - continue - } - // TODO(rfindley): previously, there was a todo here to make sure we don't - // send overlays outside of the current view. 
IMO we should instead make - // sure this doesn't matter. - overlays[overlay.URI().Filename()] = overlay.content - } - return overlays -} - -func (s *snapshot) overlays() []*Overlay { - s.mu.Lock() - defer s.mu.Unlock() - - return s.files.Overlays() -} - -// Package data kinds, identifying various package data that may be stored in -// the file cache. -const ( - xrefsKind = "xrefs" - methodSetsKind = "methodsets" - exportDataKind = "export" - diagnosticsKind = "diagnostics" - typerefsKind = "typerefs" -) - -func (s *snapshot) PackageDiagnostics(ctx context.Context, ids ...PackageID) (map[span.URI][]*source.Diagnostic, error) { - ctx, done := event.Start(ctx, "cache.snapshot.PackageDiagnostics") - defer done() - - var mu sync.Mutex - perFile := make(map[span.URI][]*source.Diagnostic) - collect := func(diags []*source.Diagnostic) { - mu.Lock() - defer mu.Unlock() - for _, diag := range diags { - perFile[diag.URI] = append(perFile[diag.URI], diag) - } - } - pre := func(_ int, ph *packageHandle) bool { - data, err := filecache.Get(diagnosticsKind, ph.key) - if err == nil { // hit - collect(ph.m.Diagnostics) - collect(decodeDiagnostics(data)) - return false - } else if err != filecache.ErrNotFound { - event.Error(ctx, "reading diagnostics from filecache", err) - } - return true - } - post := func(_ int, pkg *Package) { - collect(pkg.m.Diagnostics) - collect(pkg.pkg.diagnostics) - } - return perFile, s.forEachPackage(ctx, ids, pre, post) -} - -func (s *snapshot) References(ctx context.Context, ids ...PackageID) ([]source.XrefIndex, error) { - ctx, done := event.Start(ctx, "cache.snapshot.References") - defer done() - - indexes := make([]source.XrefIndex, len(ids)) - pre := func(i int, ph *packageHandle) bool { - data, err := filecache.Get(xrefsKind, ph.key) - if err == nil { // hit - indexes[i] = XrefIndex{m: ph.m, data: data} - return false - } else if err != filecache.ErrNotFound { - event.Error(ctx, "reading xrefs from filecache", err) - } - return true - } - post := 
func(i int, pkg *Package) { - indexes[i] = XrefIndex{m: pkg.m, data: pkg.pkg.xrefs()} - } - return indexes, s.forEachPackage(ctx, ids, pre, post) -} - -// An XrefIndex is a helper for looking up a package in a given package. -type XrefIndex struct { - m *source.Metadata - data []byte -} - -func (index XrefIndex) Lookup(targets map[PackagePath]map[objectpath.Path]struct{}) []protocol.Location { - return xrefs.Lookup(index.m, index.data, targets) -} - -func (s *snapshot) MethodSets(ctx context.Context, ids ...PackageID) ([]*methodsets.Index, error) { - ctx, done := event.Start(ctx, "cache.snapshot.MethodSets") - defer done() - - indexes := make([]*methodsets.Index, len(ids)) - pre := func(i int, ph *packageHandle) bool { - data, err := filecache.Get(methodSetsKind, ph.key) - if err == nil { // hit - indexes[i] = methodsets.Decode(data) - return false - } else if err != filecache.ErrNotFound { - event.Error(ctx, "reading methodsets from filecache", err) - } - return true - } - post := func(i int, pkg *Package) { - indexes[i] = pkg.pkg.methodsets() - } - return indexes, s.forEachPackage(ctx, ids, pre, post) -} - -func (s *snapshot) MetadataForFile(ctx context.Context, uri span.URI) ([]*source.Metadata, error) { - if s.view.ViewType() == AdHocView { - // As described in golang/go#57209, in ad-hoc workspaces (where we load ./ - // rather than ./...), preempting the directory load with file loads can - // lead to an inconsistent outcome, where certain files are loaded with - // command-line-arguments packages and others are loaded only in the ad-hoc - // package. Therefore, ensure that the workspace is loaded before doing any - // file loads. - if err := s.awaitLoaded(ctx); err != nil { - return nil, err - } - } - - s.mu.Lock() - - // Start with the set of package associations derived from the last load. 
- ids := s.meta.ids[uri] - - shouldLoad := false // whether any packages containing uri are marked 'shouldLoad' - for _, id := range ids { - if len(s.shouldLoad[id]) > 0 { - shouldLoad = true - } - } - - // Check if uri is known to be unloadable. - unloadable := s.unloadableFiles.Contains(uri) - - s.mu.Unlock() - - // Reload if loading is likely to improve the package associations for uri: - // - uri is not contained in any valid packages - // - ...or one of the packages containing uri is marked 'shouldLoad' - // - ...but uri is not unloadable - if (shouldLoad || len(ids) == 0) && !unloadable { - scope := fileLoadScope(uri) - err := s.load(ctx, false, scope) - - // - // Return the context error here as the current operation is no longer - // valid. - if err != nil { - // Guard against failed loads due to context cancellation. We don't want - // to mark loads as completed if they failed due to context cancellation. - if ctx.Err() != nil { - return nil, ctx.Err() - } - - // Don't return an error here, as we may still return stale IDs. - // Furthermore, the result of MetadataForFile should be consistent upon - // subsequent calls, even if the file is marked as unloadable. - if !errors.Is(err, errNoPackages) { - event.Error(ctx, "MetadataForFile", err) - } - } - - // We must clear scopes after loading. - // - // TODO(rfindley): unlike reloadWorkspace, this is simply marking loaded - // packages as loaded. We could do this from snapshot.load and avoid - // raciness. - s.clearShouldLoad(scope) - } - - // Retrieve the metadata. - s.mu.Lock() - defer s.mu.Unlock() - ids = s.meta.ids[uri] - metas := make([]*source.Metadata, len(ids)) - for i, id := range ids { - metas[i] = s.meta.metadata[id] - if metas[i] == nil { - panic("nil metadata") - } - } - // Metadata is only ever added by loading, - // so if we get here and still have - // no IDs, uri is unloadable. 
- if !unloadable && len(ids) == 0 { - s.unloadableFiles.Add(uri) - } - - // Sort packages "narrowest" to "widest" (in practice: - // non-tests before tests), and regular packages before - // their intermediate test variants (which have the same - // files but different imports). - sort.Slice(metas, func(i, j int) bool { - x, y := metas[i], metas[j] - xfiles, yfiles := len(x.CompiledGoFiles), len(y.CompiledGoFiles) - if xfiles != yfiles { - return xfiles < yfiles - } - return boolLess(x.IsIntermediateTestVariant(), y.IsIntermediateTestVariant()) - }) - - return metas, nil -} - -func boolLess(x, y bool) bool { return !x && y } // false < true - -func (s *snapshot) ReverseDependencies(ctx context.Context, id PackageID, transitive bool) (map[PackageID]*source.Metadata, error) { - if err := s.awaitLoaded(ctx); err != nil { - return nil, err - } - s.mu.Lock() - meta := s.meta - s.mu.Unlock() - - var rdeps map[PackageID]*source.Metadata - if transitive { - rdeps = meta.reverseReflexiveTransitiveClosure(id) - - // Remove the original package ID from the map. - // (Callers all want irreflexivity but it's easier - // to compute reflexively then subtract.) - delete(rdeps, id) - - } else { - // direct reverse dependencies - rdeps = make(map[PackageID]*source.Metadata) - for _, rdepID := range meta.importedBy[id] { - if rdep := meta.metadata[rdepID]; rdep != nil { - rdeps[rdepID] = rdep - } - } - } - - return rdeps, nil -} - -// -- Active package tracking -- -// -// We say a package is "active" if any of its files are open. -// This is an optimization: the "active" concept is an -// implementation detail of the cache and is not exposed -// in the source or Snapshot API. -// After type-checking we keep active packages in memory. -// The activePackages persistent map does bookkeeping for -// the set of active packages. - -// getActivePackage returns a the memoized active package for id, if it exists. -// If id is not active or has not yet been type-checked, it returns nil. 
-func (s *snapshot) getActivePackage(id PackageID) *Package { - s.mu.Lock() - defer s.mu.Unlock() - - if value, ok := s.activePackages.Get(id); ok { - return value - } - return nil -} - -// setActivePackage checks if pkg is active, and if so either records it in -// the active packages map or returns the existing memoized active package for id. -func (s *snapshot) setActivePackage(id PackageID, pkg *Package) { - s.mu.Lock() - defer s.mu.Unlock() - - if _, ok := s.activePackages.Get(id); ok { - return // already memoized - } - - if containsOpenFileLocked(s, pkg.Metadata()) { - s.activePackages.Set(id, pkg, nil) - } else { - s.activePackages.Set(id, (*Package)(nil), nil) // remember that pkg is not open - } -} - -func (s *snapshot) resetActivePackagesLocked() { - s.activePackages.Destroy() - s.activePackages = new(persistent.Map[PackageID, *Package]) -} - -func (s *snapshot) fileWatchingGlobPatterns(ctx context.Context) map[string]struct{} { - extensions := "go,mod,sum,work" - for _, ext := range s.Options().TemplateExtensions { - extensions += "," + ext - } - // Work-around microsoft/vscode#100870 by making sure that we are, - // at least, watching the user's entire workspace. This will still be - // applied to every folder in the workspace. - patterns := map[string]struct{}{ - fmt.Sprintf("**/*.{%s}", extensions): {}, - } - - // If GOWORK is outside the folder, ensure we are watching it. - gowork, _ := s.view.GOWORK() - if gowork != "" && !source.InDir(s.view.folder.Dir.Filename(), gowork.Filename()) { - patterns[gowork.Filename()] = struct{}{} - } - - // Add a pattern for each Go module in the workspace that is not within the view. - dirs := s.workspaceDirs(ctx) - for _, dir := range dirs { - // If the directory is within the view's folder, we're already watching - // it with the first pattern above. 
- if source.InDir(s.view.folder.Dir.Filename(), dir) { - continue - } - // TODO(rstambler): If microsoft/vscode#3025 is resolved before - // microsoft/vscode#101042, we will need a work-around for Windows - // drive letter casing. - patterns[fmt.Sprintf("%s/**/*.{%s}", dir, extensions)] = struct{}{} - } - - if s.watchSubdirs() { - // Some clients (e.g. VS Code) do not send notifications for changes to - // directories that contain Go code (golang/go#42348). To handle this, - // explicitly watch all of the directories in the workspace. We find them - // by adding the directories of every file in the snapshot's workspace - // directories. There may be thousands of patterns, each a single - // directory. - // - // We compute this set by looking at files that we've previously observed. - // This may miss changed to directories that we haven't observed, but that - // shouldn't matter as there is nothing to invalidate (if a directory falls - // in forest, etc). - // - // (A previous iteration created a single glob pattern holding a union of - // all the directories, but this was found to cause VS Code to get stuck - // for several minutes after a buffer was saved twice in a workspace that - // had >8000 watched directories.) - // - // Some clients (notably coc.nvim, which uses watchman for globs) perform - // poorly with a large list of individual directories. - s.addKnownSubdirs(patterns, dirs) - } - - return patterns -} - -func (s *snapshot) addKnownSubdirs(patterns map[string]unit, wsDirs []string) { - s.mu.Lock() - defer s.mu.Unlock() - - s.files.Dirs().Range(func(dir string) { - for _, wsDir := range wsDirs { - if source.InDir(wsDir, dir) { - patterns[dir] = unit{} - } - } - }) -} - -// workspaceDirs returns the workspace directories for the loaded modules. -// -// A workspace directory is, roughly speaking, a directory for which we care -// about file changes. 
-func (s *snapshot) workspaceDirs(ctx context.Context) []string { - dirSet := make(map[string]unit) - - // Dirs should, at the very least, contain the working directory and folder. - dirSet[s.view.goCommandDir.Filename()] = unit{} - dirSet[s.view.folder.Dir.Filename()] = unit{} - - // Additionally, if e.g. go.work indicates other workspace modules, we should - // include their directories too. - if s.workspaceModFilesErr == nil { - for modFile := range s.workspaceModFiles { - dir := filepath.Dir(modFile.Filename()) - dirSet[dir] = unit{} - } - } - var dirs []string - for d := range dirSet { - dirs = append(dirs, d) - } - sort.Strings(dirs) - return dirs -} - -// watchSubdirs reports whether gopls should request separate file watchers for -// each relevant subdirectory. This is necessary only for clients (namely VS -// Code) that do not send notifications for individual files in a directory -// when the entire directory is deleted. -func (s *snapshot) watchSubdirs() bool { - switch p := s.Options().SubdirWatchPatterns; p { - case source.SubdirWatchPatternsOn: - return true - case source.SubdirWatchPatternsOff: - return false - case source.SubdirWatchPatternsAuto: - // See the documentation of InternalOptions.SubdirWatchPatterns for an - // explanation of why VS Code gets a different default value here. - // - // Unfortunately, there is no authoritative list of client names, nor any - // requirements that client names do not change. We should update the VS - // Code extension to set a default value of "subdirWatchPatterns" to "on", - // so that this workaround is only temporary. - if s.Options().ClientInfo != nil && s.Options().ClientInfo.Name == "Visual Studio Code" { - return true - } - return false - default: - bug.Reportf("invalid subdirWatchPatterns: %q", p) - return false - } -} - -// filesInDir returns all files observed by the snapshot that are contained in -// a directory with the provided URI. 
-func (s *snapshot) filesInDir(uri span.URI) []span.URI { - s.mu.Lock() - defer s.mu.Unlock() - - dir := uri.Filename() - if !s.files.Dirs().Contains(dir) { - return nil - } - var files []span.URI - s.files.Range(func(uri span.URI, _ source.FileHandle) { - if source.InDir(dir, uri.Filename()) { - files = append(files, uri) - } - }) - return files -} - -func (s *snapshot) WorkspaceMetadata(ctx context.Context) ([]*source.Metadata, error) { - if err := s.awaitLoaded(ctx); err != nil { - return nil, err - } - - s.mu.Lock() - defer s.mu.Unlock() - - meta := make([]*source.Metadata, 0, len(s.workspacePackages)) - for id := range s.workspacePackages { - meta = append(meta, s.meta.metadata[id]) - } - return meta, nil -} - -// Symbols extracts and returns symbol information for every file contained in -// a loaded package. It awaits snapshot loading. -// -// TODO(rfindley): move this to the top of cache/symbols.go -func (s *snapshot) Symbols(ctx context.Context, workspaceOnly bool) (map[span.URI][]source.Symbol, error) { - if err := s.awaitLoaded(ctx); err != nil { - return nil, err - } - - var ( - meta []*source.Metadata - err error - ) - if workspaceOnly { - meta, err = s.WorkspaceMetadata(ctx) - } else { - meta, err = s.AllMetadata(ctx) - } - if err != nil { - return nil, fmt.Errorf("loading metadata: %v", err) - } - - goFiles := make(map[span.URI]struct{}) - for _, m := range meta { - for _, uri := range m.GoFiles { - goFiles[uri] = struct{}{} - } - for _, uri := range m.CompiledGoFiles { - goFiles[uri] = struct{}{} - } - } - - // Symbolize them in parallel. 
- var ( - group errgroup.Group - nprocs = 2 * runtime.GOMAXPROCS(-1) // symbolize is a mix of I/O and CPU - resultMu sync.Mutex - result = make(map[span.URI][]source.Symbol) - ) - group.SetLimit(nprocs) - for uri := range goFiles { - uri := uri - group.Go(func() error { - symbols, err := s.symbolize(ctx, uri) - if err != nil { - return err - } - resultMu.Lock() - result[uri] = symbols - resultMu.Unlock() - return nil - }) - } - // Keep going on errors, but log the first failure. - // Partial results are better than no symbol results. - if err := group.Wait(); err != nil { - event.Error(ctx, "getting snapshot symbols", err) - } - return result, nil -} - -func (s *snapshot) AllMetadata(ctx context.Context) ([]*source.Metadata, error) { - if err := s.awaitLoaded(ctx); err != nil { - return nil, err - } - - s.mu.Lock() - g := s.meta - s.mu.Unlock() - - meta := make([]*source.Metadata, 0, len(g.metadata)) - for _, m := range g.metadata { - meta = append(meta, m) - } - return meta, nil -} - -// TODO(rfindley): clarify that this is only active modules. Or update to just -// use findRootPattern. -func (s *snapshot) GoModForFile(uri span.URI) span.URI { - return moduleForURI(s.workspaceModFiles, uri) -} - -func moduleForURI(modFiles map[span.URI]struct{}, uri span.URI) span.URI { - var match span.URI - for modURI := range modFiles { - if !source.InDir(filepath.Dir(modURI.Filename()), uri.Filename()) { - continue - } - if len(modURI) > len(match) { - match = modURI - } - } - return match -} - -// nearestModFile finds the nearest go.mod file contained in the directory -// containing uri, or a parent of that directory. -// -// The given uri must be a file, not a directory. 
-func nearestModFile(ctx context.Context, uri span.URI, fs source.FileSource) (span.URI, error) { - dir := filepath.Dir(uri.Filename()) - mod, err := findRootPattern(ctx, dir, "go.mod", fs) - if err != nil { - return "", err - } - return span.URIFromPath(mod), nil -} - -func (s *snapshot) Metadata(id PackageID) *source.Metadata { - s.mu.Lock() - defer s.mu.Unlock() - return s.meta.metadata[id] -} - -// clearShouldLoad clears package IDs that no longer need to be reloaded after -// scopes has been loaded. -func (s *snapshot) clearShouldLoad(scopes ...loadScope) { - s.mu.Lock() - defer s.mu.Unlock() - - for _, scope := range scopes { - switch scope := scope.(type) { - case packageLoadScope: - scopePath := PackagePath(scope) - var toDelete []PackageID - for id, pkgPaths := range s.shouldLoad { - for _, pkgPath := range pkgPaths { - if pkgPath == scopePath { - toDelete = append(toDelete, id) - } - } - } - for _, id := range toDelete { - delete(s.shouldLoad, id) - } - case fileLoadScope: - uri := span.URI(scope) - ids := s.meta.ids[uri] - for _, id := range ids { - delete(s.shouldLoad, id) - } - } - } -} - -func (s *snapshot) FindFile(uri span.URI) source.FileHandle { - s.view.markKnown(uri) - - s.mu.Lock() - defer s.mu.Unlock() - - result, _ := s.files.Get(uri) - return result -} - -// ReadFile returns a File for the given URI. If the file is unknown it is added -// to the managed set. -// -// ReadFile succeeds even if the file does not exist. A non-nil error return -// indicates some type of internal error, for example if ctx is cancelled. -func (s *snapshot) ReadFile(ctx context.Context, uri span.URI) (source.FileHandle, error) { - s.mu.Lock() - defer s.mu.Unlock() - - return lockedSnapshot{s}.ReadFile(ctx, uri) -} - -// preloadFiles delegates to the view FileSource to read the requested uris in -// parallel, without holding the snapshot lock. 
-func (s *snapshot) preloadFiles(ctx context.Context, uris []span.URI) { - files := make([]source.FileHandle, len(uris)) - var wg sync.WaitGroup - iolimit := make(chan struct{}, 20) // I/O concurrency limiting semaphore - for i, uri := range uris { - wg.Add(1) - iolimit <- struct{}{} - go func(i int, uri span.URI) { - defer wg.Done() - fh, err := s.view.fs.ReadFile(ctx, uri) - <-iolimit - if err != nil && ctx.Err() == nil { - event.Error(ctx, fmt.Sprintf("reading %s", uri), err) - return - } - files[i] = fh - }(i, uri) - } - wg.Wait() - - s.mu.Lock() - defer s.mu.Unlock() - - for i, fh := range files { - if fh == nil { - continue // error logged above - } - uri := uris[i] - if _, ok := s.files.Get(uri); !ok { - s.files.Set(uri, fh) - } - } -} - -// A lockedSnapshot implements the source.FileSource interface while holding -// the lock for the wrapped snapshot. -type lockedSnapshot struct{ wrapped *snapshot } - -func (s lockedSnapshot) ReadFile(ctx context.Context, uri span.URI) (source.FileHandle, error) { - s.wrapped.view.markKnown(uri) - if fh, ok := s.wrapped.files.Get(uri); ok { - return fh, nil - } - - fh, err := s.wrapped.view.fs.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - s.wrapped.files.Set(uri, fh) - return fh, nil -} - -func (s *snapshot) IsOpen(uri span.URI) bool { - s.mu.Lock() - defer s.mu.Unlock() - - fh, _ := s.files.Get(uri) - _, open := fh.(*Overlay) - return open -} - -// TODO(rfindley): it would make sense for awaitLoaded to return metadata. -func (s *snapshot) awaitLoaded(ctx context.Context) error { - loadErr := s.awaitLoadedAllErrors(ctx) - - // TODO(rfindley): eliminate this function as part of simplifying - // CriticalErrors. - if loadErr != nil { - return loadErr.MainError - } - return nil -} - -func (s *snapshot) CriticalError(ctx context.Context) *source.CriticalError { - // If we couldn't compute workspace mod files, then the load below is - // invalid. 
- // - // TODO(rfindley): is this a clear error to present to the user? - if s.workspaceModFilesErr != nil { - return &source.CriticalError{MainError: s.workspaceModFilesErr} - } - - loadErr := s.awaitLoadedAllErrors(ctx) - if loadErr != nil && errors.Is(loadErr.MainError, context.Canceled) { - return nil - } - - // Even if packages didn't fail to load, we still may want to show - // additional warnings. - if loadErr == nil { - active, _ := s.WorkspaceMetadata(ctx) - if msg := shouldShowAdHocPackagesWarning(s, active); msg != "" { - return &source.CriticalError{ - MainError: errors.New(msg), - } - } - // Even if workspace packages were returned, there still may be an error - // with the user's workspace layout. Workspace packages that only have the - // ID "command-line-arguments" are usually a symptom of a bad workspace - // configuration. - // - // This heuristic is path-dependent: we only get command-line-arguments - // packages when we've loaded using file scopes, which only occurs - // on-demand or via orphaned file reloading. - // - // TODO(rfindley): re-evaluate this heuristic. - if containsCommandLineArguments(active) { - err, diags := s.workspaceLayoutError(ctx) - if err != nil { - if ctx.Err() != nil { - return nil // see the API documentation for source.Snapshot - } - return &source.CriticalError{ - MainError: err, - Diagnostics: diags, - } - } - } - return nil - } - - if errMsg := loadErr.MainError.Error(); strings.Contains(errMsg, "cannot find main module") || strings.Contains(errMsg, "go.mod file not found") { - err, diags := s.workspaceLayoutError(ctx) - if err != nil { - if ctx.Err() != nil { - return nil // see the API documentation for source.Snapshot - } - return &source.CriticalError{ - MainError: err, - Diagnostics: diags, - } - } - } - return loadErr -} - -// A portion of this text is expected by TestBrokenWorkspace_OutsideModule. -const adHocPackagesWarning = `You are outside of a module and outside of $GOPATH/src. 
-If you are using modules, please open your editor to a directory in your module. -If you believe this warning is incorrect, please file an issue: https://github.com/golang/go/issues/new.` - -func shouldShowAdHocPackagesWarning(snapshot *snapshot, active []*source.Metadata) string { - if !snapshot.validBuildConfiguration() { - for _, m := range active { - // A blank entry in DepsByImpPath - // indicates a missing dependency. - for _, importID := range m.DepsByImpPath { - if importID == "" { - return adHocPackagesWarning - } - } - } - } - return "" -} - -func containsCommandLineArguments(metas []*source.Metadata) bool { - for _, m := range metas { - if source.IsCommandLineArguments(m.ID) { - return true - } - } - return false -} - -func (s *snapshot) awaitLoadedAllErrors(ctx context.Context) *source.CriticalError { - // Do not return results until the snapshot's view has been initialized. - s.AwaitInitialized(ctx) - - // TODO(rfindley): Should we be more careful about returning the - // initialization error? Is it possible for the initialization error to be - // corrected without a successful reinitialization? - if err := s.getInitializationError(); err != nil { - return err - } - - // TODO(rfindley): revisit this handling. Calling reloadWorkspace with a - // cancelled context should have the same effect, so this preemptive handling - // should not be necessary. - // - // Also: GetCriticalError ignores context cancellation errors. Should we be - // returning nil here? - if ctx.Err() != nil { - return &source.CriticalError{MainError: ctx.Err()} - } - - // TODO(rfindley): reloading is not idempotent: if we try to reload or load - // orphaned files below and fail, we won't try again. For that reason, we - // could get different results from subsequent calls to this function, which - // may cause critical errors to be suppressed. 
- - if err := s.reloadWorkspace(ctx); err != nil { - diags := s.extractGoCommandErrors(ctx, err) - return &source.CriticalError{ - MainError: err, - Diagnostics: diags, - } - } - - if err := s.reloadOrphanedOpenFiles(ctx); err != nil { - diags := s.extractGoCommandErrors(ctx, err) - return &source.CriticalError{ - MainError: err, - Diagnostics: diags, - } - } - return nil -} - -func (s *snapshot) getInitializationError() *source.CriticalError { - s.mu.Lock() - defer s.mu.Unlock() - - return s.initializedErr -} - -func (s *snapshot) AwaitInitialized(ctx context.Context) { - select { - case <-ctx.Done(): - return - case <-s.view.initialWorkspaceLoad: - } - // We typically prefer to run something as intensive as the IWL without - // blocking. I'm not sure if there is a way to do that here. - s.initialize(ctx, false) -} - -// reloadWorkspace reloads the metadata for all invalidated workspace packages. -func (s *snapshot) reloadWorkspace(ctx context.Context) error { - var scopes []loadScope - var seen map[PackagePath]bool - s.mu.Lock() - for _, pkgPaths := range s.shouldLoad { - for _, pkgPath := range pkgPaths { - if seen == nil { - seen = make(map[PackagePath]bool) - } - if seen[pkgPath] { - continue - } - seen[pkgPath] = true - scopes = append(scopes, packageLoadScope(pkgPath)) - } - } - s.mu.Unlock() - - if len(scopes) == 0 { - return nil - } - - // If the view's build configuration is invalid, we cannot reload by - // package path. Just reload the directory instead. - if !s.validBuildConfiguration() { - scopes = []loadScope{viewLoadScope("LOAD_INVALID_VIEW")} - } - - err := s.load(ctx, false, scopes...) - - // Unless the context was canceled, set "shouldLoad" to false for all - // of the metadata we attempted to load. - if !errors.Is(err, context.Canceled) { - s.clearShouldLoad(scopes...) - } - - return err -} - -// reloadOrphanedOpenFiles attempts to load a package for each open file that -// does not yet have an associated package. 
If loading finishes without being -// canceled, any files still not contained in a package are marked as unloadable. -// -// An error is returned if the load is canceled. -func (s *snapshot) reloadOrphanedOpenFiles(ctx context.Context) error { - s.mu.Lock() - meta := s.meta - s.mu.Unlock() - // When we load ./... or a package path directly, we may not get packages - // that exist only in overlays. As a workaround, we search all of the files - // available in the snapshot and reload their metadata individually using a - // file= query if the metadata is unavailable. - open := s.overlays() - var files []*Overlay - for _, o := range open { - uri := o.URI() - if s.IsBuiltin(uri) || s.FileKind(o) != source.Go { - continue - } - if len(meta.ids[uri]) == 0 { - files = append(files, o) - } - } - - // Filter to files that are not known to be unloadable. - s.mu.Lock() - loadable := files[:0] - for _, file := range files { - if !s.unloadableFiles.Contains(file.URI()) { - loadable = append(loadable, file) - } - } - files = loadable - s.mu.Unlock() - - if len(files) == 0 { - return nil - } - - var uris []span.URI - for _, file := range files { - uris = append(uris, file.URI()) - } - - event.Log(ctx, "reloadOrphanedFiles reloading", tag.Files.Of(uris)) - - var g errgroup.Group - - cpulimit := runtime.GOMAXPROCS(0) - g.SetLimit(cpulimit) - - // Load files one-at-a-time. go/packages can return at most one - // command-line-arguments package per query. - for _, file := range files { - file := file - g.Go(func() error { - return s.load(ctx, false, fileLoadScope(file.URI())) - }) - } - - // If we failed to load some files, i.e. they have no metadata, - // mark the failures so we don't bother retrying until the file's - // content changes. - // - // TODO(rfindley): is it possible that the load stopped early for an - // unrelated errors? If so, add a fallback? 
- - if err := g.Wait(); err != nil { - // Check for context cancellation so that we don't incorrectly mark files - // as unloadable, but don't return before setting all workspace packages. - if ctx.Err() != nil { - return ctx.Err() - } - - if !errors.Is(err, errNoPackages) { - event.Error(ctx, "reloadOrphanedFiles: failed to load", err, tag.Files.Of(uris)) - } - } - - // If the context was not canceled, we assume that the result of loading - // packages is deterministic (though as we saw in golang/go#59318, it may not - // be in the presence of bugs). Marking all unloaded files as unloadable here - // prevents us from falling into recursive reloading where we only make a bit - // of progress each time. - s.mu.Lock() - defer s.mu.Unlock() - for _, file := range files { - // TODO(rfindley): instead of locking here, we should have load return the - // metadata graph that resulted from loading. - uri := file.URI() - if len(s.meta.ids[uri]) == 0 { - s.unloadableFiles.Add(uri) - } - } - - return nil -} - -// OrphanedFileDiagnostics reports diagnostics describing why open files have -// no packages or have only command-line-arguments packages. -// -// If the resulting diagnostic is nil, the file is either not orphaned or we -// can't produce a good diagnostic. -// -// TODO(rfindley): reconcile the definition of "orphaned" here with -// reloadOrphanedFiles. The latter does not include files with -// command-line-arguments packages. 
-func (s *snapshot) OrphanedFileDiagnostics(ctx context.Context) (map[span.URI]*source.Diagnostic, error) { - if err := s.awaitLoaded(ctx); err != nil { - return nil, err - } - - var files []*Overlay - -searchOverlays: - for _, o := range s.overlays() { - uri := o.URI() - if s.IsBuiltin(uri) || s.FileKind(o) != source.Go { - continue - } - md, err := s.MetadataForFile(ctx, uri) - if err != nil { - return nil, err - } - for _, m := range md { - if !source.IsCommandLineArguments(m.ID) || m.Standalone { - continue searchOverlays - } - } - files = append(files, o) - } - if len(files) == 0 { - return nil, nil - } - - loadedModFiles := make(map[span.URI]struct{}) // all mod files, including dependencies - ignoredFiles := make(map[span.URI]bool) // files reported in packages.Package.IgnoredFiles - - meta, err := s.AllMetadata(ctx) - if err != nil { - return nil, err - } - - for _, meta := range meta { - if meta.Module != nil && meta.Module.GoMod != "" { - gomod := span.URIFromPath(meta.Module.GoMod) - loadedModFiles[gomod] = struct{}{} - } - for _, ignored := range meta.IgnoredFiles { - ignoredFiles[ignored] = true - } - } - - diagnostics := make(map[span.URI]*source.Diagnostic) - for _, fh := range files { - // Only warn about orphaned files if the file is well-formed enough to - // actually be part of a package. - // - // Use ParseGo as for open files this is likely to be a cache hit (we'll have ) - pgf, err := s.ParseGo(ctx, fh, source.ParseHeader) - if err != nil { - continue - } - if !pgf.File.Name.Pos().IsValid() { - continue - } - rng, err := pgf.PosRange(pgf.File.Name.Pos(), pgf.File.Name.End()) - if err != nil { - continue - } - - var ( - msg string // if non-empty, report a diagnostic with this message - suggestedFixes []source.SuggestedFix // associated fixes, if any - ) - - // If we have a relevant go.mod file, check whether the file is orphaned - // due to its go.mod file being inactive. 
We could also offer a - // prescriptive diagnostic in the case that there is no go.mod file, but it - // is harder to be precise in that case, and less important. - if goMod, err := nearestModFile(ctx, fh.URI(), s); err == nil && goMod != "" { - if _, ok := loadedModFiles[goMod]; !ok { - modDir := filepath.Dir(goMod.Filename()) - viewDir := s.view.folder.Dir.Filename() - - // When the module is underneath the view dir, we offer - // "use all modules" quick-fixes. - inDir := source.InDir(viewDir, modDir) - - if rel, err := filepath.Rel(viewDir, modDir); err == nil { - modDir = rel - } - - var fix string - if s.view.goversion >= 18 { - if s.view.gowork != "" { - fix = fmt.Sprintf("To fix this problem, you can add this module to your go.work file (%s)", s.view.gowork) - if cmd, err := command.NewRunGoWorkCommandCommand("Run `go work use`", command.RunGoWorkArgs{ - ViewID: s.view.ID(), - Args: []string{"use", modDir}, - }); err == nil { - suggestedFixes = append(suggestedFixes, source.SuggestedFix{ - Title: "Use this module in your go.work file", - Command: &cmd, - ActionKind: protocol.QuickFix, - }) - } - - if inDir { - if cmd, err := command.NewRunGoWorkCommandCommand("Run `go work use -r`", command.RunGoWorkArgs{ - ViewID: s.view.ID(), - Args: []string{"use", "-r", "."}, - }); err == nil { - suggestedFixes = append(suggestedFixes, source.SuggestedFix{ - Title: "Use all modules in your workspace", - Command: &cmd, - ActionKind: protocol.QuickFix, - }) - } - } - } else { - fix = "To fix this problem, you can add a go.work file that uses this directory." 
- - if cmd, err := command.NewRunGoWorkCommandCommand("Run `go work init && go work use`", command.RunGoWorkArgs{ - ViewID: s.view.ID(), - InitFirst: true, - Args: []string{"use", modDir}, - }); err == nil { - suggestedFixes = []source.SuggestedFix{ - { - Title: "Add a go.work file using this module", - Command: &cmd, - ActionKind: protocol.QuickFix, - }, - } - } - - if inDir { - if cmd, err := command.NewRunGoWorkCommandCommand("Run `go work init && go work use -r`", command.RunGoWorkArgs{ - ViewID: s.view.ID(), - InitFirst: true, - Args: []string{"use", "-r", "."}, - }); err == nil { - suggestedFixes = append(suggestedFixes, source.SuggestedFix{ - Title: "Add a go.work file using all modules in your workspace", - Command: &cmd, - ActionKind: protocol.QuickFix, - }) - } - } - } - } else { - fix = `To work with multiple modules simultaneously, please upgrade to Go 1.18 or -later, reinstall gopls, and use a go.work file.` - } - msg = fmt.Sprintf(`This file is within module %q, which is not included in your workspace. -%s -See the documentation for more information on setting up your workspace: -https://github.com/golang/tools/blob/master/gopls/doc/workspace.md.`, modDir, fix) - } - } - - if msg == "" && ignoredFiles[fh.URI()] { - // TODO(rfindley): use the constraint package to check if the file - // _actually_ satisfies the current build context. 
- hasConstraint := false - walkConstraints(pgf.File, func(constraint.Expr) bool { - hasConstraint = true - return false - }) - var fix string - if hasConstraint { - fix = `This file may be excluded due to its build tags; try adding "-tags=" to your gopls "buildFlags" configuration -See the documentation for more information on working with build tags: -https://github.com/golang/tools/blob/master/gopls/doc/settings.md#buildflags-string.` - } else if strings.Contains(filepath.Base(fh.URI().Filename()), "_") { - fix = `This file may be excluded due to its GOOS/GOARCH, or other build constraints.` - } else { - fix = `This file is ignored by your gopls build.` // we don't know why - } - msg = fmt.Sprintf("No packages found for open file %s.\n%s", fh.URI().Filename(), fix) - } - - if msg != "" { - d := &source.Diagnostic{ - URI: fh.URI(), - Range: rng, - Severity: protocol.SeverityWarning, - Source: source.ListError, - Message: msg, - SuggestedFixes: suggestedFixes, - } - if ok := source.BundleQuickFixes(d); !ok { - bug.Reportf("failed to bundle quick fixes for %v", d) - } - // Only report diagnostics if we detect an actual exclusion. - diagnostics[fh.URI()] = d - } - } - return diagnostics, nil -} - -// TODO(golang/go#53756): this function needs to consider more than just the -// absolute URI, for example: -// - the position of /vendor/ with respect to the relevant module root -// - whether or not go.work is in use (as vendoring isn't supported in workspace mode) -// -// Most likely, each call site of inVendor needs to be reconsidered to -// understand and correctly implement the desired behavior. -func inVendor(uri span.URI) bool { - _, after, found := strings.Cut(string(uri), "/vendor/") - // Only subdirectories of /vendor/ are considered vendored - // (/vendor/a/foo.go is vendored, /vendor/foo.go is not). 
- return found && strings.Contains(after, "/") -} - -func (s *snapshot) clone(ctx context.Context, changes map[span.URI]source.FileHandle) (*snapshot, func()) { - ctx, done := event.Start(ctx, "cache.snapshot.clone") - defer done() - - s.mu.Lock() - defer s.mu.Unlock() - - backgroundCtx, cancel := context.WithCancel(event.Detach(xcontext.Detach(s.backgroundCtx))) - result := &snapshot{ - sequenceID: s.sequenceID + 1, - globalID: nextSnapshotID(), - store: s.store, - view: s.view, - backgroundCtx: backgroundCtx, - cancel: cancel, - builtin: s.builtin, - initialized: s.initialized, - initializedErr: s.initializedErr, - packages: s.packages.Clone(), - activePackages: s.activePackages.Clone(), - files: s.files.Clone(changes), - symbolizeHandles: cloneWithout(s.symbolizeHandles, changes), - workspacePackages: make(map[PackageID]PackagePath, len(s.workspacePackages)), - unloadableFiles: s.unloadableFiles.Clone(), // not cloneWithout: typing in a file doesn't necessarily make it loadable - parseModHandles: cloneWithout(s.parseModHandles, changes), - parseWorkHandles: cloneWithout(s.parseWorkHandles, changes), - modTidyHandles: cloneWithout(s.modTidyHandles, changes), - modWhyHandles: cloneWithout(s.modWhyHandles, changes), - modVulnHandles: cloneWithout(s.modVulnHandles, changes), - workspaceModFiles: s.workspaceModFiles, - workspaceModFilesErr: s.workspaceModFilesErr, - importGraph: s.importGraph, - pkgIndex: s.pkgIndex, - } - - // Create a lease on the new snapshot. - // (Best to do this early in case the code below hides an - // incref/decref operation that might destroy it prematurely.) - release := result.Acquire() - - reinit := false - - // Changes to vendor tree may require reinitialization, - // either because of an initialization error - // (e.g. "inconsistent vendoring detected"), or because - // one or more modules may have moved into or out of the - // vendor tree after 'go mod vendor' or 'rm -fr vendor/'. 
- // - // TODO(rfindley): revisit the location of this check. - for uri := range changes { - if inVendor(uri) && s.initializedErr != nil || - strings.HasSuffix(string(uri), "/vendor/modules.txt") { - reinit = true - break - } - } - - // Collect observed file handles for changed URIs from the old snapshot, if - // they exist. Importantly, we don't call ReadFile here: consider the case - // where a file is added on disk; we don't want to read the newly added file - // into the old snapshot, as that will break our change detection below. - oldFiles := make(map[span.URI]source.FileHandle) - for uri := range changes { - if fh, ok := s.files.Get(uri); ok { - oldFiles[uri] = fh - } - } - // changedOnDisk determines if the new file handle may have changed on disk. - // It over-approximates, returning true if the new file is saved and either - // the old file wasn't saved, or the on-disk contents changed. - // - // oldFH may be nil. - changedOnDisk := func(oldFH, newFH source.FileHandle) bool { - if !newFH.SameContentsOnDisk() { - return false - } - if oe, ne := (oldFH != nil && fileExists(oldFH)), fileExists(newFH); !oe || !ne { - return oe != ne - } - return !oldFH.SameContentsOnDisk() || oldFH.FileIdentity() != newFH.FileIdentity() - } - - if workURI, _ := s.view.GOWORK(); workURI != "" { - if newFH, ok := changes[workURI]; ok { - result.workspaceModFiles, result.workspaceModFilesErr = computeWorkspaceModFiles(ctx, s.view.gomod, workURI, s.view.effectiveGO111MODULE(), result) - if changedOnDisk(oldFiles[workURI], newFH) { - reinit = true - } - } - } - - // Reinitialize if any workspace mod file has changed on disk. - for uri, newFH := range changes { - if _, ok := result.workspaceModFiles[uri]; ok && changedOnDisk(oldFiles[uri], newFH) { - reinit = true - } - } - - // Finally, process sumfile changes that may affect loading. 
- for uri, newFH := range changes { - if !changedOnDisk(oldFiles[uri], newFH) { - continue // like with go.mod files, we only reinit when things change on disk - } - dir, base := filepath.Split(uri.Filename()) - if base == "go.work.sum" && s.view.gowork != "" { - if dir == filepath.Dir(s.view.gowork) { - reinit = true - } - } - if base == "go.sum" { - modURI := span.URIFromPath(filepath.Join(dir, "go.mod")) - if _, active := result.workspaceModFiles[modURI]; active { - reinit = true - } - } - } - - // The snapshot should be initialized if either s was uninitialized, or we've - // detected a change that triggers reinitialization. - if reinit { - result.initialized = false - } - - // directIDs keeps track of package IDs that have directly changed. - // Note: this is not a set, it's a map from id to invalidateMetadata. - directIDs := map[PackageID]bool{} - - // Invalidate all package metadata if the workspace module has changed. - if reinit { - for k := range s.meta.metadata { - // TODO(rfindley): this seems brittle; can we just start over? - directIDs[k] = true - } - } - - // Compute invalidations based on file changes. - anyImportDeleted := false // import deletions can resolve cycles - anyFileOpenedOrClosed := false // opened files affect workspace packages - anyFileAdded := false // adding a file can resolve missing dependencies - - for uri, newFH := range changes { - // The original FileHandle for this URI is cached on the snapshot. - oldFH, _ := oldFiles[uri] // may be nil - _, oldOpen := oldFH.(*Overlay) - _, newOpen := newFH.(*Overlay) - - anyFileOpenedOrClosed = anyFileOpenedOrClosed || (oldOpen != newOpen) - anyFileAdded = anyFileAdded || (oldFH == nil || !fileExists(oldFH)) && fileExists(newFH) - - // If uri is a Go file, check if it has changed in a way that would - // invalidate metadata. Note that we can't use s.view.FileKind here, - // because the file type that matters is not what the *client* tells us, - // but what the Go command sees. 
- var invalidateMetadata, pkgFileChanged, importDeleted bool - if strings.HasSuffix(uri.Filename(), ".go") { - invalidateMetadata, pkgFileChanged, importDeleted = metadataChanges(ctx, s, oldFH, newFH) - } - if invalidateMetadata { - // If this is a metadata-affecting change, perhaps a reload will succeed. - result.unloadableFiles.Remove(uri) - } - - invalidateMetadata = invalidateMetadata || reinit - anyImportDeleted = anyImportDeleted || importDeleted - - // Mark all of the package IDs containing the given file. - filePackageIDs := invalidatedPackageIDs(uri, s.meta.ids, pkgFileChanged) - for id := range filePackageIDs { - directIDs[id] = directIDs[id] || invalidateMetadata // may insert 'false' - } - - // Invalidate the previous modTidyHandle if any of the files have been - // saved or if any of the metadata has been invalidated. - // - // TODO(rfindley): this seems like too-aggressive invalidation of mod - // results. We should instead thread through overlays to the Go command - // invocation and only run this if invalidateMetadata (and perhaps then - // still do it less frequently). - if invalidateMetadata || fileWasSaved(oldFH, newFH) { - // Only invalidate mod tidy results for the most relevant modfile in the - // workspace. This is a potentially lossy optimization for workspaces - // with many modules (such as google-cloud-go, which has 145 modules as - // of writing). - // - // While it is theoretically possible that a change in workspace module A - // could affect the mod-tidiness of workspace module B (if B transitively - // requires A), such changes are probably unlikely and not worth the - // penalty of re-running go mod tidy for everything. Note that mod tidy - // ignores GOWORK, so the two modules would have to be related by a chain - // of replace directives. - // - // We could improve accuracy by inspecting replace directives, using - // overlays in go mod tidy, and/or checking for metadata changes from the - // on-disk content. 
- // - // Note that we iterate the modTidyHandles map here, rather than e.g. - // using nearestModFile, because we don't have access to an accurate - // FileSource at this point in the snapshot clone. - const onlyInvalidateMostRelevant = true - if onlyInvalidateMostRelevant { - deleteMostRelevantModFile(result.modTidyHandles, uri) - } else { - result.modTidyHandles.Clear() - } - - // TODO(rfindley): should we apply the above heuristic to mod vuln or mod - // why handles as well? - // - // TODO(rfindley): no tests fail if I delete the line below. - result.modWhyHandles.Clear() - result.modVulnHandles.Clear() - } - } - - // Deleting an import can cause list errors due to import cycles to be - // resolved. The best we can do without parsing the list error message is to - // hope that list errors may have been resolved by a deleted import. - // - // We could do better by parsing the list error message. We already do this - // to assign a better range to the list error, but for such critical - // functionality as metadata, it's better to be conservative until it proves - // impractical. - // - // We could also do better by looking at which imports were deleted and - // trying to find cycles they are involved in. This fails when the file goes - // from an unparseable state to a parseable state, as we don't have a - // starting point to compare with. - if anyImportDeleted { - for id, metadata := range s.meta.metadata { - if len(metadata.Errors) > 0 { - directIDs[id] = true - } - } - } - - // Adding a file can resolve missing dependencies from existing packages. - // - // We could be smart here and try to guess which packages may have been - // fixed, but until that proves necessary, just invalidate metadata for any - // package with missing dependencies. 
- if anyFileAdded { - for id, metadata := range s.meta.metadata { - for _, impID := range metadata.DepsByImpPath { - if impID == "" { // missing import - directIDs[id] = true - break - } - } - } - } - - // Invalidate reverse dependencies too. - // idsToInvalidate keeps track of transitive reverse dependencies. - // If an ID is present in the map, invalidate its types. - // If an ID's value is true, invalidate its metadata too. - idsToInvalidate := map[PackageID]bool{} - var addRevDeps func(PackageID, bool) - addRevDeps = func(id PackageID, invalidateMetadata bool) { - current, seen := idsToInvalidate[id] - newInvalidateMetadata := current || invalidateMetadata - - // If we've already seen this ID, and the value of invalidate - // metadata has not changed, we can return early. - if seen && current == newInvalidateMetadata { - return - } - idsToInvalidate[id] = newInvalidateMetadata - for _, rid := range s.meta.importedBy[id] { - addRevDeps(rid, invalidateMetadata) - } - } - for id, invalidateMetadata := range directIDs { - addRevDeps(id, invalidateMetadata) - } - - // Invalidated package information. - for id, invalidateMetadata := range idsToInvalidate { - if _, ok := directIDs[id]; ok || invalidateMetadata { - result.packages.Delete(id) - } else { - if entry, hit := result.packages.Get(id); hit { - ph := entry.clone(false) - result.packages.Set(id, ph, nil) - } - } - result.activePackages.Delete(id) - } - - // Any packages that need loading in s still need loading in the new - // snapshot. - for k, v := range s.shouldLoad { - if result.shouldLoad == nil { - result.shouldLoad = make(map[PackageID][]PackagePath) - } - result.shouldLoad[k] = v - } - - // Compute which metadata updates are required. We only need to invalidate - // packages directly containing the affected file, and only if it changed in - // a relevant way. 
- metadataUpdates := make(map[PackageID]*source.Metadata) - for k, v := range s.meta.metadata { - invalidateMetadata := idsToInvalidate[k] - - // For metadata that has been newly invalidated, capture package paths - // requiring reloading in the shouldLoad map. - if invalidateMetadata && !source.IsCommandLineArguments(v.ID) { - if result.shouldLoad == nil { - result.shouldLoad = make(map[PackageID][]PackagePath) - } - needsReload := []PackagePath{v.PkgPath} - if v.ForTest != "" && v.ForTest != v.PkgPath { - // When reloading test variants, always reload their ForTest package as - // well. Otherwise, we may miss test variants in the resulting load. - // - // TODO(rfindley): is this actually sufficient? Is it possible that - // other test variants may be invalidated? Either way, we should - // determine exactly what needs to be reloaded here. - needsReload = append(needsReload, v.ForTest) - } - result.shouldLoad[k] = needsReload - } - - // Check whether the metadata should be deleted. - if invalidateMetadata { - metadataUpdates[k] = nil - continue - } - - } - - // Update metadata, if necessary. - result.meta = s.meta.Clone(metadataUpdates) - - // Update workspace and active packages, if necessary. - if result.meta != s.meta || anyFileOpenedOrClosed { - result.workspacePackages = computeWorkspacePackagesLocked(result, result.meta) - result.resetActivePackagesLocked() - } else { - result.workspacePackages = s.workspacePackages - } - - // Don't bother copying the importedBy graph, - // as it changes each time we update metadata. - - // TODO(rfindley): consolidate the this workspace mode detection with - // workspace invalidation. - workspaceModeChanged := s.workspaceMode() != result.workspaceMode() - - // If the snapshot's workspace mode has changed, the packages loaded using - // the previous mode are no longer relevant, so clear them out. 
- if workspaceModeChanged { - result.workspacePackages = map[PackageID]PackagePath{} - } - return result, release -} - -func cloneWithout[V any](m *persistent.Map[span.URI, V], changes map[span.URI]source.FileHandle) *persistent.Map[span.URI, V] { - m2 := m.Clone() - for k := range changes { - m2.Delete(k) - } - return m2 -} - -// deleteMostRelevantModFile deletes the mod file most likely to be the mod -// file for the changed URI, if it exists. -// -// Specifically, this is the longest mod file path in a directory containing -// changed. This might not be accurate if there is another mod file closer to -// changed that happens not to be present in the map, but that's OK: the goal -// of this function is to guarantee that IF the nearest mod file is present in -// the map, it is invalidated. -func deleteMostRelevantModFile(m *persistent.Map[span.URI, *memoize.Promise], changed span.URI) { - var mostRelevant span.URI - changedFile := changed.Filename() - - m.Range(func(modURI span.URI, _ *memoize.Promise) { - if len(modURI) > len(mostRelevant) { - if source.InDir(filepath.Dir(modURI.Filename()), changedFile) { - mostRelevant = modURI - } - } - }) - if mostRelevant != "" { - m.Delete(mostRelevant) - } -} - -// invalidatedPackageIDs returns all packages invalidated by a change to uri. -// If we haven't seen this URI before, we guess based on files in the same -// directory. This is of course incorrect in build systems where packages are -// not organized by directory. -// -// If packageFileChanged is set, the file is either a new file, or has a new -// package name. In this case, all known packages in the directory will be -// invalidated. -func invalidatedPackageIDs(uri span.URI, known map[span.URI][]PackageID, packageFileChanged bool) map[PackageID]struct{} { - invalidated := make(map[PackageID]struct{}) - - // At a minimum, we invalidate packages known to contain uri. 
- for _, id := range known[uri] { - invalidated[id] = struct{}{} - } - - // If the file didn't move to a new package, we should only invalidate the - // packages it is currently contained inside. - if !packageFileChanged && len(invalidated) > 0 { - return invalidated - } - - // This is a file we don't yet know about, or which has moved packages. Guess - // relevant packages by considering files in the same directory. - - // Cache of FileInfo to avoid unnecessary stats for multiple files in the - // same directory. - stats := make(map[string]struct { - os.FileInfo - error - }) - getInfo := func(dir string) (os.FileInfo, error) { - if res, ok := stats[dir]; ok { - return res.FileInfo, res.error - } - fi, err := os.Stat(dir) - stats[dir] = struct { - os.FileInfo - error - }{fi, err} - return fi, err - } - dir := filepath.Dir(uri.Filename()) - fi, err := getInfo(dir) - if err == nil { - // Aggregate all possibly relevant package IDs. - for knownURI, ids := range known { - knownDir := filepath.Dir(knownURI.Filename()) - knownFI, err := getInfo(knownDir) - if err != nil { - continue - } - if os.SameFile(fi, knownFI) { - for _, id := range ids { - invalidated[id] = struct{}{} - } - } - } - } - return invalidated -} - -// fileWasSaved reports whether the FileHandle passed in has been saved. It -// accomplishes this by checking to see if the original and current FileHandles -// are both overlays, and if the current FileHandle is saved while the original -// FileHandle was not saved. -func fileWasSaved(originalFH, currentFH source.FileHandle) bool { - c, ok := currentFH.(*Overlay) - if !ok || c == nil { - return true - } - o, ok := originalFH.(*Overlay) - if !ok || o == nil { - return c.saved - } - return !o.saved && c.saved -} - -// metadataChanges detects features of the change from oldFH->newFH that may -// affect package metadata. -// -// It uses lockedSnapshot to access cached parse information. lockedSnapshot -// must be locked. 
-// -// The result parameters have the following meaning: -// - invalidate means that package metadata for packages containing the file -// should be invalidated. -// - pkgFileChanged means that the file->package associates for the file have -// changed (possibly because the file is new, or because its package name has -// changed). -// - importDeleted means that an import has been deleted, or we can't -// determine if an import was deleted due to errors. -func metadataChanges(ctx context.Context, lockedSnapshot *snapshot, oldFH, newFH source.FileHandle) (invalidate, pkgFileChanged, importDeleted bool) { - if oe, ne := oldFH != nil && fileExists(oldFH), fileExists(newFH); !oe || !ne { // existential changes - changed := oe != ne - return changed, changed, !ne // we don't know if an import was deleted - } - - // If the file hasn't changed, there's no need to reload. - if oldFH.FileIdentity() == newFH.FileIdentity() { - return false, false, false - } - - fset := token.NewFileSet() - // Parse headers to compare package names and imports. - oldHeads, oldErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, source.ParseHeader, false, oldFH) - newHeads, newErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, source.ParseHeader, false, newFH) - - if oldErr != nil || newErr != nil { - errChanged := (oldErr == nil) != (newErr == nil) - return errChanged, errChanged, (newErr != nil) // we don't know if an import was deleted - } - - oldHead := oldHeads[0] - newHead := newHeads[0] - - // `go list` fails completely if the file header cannot be parsed. If we go - // from a non-parsing state to a parsing state, we should reload. - if oldHead.ParseErr != nil && newHead.ParseErr == nil { - return true, true, true // We don't know what changed, so fall back on full invalidation. - } - - // If a package name has changed, the set of package imports may have changed - // in ways we can't detect here. Assume an import has been deleted. 
- if oldHead.File.Name.Name != newHead.File.Name.Name { - return true, true, true - } - - // Check whether package imports have changed. Only consider potentially - // valid imports paths. - oldImports := validImports(oldHead.File.Imports) - newImports := validImports(newHead.File.Imports) - - for path := range newImports { - if _, ok := oldImports[path]; ok { - delete(oldImports, path) - } else { - invalidate = true // a new, potentially valid import was added - } - } - - if len(oldImports) > 0 { - invalidate = true - importDeleted = true - } - - // If the change does not otherwise invalidate metadata, get the full ASTs in - // order to check magic comments. - // - // Note: if this affects performance we can probably avoid parsing in the - // common case by first scanning the source for potential comments. - if !invalidate { - origFulls, oldErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, source.ParseFull, false, oldFH) - newFulls, newErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, source.ParseFull, false, newFH) - if oldErr == nil && newErr == nil { - invalidate = magicCommentsChanged(origFulls[0].File, newFulls[0].File) - } else { - // At this point, we shouldn't ever fail to produce a ParsedGoFile, as - // we're already past header parsing. - bug.Reportf("metadataChanges: unparseable file %v (old error: %v, new error: %v)", oldFH.URI(), oldErr, newErr) - } - } - - return invalidate, pkgFileChanged, importDeleted -} - -func magicCommentsChanged(original *ast.File, current *ast.File) bool { - oldComments := extractMagicComments(original) - newComments := extractMagicComments(current) - if len(oldComments) != len(newComments) { - return true - } - for i := range oldComments { - if oldComments[i] != newComments[i] { - return true - } - } - return false -} - -// validImports extracts the set of valid import paths from imports. 
-func validImports(imports []*ast.ImportSpec) map[string]struct{} { - m := make(map[string]struct{}) - for _, spec := range imports { - if path := spec.Path.Value; validImportPath(path) { - m[path] = struct{}{} - } - } - return m -} - -func validImportPath(path string) bool { - path, err := strconv.Unquote(path) - if err != nil { - return false - } - if path == "" { - return false - } - if path[len(path)-1] == '/' { - return false - } - return true -} - -var buildConstraintOrEmbedRe = regexp.MustCompile(`^//(go:embed|go:build|\s*\+build).*`) - -// extractMagicComments finds magic comments that affect metadata in f. -func extractMagicComments(f *ast.File) []string { - var results []string - for _, cg := range f.Comments { - for _, c := range cg.List { - if buildConstraintOrEmbedRe.MatchString(c.Text) { - results = append(results, c.Text) - } - } - } - return results -} - -func (s *snapshot) BuiltinFile(ctx context.Context) (*source.ParsedGoFile, error) { - s.AwaitInitialized(ctx) - - s.mu.Lock() - builtin := s.builtin - s.mu.Unlock() - - if builtin == "" { - return nil, fmt.Errorf("no builtin package for view %s", s.view.folder.Name) - } - - fh, err := s.ReadFile(ctx, builtin) - if err != nil { - return nil, err - } - // For the builtin file only, we need syntactic object resolution - // (since we can't type check). - mode := source.ParseFull &^ source.SkipObjectResolution - pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), mode, false, fh) - if err != nil { - return nil, err - } - return pgfs[0], nil -} - -func (s *snapshot) IsBuiltin(uri span.URI) bool { - s.mu.Lock() - defer s.mu.Unlock() - // We should always get the builtin URI in a canonical form, so use simple - // string comparison here. span.CompareURI is too expensive. 
- return uri == s.builtin -} - -func (s *snapshot) setBuiltin(path string) { - s.mu.Lock() - defer s.mu.Unlock() - - s.builtin = span.URIFromPath(path) -} diff --git a/gopls/internal/lsp/cache/symbols.go b/gopls/internal/lsp/cache/symbols.go deleted file mode 100644 index 3ecd794303b..00000000000 --- a/gopls/internal/lsp/cache/symbols.go +++ /dev/null @@ -1,192 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cache - -import ( - "context" - "go/ast" - "go/token" - "go/types" - "strings" - - "golang.org/x/tools/gopls/internal/astutil" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" -) - -// symbolize returns the result of symbolizing the file identified by uri, using a cache. -func (s *snapshot) symbolize(ctx context.Context, uri span.URI) ([]source.Symbol, error) { - - s.mu.Lock() - entry, hit := s.symbolizeHandles.Get(uri) - s.mu.Unlock() - - type symbolizeResult struct { - symbols []source.Symbol - err error - } - - // Cache miss? - if !hit { - fh, err := s.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - type symbolHandleKey source.Hash - key := symbolHandleKey(fh.FileIdentity().Hash) - promise, release := s.store.Promise(key, func(ctx context.Context, arg interface{}) interface{} { - symbols, err := symbolizeImpl(ctx, arg.(*snapshot), fh) - return symbolizeResult{symbols, err} - }) - - entry = promise - - s.mu.Lock() - s.symbolizeHandles.Set(uri, entry, func(_, _ interface{}) { release() }) - s.mu.Unlock() - } - - // Await result. - v, err := s.awaitPromise(ctx, entry) - if err != nil { - return nil, err - } - res := v.(symbolizeResult) - return res.symbols, res.err -} - -// symbolizeImpl reads and parses a file and extracts symbols from it. 
-func symbolizeImpl(ctx context.Context, snapshot *snapshot, fh source.FileHandle) ([]source.Symbol, error) { - pgfs, err := snapshot.view.parseCache.parseFiles(ctx, token.NewFileSet(), source.ParseFull, false, fh) - if err != nil { - return nil, err - } - - w := &symbolWalker{ - tokFile: pgfs[0].Tok, - mapper: pgfs[0].Mapper, - } - w.fileDecls(pgfs[0].File.Decls) - - return w.symbols, w.firstError -} - -type symbolWalker struct { - // for computing positions - tokFile *token.File - mapper *protocol.Mapper - - symbols []source.Symbol - firstError error -} - -func (w *symbolWalker) atNode(node ast.Node, name string, kind protocol.SymbolKind, path ...*ast.Ident) { - var b strings.Builder - for _, ident := range path { - if ident != nil { - b.WriteString(ident.Name) - b.WriteString(".") - } - } - b.WriteString(name) - - rng, err := w.mapper.NodeRange(w.tokFile, node) - if err != nil { - w.error(err) - return - } - sym := source.Symbol{ - Name: b.String(), - Kind: kind, - Range: rng, - } - w.symbols = append(w.symbols, sym) -} - -func (w *symbolWalker) error(err error) { - if err != nil && w.firstError == nil { - w.firstError = err - } -} - -func (w *symbolWalker) fileDecls(decls []ast.Decl) { - for _, decl := range decls { - switch decl := decl.(type) { - case *ast.FuncDecl: - kind := protocol.Function - var recv *ast.Ident - if decl.Recv.NumFields() > 0 { - kind = protocol.Method - _, recv, _ = astutil.UnpackRecv(decl.Recv.List[0].Type) - } - w.atNode(decl.Name, decl.Name.Name, kind, recv) - case *ast.GenDecl: - for _, spec := range decl.Specs { - switch spec := spec.(type) { - case *ast.TypeSpec: - kind := guessKind(spec) - w.atNode(spec.Name, spec.Name.Name, kind) - w.walkType(spec.Type, spec.Name) - case *ast.ValueSpec: - for _, name := range spec.Names { - kind := protocol.Variable - if decl.Tok == token.CONST { - kind = protocol.Constant - } - w.atNode(name, name.Name, kind) - } - } - } - } - } -} - -func guessKind(spec *ast.TypeSpec) protocol.SymbolKind { - 
switch spec.Type.(type) { - case *ast.InterfaceType: - return protocol.Interface - case *ast.StructType: - return protocol.Struct - case *ast.FuncType: - return protocol.Function - } - return protocol.Class -} - -// walkType processes symbols related to a type expression. path is path of -// nested type identifiers to the type expression. -func (w *symbolWalker) walkType(typ ast.Expr, path ...*ast.Ident) { - switch st := typ.(type) { - case *ast.StructType: - for _, field := range st.Fields.List { - w.walkField(field, protocol.Field, protocol.Field, path...) - } - case *ast.InterfaceType: - for _, field := range st.Methods.List { - w.walkField(field, protocol.Interface, protocol.Method, path...) - } - } -} - -// walkField processes symbols related to the struct field or interface method. -// -// unnamedKind and namedKind are the symbol kinds if the field is resp. unnamed -// or named. path is the path of nested identifiers containing the field. -func (w *symbolWalker) walkField(field *ast.Field, unnamedKind, namedKind protocol.SymbolKind, path ...*ast.Ident) { - if len(field.Names) == 0 { - switch typ := field.Type.(type) { - case *ast.SelectorExpr: - // embedded qualified type - w.atNode(field, typ.Sel.Name, unnamedKind, path...) - default: - w.atNode(field, types.ExprString(field.Type), unnamedKind, path...) - } - } - for _, name := range field.Names { - w.atNode(name, name.Name, namedKind, path...) - w.walkType(field.Type, append(path, name)...) - } -} diff --git a/gopls/internal/lsp/cache/view.go b/gopls/internal/lsp/cache/view.go deleted file mode 100644 index 246669c3290..00000000000 --- a/gopls/internal/lsp/cache/view.go +++ /dev/null @@ -1,1234 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package cache implements the caching layer for gopls. 
-package cache - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "os" - "path" - "path/filepath" - "regexp" - "sort" - "strings" - "sync" - "time" - - "golang.org/x/mod/modfile" - "golang.org/x/mod/semver" - exec "golang.org/x/sys/execabs" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/gopls/internal/vulncheck" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/gocommand" - "golang.org/x/tools/internal/imports" - "golang.org/x/tools/internal/xcontext" -) - -// A Folder represents an LSP workspace folder, together with its per-folder -// options. -// -// Folders (Name and Dir) are specified by the 'initialize' and subsequent -// 'didChangeWorkspaceFolders' requests; their options come from -// didChangeConfiguration. -// -// Folders must not be mutated, as they may be shared across multiple views. -type Folder struct { - Dir span.URI - Name string - Options *source.Options -} - -type View struct { - id string - - gocmdRunner *gocommand.Runner // limits go command concurrency - - folder *Folder - - // Workspace information. The fields below are immutable, and together with - // options define the build list. Any change to these fields results in a new - // View. - *workspaceInformation // Go environment information - - importsState *importsState - - // moduleUpgrades tracks known upgrades for module paths in each modfile. - // Each modfile has a map of module name to upgrade version. - moduleUpgradesMu sync.Mutex - moduleUpgrades map[span.URI]map[string]string - - // vulns maps each go.mod file's URI to its known vulnerabilities. - vulnsMu sync.Mutex - vulns map[span.URI]*vulncheck.Result - - // parseCache holds an LRU cache of recently parsed files. - parseCache *parseCache - - // fs is the file source used to populate this view. - fs *overlayFS - - // knownFiles tracks files that the view has accessed. 
- // TODO(golang/go#57558): this notion is fundamentally problematic, and - // should be removed. - knownFilesMu sync.Mutex - knownFiles map[span.URI]bool - - // initCancelFirstAttempt can be used to terminate the view's first - // attempt at initialization. - initCancelFirstAttempt context.CancelFunc - - // Track the latest snapshot via the snapshot field, guarded by snapshotMu. - // - // Invariant: whenever the snapshot field is overwritten, destroy(snapshot) - // is called on the previous (overwritten) snapshot while snapshotMu is held, - // incrementing snapshotWG. During shutdown the final snapshot is - // overwritten with nil and destroyed, guaranteeing that all observed - // snapshots have been destroyed via the destroy method, and snapshotWG may - // be waited upon to let these destroy operations complete. - snapshotMu sync.Mutex - snapshot *snapshot // latest snapshot; nil after shutdown has been called - releaseSnapshot func() // called when snapshot is no longer needed - snapshotWG sync.WaitGroup // refcount for pending destroy operations - - // initialWorkspaceLoad is closed when the first workspace initialization has - // completed. If we failed to load, we only retry if the go.mod file changes, - // to avoid too many go/packages calls. - initialWorkspaceLoad chan struct{} - - // initializationSema is used limit concurrent initialization of snapshots in - // the view. We use a channel instead of a mutex to avoid blocking when a - // context is canceled. - // - // This field (along with snapshot.initialized) guards against duplicate - // initialization of snapshots. Do not change it without adjusting snapshot - // accordingly. - initializationSema chan struct{} -} - -// workspaceInformation holds the defining features of the View workspace. -// -// This type is compared to see if the View needs to be reconstructed. -type workspaceInformation struct { - // `go env` variables that need to be tracked by gopls. 
- goEnv - - // gomod holds the relevant go.mod file for this workspace. - gomod span.URI - - // The Go version in use: X in Go 1.X. - goversion int - - // The complete output of the go version command. - // (Call gocommand.ParseGoVersionOutput to extract a version - // substring such as go1.19.1 or go1.20-rc.1, go1.21-abcdef01.) - goversionOutput string - - // hasGopackagesDriver is true if the user has a value set for the - // GOPACKAGESDRIVER environment variable or a gopackagesdriver binary on - // their machine. - hasGopackagesDriver bool - - // inGOPATH reports whether the workspace directory is contained in a GOPATH - // directory. - inGOPATH bool - - // goCommandDir is the dir to use for running go commands. - // - // The only case where this should matter is if we've narrowed the workspace to - // a single nested module. In that case, the go command won't be able to find - // the module unless we tell it the nested directory. - goCommandDir span.URI -} - -// effectiveGO111MODULE reports the value of GO111MODULE effective in the go -// command at this go version, assuming at least Go 1.16. -func (w workspaceInformation) effectiveGO111MODULE() go111module { - switch w.GO111MODULE() { - case "off": - return off - case "on", "": - return on - default: - return auto - } -} - -// A ViewType describes how we load package information for a view. -// -// This is used for constructing the go/packages.Load query, and for -// interpreting missing packages, imports, or errors. -// -// Each view has a ViewType which is derived from its immutable workspace -// information -- any environment change that would affect the view type -// results in a new view. -type ViewType int - -const ( - // GoPackagesDriverView is a view with a non-empty GOPACKAGESDRIVER - // environment variable. - GoPackagesDriverView ViewType = iota - - // GOPATHView is a view in GOPATH mode. - // - // I.e. in GOPATH, with GO111MODULE=off, or GO111MODULE=auto with no - // go.mod file. 
- GOPATHView - - // GoModuleView is a view in module mode with a single Go module. - GoModuleView - - // GoWorkView is a view in module mode with a go.work file. - GoWorkView - - // An AdHocView is a collection of files in a given directory, not in GOPATH - // or a module. - AdHocView -) - -// ViewType derives the type of the view from its workspace information. -// -// TODO(rfindley): this logic is overlapping and slightly inconsistent with -// validBuildConfiguration. As part of zero-config-gopls (golang/go#57979), fix -// this inconsistency and consolidate on the ViewType abstraction. -func (w workspaceInformation) ViewType() ViewType { - if w.hasGopackagesDriver { - return GoPackagesDriverView - } - go111module := w.effectiveGO111MODULE() - if w.gowork != "" && go111module != off { - return GoWorkView - } - if w.gomod != "" && go111module != off { - return GoModuleView - } - if w.inGOPATH && go111module != on { - return GOPATHView - } - return AdHocView -} - -// moduleMode reports whether the current snapshot uses Go modules. -// -// From https://go.dev/ref/mod, module mode is active if either of the -// following hold: -// - GO111MODULE=on -// - GO111MODULE=auto and we are inside a module or have a GOWORK value. -// -// Additionally, this method returns false if GOPACKAGESDRIVER is set. -// -// TODO(rfindley): use this more widely. -func (w workspaceInformation) moduleMode() bool { - switch w.ViewType() { - case GoModuleView, GoWorkView: - return true - default: - return false - } -} - -// GOWORK returns the effective GOWORK value for this workspace, if -// any, in URI form. -// -// The second result reports whether the effective GOWORK value is "" because -// GOWORK=off. -func (w workspaceInformation) GOWORK() (span.URI, bool) { - if w.gowork == "off" || w.gowork == "" { - return "", w.gowork == "off" - } - return span.URIFromPath(w.gowork), false -} - -// GO111MODULE returns the value of GO111MODULE to use for running the go -// command. 
It differs from the user's environment in order to allow for the -// more forgiving default value "auto" when using recent go versions. -// -// TODO(rfindley): it is probably not worthwhile diverging from the go command -// here. The extra forgiveness may be nice, but breaks the invariant that -// running the go command from the command line produces the same build list. -// -// Put differently: we shouldn't go out of our way to make GOPATH work, when -// the go command does not. -func (w workspaceInformation) GO111MODULE() string { - if w.go111module == "" { - return "auto" - } - return w.go111module -} - -type go111module int - -const ( - off = go111module(iota) - auto - on -) - -// goEnv holds important environment variables that gopls cares about. -type goEnv struct { - gocache, gopath, goroot, goprivate, gomodcache, gowork, goflags string - - // go111module holds the value of GO111MODULE as reported by go env. - // - // Don't use this value directly, because we choose to use a different - // default (auto) on Go 1.16 and later, to avoid spurious errors. Use - // the effectiveGO111MODULE method instead. - go111module string -} - -// loadGoEnv loads `go env` values into the receiver, using the provided user -// environment and go command runner. -func (env *goEnv) load(ctx context.Context, folder string, configEnv []string, runner *gocommand.Runner) error { - vars := env.vars() - - // We can save ~200 ms by requesting only the variables we care about. 
- args := []string{"-json"} - for k := range vars { - args = append(args, k) - } - - inv := gocommand.Invocation{ - Verb: "env", - Args: args, - Env: configEnv, - WorkingDir: folder, - } - stdout, err := runner.Run(ctx, inv) - if err != nil { - return err - } - envMap := make(map[string]string) - if err := json.Unmarshal(stdout.Bytes(), &envMap); err != nil { - return fmt.Errorf("internal error unmarshaling JSON from 'go env': %w", err) - } - for key, ptr := range vars { - *ptr = envMap[key] - } - - return nil -} - -func (env goEnv) String() string { - var vars []string - for govar, ptr := range env.vars() { - vars = append(vars, fmt.Sprintf("%s=%s", govar, *ptr)) - } - sort.Strings(vars) - return "[" + strings.Join(vars, ", ") + "]" -} - -// vars returns a map from Go environment variable to field value containing it. -func (env *goEnv) vars() map[string]*string { - return map[string]*string{ - "GOCACHE": &env.gocache, - "GOPATH": &env.gopath, - "GOROOT": &env.goroot, - "GOPRIVATE": &env.goprivate, - "GOMODCACHE": &env.gomodcache, - "GO111MODULE": &env.go111module, - "GOWORK": &env.gowork, - "GOFLAGS": &env.goflags, - } -} - -// workspaceMode holds various flags defining how the gopls workspace should -// behave. They may be derived from the environment, user configuration, or -// depend on the Go version. -// -// TODO(rfindley): remove workspace mode, in favor of explicit checks. -type workspaceMode int - -const ( - moduleMode workspaceMode = 1 << iota - - // tempModfile indicates whether or not the -modfile flag should be used. - tempModfile -) - -func (v *View) ID() string { return v.id } - -// tempModFile creates a temporary go.mod file based on the contents -// of the given go.mod file. On success, it is the caller's -// responsibility to call the cleanup function when the file is no -// longer needed. 
-func tempModFile(modFh source.FileHandle, gosum []byte) (tmpURI span.URI, cleanup func(), err error) { - filenameHash := source.Hashf("%s", modFh.URI().Filename()) - tmpMod, err := os.CreateTemp("", fmt.Sprintf("go.%s.*.mod", filenameHash)) - if err != nil { - return "", nil, err - } - defer tmpMod.Close() - - tmpURI = span.URIFromPath(tmpMod.Name()) - tmpSumName := sumFilename(tmpURI) - - content, err := modFh.Content() - if err != nil { - return "", nil, err - } - - if _, err := tmpMod.Write(content); err != nil { - return "", nil, err - } - - // We use a distinct name here to avoid subtlety around the fact - // that both 'return' and 'defer' update the "cleanup" variable. - doCleanup := func() { - _ = os.Remove(tmpSumName) - _ = os.Remove(tmpURI.Filename()) - } - - // Be careful to clean up if we return an error from this function. - defer func() { - if err != nil { - doCleanup() - cleanup = nil - } - }() - - // Create an analogous go.sum, if one exists. - if gosum != nil { - if err := os.WriteFile(tmpSumName, gosum, 0655); err != nil { - return "", nil, err - } - } - - return tmpURI, doCleanup, nil -} - -// Name returns the user visible name of this view. -func (v *View) Name() string { - return v.folder.Name -} - -// Folder returns the folder at the base of this view. -func (v *View) Folder() span.URI { - return v.folder.Dir -} - -// SetFolderOptions updates the options of each View associated with the folder -// of the given URI. -// -// Calling this may cause each related view to be invalidated and a replacement -// view added to the session. 
-func (s *Session) SetFolderOptions(ctx context.Context, uri span.URI, options *source.Options) error { - s.viewMu.Lock() - defer s.viewMu.Unlock() - - for _, v := range s.views { - if v.folder.Dir == uri { - folder2 := *v.folder - folder2.Options = options - info, err := getWorkspaceInformation(ctx, s.gocmdRunner, s, &folder2) - if err != nil { - return err - } - if _, err := s.updateViewLocked(ctx, v, info, &folder2); err != nil { - return err - } - } - } - return nil -} - -// viewEnv returns a string describing the environment of a newly created view. -// -// It must not be called concurrently with any other view methods. -func viewEnv(v *View) string { - env := v.folder.Options.EnvSlice() - buildFlags := append([]string{}, v.folder.Options.BuildFlags...) - - var buf bytes.Buffer - fmt.Fprintf(&buf, `go info for %v -(go dir %s) -(go version %s) -(valid build configuration = %v) -(build flags: %v) -(selected go env: %v) -`, - v.folder.Dir.Filename(), - v.goCommandDir.Filename(), - strings.TrimRight(v.workspaceInformation.goversionOutput, "\n"), - v.snapshot.validBuildConfiguration(), - buildFlags, - v.goEnv, - ) - - for _, v := range env { - s := strings.SplitN(v, "=", 2) - if len(s) != 2 { - continue - } - } - - return buf.String() -} - -func (s *snapshot) RunProcessEnvFunc(ctx context.Context, fn func(context.Context, *imports.Options) error) error { - return s.view.importsState.runProcessEnvFunc(ctx, s, fn) -} - -// separated out from its sole use in locateTemplateFiles for testability -func fileHasExtension(path string, suffixes []string) bool { - ext := filepath.Ext(path) - if ext != "" && ext[0] == '.' { - ext = ext[1:] - } - for _, s := range suffixes { - if s != "" && ext == s { - return true - } - } - return false -} - -// locateTemplateFiles ensures that the snapshot has mapped template files -// within the workspace folder. 
-func (s *snapshot) locateTemplateFiles(ctx context.Context) { - suffixes := s.Options().TemplateExtensions - if len(suffixes) == 0 { - return - } - - searched := 0 - filterFunc := s.view.filterFunc() - err := filepath.WalkDir(s.view.folder.Dir.Filename(), func(path string, entry os.DirEntry, err error) error { - if err != nil { - return err - } - if entry.IsDir() { - return nil - } - if fileLimit > 0 && searched > fileLimit { - return errExhausted - } - searched++ - if !fileHasExtension(path, suffixes) { - return nil - } - uri := span.URIFromPath(path) - if filterFunc(uri) { - return nil - } - // Get the file in order to include it in the snapshot. - // TODO(golang/go#57558): it is fundamentally broken to track files in this - // way; we may lose them if configuration or layout changes cause a view to - // be recreated. - // - // Furthermore, this operation must ignore errors, including context - // cancellation, or risk leaving the snapshot in an undefined state. - s.ReadFile(ctx, uri) - return nil - }) - if err != nil { - event.Error(ctx, "searching for template files failed", err) - } -} - -func (v *View) contains(uri span.URI) bool { - // If we've expanded the go dir to a parent directory, consider if the - // expanded dir contains the uri. - // TODO(rfindley): should we ignore the root here? It is not provided by the - // user. It would be better to explicitly consider the set of active modules - // wherever relevant. - inGoDir := false - if source.InDir(v.goCommandDir.Filename(), v.folder.Dir.Filename()) { - inGoDir = source.InDir(v.goCommandDir.Filename(), uri.Filename()) - } - inFolder := source.InDir(v.folder.Dir.Filename(), uri.Filename()) - - if !inGoDir && !inFolder { - return false - } - - return !v.filterFunc()(uri) -} - -// filterFunc returns a func that reports whether uri is filtered by the currently configured -// directoryFilters. 
-func (v *View) filterFunc() func(span.URI) bool { - folderDir := v.folder.Dir.Filename() - filterer := buildFilterer(folderDir, v.gomodcache, v.folder.Options) - return func(uri span.URI) bool { - // Only filter relative to the configured root directory. - if source.InDir(folderDir, uri.Filename()) { - return pathExcludedByFilter(strings.TrimPrefix(uri.Filename(), folderDir), filterer) - } - return false - } -} - -func (v *View) relevantChange(c source.FileModification) bool { - // If the file is known to the view, the change is relevant. - if v.knownFile(c.URI) { - return true - } - // The go.work file may not be "known" because we first access it through the - // session. As a result, treat changes to the view's go.work file as always - // relevant, even if they are only on-disk changes. - // - // TODO(rfindley): Make sure the go.work files are always known - // to the view. - if gowork, _ := v.GOWORK(); gowork == c.URI { - return true - } - - // Note: CL 219202 filtered out on-disk changes here that were not known to - // the view, but this introduces a race when changes arrive before the view - // is initialized (and therefore, before it knows about files). Since that CL - // had neither test nor associated issue, and cited only emacs behavior, this - // logic was deleted. - - return v.contains(c.URI) -} - -func (v *View) markKnown(uri span.URI) { - v.knownFilesMu.Lock() - defer v.knownFilesMu.Unlock() - if v.knownFiles == nil { - v.knownFiles = make(map[span.URI]bool) - } - v.knownFiles[uri] = true -} - -// knownFile reports whether the specified valid URI (or an alias) is known to the view. -func (v *View) knownFile(uri span.URI) bool { - v.knownFilesMu.Lock() - defer v.knownFilesMu.Unlock() - return v.knownFiles[uri] -} - -// shutdown releases resources associated with the view, and waits for ongoing -// work to complete. -func (v *View) shutdown() { - // Cancel the initial workspace load if it is still running. 
- v.initCancelFirstAttempt() - - v.snapshotMu.Lock() - if v.snapshot != nil { - v.snapshot.cancel() - v.releaseSnapshot() - v.destroy(v.snapshot, "View.shutdown") - v.snapshot = nil - v.releaseSnapshot = nil - } - v.snapshotMu.Unlock() - - v.snapshotWG.Wait() -} - -// While go list ./... skips directories starting with '.', '_', or 'testdata', -// gopls may still load them via file queries. Explicitly filter them out. -func (s *snapshot) IgnoredFile(uri span.URI) bool { - // Fast path: if uri doesn't contain '.', '_', or 'testdata', it is not - // possible that it is ignored. - { - uriStr := string(uri) - if !strings.Contains(uriStr, ".") && !strings.Contains(uriStr, "_") && !strings.Contains(uriStr, "testdata") { - return false - } - } - - s.ignoreFilterOnce.Do(func() { - var dirs []string - if len(s.workspaceModFiles) == 0 { - for _, entry := range filepath.SplitList(s.view.gopath) { - dirs = append(dirs, filepath.Join(entry, "src")) - } - } else { - dirs = append(dirs, s.view.gomodcache) - for m := range s.workspaceModFiles { - dirs = append(dirs, filepath.Dir(m.Filename())) - } - } - s.ignoreFilter = newIgnoreFilter(dirs) - }) - - return s.ignoreFilter.ignored(uri.Filename()) -} - -// An ignoreFilter implements go list's exclusion rules via its 'ignored' method. -type ignoreFilter struct { - prefixes []string // root dirs, ending in filepath.Separator -} - -// newIgnoreFilter returns a new ignoreFilter implementing exclusion rules -// relative to the provided directories. 
-func newIgnoreFilter(dirs []string) *ignoreFilter { - f := new(ignoreFilter) - for _, d := range dirs { - f.prefixes = append(f.prefixes, filepath.Clean(d)+string(filepath.Separator)) - } - return f -} - -func (f *ignoreFilter) ignored(filename string) bool { - for _, prefix := range f.prefixes { - if suffix := strings.TrimPrefix(filename, prefix); suffix != filename { - if checkIgnored(suffix) { - return true - } - } - } - return false -} - -// checkIgnored implements go list's exclusion rules. -// Quoting “go help list”: -// -// Directory and file names that begin with "." or "_" are ignored -// by the go tool, as are directories named "testdata". -func checkIgnored(suffix string) bool { - // Note: this could be further optimized by writing a HasSegment helper, a - // segment-boundary respecting variant of strings.Contains. - for _, component := range strings.Split(suffix, string(filepath.Separator)) { - if len(component) == 0 { - continue - } - if component[0] == '.' || component[0] == '_' || component == "testdata" { - return true - } - } - return false -} - -func (v *View) Snapshot() (source.Snapshot, func(), error) { - return v.getSnapshot() -} - -func (v *View) getSnapshot() (*snapshot, func(), error) { - v.snapshotMu.Lock() - defer v.snapshotMu.Unlock() - if v.snapshot == nil { - return nil, nil, errors.New("view is shutdown") - } - return v.snapshot, v.snapshot.Acquire(), nil -} - -func (s *snapshot) initialize(ctx context.Context, firstAttempt bool) { - select { - case <-ctx.Done(): - return - case s.view.initializationSema <- struct{}{}: - } - - defer func() { - <-s.view.initializationSema - }() - - s.mu.Lock() - initialized := s.initialized - s.mu.Unlock() - - if initialized { - return - } - - s.loadWorkspace(ctx, firstAttempt) -} - -func (s *snapshot) loadWorkspace(ctx context.Context, firstAttempt bool) (loadErr error) { - // A failure is retryable if it may have been due to context cancellation, - // and this is not the initial workspace load 
(firstAttempt==true). - // - // The IWL runs on a detached context with a long (~10m) timeout, so - // if the context was canceled we consider loading to have failed - // permanently. - retryableFailure := func() bool { - return loadErr != nil && ctx.Err() != nil && !firstAttempt - } - defer func() { - if !retryableFailure() { - s.mu.Lock() - s.initialized = true - s.mu.Unlock() - } - if firstAttempt { - close(s.view.initialWorkspaceLoad) - } - }() - - // TODO(rFindley): we should only locate template files on the first attempt, - // or guard it via a different mechanism. - s.locateTemplateFiles(ctx) - - // Collect module paths to load by parsing go.mod files. If a module fails to - // parse, capture the parsing failure as a critical diagnostic. - var scopes []loadScope // scopes to load - var modDiagnostics []*source.Diagnostic // diagnostics for broken go.mod files - addError := func(uri span.URI, err error) { - modDiagnostics = append(modDiagnostics, &source.Diagnostic{ - URI: uri, - Severity: protocol.SeverityError, - Source: source.ListError, - Message: err.Error(), - }) - } - - // TODO(rfindley): this should be predicated on the s.view.moduleMode(). - // There is no point loading ./... if we have an empty go.work. - if len(s.workspaceModFiles) > 0 { - for modURI := range s.workspaceModFiles { - // Verify that the modfile is valid before trying to load it. - // - // TODO(rfindley): now that we no longer need to parse the modfile in - // order to load scope, we could move these diagnostics to a more general - // location where we diagnose problems with modfiles or the workspace. - // - // Be careful not to add context cancellation errors as critical module - // errors. 
- fh, err := s.ReadFile(ctx, modURI) - if err != nil { - if ctx.Err() != nil { - return ctx.Err() - } - addError(modURI, err) - continue - } - parsed, err := s.ParseMod(ctx, fh) - if err != nil { - if ctx.Err() != nil { - return ctx.Err() - } - addError(modURI, err) - continue - } - if parsed.File == nil || parsed.File.Module == nil { - addError(modURI, fmt.Errorf("no module path for %s", modURI)) - continue - } - moduleDir := filepath.Dir(modURI.Filename()) - // Previously, we loaded /... for each module path, but that - // is actually incorrect when the pattern may match packages in more than - // one module. See golang/go#59458 for more details. - scopes = append(scopes, moduleLoadScope{dir: moduleDir, modulePath: parsed.File.Module.Mod.Path}) - } - } else { - scopes = append(scopes, viewLoadScope("LOAD_VIEW")) - } - - // If we're loading anything, ensure we also load builtin, - // since it provides fake definitions (and documentation) - // for types like int that are used everywhere. - if len(scopes) > 0 { - scopes = append(scopes, packageLoadScope("builtin")) - } - loadErr = s.load(ctx, true, scopes...) 
- - if retryableFailure() { - return loadErr - } - - var criticalErr *source.CriticalError - switch { - case loadErr != nil && ctx.Err() != nil: - event.Error(ctx, fmt.Sprintf("initial workspace load: %v", loadErr), loadErr) - criticalErr = &source.CriticalError{ - MainError: loadErr, - } - case loadErr != nil: - event.Error(ctx, "initial workspace load failed", loadErr) - extractedDiags := s.extractGoCommandErrors(ctx, loadErr) - criticalErr = &source.CriticalError{ - MainError: loadErr, - Diagnostics: append(modDiagnostics, extractedDiags...), - } - case len(modDiagnostics) == 1: - criticalErr = &source.CriticalError{ - MainError: fmt.Errorf(modDiagnostics[0].Message), - Diagnostics: modDiagnostics, - } - case len(modDiagnostics) > 1: - criticalErr = &source.CriticalError{ - MainError: fmt.Errorf("error loading module names"), - Diagnostics: modDiagnostics, - } - } - - // Lock the snapshot when setting the initialized error. - s.mu.Lock() - defer s.mu.Unlock() - s.initializedErr = criticalErr - return loadErr -} - -// invalidateContent invalidates the content of a Go file, -// including any position and type information that depends on it. -// -// invalidateContent returns a non-nil snapshot for the new content, along with -// a callback which the caller must invoke to release that snapshot. -// -// newOptions may be nil, in which case options remain unchanged. -func (v *View) invalidateContent(ctx context.Context, changes map[span.URI]source.FileHandle) (*snapshot, func()) { - // Detach the context so that content invalidation cannot be canceled. - ctx = xcontext.Detach(ctx) - - // This should be the only time we hold the view's snapshot lock for any period of time. 
- v.snapshotMu.Lock() - defer v.snapshotMu.Unlock() - - prevSnapshot, prevReleaseSnapshot := v.snapshot, v.releaseSnapshot - - if prevSnapshot == nil { - panic("invalidateContent called after shutdown") - } - - // Cancel all still-running previous requests, since they would be - // operating on stale data. - prevSnapshot.cancel() - - // Do not clone a snapshot until its view has finished initializing. - prevSnapshot.AwaitInitialized(ctx) - - // Save one lease of the cloned snapshot in the view. - v.snapshot, v.releaseSnapshot = prevSnapshot.clone(ctx, changes) - - prevReleaseSnapshot() - v.destroy(prevSnapshot, "View.invalidateContent") - - // Return a second lease to the caller. - return v.snapshot, v.snapshot.Acquire() -} - -func getWorkspaceInformation(ctx context.Context, runner *gocommand.Runner, fs source.FileSource, folder *Folder) (*workspaceInformation, error) { - if err := checkPathCase(folder.Dir.Filename()); err != nil { - return nil, fmt.Errorf("invalid workspace folder path: %w; check that the casing of the configured workspace folder path agrees with the casing reported by the operating system", err) - } - info := new(workspaceInformation) - var err error - inv := gocommand.Invocation{ - WorkingDir: folder.Dir.Filename(), - Env: folder.Options.EnvSlice(), - } - info.goversion, err = gocommand.GoVersion(ctx, inv, runner) - if err != nil { - return info, err - } - info.goversionOutput, err = gocommand.GoVersionOutput(ctx, inv, runner) - if err != nil { - return info, err - } - if err := info.load(ctx, folder.Dir.Filename(), folder.Options.EnvSlice(), runner); err != nil { - return info, err - } - // The value of GOPACKAGESDRIVER is not returned through the go command. - gopackagesdriver := os.Getenv("GOPACKAGESDRIVER") - // A user may also have a gopackagesdriver binary on their machine, which - // works the same way as setting GOPACKAGESDRIVER. 
- tool, _ := exec.LookPath("gopackagesdriver") - info.hasGopackagesDriver = gopackagesdriver != "off" && (gopackagesdriver != "" || tool != "") - - // filterFunc is the path filter function for this workspace folder. Notably, - // it is relative to folder (which is specified by the user), not root. - filterFunc := pathExcludedByFilterFunc(folder.Dir.Filename(), info.gomodcache, folder.Options) - info.gomod, err = findWorkspaceModFile(ctx, folder.Dir, fs, filterFunc) - if err != nil { - return info, err - } - - // Check if the workspace is within any GOPATH directory. - for _, gp := range filepath.SplitList(info.gopath) { - if source.InDir(filepath.Join(gp, "src"), folder.Dir.Filename()) { - info.inGOPATH = true - break - } - } - - // Compute the "working directory", which is where we run go commands. - // - // Note: if gowork is in use, this will default to the workspace folder. In - // the past, we would instead use the folder containing go.work. This should - // not make a difference, and in fact may improve go list error messages. - // - // TODO(golang/go#57514): eliminate the expandWorkspaceToModule setting - // entirely. - if folder.Options.ExpandWorkspaceToModule && info.gomod != "" { - info.goCommandDir = span.URIFromPath(filepath.Dir(info.gomod.Filename())) - } else { - info.goCommandDir = folder.Dir - } - return info, nil -} - -// findWorkspaceModFile searches for a single go.mod file relative to the given -// folder URI, using the following algorithm: -// 1. if there is a go.mod file in a parent directory, return it -// 2. else, if there is exactly one nested module, return it -// 3. 
else, return "" -func findWorkspaceModFile(ctx context.Context, folderURI span.URI, fs source.FileSource, excludePath func(string) bool) (span.URI, error) { - folder := folderURI.Filename() - match, err := findRootPattern(ctx, folder, "go.mod", fs) - if err != nil { - if ctxErr := ctx.Err(); ctxErr != nil { - return "", ctxErr - } - return "", err - } - if match != "" { - return span.URIFromPath(match), nil - } - - // ...else we should check if there's exactly one nested module. - all, err := findModules(folderURI, excludePath, 2) - if err == errExhausted { - // Fall-back behavior: if we don't find any modules after searching 10000 - // files, assume there are none. - event.Log(ctx, fmt.Sprintf("stopped searching for modules after %d files", fileLimit)) - return "", nil - } - if err != nil { - return "", err - } - if len(all) == 1 { - // range to access first element. - for uri := range all { - return uri, nil - } - } - return "", nil -} - -// findRootPattern looks for files with the given basename in dir or any parent -// directory of dir, using the provided FileSource. It returns the first match, -// starting from dir and search parents. -// -// The resulting string is either the file path of a matching file with the -// given basename, or "" if none was found. -func findRootPattern(ctx context.Context, dir, basename string, fs source.FileSource) (string, error) { - for dir != "" { - target := filepath.Join(dir, basename) - fh, err := fs.ReadFile(ctx, span.URIFromPath(target)) - if err != nil { - return "", err // context cancelled - } - if fileExists(fh) { - return target, nil - } - // Trailing separators must be trimmed, otherwise filepath.Split is a noop. - next, _ := filepath.Split(strings.TrimRight(dir, string(filepath.Separator))) - if next == dir { - break - } - dir = next - } - return "", nil -} - -// OS-specific path case check, for case-insensitive filesystems. 
-var checkPathCase = defaultCheckPathCase - -func defaultCheckPathCase(path string) error { - return nil -} - -func (v *View) IsGoPrivatePath(target string) bool { - return globsMatchPath(v.goprivate, target) -} - -func (v *View) ModuleUpgrades(modfile span.URI) map[string]string { - v.moduleUpgradesMu.Lock() - defer v.moduleUpgradesMu.Unlock() - - upgrades := map[string]string{} - for mod, ver := range v.moduleUpgrades[modfile] { - upgrades[mod] = ver - } - return upgrades -} - -func (v *View) RegisterModuleUpgrades(modfile span.URI, upgrades map[string]string) { - // Return early if there are no upgrades. - if len(upgrades) == 0 { - return - } - - v.moduleUpgradesMu.Lock() - defer v.moduleUpgradesMu.Unlock() - - m := v.moduleUpgrades[modfile] - if m == nil { - m = make(map[string]string) - v.moduleUpgrades[modfile] = m - } - for mod, ver := range upgrades { - m[mod] = ver - } -} - -func (v *View) ClearModuleUpgrades(modfile span.URI) { - v.moduleUpgradesMu.Lock() - defer v.moduleUpgradesMu.Unlock() - - delete(v.moduleUpgrades, modfile) -} - -const maxGovulncheckResultAge = 1 * time.Hour // Invalidate results older than this limit. 
-var timeNow = time.Now // for testing - -func (v *View) Vulnerabilities(modfiles ...span.URI) map[span.URI]*vulncheck.Result { - m := make(map[span.URI]*vulncheck.Result) - now := timeNow() - v.vulnsMu.Lock() - defer v.vulnsMu.Unlock() - - if len(modfiles) == 0 { // empty means all modfiles - for modfile := range v.vulns { - modfiles = append(modfiles, modfile) - } - } - for _, modfile := range modfiles { - vuln := v.vulns[modfile] - if vuln != nil && now.Sub(vuln.AsOf) > maxGovulncheckResultAge { - v.vulns[modfile] = nil // same as SetVulnerabilities(modfile, nil) - vuln = nil - } - m[modfile] = vuln - } - return m -} - -func (v *View) SetVulnerabilities(modfile span.URI, vulns *vulncheck.Result) { - v.vulnsMu.Lock() - defer v.vulnsMu.Unlock() - - v.vulns[modfile] = vulns -} - -func (v *View) GoVersion() int { - return v.workspaceInformation.goversion -} - -func (v *View) GoVersionString() string { - return gocommand.ParseGoVersionOutput(v.workspaceInformation.goversionOutput) -} - -// Copied from -// https://cs.opensource.google/go/go/+/master:src/cmd/go/internal/str/path.go;l=58;drc=2910c5b4a01a573ebc97744890a07c1a3122c67a -func globsMatchPath(globs, target string) bool { - for globs != "" { - // Extract next non-empty glob in comma-separated list. - var glob string - if i := strings.Index(globs, ","); i >= 0 { - glob, globs = globs[:i], globs[i+1:] - } else { - glob, globs = globs, "" - } - if glob == "" { - continue - } - - // A glob with N+1 path elements (N slashes) needs to be matched - // against the first N+1 path elements of target, - // which end just before the N+1'th slash. - n := strings.Count(glob, "/") - prefix := target - // Walk target, counting slashes, truncating at the N+1'th slash. - for i := 0; i < len(target); i++ { - if target[i] == '/' { - if n == 0 { - prefix = target[:i] - break - } - n-- - } - } - if n > 0 { - // Not enough prefix elements. 
- continue - } - matched, _ := path.Match(glob, prefix) - if matched { - return true - } - } - return false -} - -var modFlagRegexp = regexp.MustCompile(`-mod[ =](\w+)`) - -// TODO(rstambler): Consolidate modURI and modContent back into a FileHandle -// after we have a version of the workspace go.mod file on disk. Getting a -// FileHandle from the cache for temporary files is problematic, since we -// cannot delete it. -func (s *snapshot) vendorEnabled(ctx context.Context, modURI span.URI, modContent []byte) (bool, error) { - // Legacy GOPATH workspace? - if s.workspaceMode()&moduleMode == 0 { - return false, nil - } - - // Explicit -mod flag? - matches := modFlagRegexp.FindStringSubmatch(s.view.goflags) - if len(matches) != 0 { - modFlag := matches[1] - if modFlag != "" { - // Don't override an explicit '-mod=vendor' argument. - // We do want to override '-mod=readonly': it would break various module code lenses, - // and on 1.16 we know -modfile is available, so we won't mess with go.mod anyway. - return modFlag == "vendor", nil - } - } - - modFile, err := modfile.Parse(modURI.Filename(), modContent, nil) - if err != nil { - return false, err - } - - // No vendor directory? - // TODO(golang/go#57514): this is wrong if the working dir is not the module - // root. - if fi, err := os.Stat(filepath.Join(s.view.goCommandDir.Filename(), "vendor")); err != nil || !fi.IsDir() { - return false, nil - } - - // Vendoring enabled by default by go declaration in go.mod? - vendorEnabled := modFile.Go != nil && modFile.Go.Version != "" && semver.Compare("v"+modFile.Go.Version, "v1.14") >= 0 - return vendorEnabled, nil -} - -// TODO(rfindley): clean up the redundancy of allFilesExcluded, -// pathExcludedByFilterFunc, pathExcludedByFilter, view.filterFunc... 
-func allFilesExcluded(files []string, filterFunc func(span.URI) bool) bool { - for _, f := range files { - uri := span.URIFromPath(f) - if !filterFunc(uri) { - return false - } - } - return true -} - -func pathExcludedByFilterFunc(folder, gomodcache string, opts *source.Options) func(string) bool { - filterer := buildFilterer(folder, gomodcache, opts) - return func(path string) bool { - return pathExcludedByFilter(path, filterer) - } -} - -// pathExcludedByFilter reports whether the path (relative to the workspace -// folder) should be excluded by the configured directory filters. -// -// TODO(rfindley): passing root and gomodcache here makes it confusing whether -// path should be absolute or relative, and has already caused at least one -// bug. -func pathExcludedByFilter(path string, filterer *source.Filterer) bool { - path = strings.TrimPrefix(filepath.ToSlash(path), "/") - return filterer.Disallow(path) -} - -func buildFilterer(folder, gomodcache string, opts *source.Options) *source.Filterer { - filters := opts.DirectoryFilters - - if pref := strings.TrimPrefix(gomodcache, folder); pref != gomodcache { - modcacheFilter := "-" + strings.TrimPrefix(filepath.ToSlash(pref), "/") - filters = append(filters, modcacheFilter) - } - return source.NewFilterer(filters) -} diff --git a/gopls/internal/lsp/cache/view_test.go b/gopls/internal/lsp/cache/view_test.go deleted file mode 100644 index 2b7249b69ab..00000000000 --- a/gopls/internal/lsp/cache/view_test.go +++ /dev/null @@ -1,303 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
-package cache - -import ( - "context" - "encoding/json" - "os" - "path/filepath" - "testing" - "time" - - "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/gopls/internal/vulncheck" -) - -func TestCaseInsensitiveFilesystem(t *testing.T) { - base := t.TempDir() - - inner := filepath.Join(base, "a/B/c/DEFgh") - if err := os.MkdirAll(inner, 0777); err != nil { - t.Fatal(err) - } - file := filepath.Join(inner, "f.go") - if err := os.WriteFile(file, []byte("hi"), 0777); err != nil { - t.Fatal(err) - } - if _, err := os.Stat(filepath.Join(inner, "F.go")); err != nil { - t.Skip("filesystem is case-sensitive") - } - - tests := []struct { - path string - err bool - }{ - {file, false}, - {filepath.Join(inner, "F.go"), true}, - {filepath.Join(base, "a/b/c/defgh/f.go"), true}, - } - for _, tt := range tests { - err := checkPathCase(tt.path) - if err != nil != tt.err { - t.Errorf("checkPathCase(%q) = %v, wanted error: %v", tt.path, err, tt.err) - } - } -} - -func TestFindWorkspaceModFile(t *testing.T) { - workspace := ` --- a/go.mod -- -module a --- a/x/x.go -package x --- a/x/y/y.go -package x --- b/go.mod -- -module b --- b/c/go.mod -- -module bc --- d/gopls.mod -- -module d-goplsworkspace --- d/e/go.mod -- -module de --- f/g/go.mod -- -module fg -` - dir, err := fake.Tempdir(fake.UnpackTxt(workspace)) - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(dir) - - tests := []struct { - folder, want string - }{ - {"", ""}, // no module at root, and more than one nested module - {"a", "a/go.mod"}, - {"a/x", "a/go.mod"}, - {"a/x/y", "a/go.mod"}, - {"b/c", "b/c/go.mod"}, - {"d", "d/e/go.mod"}, - {"d/e", "d/e/go.mod"}, - {"f", "f/g/go.mod"}, - } - - for _, test := range tests { - ctx := context.Background() - rel := fake.RelativeTo(dir) - folderURI := span.URIFromPath(rel.AbsPath(test.folder)) - excludeNothing := func(string) bool { 
return false } - got, err := findWorkspaceModFile(ctx, folderURI, New(nil), excludeNothing) - if err != nil { - t.Fatal(err) - } - want := span.URI("") - if test.want != "" { - want = span.URIFromPath(rel.AbsPath(test.want)) - } - if got != want { - t.Errorf("findWorkspaceModFile(%q) = %q, want %q", test.folder, got, want) - } - } -} - -func TestInVendor(t *testing.T) { - for _, tt := range []struct { - path string - inVendor bool - }{ - {"foo/vendor/x.go", false}, - {"foo/vendor/x/x.go", true}, - {"foo/x.go", false}, - {"foo/vendor/foo.txt", false}, - {"foo/vendor/modules.txt", false}, - } { - if got := inVendor(span.URIFromPath(tt.path)); got != tt.inVendor { - t.Errorf("expected %s inVendor %v, got %v", tt.path, tt.inVendor, got) - } - } -} - -func TestFilters(t *testing.T) { - tests := []struct { - filters []string - included []string - excluded []string - }{ - { - included: []string{"x"}, - }, - { - filters: []string{"-"}, - excluded: []string{"x", "x/a"}, - }, - { - filters: []string{"-x", "+y"}, - included: []string{"y", "y/a", "z"}, - excluded: []string{"x", "x/a"}, - }, - { - filters: []string{"-x", "+x/y", "-x/y/z"}, - included: []string{"x/y", "x/y/a", "a"}, - excluded: []string{"x", "x/a", "x/y/z/a"}, - }, - { - filters: []string{"+foobar", "-foo"}, - included: []string{"foobar", "foobar/a"}, - excluded: []string{"foo", "foo/a"}, - }, - } - - for _, tt := range tests { - filterer := source.NewFilterer(tt.filters) - for _, inc := range tt.included { - if pathExcludedByFilter(inc, filterer) { - t.Errorf("filters %q excluded %v, wanted included", tt.filters, inc) - } - } - for _, exc := range tt.excluded { - if !pathExcludedByFilter(exc, filterer) { - t.Errorf("filters %q included %v, wanted excluded", tt.filters, exc) - } - } - } -} - -func TestSuffixes(t *testing.T) { - type file struct { - path string - want bool - } - type cases struct { - option []string - files []file - } - tests := []cases{ - {[]string{"tmpl", "gotmpl"}, []file{ // default - {"foo", 
false}, - {"foo.tmpl", true}, - {"foo.gotmpl", true}, - {"tmpl", false}, - {"tmpl.go", false}}, - }, - {[]string{"tmpl", "gotmpl", "html", "gohtml"}, []file{ - {"foo.gotmpl", true}, - {"foo.html", true}, - {"foo.gohtml", true}, - {"html", false}}, - }, - {[]string{"tmpl", "gotmpl", ""}, []file{ // possible user mistake - {"foo.gotmpl", true}, - {"foo.go", false}, - {"foo", false}}, - }, - } - for _, a := range tests { - suffixes := a.option - for _, b := range a.files { - got := fileHasExtension(b.path, suffixes) - if got != b.want { - t.Errorf("got %v, want %v, option %q, file %q (%+v)", - got, b.want, a.option, b.path, b) - } - } - } -} - -func TestView_Vulnerabilities(t *testing.T) { - // TODO(hyangah): use t.Cleanup when we get rid of go1.13 legacy CI. - defer func() { timeNow = time.Now }() - - now := time.Now() - - view := &View{ - vulns: make(map[span.URI]*vulncheck.Result), - } - file1, file2 := span.URIFromPath("f1/go.mod"), span.URIFromPath("f2/go.mod") - - vuln1 := &vulncheck.Result{AsOf: now.Add(-(maxGovulncheckResultAge * 3) / 4)} // already ~3/4*maxGovulncheckResultAge old - view.SetVulnerabilities(file1, vuln1) - - vuln2 := &vulncheck.Result{AsOf: now} // fresh. - view.SetVulnerabilities(file2, vuln2) - - t.Run("fresh", func(t *testing.T) { - got := view.Vulnerabilities() - want := map[span.URI]*vulncheck.Result{ - file1: vuln1, - file2: vuln2, - } - - if diff := cmp.Diff(toJSON(want), toJSON(got)); diff != "" { - t.Errorf("view.Vulnerabilities() mismatch (-want +got):\n%s", diff) - } - }) - - // maxGovulncheckResultAge/2 later - timeNow = func() time.Time { return now.Add(maxGovulncheckResultAge / 2) } - t.Run("after30min", func(t *testing.T) { - got := view.Vulnerabilities() - want := map[span.URI]*vulncheck.Result{ - file1: nil, // expired. 
- file2: vuln2, - } - - if diff := cmp.Diff(toJSON(want), toJSON(got)); diff != "" { - t.Errorf("view.Vulnerabilities() mismatch (-want +got):\n%s", diff) - } - }) - - // maxGovulncheckResultAge later - timeNow = func() time.Time { return now.Add(maxGovulncheckResultAge + time.Minute) } - - t.Run("after1hr", func(t *testing.T) { - got := view.Vulnerabilities() - want := map[span.URI]*vulncheck.Result{ - file1: nil, - file2: nil, - } - - if diff := cmp.Diff(toJSON(want), toJSON(got)); diff != "" { - t.Errorf("view.Vulnerabilities() mismatch (-want +got):\n%s", diff) - } - }) -} - -func toJSON(x interface{}) string { - b, _ := json.MarshalIndent(x, "", " ") - return string(b) -} - -func TestIgnoreFilter(t *testing.T) { - tests := []struct { - dirs []string - path string - want bool - }{ - {[]string{"a"}, "a/testdata/foo", true}, - {[]string{"a"}, "a/_ignore/foo", true}, - {[]string{"a"}, "a/.ignore/foo", true}, - {[]string{"a"}, "b/testdata/foo", false}, - {[]string{"a"}, "testdata/foo", false}, - {[]string{"a", "b"}, "b/testdata/foo", true}, - {[]string{"a"}, "atestdata/foo", false}, - } - - for _, test := range tests { - // convert to filepaths, for convenience - for i, dir := range test.dirs { - test.dirs[i] = filepath.FromSlash(dir) - } - test.path = filepath.FromSlash(test.path) - - f := newIgnoreFilter(test.dirs) - if got := f.ignored(test.path); got != test.want { - t.Errorf("newIgnoreFilter(%q).ignore(%q) = %t, want %t", test.dirs, test.path, got, test.want) - } - } -} diff --git a/gopls/internal/lsp/cache/workspace.go b/gopls/internal/lsp/cache/workspace.go deleted file mode 100644 index e344f4950cc..00000000000 --- a/gopls/internal/lsp/cache/workspace.go +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package cache - -import ( - "context" - "errors" - "fmt" - "io/fs" - "path/filepath" - "strings" - - "golang.org/x/mod/modfile" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" -) - -// TODO(rfindley): now that experimentalWorkspaceModule is gone, this file can -// be massively cleaned up and/or removed. - -// computeWorkspaceModFiles computes the set of workspace mod files based on the -// value of go.mod, go.work, and GO111MODULE. -func computeWorkspaceModFiles(ctx context.Context, gomod, gowork span.URI, go111module go111module, fs source.FileSource) (map[span.URI]struct{}, error) { - if go111module == off { - return nil, nil - } - if gowork != "" { - fh, err := fs.ReadFile(ctx, gowork) - if err != nil { - return nil, err - } - content, err := fh.Content() - if err != nil { - return nil, err - } - filename := gowork.Filename() - dir := filepath.Dir(filename) - workFile, err := modfile.ParseWork(filename, content, nil) - if err != nil { - return nil, fmt.Errorf("parsing go.work: %w", err) - } - modFiles := make(map[span.URI]struct{}) - for _, use := range workFile.Use { - modDir := filepath.FromSlash(use.Path) - if !filepath.IsAbs(modDir) { - modDir = filepath.Join(dir, modDir) - } - modURI := span.URIFromPath(filepath.Join(modDir, "go.mod")) - modFiles[modURI] = struct{}{} - } - return modFiles, nil - } - if gomod != "" { - return map[span.URI]struct{}{gomod: {}}, nil - } - return nil, nil -} - -// isGoMod reports if uri is a go.mod file. -func isGoMod(uri span.URI) bool { - return filepath.Base(uri.Filename()) == "go.mod" -} - -// isGoWork reports if uri is a go.work file. -func isGoWork(uri span.URI) bool { - return filepath.Base(uri.Filename()) == "go.work" -} - -// fileExists reports whether the file has a Content (which may be empty). -// An overlay exists even if it is not reflected in the file system. 
-func fileExists(fh source.FileHandle) bool { - _, err := fh.Content() - return err == nil -} - -// errExhausted is returned by findModules if the file scan limit is reached. -var errExhausted = errors.New("exhausted") - -// Limit go.mod search to 1 million files. As a point of reference, -// Kubernetes has 22K files (as of 2020-11-24). -// -// Note: per golang/go#56496, the previous limit of 1M files was too slow, at -// which point this limit was decreased to 100K. -const fileLimit = 100_000 - -// findModules recursively walks the root directory looking for go.mod files, -// returning the set of modules it discovers. If modLimit is non-zero, -// searching stops once modLimit modules have been found. -// -// TODO(rfindley): consider overlays. -func findModules(root span.URI, excludePath func(string) bool, modLimit int) (map[span.URI]struct{}, error) { - // Walk the view's folder to find all modules in the view. - modFiles := make(map[span.URI]struct{}) - searched := 0 - errDone := errors.New("done") - err := filepath.WalkDir(root.Filename(), func(path string, info fs.DirEntry, err error) error { - if err != nil { - // Probably a permission error. Keep looking. - return filepath.SkipDir - } - // For any path that is not the workspace folder, check if the path - // would be ignored by the go command. Vendor directories also do not - // contain workspace modules. - if info.IsDir() && path != root.Filename() { - suffix := strings.TrimPrefix(path, root.Filename()) - switch { - case checkIgnored(suffix), - strings.Contains(filepath.ToSlash(suffix), "/vendor/"), - excludePath(suffix): - return filepath.SkipDir - } - } - // We're only interested in go.mod files. 
- uri := span.URIFromPath(path) - if isGoMod(uri) { - modFiles[uri] = struct{}{} - } - if modLimit > 0 && len(modFiles) >= modLimit { - return errDone - } - searched++ - if fileLimit > 0 && searched >= fileLimit { - return errExhausted - } - return nil - }) - if err == errDone { - return modFiles, nil - } - return modFiles, err -} diff --git a/gopls/internal/lsp/call_hierarchy.go b/gopls/internal/lsp/call_hierarchy.go deleted file mode 100644 index fb916e94a7f..00000000000 --- a/gopls/internal/lsp/call_hierarchy.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/event" -) - -func (s *Server) prepareCallHierarchy(ctx context.Context, params *protocol.CallHierarchyPrepareParams) ([]protocol.CallHierarchyItem, error) { - ctx, done := event.Start(ctx, "lsp.Server.prepareCallHierarchy") - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.Go) - defer release() - if !ok { - return nil, err - } - - return source.PrepareCallHierarchy(ctx, snapshot, fh, params.Position) -} - -func (s *Server) incomingCalls(ctx context.Context, params *protocol.CallHierarchyIncomingCallsParams) ([]protocol.CallHierarchyIncomingCall, error) { - ctx, done := event.Start(ctx, "lsp.Server.incomingCalls") - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.Item.URI, source.Go) - defer release() - if !ok { - return nil, err - } - - return source.IncomingCalls(ctx, snapshot, fh, params.Item.Range.Start) -} - -func (s *Server) outgoingCalls(ctx context.Context, params *protocol.CallHierarchyOutgoingCallsParams) ([]protocol.CallHierarchyOutgoingCall, error) { - ctx, done := event.Start(ctx, 
"lsp.Server.outgoingCalls") - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.Item.URI, source.Go) - defer release() - if !ok { - return nil, err - } - - return source.OutgoingCalls(ctx, snapshot, fh, params.Item.Range.Start) -} diff --git a/gopls/internal/lsp/cmd/call_hierarchy.go b/gopls/internal/lsp/cmd/call_hierarchy.go deleted file mode 100644 index f623f75fa9b..00000000000 --- a/gopls/internal/lsp/cmd/call_hierarchy.go +++ /dev/null @@ -1,144 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cmd - -import ( - "context" - "flag" - "fmt" - "strings" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/tool" -) - -// callHierarchy implements the callHierarchy verb for gopls. -type callHierarchy struct { - app *Application -} - -func (c *callHierarchy) Name() string { return "call_hierarchy" } -func (c *callHierarchy) Parent() string { return c.app.Name() } -func (c *callHierarchy) Usage() string { return "" } -func (c *callHierarchy) ShortHelp() string { return "display selected identifier's call hierarchy" } -func (c *callHierarchy) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ` -Example: - - $ # 1-indexed location (:line:column or :#offset) of the target identifier - $ gopls call_hierarchy helper/helper.go:8:6 - $ gopls call_hierarchy helper/helper.go:#53 -`) - printFlagDefaults(f) -} - -func (c *callHierarchy) Run(ctx context.Context, args ...string) error { - if len(args) != 1 { - return tool.CommandLineErrorf("call_hierarchy expects 1 argument (position)") - } - - conn, err := c.app.connect(ctx, nil) - if err != nil { - return err - } - defer conn.terminate(ctx) - - from := span.Parse(args[0]) - file, err := conn.openFile(ctx, from.URI()) - if err != nil { - return err - } - - loc, err := 
file.mapper.SpanLocation(from) - if err != nil { - return err - } - - p := protocol.CallHierarchyPrepareParams{ - TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), - } - - callItems, err := conn.PrepareCallHierarchy(ctx, &p) - if err != nil { - return err - } - if len(callItems) == 0 { - return fmt.Errorf("function declaration identifier not found at %v", args[0]) - } - - for _, item := range callItems { - incomingCalls, err := conn.IncomingCalls(ctx, &protocol.CallHierarchyIncomingCallsParams{Item: item}) - if err != nil { - return err - } - for i, call := range incomingCalls { - // From the spec: CallHierarchyIncomingCall.FromRanges is relative to - // the caller denoted by CallHierarchyIncomingCall.from. - printString, err := callItemPrintString(ctx, conn, call.From, call.From.URI, call.FromRanges) - if err != nil { - return err - } - fmt.Printf("caller[%d]: %s\n", i, printString) - } - - printString, err := callItemPrintString(ctx, conn, item, "", nil) - if err != nil { - return err - } - fmt.Printf("identifier: %s\n", printString) - - outgoingCalls, err := conn.OutgoingCalls(ctx, &protocol.CallHierarchyOutgoingCallsParams{Item: item}) - if err != nil { - return err - } - for i, call := range outgoingCalls { - // From the spec: CallHierarchyOutgoingCall.FromRanges is the range - // relative to the caller, e.g the item passed to - printString, err := callItemPrintString(ctx, conn, call.To, item.URI, call.FromRanges) - if err != nil { - return err - } - fmt.Printf("callee[%d]: %s\n", i, printString) - } - } - - return nil -} - -// callItemPrintString returns a protocol.CallHierarchyItem object represented as a string. -// item and call ranges (protocol.Range) are converted to user friendly spans (1-indexed). 
-func callItemPrintString(ctx context.Context, conn *connection, item protocol.CallHierarchyItem, callsURI protocol.DocumentURI, calls []protocol.Range) (string, error) { - itemFile, err := conn.openFile(ctx, item.URI.SpanURI()) - if err != nil { - return "", err - } - itemSpan, err := itemFile.mapper.RangeSpan(item.Range) - if err != nil { - return "", err - } - - var callRanges []string - if callsURI != "" { - callsFile, err := conn.openFile(ctx, callsURI.SpanURI()) - if err != nil { - return "", err - } - for _, rng := range calls { - call, err := callsFile.mapper.RangeSpan(rng) - if err != nil { - return "", err - } - callRange := fmt.Sprintf("%d:%d-%d", call.Start().Line(), call.Start().Column(), call.End().Column()) - callRanges = append(callRanges, callRange) - } - } - - printString := fmt.Sprintf("function %s in %v", item.Name, itemSpan) - if len(calls) > 0 { - printString = fmt.Sprintf("ranges %s in %s from/to %s", strings.Join(callRanges, ", "), callsURI.SpanURI().Filename(), printString) - } - return printString, nil -} diff --git a/gopls/internal/lsp/cmd/check.go b/gopls/internal/lsp/cmd/check.go deleted file mode 100644 index a529f143884..00000000000 --- a/gopls/internal/lsp/cmd/check.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cmd - -import ( - "context" - "flag" - "fmt" - - "golang.org/x/tools/gopls/internal/span" -) - -// check implements the check verb for gopls. 
-type check struct { - app *Application -} - -func (c *check) Name() string { return "check" } -func (c *check) Parent() string { return c.app.Name() } -func (c *check) Usage() string { return "" } -func (c *check) ShortHelp() string { return "show diagnostic results for the specified file" } -func (c *check) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ` -Example: show the diagnostic results of this file: - - $ gopls check internal/lsp/cmd/check.go -`) - printFlagDefaults(f) -} - -// Run performs the check on the files specified by args and prints the -// results to stdout. -func (c *check) Run(ctx context.Context, args ...string) error { - if len(args) == 0 { - // no files, so no results - return nil - } - checking := map[span.URI]*cmdFile{} - var uris []span.URI - // now we ready to kick things off - conn, err := c.app.connect(ctx, nil) - if err != nil { - return err - } - defer conn.terminate(ctx) - for _, arg := range args { - uri := span.URIFromPath(arg) - uris = append(uris, uri) - file, err := conn.openFile(ctx, uri) - if err != nil { - return err - } - checking[uri] = file - } - if err := conn.diagnoseFiles(ctx, uris); err != nil { - return err - } - conn.client.filesMu.Lock() - defer conn.client.filesMu.Unlock() - - for _, file := range checking { - for _, d := range file.diagnostics { - spn, err := file.mapper.RangeSpan(d.Range) - if err != nil { - return fmt.Errorf("Could not convert position %v for %q", d.Range, d.Message) - } - fmt.Printf("%v: %v\n", spn, d.Message) - } - } - return nil -} diff --git a/gopls/internal/lsp/cmd/cmd.go b/gopls/internal/lsp/cmd/cmd.go deleted file mode 100644 index 714a5bf4baa..00000000000 --- a/gopls/internal/lsp/cmd/cmd.go +++ /dev/null @@ -1,801 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package cmd handles the gopls command line. 
-// It contains a handler for each of the modes, along with all the flag handling -// and the command line output format. -package cmd - -import ( - "context" - "flag" - "fmt" - "log" - "os" - "reflect" - "sort" - "strings" - "sync" - "text/tabwriter" - "time" - - "golang.org/x/tools/gopls/internal/lsp" - "golang.org/x/tools/gopls/internal/lsp/browser" - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/debug" - "golang.org/x/tools/gopls/internal/lsp/filecache" - "golang.org/x/tools/gopls/internal/lsp/lsprpc" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/diff" - "golang.org/x/tools/internal/jsonrpc2" - "golang.org/x/tools/internal/tool" - "golang.org/x/tools/internal/xcontext" -) - -// Application is the main application as passed to tool.Main -// It handles the main command line parsing and dispatch to the sub commands. -type Application struct { - // Core application flags - - // Embed the basic profiling flags supported by the tool package - tool.Profile - - // We include the server configuration directly for now, so the flags work - // even without the verb. - // TODO: Remove this when we stop allowing the serve verb by default. - Serve Serve - - // the options configuring function to invoke when building a server - options func(*source.Options) - - // The name of the binary, used in help and telemetry. - name string - - // The working directory to run commands in. - wd string - - // The environment variables to use. - env []string - - // Support for remote LSP server. - Remote string `flag:"remote" help:"forward all commands to a remote lsp specified by this flag. With no special prefix, this is assumed to be a TCP address. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. 
If 'auto', or prefixed by 'auto;', the remote address is automatically resolved based on the executing environment."` - - // Verbose enables verbose logging. - Verbose bool `flag:"v,verbose" help:"verbose output"` - - // VeryVerbose enables a higher level of verbosity in logging output. - VeryVerbose bool `flag:"vv,veryverbose" help:"very verbose output"` - - // Control ocagent export of telemetry - OCAgent string `flag:"ocagent" help:"the address of the ocagent (e.g. http://localhost:55678), or off"` - - // PrepareOptions is called to update the options when a new view is built. - // It is primarily to allow the behavior of gopls to be modified by hooks. - PrepareOptions func(*source.Options) - - // editFlags holds flags that control how file edit operations - // are applied, in particular when the server makes an ApplyEdits - // downcall to the client. Present only for commands that apply edits. - editFlags *EditFlags -} - -// EditFlags defines flags common to {fix,format,imports,rename} -// that control how edits are applied to the client's files. -// -// The type is exported for flag reflection. -// -// The -write, -diff, and -list flags are orthogonal but any -// of them suppresses the default behavior, which is to print -// the edited file contents. -type EditFlags struct { - Write bool `flag:"w,write" help:"write edited content to source files"` - Preserve bool `flag:"preserve" help:"with -write, make copies of original files"` - Diff bool `flag:"d,diff" help:"display diffs instead of edited file content"` - List bool `flag:"l,list" help:"display names of edited files"` -} - -func (app *Application) verbose() bool { - return app.Verbose || app.VeryVerbose -} - -// New returns a new Application ready to run. 
-func New(name, wd string, env []string, options func(*source.Options)) *Application { - if wd == "" { - wd, _ = os.Getwd() - } - app := &Application{ - options: options, - name: name, - wd: wd, - env: env, - OCAgent: "off", //TODO: Remove this line to default the exporter to on - - Serve: Serve{ - RemoteListenTimeout: 1 * time.Minute, - }, - } - app.Serve.app = app - return app -} - -// Name implements tool.Application returning the binary name. -func (app *Application) Name() string { return app.name } - -// Usage implements tool.Application returning empty extra argument usage. -func (app *Application) Usage() string { return "" } - -// ShortHelp implements tool.Application returning the main binary help. -func (app *Application) ShortHelp() string { - return "" -} - -// DetailedHelp implements tool.Application returning the main binary help. -// This includes the short help for all the sub commands. -func (app *Application) DetailedHelp(f *flag.FlagSet) { - w := tabwriter.NewWriter(f.Output(), 0, 0, 2, ' ', 0) - defer w.Flush() - - fmt.Fprint(w, ` -gopls is a Go language server. - -It is typically used with an editor to provide language features. When no -command is specified, gopls will default to the 'serve' command. The language -features can also be accessed via the gopls command-line interface. 
- -Usage: - gopls help [] - -Command: -`) - fmt.Fprint(w, "\nMain\t\n") - for _, c := range app.mainCommands() { - fmt.Fprintf(w, " %s\t%s\n", c.Name(), c.ShortHelp()) - } - fmt.Fprint(w, "\t\nFeatures\t\n") - for _, c := range app.featureCommands() { - fmt.Fprintf(w, " %s\t%s\n", c.Name(), c.ShortHelp()) - } - if app.verbose() { - fmt.Fprint(w, "\t\nInternal Use Only\t\n") - for _, c := range app.internalCommands() { - fmt.Fprintf(w, " %s\t%s\n", c.Name(), c.ShortHelp()) - } - } - fmt.Fprint(w, "\nflags:\n") - printFlagDefaults(f) -} - -// this is a slightly modified version of flag.PrintDefaults to give us control -func printFlagDefaults(s *flag.FlagSet) { - var flags [][]*flag.Flag - seen := map[flag.Value]int{} - s.VisitAll(func(f *flag.Flag) { - if i, ok := seen[f.Value]; !ok { - seen[f.Value] = len(flags) - flags = append(flags, []*flag.Flag{f}) - } else { - flags[i] = append(flags[i], f) - } - }) - for _, entry := range flags { - sort.SliceStable(entry, func(i, j int) bool { - return len(entry[i].Name) < len(entry[j].Name) - }) - var b strings.Builder - for i, f := range entry { - switch i { - case 0: - b.WriteString(" -") - default: - b.WriteString(",-") - } - b.WriteString(f.Name) - } - - f := entry[0] - name, usage := flag.UnquoteUsage(f) - if len(name) > 0 { - b.WriteString("=") - b.WriteString(name) - } - // Boolean flags of one ASCII letter are so common we - // treat them specially, putting their usage on the same line. - if b.Len() <= 4 { // space, space, '-', 'x'. - b.WriteString("\t") - } else { - // Four spaces before the tab triggers good alignment - // for both 4- and 8-space tab stops. 
- b.WriteString("\n \t") - } - b.WriteString(strings.ReplaceAll(usage, "\n", "\n \t")) - if !isZeroValue(f, f.DefValue) { - if reflect.TypeOf(f.Value).Elem().Name() == "stringValue" { - fmt.Fprintf(&b, " (default %q)", f.DefValue) - } else { - fmt.Fprintf(&b, " (default %v)", f.DefValue) - } - } - fmt.Fprint(s.Output(), b.String(), "\n") - } -} - -// isZeroValue is copied from the flags package -func isZeroValue(f *flag.Flag, value string) bool { - // Build a zero value of the flag's Value type, and see if the - // result of calling its String method equals the value passed in. - // This works unless the Value type is itself an interface type. - typ := reflect.TypeOf(f.Value) - var z reflect.Value - if typ.Kind() == reflect.Ptr { - z = reflect.New(typ.Elem()) - } else { - z = reflect.Zero(typ) - } - return value == z.Interface().(flag.Value).String() -} - -// Run takes the args after top level flag processing, and invokes the correct -// sub command as specified by the first argument. -// If no arguments are passed it will invoke the server sub command, as a -// temporary measure for compatibility. -func (app *Application) Run(ctx context.Context, args ...string) error { - // In the category of "things we can do while waiting for the Go command": - // Pre-initialize the filecache, which takes ~50ms to hash the gopls - // executable, and immediately runs a gc. - filecache.Start() - - ctx = debug.WithInstance(ctx, app.wd, app.OCAgent) - if len(args) == 0 { - s := flag.NewFlagSet(app.Name(), flag.ExitOnError) - return tool.Run(ctx, s, &app.Serve, args) - } - command, args := args[0], args[1:] - for _, c := range app.Commands() { - if c.Name() == command { - s := flag.NewFlagSet(app.Name(), flag.ExitOnError) - return tool.Run(ctx, s, c, args) - } - } - return tool.CommandLineErrorf("Unknown command %v", command) -} - -// Commands returns the set of commands supported by the gopls tool on the -// command line. -// The command is specified by the first non flag argument. 
-func (app *Application) Commands() []tool.Application { - var commands []tool.Application - commands = append(commands, app.mainCommands()...) - commands = append(commands, app.featureCommands()...) - commands = append(commands, app.internalCommands()...) - return commands -} - -func (app *Application) mainCommands() []tool.Application { - return []tool.Application{ - &app.Serve, - &version{app: app}, - &bug{app: app}, - &help{app: app}, - &apiJSON{app: app}, - &licenses{app: app}, - } -} - -func (app *Application) internalCommands() []tool.Application { - return []tool.Application{ - &vulncheck{app: app}, - } -} - -func (app *Application) featureCommands() []tool.Application { - return []tool.Application{ - &callHierarchy{app: app}, - &check{app: app}, - &definition{app: app}, - &foldingRanges{app: app}, - &format{app: app}, - &highlight{app: app}, - &implementation{app: app}, - &imports{app: app}, - newRemote(app, ""), - newRemote(app, "inspect"), - &links{app: app}, - &prepareRename{app: app}, - &references{app: app}, - &rename{app: app}, - &semtok{app: app}, - &signature{app: app}, - &stats{app: app}, - &suggestedFix{app: app}, - &symbols{app: app}, - - &workspaceSymbol{app: app}, - } -} - -var ( - internalMu sync.Mutex - internalConnections = make(map[string]*connection) -) - -// connect creates and initializes a new in-process gopls session. -// -// If onProgress is set, it is called for each new progress notification. 
-func (app *Application) connect(ctx context.Context, onProgress func(*protocol.ProgressParams)) (*connection, error) { - switch { - case app.Remote == "": - client := newClient(app, onProgress) - options := source.DefaultOptions(app.options) - server := lsp.NewServer(cache.NewSession(ctx, cache.New(nil)), client, options) - conn := newConnection(server, client) - if err := conn.initialize(protocol.WithClient(ctx, client), app.options); err != nil { - return nil, err - } - return conn, nil - - case strings.HasPrefix(app.Remote, "internal@"): - internalMu.Lock() - defer internalMu.Unlock() - opts := source.DefaultOptions(app.options) - key := fmt.Sprintf("%s %v %v %v", app.wd, opts.PreferredContentFormat, opts.HierarchicalDocumentSymbolSupport, opts.SymbolMatcher) - if c := internalConnections[key]; c != nil { - return c, nil - } - remote := app.Remote[len("internal@"):] - ctx := xcontext.Detach(ctx) //TODO:a way of shutting down the internal server - connection, err := app.connectRemote(ctx, remote) - if err != nil { - return nil, err - } - internalConnections[key] = connection - return connection, nil - default: - return app.connectRemote(ctx, app.Remote) - } -} - -func (app *Application) connectRemote(ctx context.Context, remote string) (*connection, error) { - conn, err := lsprpc.ConnectToRemote(ctx, remote) - if err != nil { - return nil, err - } - stream := jsonrpc2.NewHeaderStream(conn) - cc := jsonrpc2.NewConn(stream) - server := protocol.ServerDispatcher(cc) - client := newClient(app, nil) - connection := newConnection(server, client) - ctx = protocol.WithClient(ctx, connection.client) - cc.Go(ctx, - protocol.Handlers( - protocol.ClientHandler(client, jsonrpc2.MethodNotFound))) - return connection, connection.initialize(ctx, app.options) -} - -var matcherString = map[source.SymbolMatcher]string{ - source.SymbolFuzzy: "fuzzy", - source.SymbolCaseSensitive: "caseSensitive", - source.SymbolCaseInsensitive: "caseInsensitive", -} - -func (c *connection) 
initialize(ctx context.Context, options func(*source.Options)) error { - params := &protocol.ParamInitialize{} - params.RootURI = protocol.URIFromPath(c.client.app.wd) - params.Capabilities.Workspace.Configuration = true - - // Make sure to respect configured options when sending initialize request. - opts := source.DefaultOptions(options) - // If you add an additional option here, you must update the map key in connect. - params.Capabilities.TextDocument.Hover = &protocol.HoverClientCapabilities{ - ContentFormat: []protocol.MarkupKind{opts.PreferredContentFormat}, - } - params.Capabilities.TextDocument.DocumentSymbol.HierarchicalDocumentSymbolSupport = opts.HierarchicalDocumentSymbolSupport - params.Capabilities.TextDocument.SemanticTokens = protocol.SemanticTokensClientCapabilities{} - params.Capabilities.TextDocument.SemanticTokens.Formats = []protocol.TokenFormat{"relative"} - params.Capabilities.TextDocument.SemanticTokens.Requests.Range.Value = true - params.Capabilities.TextDocument.SemanticTokens.Requests.Full.Value = true - params.Capabilities.TextDocument.SemanticTokens.TokenTypes = lsp.SemanticTypes() - params.Capabilities.TextDocument.SemanticTokens.TokenModifiers = lsp.SemanticModifiers() - - // If the subcommand has registered a progress handler, report the progress - // capability. - if c.client.onProgress != nil { - params.Capabilities.Window.WorkDoneProgress = true - } - - params.InitializationOptions = map[string]interface{}{ - "symbolMatcher": matcherString[opts.SymbolMatcher], - } - if _, err := c.Server.Initialize(ctx, params); err != nil { - return err - } - if err := c.Server.Initialized(ctx, &protocol.InitializedParams{}); err != nil { - return err - } - return nil -} - -type connection struct { - protocol.Server - client *cmdClient -} - -// cmdClient defines the protocol.Client interface behavior of the gopls CLI tool. 
-type cmdClient struct { - app *Application - onProgress func(*protocol.ProgressParams) - - diagnosticsMu sync.Mutex - diagnosticsDone chan struct{} - - filesMu sync.Mutex // guards files map and each cmdFile.diagnostics - files map[span.URI]*cmdFile -} - -type cmdFile struct { - uri span.URI - mapper *protocol.Mapper - err error - diagnostics []protocol.Diagnostic -} - -func newClient(app *Application, onProgress func(*protocol.ProgressParams)) *cmdClient { - return &cmdClient{ - app: app, - onProgress: onProgress, - files: make(map[span.URI]*cmdFile), - } -} - -func newConnection(server protocol.Server, client *cmdClient) *connection { - return &connection{ - Server: server, - client: client, - } -} - -// fileURI converts a DocumentURI to a file:// span.URI, panicking if it's not a file. -func fileURI(uri protocol.DocumentURI) span.URI { - sURI := uri.SpanURI() - if !sURI.IsFile() { - panic(fmt.Sprintf("%q is not a file URI", uri)) - } - return sURI -} - -func (c *cmdClient) CodeLensRefresh(context.Context) error { return nil } - -func (c *cmdClient) LogTrace(context.Context, *protocol.LogTraceParams) error { return nil } - -func (c *cmdClient) ShowMessage(ctx context.Context, p *protocol.ShowMessageParams) error { return nil } - -func (c *cmdClient) ShowMessageRequest(ctx context.Context, p *protocol.ShowMessageRequestParams) (*protocol.MessageActionItem, error) { - return nil, nil -} - -func (c *cmdClient) LogMessage(ctx context.Context, p *protocol.LogMessageParams) error { - switch p.Type { - case protocol.Error: - log.Print("Error:", p.Message) - case protocol.Warning: - log.Print("Warning:", p.Message) - case protocol.Info: - if c.app.verbose() { - log.Print("Info:", p.Message) - } - case protocol.Log: - if c.app.verbose() { - log.Print("Log:", p.Message) - } - default: - if c.app.verbose() { - log.Print(p.Message) - } - } - return nil -} - -func (c *cmdClient) Event(ctx context.Context, t *interface{}) error { return nil } - -func (c *cmdClient) 
RegisterCapability(ctx context.Context, p *protocol.RegistrationParams) error { - return nil -} - -func (c *cmdClient) UnregisterCapability(ctx context.Context, p *protocol.UnregistrationParams) error { - return nil -} - -func (c *cmdClient) WorkspaceFolders(ctx context.Context) ([]protocol.WorkspaceFolder, error) { - return nil, nil -} - -func (c *cmdClient) Configuration(ctx context.Context, p *protocol.ParamConfiguration) ([]interface{}, error) { - results := make([]interface{}, len(p.Items)) - for i, item := range p.Items { - if item.Section != "gopls" { - continue - } - env := map[string]interface{}{} - for _, value := range c.app.env { - l := strings.SplitN(value, "=", 2) - if len(l) != 2 { - continue - } - env[l[0]] = l[1] - } - m := map[string]interface{}{ - "env": env, - "analyses": map[string]bool{ - "fillreturns": true, - "nonewvars": true, - "noresultvalues": true, - "undeclaredname": true, - }, - } - if c.app.VeryVerbose { - m["verboseOutput"] = true - } - results[i] = m - } - return results, nil -} - -func (c *cmdClient) ApplyEdit(ctx context.Context, p *protocol.ApplyWorkspaceEditParams) (*protocol.ApplyWorkspaceEditResult, error) { - if err := c.applyWorkspaceEdit(&p.Edit); err != nil { - return &protocol.ApplyWorkspaceEditResult{FailureReason: err.Error()}, nil - } - return &protocol.ApplyWorkspaceEditResult{Applied: true}, nil -} - -// applyWorkspaceEdit applies a complete WorkspaceEdit to the client's -// files, honoring the preferred edit mode specified by cli.app.editMode. -// (Used by rename and by ApplyEdit downcalls.) -func (cli *cmdClient) applyWorkspaceEdit(edit *protocol.WorkspaceEdit) error { - var orderedURIs []string - edits := map[span.URI][]protocol.TextEdit{} - for _, c := range edit.DocumentChanges { - if c.TextDocumentEdit != nil { - uri := fileURI(c.TextDocumentEdit.TextDocument.URI) - edits[uri] = append(edits[uri], c.TextDocumentEdit.Edits...) 
- orderedURIs = append(orderedURIs, string(uri)) - } - if c.RenameFile != nil { - return fmt.Errorf("client does not support file renaming (%s -> %s)", - c.RenameFile.OldURI, - c.RenameFile.NewURI) - } - } - sort.Strings(orderedURIs) - for _, u := range orderedURIs { - uri := span.URIFromURI(u) - f := cli.openFile(uri) - if f.err != nil { - return f.err - } - if err := applyTextEdits(f.mapper, edits[uri], cli.app.editFlags); err != nil { - return err - } - } - return nil -} - -// applyTextEdits applies a list of edits to the mapper file content, -// using the preferred edit mode. It is a no-op if there are no edits. -func applyTextEdits(mapper *protocol.Mapper, edits []protocol.TextEdit, flags *EditFlags) error { - if len(edits) == 0 { - return nil - } - newContent, renameEdits, err := source.ApplyProtocolEdits(mapper, edits) - if err != nil { - return err - } - - filename := mapper.URI.Filename() - - if flags.List { - fmt.Println(filename) - } - - if flags.Write { - if flags.Preserve { - if err := os.Rename(filename, filename+".orig"); err != nil { - return err - } - } - if err := os.WriteFile(filename, newContent, 0644); err != nil { - return err - } - } - - if flags.Diff { - unified, err := diff.ToUnified(filename+".orig", filename, string(mapper.Content), renameEdits, diff.DefaultContextLines) - if err != nil { - return err - } - fmt.Print(unified) - } - - // No flags: just print edited file content. - // TODO(adonovan): how is this ever useful with multiple files? - if !(flags.List || flags.Write || flags.Diff) { - os.Stdout.Write(newContent) - } - - return nil -} - -func (c *cmdClient) PublishDiagnostics(ctx context.Context, p *protocol.PublishDiagnosticsParams) error { - if p.URI == "gopls://diagnostics-done" { - close(c.diagnosticsDone) - } - // Don't worry about diagnostics without versions. 
- if p.Version == 0 { - return nil - } - - c.filesMu.Lock() - defer c.filesMu.Unlock() - - file := c.getFile(fileURI(p.URI)) - file.diagnostics = append(file.diagnostics, p.Diagnostics...) - - // Perform a crude in-place deduplication. - // TODO(golang/go#60122): replace the ad-hoc gopls/diagnoseFiles - // non-standard request with support for textDocument/diagnostic, - // so that we don't need to do this de-duplication. - type key [6]interface{} - seen := make(map[key]bool) - out := file.diagnostics[:0] - for _, d := range file.diagnostics { - var codeHref string - if desc := d.CodeDescription; desc != nil { - codeHref = desc.Href - } - k := key{d.Range, d.Severity, d.Code, codeHref, d.Source, d.Message} - if !seen[k] { - seen[k] = true - out = append(out, d) - } - } - file.diagnostics = out - - return nil -} - -func (c *cmdClient) Progress(_ context.Context, params *protocol.ProgressParams) error { - if c.onProgress != nil { - c.onProgress(params) - } - return nil -} - -func (c *cmdClient) ShowDocument(ctx context.Context, params *protocol.ShowDocumentParams) (*protocol.ShowDocumentResult, error) { - var success bool - if params.External { - // Open URI in external browser. - success = browser.Open(string(params.URI)) - } else { - // Open file in editor, optionally taking focus and selecting a range. - // (cmdClient has no editor. Should it fork+exec $EDITOR?) 
- log.Printf("Server requested that client editor open %q (takeFocus=%t, selection=%+v)", - params.URI, params.TakeFocus, params.Selection) - success = true - } - return &protocol.ShowDocumentResult{Success: success}, nil -} - -func (c *cmdClient) WorkDoneProgressCreate(context.Context, *protocol.WorkDoneProgressCreateParams) error { - return nil -} - -func (c *cmdClient) DiagnosticRefresh(context.Context) error { - return nil -} - -func (c *cmdClient) InlayHintRefresh(context.Context) error { - return nil -} - -func (c *cmdClient) SemanticTokensRefresh(context.Context) error { - return nil -} - -func (c *cmdClient) InlineValueRefresh(context.Context) error { - return nil -} - -func (c *cmdClient) getFile(uri span.URI) *cmdFile { - file, found := c.files[uri] - if !found || file.err != nil { - file = &cmdFile{ - uri: uri, - } - c.files[uri] = file - } - if file.mapper == nil { - content, err := os.ReadFile(uri.Filename()) - if err != nil { - file.err = fmt.Errorf("getFile: %v: %v", uri, err) - return file - } - file.mapper = protocol.NewMapper(uri, content) - } - return file -} - -func (c *cmdClient) openFile(uri span.URI) *cmdFile { - c.filesMu.Lock() - defer c.filesMu.Unlock() - return c.getFile(uri) -} - -// TODO(adonovan): provide convenience helpers to: -// - map a (URI, protocol.Range) to a MappedRange; -// - parse a command-line argument to a MappedRange. -func (c *connection) openFile(ctx context.Context, uri span.URI) (*cmdFile, error) { - file := c.client.openFile(uri) - if file.err != nil { - return nil, file.err - } - - p := &protocol.DidOpenTextDocumentParams{ - TextDocument: protocol.TextDocumentItem{ - URI: protocol.URIFromSpanURI(uri), - LanguageID: "go", - Version: 1, - Text: string(file.mapper.Content), - }, - } - if err := c.Server.DidOpen(ctx, p); err != nil { - // TODO(adonovan): is this assignment concurrency safe? 
- file.err = fmt.Errorf("%v: %v", uri, err) - return nil, file.err - } - return file, nil -} - -func (c *connection) semanticTokens(ctx context.Context, p *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) { - // use range to avoid limits on full - resp, err := c.Server.SemanticTokensRange(ctx, p) - if err != nil { - return nil, err - } - return resp, nil -} - -func (c *connection) diagnoseFiles(ctx context.Context, files []span.URI) error { - var untypedFiles []interface{} - for _, file := range files { - untypedFiles = append(untypedFiles, string(file)) - } - c.client.diagnosticsMu.Lock() - defer c.client.diagnosticsMu.Unlock() - - c.client.diagnosticsDone = make(chan struct{}) - _, err := c.Server.NonstandardRequest(ctx, "gopls/diagnoseFiles", map[string]interface{}{"files": untypedFiles}) - if err != nil { - close(c.client.diagnosticsDone) - return err - } - - <-c.client.diagnosticsDone - return nil -} - -func (c *connection) terminate(ctx context.Context) { - if strings.HasPrefix(c.client.app.Remote, "internal@") { - // internal connections need to be left alive for the next test - return - } - //TODO: do we need to handle errors on these calls? - c.Shutdown(ctx) - //TODO: right now calling exit terminates the process, we should rethink that - //server.Exit(ctx) -} - -// Implement io.Closer. -func (c *cmdClient) Close() error { - return nil -} diff --git a/gopls/internal/lsp/cmd/definition.go b/gopls/internal/lsp/cmd/definition.go deleted file mode 100644 index ef0033cdb1e..00000000000 --- a/gopls/internal/lsp/cmd/definition.go +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package cmd - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "os" - "strings" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/tool" -) - -// A Definition is the result of a 'definition' query. -type Definition struct { - Span span.Span `json:"span"` // span of the definition - Description string `json:"description"` // description of the denoted object -} - -// These constant is printed in the help, and then used in a test to verify the -// help is still valid. -// They refer to "Set" in "flag.FlagSet" from the DetailedHelp method below. -const ( - exampleLine = 44 - exampleColumn = 47 - exampleOffset = 1270 -) - -// definition implements the definition verb for gopls. -type definition struct { - app *Application - - JSON bool `flag:"json" help:"emit output in JSON format"` - MarkdownSupported bool `flag:"markdown" help:"support markdown in responses"` -} - -func (d *definition) Name() string { return "definition" } -func (d *definition) Parent() string { return d.app.Name() } -func (d *definition) Usage() string { return "[definition-flags] " } -func (d *definition) ShortHelp() string { return "show declaration of selected identifier" } -func (d *definition) DetailedHelp(f *flag.FlagSet) { - fmt.Fprintf(f.Output(), ` -Example: show the definition of the identifier at syntax at offset %[1]v in this file (flag.FlagSet): - - $ gopls definition internal/lsp/cmd/definition.go:%[1]v:%[2]v - $ gopls definition internal/lsp/cmd/definition.go:#%[3]v - -definition-flags: -`, exampleLine, exampleColumn, exampleOffset) - printFlagDefaults(f) -} - -// Run performs the definition query as specified by args and prints the -// results to stdout. 
-func (d *definition) Run(ctx context.Context, args ...string) error { - if len(args) != 1 { - return tool.CommandLineErrorf("definition expects 1 argument") - } - // Plaintext makes more sense for the command line. - opts := d.app.options - d.app.options = func(o *source.Options) { - if opts != nil { - opts(o) - } - o.PreferredContentFormat = protocol.PlainText - if d.MarkdownSupported { - o.PreferredContentFormat = protocol.Markdown - } - } - conn, err := d.app.connect(ctx, nil) - if err != nil { - return err - } - defer conn.terminate(ctx) - from := span.Parse(args[0]) - file, err := conn.openFile(ctx, from.URI()) - if err != nil { - return err - } - loc, err := file.mapper.SpanLocation(from) - if err != nil { - return err - } - p := protocol.DefinitionParams{ - TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), - } - locs, err := conn.Definition(ctx, &p) - if err != nil { - return fmt.Errorf("%v: %v", from, err) - } - - if len(locs) == 0 { - return fmt.Errorf("%v: not an identifier", from) - } - file, err = conn.openFile(ctx, fileURI(locs[0].URI)) - if err != nil { - return fmt.Errorf("%v: %v", from, err) - } - definition, err := file.mapper.LocationSpan(locs[0]) - if err != nil { - return fmt.Errorf("%v: %v", from, err) - } - - q := protocol.HoverParams{ - TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), - } - hover, err := conn.Hover(ctx, &q) - if err != nil { - return fmt.Errorf("%v: %v", from, err) - } - var description string - if hover != nil { - description = strings.TrimSpace(hover.Contents.Value) - } - - result := &Definition{ - Span: definition, - Description: description, - } - if d.JSON { - enc := json.NewEncoder(os.Stdout) - enc.SetIndent("", "\t") - return enc.Encode(result) - } - fmt.Printf("%v", result.Span) - if len(result.Description) > 0 { - fmt.Printf(": defined here as %s", result.Description) - } - fmt.Printf("\n") - return nil -} diff --git a/gopls/internal/lsp/cmd/folding_range.go 
b/gopls/internal/lsp/cmd/folding_range.go deleted file mode 100644 index d2cc875ba25..00000000000 --- a/gopls/internal/lsp/cmd/folding_range.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cmd - -import ( - "context" - "flag" - "fmt" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/tool" -) - -// foldingRanges implements the folding_ranges verb for gopls -type foldingRanges struct { - app *Application -} - -func (r *foldingRanges) Name() string { return "folding_ranges" } -func (r *foldingRanges) Parent() string { return r.app.Name() } -func (r *foldingRanges) Usage() string { return "" } -func (r *foldingRanges) ShortHelp() string { return "display selected file's folding ranges" } -func (r *foldingRanges) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ` -Example: - - $ gopls folding_ranges helper/helper.go -`) - printFlagDefaults(f) -} - -func (r *foldingRanges) Run(ctx context.Context, args ...string) error { - if len(args) != 1 { - return tool.CommandLineErrorf("folding_ranges expects 1 argument (file)") - } - - conn, err := r.app.connect(ctx, nil) - if err != nil { - return err - } - defer conn.terminate(ctx) - - from := span.Parse(args[0]) - if _, err := conn.openFile(ctx, from.URI()); err != nil { - return err - } - - p := protocol.FoldingRangeParams{ - TextDocument: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(from.URI()), - }, - } - - ranges, err := conn.FoldingRange(ctx, &p) - if err != nil { - return err - } - - for _, r := range ranges { - fmt.Printf("%v:%v-%v:%v\n", - r.StartLine+1, - r.StartCharacter+1, - r.EndLine+1, - r.EndCharacter+1, - ) - } - - return nil -} diff --git a/gopls/internal/lsp/cmd/format.go b/gopls/internal/lsp/cmd/format.go deleted file mode 100644 index 
73a8d7f582b..00000000000 --- a/gopls/internal/lsp/cmd/format.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cmd - -import ( - "context" - "flag" - "fmt" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" -) - -// format implements the format verb for gopls. -type format struct { - EditFlags - app *Application -} - -func (c *format) Name() string { return "format" } -func (c *format) Parent() string { return c.app.Name() } -func (c *format) Usage() string { return "[format-flags] " } -func (c *format) ShortHelp() string { return "format the code according to the go standard" } -func (c *format) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ` -The arguments supplied may be simple file names, or ranges within files. - -Example: reformat this file: - - $ gopls format -w internal/lsp/cmd/check.go - -format-flags: -`) - printFlagDefaults(f) -} - -// Run performs the check on the files specified by args and prints the -// results to stdout. 
-func (c *format) Run(ctx context.Context, args ...string) error { - if len(args) == 0 { - return nil - } - c.app.editFlags = &c.EditFlags - conn, err := c.app.connect(ctx, nil) - if err != nil { - return err - } - defer conn.terminate(ctx) - for _, arg := range args { - spn := span.Parse(arg) - file, err := conn.openFile(ctx, spn.URI()) - if err != nil { - return err - } - loc, err := file.mapper.SpanLocation(spn) - if err != nil { - return err - } - if loc.Range.Start != loc.Range.End { - return fmt.Errorf("only full file formatting supported") - } - p := protocol.DocumentFormattingParams{ - TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, - } - edits, err := conn.Formatting(ctx, &p) - if err != nil { - return fmt.Errorf("%v: %v", spn, err) - } - if err := applyTextEdits(file.mapper, edits, c.app.editFlags); err != nil { - return err - } - } - return nil -} diff --git a/gopls/internal/lsp/cmd/highlight.go b/gopls/internal/lsp/cmd/highlight.go deleted file mode 100644 index 8c1ceca6d03..00000000000 --- a/gopls/internal/lsp/cmd/highlight.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cmd - -import ( - "context" - "flag" - "fmt" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/tool" -) - -// highlight implements the highlight verb for gopls. 
-type highlight struct { - app *Application -} - -func (r *highlight) Name() string { return "highlight" } -func (r *highlight) Parent() string { return r.app.Name() } -func (r *highlight) Usage() string { return "" } -func (r *highlight) ShortHelp() string { return "display selected identifier's highlights" } -func (r *highlight) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ` -Example: - - $ # 1-indexed location (:line:column or :#offset) of the target identifier - $ gopls highlight helper/helper.go:8:6 - $ gopls highlight helper/helper.go:#53 -`) - printFlagDefaults(f) -} - -func (r *highlight) Run(ctx context.Context, args ...string) error { - if len(args) != 1 { - return tool.CommandLineErrorf("highlight expects 1 argument (position)") - } - - conn, err := r.app.connect(ctx, nil) - if err != nil { - return err - } - defer conn.terminate(ctx) - - from := span.Parse(args[0]) - file, err := conn.openFile(ctx, from.URI()) - if err != nil { - return err - } - - loc, err := file.mapper.SpanLocation(from) - if err != nil { - return err - } - - p := protocol.DocumentHighlightParams{ - TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), - } - highlights, err := conn.DocumentHighlight(ctx, &p) - if err != nil { - return err - } - - var results []span.Span - for _, h := range highlights { - s, err := file.mapper.RangeSpan(h.Range) - if err != nil { - return err - } - results = append(results, s) - } - // Sort results to make tests deterministic since DocumentHighlight uses a map. - span.SortSpans(results) - - for _, s := range results { - fmt.Println(s) - } - return nil -} diff --git a/gopls/internal/lsp/cmd/implementation.go b/gopls/internal/lsp/cmd/implementation.go deleted file mode 100644 index ceffc0fecb1..00000000000 --- a/gopls/internal/lsp/cmd/implementation.go +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cmd - -import ( - "context" - "flag" - "fmt" - "sort" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/tool" -) - -// implementation implements the implementation verb for gopls -type implementation struct { - app *Application -} - -func (i *implementation) Name() string { return "implementation" } -func (i *implementation) Parent() string { return i.app.Name() } -func (i *implementation) Usage() string { return "" } -func (i *implementation) ShortHelp() string { return "display selected identifier's implementation" } -func (i *implementation) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ` -Example: - - $ # 1-indexed location (:line:column or :#offset) of the target identifier - $ gopls implementation helper/helper.go:8:6 - $ gopls implementation helper/helper.go:#53 -`) - printFlagDefaults(f) -} - -func (i *implementation) Run(ctx context.Context, args ...string) error { - if len(args) != 1 { - return tool.CommandLineErrorf("implementation expects 1 argument (position)") - } - - conn, err := i.app.connect(ctx, nil) - if err != nil { - return err - } - defer conn.terminate(ctx) - - from := span.Parse(args[0]) - file, err := conn.openFile(ctx, from.URI()) - if err != nil { - return err - } - - loc, err := file.mapper.SpanLocation(from) - if err != nil { - return err - } - - p := protocol.ImplementationParams{ - TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), - } - implementations, err := conn.Implementation(ctx, &p) - if err != nil { - return err - } - - var spans []string - for _, impl := range implementations { - f, err := conn.openFile(ctx, fileURI(impl.URI)) - if err != nil { - return err - } - span, err := f.mapper.LocationSpan(impl) - if err != nil { - return err - } - spans = append(spans, fmt.Sprint(span)) - } - 
sort.Strings(spans) - - for _, s := range spans { - fmt.Println(s) - } - - return nil -} diff --git a/gopls/internal/lsp/cmd/imports.go b/gopls/internal/lsp/cmd/imports.go deleted file mode 100644 index d014d03881e..00000000000 --- a/gopls/internal/lsp/cmd/imports.go +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cmd - -import ( - "context" - "flag" - "fmt" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/tool" -) - -// imports implements the import verb for gopls. -type imports struct { - EditFlags - app *Application -} - -func (t *imports) Name() string { return "imports" } -func (t *imports) Parent() string { return t.app.Name() } -func (t *imports) Usage() string { return "[imports-flags] " } -func (t *imports) ShortHelp() string { return "updates import statements" } -func (t *imports) DetailedHelp(f *flag.FlagSet) { - fmt.Fprintf(f.Output(), ` -Example: update imports statements in a file: - - $ gopls imports -w internal/lsp/cmd/check.go - -imports-flags: -`) - printFlagDefaults(f) -} - -// Run performs diagnostic checks on the file specified and either; -// - if -w is specified, updates the file in place; -// - if -d is specified, prints out unified diffs of the changes; or -// - otherwise, prints the new versions to stdout. 
-func (t *imports) Run(ctx context.Context, args ...string) error { - if len(args) != 1 { - return tool.CommandLineErrorf("imports expects 1 argument") - } - t.app.editFlags = &t.EditFlags - conn, err := t.app.connect(ctx, nil) - if err != nil { - return err - } - defer conn.terminate(ctx) - - from := span.Parse(args[0]) - uri := from.URI() - file, err := conn.openFile(ctx, uri) - if err != nil { - return err - } - actions, err := conn.CodeAction(ctx, &protocol.CodeActionParams{ - TextDocument: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(uri), - }, - }) - if err != nil { - return fmt.Errorf("%v: %v", from, err) - } - var edits []protocol.TextEdit - for _, a := range actions { - if a.Title != "Organize Imports" { - continue - } - for _, c := range a.Edit.DocumentChanges { - if c.TextDocumentEdit != nil { - if fileURI(c.TextDocumentEdit.TextDocument.URI) == uri { - edits = append(edits, c.TextDocumentEdit.Edits...) - } - } - } - } - return applyTextEdits(file.mapper, edits, t.app.editFlags) -} diff --git a/gopls/internal/lsp/cmd/info.go b/gopls/internal/lsp/cmd/info.go deleted file mode 100644 index b0f08bbef67..00000000000 --- a/gopls/internal/lsp/cmd/info.go +++ /dev/null @@ -1,311 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cmd - -import ( - "bytes" - "context" - "encoding/json" - "flag" - "fmt" - "net/url" - "os" - "sort" - "strings" - - goplsbug "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/browser" - "golang.org/x/tools/gopls/internal/lsp/debug" - "golang.org/x/tools/gopls/internal/lsp/filecache" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/tool" -) - -// help implements the help command. 
-type help struct { - app *Application -} - -func (h *help) Name() string { return "help" } -func (h *help) Parent() string { return h.app.Name() } -func (h *help) Usage() string { return "" } -func (h *help) ShortHelp() string { return "print usage information for subcommands" } -func (h *help) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ` - -Examples: -$ gopls help # main gopls help message -$ gopls help remote # help on 'remote' command -$ gopls help remote sessions # help on 'remote sessions' subcommand -`) - printFlagDefaults(f) -} - -// Run prints help information about a subcommand. -func (h *help) Run(ctx context.Context, args ...string) error { - find := func(cmds []tool.Application, name string) tool.Application { - for _, cmd := range cmds { - if cmd.Name() == name { - return cmd - } - } - return nil - } - - // Find the subcommand denoted by args (empty => h.app). - var cmd tool.Application = h.app - for i, arg := range args { - cmd = find(getSubcommands(cmd), arg) - if cmd == nil { - return tool.CommandLineErrorf( - "no such subcommand: %s", strings.Join(args[:i+1], " ")) - } - } - - // 'gopls help cmd subcmd' is equivalent to 'gopls cmd subcmd -h'. - // The flag package prints the usage information (defined by tool.Run) - // when it sees the -h flag. - fs := flag.NewFlagSet(cmd.Name(), flag.ExitOnError) - return tool.Run(ctx, fs, h.app, append(args[:len(args):len(args)], "-h")) -} - -// version implements the version command. -type version struct { - JSON bool `flag:"json" help:"outputs in json format."` - - app *Application -} - -func (v *version) Name() string { return "version" } -func (v *version) Parent() string { return v.app.Name() } -func (v *version) Usage() string { return "" } -func (v *version) ShortHelp() string { return "print the gopls version information" } -func (v *version) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ``) - printFlagDefaults(f) -} - -// Run prints version information to stdout. 
-func (v *version) Run(ctx context.Context, args ...string) error { - var mode = debug.PlainText - if v.JSON { - mode = debug.JSON - } - - return debug.PrintVersionInfo(ctx, os.Stdout, v.app.verbose(), mode) -} - -// bug implements the bug command. -type bug struct { - app *Application -} - -func (b *bug) Name() string { return "bug" } -func (b *bug) Parent() string { return b.app.Name() } -func (b *bug) Usage() string { return "" } -func (b *bug) ShortHelp() string { return "report a bug in gopls" } -func (b *bug) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ``) - printFlagDefaults(f) -} - -const goplsBugPrefix = "x/tools/gopls: " -const goplsBugHeader = `ATTENTION: Please answer these questions BEFORE submitting your issue. Thanks! - -#### What did you do? -If possible, provide a recipe for reproducing the error. -A complete runnable program is good. -A link on play.golang.org is better. -A failing unit test is the best. - -#### What did you expect to see? - - -#### What did you see instead? - - -` - -// Run collects some basic information and then prepares an issue ready to -// be reported. -func (b *bug) Run(ctx context.Context, args ...string) error { - // This undocumented environment variable allows - // the cmd integration test (and maintainers) to - // trigger a call to bug.Report. - if msg := os.Getenv("TEST_GOPLS_BUG"); msg != "" { - filecache.Start() // register bug handler - goplsbug.Report(msg) - return nil - } - - // Enumerate bug reports, grouped and sorted. - _, reports := filecache.BugReports() - sort.Slice(reports, func(i, j int) bool { - x, y := reports[i], reports[i] - if x.Key != y.Key { - return x.Key < y.Key // ascending key order - } - return y.AtTime.Before(x.AtTime) // most recent first - }) - keyDenom := make(map[string]int) // key is "file:line" - for _, report := range reports { - keyDenom[report.Key]++ - } - - // Privacy: the content of 'public' will be posted to GitHub - // to populate an issue textarea. 
Even though the user must - // submit the form to share the information with the world, - // merely populating the form causes us to share the - // information with GitHub itself. - // - // For that reason, we cannot write private information to - // public, such as bug reports, which may quote source code. - public := &bytes.Buffer{} - fmt.Fprint(public, goplsBugHeader) - if len(reports) > 0 { - fmt.Fprintf(public, "#### Internal errors\n\n") - fmt.Fprintf(public, "Gopls detected %d internal errors, %d distinct:\n", - len(reports), len(keyDenom)) - for key, denom := range keyDenom { - fmt.Fprintf(public, "- %s (%d)\n", key, denom) - } - fmt.Fprintf(public, "\nPlease copy the full information printed by `gopls bug` here, if you are comfortable sharing it.\n\n") - } - debug.PrintVersionInfo(ctx, public, true, debug.Markdown) - body := public.String() - title := strings.Join(args, " ") - if !strings.HasPrefix(title, goplsBugPrefix) { - title = goplsBugPrefix + title - } - if !browser.Open("https://github.com/golang/go/issues/new?title=" + url.QueryEscape(title) + "&body=" + url.QueryEscape(body)) { - fmt.Print("Please file a new issue at golang.org/issue/new using this template:\n\n") - fmt.Print(body) - } - - // Print bug reports to stdout (not GitHub). - keyNum := make(map[string]int) - for _, report := range reports { - fmt.Printf("-- %v -- \n", report.AtTime) - - // Append seq number (e.g. " (1/2)") for repeated keys. - var seq string - if denom := keyDenom[report.Key]; denom > 1 { - keyNum[report.Key]++ - seq = fmt.Sprintf(" (%d/%d)", keyNum[report.Key], denom) - } - - // Privacy: - // - File and Stack may contain the name of the user that built gopls. - // - Description may contain names of the user's packages/files/symbols. 
- fmt.Printf("%s:%d: %s%s\n\n", report.File, report.Line, report.Description, seq) - fmt.Printf("%s\n\n", report.Stack) - } - if len(reports) > 0 { - fmt.Printf("Please copy the above information into the GitHub issue, if you are comfortable sharing it.\n") - } - - return nil -} - -type apiJSON struct { - app *Application -} - -func (j *apiJSON) Name() string { return "api-json" } -func (j *apiJSON) Parent() string { return j.app.Name() } -func (j *apiJSON) Usage() string { return "" } -func (j *apiJSON) ShortHelp() string { return "print json describing gopls API" } -func (j *apiJSON) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ``) - printFlagDefaults(f) -} - -func (j *apiJSON) Run(ctx context.Context, args ...string) error { - js, err := json.MarshalIndent(source.GeneratedAPIJSON, "", "\t") - if err != nil { - return err - } - fmt.Fprint(os.Stdout, string(js)) - return nil -} - -type licenses struct { - app *Application -} - -func (l *licenses) Name() string { return "licenses" } -func (l *licenses) Parent() string { return l.app.Name() } -func (l *licenses) Usage() string { return "" } -func (l *licenses) ShortHelp() string { return "print licenses of included software" } -func (l *licenses) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ``) - printFlagDefaults(f) -} - -const licensePreamble = ` -gopls is made available under the following BSD-style license: - -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. 
nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -gopls implements the LSP specification, which is made available under the following license: - -Copyright (c) Microsoft Corporation - -All rights reserved. - -MIT License - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, -modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT -OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -gopls also includes software made available under these licenses: -` - -func (l *licenses) Run(ctx context.Context, args ...string) error { - opts := source.DefaultOptions(l.app.options) - txt := licensePreamble - if opts.LicensesText == "" { - txt += "(development gopls, license information not available)" - } else { - txt += opts.LicensesText - } - fmt.Fprint(os.Stdout, txt) - return nil -} diff --git a/gopls/internal/lsp/cmd/references.go b/gopls/internal/lsp/cmd/references.go deleted file mode 100644 index e340d33f2f2..00000000000 --- a/gopls/internal/lsp/cmd/references.go +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package cmd - -import ( - "context" - "flag" - "fmt" - "sort" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/tool" -) - -// references implements the references verb for gopls -type references struct { - IncludeDeclaration bool `flag:"d,declaration" help:"include the declaration of the specified identifier in the results"` - - app *Application -} - -func (r *references) Name() string { return "references" } -func (r *references) Parent() string { return r.app.Name() } -func (r *references) Usage() string { return "[references-flags] " } -func (r *references) ShortHelp() string { return "display selected identifier's references" } -func (r *references) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ` -Example: - - $ # 1-indexed location (:line:column or :#offset) of the target identifier - $ gopls references helper/helper.go:8:6 - $ gopls references helper/helper.go:#53 - -references-flags: -`) - printFlagDefaults(f) -} - -func (r *references) Run(ctx context.Context, args ...string) error { - if len(args) != 1 { - return tool.CommandLineErrorf("references expects 1 argument (position)") - } - - conn, err := r.app.connect(ctx, nil) - if err != nil { - return err - } - defer conn.terminate(ctx) - - from := span.Parse(args[0]) - file, err := conn.openFile(ctx, from.URI()) - if err != nil { - return err - } - loc, err := file.mapper.SpanLocation(from) - if err != nil { - return err - } - p := protocol.ReferenceParams{ - Context: protocol.ReferenceContext{ - IncludeDeclaration: r.IncludeDeclaration, - }, - TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), - } - locations, err := conn.References(ctx, &p) - if err != nil { - return err - } - var spans []string - for _, l := range locations { - f, err := conn.openFile(ctx, fileURI(l.URI)) - if err != nil { - return err - } - // convert location to span for user-friendly 1-indexed line - // and column 
numbers - span, err := f.mapper.LocationSpan(l) - if err != nil { - return err - } - spans = append(spans, fmt.Sprint(span)) - } - - sort.Strings(spans) - for _, s := range spans { - fmt.Println(s) - } - return nil -} diff --git a/gopls/internal/lsp/cmd/rename.go b/gopls/internal/lsp/cmd/rename.go deleted file mode 100644 index 5ad7aa44494..00000000000 --- a/gopls/internal/lsp/cmd/rename.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cmd - -import ( - "context" - "flag" - "fmt" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/tool" -) - -// rename implements the rename verb for gopls. -type rename struct { - EditFlags - app *Application -} - -func (r *rename) Name() string { return "rename" } -func (r *rename) Parent() string { return r.app.Name() } -func (r *rename) Usage() string { return "[rename-flags] " } -func (r *rename) ShortHelp() string { return "rename selected identifier" } -func (r *rename) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ` -Example: - - $ # 1-based location (:line:column or :#position) of the thing to change - $ gopls rename helper/helper.go:8:6 Foo - $ gopls rename helper/helper.go:#53 Foo - -rename-flags: -`) - printFlagDefaults(f) -} - -// Run renames the specified identifier and either; -// - if -w is specified, updates the file(s) in place; -// - if -d is specified, prints out unified diffs of the changes; or -// - otherwise, prints the new versions to stdout. 
-func (r *rename) Run(ctx context.Context, args ...string) error { - if len(args) != 2 { - return tool.CommandLineErrorf("definition expects 2 arguments (position, new name)") - } - r.app.editFlags = &r.EditFlags - conn, err := r.app.connect(ctx, nil) - if err != nil { - return err - } - defer conn.terminate(ctx) - - from := span.Parse(args[0]) - file, err := conn.openFile(ctx, from.URI()) - if err != nil { - return err - } - loc, err := file.mapper.SpanLocation(from) - if err != nil { - return err - } - p := protocol.RenameParams{ - TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, - Position: loc.Range.Start, - NewName: args[1], - } - edit, err := conn.Rename(ctx, &p) - if err != nil { - return err - } - return conn.client.applyWorkspaceEdit(edit) -} diff --git a/gopls/internal/lsp/cmd/serve.go b/gopls/internal/lsp/cmd/serve.go deleted file mode 100644 index a04e6dc75d3..00000000000 --- a/gopls/internal/lsp/cmd/serve.go +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cmd - -import ( - "context" - "errors" - "flag" - "fmt" - "io" - "log" - "os" - "time" - - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/debug" - "golang.org/x/tools/gopls/internal/lsp/lsprpc" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/fakenet" - "golang.org/x/tools/internal/jsonrpc2" - "golang.org/x/tools/internal/tool" -) - -// Serve is a struct that exposes the configurable parts of the LSP server as -// flags, in the right form for tool.Main to consume. -type Serve struct { - Logfile string `flag:"logfile" help:"filename to log to. 
if value is \"auto\", then logging to a default output file is enabled"` - Mode string `flag:"mode" help:"no effect"` - Port int `flag:"port" help:"port on which to run gopls for debugging purposes"` - Address string `flag:"listen" help:"address on which to listen for remote connections. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. Otherwise, TCP is used."` - IdleTimeout time.Duration `flag:"listen.timeout" help:"when used with -listen, shut down the server when there are no connected clients for this duration"` - Trace bool `flag:"rpc.trace" help:"print the full rpc trace in lsp inspector format"` - Debug string `flag:"debug" help:"serve debug information on the supplied address"` - - RemoteListenTimeout time.Duration `flag:"remote.listen.timeout" help:"when used with -remote=auto, the -listen.timeout value used to start the daemon"` - RemoteDebug string `flag:"remote.debug" help:"when used with -remote=auto, the -debug value used to start the daemon"` - RemoteLogfile string `flag:"remote.logfile" help:"when used with -remote=auto, the -logfile value used to start the daemon"` - - app *Application -} - -func (s *Serve) Name() string { return "serve" } -func (s *Serve) Parent() string { return s.app.Name() } -func (s *Serve) Usage() string { return "[server-flags]" } -func (s *Serve) ShortHelp() string { - return "run a server for Go code using the Language Server Protocol" -} -func (s *Serve) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ` gopls [flags] [server-flags] - -The server communicates using JSONRPC2 on stdin and stdout, and is intended to be run directly as -a child of an editor process. 
- -server-flags: -`) - printFlagDefaults(f) -} - -func (s *Serve) remoteArgs(network, address string) []string { - args := []string{"serve", - "-listen", fmt.Sprintf(`%s;%s`, network, address), - } - if s.RemoteDebug != "" { - args = append(args, "-debug", s.RemoteDebug) - } - if s.RemoteListenTimeout != 0 { - args = append(args, "-listen.timeout", s.RemoteListenTimeout.String()) - } - if s.RemoteLogfile != "" { - args = append(args, "-logfile", s.RemoteLogfile) - } - return args -} - -// Run configures a server based on the flags, and then runs it. -// It blocks until the server shuts down. -func (s *Serve) Run(ctx context.Context, args ...string) error { - if len(args) > 0 { - return tool.CommandLineErrorf("server does not take arguments, got %v", args) - } - - di := debug.GetInstance(ctx) - isDaemon := s.Address != "" || s.Port != 0 - if di != nil { - closeLog, err := di.SetLogFile(s.Logfile, isDaemon) - if err != nil { - return err - } - defer closeLog() - di.ServerAddress = s.Address - di.Serve(ctx, s.Debug) - } - var ss jsonrpc2.StreamServer - if s.app.Remote != "" { - var err error - ss, err = lsprpc.NewForwarder(s.app.Remote, s.remoteArgs) - if err != nil { - return fmt.Errorf("creating forwarder: %w", err) - } - } else { - ss = lsprpc.NewStreamServer(cache.New(nil), isDaemon, s.app.options) - } - - var network, addr string - if s.Address != "" { - network, addr = lsprpc.ParseAddr(s.Address) - } - if s.Port != 0 { - network = "tcp" - // TODO(adonovan): should gopls ever be listening on network - // sockets, or only local ones? - // - // Ian says this was added in anticipation of - // something related to "VS Code remote" that turned - // out to be unnecessary. So I propose we limit it to - // localhost, if only so that we avoid the macOS - // firewall prompt. - // - // Hana says: "s.Address is for the remote access (LSP) - // and s.Port is for debugging purpose (according to - // the Server type documentation). 
I am not sure why the - // existing code here is mixing up and overwriting addr. - // For debugging endpoint, I think localhost makes perfect sense." - // - // TODO(adonovan): disentangle Address and Port, - // and use only localhost for the latter. - addr = fmt.Sprintf(":%v", s.Port) - } - if addr != "" { - log.Printf("Gopls daemon: listening on %s network, address %s...", network, addr) - defer log.Printf("Gopls daemon: exiting") - return jsonrpc2.ListenAndServe(ctx, network, addr, ss, s.IdleTimeout) - } - stream := jsonrpc2.NewHeaderStream(fakenet.NewConn("stdio", os.Stdin, os.Stdout)) - if s.Trace && di != nil { - stream = protocol.LoggingStream(stream, di.LogWriter) - } - conn := jsonrpc2.NewConn(stream) - err := ss.ServeStream(ctx, conn) - if errors.Is(err, io.EOF) { - return nil - } - return err -} diff --git a/gopls/internal/lsp/cmd/symbols.go b/gopls/internal/lsp/cmd/symbols.go deleted file mode 100644 index a6045d741ad..00000000000 --- a/gopls/internal/lsp/cmd/symbols.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package cmd - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "sort" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/tool" -) - -// symbols implements the symbols verb for gopls -type symbols struct { - app *Application -} - -func (r *symbols) Name() string { return "symbols" } -func (r *symbols) Parent() string { return r.app.Name() } -func (r *symbols) Usage() string { return "" } -func (r *symbols) ShortHelp() string { return "display selected file's symbols" } -func (r *symbols) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ` -Example: - $ gopls symbols helper/helper.go -`) - printFlagDefaults(f) -} -func (r *symbols) Run(ctx context.Context, args ...string) error { - if len(args) != 1 { - return tool.CommandLineErrorf("symbols expects 1 argument (position)") - } - - conn, err := r.app.connect(ctx, nil) - if err != nil { - return err - } - defer conn.terminate(ctx) - - from := span.Parse(args[0]) - p := protocol.DocumentSymbolParams{ - TextDocument: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(from.URI()), - }, - } - symbols, err := conn.DocumentSymbol(ctx, &p) - if err != nil { - return err - } - for _, s := range symbols { - if m, ok := s.(map[string]interface{}); ok { - s, err = mapToSymbol(m) - if err != nil { - return err - } - } - switch t := s.(type) { - case protocol.DocumentSymbol: - printDocumentSymbol(t) - case protocol.SymbolInformation: - printSymbolInformation(t) - } - } - return nil -} - -func mapToSymbol(m map[string]interface{}) (interface{}, error) { - b, err := json.Marshal(m) - if err != nil { - return nil, err - } - - if _, ok := m["selectionRange"]; ok { - var s protocol.DocumentSymbol - if err := json.Unmarshal(b, &s); err != nil { - return nil, err - } - return s, nil - } - - var s protocol.SymbolInformation - if err := json.Unmarshal(b, &s); err != nil { - return nil, err - } - return s, nil -} - -func 
printDocumentSymbol(s protocol.DocumentSymbol) { - fmt.Printf("%s %s %s\n", s.Name, s.Kind, positionToString(s.SelectionRange)) - // Sort children for consistency - sort.Slice(s.Children, func(i, j int) bool { - return s.Children[i].Name < s.Children[j].Name - }) - for _, c := range s.Children { - fmt.Printf("\t%s %s %s\n", c.Name, c.Kind, positionToString(c.SelectionRange)) - } -} - -func printSymbolInformation(s protocol.SymbolInformation) { - fmt.Printf("%s %s %s\n", s.Name, s.Kind, positionToString(s.Location.Range)) -} - -func positionToString(r protocol.Range) string { - return fmt.Sprintf("%v:%v-%v:%v", - r.Start.Line+1, - r.Start.Character+1, - r.End.Line+1, - r.End.Character+1, - ) -} diff --git a/gopls/internal/lsp/cmd/usage/api-json.hlp b/gopls/internal/lsp/cmd/usage/api-json.hlp deleted file mode 100644 index cb9fbfbea9d..00000000000 --- a/gopls/internal/lsp/cmd/usage/api-json.hlp +++ /dev/null @@ -1,4 +0,0 @@ -print json describing gopls API - -Usage: - gopls [flags] api-json diff --git a/gopls/internal/lsp/cmd/usage/definition.hlp b/gopls/internal/lsp/cmd/usage/definition.hlp deleted file mode 100644 index 500e6c9a417..00000000000 --- a/gopls/internal/lsp/cmd/usage/definition.hlp +++ /dev/null @@ -1,15 +0,0 @@ -show declaration of selected identifier - -Usage: - gopls [flags] definition [definition-flags] - -Example: show the definition of the identifier at syntax at offset 44 in this file (flag.FlagSet): - - $ gopls definition internal/lsp/cmd/definition.go:44:47 - $ gopls definition internal/lsp/cmd/definition.go:#1270 - -definition-flags: - -json - emit output in JSON format - -markdown - support markdown in responses diff --git a/gopls/internal/lsp/cmd/workspace_symbol.go b/gopls/internal/lsp/cmd/workspace_symbol.go deleted file mode 100644 index 520d6bce810..00000000000 --- a/gopls/internal/lsp/cmd/workspace_symbol.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cmd - -import ( - "context" - "flag" - "fmt" - "strings" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/tool" -) - -// workspaceSymbol implements the workspace_symbol verb for gopls. -type workspaceSymbol struct { - Matcher string `flag:"matcher" help:"specifies the type of matcher: fuzzy, fastfuzzy, casesensitive, or caseinsensitive.\nThe default is caseinsensitive."` - - app *Application -} - -func (r *workspaceSymbol) Name() string { return "workspace_symbol" } -func (r *workspaceSymbol) Parent() string { return r.app.Name() } -func (r *workspaceSymbol) Usage() string { return "[workspace_symbol-flags] " } -func (r *workspaceSymbol) ShortHelp() string { return "search symbols in workspace" } -func (r *workspaceSymbol) DetailedHelp(f *flag.FlagSet) { - fmt.Fprint(f.Output(), ` -Example: - - $ gopls workspace_symbol -matcher fuzzy 'wsymbols' - -workspace_symbol-flags: -`) - printFlagDefaults(f) -} - -func (r *workspaceSymbol) Run(ctx context.Context, args ...string) error { - if len(args) != 1 { - return tool.CommandLineErrorf("workspace_symbol expects 1 argument") - } - - opts := r.app.options - r.app.options = func(o *source.Options) { - if opts != nil { - opts(o) - } - switch strings.ToLower(r.Matcher) { - case "fuzzy": - o.SymbolMatcher = source.SymbolFuzzy - case "casesensitive": - o.SymbolMatcher = source.SymbolCaseSensitive - case "fastfuzzy": - o.SymbolMatcher = source.SymbolFastFuzzy - default: - o.SymbolMatcher = source.SymbolCaseInsensitive - } - } - - conn, err := r.app.connect(ctx, nil) - if err != nil { - return err - } - defer conn.terminate(ctx) - - p := protocol.WorkspaceSymbolParams{ - Query: args[0], - } - - symbols, err := conn.Symbol(ctx, &p) - if err != nil { - return err - } - for _, s := range symbols { - f, err := conn.openFile(ctx, 
fileURI(s.Location.URI)) - if err != nil { - return err - } - span, err := f.mapper.LocationSpan(s.Location) - if err != nil { - return err - } - fmt.Printf("%s %s %s\n", span, s.Name, s.Kind) - } - - return nil -} diff --git a/gopls/internal/lsp/code_action.go b/gopls/internal/lsp/code_action.go deleted file mode 100644 index 8978ab5bfed..00000000000 --- a/gopls/internal/lsp/code_action.go +++ /dev/null @@ -1,715 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - "fmt" - "go/ast" - "sort" - "strings" - - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/analysis/fillstruct" - "golang.org/x/tools/gopls/internal/lsp/analysis/infertypeargs" - "golang.org/x/tools/gopls/internal/lsp/analysis/stubmethods" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/mod" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" - "golang.org/x/tools/internal/imports" -) - -func (s *Server) codeAction(ctx context.Context, params *protocol.CodeActionParams) ([]protocol.CodeAction, error) { - ctx, done := event.Start(ctx, "lsp.Server.codeAction") - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.UnknownKind) - defer release() - if !ok { - return nil, err - } - uri := fh.URI() - - // Determine the supported actions for this file kind. 
- kind := snapshot.FileKind(fh) - supportedCodeActions, ok := snapshot.Options().SupportedCodeActions[kind] - if !ok { - return nil, fmt.Errorf("no supported code actions for %v file kind", kind) - } - if len(supportedCodeActions) == 0 { - return nil, nil // not an error if there are none supported - } - - // The Only field of the context specifies which code actions the client wants. - // If Only is empty, assume that the client wants all of the non-explicit code actions. - var want map[protocol.CodeActionKind]bool - { - // Explicit Code Actions are opt-in and shouldn't be returned to the client unless - // requested using Only. - // TODO: Add other CodeLenses such as GoGenerate, RegenerateCgo, etc.. - explicit := map[protocol.CodeActionKind]bool{ - protocol.GoTest: true, - } - - if len(params.Context.Only) == 0 { - want = supportedCodeActions - } else { - want = make(map[protocol.CodeActionKind]bool) - for _, only := range params.Context.Only { - for k, v := range supportedCodeActions { - if only == k || strings.HasPrefix(string(k), string(only)+".") { - want[k] = want[k] || v - } - } - want[only] = want[only] || explicit[only] - } - } - } - if len(want) == 0 { - return nil, fmt.Errorf("no supported code action to execute for %s, wanted %v", uri, params.Context.Only) - } - - switch kind { - case source.Mod: - var actions []protocol.CodeAction - - fixes, err := s.codeActionsMatchingDiagnostics(ctx, fh.URI(), snapshot, params.Context.Diagnostics, want) - if err != nil { - return nil, err - } - - // Group vulnerability fixes by their range, and select only the most - // appropriate upgrades. - // - // TODO(rfindley): can this instead be accomplished on the diagnosis side, - // so that code action handling remains uniform? 
- vulnFixes := make(map[protocol.Range][]protocol.CodeAction) - searchFixes: - for _, fix := range fixes { - for _, diag := range fix.Diagnostics { - if diag.Source == string(source.Govulncheck) || diag.Source == string(source.Vulncheck) { - vulnFixes[diag.Range] = append(vulnFixes[diag.Range], fix) - continue searchFixes - } - } - actions = append(actions, fix) - } - - for _, fixes := range vulnFixes { - fixes = mod.SelectUpgradeCodeActions(fixes) - actions = append(actions, fixes...) - } - - return actions, nil - - case source.Go: - diagnostics := params.Context.Diagnostics - - // Don't suggest fixes for generated files, since they are generally - // not useful and some editors may apply them automatically on save. - if source.IsGenerated(ctx, snapshot, uri) { - return nil, nil - } - - actions, err := s.codeActionsMatchingDiagnostics(ctx, uri, snapshot, diagnostics, want) - if err != nil { - return nil, err - } - - // Only compute quick fixes if there are any diagnostics to fix. - wantQuickFixes := want[protocol.QuickFix] && len(diagnostics) > 0 - - // Code actions requiring syntax information alone. - if wantQuickFixes || want[protocol.SourceOrganizeImports] || want[protocol.RefactorExtract] { - pgf, err := snapshot.ParseGo(ctx, fh, source.ParseFull) - if err != nil { - return nil, err - } - - // Process any missing imports and pair them with the diagnostics they - // fix. - if wantQuickFixes || want[protocol.SourceOrganizeImports] { - importEdits, importEditsPerFix, err := source.AllImportsFixes(ctx, snapshot, pgf) - if err != nil { - event.Error(ctx, "imports fixes", err, tag.File.Of(fh.URI().Filename())) - importEdits = nil - importEditsPerFix = nil - } - - // Separate this into a set of codeActions per diagnostic, where - // each action is the addition, removal, or renaming of one import. 
- if wantQuickFixes { - for _, importFix := range importEditsPerFix { - fixed := fixedByImportFix(importFix.Fix, diagnostics) - if len(fixed) == 0 { - continue - } - actions = append(actions, protocol.CodeAction{ - Title: importFixTitle(importFix.Fix), - Kind: protocol.QuickFix, - Edit: &protocol.WorkspaceEdit{ - DocumentChanges: documentChanges(fh, importFix.Edits), - }, - Diagnostics: fixed, - }) - } - } - - // Send all of the import edits as one code action if the file is - // being organized. - if want[protocol.SourceOrganizeImports] && len(importEdits) > 0 { - actions = append(actions, protocol.CodeAction{ - Title: "Organize Imports", - Kind: protocol.SourceOrganizeImports, - Edit: &protocol.WorkspaceEdit{ - DocumentChanges: documentChanges(fh, importEdits), - }, - }) - } - } - - if want[protocol.RefactorExtract] { - extractions, err := refactorExtract(ctx, snapshot, pgf, params.Range) - if err != nil { - return nil, err - } - actions = append(actions, extractions...) - } - } - - var stubMethodsDiagnostics []protocol.Diagnostic - if wantQuickFixes && snapshot.Options().IsAnalyzerEnabled(stubmethods.Analyzer.Name) { - for _, pd := range diagnostics { - if stubmethods.MatchesMessage(pd.Message) { - stubMethodsDiagnostics = append(stubMethodsDiagnostics, pd) - } - } - } - - // Code actions requiring type information. - if len(stubMethodsDiagnostics) > 0 || - want[protocol.RefactorRewrite] || - want[protocol.RefactorInline] || - want[protocol.GoTest] { - pkg, pgf, err := source.NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - for _, pd := range stubMethodsDiagnostics { - start, end, err := pgf.RangePos(pd.Range) - if err != nil { - return nil, err - } - action, ok, err := func() (_ protocol.CodeAction, _ bool, rerr error) { - // golang/go#61693: code actions were refactored to run outside of the - // analysis framework, but as a result they lost their panic recovery. 
- // - // Stubmethods "should never fail"", but put back the panic recovery as a - // defensive measure. - defer func() { - if r := recover(); r != nil { - rerr = bug.Errorf("stubmethods panicked: %v", r) - } - }() - d, ok := stubmethods.DiagnosticForError(pkg.FileSet(), pgf.File, start, end, pd.Message, pkg.GetTypesInfo()) - if !ok { - return protocol.CodeAction{}, false, nil - } - cmd, err := command.NewApplyFixCommand(d.Message, command.ApplyFixArgs{ - URI: protocol.URIFromSpanURI(pgf.URI), - Fix: source.StubMethods, - Range: pd.Range, - }) - if err != nil { - return protocol.CodeAction{}, false, err - } - return protocol.CodeAction{ - Title: d.Message, - Kind: protocol.QuickFix, - Command: &cmd, - Diagnostics: []protocol.Diagnostic{pd}, - }, true, nil - }() - if err != nil { - return nil, err - } - if ok { - actions = append(actions, action) - } - } - - if want[protocol.RefactorRewrite] { - rewrites, err := refactorRewrite(ctx, snapshot, pkg, pgf, fh, params.Range) - if err != nil { - return nil, err - } - actions = append(actions, rewrites...) - } - - if want[protocol.RefactorInline] { - rewrites, err := refactorInline(ctx, snapshot, pkg, pgf, fh, params.Range) - if err != nil { - return nil, err - } - actions = append(actions, rewrites...) - } - - if want[protocol.GoTest] { - fixes, err := goTest(ctx, snapshot, pkg, pgf, params.Range) - if err != nil { - return nil, err - } - actions = append(actions, fixes...) - } - } - - return actions, nil - - default: - // Unsupported file kind for a code action. 
- return nil, nil - } -} - -func (s *Server) findMatchingDiagnostics(uri span.URI, pd protocol.Diagnostic) []*source.Diagnostic { - s.diagnosticsMu.Lock() - defer s.diagnosticsMu.Unlock() - - var sds []*source.Diagnostic - for _, report := range s.diagnostics[uri].reports { - for _, sd := range report.diags { - sameDiagnostic := (pd.Message == strings.TrimSpace(sd.Message) && // extra space may have been trimmed when converting to protocol.Diagnostic - protocol.CompareRange(pd.Range, sd.Range) == 0 && - pd.Source == string(sd.Source)) - - if sameDiagnostic { - sds = append(sds, sd) - } - } - } - return sds -} - -func (s *Server) getSupportedCodeActions() []protocol.CodeActionKind { - allCodeActionKinds := make(map[protocol.CodeActionKind]struct{}) - for _, kinds := range s.Options().SupportedCodeActions { - for kind := range kinds { - allCodeActionKinds[kind] = struct{}{} - } - } - var result []protocol.CodeActionKind - for kind := range allCodeActionKinds { - result = append(result, kind) - } - sort.Slice(result, func(i, j int) bool { - return result[i] < result[j] - }) - return result -} - -func importFixTitle(fix *imports.ImportFix) string { - var str string - switch fix.FixType { - case imports.AddImport: - str = fmt.Sprintf("Add import: %s %q", fix.StmtInfo.Name, fix.StmtInfo.ImportPath) - case imports.DeleteImport: - str = fmt.Sprintf("Delete import: %s %q", fix.StmtInfo.Name, fix.StmtInfo.ImportPath) - case imports.SetImportName: - str = fmt.Sprintf("Rename import: %s %q", fix.StmtInfo.Name, fix.StmtInfo.ImportPath) - } - return str -} - -// fixedByImportFix filters the provided slice of diagnostics to those that -// would be fixed by the provided imports fix. -func fixedByImportFix(fix *imports.ImportFix, diagnostics []protocol.Diagnostic) []protocol.Diagnostic { - var results []protocol.Diagnostic - for _, diagnostic := range diagnostics { - switch { - // "undeclared name: X" may be an unresolved import. 
- case strings.HasPrefix(diagnostic.Message, "undeclared name: "): - ident := strings.TrimPrefix(diagnostic.Message, "undeclared name: ") - if ident == fix.IdentName { - results = append(results, diagnostic) - } - // "undefined: X" may be an unresolved import at Go 1.20+. - case strings.HasPrefix(diagnostic.Message, "undefined: "): - ident := strings.TrimPrefix(diagnostic.Message, "undefined: ") - if ident == fix.IdentName { - results = append(results, diagnostic) - } - // "could not import: X" may be an invalid import. - case strings.HasPrefix(diagnostic.Message, "could not import: "): - ident := strings.TrimPrefix(diagnostic.Message, "could not import: ") - if ident == fix.IdentName { - results = append(results, diagnostic) - } - // "X imported but not used" is an unused import. - // "X imported but not used as Y" is an unused import. - case strings.Contains(diagnostic.Message, " imported but not used"): - idx := strings.Index(diagnostic.Message, " imported but not used") - importPath := diagnostic.Message[:idx] - if importPath == fmt.Sprintf("%q", fix.StmtInfo.ImportPath) { - results = append(results, diagnostic) - } - } - } - return results -} - -func refactorExtract(ctx context.Context, snapshot source.Snapshot, pgf *source.ParsedGoFile, rng protocol.Range) ([]protocol.CodeAction, error) { - if rng.Start == rng.End { - return nil, nil - } - - start, end, err := pgf.RangePos(rng) - if err != nil { - return nil, err - } - puri := protocol.URIFromSpanURI(pgf.URI) - var commands []protocol.Command - if _, ok, methodOk, _ := source.CanExtractFunction(pgf.Tok, start, end, pgf.Src, pgf.File); ok { - cmd, err := command.NewApplyFixCommand("Extract function", command.ApplyFixArgs{ - URI: puri, - Fix: source.ExtractFunction, - Range: rng, - }) - if err != nil { - return nil, err - } - commands = append(commands, cmd) - if methodOk { - cmd, err := command.NewApplyFixCommand("Extract method", command.ApplyFixArgs{ - URI: puri, - Fix: source.ExtractMethod, - Range: rng, - 
}) - if err != nil { - return nil, err - } - commands = append(commands, cmd) - } - } - if _, _, ok, _ := source.CanExtractVariable(start, end, pgf.File); ok { - cmd, err := command.NewApplyFixCommand("Extract variable", command.ApplyFixArgs{ - URI: puri, - Fix: source.ExtractVariable, - Range: rng, - }) - if err != nil { - return nil, err - } - commands = append(commands, cmd) - } - var actions []protocol.CodeAction - for i := range commands { - actions = append(actions, protocol.CodeAction{ - Title: commands[i].Title, - Kind: protocol.RefactorExtract, - Command: &commands[i], - }) - } - return actions, nil -} - -func refactorRewrite(ctx context.Context, snapshot source.Snapshot, pkg source.Package, pgf *source.ParsedGoFile, fh source.FileHandle, rng protocol.Range) (_ []protocol.CodeAction, rerr error) { - // golang/go#61693: code actions were refactored to run outside of the - // analysis framework, but as a result they lost their panic recovery. - // - // These code actions should never fail, but put back the panic recovery as a - // defensive measure. 
- defer func() { - if r := recover(); r != nil { - rerr = bug.Errorf("refactor.rewrite code actions panicked: %v", r) - } - }() - - var actions []protocol.CodeAction - - if canRemoveParameter(pkg, pgf, rng) { - cmd, err := command.NewChangeSignatureCommand("remove unused parameter", command.ChangeSignatureArgs{ - RemoveParameter: protocol.Location{ - URI: protocol.URIFromSpanURI(pgf.URI), - Range: rng, - }, - }) - if err != nil { - return nil, err - } - actions = append(actions, protocol.CodeAction{ - Title: "Refactor: remove unused parameter", - Kind: protocol.RefactorRewrite, - Command: &cmd, - }) - } - - start, end, err := pgf.RangePos(rng) - if err != nil { - return nil, err - } - - var commands []protocol.Command - if _, ok, _ := source.CanInvertIfCondition(pgf.File, start, end); ok { - cmd, err := command.NewApplyFixCommand("Invert if condition", command.ApplyFixArgs{ - URI: protocol.URIFromSpanURI(pgf.URI), - Fix: source.InvertIfCondition, - Range: rng, - }) - if err != nil { - return nil, err - } - commands = append(commands, cmd) - } - - // N.B.: an inspector only pays for itself after ~5 passes, which means we're - // currently not getting a good deal on this inspection. - // - // TODO: Consider removing the inspection after convenienceAnalyzers are removed. 
- inspect := inspector.New([]*ast.File{pgf.File}) - if snapshot.Options().IsAnalyzerEnabled(fillstruct.Analyzer.Name) { - for _, d := range fillstruct.DiagnoseFillableStructs(inspect, start, end, pkg.GetTypes(), pkg.GetTypesInfo()) { - rng, err := pgf.Mapper.PosRange(pgf.Tok, d.Pos, d.End) - if err != nil { - return nil, err - } - cmd, err := command.NewApplyFixCommand(d.Message, command.ApplyFixArgs{ - URI: protocol.URIFromSpanURI(pgf.URI), - Fix: source.FillStruct, - Range: rng, - }) - if err != nil { - return nil, err - } - commands = append(commands, cmd) - } - } - - for i := range commands { - actions = append(actions, protocol.CodeAction{ - Title: commands[i].Title, - Kind: protocol.RefactorRewrite, - Command: &commands[i], - }) - } - - if snapshot.Options().IsAnalyzerEnabled(infertypeargs.Analyzer.Name) { - for _, d := range infertypeargs.DiagnoseInferableTypeArgs(pkg.FileSet(), inspect, start, end, pkg.GetTypes(), pkg.GetTypesInfo()) { - if len(d.SuggestedFixes) != 1 { - panic(fmt.Sprintf("unexpected number of suggested fixes from infertypeargs: %v", len(d.SuggestedFixes))) - } - fix := d.SuggestedFixes[0] - var edits []protocol.TextEdit - for _, analysisEdit := range fix.TextEdits { - rng, err := pgf.Mapper.PosRange(pgf.Tok, analysisEdit.Pos, analysisEdit.End) - if err != nil { - return nil, err - } - edits = append(edits, protocol.TextEdit{ - Range: rng, - NewText: string(analysisEdit.NewText), - }) - } - actions = append(actions, protocol.CodeAction{ - Title: "Simplify type arguments", - Kind: protocol.RefactorRewrite, - Edit: &protocol.WorkspaceEdit{ - DocumentChanges: documentChanges(fh, edits), - }, - }) - } - } - - return actions, nil -} - -// canRemoveParameter reports whether we can remove the function parameter -// indicated by the given [start, end) range. -// -// This is true if: -// - [start, end) is contained within an unused field or parameter name -// - ... of a non-method function declaration. 
-func canRemoveParameter(pkg source.Package, pgf *source.ParsedGoFile, rng protocol.Range) bool { - info := source.FindParam(pgf, rng) - if info.Decl == nil || info.Field == nil { - return false - } - - if info.Decl.Body == nil { - return false // external function - } - - if len(info.Field.Names) == 0 { - return true // no names => field is unused - } - if info.Name == nil { - return false // no name is indicated - } - if info.Name.Name == "_" { - return true // trivially unused - } - - obj := pkg.GetTypesInfo().Defs[info.Name] - if obj == nil { - return false // something went wrong - } - - used := false - ast.Inspect(info.Decl.Body, func(node ast.Node) bool { - if n, ok := node.(*ast.Ident); ok && pkg.GetTypesInfo().Uses[n] == obj { - used = true - } - return !used // keep going until we find a use - }) - return !used -} - -// refactorInline returns inline actions available at the specified range. -func refactorInline(ctx context.Context, snapshot source.Snapshot, pkg source.Package, pgf *source.ParsedGoFile, fh source.FileHandle, rng protocol.Range) ([]protocol.CodeAction, error) { - var commands []protocol.Command - - // If range is within call expression, offer inline action. - if _, fn, err := source.EnclosingStaticCall(pkg, pgf, rng); err == nil { - cmd, err := command.NewApplyFixCommand(fmt.Sprintf("Inline call to %s", fn.Name()), command.ApplyFixArgs{ - URI: protocol.URIFromSpanURI(pgf.URI), - Fix: source.InlineCall, - Range: rng, - }) - if err != nil { - return nil, err - } - commands = append(commands, cmd) - } - - // Convert commands to actions. 
- var actions []protocol.CodeAction - for i := range commands { - actions = append(actions, protocol.CodeAction{ - Title: commands[i].Title, - Kind: protocol.RefactorInline, - Command: &commands[i], - }) - } - return actions, nil -} - -func documentChanges(fh source.FileHandle, edits []protocol.TextEdit) []protocol.DocumentChanges { - return []protocol.DocumentChanges{ - { - TextDocumentEdit: &protocol.TextDocumentEdit{ - TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{ - Version: fh.Version(), - TextDocumentIdentifier: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(fh.URI()), - }, - }, - Edits: nonNilSliceTextEdit(edits), - }, - }, - } -} - -// codeActionsMatchingDiagnostics fetches code actions for the provided -// diagnostics, by first attempting to unmarshal code actions directly from the -// bundled protocol.Diagnostic.Data field, and failing that by falling back on -// fetching a matching source.Diagnostic from the set of stored diagnostics for -// this file. -func (s *Server) codeActionsMatchingDiagnostics(ctx context.Context, uri span.URI, snapshot source.Snapshot, pds []protocol.Diagnostic, want map[protocol.CodeActionKind]bool) ([]protocol.CodeAction, error) { - var actions []protocol.CodeAction - var unbundled []protocol.Diagnostic // diagnostics without bundled code actions in their Data field - for _, pd := range pds { - bundled := source.BundledQuickFixes(pd) - if len(bundled) > 0 { - for _, fix := range bundled { - if want[fix.Kind] { - actions = append(actions, fix) - } - } - } else { - // No bundled actions: keep searching for a match. - unbundled = append(unbundled, pd) - } - } - - for _, pd := range unbundled { - for _, sd := range s.findMatchingDiagnostics(uri, pd) { - diagActions, err := codeActionsForDiagnostic(ctx, snapshot, sd, &pd, want) - if err != nil { - return nil, err - } - actions = append(actions, diagActions...) 
- } - } - return actions, nil -} - -func codeActionsForDiagnostic(ctx context.Context, snapshot source.Snapshot, sd *source.Diagnostic, pd *protocol.Diagnostic, want map[protocol.CodeActionKind]bool) ([]protocol.CodeAction, error) { - var actions []protocol.CodeAction - for _, fix := range sd.SuggestedFixes { - if !want[fix.ActionKind] { - continue - } - changes := []protocol.DocumentChanges{} // must be a slice - for uri, edits := range fix.Edits { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - changes = append(changes, documentChanges(fh, edits)...) - } - action := protocol.CodeAction{ - Title: fix.Title, - Kind: fix.ActionKind, - Edit: &protocol.WorkspaceEdit{ - DocumentChanges: changes, - }, - Command: fix.Command, - } - action.Diagnostics = []protocol.Diagnostic{*pd} - actions = append(actions, action) - } - return actions, nil -} - -func goTest(ctx context.Context, snapshot source.Snapshot, pkg source.Package, pgf *source.ParsedGoFile, rng protocol.Range) ([]protocol.CodeAction, error) { - fns, err := source.TestsAndBenchmarks(pkg, pgf) - if err != nil { - return nil, err - } - - var tests, benchmarks []string - for _, fn := range fns.Tests { - if !protocol.Intersect(fn.Rng, rng) { - continue - } - tests = append(tests, fn.Name) - } - for _, fn := range fns.Benchmarks { - if !protocol.Intersect(fn.Rng, rng) { - continue - } - benchmarks = append(benchmarks, fn.Name) - } - - if len(tests) == 0 && len(benchmarks) == 0 { - return nil, nil - } - - cmd, err := command.NewTestCommand("Run tests and benchmarks", protocol.URIFromSpanURI(pgf.URI), tests, benchmarks) - if err != nil { - return nil, err - } - return []protocol.CodeAction{{ - Title: cmd.Title, - Kind: protocol.GoTest, - Command: &cmd, - }}, nil -} - -type unit = struct{} diff --git a/gopls/internal/lsp/code_lens.go b/gopls/internal/lsp/code_lens.go deleted file mode 100644 index da7598604b0..00000000000 --- a/gopls/internal/lsp/code_lens.go +++ /dev/null @@ -1,61 +0,0 
@@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - "fmt" - "sort" - - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/mod" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -func (s *Server) codeLens(ctx context.Context, params *protocol.CodeLensParams) ([]protocol.CodeLens, error) { - ctx, done := event.Start(ctx, "lsp.Server.codeLens", tag.URI.Of(params.TextDocument.URI)) - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.UnknownKind) - defer release() - if !ok { - return nil, err - } - var lenses map[command.Command]source.LensFunc - switch snapshot.FileKind(fh) { - case source.Mod: - lenses = mod.LensFuncs() - case source.Go: - lenses = source.LensFuncs() - default: - // Unsupported file kind for a code lens. - return nil, nil - } - var result []protocol.CodeLens - for cmd, lf := range lenses { - if !snapshot.Options().Codelenses[string(cmd)] { - continue - } - added, err := lf(ctx, snapshot, fh) - // Code lens is called on every keystroke, so we should just operate in - // a best-effort mode, ignoring errors. - if err != nil { - event.Error(ctx, fmt.Sprintf("code lens %s failed", cmd), err) - continue - } - result = append(result, added...) 
- } - sort.Slice(result, func(i, j int) bool { - a, b := result[i], result[j] - if cmp := protocol.CompareRange(a.Range, b.Range); cmp != 0 { - return cmp < 0 - } - return a.Command.Command < b.Command.Command - }) - return result, nil -} diff --git a/gopls/internal/lsp/command.go b/gopls/internal/lsp/command.go deleted file mode 100644 index da9d44e0c21..00000000000 --- a/gopls/internal/lsp/command.go +++ /dev/null @@ -1,1260 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "io" - "os" - "path/filepath" - "runtime" - "runtime/pprof" - "sort" - "strings" - "sync" - - "golang.org/x/mod/modfile" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/debug" - "golang.org/x/tools/gopls/internal/lsp/progress" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/gopls/internal/telemetry" - "golang.org/x/tools/gopls/internal/vulncheck" - "golang.org/x/tools/gopls/internal/vulncheck/scan" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/gocommand" - "golang.org/x/tools/internal/tokeninternal" - "golang.org/x/tools/internal/xcontext" -) - -func (s *Server) executeCommand(ctx context.Context, params *protocol.ExecuteCommandParams) (interface{}, error) { - ctx, done := event.Start(ctx, "lsp.Server.executeCommand") - defer done() - - var found bool - for _, name := range s.Options().SupportedCommands { - if name == params.Command { - found = true - break - } - } - if !found { - return nil, fmt.Errorf("%s is not a supported command", params.Command) - } - - handler := 
&commandHandler{ - s: s, - params: params, - } - return command.Dispatch(ctx, params, handler) -} - -type commandHandler struct { - s *Server - params *protocol.ExecuteCommandParams -} - -func (h *commandHandler) MaybePromptForTelemetry(ctx context.Context) error { - go h.s.maybePromptForTelemetry(ctx, true) - return nil -} - -func (*commandHandler) AddTelemetryCounters(_ context.Context, args command.AddTelemetryCountersArgs) error { - if len(args.Names) != len(args.Values) { - return fmt.Errorf("Names and Values must have the same length") - } - // invalid counter update requests will be silently dropped. (no audience) - telemetry.AddForwardedCounters(args.Names, args.Values) - return nil -} - -// commandConfig configures common command set-up and execution. -type commandConfig struct { - async bool // whether to run the command asynchronously. Async commands can only return errors. - requireSave bool // whether all files must be saved for the command to work - progress string // title to use for progress reporting. If empty, no progress will be reported. - forView string // view to resolve to a snapshot; incompatible with forURI - forURI protocol.DocumentURI // URI to resolve to a snapshot. If unset, snapshot will be nil. -} - -// commandDeps is evaluated from a commandConfig. Note that not all fields may -// be populated, depending on which configuration is set. See comments in-line -// for details. -type commandDeps struct { - snapshot source.Snapshot // present if cfg.forURI was set - fh source.FileHandle // present if cfg.forURI was set - work *progress.WorkDone // present cfg.progress was set -} - -type commandFunc func(context.Context, commandDeps) error - -// run performs command setup for command execution, and invokes the given run -// function. If cfg.async is set, run executes the given func in a separate -// goroutine, and returns as soon as setup is complete and the goroutine is -// scheduled. 
-// -// Invariant: if the resulting error is non-nil, the given run func will -// (eventually) be executed exactly once. -func (c *commandHandler) run(ctx context.Context, cfg commandConfig, run commandFunc) (err error) { - if cfg.requireSave { - var unsaved []string - for _, overlay := range c.s.session.Overlays() { - if !overlay.SameContentsOnDisk() { - unsaved = append(unsaved, overlay.URI().Filename()) - } - } - if len(unsaved) > 0 { - return fmt.Errorf("All files must be saved first (unsaved: %v).", unsaved) - } - } - var deps commandDeps - if cfg.forURI != "" && cfg.forView != "" { - return bug.Errorf("internal error: forURI=%q, forView=%q", cfg.forURI, cfg.forView) - } - if cfg.forURI != "" { - var ok bool - var release func() - deps.snapshot, deps.fh, ok, release, err = c.s.beginFileRequest(ctx, cfg.forURI, source.UnknownKind) - defer release() - if !ok { - if err != nil { - return err - } - return fmt.Errorf("invalid file URL: %v", cfg.forURI) - } - } else if cfg.forView != "" { - view, err := c.s.session.View(cfg.forView) - if err != nil { - return err - } - var release func() - deps.snapshot, release, err = view.Snapshot() - if err != nil { - return err - } - defer release() - } - ctx, cancel := context.WithCancel(xcontext.Detach(ctx)) - if cfg.progress != "" { - deps.work = c.s.progress.Start(ctx, cfg.progress, "Running...", c.params.WorkDoneToken, cancel) - } - runcmd := func() error { - defer cancel() - err := run(ctx, deps) - if deps.work != nil { - switch { - case errors.Is(err, context.Canceled): - deps.work.End(ctx, "canceled") - case err != nil: - event.Error(ctx, "command error", err) - deps.work.End(ctx, "failed") - default: - deps.work.End(ctx, "completed") - } - } - return err - } - if cfg.async { - go func() { - if err := runcmd(); err != nil { - if showMessageErr := c.s.client.ShowMessage(ctx, &protocol.ShowMessageParams{ - Type: protocol.Error, - Message: err.Error(), - }); showMessageErr != nil { - event.Error(ctx, fmt.Sprintf("failed to 
show message: %q", err.Error()), showMessageErr) - } - } - }() - return nil - } - return runcmd() -} - -func (c *commandHandler) ApplyFix(ctx context.Context, args command.ApplyFixArgs) error { - return c.run(ctx, commandConfig{ - // Note: no progress here. Applying fixes should be quick. - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - edits, err := source.ApplyFix(ctx, args.Fix, deps.snapshot, deps.fh, args.Range) - if err != nil { - return err - } - changes := []protocol.DocumentChanges{} // must be a slice - for _, edit := range edits { - edit := edit - changes = append(changes, protocol.DocumentChanges{ - TextDocumentEdit: &edit, - }) - } - r, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: protocol.WorkspaceEdit{ - DocumentChanges: changes, - }, - }) - if err != nil { - return err - } - if !r.Applied { - return errors.New(r.FailureReason) - } - return nil - }) -} - -func (c *commandHandler) RegenerateCgo(ctx context.Context, args command.URIArg) error { - return c.run(ctx, commandConfig{ - progress: "Regenerating Cgo", - }, func(ctx context.Context, _ commandDeps) error { - var wg sync.WaitGroup // tracks work done on behalf of this function, incl. diagnostics - wg.Add(1) - defer wg.Done() - - // Track progress on this operation for testing. - if c.s.Options().VerboseWorkDoneProgress { - work := c.s.progress.Start(ctx, DiagnosticWorkTitle(FromRegenerateCgo), "Calculating file diagnostics...", nil, nil) - go func() { - wg.Wait() - work.End(ctx, "Done.") - }() - } - - // Resetting the view causes cgo to be regenerated via `go list`. 
- v, err := c.s.session.ResetView(ctx, args.URI.SpanURI()) - if err != nil { - return err - } - - snapshot, release, err := v.Snapshot() - if err != nil { - return err - } - wg.Add(1) - go func() { - c.s.diagnoseSnapshot(snapshot, nil, true, 0) - release() - wg.Done() - }() - return nil - }) -} - -func (c *commandHandler) CheckUpgrades(ctx context.Context, args command.CheckUpgradesArgs) error { - return c.run(ctx, commandConfig{ - forURI: args.URI, - progress: "Checking for upgrades", - }, func(ctx context.Context, deps commandDeps) error { - upgrades, err := c.s.getUpgrades(ctx, deps.snapshot, args.URI.SpanURI(), args.Modules) - if err != nil { - return err - } - deps.snapshot.View().RegisterModuleUpgrades(args.URI.SpanURI(), upgrades) - // Re-diagnose the snapshot to publish the new module diagnostics. - c.s.diagnoseSnapshot(deps.snapshot, nil, false, 0) - return nil - }) -} - -func (c *commandHandler) AddDependency(ctx context.Context, args command.DependencyArgs) error { - return c.GoGetModule(ctx, args) -} - -func (c *commandHandler) UpgradeDependency(ctx context.Context, args command.DependencyArgs) error { - return c.GoGetModule(ctx, args) -} - -func (c *commandHandler) ResetGoModDiagnostics(ctx context.Context, args command.ResetGoModDiagnosticsArgs) error { - return c.run(ctx, commandConfig{ - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - // Clear all diagnostics coming from the upgrade check source and vulncheck. - // This will clear the diagnostics in all go.mod files, but they - // will be re-calculated when the snapshot is diagnosed again. 
- if args.DiagnosticSource == "" || args.DiagnosticSource == string(source.UpgradeNotification) { - deps.snapshot.View().ClearModuleUpgrades(args.URI.SpanURI()) - c.s.clearDiagnosticSource(modCheckUpgradesSource) - } - - if args.DiagnosticSource == "" || args.DiagnosticSource == string(source.Govulncheck) { - deps.snapshot.View().SetVulnerabilities(args.URI.SpanURI(), nil) - c.s.clearDiagnosticSource(modVulncheckSource) - } - - // Re-diagnose the snapshot to remove the diagnostics. - c.s.diagnoseSnapshot(deps.snapshot, nil, false, 0) - return nil - }) -} - -func (c *commandHandler) GoGetModule(ctx context.Context, args command.DependencyArgs) error { - return c.run(ctx, commandConfig{ - progress: "Running go get", - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - return c.s.runGoModUpdateCommands(ctx, deps.snapshot, args.URI.SpanURI(), func(invoke func(...string) (*bytes.Buffer, error)) error { - return runGoGetModule(invoke, args.AddRequire, args.GoCmdArgs) - }) - }) -} - -// TODO(rFindley): UpdateGoSum, Tidy, and Vendor could probably all be one command. 
-func (c *commandHandler) UpdateGoSum(ctx context.Context, args command.URIArgs) error { - return c.run(ctx, commandConfig{ - progress: "Updating go.sum", - }, func(ctx context.Context, deps commandDeps) error { - for _, uri := range args.URIs { - snapshot, fh, ok, release, err := c.s.beginFileRequest(ctx, uri, source.UnknownKind) - defer release() - if !ok { - return err - } - if err := c.s.runGoModUpdateCommands(ctx, snapshot, fh.URI(), func(invoke func(...string) (*bytes.Buffer, error)) error { - _, err := invoke("list", "all") - return err - }); err != nil { - return err - } - } - return nil - }) -} - -func (c *commandHandler) Tidy(ctx context.Context, args command.URIArgs) error { - return c.run(ctx, commandConfig{ - requireSave: true, - progress: "Running go mod tidy", - }, func(ctx context.Context, deps commandDeps) error { - for _, uri := range args.URIs { - snapshot, fh, ok, release, err := c.s.beginFileRequest(ctx, uri, source.UnknownKind) - defer release() - if !ok { - return err - } - if err := c.s.runGoModUpdateCommands(ctx, snapshot, fh.URI(), func(invoke func(...string) (*bytes.Buffer, error)) error { - _, err := invoke("mod", "tidy") - return err - }); err != nil { - return err - } - } - return nil - }) -} - -func (c *commandHandler) Vendor(ctx context.Context, args command.URIArg) error { - return c.run(ctx, commandConfig{ - requireSave: true, - progress: "Running go mod vendor", - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - // Use RunGoCommandPiped here so that we don't compete with any other go - // command invocations. go mod vendor deletes modules.txt before recreating - // it, and therefore can run into file locking issues on Windows if that - // file is in use by another process, such as go list. - // - // If golang/go#44119 is resolved, go mod vendor will instead modify - // modules.txt in-place. In that case we could theoretically allow this - // command to run concurrently. 
- err := deps.snapshot.RunGoCommandPiped(ctx, source.Normal|source.AllowNetwork, &gocommand.Invocation{ - Verb: "mod", - Args: []string{"vendor"}, - WorkingDir: filepath.Dir(args.URI.SpanURI().Filename()), - }, &bytes.Buffer{}, &bytes.Buffer{}) - return err - }) -} - -func (c *commandHandler) EditGoDirective(ctx context.Context, args command.EditGoDirectiveArgs) error { - return c.run(ctx, commandConfig{ - requireSave: true, // if go.mod isn't saved it could cause a problem - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - snapshot, fh, ok, release, err := c.s.beginFileRequest(ctx, args.URI, source.UnknownKind) - defer release() - if !ok { - return err - } - if err := c.s.runGoModUpdateCommands(ctx, snapshot, fh.URI(), func(invoke func(...string) (*bytes.Buffer, error)) error { - _, err := invoke("mod", "edit", "-go", args.Version) - return err - }); err != nil { - return err - } - return nil - }) -} - -func (c *commandHandler) RemoveDependency(ctx context.Context, args command.RemoveDependencyArgs) error { - return c.run(ctx, commandConfig{ - progress: "Removing dependency", - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - // See the documentation for OnlyDiagnostic. - // - // TODO(rfindley): In Go 1.17+, we will be able to use the go command - // without checking if the module is tidy. 
- if args.OnlyDiagnostic { - return c.s.runGoModUpdateCommands(ctx, deps.snapshot, args.URI.SpanURI(), func(invoke func(...string) (*bytes.Buffer, error)) error { - if err := runGoGetModule(invoke, false, []string{args.ModulePath + "@none"}); err != nil { - return err - } - _, err := invoke("mod", "tidy") - return err - }) - } - pm, err := deps.snapshot.ParseMod(ctx, deps.fh) - if err != nil { - return err - } - edits, err := dropDependency(deps.snapshot, pm, args.ModulePath) - if err != nil { - return err - } - response, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: protocol.WorkspaceEdit{ - DocumentChanges: []protocol.DocumentChanges{ - { - TextDocumentEdit: &protocol.TextDocumentEdit{ - TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{ - Version: deps.fh.Version(), - TextDocumentIdentifier: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(deps.fh.URI()), - }, - }, - Edits: nonNilSliceTextEdit(edits), - }, - }, - }, - }, - }) - if err != nil { - return err - } - if !response.Applied { - return fmt.Errorf("edits not applied because of %s", response.FailureReason) - } - return nil - }) -} - -// dropDependency returns the edits to remove the given require from the go.mod -// file. -func dropDependency(snapshot source.Snapshot, pm *source.ParsedModule, modulePath string) ([]protocol.TextEdit, error) { - // We need a private copy of the parsed go.mod file, since we're going to - // modify it. - copied, err := modfile.Parse("", pm.Mapper.Content, nil) - if err != nil { - return nil, err - } - if err := copied.DropRequire(modulePath); err != nil { - return nil, err - } - copied.Cleanup() - newContent, err := copied.Format() - if err != nil { - return nil, err - } - // Calculate the edits to be made due to the change. 
- diff := snapshot.Options().ComputeEdits(string(pm.Mapper.Content), string(newContent)) - return source.ToProtocolEdits(pm.Mapper, diff) -} - -func (c *commandHandler) Test(ctx context.Context, uri protocol.DocumentURI, tests, benchmarks []string) error { - return c.RunTests(ctx, command.RunTestsArgs{ - URI: uri, - Tests: tests, - Benchmarks: benchmarks, - }) -} - -func (c *commandHandler) RunTests(ctx context.Context, args command.RunTestsArgs) error { - return c.run(ctx, commandConfig{ - async: true, - progress: "Running go test", - requireSave: true, - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - if err := c.runTests(ctx, deps.snapshot, deps.work, args.URI, args.Tests, args.Benchmarks); err != nil { - return fmt.Errorf("running tests failed: %w", err) - } - return nil - }) -} - -func (c *commandHandler) runTests(ctx context.Context, snapshot source.Snapshot, work *progress.WorkDone, uri protocol.DocumentURI, tests, benchmarks []string) error { - // TODO: fix the error reporting when this runs async. - meta, err := source.NarrowestMetadataForFile(ctx, snapshot, uri.SpanURI()) - if err != nil { - return err - } - pkgPath := string(meta.ForTest) - - // create output - buf := &bytes.Buffer{} - ew := progress.NewEventWriter(ctx, "test") - out := io.MultiWriter(ew, progress.NewWorkDoneWriter(ctx, work), buf) - - // Run `go test -run Func` on each test. - var failedTests int - for _, funcName := range tests { - inv := &gocommand.Invocation{ - Verb: "test", - Args: []string{pkgPath, "-v", "-count=1", "-run", fmt.Sprintf("^%s$", funcName)}, - WorkingDir: filepath.Dir(uri.SpanURI().Filename()), - } - if err := snapshot.RunGoCommandPiped(ctx, source.Normal, inv, out, out); err != nil { - if errors.Is(err, context.Canceled) { - return err - } - failedTests++ - } - } - - // Run `go test -run=^$ -bench Func` on each test. 
- var failedBenchmarks int - for _, funcName := range benchmarks { - inv := &gocommand.Invocation{ - Verb: "test", - Args: []string{pkgPath, "-v", "-run=^$", "-bench", fmt.Sprintf("^%s$", funcName)}, - WorkingDir: filepath.Dir(uri.SpanURI().Filename()), - } - if err := snapshot.RunGoCommandPiped(ctx, source.Normal, inv, out, out); err != nil { - if errors.Is(err, context.Canceled) { - return err - } - failedBenchmarks++ - } - } - - var title string - if len(tests) > 0 && len(benchmarks) > 0 { - title = "tests and benchmarks" - } else if len(tests) > 0 { - title = "tests" - } else if len(benchmarks) > 0 { - title = "benchmarks" - } else { - return errors.New("No functions were provided") - } - message := fmt.Sprintf("all %s passed", title) - if failedTests > 0 && failedBenchmarks > 0 { - message = fmt.Sprintf("%d / %d tests failed and %d / %d benchmarks failed", failedTests, len(tests), failedBenchmarks, len(benchmarks)) - } else if failedTests > 0 { - message = fmt.Sprintf("%d / %d tests failed", failedTests, len(tests)) - } else if failedBenchmarks > 0 { - message = fmt.Sprintf("%d / %d benchmarks failed", failedBenchmarks, len(benchmarks)) - } - if failedTests > 0 || failedBenchmarks > 0 { - message += "\n" + buf.String() - } - - return c.s.client.ShowMessage(ctx, &protocol.ShowMessageParams{ - Type: protocol.Info, - Message: message, - }) -} - -func (c *commandHandler) Generate(ctx context.Context, args command.GenerateArgs) error { - title := "Running go generate ." - if args.Recursive { - title = "Running go generate ./..." - } - return c.run(ctx, commandConfig{ - requireSave: true, - progress: title, - forURI: args.Dir, - }, func(ctx context.Context, deps commandDeps) error { - er := progress.NewEventWriter(ctx, "generate") - - pattern := "." - if args.Recursive { - pattern = "./..." 
- } - inv := &gocommand.Invocation{ - Verb: "generate", - Args: []string{"-x", pattern}, - WorkingDir: args.Dir.SpanURI().Filename(), - } - stderr := io.MultiWriter(er, progress.NewWorkDoneWriter(ctx, deps.work)) - if err := deps.snapshot.RunGoCommandPiped(ctx, source.Normal, inv, er, stderr); err != nil { - return err - } - return nil - }) -} - -func (c *commandHandler) GoGetPackage(ctx context.Context, args command.GoGetPackageArgs) error { - return c.run(ctx, commandConfig{ - forURI: args.URI, - progress: "Running go get", - }, func(ctx context.Context, deps commandDeps) error { - // Run on a throwaway go.mod, otherwise it'll write to the real one. - stdout, err := deps.snapshot.RunGoCommandDirect(ctx, source.WriteTemporaryModFile|source.AllowNetwork, &gocommand.Invocation{ - Verb: "list", - Args: []string{"-f", "{{.Module.Path}}@{{.Module.Version}}", args.Pkg}, - WorkingDir: filepath.Dir(args.URI.SpanURI().Filename()), - }) - if err != nil { - return err - } - ver := strings.TrimSpace(stdout.String()) - return c.s.runGoModUpdateCommands(ctx, deps.snapshot, args.URI.SpanURI(), func(invoke func(...string) (*bytes.Buffer, error)) error { - if args.AddRequire { - if err := addModuleRequire(invoke, []string{ver}); err != nil { - return err - } - } - _, err := invoke(append([]string{"get", "-d"}, args.Pkg)...) 
- return err - }) - }) -} - -func (s *Server) runGoModUpdateCommands(ctx context.Context, snapshot source.Snapshot, uri span.URI, run func(invoke func(...string) (*bytes.Buffer, error)) error) error { - tmpModfile, newModBytes, newSumBytes, err := snapshot.RunGoCommands(ctx, true, filepath.Dir(uri.Filename()), run) - if err != nil { - return err - } - if !tmpModfile { - return nil - } - modURI := snapshot.GoModForFile(uri) - sumURI := span.URIFromPath(strings.TrimSuffix(modURI.Filename(), ".mod") + ".sum") - modEdits, err := collectFileEdits(ctx, snapshot, modURI, newModBytes) - if err != nil { - return err - } - sumEdits, err := collectFileEdits(ctx, snapshot, sumURI, newSumBytes) - if err != nil { - return err - } - return applyFileEdits(ctx, s.client, append(sumEdits, modEdits...)) -} - -// collectFileEdits collects any file edits required to transform the snapshot -// file specified by uri to the provided new content. -// -// If the file is not open, collectFileEdits simply writes the new content to -// disk. -// -// TODO(rfindley): fix this API asymmetry. It should be up to the caller to -// write the file or apply the edits. -func collectFileEdits(ctx context.Context, snapshot source.Snapshot, uri span.URI, newContent []byte) ([]protocol.TextDocumentEdit, error) { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - oldContent, err := fh.Content() - if err != nil && !os.IsNotExist(err) { - return nil, err - } - - if bytes.Equal(oldContent, newContent) { - return nil, nil - } - - // Sending a workspace edit to a closed file causes VS Code to open the - // file and leave it unsaved. We would rather apply the changes directly, - // especially to go.sum, which should be mostly invisible to the user. 
- if !snapshot.IsOpen(uri) { - err := os.WriteFile(uri.Filename(), newContent, 0666) - return nil, err - } - - m := protocol.NewMapper(fh.URI(), oldContent) - diff := snapshot.Options().ComputeEdits(string(oldContent), string(newContent)) - edits, err := source.ToProtocolEdits(m, diff) - if err != nil { - return nil, err - } - return []protocol.TextDocumentEdit{{ - TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{ - Version: fh.Version(), - TextDocumentIdentifier: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(uri), - }, - }, - Edits: edits, - }}, nil -} - -func applyFileEdits(ctx context.Context, cli protocol.Client, edits []protocol.TextDocumentEdit) error { - if len(edits) == 0 { - return nil - } - documentChanges := []protocol.DocumentChanges{} // must be a slice - for _, change := range edits { - change := change - documentChanges = append(documentChanges, protocol.DocumentChanges{ - TextDocumentEdit: &change, - }) - } - response, err := cli.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: protocol.WorkspaceEdit{ - DocumentChanges: documentChanges, - }, - }) - if err != nil { - return err - } - if !response.Applied { - return fmt.Errorf("edits not applied because of %s", response.FailureReason) - } - return nil -} - -func runGoGetModule(invoke func(...string) (*bytes.Buffer, error), addRequire bool, args []string) error { - if addRequire { - if err := addModuleRequire(invoke, args); err != nil { - return err - } - } - _, err := invoke(append([]string{"get", "-d"}, args...)...) - return err -} - -func addModuleRequire(invoke func(...string) (*bytes.Buffer, error), args []string) error { - // Using go get to create a new dependency results in an - // `// indirect` comment we may not want. The only way to avoid it - // is to add the require as direct first. Then we can use go get to - // update go.sum and tidy up. - _, err := invoke(append([]string{"mod", "edit", "-require"}, args...)...) 
- return err -} - -func (s *Server) getUpgrades(ctx context.Context, snapshot source.Snapshot, uri span.URI, modules []string) (map[string]string, error) { - stdout, err := snapshot.RunGoCommandDirect(ctx, source.Normal|source.AllowNetwork, &gocommand.Invocation{ - Verb: "list", - Args: append([]string{"-m", "-u", "-json"}, modules...), - WorkingDir: filepath.Dir(uri.Filename()), - ModFlag: "readonly", - }) - if err != nil { - return nil, err - } - - upgrades := map[string]string{} - for dec := json.NewDecoder(stdout); dec.More(); { - mod := &gocommand.ModuleJSON{} - if err := dec.Decode(mod); err != nil { - return nil, err - } - if mod.Update == nil { - continue - } - upgrades[mod.Path] = mod.Update.Version - } - return upgrades, nil -} - -func (c *commandHandler) GCDetails(ctx context.Context, uri protocol.DocumentURI) error { - return c.ToggleGCDetails(ctx, command.URIArg{URI: uri}) -} - -func (c *commandHandler) ToggleGCDetails(ctx context.Context, args command.URIArg) error { - return c.run(ctx, commandConfig{ - requireSave: true, - progress: "Toggling GC Details", - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - meta, err := source.NarrowestMetadataForFile(ctx, deps.snapshot, deps.fh.URI()) - if err != nil { - return err - } - c.s.gcOptimizationDetailsMu.Lock() - if _, ok := c.s.gcOptimizationDetails[meta.ID]; ok { - delete(c.s.gcOptimizationDetails, meta.ID) - c.s.clearDiagnosticSource(gcDetailsSource) - } else { - c.s.gcOptimizationDetails[meta.ID] = struct{}{} - } - c.s.gcOptimizationDetailsMu.Unlock() - c.s.diagnoseSnapshot(deps.snapshot, nil, false, 0) - return nil - }) -} - -func (c *commandHandler) ListKnownPackages(ctx context.Context, args command.URIArg) (command.ListKnownPackagesResult, error) { - var result command.ListKnownPackagesResult - err := c.run(ctx, commandConfig{ - progress: "Listing packages", - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - pkgs, err := 
source.KnownPackagePaths(ctx, deps.snapshot, deps.fh) - for _, pkg := range pkgs { - result.Packages = append(result.Packages, string(pkg)) - } - return err - }) - return result, err -} - -func (c *commandHandler) ListImports(ctx context.Context, args command.URIArg) (command.ListImportsResult, error) { - var result command.ListImportsResult - err := c.run(ctx, commandConfig{ - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - fh, err := deps.snapshot.ReadFile(ctx, args.URI.SpanURI()) - if err != nil { - return err - } - pgf, err := deps.snapshot.ParseGo(ctx, fh, source.ParseHeader) - if err != nil { - return err - } - fset := tokeninternal.FileSetFor(pgf.Tok) - for _, group := range astutil.Imports(fset, pgf.File) { - for _, imp := range group { - if imp.Path == nil { - continue - } - var name string - if imp.Name != nil { - name = imp.Name.Name - } - result.Imports = append(result.Imports, command.FileImport{ - Path: string(source.UnquoteImportPath(imp)), - Name: name, - }) - } - } - meta, err := source.NarrowestMetadataForFile(ctx, deps.snapshot, args.URI.SpanURI()) - if err != nil { - return err // e.g. 
cancelled - } - for pkgPath := range meta.DepsByPkgPath { - result.PackageImports = append(result.PackageImports, - command.PackageImport{Path: string(pkgPath)}) - } - sort.Slice(result.PackageImports, func(i, j int) bool { - return result.PackageImports[i].Path < result.PackageImports[j].Path - }) - return nil - }) - return result, err -} - -func (c *commandHandler) AddImport(ctx context.Context, args command.AddImportArgs) error { - return c.run(ctx, commandConfig{ - progress: "Adding import", - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - edits, err := source.AddImport(ctx, deps.snapshot, deps.fh, args.ImportPath) - if err != nil { - return fmt.Errorf("could not add import: %v", err) - } - if _, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: protocol.WorkspaceEdit{ - DocumentChanges: documentChanges(deps.fh, edits), - }, - }); err != nil { - return fmt.Errorf("could not apply import edits: %v", err) - } - return nil - }) -} - -func (c *commandHandler) StartDebugging(ctx context.Context, args command.DebuggingArgs) (result command.DebuggingResult, _ error) { - addr := args.Addr - if addr == "" { - addr = "localhost:0" - } - di := debug.GetInstance(ctx) - if di == nil { - return result, errors.New("internal error: server has no debugging instance") - } - listenedAddr, err := di.Serve(ctx, addr) - if err != nil { - return result, fmt.Errorf("starting debug server: %w", err) - } - result.URLs = []string{"http://" + listenedAddr} - openClientBrowser(ctx, c.s.client, result.URLs[0]) - return result, nil -} - -func (c *commandHandler) StartProfile(ctx context.Context, args command.StartProfileArgs) (result command.StartProfileResult, _ error) { - file, err := os.CreateTemp("", "gopls-profile-*") - if err != nil { - return result, fmt.Errorf("creating temp profile file: %v", err) - } - - c.s.ongoingProfileMu.Lock() - defer c.s.ongoingProfileMu.Unlock() - - if c.s.ongoingProfile != nil { - file.Close() // 
ignore error - return result, fmt.Errorf("profile already started (for %q)", c.s.ongoingProfile.Name()) - } - - if err := pprof.StartCPUProfile(file); err != nil { - file.Close() // ignore error - return result, fmt.Errorf("starting profile: %v", err) - } - - c.s.ongoingProfile = file - return result, nil -} - -func (c *commandHandler) StopProfile(ctx context.Context, args command.StopProfileArgs) (result command.StopProfileResult, _ error) { - c.s.ongoingProfileMu.Lock() - defer c.s.ongoingProfileMu.Unlock() - - prof := c.s.ongoingProfile - c.s.ongoingProfile = nil - - if prof == nil { - return result, fmt.Errorf("no ongoing profile") - } - - pprof.StopCPUProfile() - if err := prof.Close(); err != nil { - return result, fmt.Errorf("closing profile file: %v", err) - } - result.File = prof.Name() - return result, nil -} - -// Copy of pkgLoadConfig defined in internal/lsp/cmd/vulncheck.go -// TODO(hyangah): decide where to define this. -type pkgLoadConfig struct { - // BuildFlags is a list of command-line flags to be passed through to - // the build system's query tool. - BuildFlags []string - - // If Tests is set, the loader includes related test packages. - Tests bool -} - -func (c *commandHandler) FetchVulncheckResult(ctx context.Context, arg command.URIArg) (map[protocol.DocumentURI]*vulncheck.Result, error) { - ret := map[protocol.DocumentURI]*vulncheck.Result{} - err := c.run(ctx, commandConfig{forURI: arg.URI}, func(ctx context.Context, deps commandDeps) error { - if deps.snapshot.Options().Vulncheck == source.ModeVulncheckImports { - for _, modfile := range deps.snapshot.ModFiles() { - res, err := deps.snapshot.ModVuln(ctx, modfile) - if err != nil { - return err - } - ret[protocol.URIFromSpanURI(modfile)] = res - } - } - // Overwrite if there is any govulncheck-based result. 
- for modfile, result := range deps.snapshot.View().Vulnerabilities() { - ret[protocol.URIFromSpanURI(modfile)] = result - } - return nil - }) - return ret, err -} - -func (c *commandHandler) RunGovulncheck(ctx context.Context, args command.VulncheckArgs) (command.RunVulncheckResult, error) { - if args.URI == "" { - return command.RunVulncheckResult{}, errors.New("VulncheckArgs is missing URI field") - } - - // Return the workdone token so that clients can identify when this - // vulncheck invocation is complete. - // - // Since the run function executes asynchronously, we use a channel to - // synchronize the start of the run and return the token. - tokenChan := make(chan protocol.ProgressToken, 1) - err := c.run(ctx, commandConfig{ - async: true, // need to be async to be cancellable - progress: "govulncheck", - requireSave: true, - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - tokenChan <- deps.work.Token() - - workDoneWriter := progress.NewWorkDoneWriter(ctx, deps.work) - dir := filepath.Dir(args.URI.SpanURI().Filename()) - pattern := args.Pattern - - result, err := scan.RunGovulncheck(ctx, pattern, deps.snapshot, dir, workDoneWriter) - if err != nil { - return err - } - - deps.snapshot.View().SetVulnerabilities(args.URI.SpanURI(), result) - c.s.diagnoseSnapshot(deps.snapshot, nil, false, 0) - - affecting := make(map[string]bool, len(result.Entries)) - for _, finding := range result.Findings { - if len(finding.Trace) > 1 { // at least 2 frames if callstack exists (vulnerability, entry) - affecting[finding.OSV] = true - } - } - if len(affecting) == 0 { - return c.s.client.ShowMessage(ctx, &protocol.ShowMessageParams{ - Type: protocol.Info, - Message: "No vulnerabilities found", - }) - } - affectingOSVs := make([]string, 0, len(affecting)) - for id := range affecting { - affectingOSVs = append(affectingOSVs, id) - } - sort.Strings(affectingOSVs) - return c.s.client.ShowMessage(ctx, &protocol.ShowMessageParams{ - Type: 
protocol.Warning, - Message: fmt.Sprintf("Found %v", strings.Join(affectingOSVs, ", ")), - }) - }) - if err != nil { - return command.RunVulncheckResult{}, err - } - select { - case <-ctx.Done(): - return command.RunVulncheckResult{}, ctx.Err() - case token := <-tokenChan: - return command.RunVulncheckResult{Token: token}, nil - } -} - -// MemStats implements the MemStats command. It returns an error as a -// future-proof API, but the resulting error is currently always nil. -func (c *commandHandler) MemStats(ctx context.Context) (command.MemStatsResult, error) { - // GC a few times for stable results. - runtime.GC() - runtime.GC() - runtime.GC() - var m runtime.MemStats - runtime.ReadMemStats(&m) - return command.MemStatsResult{ - HeapAlloc: m.HeapAlloc, - HeapInUse: m.HeapInuse, - TotalAlloc: m.TotalAlloc, - }, nil -} - -// WorkspaceStats implements the WorkspaceStats command, reporting information -// about the current state of the loaded workspace for the current session. -func (c *commandHandler) WorkspaceStats(ctx context.Context) (command.WorkspaceStatsResult, error) { - var res command.WorkspaceStatsResult - res.Files.Total, res.Files.Largest, res.Files.Errs = c.s.session.Cache().FileStats() - - for _, view := range c.s.session.Views() { - vs, err := collectViewStats(ctx, view) - if err != nil { - return res, err - } - res.Views = append(res.Views, vs) - } - return res, nil -} - -func collectViewStats(ctx context.Context, view *cache.View) (command.ViewStats, error) { - s, release, err := view.Snapshot() - if err != nil { - return command.ViewStats{}, err - } - defer release() - - allMD, err := s.AllMetadata(ctx) - if err != nil { - return command.ViewStats{}, err - } - allPackages := collectPackageStats(allMD) - - wsMD, err := s.WorkspaceMetadata(ctx) - if err != nil { - return command.ViewStats{}, err - } - workspacePackages := collectPackageStats(wsMD) - - var ids []source.PackageID - for _, m := range wsMD { - ids = append(ids, m.ID) - } - - diags, err 
:= s.PackageDiagnostics(ctx, ids...) - if err != nil { - return command.ViewStats{}, err - } - - ndiags := 0 - for _, d := range diags { - ndiags += len(d) - } - - return command.ViewStats{ - GoCommandVersion: view.GoVersionString(), - AllPackages: allPackages, - WorkspacePackages: workspacePackages, - Diagnostics: ndiags, - }, nil -} - -func collectPackageStats(md []*source.Metadata) command.PackageStats { - var stats command.PackageStats - stats.Packages = len(md) - modules := make(map[string]bool) - - for _, m := range md { - n := len(m.CompiledGoFiles) - stats.CompiledGoFiles += n - if n > stats.LargestPackage { - stats.LargestPackage = n - } - if m.Module != nil { - modules[m.Module.Path] = true - } - } - stats.Modules = len(modules) - - return stats -} - -// RunGoWorkCommand invokes `go work ` with the provided arguments. -// -// args.InitFirst controls whether to first run `go work init`. This allows a -// single command to both create and recursively populate a go.work file -- as -// of writing there is no `go work init -r`. -// -// Some thought went into implementing this command. Unlike the go.mod commands -// above, this command simply invokes the go command and relies on the client -// to notify gopls of file changes via didChangeWatchedFile notifications. -// We could instead run these commands with GOWORK set to a temp file, but that -// poses the following problems: -// - directory locations in the resulting temp go.work file will be computed -// relative to the directory containing that go.work. If the go.work is in a -// tempdir, the directories will need to be translated to/from that dir. -// - it would be simpler to use a temp go.work file in the workspace -// directory, or whichever directory contains the real go.work file, but -// that sets a bad precedent of writing to a user-owned directory. We -// shouldn't start doing that. 
-// - Sending workspace edits to create a go.work file would require using -// the CreateFile resource operation, which would need to be tested in every -// client as we haven't used it before. We don't have time for that right -// now. -// -// Therefore, we simply require that the current go.work file is saved (if it -// exists), and delegate to the go command. -func (c *commandHandler) RunGoWorkCommand(ctx context.Context, args command.RunGoWorkArgs) error { - return c.run(ctx, commandConfig{ - progress: "Running go work command", - forView: args.ViewID, - }, func(ctx context.Context, deps commandDeps) (runErr error) { - snapshot := deps.snapshot - view := snapshot.View().(*cache.View) - viewDir := view.Folder().Filename() - - // If the user has explicitly set GOWORK=off, we should warn them - // explicitly and avoid potentially misleading errors below. - goworkURI, off := view.GOWORK() - if off { - return fmt.Errorf("cannot modify go.work files when GOWORK=off") - } - gowork := goworkURI.Filename() - - if goworkURI != "" { - fh, err := snapshot.ReadFile(ctx, goworkURI) - if err != nil { - return fmt.Errorf("reading current go.work file: %v", err) - } - if !fh.SameContentsOnDisk() { - return fmt.Errorf("must save workspace file %s before running go work commands", goworkURI) - } - } else { - if !args.InitFirst { - // If go.work does not exist, we should have detected that and asked - // for InitFirst. 
- return bug.Errorf("internal error: cannot run go work command: required go.work file not found") - } - gowork = filepath.Join(viewDir, "go.work") - if err := c.invokeGoWork(ctx, viewDir, gowork, []string{"init"}); err != nil { - return fmt.Errorf("running `go work init`: %v", err) - } - } - - return c.invokeGoWork(ctx, viewDir, gowork, args.Args) - }) -} - -func (c *commandHandler) invokeGoWork(ctx context.Context, viewDir, gowork string, args []string) error { - inv := gocommand.Invocation{ - Verb: "work", - Args: args, - WorkingDir: viewDir, - Env: append(os.Environ(), fmt.Sprintf("GOWORK=%s", gowork)), - } - if _, err := c.s.session.GoCommandRunner().Run(ctx, inv); err != nil { - return fmt.Errorf("running go work command: %v", err) - } - return nil -} - -// openClientBrowser causes the LSP client to open the specified URL -// in an external browser. -func openClientBrowser(ctx context.Context, cli protocol.Client, url protocol.URI) { - showDocumentImpl(ctx, cli, url, nil) -} - -// openClientEditor causes the LSP client to open the specified document -// and select the indicated range. -func openClientEditor(ctx context.Context, cli protocol.Client, loc protocol.Location) { - showDocumentImpl(ctx, cli, protocol.URI(loc.URI), &loc.Range) -} - -func showDocumentImpl(ctx context.Context, cli protocol.Client, url protocol.URI, rangeOpt *protocol.Range) { - // In principle we shouldn't send a showDocument request to a - // client that doesn't support it, as reported by - // ShowDocumentClientCapabilities. But even clients that do - // support it may defer the real work of opening the document - // asynchronously, to avoid deadlocks due to rentrancy. - // - // For example: client sends request to server; server sends - // showDocument to client; client opens editor; editor causes - // new RPC to be sent to server, which is still busy with - // previous request. (This happens in eglot.) - // - // So we can't rely on the success/failure information. 
- // That's the reason this function doesn't return an error. - - // "External" means run the system-wide handler (e.g. open(1) - // on macOS or xdg-open(1) on Linux) for this URL, ignoring - // TakeFocus and Selection. Note that this may still end up - // opening the same editor (e.g. VSCode) for a file: URL. - res, err := cli.ShowDocument(ctx, &protocol.ShowDocumentParams{ - URI: url, - External: rangeOpt == nil, - TakeFocus: true, - Selection: rangeOpt, // optional - }) - if err != nil { - event.Error(ctx, "client.showDocument: %v", err) - } else if res != nil && !res.Success { - event.Log(ctx, fmt.Sprintf("client declined to open document %v", url)) - } -} - -func (c *commandHandler) ChangeSignature(ctx context.Context, args command.ChangeSignatureArgs) error { - return c.run(ctx, commandConfig{ - forURI: args.RemoveParameter.URI, - }, func(ctx context.Context, deps commandDeps) error { - // For now, gopls only supports removing unused parameters. - changes, err := source.RemoveUnusedParameter(ctx, deps.fh, args.RemoveParameter.Range, deps.snapshot) - if err != nil { - return err - } - r, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: protocol.WorkspaceEdit{ - DocumentChanges: changes, - }, - }) - if !r.Applied { - return fmt.Errorf("failed to apply edits: %v", r.FailureReason) - } - - return nil - }) -} diff --git a/gopls/internal/lsp/command/generate.go b/gopls/internal/lsp/command/generate.go deleted file mode 100644 index b7907e60f5c..00000000000 --- a/gopls/internal/lsp/command/generate.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build ignore -// +build ignore - -package main - -import ( - "log" - "os" - - "golang.org/x/tools/gopls/internal/lsp/command/gen" -) - -func main() { - content, err := gen.Generate() - if err != nil { - log.Fatal(err) - } - if err := os.WriteFile("command_gen.go", content, 0644); err != nil { - log.Fatal(err) - } -} diff --git a/gopls/internal/lsp/completion.go b/gopls/internal/lsp/completion.go deleted file mode 100644 index 66b3a3945bf..00000000000 --- a/gopls/internal/lsp/completion.go +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - "fmt" - "strings" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/source/completion" - "golang.org/x/tools/gopls/internal/lsp/template" - "golang.org/x/tools/gopls/internal/lsp/work" - "golang.org/x/tools/gopls/internal/telemetry" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -func (s *Server) completion(ctx context.Context, params *protocol.CompletionParams) (_ *protocol.CompletionList, rerr error) { - recordLatency := telemetry.StartLatencyTimer("completion") - defer func() { - recordLatency(ctx, rerr) - }() - - ctx, done := event.Start(ctx, "lsp.Server.completion", tag.URI.Of(params.TextDocument.URI)) - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.UnknownKind) - defer release() - if !ok { - return nil, err - } - var candidates []completion.CompletionItem - var surrounding *completion.Selection - switch snapshot.FileKind(fh) { - case source.Go: - candidates, surrounding, err = completion.Completion(ctx, snapshot, fh, params.Position, params.Context) - case source.Mod: - candidates, surrounding = nil, nil - case source.Work: - cl, err := 
work.Completion(ctx, snapshot, fh, params.Position) - if err != nil { - break - } - return cl, nil - case source.Tmpl: - var cl *protocol.CompletionList - cl, err = template.Completion(ctx, snapshot, fh, params.Position, params.Context) - if err != nil { - break // use common error handling, candidates==nil - } - return cl, nil - } - if err != nil { - event.Error(ctx, "no completions found", err, tag.Position.Of(params.Position)) - } - if candidates == nil { - return &protocol.CompletionList{ - IsIncomplete: true, - Items: []protocol.CompletionItem{}, - }, nil - } - - rng, err := surrounding.Range() - if err != nil { - return nil, err - } - - // When using deep completions/fuzzy matching, report results as incomplete so - // client fetches updated completions after every key stroke. - options := snapshot.Options() - incompleteResults := options.DeepCompletion || options.Matcher == source.Fuzzy - - items := toProtocolCompletionItems(candidates, rng, options) - - return &protocol.CompletionList{ - IsIncomplete: incompleteResults, - Items: items, - }, nil -} - -func toProtocolCompletionItems(candidates []completion.CompletionItem, rng protocol.Range, options *source.Options) []protocol.CompletionItem { - var ( - items = make([]protocol.CompletionItem, 0, len(candidates)) - numDeepCompletionsSeen int - ) - for i, candidate := range candidates { - // Limit the number of deep completions to not overwhelm the user in cases - // with dozens of deep completion matches. - if candidate.Depth > 0 { - if !options.DeepCompletion { - continue - } - if numDeepCompletionsSeen >= completion.MaxDeepCompletions { - continue - } - numDeepCompletionsSeen++ - } - insertText := candidate.InsertText - if options.InsertTextFormat == protocol.SnippetTextFormat { - insertText = candidate.Snippet() - } - - // This can happen if the client has snippets disabled but the - // candidate only supports snippet insertion. 
- if insertText == "" { - continue - } - - doc := &protocol.Or_CompletionItem_documentation{ - Value: protocol.MarkupContent{ - Kind: protocol.Markdown, - Value: source.CommentToMarkdown(candidate.Documentation, options), - }, - } - if options.PreferredContentFormat != protocol.Markdown { - doc.Value = candidate.Documentation - } - item := protocol.CompletionItem{ - Label: candidate.Label, - Detail: candidate.Detail, - Kind: candidate.Kind, - TextEdit: &protocol.TextEdit{ - NewText: insertText, - Range: rng, - }, - InsertTextFormat: &options.InsertTextFormat, - AdditionalTextEdits: candidate.AdditionalTextEdits, - // This is a hack so that the client sorts completion results in the order - // according to their score. This can be removed upon the resolution of - // https://github.com/Microsoft/language-server-protocol/issues/348. - SortText: fmt.Sprintf("%05d", i), - - // Trim operators (VSCode doesn't like weird characters in - // filterText). - FilterText: strings.TrimLeft(candidate.InsertText, "&*"), - - Preselect: i == 0, - Documentation: doc, - Tags: nonNilSliceCompletionItemTag(candidate.Tags), - Deprecated: candidate.Deprecated, - } - items = append(items, item) - } - return items -} diff --git a/gopls/internal/lsp/debug/info.go b/gopls/internal/lsp/debug/info.go deleted file mode 100644 index 579e54978b7..00000000000 --- a/gopls/internal/lsp/debug/info.go +++ /dev/null @@ -1,159 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package debug exports debug information for gopls. -package debug - -import ( - "context" - "encoding/json" - "fmt" - "io" - "os" - "runtime" - "runtime/debug" - "strings" -) - -type PrintMode int - -const ( - PlainText = PrintMode(iota) - Markdown - HTML - JSON -) - -// Version is a manually-updated mechanism for tracking versions. 
-func Version() string { - if info, ok := debug.ReadBuildInfo(); ok { - if info.Main.Version != "" { - return info.Main.Version - } - } - return "(unknown)" -} - -// ServerVersion is the format used by gopls to report its version to the -// client. This format is structured so that the client can parse it easily. -type ServerVersion struct { - *debug.BuildInfo - Version string -} - -// VersionInfo returns the build info for the gopls process. If it was not -// built in module mode, we return a GOPATH-specific message with the -// hardcoded version. -func VersionInfo() *ServerVersion { - if info, ok := debug.ReadBuildInfo(); ok { - return &ServerVersion{ - Version: Version(), - BuildInfo: info, - } - } - return &ServerVersion{ - Version: Version(), - BuildInfo: &debug.BuildInfo{ - Path: "gopls, built in GOPATH mode", - GoVersion: runtime.Version(), - }, - } -} - -// PrintServerInfo writes HTML debug info to w for the Instance. -func (i *Instance) PrintServerInfo(ctx context.Context, w io.Writer) { - section(w, HTML, "Server Instance", func() { - fmt.Fprintf(w, "Start time: %v\n", i.StartTime) - fmt.Fprintf(w, "LogFile: %s\n", i.Logfile) - fmt.Fprintf(w, "pid: %d\n", os.Getpid()) - fmt.Fprintf(w, "Working directory: %s\n", i.Workdir) - fmt.Fprintf(w, "Address: %s\n", i.ServerAddress) - fmt.Fprintf(w, "Debug address: %s\n", i.DebugAddress()) - }) - PrintVersionInfo(ctx, w, true, HTML) - section(w, HTML, "Command Line", func() { - fmt.Fprintf(w, "cmdline") - }) -} - -// PrintVersionInfo writes version information to w, using the output format -// specified by mode. verbose controls whether additional information is -// written, including section headers. 
-func PrintVersionInfo(_ context.Context, w io.Writer, verbose bool, mode PrintMode) error { - info := VersionInfo() - if mode == JSON { - return printVersionInfoJSON(w, info) - } - - if !verbose { - printBuildInfo(w, info, false, mode) - return nil - } - section(w, mode, "Build info", func() { - printBuildInfo(w, info, true, mode) - }) - return nil -} - -func printVersionInfoJSON(w io.Writer, info *ServerVersion) error { - js, err := json.MarshalIndent(info, "", "\t") - if err != nil { - return err - } - _, err = fmt.Fprint(w, string(js)) - return err -} - -func section(w io.Writer, mode PrintMode, title string, body func()) { - switch mode { - case PlainText: - fmt.Fprintln(w, title) - fmt.Fprintln(w, strings.Repeat("-", len(title))) - body() - case Markdown: - fmt.Fprintf(w, "#### %s\n\n```\n", title) - body() - fmt.Fprintf(w, "```\n") - case HTML: - fmt.Fprintf(w, "

    %s

    \n
    \n", title)
    -		body()
    -		fmt.Fprint(w, "
    \n") - } -} - -func printBuildInfo(w io.Writer, info *ServerVersion, verbose bool, mode PrintMode) { - fmt.Fprintf(w, "%v %v\n", info.Path, Version()) - printModuleInfo(w, info.Main, mode) - if !verbose { - return - } - for _, dep := range info.Deps { - printModuleInfo(w, *dep, mode) - } - fmt.Fprintf(w, "go: %v\n", info.GoVersion) -} - -func printModuleInfo(w io.Writer, m debug.Module, _ PrintMode) { - fmt.Fprintf(w, " %s@%s", m.Path, m.Version) - if m.Sum != "" { - fmt.Fprintf(w, " %s", m.Sum) - } - if m.Replace != nil { - fmt.Fprintf(w, " => %v", m.Replace.Path) - } - fmt.Fprintf(w, "\n") -} - -type field struct { - index []int -} - -var fields []field - -type sessionOption struct { - Name string - Type string - Current string - Default string -} diff --git a/gopls/internal/lsp/debug/serve.go b/gopls/internal/lsp/debug/serve.go deleted file mode 100644 index 8aa2938c228..00000000000 --- a/gopls/internal/lsp/debug/serve.go +++ /dev/null @@ -1,864 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package debug - -import ( - "bytes" - "context" - "errors" - "fmt" - "html/template" - "io" - stdlog "log" - "net" - "net/http" - "net/http/pprof" - "os" - "path" - "path/filepath" - "runtime" - "strconv" - "strings" - "sync" - "time" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/debug/log" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/core" - "golang.org/x/tools/internal/event/export" - "golang.org/x/tools/internal/event/export/metric" - "golang.org/x/tools/internal/event/export/ocagent" - "golang.org/x/tools/internal/event/export/prometheus" - "golang.org/x/tools/internal/event/keys" - "golang.org/x/tools/internal/event/label" - "golang.org/x/tools/internal/event/tag" -) - -type contextKeyType int - -const ( - instanceKey contextKeyType = iota - traceKey -) - -// An Instance holds all debug information associated with a gopls instance. -type Instance struct { - Logfile string - StartTime time.Time - ServerAddress string - Workdir string - OCAgentConfig string - - LogWriter io.Writer - - exporter event.Exporter - - ocagent *ocagent.Exporter - prometheus *prometheus.Exporter - rpcs *Rpcs - traces *traces - State *State - - serveMu sync.Mutex - debugAddress string - listenedDebugAddress string -} - -// State holds debugging information related to the server state. -type State struct { - mu sync.Mutex - clients []*Client - servers []*Server -} - -func (st *State) Bugs() []bug.Bug { - return bug.List() -} - -// Caches returns the set of Cache objects currently being served. 
-func (st *State) Caches() []*cache.Cache { - var caches []*cache.Cache - seen := make(map[string]struct{}) - for _, client := range st.Clients() { - cache := client.Session.Cache() - if _, found := seen[cache.ID()]; found { - continue - } - seen[cache.ID()] = struct{}{} - caches = append(caches, cache) - } - return caches -} - -// Cache returns the Cache that matches the supplied id. -func (st *State) Cache(id string) *cache.Cache { - for _, c := range st.Caches() { - if c.ID() == id { - return c - } - } - return nil -} - -// Analysis returns the global Analysis template value. -func (st *State) Analysis() (_ analysisTmpl) { return } - -type analysisTmpl struct{} - -func (analysisTmpl) AnalyzerRunTimes() []cache.LabelDuration { return cache.AnalyzerRunTimes() } - -// Sessions returns the set of Session objects currently being served. -func (st *State) Sessions() []*cache.Session { - var sessions []*cache.Session - for _, client := range st.Clients() { - sessions = append(sessions, client.Session) - } - return sessions -} - -// Session returns the Session that matches the supplied id. -func (st *State) Session(id string) *cache.Session { - for _, s := range st.Sessions() { - if s.ID() == id { - return s - } - } - return nil -} - -// Views returns the set of View objects currently being served. -func (st *State) Views() []*cache.View { - var views []*cache.View - for _, s := range st.Sessions() { - views = append(views, s.Views()...) - } - return views -} - -// View returns the View that matches the supplied id. -func (st *State) View(id string) *cache.View { - for _, v := range st.Views() { - if v.ID() == id { - return v - } - } - return nil -} - -// Clients returns the set of Clients currently being served. -func (st *State) Clients() []*Client { - st.mu.Lock() - defer st.mu.Unlock() - clients := make([]*Client, len(st.clients)) - copy(clients, st.clients) - return clients -} - -// Client returns the Client matching the supplied id. 
-func (st *State) Client(id string) *Client { - for _, c := range st.Clients() { - if c.Session.ID() == id { - return c - } - } - return nil -} - -// Servers returns the set of Servers the instance is currently connected to. -func (st *State) Servers() []*Server { - st.mu.Lock() - defer st.mu.Unlock() - servers := make([]*Server, len(st.servers)) - copy(servers, st.servers) - return servers -} - -// A Client is an incoming connection from a remote client. -type Client struct { - Session *cache.Session - DebugAddress string - Logfile string - GoplsPath string - ServerID string - Service protocol.Server -} - -// A Server is an outgoing connection to a remote LSP server. -type Server struct { - ID string - DebugAddress string - Logfile string - GoplsPath string - ClientID string -} - -// addClient adds a client to the set being served. -func (st *State) addClient(session *cache.Session) { - st.mu.Lock() - defer st.mu.Unlock() - st.clients = append(st.clients, &Client{Session: session}) -} - -// dropClient removes a client from the set being served. -func (st *State) dropClient(session *cache.Session) { - st.mu.Lock() - defer st.mu.Unlock() - for i, c := range st.clients { - if c.Session == session { - copy(st.clients[i:], st.clients[i+1:]) - st.clients[len(st.clients)-1] = nil - st.clients = st.clients[:len(st.clients)-1] - return - } - } -} - -// updateServer updates a server to the set being queried. In practice, there should -// be at most one remote server. -func (st *State) updateServer(server *Server) { - st.mu.Lock() - defer st.mu.Unlock() - for i, existing := range st.servers { - if existing.ID == server.ID { - // Replace, rather than mutate, to avoid a race. - newServers := make([]*Server, len(st.servers)) - copy(newServers, st.servers[:i]) - newServers[i] = server - copy(newServers[i+1:], st.servers[i+1:]) - st.servers = newServers - return - } - } - st.servers = append(st.servers, server) -} - -// dropServer drops a server from the set being queried. 
-func (st *State) dropServer(id string) { - st.mu.Lock() - defer st.mu.Unlock() - for i, s := range st.servers { - if s.ID == id { - copy(st.servers[i:], st.servers[i+1:]) - st.servers[len(st.servers)-1] = nil - st.servers = st.servers[:len(st.servers)-1] - return - } - } -} - -// an http.ResponseWriter that filters writes -type filterResponse struct { - w http.ResponseWriter - edit func([]byte) []byte -} - -func (c filterResponse) Header() http.Header { - return c.w.Header() -} - -func (c filterResponse) Write(buf []byte) (int, error) { - ans := c.edit(buf) - return c.w.Write(ans) -} - -func (c filterResponse) WriteHeader(n int) { - c.w.WriteHeader(n) -} - -// replace annoying nuls by spaces -func cmdline(w http.ResponseWriter, r *http.Request) { - fake := filterResponse{ - w: w, - edit: func(buf []byte) []byte { - return bytes.ReplaceAll(buf, []byte{0}, []byte{' '}) - }, - } - pprof.Cmdline(fake, r) -} - -func (i *Instance) getCache(r *http.Request) interface{} { - return i.State.Cache(path.Base(r.URL.Path)) -} - -func (i *Instance) getAnalysis(r *http.Request) interface{} { - return i.State.Analysis() -} - -func (i *Instance) getSession(r *http.Request) interface{} { - return i.State.Session(path.Base(r.URL.Path)) -} - -func (i *Instance) getClient(r *http.Request) interface{} { - return i.State.Client(path.Base(r.URL.Path)) -} - -func (i *Instance) getServer(r *http.Request) interface{} { - i.State.mu.Lock() - defer i.State.mu.Unlock() - id := path.Base(r.URL.Path) - for _, s := range i.State.servers { - if s.ID == id { - return s - } - } - return nil -} - -func (i *Instance) getView(r *http.Request) interface{} { - return i.State.View(path.Base(r.URL.Path)) -} - -func (i *Instance) getFile(r *http.Request) interface{} { - identifier := path.Base(r.URL.Path) - sid := path.Base(path.Dir(r.URL.Path)) - s := i.State.Session(sid) - if s == nil { - return nil - } - for _, o := range s.Overlays() { - // TODO(adonovan): understand and document this comparison. 
- if o.FileIdentity().Hash.String() == identifier { - return o - } - } - return nil -} - -func (i *Instance) getInfo(r *http.Request) interface{} { - buf := &bytes.Buffer{} - i.PrintServerInfo(r.Context(), buf) - return template.HTML(buf.String()) -} - -func (i *Instance) AddService(s protocol.Server, session *cache.Session) { - for _, c := range i.State.clients { - if c.Session == session { - c.Service = s - return - } - } - stdlog.Printf("unable to find a Client to add the protocol.Server to") -} - -func getMemory(_ *http.Request) interface{} { - var m runtime.MemStats - runtime.ReadMemStats(&m) - return m -} - -func init() { - event.SetExporter(makeGlobalExporter(os.Stderr)) -} - -func GetInstance(ctx context.Context) *Instance { - if ctx == nil { - return nil - } - v := ctx.Value(instanceKey) - if v == nil { - return nil - } - return v.(*Instance) -} - -// WithInstance creates debug instance ready for use using the supplied -// configuration and stores it in the returned context. -func WithInstance(ctx context.Context, workdir, agent string) context.Context { - i := &Instance{ - StartTime: time.Now(), - Workdir: workdir, - OCAgentConfig: agent, - } - i.LogWriter = os.Stderr - ocConfig := ocagent.Discover() - //TODO: we should not need to adjust the discovered configuration - ocConfig.Address = i.OCAgentConfig - i.ocagent = ocagent.Connect(ocConfig) - i.prometheus = prometheus.New() - i.rpcs = &Rpcs{} - i.traces = &traces{} - i.State = &State{} - i.exporter = makeInstanceExporter(i) - return context.WithValue(ctx, instanceKey, i) -} - -// SetLogFile sets the logfile for use with this instance. -func (i *Instance) SetLogFile(logfile string, isDaemon bool) (func(), error) { - // TODO: probably a better solution for deferring closure to the caller would - // be for the debug instance to itself be closed, but this fixes the - // immediate bug of logs not being captured. 
- closeLog := func() {} - if logfile != "" { - if logfile == "auto" { - if isDaemon { - logfile = filepath.Join(os.TempDir(), fmt.Sprintf("gopls-daemon-%d.log", os.Getpid())) - } else { - logfile = filepath.Join(os.TempDir(), fmt.Sprintf("gopls-%d.log", os.Getpid())) - } - } - f, err := os.Create(logfile) - if err != nil { - return nil, fmt.Errorf("unable to create log file: %w", err) - } - closeLog = func() { - defer f.Close() - } - stdlog.SetOutput(io.MultiWriter(os.Stderr, f)) - i.LogWriter = f - } - i.Logfile = logfile - return closeLog, nil -} - -// Serve starts and runs a debug server in the background on the given addr. -// It also logs the port the server starts on, to allow for :0 auto assigned -// ports. -func (i *Instance) Serve(ctx context.Context, addr string) (string, error) { - stdlog.SetFlags(stdlog.Lshortfile) - if addr == "" { - return "", nil - } - i.serveMu.Lock() - defer i.serveMu.Unlock() - - if i.listenedDebugAddress != "" { - // Already serving. Return the bound address. 
- return i.listenedDebugAddress, nil - } - - i.debugAddress = addr - listener, err := net.Listen("tcp", i.debugAddress) - if err != nil { - return "", err - } - i.listenedDebugAddress = listener.Addr().String() - - port := listener.Addr().(*net.TCPAddr).Port - if strings.HasSuffix(i.debugAddress, ":0") { - stdlog.Printf("debug server listening at http://localhost:%d", port) - } - event.Log(ctx, "Debug serving", tag.Port.Of(port)) - go func() { - mux := http.NewServeMux() - mux.HandleFunc("/", render(MainTmpl, func(*http.Request) interface{} { return i })) - mux.HandleFunc("/debug/", render(DebugTmpl, nil)) - mux.HandleFunc("/debug/pprof/", pprof.Index) - mux.HandleFunc("/debug/pprof/cmdline", cmdline) - mux.HandleFunc("/debug/pprof/profile", pprof.Profile) - mux.HandleFunc("/debug/pprof/symbol", pprof.Symbol) - mux.HandleFunc("/debug/pprof/trace", pprof.Trace) - if i.prometheus != nil { - mux.HandleFunc("/metrics/", i.prometheus.Serve) - } - if i.rpcs != nil { - mux.HandleFunc("/rpc/", render(RPCTmpl, i.rpcs.getData)) - } - if i.traces != nil { - mux.HandleFunc("/trace/", render(TraceTmpl, i.traces.getData)) - } - mux.HandleFunc("/analysis/", render(AnalysisTmpl, i.getAnalysis)) - mux.HandleFunc("/cache/", render(CacheTmpl, i.getCache)) - mux.HandleFunc("/session/", render(SessionTmpl, i.getSession)) - mux.HandleFunc("/view/", render(ViewTmpl, i.getView)) - mux.HandleFunc("/client/", render(ClientTmpl, i.getClient)) - mux.HandleFunc("/server/", render(ServerTmpl, i.getServer)) - mux.HandleFunc("/file/", render(FileTmpl, i.getFile)) - mux.HandleFunc("/info", render(InfoTmpl, i.getInfo)) - mux.HandleFunc("/memory", render(MemoryTmpl, getMemory)) - - // Internal debugging helpers. 
- mux.HandleFunc("/gc", func(w http.ResponseWriter, r *http.Request) { - runtime.GC() - runtime.GC() - runtime.GC() - http.Redirect(w, r, "/memory", http.StatusTemporaryRedirect) - }) - mux.HandleFunc("/_makeabug", func(w http.ResponseWriter, r *http.Request) { - bug.Report("bug here") - http.Error(w, "made a bug", http.StatusOK) - }) - - if err := http.Serve(listener, mux); err != nil { - event.Error(ctx, "Debug server failed", err) - return - } - event.Log(ctx, "Debug server finished") - }() - return i.listenedDebugAddress, nil -} - -func (i *Instance) DebugAddress() string { - i.serveMu.Lock() - defer i.serveMu.Unlock() - return i.debugAddress -} - -func (i *Instance) ListenedDebugAddress() string { - i.serveMu.Lock() - defer i.serveMu.Unlock() - return i.listenedDebugAddress -} - -func makeGlobalExporter(stderr io.Writer) event.Exporter { - p := export.Printer{} - var pMu sync.Mutex - return func(ctx context.Context, ev core.Event, lm label.Map) context.Context { - i := GetInstance(ctx) - - if event.IsLog(ev) { - // Don't log context cancellation errors. - if err := keys.Err.Get(ev); errors.Is(err, context.Canceled) { - return ctx - } - // Make sure any log messages without an instance go to stderr. - if i == nil { - pMu.Lock() - p.WriteEvent(stderr, ev, lm) - pMu.Unlock() - } - level := log.LabeledLevel(lm) - // Exclude trace logs from LSP logs. 
- if level < log.Trace { - ctx = protocol.LogEvent(ctx, ev, lm, messageType(level)) - } - } - if i == nil { - return ctx - } - return i.exporter(ctx, ev, lm) - } -} - -func messageType(l log.Level) protocol.MessageType { - switch l { - case log.Error: - return protocol.Error - case log.Warning: - return protocol.Warning - case log.Debug: - return protocol.Log - } - return protocol.Info -} - -func makeInstanceExporter(i *Instance) event.Exporter { - exporter := func(ctx context.Context, ev core.Event, lm label.Map) context.Context { - if i.ocagent != nil { - ctx = i.ocagent.ProcessEvent(ctx, ev, lm) - } - if i.prometheus != nil { - ctx = i.prometheus.ProcessEvent(ctx, ev, lm) - } - if i.rpcs != nil { - ctx = i.rpcs.ProcessEvent(ctx, ev, lm) - } - if i.traces != nil { - ctx = i.traces.ProcessEvent(ctx, ev, lm) - } - if event.IsLog(ev) { - if s := cache.KeyCreateSession.Get(ev); s != nil { - i.State.addClient(s) - } - if sid := tag.NewServer.Get(ev); sid != "" { - i.State.updateServer(&Server{ - ID: sid, - Logfile: tag.Logfile.Get(ev), - DebugAddress: tag.DebugAddress.Get(ev), - GoplsPath: tag.GoplsPath.Get(ev), - ClientID: tag.ClientID.Get(ev), - }) - } - if s := cache.KeyShutdownSession.Get(ev); s != nil { - i.State.dropClient(s) - } - if sid := tag.EndServer.Get(ev); sid != "" { - i.State.dropServer(sid) - } - if s := cache.KeyUpdateSession.Get(ev); s != nil { - if c := i.State.Client(s.ID()); c != nil { - c.DebugAddress = tag.DebugAddress.Get(ev) - c.Logfile = tag.Logfile.Get(ev) - c.ServerID = tag.ServerID.Get(ev) - c.GoplsPath = tag.GoplsPath.Get(ev) - } - } - } - return ctx - } - // StdTrace must be above export.Spans below (by convention, export - // middleware applies its wrapped exporter last). 
- exporter = StdTrace(exporter) - metrics := metric.Config{} - registerMetrics(&metrics) - exporter = metrics.Exporter(exporter) - exporter = export.Spans(exporter) - exporter = export.Labels(exporter) - return exporter -} - -type dataFunc func(*http.Request) interface{} - -func render(tmpl *template.Template, fun dataFunc) func(http.ResponseWriter, *http.Request) { - return func(w http.ResponseWriter, r *http.Request) { - var data interface{} - if fun != nil { - data = fun(r) - } - if err := tmpl.Execute(w, data); err != nil { - event.Error(context.Background(), "", err) - http.Error(w, err.Error(), http.StatusInternalServerError) - } - } -} - -func commas(s string) string { - for i := len(s); i > 3; { - i -= 3 - s = s[:i] + "," + s[i:] - } - return s -} - -func fuint64(v uint64) string { - return commas(strconv.FormatUint(v, 10)) -} - -func fuint32(v uint32) string { - return commas(strconv.FormatUint(uint64(v), 10)) -} - -func fcontent(v []byte) string { - return string(v) -} - -var BaseTemplate = template.Must(template.New("").Parse(` - - -{{template "title" .}} - -{{block "head" .}}{{end}} - - -Main -Info -Memory -Profiling -Metrics -RPC -Trace -Analysis -
    -

    {{template "title" .}}

    -{{block "body" .}} -Unknown page -{{end}} - - - -{{define "cachelink"}}Cache {{.}}{{end}} -{{define "clientlink"}}Client {{.}}{{end}} -{{define "serverlink"}}Server {{.}}{{end}} -{{define "sessionlink"}}Session {{.}}{{end}} -{{define "viewlink"}}View {{.}}{{end}} -`)).Funcs(template.FuncMap{ - "fuint64": fuint64, - "fuint32": fuint32, - "fcontent": fcontent, - "localAddress": func(s string) string { - // Try to translate loopback addresses to localhost, both for cosmetics and - // because unspecified ipv6 addresses can break links on Windows. - // - // TODO(rfindley): In the future, it would be better not to assume the - // server is running on localhost, and instead construct this address using - // the remote host. - host, port, err := net.SplitHostPort(s) - if err != nil { - return s - } - ip := net.ParseIP(host) - if ip == nil { - return s - } - if ip.IsLoopback() || ip.IsUnspecified() { - return "localhost:" + port - } - return s - }, - // TODO(rfindley): re-enable option inspection. - // "options": func(s *cache.Session) []sessionOption { - // return showOptions(s.Options()) - // }, -}) - -var MainTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` -{{define "title"}}GoPls server information{{end}} -{{define "body"}} -

    Caches

    -
      {{range .State.Caches}}
    • {{template "cachelink" .ID}}
    • {{end}}
    -

    Sessions

    -
      {{range .State.Sessions}}
    • {{template "sessionlink" .ID}} from {{template "cachelink" .Cache.ID}}
    • {{end}}
    -

    Clients

    -
      {{range .State.Clients}}
    • {{template "clientlink" .Session.ID}}
    • {{end}}
    -

    Servers

    -
      {{range .State.Servers}}
    • {{template "serverlink" .ID}}
    • {{end}}
    -

    Bug reports

    -
    {{range .State.Bugs}}
    {{.Key}}
    {{.Description}}
    {{end}}
    -{{end}} -`)) - -var InfoTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` -{{define "title"}}GoPls version information{{end}} -{{define "body"}} -{{.}} -{{end}} -`)) - -var MemoryTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` -{{define "title"}}Gopls memory usage{{end}} -{{define "head"}}{{end}} -{{define "body"}} -
    -

    Stats

    - - - - - - - - - - - - - - - - -
    Allocated bytes{{fuint64 .HeapAlloc}}
    Total allocated bytes{{fuint64 .TotalAlloc}}
    System bytes{{fuint64 .Sys}}
    Heap system bytes{{fuint64 .HeapSys}}
    Malloc calls{{fuint64 .Mallocs}}
    Frees{{fuint64 .Frees}}
    Idle heap bytes{{fuint64 .HeapIdle}}
    In use bytes{{fuint64 .HeapInuse}}
    Released to system bytes{{fuint64 .HeapReleased}}
    Heap object count{{fuint64 .HeapObjects}}
    Stack in use bytes{{fuint64 .StackInuse}}
    Stack from system bytes{{fuint64 .StackSys}}
    Bucket hash bytes{{fuint64 .BuckHashSys}}
    GC metadata bytes{{fuint64 .GCSys}}
    Off heap bytes{{fuint64 .OtherSys}}
    -

    By size

    - - -{{range .BySize}}{{end}} -
    SizeMallocsFrees
    {{fuint32 .Size}}{{fuint64 .Mallocs}}{{fuint64 .Frees}}
    -{{end}} -`)) - -var DebugTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` -{{define "title"}}GoPls Debug pages{{end}} -{{define "body"}} -Profiling -{{end}} -`)) - -var CacheTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` -{{define "title"}}Cache {{.ID}}{{end}} -{{define "body"}} -

    memoize.Store entries

    -
      {{range $k,$v := .MemStats}}
    • {{$k}} - {{$v}}
    • {{end}}
    -{{end}} -`)) - -var AnalysisTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` -{{define "title"}}Analysis{{end}} -{{define "body"}} -

    Analyzer.Run times

    -
      {{range .AnalyzerRunTimes}}
    • {{.Duration}} {{.Label}}
    • {{end}}
    -{{end}} -`)) - -var ClientTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` -{{define "title"}}Client {{.Session.ID}}{{end}} -{{define "body"}} -Using session: {{template "sessionlink" .Session.ID}}
    -{{if .DebugAddress}}Debug this client at: {{localAddress .DebugAddress}}
    {{end}} -Logfile: {{.Logfile}}
    -Gopls Path: {{.GoplsPath}}
    -

    Diagnostics

    -{{/*Service: []protocol.Server; each server has map[uri]fileReports; - each fileReport: map[diagnosticSoure]diagnosticReport - diagnosticSource is one of 5 source - diagnosticReport: snapshotID and map[hash]*source.Diagnostic - sourceDiagnostic: struct { - Range protocol.Range - Message string - Source string - Code string - CodeHref string - Severity protocol.DiagnosticSeverity - Tags []protocol.DiagnosticTag - - Related []RelatedInformation - } - RelatedInformation: struct { - URI span.URI - Range protocol.Range - Message string - } - */}} -
      {{range $k, $v := .Service.Diagnostics}}
    • {{$k}}:
        {{range $v}}
      1. {{.}}
      2. {{end}}
    • {{end}}
    -{{end}} -`)) - -var ServerTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` -{{define "title"}}Server {{.ID}}{{end}} -{{define "body"}} -{{if .DebugAddress}}Debug this server at: {{localAddress .DebugAddress}}
    {{end}} -Logfile: {{.Logfile}}
    -Gopls Path: {{.GoplsPath}}
    -{{end}} -`)) - -var SessionTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` -{{define "title"}}Session {{.ID}}{{end}} -{{define "body"}} -From: {{template "cachelink" .Cache.ID}}
    -

    Views

    -
      {{range .Views}}
    • {{.Name}} is {{template "viewlink" .ID}} in {{.Folder}}
    • {{end}}
    -

    Overlays

    -{{$session := .}} - -{{end}} -`)) - -var ViewTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` -{{define "title"}}View {{.ID}}{{end}} -{{define "body"}} -Name: {{.Name}}
    -Folder: {{.Folder}}
    -{{end}} -`)) - -var FileTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` -{{define "title"}}Overlay {{.FileIdentity.Hash}}{{end}} -{{define "body"}} -{{with .}} - URI: {{.URI}}
    - Identifier: {{.FileIdentity.Hash}}
    - Version: {{.Version}}
    - Kind: {{.Kind}}
    -{{end}} -

    Contents

    -
    {{fcontent .Content}}
    -{{end}} -`)) diff --git a/gopls/internal/lsp/definition.go b/gopls/internal/lsp/definition.go deleted file mode 100644 index 892e48d6377..00000000000 --- a/gopls/internal/lsp/definition.go +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - "fmt" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/template" - "golang.org/x/tools/gopls/internal/telemetry" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -func (s *Server) definition(ctx context.Context, params *protocol.DefinitionParams) (_ []protocol.Location, rerr error) { - recordLatency := telemetry.StartLatencyTimer("definition") - defer func() { - recordLatency(ctx, rerr) - }() - - ctx, done := event.Start(ctx, "lsp.Server.definition", tag.URI.Of(params.TextDocument.URI)) - defer done() - - // TODO(rfindley): definition requests should be multiplexed across all views. - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.UnknownKind) - defer release() - if !ok { - return nil, err - } - switch kind := snapshot.FileKind(fh); kind { - case source.Tmpl: - return template.Definition(snapshot, fh, params.Position) - case source.Go: - return source.Definition(ctx, snapshot, fh, params.Position) - default: - return nil, fmt.Errorf("can't find definitions for file type %s", kind) - } -} - -func (s *Server) typeDefinition(ctx context.Context, params *protocol.TypeDefinitionParams) ([]protocol.Location, error) { - ctx, done := event.Start(ctx, "lsp.Server.typeDefinition", tag.URI.Of(params.TextDocument.URI)) - defer done() - - // TODO(rfindley): type definition requests should be multiplexed across all views. 
- snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.Go) - defer release() - if !ok { - return nil, err - } - switch kind := snapshot.FileKind(fh); kind { - case source.Go: - return source.TypeDefinition(ctx, snapshot, fh, params.Position) - default: - return nil, fmt.Errorf("can't find type definitions for file type %s", kind) - } -} diff --git a/gopls/internal/lsp/diagnostics.go b/gopls/internal/lsp/diagnostics.go deleted file mode 100644 index 4ea02cff9bf..00000000000 --- a/gopls/internal/lsp/diagnostics.go +++ /dev/null @@ -1,870 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - "crypto/sha256" - "errors" - "fmt" - "os" - "path/filepath" - "sort" - "strings" - "sync" - "time" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/mod" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/template" - "golang.org/x/tools/gopls/internal/lsp/work" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -// TODO(rfindley): simplify this very complicated logic for publishing -// diagnostics. While doing so, ensure that we can test subtle logic such as -// for multi-pass diagnostics. - -// diagnosticSource differentiates different sources of diagnostics. -// -// Diagnostics from the same source overwrite each other, whereas diagnostics -// from different sources do not. Conceptually, the server state is a mapping -// from diagnostics source to a set of diagnostics, and each storeDiagnostics -// operation updates one entry of that mapping. 
-type diagnosticSource int - -const ( - modParseSource diagnosticSource = iota - modTidySource - gcDetailsSource - analysisSource - typeCheckSource - orphanedSource - workSource - modCheckUpgradesSource - modVulncheckSource // source.Govulncheck + source.Vulncheck -) - -// A diagnosticReport holds results for a single diagnostic source. -type diagnosticReport struct { - snapshotID source.GlobalSnapshotID // global snapshot ID on which the report was computed - publishedHash string // last published hash for this (URI, source) - diags map[string]*source.Diagnostic -} - -// fileReports holds a collection of diagnostic reports for a single file, as -// well as the hash of the last published set of diagnostics. -type fileReports struct { - // publishedSnapshotID is the last snapshot ID for which we have "published" - // diagnostics (though the publishDiagnostics notification may not have - // actually been sent, if nothing changed). - // - // Specifically, publishedSnapshotID is updated to a later snapshot ID when - // we either: - // (1) publish diagnostics for the file for a snapshot, or - // (2) determine that published diagnostics are valid for a new snapshot. - // - // Notably publishedSnapshotID may not match the snapshot id on individual reports in - // the reports map: - // - we may have published partial diagnostics from only a subset of - // diagnostic sources for which new results have been computed, or - // - we may have started computing reports for an even new snapshot, but not - // yet published. - // - // This prevents gopls from publishing stale diagnostics. - publishedSnapshotID source.GlobalSnapshotID - - // publishedHash is a hash of the latest diagnostics published for the file. - publishedHash string - - // If set, mustPublish marks diagnostics as needing publication, independent - // of whether their publishedHash has changed. - mustPublish bool - - // The last stored diagnostics for each diagnostic source. 
- reports map[diagnosticSource]*diagnosticReport -} - -func (d diagnosticSource) String() string { - switch d { - case modParseSource: - return "FromModParse" - case modTidySource: - return "FromModTidy" - case gcDetailsSource: - return "FromGCDetails" - case analysisSource: - return "FromAnalysis" - case typeCheckSource: - return "FromTypeChecking" - case orphanedSource: - return "FromOrphans" - case workSource: - return "FromGoWork" - case modCheckUpgradesSource: - return "FromCheckForUpgrades" - case modVulncheckSource: - return "FromModVulncheck" - default: - return fmt.Sprintf("From?%d?", d) - } -} - -// hashDiagnostics computes a hash to identify diags. -// -// hashDiagnostics mutates its argument (via sorting). -func hashDiagnostics(diags ...*source.Diagnostic) string { - if len(diags) == 0 { - return emptyDiagnosticsHash - } - return computeDiagnosticHash(diags...) -} - -// opt: pre-computed hash for empty diagnostics -var emptyDiagnosticsHash = computeDiagnosticHash() - -// computeDiagnosticHash should only be called from hashDiagnostics. -// -// TODO(rfindley): this should use source.Hash. 
-func computeDiagnosticHash(diags ...*source.Diagnostic) string { - source.SortDiagnostics(diags) - h := sha256.New() - for _, d := range diags { - for _, t := range d.Tags { - fmt.Fprintf(h, "tag: %s\n", t) - } - for _, r := range d.Related { - fmt.Fprintf(h, "related: %s %s %s\n", r.Location.URI.SpanURI(), r.Message, r.Location.Range) - } - fmt.Fprintf(h, "code: %s\n", d.Code) - fmt.Fprintf(h, "codeHref: %s\n", d.CodeHref) - fmt.Fprintf(h, "message: %s\n", d.Message) - fmt.Fprintf(h, "range: %s\n", d.Range) - fmt.Fprintf(h, "severity: %s\n", d.Severity) - fmt.Fprintf(h, "source: %s\n", d.Source) - if d.BundledFixes != nil { - fmt.Fprintf(h, "fixes: %s\n", *d.BundledFixes) - } - } - return fmt.Sprintf("%x", h.Sum(nil)) -} - -func (s *Server) diagnoseSnapshots(snapshots map[source.Snapshot][]span.URI, onDisk bool, cause ModificationSource) { - var diagnosticWG sync.WaitGroup - for snapshot, uris := range snapshots { - if snapshot.Options().DiagnosticsTrigger == source.DiagnosticsOnSave && cause == FromDidChange { - continue // user requested to update the diagnostics only on save. do not diagnose yet. - } - diagnosticWG.Add(1) - go func(snapshot source.Snapshot, uris []span.URI) { - defer diagnosticWG.Done() - s.diagnoseSnapshot(snapshot, uris, onDisk, snapshot.Options().DiagnosticsDelay) - }(snapshot, uris) - } - diagnosticWG.Wait() -} - -// diagnoseSnapshot computes and publishes diagnostics for the given snapshot. -// -// If delay is non-zero, computing diagnostics does not start until after this -// delay has expired, to allow work to be cancelled by subsequent changes. -// -// If changedURIs is non-empty, it is a set of recently changed files that -// should be diagnosed immediately, and onDisk reports whether these file -// changes came from a change to on-disk files. -// -// TODO(rfindley): eliminate the onDisk parameter, which looks misplaced. If we -// don't want to diagnose changes on disk, filter out the changedURIs. 
-func (s *Server) diagnoseSnapshot(snapshot source.Snapshot, changedURIs []span.URI, onDisk bool, delay time.Duration) { - ctx := snapshot.BackgroundContext() - ctx, done := event.Start(ctx, "Server.diagnoseSnapshot", source.SnapshotLabels(snapshot)...) - defer done() - - if delay > 0 { - // 2-phase diagnostics. - // - // The first phase just parses and type-checks (but - // does not analyze) packages directly affected by - // file modifications. - // - // The second phase runs after the delay, and does everything. - // - // We wait a brief delay before the first phase, to allow higher priority - // work such as autocompletion to acquire the type checking mutex (though - // typically both diagnosing changed files and performing autocompletion - // will be doing the same work: recomputing active packages). - const minDelay = 20 * time.Millisecond - select { - case <-time.After(minDelay): - case <-ctx.Done(): - return - } - - if len(changedURIs) > 0 { - s.diagnoseChangedFiles(ctx, snapshot, changedURIs, onDisk) - s.publishDiagnostics(ctx, false, snapshot) - } - - if delay < minDelay { - delay = 0 - } else { - delay -= minDelay - } - - select { - case <-time.After(delay): - case <-ctx.Done(): - return - } - } - - s.diagnose(ctx, snapshot, analyzeOpenPackages) - s.publishDiagnostics(ctx, true, snapshot) -} - -func (s *Server) diagnoseChangedFiles(ctx context.Context, snapshot source.Snapshot, uris []span.URI, onDisk bool) { - ctx, done := event.Start(ctx, "Server.diagnoseChangedFiles", source.SnapshotLabels(snapshot)...) - defer done() - - toDiagnose := make(map[source.PackageID]*source.Metadata) - for _, uri := range uris { - // If the change is only on-disk and the file is not open, don't - // directly request its package. It may not be a workspace package. - if onDisk && !snapshot.IsOpen(uri) { - continue - } - // If the file is not known to the snapshot (e.g., if it was deleted), - // don't diagnose it. 
- if snapshot.FindFile(uri) == nil { - continue - } - - // Don't request type-checking for builtin.go: it's not a real package. - if snapshot.IsBuiltin(uri) { - continue - } - - // Don't diagnose files that are ignored by `go list` (e.g. testdata). - if snapshot.IgnoredFile(uri) { - continue - } - - // Find all packages that include this file and diagnose them in parallel. - meta, err := source.NarrowestMetadataForFile(ctx, snapshot, uri) - if err != nil { - if ctx.Err() != nil { - return - } - // TODO(findleyr): we should probably do something with the error here, - // but as of now this can fail repeatedly if load fails, so can be too - // noisy to log (and we'll handle things later in the slow pass). - continue - } - toDiagnose[meta.ID] = meta - } - s.diagnosePkgs(ctx, snapshot, toDiagnose, nil) -} - -// analysisMode parameterizes analysis behavior of a call to diagnosePkgs. -type analysisMode int - -const ( - analyzeNothing analysisMode = iota // don't run any analysis - analyzeOpenPackages // run analysis on packages with open files - analyzeEverything // run analysis on all packages -) - -// diagnose is a helper function for running diagnostics with a given context. -// Do not call it directly. forceAnalysis is only true for testing purposes. -func (s *Server) diagnose(ctx context.Context, snapshot source.Snapshot, analyze analysisMode) { - ctx, done := event.Start(ctx, "Server.diagnose", source.SnapshotLabels(snapshot)...) - defer done() - - // Wait for a free diagnostics slot. - // TODO(adonovan): opt: shouldn't it be the analysis implementation's - // job to de-dup and limit resource consumption? In any case this - // function spends most its time waiting for awaitLoaded, at - // least initially. 
- select { - case <-ctx.Done(): - return - case s.diagnosticsSema <- struct{}{}: - } - defer func() { - <-s.diagnosticsSema - }() - - // common code for dispatching diagnostics - store := func(dsource diagnosticSource, operation string, diagsByFile map[span.URI][]*source.Diagnostic, err error, merge bool) { - if err != nil { - event.Error(ctx, "warning: while "+operation, err, source.SnapshotLabels(snapshot)...) - } - for uri, diags := range diagsByFile { - if uri == "" { - event.Error(ctx, "missing URI while "+operation, fmt.Errorf("empty URI"), tag.Directory.Of(snapshot.View().Folder().Filename())) - continue - } - s.storeDiagnostics(snapshot, uri, dsource, diags, merge) - } - } - - // Diagnostics below are organized by increasing specificity: - // go.work > mod > mod upgrade > mod vuln > package, etc. - - // Diagnose go.work file. - workReports, workErr := work.Diagnostics(ctx, snapshot) - if ctx.Err() != nil { - return - } - store(workSource, "diagnosing go.work file", workReports, workErr, true) - - // Diagnose go.mod file. - modReports, modErr := mod.Diagnostics(ctx, snapshot) - if ctx.Err() != nil { - return - } - store(modParseSource, "diagnosing go.mod file", modReports, modErr, true) - - // Diagnose go.mod upgrades. - upgradeReports, upgradeErr := mod.UpgradeDiagnostics(ctx, snapshot) - if ctx.Err() != nil { - return - } - store(modCheckUpgradesSource, "diagnosing go.mod upgrades", upgradeReports, upgradeErr, true) - - // Diagnose vulnerabilities. 
- vulnReports, vulnErr := mod.VulnerabilityDiagnostics(ctx, snapshot) - if ctx.Err() != nil { - return - } - store(modVulncheckSource, "diagnosing vulnerabilities", vulnReports, vulnErr, false) - - workspace, err := snapshot.WorkspaceMetadata(ctx) - if s.shouldIgnoreError(ctx, snapshot, err) { - return - } - criticalErr := snapshot.CriticalError(ctx) - if ctx.Err() != nil { // must check ctx after GetCriticalError - return - } - - // Show the error as a progress error report so that it appears in the - // status bar. If a client doesn't support progress reports, the error - // will still be shown as a ShowMessage. If there is no error, any running - // error progress reports will be closed. - s.showCriticalErrorStatus(ctx, snapshot, criticalErr) - - // Diagnose template (.tmpl) files. - for _, f := range snapshot.Templates() { - diags := template.Diagnose(f) - s.storeDiagnostics(snapshot, f.URI(), typeCheckSource, diags, true) - } - - // If there are no workspace packages, there is nothing to diagnose and - // there are no orphaned files. - if len(workspace) == 0 { - return - } - - var wg sync.WaitGroup // for potentially slow operations below - - // Maybe run go mod tidy (if it has been invalidated). - // - // Since go mod tidy can be slow, we run it concurrently to diagnostics. - wg.Add(1) - go func() { - defer wg.Done() - modTidyReports, err := mod.TidyDiagnostics(ctx, snapshot) - store(modTidySource, "running go mod tidy", modTidyReports, err, true) - }() - - // Run type checking and go/analysis diagnosis of packages in parallel. 
- var ( - seen = map[span.URI]struct{}{} - toDiagnose = make(map[source.PackageID]*source.Metadata) - toAnalyze = make(map[source.PackageID]unit) - ) - for _, m := range workspace { - var hasNonIgnored, hasOpenFile bool - for _, uri := range m.CompiledGoFiles { - seen[uri] = struct{}{} - if !hasNonIgnored && !snapshot.IgnoredFile(uri) { - hasNonIgnored = true - } - if !hasOpenFile && snapshot.IsOpen(uri) { - hasOpenFile = true - } - } - if hasNonIgnored { - toDiagnose[m.ID] = m - if analyze == analyzeEverything || analyze == analyzeOpenPackages && hasOpenFile { - toAnalyze[m.ID] = unit{} - } - } - } - - wg.Add(1) - go func() { - s.diagnosePkgs(ctx, snapshot, toDiagnose, toAnalyze) - wg.Done() - }() - - wg.Wait() - - // Orphaned files. - // Confirm that every opened file belongs to a package (if any exist in - // the workspace). Otherwise, add a diagnostic to the file. - if diags, err := snapshot.OrphanedFileDiagnostics(ctx); err == nil { - for uri, diag := range diags { - s.storeDiagnostics(snapshot, uri, orphanedSource, []*source.Diagnostic{diag}, true) - } - } else { - if ctx.Err() == nil { - event.Error(ctx, "computing orphaned file diagnostics", err, source.SnapshotLabels(snapshot)...) - } - } -} - -// diagnosePkgs type checks packages in toDiagnose, and analyzes packages in -// toAnalyze, merging their diagnostics. Packages in toAnalyze must be a subset -// of the packages in toDiagnose. -// -// It also implements gc_details diagnostics. -// -// TODO(rfindley): revisit handling of analysis gc_details. It may be possible -// to merge this function with Server.diagnose, thereby avoiding the two layers -// of concurrent dispatch: as of writing we concurrently run TidyDiagnostics -// and diagnosePkgs, and diagnosePkgs concurrently runs PackageDiagnostics and -// analysis. 
-func (s *Server) diagnosePkgs(ctx context.Context, snapshot source.Snapshot, toDiagnose map[source.PackageID]*source.Metadata, toAnalyze map[source.PackageID]unit) { - ctx, done := event.Start(ctx, "Server.diagnosePkgs", source.SnapshotLabels(snapshot)...) - defer done() - - // Analyze and type-check concurrently, since they are independent - // operations. - var ( - wg sync.WaitGroup - pkgDiags map[span.URI][]*source.Diagnostic - analysisDiags = make(map[span.URI][]*source.Diagnostic) - ) - - // Collect package diagnostics. - wg.Add(1) - go func() { - defer wg.Done() - var ids []source.PackageID - for id := range toDiagnose { - ids = append(ids, id) - } - var err error - pkgDiags, err = snapshot.PackageDiagnostics(ctx, ids...) - if err != nil { - event.Error(ctx, "warning: diagnostics failed", err, source.SnapshotLabels(snapshot)...) - } - }() - - // Get diagnostics from analysis framework. - // This includes type-error analyzers, which suggest fixes to compiler errors. - wg.Add(1) - go func() { - defer wg.Done() - diags, err := source.Analyze(ctx, snapshot, toAnalyze, s.progress) - if err != nil { - var tagStr string // sorted comma-separated list of package IDs - { - // TODO(adonovan): replace with a generic map[S]any -> string - // function in the tag package, and use maps.Keys + slices.Sort. - keys := make([]string, 0, len(toDiagnose)) - for id := range toDiagnose { - keys = append(keys, string(id)) - } - sort.Strings(keys) - tagStr = strings.Join(keys, ",") - } - event.Error(ctx, "warning: analyzing package", err, append(source.SnapshotLabels(snapshot), tag.Package.Of(tagStr))...) - return - } - for uri, diags := range diags { - analysisDiags[uri] = append(analysisDiags[uri], diags...) - } - }() - - wg.Wait() - - // TODO(rfindley): remove the guards against snapshot.IsBuiltin, after the - // gopls@v0.12.0 release. Packages should not be producing diagnostics for - // the builtin file: I do not know why this logic existed previously. 
- - // Merge analysis diagnostics with package diagnostics, and store the - // resulting analysis diagnostics. - for uri, adiags := range analysisDiags { - if snapshot.IsBuiltin(uri) { - bug.Reportf("go/analysis reported diagnostics for the builtin file: %v", adiags) - continue - } - tdiags := pkgDiags[uri] - var tdiags2, adiags2 []*source.Diagnostic - source.CombineDiagnostics(tdiags, adiags, &tdiags2, &adiags2) - pkgDiags[uri] = tdiags2 - s.storeDiagnostics(snapshot, uri, analysisSource, adiags2, true) - } - - // golang/go#59587: guarantee that we store type-checking diagnostics for every compiled - // package file. - // - // Without explicitly storing empty diagnostics, the eager diagnostics - // publication for changed files will not publish anything for files with - // empty diagnostics. - storedPkgDiags := make(map[span.URI]bool) - for _, m := range toDiagnose { - for _, uri := range m.CompiledGoFiles { - s.storeDiagnostics(snapshot, uri, typeCheckSource, pkgDiags[uri], true) - storedPkgDiags[uri] = true - } - } - // Store the package diagnostics. - for uri, diags := range pkgDiags { - if storedPkgDiags[uri] { - continue - } - // builtin.go exists only for documentation purposes, and is not valid Go code. - // Don't report distracting errors - if snapshot.IsBuiltin(uri) { - bug.Reportf("type checking reported diagnostics for the builtin file: %v", diags) - continue - } - s.storeDiagnostics(snapshot, uri, typeCheckSource, diags, true) - } - - // Process requested gc_details diagnostics. - // - // TODO(rfindley): this could be improved: - // 1. This should memoize its results if the package has not changed. - // 2. This should not even run gc_details if the package contains unsaved - // files. - // 3. See note below about using FindFile. 
- var toGCDetail map[source.PackageID]*source.Metadata - s.gcOptimizationDetailsMu.Lock() - for id := range s.gcOptimizationDetails { - if m, ok := toDiagnose[id]; ok { - if toGCDetail == nil { - toGCDetail = make(map[source.PackageID]*source.Metadata) - } - toGCDetail[id] = m - } - } - s.gcOptimizationDetailsMu.Unlock() - - for _, m := range toGCDetail { - gcReports, err := source.GCOptimizationDetails(ctx, snapshot, m) - if err != nil { - event.Error(ctx, "warning: gc details", err, append(source.SnapshotLabels(snapshot), tag.Package.Of(string(m.ID)))...) - } - s.gcOptimizationDetailsMu.Lock() - _, enableGCDetails := s.gcOptimizationDetails[m.ID] - - // NOTE(golang/go#44826): hold the gcOptimizationDetails lock, and re-check - // whether gc optimization details are enabled, while storing gc_details - // results. This ensures that the toggling of GC details and clearing of - // diagnostics does not race with storing the results here. - if enableGCDetails { - for uri, diags := range gcReports { - // TODO(rfindley): remove the use of FindFile here, and use ReadFile - // instead. Isn't it enough to know that the package came from the - // snapshot? Any reports should apply to the snapshot. - fh := snapshot.FindFile(uri) - // Don't publish gc details for unsaved buffers, since the underlying - // logic operates on the file on disk. - if fh == nil || !fh.SameContentsOnDisk() { - continue - } - s.storeDiagnostics(snapshot, uri, gcDetailsSource, diags, true) - } - } - s.gcOptimizationDetailsMu.Unlock() - } -} - -// mustPublishDiagnostics marks the uri as needing publication, independent of -// whether the published contents have changed. -// -// This can be used for ensuring gopls publishes diagnostics after certain file -// events. 
-func (s *Server) mustPublishDiagnostics(uri span.URI) { - s.diagnosticsMu.Lock() - defer s.diagnosticsMu.Unlock() - - if s.diagnostics[uri] == nil { - s.diagnostics[uri] = &fileReports{ - publishedHash: hashDiagnostics(), // Hash for 0 diagnostics. - reports: map[diagnosticSource]*diagnosticReport{}, - } - } - s.diagnostics[uri].mustPublish = true -} - -// storeDiagnostics stores results from a single diagnostic source. If merge is -// true, it merges results into any existing results for this snapshot. -// -// Mutates (sorts) diags. -// -// TODO(hyangah): investigate whether we can unconditionally overwrite previous report.diags -// with the new diags and eliminate the need for the `merge` flag. -func (s *Server) storeDiagnostics(snapshot source.Snapshot, uri span.URI, dsource diagnosticSource, diags []*source.Diagnostic, merge bool) { - // Safeguard: ensure that the file actually exists in the snapshot - // (see golang.org/issues/38602). - fh := snapshot.FindFile(uri) - if fh == nil { - return - } - - s.diagnosticsMu.Lock() - defer s.diagnosticsMu.Unlock() - if s.diagnostics[uri] == nil { - s.diagnostics[uri] = &fileReports{ - publishedHash: hashDiagnostics(), // Hash for 0 diagnostics. - reports: map[diagnosticSource]*diagnosticReport{}, - } - } - report := s.diagnostics[uri].reports[dsource] - if report == nil { - report = new(diagnosticReport) - s.diagnostics[uri].reports[dsource] = report - } - // Don't set obsolete diagnostics. - if report.snapshotID > snapshot.GlobalID() { - return - } - if report.diags == nil || report.snapshotID != snapshot.GlobalID() || !merge { - report.diags = map[string]*source.Diagnostic{} - } - report.snapshotID = snapshot.GlobalID() - for _, d := range diags { - report.diags[hashDiagnostics(d)] = d - } -} - -// clearDiagnosticSource clears all diagnostics for a given source type. 
It is -// necessary for cases where diagnostics have been invalidated by something -// other than a snapshot change, for example when gc_details is toggled. -func (s *Server) clearDiagnosticSource(dsource diagnosticSource) { - s.diagnosticsMu.Lock() - defer s.diagnosticsMu.Unlock() - for _, reports := range s.diagnostics { - delete(reports.reports, dsource) - } -} - -const WorkspaceLoadFailure = "Error loading workspace" - -// showCriticalErrorStatus shows the error as a progress report. -// If the error is nil, it clears any existing error progress report. -func (s *Server) showCriticalErrorStatus(ctx context.Context, snapshot source.Snapshot, err *source.CriticalError) { - s.criticalErrorStatusMu.Lock() - defer s.criticalErrorStatusMu.Unlock() - - // Remove all newlines so that the error message can be formatted in a - // status bar. - var errMsg string - if err != nil { - event.Error(ctx, "errors loading workspace", err.MainError, source.SnapshotLabels(snapshot)...) - for _, d := range err.Diagnostics { - s.storeDiagnostics(snapshot, d.URI, modParseSource, []*source.Diagnostic{d}, true) - } - errMsg = strings.ReplaceAll(err.MainError.Error(), "\n", " ") - } - - if s.criticalErrorStatus == nil { - if errMsg != "" { - s.criticalErrorStatus = s.progress.Start(ctx, WorkspaceLoadFailure, errMsg, nil, nil) - } - return - } - - // If an error is already shown to the user, update it or mark it as - // resolved. - if errMsg == "" { - s.criticalErrorStatus.End(ctx, "Done.") - s.criticalErrorStatus = nil - } else { - s.criticalErrorStatus.Report(ctx, errMsg, 0) - } -} - -// publishDiagnostics collects and publishes any unpublished diagnostic reports. -func (s *Server) publishDiagnostics(ctx context.Context, final bool, snapshot source.Snapshot) { - ctx, done := event.Start(ctx, "Server.publishDiagnostics", source.SnapshotLabels(snapshot)...) 
- defer done() - - s.diagnosticsMu.Lock() - defer s.diagnosticsMu.Unlock() - - for uri, r := range s.diagnostics { - // Global snapshot IDs are monotonic, so we use them to enforce an ordering - // for diagnostics. - // - // If we've already delivered diagnostics for a future snapshot for this - // file, do not deliver them. See golang/go#42837 for an example of why - // this is necessary. - // - // TODO(rfindley): even using a global snapshot ID, this mechanism is - // potentially racy: elsewhere in the code (e.g. invalidateContent) we - // allow for multiple views track a given file. In this case, we should - // either only report diagnostics for snapshots from the "best" view of a - // URI, or somehow merge diagnostics from multiple views. - if r.publishedSnapshotID > snapshot.GlobalID() { - continue - } - - anyReportsChanged := false - reportHashes := map[diagnosticSource]string{} - var diags []*source.Diagnostic - for dsource, report := range r.reports { - if report.snapshotID != snapshot.GlobalID() { - continue - } - var reportDiags []*source.Diagnostic - for _, d := range report.diags { - diags = append(diags, d) - reportDiags = append(reportDiags, d) - } - - hash := hashDiagnostics(reportDiags...) - if hash != report.publishedHash { - anyReportsChanged = true - } - reportHashes[dsource] = hash - } - - if !final && !anyReportsChanged { - // Don't invalidate existing reports on the client if we haven't got any - // new information. - continue - } - - hash := hashDiagnostics(diags...) - if hash == r.publishedHash && !r.mustPublish { - // Update snapshotID to be the latest snapshot for which this diagnostic - // hash is valid. 
- r.publishedSnapshotID = snapshot.GlobalID() - continue - } - var version int32 - if fh := snapshot.FindFile(uri); fh != nil { // file may have been deleted - version = fh.Version() - } - if err := s.client.PublishDiagnostics(ctx, &protocol.PublishDiagnosticsParams{ - Diagnostics: toProtocolDiagnostics(diags), - URI: protocol.URIFromSpanURI(uri), - Version: version, - }); err == nil { - r.publishedHash = hash - r.mustPublish = false // diagnostics have been successfully published - r.publishedSnapshotID = snapshot.GlobalID() - // When we publish diagnostics for a file, we must update the - // publishedHash for every report, not just the reports that were - // published. Eliding a report is equivalent to publishing empty - // diagnostics. - for dsource, report := range r.reports { - if hash, ok := reportHashes[dsource]; ok { - report.publishedHash = hash - } else { - // The report was not (yet) stored for this snapshot. Record that we - // published no diagnostics from this source. - report.publishedHash = hashDiagnostics() - } - } - } else { - if ctx.Err() != nil { - // Publish may have failed due to a cancelled context. 
- return - } - event.Error(ctx, "publishReports: failed to deliver diagnostic", err, tag.URI.Of(uri)) - } - } -} - -func toProtocolDiagnostics(diagnostics []*source.Diagnostic) []protocol.Diagnostic { - reports := []protocol.Diagnostic{} - for _, diag := range diagnostics { - pdiag := protocol.Diagnostic{ - // diag.Message might start with \n or \t - Message: strings.TrimSpace(diag.Message), - Range: diag.Range, - Severity: diag.Severity, - Source: string(diag.Source), - Tags: emptySliceDiagnosticTag(diag.Tags), - RelatedInformation: diag.Related, - Data: diag.BundledFixes, - } - if diag.Code != "" { - pdiag.Code = diag.Code - } - if diag.CodeHref != "" { - pdiag.CodeDescription = &protocol.CodeDescription{Href: diag.CodeHref} - } - reports = append(reports, pdiag) - } - return reports -} - -func (s *Server) shouldIgnoreError(ctx context.Context, snapshot source.Snapshot, err error) bool { - if err == nil { // if there is no error at all - return false - } - if errors.Is(err, context.Canceled) { - return true - } - // If the folder has no Go code in it, we shouldn't spam the user with a warning. - // TODO(rfindley): surely it is not correct to walk the folder here just to - // suppress diagnostics, every time we compute diagnostics. 
- var hasGo bool - _ = filepath.Walk(snapshot.View().Folder().Filename(), func(path string, info os.FileInfo, err error) error { - if err != nil { - return err - } - if !strings.HasSuffix(info.Name(), ".go") { - return nil - } - hasGo = true - return errors.New("done") - }) - return !hasGo -} - -// Diagnostics formattedfor the debug server -// (all the relevant fields of Server are private) -// (The alternative is to export them) -func (s *Server) Diagnostics() map[string][]string { - ans := make(map[string][]string) - s.diagnosticsMu.Lock() - defer s.diagnosticsMu.Unlock() - for k, v := range s.diagnostics { - fn := k.Filename() - for typ, d := range v.reports { - if len(d.diags) == 0 { - continue - } - for _, dx := range d.diags { - ans[fn] = append(ans[fn], auxStr(dx, d, typ)) - } - } - } - return ans -} - -func auxStr(v *source.Diagnostic, d *diagnosticReport, typ diagnosticSource) string { - // Tags? RelatedInformation? - msg := fmt.Sprintf("(%s)%q(source:%q,code:%q,severity:%s,snapshot:%d,type:%s)", - v.Range, v.Message, v.Source, v.Code, v.Severity, d.snapshotID, typ) - for _, r := range v.Related { - msg += fmt.Sprintf(" [%s:%s,%q]", r.Location.URI.SpanURI().Filename(), r.Location.Range, r.Message) - } - return msg -} diff --git a/gopls/internal/lsp/fake/doc.go b/gopls/internal/lsp/fake/doc.go deleted file mode 100644 index 6051781de01..00000000000 --- a/gopls/internal/lsp/fake/doc.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package fake provides fake implementations of a text editor, LSP client -// plugin, and Sandbox environment for use in tests. -// -// The Editor type provides a high level API for text editor operations -// (open/modify/save/close a buffer, jump to definition, etc.), and the Client -// type exposes an LSP client for the editor that can be connected to a -// language server. 
By default, the Editor and Client should be compliant with -// the LSP spec: their intended use is to verify server compliance with the -// spec in a variety of environment. Possible future enhancements of these -// types may allow them to misbehave in configurable ways, but that is not -// their primary use. -// -// The Sandbox type provides a facility for executing tests with a temporary -// directory, module proxy, and GOPATH. -package fake diff --git a/gopls/internal/lsp/folding_range.go b/gopls/internal/lsp/folding_range.go deleted file mode 100644 index e3b4987d391..00000000000 --- a/gopls/internal/lsp/folding_range.go +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -func (s *Server) foldingRange(ctx context.Context, params *protocol.FoldingRangeParams) ([]protocol.FoldingRange, error) { - ctx, done := event.Start(ctx, "lsp.Server.foldingRange", tag.URI.Of(params.TextDocument.URI)) - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.Go) - defer release() - if !ok { - return nil, err - } - - ranges, err := source.FoldingRange(ctx, snapshot, fh, snapshot.Options().LineFoldingOnly) - if err != nil { - return nil, err - } - return toProtocolFoldingRanges(ranges) -} - -func toProtocolFoldingRanges(ranges []*source.FoldingRangeInfo) ([]protocol.FoldingRange, error) { - result := make([]protocol.FoldingRange, 0, len(ranges)) - for _, info := range ranges { - rng := info.MappedRange.Range() - result = append(result, protocol.FoldingRange{ - StartLine: rng.Start.Line, - StartCharacter: rng.Start.Character, - EndLine: rng.End.Line, - EndCharacter: 
rng.End.Character, - Kind: string(info.Kind), - }) - } - return result, nil -} diff --git a/gopls/internal/lsp/format.go b/gopls/internal/lsp/format.go deleted file mode 100644 index a6197a68e59..00000000000 --- a/gopls/internal/lsp/format.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/mod" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/work" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -func (s *Server) formatting(ctx context.Context, params *protocol.DocumentFormattingParams) ([]protocol.TextEdit, error) { - ctx, done := event.Start(ctx, "lsp.Server.formatting", tag.URI.Of(params.TextDocument.URI)) - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.UnknownKind) - defer release() - if !ok { - return nil, err - } - switch snapshot.FileKind(fh) { - case source.Mod: - return mod.Format(ctx, snapshot, fh) - case source.Go: - return source.Format(ctx, snapshot, fh) - case source.Work: - return work.Format(ctx, snapshot, fh) - } - return nil, nil -} diff --git a/gopls/internal/lsp/general.go b/gopls/internal/lsp/general.go deleted file mode 100644 index 02e9f700efd..00000000000 --- a/gopls/internal/lsp/general.go +++ /dev/null @@ -1,707 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package lsp - -import ( - "context" - "encoding/json" - "fmt" - "go/build" - "log" - "os" - "path" - "path/filepath" - "sort" - "strings" - "sync" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/debug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/gopls/internal/telemetry" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/jsonrpc2" -) - -func (s *Server) initialize(ctx context.Context, params *protocol.ParamInitialize) (*protocol.InitializeResult, error) { - ctx, done := event.Start(ctx, "lsp.Server.initialize") - defer done() - - telemetry.RecordClientInfo(params) - - s.stateMu.Lock() - if s.state >= serverInitializing { - defer s.stateMu.Unlock() - return nil, fmt.Errorf("%w: initialize called while server in %v state", jsonrpc2.ErrInvalidRequest, s.state) - } - s.state = serverInitializing - s.stateMu.Unlock() - - // For uniqueness, use the gopls PID rather than params.ProcessID (the client - // pid). Some clients might start multiple gopls servers, though they - // probably shouldn't. - pid := os.Getpid() - s.tempDir = filepath.Join(os.TempDir(), fmt.Sprintf("gopls-%d.%s", pid, s.session.ID())) - err := os.Mkdir(s.tempDir, 0700) - if err != nil { - // MkdirTemp could fail due to permissions issues. This is a problem with - // the user's environment, but should not block gopls otherwise behaving. - // All usage of s.tempDir should be predicated on having a non-empty - // s.tempDir. - event.Error(ctx, "creating temp dir", err) - s.tempDir = "" - } - s.progress.SetSupportsWorkDoneProgress(params.Capabilities.Window.WorkDoneProgress) - - options := s.Options().Clone() - // TODO(rfindley): remove the error return from handleOptionResults, and - // eliminate this defer. 
- defer func() { s.SetOptions(options) }() - - if err := s.handleOptionResults(ctx, source.SetOptions(options, params.InitializationOptions)); err != nil { - return nil, err - } - options.ForClientCapabilities(params.ClientInfo, params.Capabilities) - - if options.ShowBugReports { - // Report the next bug that occurs on the server. - bug.Handle(func(b bug.Bug) { - msg := &protocol.ShowMessageParams{ - Type: protocol.Error, - Message: fmt.Sprintf("A bug occurred on the server: %s\nLocation:%s", b.Description, b.Key), - } - go func() { - if err := s.eventuallyShowMessage(context.Background(), msg); err != nil { - log.Printf("error showing bug: %v", err) - } - }() - }) - } - - folders := params.WorkspaceFolders - if len(folders) == 0 { - if params.RootURI != "" { - folders = []protocol.WorkspaceFolder{{ - URI: string(params.RootURI), - Name: path.Base(params.RootURI.SpanURI().Filename()), - }} - } - } - for _, folder := range folders { - uri := span.URIFromURI(folder.URI) - if !uri.IsFile() { - continue - } - s.pendingFolders = append(s.pendingFolders, folder) - } - // gopls only supports URIs with a file:// scheme, so if we have no - // workspace folders with a supported scheme, fail to initialize. - if len(folders) > 0 && len(s.pendingFolders) == 0 { - return nil, fmt.Errorf("unsupported URI schemes: %v (gopls only supports file URIs)", folders) - } - - var codeActionProvider interface{} = true - if ca := params.Capabilities.TextDocument.CodeAction; len(ca.CodeActionLiteralSupport.CodeActionKind.ValueSet) > 0 { - // If the client has specified CodeActionLiteralSupport, - // send the code actions we support. - // - // Using CodeActionOptions is only valid if codeActionLiteralSupport is set. 
- codeActionProvider = &protocol.CodeActionOptions{ - CodeActionKinds: s.getSupportedCodeActions(), - } - } - var renameOpts interface{} = true - if r := params.Capabilities.TextDocument.Rename; r != nil && r.PrepareSupport { - renameOpts = protocol.RenameOptions{ - PrepareProvider: r.PrepareSupport, - } - } - - versionInfo := debug.VersionInfo() - - // golang/go#45732: Warn users who've installed sergi/go-diff@v1.2.0, since - // it will corrupt the formatting of their files. - for _, dep := range versionInfo.Deps { - if dep.Path == "github.com/sergi/go-diff" && dep.Version == "v1.2.0" { - if err := s.eventuallyShowMessage(ctx, &protocol.ShowMessageParams{ - Message: `It looks like you have a bad gopls installation. -Please reinstall gopls by running 'GO111MODULE=on go install golang.org/x/tools/gopls@latest'. -See https://github.com/golang/go/issues/45732 for more information.`, - Type: protocol.Error, - }); err != nil { - return nil, err - } - } - } - - goplsVersion, err := json.Marshal(versionInfo) - if err != nil { - return nil, err - } - - return &protocol.InitializeResult{ - Capabilities: protocol.ServerCapabilities{ - CallHierarchyProvider: &protocol.Or_ServerCapabilities_callHierarchyProvider{Value: true}, - CodeActionProvider: codeActionProvider, - CodeLensProvider: &protocol.CodeLensOptions{}, // must be non-nil to enable the code lens capability - CompletionProvider: &protocol.CompletionOptions{ - TriggerCharacters: []string{"."}, - }, - DefinitionProvider: &protocol.Or_ServerCapabilities_definitionProvider{Value: true}, - TypeDefinitionProvider: &protocol.Or_ServerCapabilities_typeDefinitionProvider{Value: true}, - ImplementationProvider: &protocol.Or_ServerCapabilities_implementationProvider{Value: true}, - DocumentFormattingProvider: &protocol.Or_ServerCapabilities_documentFormattingProvider{Value: true}, - DocumentSymbolProvider: &protocol.Or_ServerCapabilities_documentSymbolProvider{Value: true}, - WorkspaceSymbolProvider: 
&protocol.Or_ServerCapabilities_workspaceSymbolProvider{Value: true}, - ExecuteCommandProvider: &protocol.ExecuteCommandOptions{ - Commands: nonNilSliceString(options.SupportedCommands), - }, - FoldingRangeProvider: &protocol.Or_ServerCapabilities_foldingRangeProvider{Value: true}, - HoverProvider: &protocol.Or_ServerCapabilities_hoverProvider{Value: true}, - DocumentHighlightProvider: &protocol.Or_ServerCapabilities_documentHighlightProvider{Value: true}, - DocumentLinkProvider: &protocol.DocumentLinkOptions{}, - InlayHintProvider: protocol.InlayHintOptions{}, - ReferencesProvider: &protocol.Or_ServerCapabilities_referencesProvider{Value: true}, - RenameProvider: renameOpts, - SelectionRangeProvider: &protocol.Or_ServerCapabilities_selectionRangeProvider{Value: true}, - SemanticTokensProvider: protocol.SemanticTokensOptions{ - Range: &protocol.Or_SemanticTokensOptions_range{Value: true}, - Full: &protocol.Or_SemanticTokensOptions_full{Value: true}, - Legend: protocol.SemanticTokensLegend{ - TokenTypes: nonNilSliceString(options.SemanticTypes), - TokenModifiers: nonNilSliceString(options.SemanticMods), - }, - }, - SignatureHelpProvider: &protocol.SignatureHelpOptions{ - TriggerCharacters: []string{"(", ","}, - }, - TextDocumentSync: &protocol.TextDocumentSyncOptions{ - Change: protocol.Incremental, - OpenClose: true, - Save: &protocol.SaveOptions{ - IncludeText: false, - }, - }, - Workspace: &protocol.Workspace6Gn{ - WorkspaceFolders: &protocol.WorkspaceFolders5Gn{ - Supported: true, - ChangeNotifications: "workspace/didChangeWorkspaceFolders", - }, - }, - }, - ServerInfo: &protocol.PServerInfoMsg_initialize{ - Name: "gopls", - Version: string(goplsVersion), - }, - }, nil -} - -func (s *Server) initialized(ctx context.Context, params *protocol.InitializedParams) error { - ctx, done := event.Start(ctx, "lsp.Server.initialized") - defer done() - - s.stateMu.Lock() - if s.state >= serverInitialized { - defer s.stateMu.Unlock() - return fmt.Errorf("%w: initialized 
called while server in %v state", jsonrpc2.ErrInvalidRequest, s.state) - } - s.state = serverInitialized - s.stateMu.Unlock() - - for _, not := range s.notifications { - s.client.ShowMessage(ctx, not) - } - s.notifications = nil - - if err := s.addFolders(ctx, s.pendingFolders); err != nil { - return err - } - s.pendingFolders = nil - s.checkViewGoVersions() - - var registrations []protocol.Registration - options := s.Options() - if options.ConfigurationSupported && options.DynamicConfigurationSupported { - registrations = append(registrations, protocol.Registration{ - ID: "workspace/didChangeConfiguration", - Method: "workspace/didChangeConfiguration", - }) - } - if len(registrations) > 0 { - if err := s.client.RegisterCapability(ctx, &protocol.RegistrationParams{ - Registrations: registrations, - }); err != nil { - return err - } - } - - // Ask (maybe) about enabling telemetry. Do this asynchronously, as it's OK - // for users to ignore or dismiss the question. - go s.maybePromptForTelemetry(ctx, options.TelemetryPrompt) - - return nil -} - -// GoVersionTable maps Go versions to the gopls version in which support will -// be deprecated, and the final gopls version supporting them without warnings. -// Keep this in sync with gopls/README.md. -// -// Must be sorted in ascending order of Go version. -// -// Mutable for testing. -var GoVersionTable = []GoVersionSupport{ - {12, "", "v0.7.5"}, - {15, "", "v0.9.5"}, - {16, "v0.13.0", "v0.11.0"}, - {17, "v0.13.0", "v0.11.0"}, -} - -// GoVersionSupport holds information about end-of-life Go version support. -type GoVersionSupport struct { - GoVersion int - DeprecatedVersion string // if unset, the version is already deprecated - InstallGoplsVersion string -} - -// OldestSupportedGoVersion is the last X in Go 1.X that this version of gopls -// supports. 
-func OldestSupportedGoVersion() int { - return GoVersionTable[len(GoVersionTable)-1].GoVersion + 1 -} - -// versionMessage returns the warning/error message to display if the user has -// the given Go version, if any. The goVersion variable is the X in Go 1.X. If -// fromBuild is set, the Go version is the version used to build gopls. -// Otherwise, it is the go command version. -// -// If goVersion is invalid (< 0), it returns "", 0. -func versionMessage(goVersion int, fromBuild bool) (string, protocol.MessageType) { - if goVersion < 0 { - return "", 0 - } - - for _, v := range GoVersionTable { - if goVersion <= v.GoVersion { - var msgBuilder strings.Builder - - mType := protocol.Error - if fromBuild { - fmt.Fprintf(&msgBuilder, "Gopls was built with Go version 1.%d", goVersion) - } else { - fmt.Fprintf(&msgBuilder, "Found Go version 1.%d", goVersion) - } - if v.DeprecatedVersion != "" { - // not deprecated yet, just a warning - fmt.Fprintf(&msgBuilder, ", which will be unsupported by gopls %s. ", v.DeprecatedVersion) - mType = protocol.Warning - } else { - fmt.Fprint(&msgBuilder, ", which is not supported by this version of gopls. ") - } - fmt.Fprintf(&msgBuilder, "Please upgrade to Go 1.%d or later and reinstall gopls. ", OldestSupportedGoVersion()) - fmt.Fprintf(&msgBuilder, "If you can't upgrade and want this message to go away, please install gopls %s. ", v.InstallGoplsVersion) - fmt.Fprint(&msgBuilder, "See https://go.dev/s/gopls-support-policy for more details.") - - return msgBuilder.String(), mType - } - } - return "", 0 -} - -// checkViewGoVersions checks whether any Go version used by a view is too old, -// raising a showMessage notification if so. -// -// It should be called after views change. 
-func (s *Server) checkViewGoVersions() { - oldestVersion, fromBuild := go1Point(), true - for _, view := range s.session.Views() { - viewVersion := view.GoVersion() - if oldestVersion == -1 || viewVersion < oldestVersion { - oldestVersion, fromBuild = viewVersion, false - } - telemetry.RecordViewGoVersion(viewVersion) - } - - if msg, mType := versionMessage(oldestVersion, fromBuild); msg != "" { - s.eventuallyShowMessage(context.Background(), &protocol.ShowMessageParams{ - Type: mType, - Message: msg, - }) - } -} - -// go1Point returns the x in Go 1.x. If an error occurs extracting the go -// version, it returns -1. -// -// Copied from the testenv package. -func go1Point() int { - for i := len(build.Default.ReleaseTags) - 1; i >= 0; i-- { - var version int - if _, err := fmt.Sscanf(build.Default.ReleaseTags[i], "go1.%d", &version); err != nil { - continue - } - return version - } - return -1 -} - -func (s *Server) addFolders(ctx context.Context, folders []protocol.WorkspaceFolder) error { - originalViews := len(s.session.Views()) - viewErrors := make(map[span.URI]error) - - var ndiagnose sync.WaitGroup // number of unfinished diagnose calls - if s.Options().VerboseWorkDoneProgress { - work := s.progress.Start(ctx, DiagnosticWorkTitle(FromInitialWorkspaceLoad), "Calculating diagnostics for initial workspace load...", nil, nil) - defer func() { - go func() { - ndiagnose.Wait() - work.End(ctx, "Done.") - }() - }() - } - // Only one view gets to have a workspace. - var nsnapshots sync.WaitGroup // number of unfinished snapshot initializations - for _, folder := range folders { - uri := span.URIFromURI(folder.URI) - // Ignore non-file URIs. 
- if !uri.IsFile() { - continue - } - work := s.progress.Start(ctx, "Setting up workspace", "Loading packages...", nil, nil) - snapshot, release, err := s.addView(ctx, folder.Name, uri) - if err != nil { - if err == source.ErrViewExists { - continue - } - viewErrors[uri] = err - work.End(ctx, fmt.Sprintf("Error loading packages: %s", err)) - continue - } - // Inv: release() must be called once. - - // Initialize snapshot asynchronously. - initialized := make(chan struct{}) - nsnapshots.Add(1) - go func() { - snapshot.AwaitInitialized(ctx) - work.End(ctx, "Finished loading packages.") - nsnapshots.Done() - close(initialized) // signal - }() - - // Diagnose the newly created view asynchronously. - ndiagnose.Add(1) - go func() { - s.diagnoseSnapshot(snapshot, nil, false, 0) - <-initialized - release() - ndiagnose.Done() - }() - } - - // Wait for snapshots to be initialized so that all files are known. - // (We don't need to wait for diagnosis to finish.) - nsnapshots.Wait() - - // Register for file watching notifications, if they are supported. - if err := s.updateWatchedDirectories(ctx); err != nil { - event.Error(ctx, "failed to register for file watching notifications", err) - } - - if len(viewErrors) > 0 { - errMsg := fmt.Sprintf("Error loading workspace folders (expected %v, got %v)\n", len(folders), len(s.session.Views())-originalViews) - for uri, err := range viewErrors { - errMsg += fmt.Sprintf("failed to load view for %s: %v\n", uri, err) - } - return s.client.ShowMessage(ctx, &protocol.ShowMessageParams{ - Type: protocol.Error, - Message: errMsg, - }) - } - return nil -} - -// updateWatchedDirectories compares the current set of directories to watch -// with the previously registered set of directories. If the set of directories -// has changed, we unregister and re-register for file watching notifications. -// updatedSnapshots is the set of snapshots that have been updated. 
-func (s *Server) updateWatchedDirectories(ctx context.Context) error { - patterns := s.session.FileWatchingGlobPatterns(ctx) - - s.watchedGlobPatternsMu.Lock() - defer s.watchedGlobPatternsMu.Unlock() - - // Nothing to do if the set of workspace directories is unchanged. - if equalURISet(s.watchedGlobPatterns, patterns) { - return nil - } - - // If the set of directories to watch has changed, register the updates and - // unregister the previously watched directories. This ordering avoids a - // period where no files are being watched. Still, if a user makes on-disk - // changes before these updates are complete, we may miss them for the new - // directories. - prevID := s.watchRegistrationCount - 1 - if err := s.registerWatchedDirectoriesLocked(ctx, patterns); err != nil { - return err - } - if prevID >= 0 { - return s.client.UnregisterCapability(ctx, &protocol.UnregistrationParams{ - Unregisterations: []protocol.Unregistration{{ - ID: watchedFilesCapabilityID(prevID), - Method: "workspace/didChangeWatchedFiles", - }}, - }) - } - return nil -} - -func watchedFilesCapabilityID(id int) string { - return fmt.Sprintf("workspace/didChangeWatchedFiles-%d", id) -} - -func equalURISet(m1, m2 map[string]struct{}) bool { - if len(m1) != len(m2) { - return false - } - for k := range m1 { - _, ok := m2[k] - if !ok { - return false - } - } - return true -} - -// registerWatchedDirectoriesLocked sends the workspace/didChangeWatchedFiles -// registrations to the client and updates s.watchedDirectories. -// The caller must not subsequently mutate patterns. 
-func (s *Server) registerWatchedDirectoriesLocked(ctx context.Context, patterns map[string]struct{}) error { - if !s.Options().DynamicWatchedFilesSupported { - return nil - } - s.watchedGlobPatterns = patterns - watchers := make([]protocol.FileSystemWatcher, 0, len(patterns)) // must be a slice - val := protocol.WatchChange | protocol.WatchDelete | protocol.WatchCreate - for pattern := range patterns { - watchers = append(watchers, protocol.FileSystemWatcher{ - GlobPattern: pattern, - Kind: &val, - }) - } - - if err := s.client.RegisterCapability(ctx, &protocol.RegistrationParams{ - Registrations: []protocol.Registration{{ - ID: watchedFilesCapabilityID(s.watchRegistrationCount), - Method: "workspace/didChangeWatchedFiles", - RegisterOptions: protocol.DidChangeWatchedFilesRegistrationOptions{ - Watchers: watchers, - }, - }}, - }); err != nil { - return err - } - s.watchRegistrationCount++ - return nil -} - -// Options returns the current server options. -// -// The caller must not modify the result. -func (s *Server) Options() *source.Options { - s.optionsMu.Lock() - defer s.optionsMu.Unlock() - return s.options -} - -// SetOptions sets the current server options. -// -// The caller must not subsequently modify the options. 
-func (s *Server) SetOptions(opts *source.Options) { - s.optionsMu.Lock() - defer s.optionsMu.Unlock() - s.options = opts -} - -func (s *Server) fetchFolderOptions(ctx context.Context, folder span.URI) (*source.Options, error) { - if opts := s.Options(); !opts.ConfigurationSupported { - return opts, nil - } - configs, err := s.client.Configuration(ctx, &protocol.ParamConfiguration{ - Items: []protocol.ConfigurationItem{{ - ScopeURI: string(folder), - Section: "gopls", - }}, - }, - ) - if err != nil { - return nil, fmt.Errorf("failed to get workspace configuration from client (%s): %v", folder, err) - } - - folderOpts := s.Options().Clone() - for _, config := range configs { - if err := s.handleOptionResults(ctx, source.SetOptions(folderOpts, config)); err != nil { - return nil, err - } - } - return folderOpts, nil -} - -func (s *Server) eventuallyShowMessage(ctx context.Context, msg *protocol.ShowMessageParams) error { - s.stateMu.Lock() - defer s.stateMu.Unlock() - if s.state == serverInitialized { - return s.client.ShowMessage(ctx, msg) - } - s.notifications = append(s.notifications, msg) - return nil -} - -func (s *Server) handleOptionResults(ctx context.Context, results source.OptionResults) error { - var warnings, errors []string - for _, result := range results { - switch result.Error.(type) { - case nil: - // nothing to do - case *source.SoftError: - warnings = append(warnings, result.Error.Error()) - default: - errors = append(errors, result.Error.Error()) - } - } - - // Sort messages, but put errors first. - // - // Having stable content for the message allows clients to de-duplicate. This - // matters because we may send duplicate warnings for clients that support - // dynamic configuration: one for the initial settings, and then more for the - // individual view settings. - var msgs []string - msgType := protocol.Warning - if len(errors) > 0 { - msgType = protocol.Error - sort.Strings(errors) - msgs = append(msgs, errors...) 
- } - if len(warnings) > 0 { - sort.Strings(warnings) - msgs = append(msgs, warnings...) - } - - if len(msgs) > 0 { - // Settings - combined := "Invalid settings: " + strings.Join(msgs, "; ") - params := &protocol.ShowMessageParams{ - Type: msgType, - Message: combined, - } - return s.eventuallyShowMessage(ctx, params) - } - - return nil -} - -// beginFileRequest checks preconditions for a file-oriented request and routes -// it to a snapshot. -// We don't want to return errors for benign conditions like wrong file type, -// so callers should do if !ok { return err } rather than if err != nil. -// The returned cleanup function is non-nil even in case of false/error result. -func (s *Server) beginFileRequest(ctx context.Context, pURI protocol.DocumentURI, expectKind source.FileKind) (source.Snapshot, source.FileHandle, bool, func(), error) { - uri := pURI.SpanURI() - if !uri.IsFile() { - // Not a file URI. Stop processing the request, but don't return an error. - return nil, nil, false, func() {}, nil - } - view, err := s.session.ViewOf(uri) - if err != nil { - return nil, nil, false, func() {}, err - } - snapshot, release, err := view.Snapshot() - if err != nil { - return nil, nil, false, func() {}, err - } - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - release() - return nil, nil, false, func() {}, err - } - if expectKind != source.UnknownKind && snapshot.FileKind(fh) != expectKind { - // Wrong kind of file. Nothing to do. - release() - return nil, nil, false, func() {}, nil - } - return snapshot, fh, true, release, nil -} - -// shutdown implements the 'shutdown' LSP handler. It releases resources -// associated with the server and waits for all ongoing work to complete. 
-func (s *Server) shutdown(ctx context.Context) error { - ctx, done := event.Start(ctx, "lsp.Server.shutdown") - defer done() - - s.stateMu.Lock() - defer s.stateMu.Unlock() - if s.state < serverInitialized { - event.Log(ctx, "server shutdown without initialization") - } - if s.state != serverShutDown { - // drop all the active views - s.session.Shutdown(ctx) - s.state = serverShutDown - if s.tempDir != "" { - if err := os.RemoveAll(s.tempDir); err != nil { - event.Error(ctx, "removing temp dir", err) - } - } - } - return nil -} - -func (s *Server) exit(ctx context.Context) error { - ctx, done := event.Start(ctx, "lsp.Server.exit") - defer done() - - s.stateMu.Lock() - defer s.stateMu.Unlock() - - s.client.Close() - - if s.state != serverShutDown { - // TODO: We should be able to do better than this. - os.Exit(1) - } - // We don't terminate the process on a normal exit, we just allow it to - // close naturally if needed after the connection is closed. - return nil -} - -// TODO: when we can assume go1.18, replace with generic -// (after retiring support for go1.17) -func nonNilSliceString(x []string) []string { - if x == nil { - return []string{} - } - return x -} -func nonNilSliceTextEdit(x []protocol.TextEdit) []protocol.TextEdit { - if x == nil { - return []protocol.TextEdit{} - } - - return x -} -func nonNilSliceCompletionItemTag(x []protocol.CompletionItemTag) []protocol.CompletionItemTag { - if x == nil { - return []protocol.CompletionItemTag{} - } - return x -} -func emptySliceDiagnosticTag(x []protocol.DiagnosticTag) []protocol.DiagnosticTag { - if x == nil { - return []protocol.DiagnosticTag{} - } - return x -} diff --git a/gopls/internal/lsp/general_test.go b/gopls/internal/lsp/general_test.go deleted file mode 100644 index 6bc0dc1cb2b..00000000000 --- a/gopls/internal/lsp/general_test.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "strings" - "testing" - - "golang.org/x/tools/gopls/internal/lsp/protocol" -) - -func TestVersionMessage(t *testing.T) { - tests := []struct { - goVersion int - fromBuild bool - wantContains []string // string fragments that we expect to see - wantType protocol.MessageType - }{ - {-1, false, nil, 0}, - {12, false, []string{"1.12", "not supported", "upgrade to Go 1.18", "install gopls v0.7.5"}, protocol.Error}, - {13, false, []string{"1.13", "not supported", "upgrade to Go 1.18", "install gopls v0.9.5"}, protocol.Error}, - {15, false, []string{"1.15", "not supported", "upgrade to Go 1.18", "install gopls v0.9.5"}, protocol.Error}, - {15, true, []string{"Gopls was built with Go version 1.15", "not supported", "upgrade to Go 1.18", "install gopls v0.9.5"}, protocol.Error}, - {16, false, []string{"1.16", "will be unsupported by gopls v0.13.0", "upgrade to Go 1.18", "install gopls v0.11.0"}, protocol.Warning}, - {17, false, []string{"1.17", "will be unsupported by gopls v0.13.0", "upgrade to Go 1.18", "install gopls v0.11.0"}, protocol.Warning}, - {17, true, []string{"Gopls was built with Go version 1.17", "will be unsupported by gopls v0.13.0", "upgrade to Go 1.18", "install gopls v0.11.0"}, protocol.Warning}, - } - - for _, test := range tests { - gotMsg, gotType := versionMessage(test.goVersion, test.fromBuild) - - if len(test.wantContains) == 0 && gotMsg != "" { - t.Errorf("versionMessage(%d) = %q, want \"\"", test.goVersion, gotMsg) - } - - for _, want := range test.wantContains { - if !strings.Contains(gotMsg, want) { - t.Errorf("versionMessage(%d) = %q, want containing %q", test.goVersion, gotMsg, want) - } - } - - if gotType != test.wantType { - t.Errorf("versionMessage(%d) = returned message type %d, want %d", test.goVersion, gotType, test.wantType) - } - } -} diff --git a/gopls/internal/lsp/helper/README.md 
b/gopls/internal/lsp/helper/README.md deleted file mode 100644 index 5124279929f..00000000000 --- a/gopls/internal/lsp/helper/README.md +++ /dev/null @@ -1,35 +0,0 @@ -# Generate server_gen.go - -`helper` generates the file `../server_gen.go` (in package -`internal/lsp`) which contains stub declarations of server methods. - -To invoke it, run `go generate` in the `gopls/internal/lsp` directory. - -It is derived from `gopls/internal/lsp/protocol/tsserver.go`, which -itself is generated from the protocol downloaded from VSCode, so be -sure to run `go generate` in the protocol first. Or run `go generate -./...` twice in the gopls directory. - -It decides what stubs are needed and their signatures -by looking at the `Server` interface (`-t` flag). These all look somewhat like -`Resolve(context.Context, *CompletionItem) (*CompletionItem, error)`. - -It then parses the `lsp` directory (`-u` flag) to see if there is a corresponding -implementation function (which in this case would be named `resolve`). If so -it discovers the parameter names needed, and generates (in `server_gen.go`) code -like - -``` go -func (s *Server) resolve(ctx context.Context, params *protocol.CompletionItem) (*protocol.CompletionItem, error) { - return s.resolve(ctx, params) -} -``` - -If `resolve` is not defined (and it is not), then the body of the generated function is - -```go - return nil, notImplemented("resolve") -``` - -So to add a capability currently not implemented, just define it somewhere in `lsp`. -In this case, just define `func (s *Server) resolve(...)` and re-generate `server_gen.go`. diff --git a/gopls/internal/lsp/helper/helper.go b/gopls/internal/lsp/helper/helper.go deleted file mode 100644 index 187908f3d4d..00000000000 --- a/gopls/internal/lsp/helper/helper.go +++ /dev/null @@ -1,264 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// The helper command generates the declaration of the concrete -// 'server' type that implements the abstract Server interface defined -// in protocol/tsserver.go (which is itself generated from the LSP -// protocol). -// -// To run, invoke "go generate" in the parent (lsp) directory. -// -// TODO(adonovan): merge this into the main LSP generator. -package main - -import ( - "bytes" - "flag" - "fmt" - "go/ast" - "go/format" - "go/parser" - "go/token" - "log" - "os" - "sort" - "strings" - "text/template" -) - -var ( - typ = flag.String("t", "Server", "generate code for this type") - def = flag.String("d", "", "the file the type is defined in") // this relies on punning - use = flag.String("u", "", "look for uses in this package") - out = flag.String("o", "", "where to write the generated file") -) - -func main() { - log.SetFlags(log.Lshortfile) - flag.Parse() - if *typ == "" || *def == "" || *use == "" || *out == "" { - flag.PrintDefaults() - os.Exit(1) - } - // read the type definition and see what methods we're looking for - doTypes() - - // parse the package and see which methods are defined - doUses() - - output() -} - -// replace "\\\n" with nothing before using -var tmpl = `// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -// Code generated by gopls/internal/lsp/helper. DO NOT EDIT. 
- -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/protocol" -) - -{{range $key, $v := .Stuff}} -func (s *{{$.Type}}) {{$v.Name}}({{.Param}}) {{.Result}} { - {{if ne .Found ""}} return s.{{.Internal}}({{.Invoke}})\ - {{else}}return {{if lt 1 (len .Results)}}nil, {{end}}notImplemented("{{.Name}}"){{end}} -} -{{end}} -` - -func output() { - // put in empty param names as needed - for _, t := range types { - if t.paramnames == nil { - t.paramnames = make([]string, len(t.paramtypes)) - } - for i, p := range t.paramtypes { - cm := "" - if i > 0 { - cm = ", " - } - t.Param += fmt.Sprintf("%s%s %s", cm, t.paramnames[i], p) - this := t.paramnames[i] - if this == "_" { - this = "nil" - } - t.Invoke += fmt.Sprintf("%s%s", cm, this) - } - if len(t.Results) > 1 { - t.Result = "(" - } - for i, r := range t.Results { - cm := "" - if i > 0 { - cm = ", " - } - t.Result += fmt.Sprintf("%s%s", cm, r) - } - if len(t.Results) > 1 { - t.Result += ")" - } - } - - fd, err := os.Create(*out) - if err != nil { - log.Fatal(err) - } - t, err := template.New("foo").Parse(tmpl) - if err != nil { - log.Fatal(err) - } - type par struct { - Type string - Stuff []*Function - } - p := par{*typ, types} - if false { // debugging the template - t.Execute(os.Stderr, &p) - } - buf := bytes.NewBuffer(nil) - err = t.Execute(buf, &p) - if err != nil { - log.Fatal(err) - } - ans, err := format.Source(bytes.Replace(buf.Bytes(), []byte("\\\n"), []byte{}, -1)) - if err != nil { - log.Fatal(err) - } - fd.Write(ans) -} - -func doUses() { - fset := token.NewFileSet() - pkgs, err := parser.ParseDir(fset, *use, nil, 0) - if err != nil { - log.Fatalf("%q:%v", *use, err) - } - pkg := pkgs["lsp"] // CHECK - files := pkg.Files - for fname, f := range files { - for _, d := range f.Decls { - fd, ok := d.(*ast.FuncDecl) - if !ok { - continue - } - nm := fd.Name.String() - if ast.IsExported(nm) { - // we're looking for things like didChange - continue - } - if fx, ok := byname[nm]; ok { - if fx.Found != "" 
{ - log.Fatalf("found %s in %s and %s", fx.Internal, fx.Found, fname) - } - fx.Found = fname - // and the Paramnames - ft := fd.Type - for _, f := range ft.Params.List { - nm := "" - if len(f.Names) > 0 { - nm = f.Names[0].String() - if nm == "_" { - nm = "_gen" - } - } - fx.paramnames = append(fx.paramnames, nm) - } - } - } - } - if false { - for i, f := range types { - log.Printf("%d %s %s", i, f.Internal, f.Found) - } - } -} - -type Function struct { - Name string - Internal string // first letter lower case - paramtypes []string - paramnames []string - Results []string - Param string - Result string // do it in code, easier than in a template - Invoke string - Found string // file it was found in -} - -var types []*Function -var byname = map[string]*Function{} // internal names - -func doTypes() { - fset := token.NewFileSet() - f, err := parser.ParseFile(fset, *def, nil, 0) - if err != nil { - log.Fatal(err) - } - fd, err := os.Create("/tmp/ast") - if err != nil { - log.Fatal(err) - } - ast.Fprint(fd, fset, f, ast.NotNilFilter) - ast.Inspect(f, inter) - sort.Slice(types, func(i, j int) bool { return types[i].Name < types[j].Name }) - if false { - for i, f := range types { - log.Printf("%d %s(%v) %v", i, f.Name, f.paramtypes, f.Results) - } - } -} - -func inter(n ast.Node) bool { - x, ok := n.(*ast.TypeSpec) - if !ok || x.Name.Name != *typ { - return true - } - m := x.Type.(*ast.InterfaceType).Methods.List - for _, fld := range m { - fn := fld.Type.(*ast.FuncType) - p := fn.Params.List - r := fn.Results.List - fx := &Function{ - Name: fld.Names[0].String(), - } - fx.Internal = strings.ToLower(fx.Name[:1]) + fx.Name[1:] - for _, f := range p { - fx.paramtypes = append(fx.paramtypes, whatis(f.Type)) - } - for _, f := range r { - fx.Results = append(fx.Results, whatis(f.Type)) - } - types = append(types, fx) - byname[fx.Internal] = fx - } - return false -} - -func whatis(x ast.Expr) string { - switch n := x.(type) { - case *ast.SelectorExpr: - return whatis(n.X) + 
"." + n.Sel.String() - case *ast.StarExpr: - return "*" + whatis(n.X) - case *ast.Ident: - if ast.IsExported(n.Name) { - // these are from package protocol - return "protocol." + n.Name - } - return n.Name - case *ast.ArrayType: - return "[]" + whatis(n.Elt) - case *ast.InterfaceType: - return "interface{}" - default: - log.Fatalf("Fatal %T", x) - return fmt.Sprintf("%T", x) - } -} diff --git a/gopls/internal/lsp/highlight.go b/gopls/internal/lsp/highlight.go deleted file mode 100644 index c0c2502e5f1..00000000000 --- a/gopls/internal/lsp/highlight.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/template" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -func (s *Server) documentHighlight(ctx context.Context, params *protocol.DocumentHighlightParams) ([]protocol.DocumentHighlight, error) { - ctx, done := event.Start(ctx, "lsp.Server.documentHighlight", tag.URI.Of(params.TextDocument.URI)) - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.Go) - defer release() - if !ok { - return nil, err - } - - if snapshot.FileKind(fh) == source.Tmpl { - return template.Highlight(ctx, snapshot, fh, params.Position) - } - - rngs, err := source.Highlight(ctx, snapshot, fh, params.Position) - if err != nil { - event.Error(ctx, "no highlight", err) - } - return toProtocolHighlight(rngs), nil -} - -func toProtocolHighlight(rngs []protocol.Range) []protocol.DocumentHighlight { - result := make([]protocol.DocumentHighlight, 0, len(rngs)) - kind := protocol.Text - for _, rng := range rngs { - result = append(result, protocol.DocumentHighlight{ - Kind: kind, - Range: rng, 
- }) - } - return result -} diff --git a/gopls/internal/lsp/hover.go b/gopls/internal/lsp/hover.go deleted file mode 100644 index 263a1c8ac72..00000000000 --- a/gopls/internal/lsp/hover.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/mod" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/template" - "golang.org/x/tools/gopls/internal/lsp/work" - "golang.org/x/tools/gopls/internal/telemetry" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -func (s *Server) hover(ctx context.Context, params *protocol.HoverParams) (_ *protocol.Hover, rerr error) { - recordLatency := telemetry.StartLatencyTimer("hover") - defer func() { - recordLatency(ctx, rerr) - }() - - ctx, done := event.Start(ctx, "lsp.Server.hover", tag.URI.Of(params.TextDocument.URI)) - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.UnknownKind) - defer release() - if !ok { - return nil, err - } - switch snapshot.FileKind(fh) { - case source.Mod: - return mod.Hover(ctx, snapshot, fh, params.Position) - case source.Go: - return source.Hover(ctx, snapshot, fh, params.Position) - case source.Tmpl: - return template.Hover(ctx, snapshot, fh, params.Position) - case source.Work: - return work.Hover(ctx, snapshot, fh, params.Position) - } - return nil, nil -} diff --git a/gopls/internal/lsp/implementation.go b/gopls/internal/lsp/implementation.go deleted file mode 100644 index bc527b3b58a..00000000000 --- a/gopls/internal/lsp/implementation.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/telemetry" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -func (s *Server) implementation(ctx context.Context, params *protocol.ImplementationParams) (_ []protocol.Location, rerr error) { - recordLatency := telemetry.StartLatencyTimer("implementation") - defer func() { - recordLatency(ctx, rerr) - }() - - ctx, done := event.Start(ctx, "lsp.Server.implementation", tag.URI.Of(params.TextDocument.URI)) - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.Go) - defer release() - if !ok { - return nil, err - } - return source.Implementation(ctx, snapshot, fh, params.Position) -} diff --git a/gopls/internal/lsp/inlay_hint.go b/gopls/internal/lsp/inlay_hint.go deleted file mode 100644 index 39b51abcbc6..00000000000 --- a/gopls/internal/lsp/inlay_hint.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package lsp - -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/mod" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -func (s *Server) inlayHint(ctx context.Context, params *protocol.InlayHintParams) ([]protocol.InlayHint, error) { - ctx, done := event.Start(ctx, "lsp.Server.inlayHint", tag.URI.Of(params.TextDocument.URI)) - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.UnknownKind) - defer release() - if !ok { - return nil, err - } - switch snapshot.FileKind(fh) { - case source.Mod: - return mod.InlayHint(ctx, snapshot, fh, params.Range) - case source.Go: - return source.InlayHint(ctx, snapshot, fh, params.Range) - } - return nil, nil -} diff --git a/gopls/internal/lsp/lsp_test.go b/gopls/internal/lsp/lsp_test.go deleted file mode 100644 index 8ef127d8ff4..00000000000 --- a/gopls/internal/lsp/lsp_test.go +++ /dev/null @@ -1,616 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package lsp - -import ( - "bytes" - "context" - "fmt" - "os" - "path/filepath" - "sort" - "strings" - "testing" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/debug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/tests" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/testenv" -) - -func TestMain(m *testing.M) { - bug.PanicOnBugs = true - testenv.ExitIfSmallMachine() - - os.Exit(m.Run()) -} - -// TestLSP runs the marker tests in files beneath testdata/ using -// implementations of each of the marker operations that make LSP RPCs to a -// gopls server. -func TestLSP(t *testing.T) { - tests.RunTests(t, "testdata", true, testLSP) -} - -func testLSP(t *testing.T, datum *tests.Data) { - ctx := tests.Context(t) - - // Setting a debug instance suppresses logging to stderr, but ensures that we - // still e.g. convert events into runtime/trace/instrumentation. - // - // Previously, we called event.SetExporter(nil), which turns off all - // instrumentation. - ctx = debug.WithInstance(ctx, "", "off") - - session := cache.NewSession(ctx, cache.New(nil)) - options := source.DefaultOptions(tests.DefaultOptions) - options.SetEnvSlice(datum.Config.Env) - folder := &cache.Folder{ - Dir: span.URIFromPath(datum.Config.Dir), - Name: datum.Config.Dir, - Options: options, - } - view, snapshot, release, err := session.NewView(ctx, folder) - if err != nil { - t.Fatal(err) - } - - defer session.RemoveView(view) - - // Only run the -modfile specific tests in module mode with Go 1.14 or above. 
- datum.ModfileFlagAvailable = len(snapshot.ModFiles()) > 0 && testenv.Go1Point() >= 14 - release() - - // Open all files for performance reasons, because gopls only - // keeps active packages (those with open files) in memory. - // - // In practice clients will only send document-oriented requests for open - // files. - var modifications []source.FileModification - for _, module := range datum.Exported.Modules { - for name := range module.Files { - filename := datum.Exported.File(module.Name, name) - if filepath.Ext(filename) != ".go" { - continue - } - content, err := datum.Exported.FileContents(filename) - if err != nil { - t.Fatal(err) - } - modifications = append(modifications, source.FileModification{ - URI: span.URIFromPath(filename), - Action: source.Open, - Version: -1, - Text: content, - LanguageID: "go", - }) - } - } - for filename, content := range datum.Config.Overlay { - if filepath.Ext(filename) != ".go" { - continue - } - modifications = append(modifications, source.FileModification{ - URI: span.URIFromPath(filename), - Action: source.Open, - Version: -1, - Text: content, - LanguageID: "go", - }) - } - if err := session.ModifyFiles(ctx, modifications); err != nil { - t.Fatal(err) - } - r := &runner{ - data: datum, - ctx: ctx, - editRecv: make(chan map[span.URI][]byte, 1), - } - - r.server = NewServer(session, testClient{runner: r}, options) - tests.Run(t, r, datum) -} - -// runner implements tests.Tests by making LSP RPCs to a gopls server. -type runner struct { - server *Server - data *tests.Data - diagnostics map[span.URI][]*source.Diagnostic - ctx context.Context - editRecv chan map[span.URI][]byte -} - -// testClient stubs any client functions that may be called by LSP functions. -type testClient struct { - protocol.Client - runner *runner -} - -func (c testClient) Close() error { - return nil -} - -// Trivially implement PublishDiagnostics so that we can call -// server.publishReports below to de-dup sent diagnostics. 
-func (c testClient) PublishDiagnostics(context.Context, *protocol.PublishDiagnosticsParams) error { - return nil -} - -func (c testClient) ShowMessage(context.Context, *protocol.ShowMessageParams) error { - return nil -} - -func (c testClient) ApplyEdit(ctx context.Context, params *protocol.ApplyWorkspaceEditParams) (*protocol.ApplyWorkspaceEditResult, error) { - res, err := applyTextDocumentEdits(c.runner, params.Edit.DocumentChanges) - if err != nil { - return nil, err - } - c.runner.editRecv <- res - return &protocol.ApplyWorkspaceEditResult{Applied: true}, nil -} - -func (r *runner) CallHierarchy(t *testing.T, spn span.Span, expectedCalls *tests.CallHierarchyResult) { - mapper, err := r.data.Mapper(spn.URI()) - if err != nil { - t.Fatal(err) - } - loc, err := mapper.SpanLocation(spn) - if err != nil { - t.Fatalf("failed for %v: %v", spn, err) - } - - params := &protocol.CallHierarchyPrepareParams{ - TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), - } - - items, err := r.server.PrepareCallHierarchy(r.ctx, params) - if err != nil { - t.Fatal(err) - } - if len(items) == 0 { - t.Fatalf("expected call hierarchy item to be returned for identifier at %v\n", loc.Range) - } - - callLocation := protocol.Location{ - URI: items[0].URI, - Range: items[0].Range, - } - if callLocation != loc { - t.Fatalf("expected server.PrepareCallHierarchy to return identifier at %v but got %v\n", loc, callLocation) - } - - incomingCalls, err := r.server.IncomingCalls(r.ctx, &protocol.CallHierarchyIncomingCallsParams{Item: items[0]}) - if err != nil { - t.Error(err) - } - var incomingCallItems []protocol.CallHierarchyItem - for _, item := range incomingCalls { - incomingCallItems = append(incomingCallItems, item.From) - } - msg := tests.DiffCallHierarchyItems(incomingCallItems, expectedCalls.IncomingCalls) - if msg != "" { - t.Errorf("incoming calls: %s", msg) - } - - outgoingCalls, err := r.server.OutgoingCalls(r.ctx, 
&protocol.CallHierarchyOutgoingCallsParams{Item: items[0]}) - if err != nil { - t.Error(err) - } - var outgoingCallItems []protocol.CallHierarchyItem - for _, item := range outgoingCalls { - outgoingCallItems = append(outgoingCallItems, item.To) - } - msg = tests.DiffCallHierarchyItems(outgoingCallItems, expectedCalls.OutgoingCalls) - if msg != "" { - t.Errorf("outgoing calls: %s", msg) - } -} - -func (r *runner) SemanticTokens(t *testing.T, spn span.Span) { - uri := spn.URI() - filename := uri.Filename() - // this is called solely for coverage in semantic.go - _, err := r.server.semanticTokensFull(r.ctx, &protocol.SemanticTokensParams{ - TextDocument: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(uri), - }, - }) - if err != nil { - t.Errorf("%v for %s", err, filename) - } - _, err = r.server.semanticTokensRange(r.ctx, &protocol.SemanticTokensRangeParams{ - TextDocument: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(uri), - }, - // any legal range. Just to exercise the call. - Range: protocol.Range{ - Start: protocol.Position{ - Line: 0, - Character: 0, - }, - End: protocol.Position{ - Line: 2, - Character: 0, - }, - }, - }) - if err != nil { - t.Errorf("%v for Range %s", err, filename) - } -} - -func (r *runner) SuggestedFix(t *testing.T, spn span.Span, actionKinds []tests.SuggestedFix, expectedActions int) { - uri := spn.URI() - view, err := r.server.session.ViewOf(uri) - if err != nil { - t.Fatal(err) - } - - m, err := r.data.Mapper(uri) - if err != nil { - t.Fatal(err) - } - rng, err := m.SpanRange(spn) - if err != nil { - t.Fatal(err) - } - // Get the diagnostics for this view if we have not done it before. - r.collectDiagnostics(view) - var diagnostics []protocol.Diagnostic - for _, d := range r.diagnostics[uri] { - // Compare the start positions rather than the entire range because - // some diagnostics have a range with the same start and end position (8:1-8:1). 
- // The current marker functionality prevents us from having a range of 0 length. - if protocol.ComparePosition(d.Range.Start, rng.Start) == 0 { - diagnostics = append(diagnostics, toProtocolDiagnostics([]*source.Diagnostic{d})...) - break - } - } - var codeActionKinds []protocol.CodeActionKind - for _, k := range actionKinds { - codeActionKinds = append(codeActionKinds, protocol.CodeActionKind(k.ActionKind)) - } - allActions, err := r.server.CodeAction(r.ctx, &protocol.CodeActionParams{ - TextDocument: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(uri), - }, - Range: rng, - Context: protocol.CodeActionContext{ - Only: codeActionKinds, - Diagnostics: diagnostics, - }, - }) - if err != nil { - t.Fatalf("CodeAction %s failed: %v", spn, err) - } - var actions []protocol.CodeAction - for _, action := range allActions { - for _, fix := range actionKinds { - if strings.Contains(action.Title, fix.Title) { - actions = append(actions, action) - break - } - } - - } - if len(actions) != expectedActions { - var summaries []string - for _, a := range actions { - summaries = append(summaries, fmt.Sprintf("%q (%s)", a.Title, a.Kind)) - } - t.Fatalf("CodeAction(...): got %d code actions (%v), want %d", len(actions), summaries, expectedActions) - } - action := actions[0] - var match bool - for _, k := range codeActionKinds { - if action.Kind == k { - match = true - break - } - } - if !match { - t.Fatalf("unexpected kind for code action %s, got %v, want one of %v", action.Title, action.Kind, codeActionKinds) - } - var res map[span.URI][]byte - if cmd := action.Command; cmd != nil { - _, err := r.server.ExecuteCommand(r.ctx, &protocol.ExecuteCommandParams{ - Command: action.Command.Command, - Arguments: action.Command.Arguments, - }) - if err != nil { - t.Fatalf("error converting command %q to edits: %v", action.Command.Command, err) - } - res = <-r.editRecv - } else { - res, err = applyTextDocumentEdits(r, action.Edit.DocumentChanges) - if err != nil { - 
t.Fatal(err) - } - } - for u, got := range res { - want := r.data.Golden(t, "suggestedfix_"+tests.SpanName(spn), u.Filename(), func() ([]byte, error) { - return got, nil - }) - if diff := compare.Bytes(want, got); diff != "" { - t.Errorf("suggested fixes failed for %s:\n%s", u.Filename(), diff) - } - } -} - -func (r *runner) InlayHints(t *testing.T, spn span.Span) { - uri := spn.URI() - filename := uri.Filename() - - hints, err := r.server.InlayHint(r.ctx, &protocol.InlayHintParams{ - TextDocument: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(uri), - }, - // TODO: add Range - }) - if err != nil { - t.Fatal(err) - } - - // Map inlay hints to text edits. - edits := make([]protocol.TextEdit, len(hints)) - for i, hint := range hints { - var paddingLeft, paddingRight string - if hint.PaddingLeft { - paddingLeft = " " - } - if hint.PaddingRight { - paddingRight = " " - } - edits[i] = protocol.TextEdit{ - Range: protocol.Range{Start: hint.Position, End: hint.Position}, - NewText: fmt.Sprintf("<%s%s%s>", paddingLeft, hint.Label[0].Value, paddingRight), - } - } - - m, err := r.data.Mapper(uri) - if err != nil { - t.Fatal(err) - } - got, _, err := source.ApplyProtocolEdits(m, edits) - if err != nil { - t.Error(err) - } - - withinlayHints := r.data.Golden(t, "inlayHint", filename, func() ([]byte, error) { - return got, nil - }) - - if !bytes.Equal(withinlayHints, got) { - t.Errorf("inlay hints failed for %s, expected:\n%s\ngot:\n%s", filename, withinlayHints, got) - } -} - -func (r *runner) Rename(t *testing.T, spn span.Span, newText string) { - tag := fmt.Sprintf("%s-rename", newText) - - uri := spn.URI() - filename := uri.Filename() - sm, err := r.data.Mapper(uri) - if err != nil { - t.Fatal(err) - } - loc, err := sm.SpanLocation(spn) - if err != nil { - t.Fatalf("failed for %v: %v", spn, err) - } - - wedit, err := r.server.Rename(r.ctx, &protocol.RenameParams{ - TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, - Position: loc.Range.Start, - 
NewName: newText, - }) - if err != nil { - renamed := string(r.data.Golden(t, tag, filename, func() ([]byte, error) { - return []byte(err.Error()), nil - })) - if err.Error() != renamed { - t.Errorf("%s: rename failed for %s, expected:\n%v\ngot:\n%v\n", spn, newText, renamed, err) - } - return - } - res, err := applyTextDocumentEdits(r, wedit.DocumentChanges) - if err != nil { - t.Fatal(err) - } - var orderedURIs []string - for uri := range res { - orderedURIs = append(orderedURIs, string(uri)) - } - sort.Strings(orderedURIs) - - // Print the name and content of each modified file, - // concatenated, and compare against the golden. - var buf bytes.Buffer - for i := 0; i < len(res); i++ { - if i != 0 { - buf.WriteByte('\n') - } - uri := span.URIFromURI(orderedURIs[i]) - if len(res) > 1 { - buf.WriteString(filepath.Base(uri.Filename())) - buf.WriteString(":\n") - } - buf.Write(res[uri]) - } - got := buf.Bytes() - want := r.data.Golden(t, tag, filename, func() ([]byte, error) { - return got, nil - }) - if diff := compare.Bytes(want, got); diff != "" { - t.Errorf("rename failed for %s:\n%s", newText, diff) - } -} - -func applyTextDocumentEdits(r *runner, edits []protocol.DocumentChanges) (map[span.URI][]byte, error) { - res := make(map[span.URI][]byte) - for _, docEdits := range edits { - if docEdits.TextDocumentEdit != nil { - uri := docEdits.TextDocumentEdit.TextDocument.URI.SpanURI() - var m *protocol.Mapper - // If we have already edited this file, we use the edited version (rather than the - // file in its original state) so that we preserve our initial changes. 
- if content, ok := res[uri]; ok { - m = protocol.NewMapper(uri, content) - } else { - var err error - if m, err = r.data.Mapper(uri); err != nil { - return nil, err - } - } - patched, _, err := source.ApplyProtocolEdits(m, docEdits.TextDocumentEdit.Edits) - if err != nil { - return nil, err - } - res[uri] = patched - } - } - return res, nil -} - -func (r *runner) AddImport(t *testing.T, uri span.URI, expectedImport string) { - cmd, err := command.NewListKnownPackagesCommand("List Known Packages", command.URIArg{ - URI: protocol.URIFromSpanURI(uri), - }) - if err != nil { - t.Fatal(err) - } - resp, err := r.server.executeCommand(r.ctx, &protocol.ExecuteCommandParams{ - Command: cmd.Command, - Arguments: cmd.Arguments, - }) - if err != nil { - t.Fatal(err) - } - res := resp.(command.ListKnownPackagesResult) - var hasPkg bool - for _, p := range res.Packages { - if p == expectedImport { - hasPkg = true - break - } - } - if !hasPkg { - t.Fatalf("%s: got %v packages\nwant contains %q", command.ListKnownPackages, res.Packages, expectedImport) - } - cmd, err = command.NewAddImportCommand("Add Imports", command.AddImportArgs{ - URI: protocol.URIFromSpanURI(uri), - ImportPath: expectedImport, - }) - if err != nil { - t.Fatal(err) - } - _, err = r.server.executeCommand(r.ctx, &protocol.ExecuteCommandParams{ - Command: cmd.Command, - Arguments: cmd.Arguments, - }) - if err != nil { - t.Fatal(err) - } - got := (<-r.editRecv)[uri] - want := r.data.Golden(t, "addimport", uri.Filename(), func() ([]byte, error) { - return []byte(got), nil - }) - if want == nil { - t.Fatalf("golden file %q not found", uri.Filename()) - } - if diff := compare.Bytes(want, got); diff != "" { - t.Errorf("%s mismatch\n%s", command.AddImport, diff) - } -} - -func (r *runner) SelectionRanges(t *testing.T, spn span.Span) { - uri := spn.URI() - sm, err := r.data.Mapper(uri) - if err != nil { - t.Fatal(err) - } - loc, err := sm.SpanLocation(spn) - if err != nil { - t.Error(err) - } - - ranges, err := 
r.server.selectionRange(r.ctx, &protocol.SelectionRangeParams{ - TextDocument: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(uri), - }, - Positions: []protocol.Position{loc.Range.Start}, - }) - if err != nil { - t.Fatal(err) - } - - sb := &strings.Builder{} - for i, path := range ranges { - fmt.Fprintf(sb, "Ranges %d: ", i) - rng := path - for { - s, e, err := sm.RangeOffsets(rng.Range) - if err != nil { - t.Error(err) - } - - var snippet string - if e-s < 30 { - snippet = string(sm.Content[s:e]) - } else { - snippet = string(sm.Content[s:s+15]) + "..." + string(sm.Content[e-15:e]) - } - - fmt.Fprintf(sb, "\n\t%v %q", rng.Range, strings.ReplaceAll(snippet, "\n", "\\n")) - - if rng.Parent == nil { - break - } - rng = *rng.Parent - } - sb.WriteRune('\n') - } - got := sb.String() - - testName := "selectionrange_" + tests.SpanName(spn) - want := r.data.Golden(t, testName, uri.Filename(), func() ([]byte, error) { - return []byte(got), nil - }) - if want == nil { - t.Fatalf("golden file %q not found", uri.Filename()) - } - if diff := compare.Text(got, string(want)); diff != "" { - t.Errorf("%s mismatch\n%s", testName, diff) - } -} - -func (r *runner) collectDiagnostics(view *cache.View) { - if r.diagnostics != nil { - return - } - r.diagnostics = make(map[span.URI][]*source.Diagnostic) - - snapshot, release, err := view.Snapshot() - if err != nil { - panic(err) - } - defer release() - - // Always run diagnostics with analysis. - r.server.diagnose(r.ctx, snapshot, analyzeEverything) - for uri, reports := range r.server.diagnostics { - for _, report := range reports.reports { - for _, d := range report.diags { - r.diagnostics[uri] = append(r.diagnostics[uri], d) - } - } - } -} diff --git a/gopls/internal/lsp/lsprpc/binder.go b/gopls/internal/lsp/lsprpc/binder.go deleted file mode 100644 index 01e59f7bb62..00000000000 --- a/gopls/internal/lsp/lsprpc/binder.go +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsprpc - -import ( - "context" - "encoding/json" - "fmt" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/event" - jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" - "golang.org/x/tools/internal/xcontext" -) - -// The BinderFunc type adapts a bind function to implement the jsonrpc2.Binder -// interface. -type BinderFunc func(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions - -func (f BinderFunc) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { - return f(ctx, conn) -} - -// Middleware defines a transformation of jsonrpc2 Binders, that may be -// composed to build jsonrpc2 servers. -type Middleware func(jsonrpc2_v2.Binder) jsonrpc2_v2.Binder - -// A ServerFunc is used to construct an LSP server for a given client. -type ServerFunc func(context.Context, protocol.ClientCloser) protocol.Server - -// ServerBinder binds incoming connections to a new server. -type ServerBinder struct { - newServer ServerFunc -} - -func NewServerBinder(newServer ServerFunc) *ServerBinder { - return &ServerBinder{newServer: newServer} -} - -func (b *ServerBinder) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { - client := protocol.ClientDispatcherV2(conn) - server := b.newServer(ctx, client) - serverHandler := protocol.ServerHandlerV2(server) - // Wrap the server handler to inject the client into each request context, so - // that log events are reflected back to the client. 
- wrapped := jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { - ctx = protocol.WithClient(ctx, client) - return serverHandler.Handle(ctx, req) - }) - preempter := &canceler{ - conn: conn, - } - return jsonrpc2_v2.ConnectionOptions{ - Handler: wrapped, - Preempter: preempter, - } -} - -type canceler struct { - conn *jsonrpc2_v2.Connection -} - -func (c *canceler) Preempt(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { - if req.Method != "$/cancelRequest" { - return nil, jsonrpc2_v2.ErrNotHandled - } - var params protocol.CancelParams - if err := json.Unmarshal(req.Params, ¶ms); err != nil { - return nil, fmt.Errorf("%w: %v", jsonrpc2_v2.ErrParse, err) - } - var id jsonrpc2_v2.ID - switch raw := params.ID.(type) { - case float64: - id = jsonrpc2_v2.Int64ID(int64(raw)) - case string: - id = jsonrpc2_v2.StringID(raw) - default: - return nil, fmt.Errorf("%w: invalid ID type %T", jsonrpc2_v2.ErrParse, params.ID) - } - c.conn.Cancel(id) - return nil, nil -} - -type ForwardBinder struct { - dialer jsonrpc2_v2.Dialer - onBind func(*jsonrpc2_v2.Connection) -} - -func NewForwardBinder(dialer jsonrpc2_v2.Dialer) *ForwardBinder { - return &ForwardBinder{ - dialer: dialer, - } -} - -func (b *ForwardBinder) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) (opts jsonrpc2_v2.ConnectionOptions) { - client := protocol.ClientDispatcherV2(conn) - clientBinder := NewClientBinder(func(context.Context, protocol.Server) protocol.Client { return client }) - - serverConn, err := jsonrpc2_v2.Dial(context.Background(), b.dialer, clientBinder) - if err != nil { - return jsonrpc2_v2.ConnectionOptions{ - Handler: jsonrpc2_v2.HandlerFunc(func(context.Context, *jsonrpc2_v2.Request) (interface{}, error) { - return nil, fmt.Errorf("%w: %v", jsonrpc2_v2.ErrInternal, err) - }), - } - } - - if b.onBind != nil { - b.onBind(serverConn) - } - server := protocol.ServerDispatcherV2(serverConn) - preempter := &canceler{ - conn: 
conn, - } - detached := xcontext.Detach(ctx) - go func() { - conn.Wait() - if err := serverConn.Close(); err != nil { - event.Log(detached, fmt.Sprintf("closing remote connection: %v", err)) - } - }() - return jsonrpc2_v2.ConnectionOptions{ - Handler: protocol.ServerHandlerV2(server), - Preempter: preempter, - } -} - -// A ClientFunc is used to construct an LSP client for a given server. -type ClientFunc func(context.Context, protocol.Server) protocol.Client - -// ClientBinder binds an LSP client to an incoming connection. -type ClientBinder struct { - newClient ClientFunc -} - -func NewClientBinder(newClient ClientFunc) *ClientBinder { - return &ClientBinder{newClient} -} - -func (b *ClientBinder) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { - server := protocol.ServerDispatcherV2(conn) - client := b.newClient(ctx, server) - return jsonrpc2_v2.ConnectionOptions{ - Handler: protocol.ClientHandlerV2(client), - } -} diff --git a/gopls/internal/lsp/lsprpc/binder_test.go b/gopls/internal/lsp/lsprpc/binder_test.go deleted file mode 100644 index 3315c3eb775..00000000000 --- a/gopls/internal/lsp/lsprpc/binder_test.go +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsprpc_test - -import ( - "context" - "regexp" - "strings" - "testing" - "time" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" - - . 
"golang.org/x/tools/gopls/internal/lsp/lsprpc" -) - -type TestEnv struct { - Conns []*jsonrpc2_v2.Connection - Servers []*jsonrpc2_v2.Server -} - -func (e *TestEnv) Shutdown(t *testing.T) { - for _, s := range e.Servers { - s.Shutdown() - } - for _, c := range e.Conns { - if err := c.Close(); err != nil { - t.Error(err) - } - } - for _, s := range e.Servers { - if err := s.Wait(); err != nil { - t.Error(err) - } - } -} - -func (e *TestEnv) serve(ctx context.Context, t *testing.T, server jsonrpc2_v2.Binder) (jsonrpc2_v2.Listener, *jsonrpc2_v2.Server) { - l, err := jsonrpc2_v2.NetPipeListener(ctx) - if err != nil { - t.Fatal(err) - } - s := jsonrpc2_v2.NewServer(ctx, l, server) - e.Servers = append(e.Servers, s) - return l, s -} - -func (e *TestEnv) dial(ctx context.Context, t *testing.T, dialer jsonrpc2_v2.Dialer, client jsonrpc2_v2.Binder, forwarded bool) *jsonrpc2_v2.Connection { - if forwarded { - l, _ := e.serve(ctx, t, NewForwardBinder(dialer)) - dialer = l.Dialer() - } - conn, err := jsonrpc2_v2.Dial(ctx, dialer, client) - if err != nil { - t.Fatal(err) - } - e.Conns = append(e.Conns, conn) - return conn -} - -func staticClientBinder(client protocol.Client) jsonrpc2_v2.Binder { - f := func(context.Context, protocol.Server) protocol.Client { return client } - return NewClientBinder(f) -} - -func staticServerBinder(server protocol.Server) jsonrpc2_v2.Binder { - f := func(ctx context.Context, client protocol.ClientCloser) protocol.Server { - return server - } - return NewServerBinder(f) -} - -func TestClientLoggingV2(t *testing.T) { - ctx := context.Background() - - for name, forwarded := range map[string]bool{ - "forwarded": true, - "standalone": false, - } { - t.Run(name, func(t *testing.T) { - client := FakeClient{Logs: make(chan string, 10)} - env := new(TestEnv) - defer env.Shutdown(t) - l, _ := env.serve(ctx, t, staticServerBinder(PingServer{})) - conn := env.dial(ctx, t, l.Dialer(), staticClientBinder(client), forwarded) - - if err := 
protocol.ServerDispatcherV2(conn).DidOpen(ctx, &protocol.DidOpenTextDocumentParams{}); err != nil { - t.Errorf("DidOpen: %v", err) - } - select { - case got := <-client.Logs: - want := "ping" - matched, err := regexp.MatchString(want, got) - if err != nil { - t.Fatal(err) - } - if !matched { - t.Errorf("got log %q, want a log containing %q", got, want) - } - case <-time.After(1 * time.Second): - t.Error("timeout waiting for client log") - } - }) - } -} - -func TestRequestCancellationV2(t *testing.T) { - ctx := context.Background() - - for name, forwarded := range map[string]bool{ - "forwarded": true, - "standalone": false, - } { - t.Run(name, func(t *testing.T) { - server := WaitableServer{ - Started: make(chan struct{}), - Completed: make(chan error), - } - env := new(TestEnv) - defer env.Shutdown(t) - l, _ := env.serve(ctx, t, staticServerBinder(server)) - client := FakeClient{Logs: make(chan string, 10)} - conn := env.dial(ctx, t, l.Dialer(), staticClientBinder(client), forwarded) - - sd := protocol.ServerDispatcherV2(conn) - ctx, cancel := context.WithCancel(ctx) - - result := make(chan error) - go func() { - _, err := sd.Hover(ctx, &protocol.HoverParams{}) - result <- err - }() - // Wait for the Hover request to start. - <-server.Started - cancel() - if err := <-result; err == nil { - t.Error("nil error for cancelled Hover(), want non-nil") - } - if err := <-server.Completed; err == nil || !strings.Contains(err.Error(), "cancelled hover") { - t.Errorf("Hover(): unexpected server-side error %v", err) - } - }) - } -} diff --git a/gopls/internal/lsp/lsprpc/commandinterceptor.go b/gopls/internal/lsp/lsprpc/commandinterceptor.go deleted file mode 100644 index 607ee9c9e9f..00000000000 --- a/gopls/internal/lsp/lsprpc/commandinterceptor.go +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package lsprpc - -import ( - "context" - "encoding/json" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" -) - -// HandlerMiddleware is a middleware that only modifies the jsonrpc2 handler. -type HandlerMiddleware func(jsonrpc2_v2.Handler) jsonrpc2_v2.Handler - -// BindHandler transforms a HandlerMiddleware into a Middleware. -func BindHandler(hmw HandlerMiddleware) Middleware { - return Middleware(func(binder jsonrpc2_v2.Binder) jsonrpc2_v2.Binder { - return BinderFunc(func(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { - opts := binder.Bind(ctx, conn) - opts.Handler = hmw(opts.Handler) - return opts - }) - }) -} - -func CommandInterceptor(command string, run func(*protocol.ExecuteCommandParams) (interface{}, error)) Middleware { - return BindHandler(func(delegate jsonrpc2_v2.Handler) jsonrpc2_v2.Handler { - return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { - if req.Method == "workspace/executeCommand" { - var params protocol.ExecuteCommandParams - if err := json.Unmarshal(req.Params, ¶ms); err == nil { - if params.Command == command { - return run(¶ms) - } - } - } - - return delegate.Handle(ctx, req) - }) - }) -} diff --git a/gopls/internal/lsp/lsprpc/commandinterceptor_test.go b/gopls/internal/lsp/lsprpc/commandinterceptor_test.go deleted file mode 100644 index 555f15130cc..00000000000 --- a/gopls/internal/lsp/lsprpc/commandinterceptor_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsprpc_test - -import ( - "context" - "testing" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - - . 
"golang.org/x/tools/gopls/internal/lsp/lsprpc" -) - -func TestCommandInterceptor(t *testing.T) { - const command = "foo" - caught := false - intercept := func(_ *protocol.ExecuteCommandParams) (interface{}, error) { - caught = true - return map[string]interface{}{}, nil - } - - ctx := context.Background() - env := new(TestEnv) - defer env.Shutdown(t) - mw := CommandInterceptor(command, intercept) - l, _ := env.serve(ctx, t, mw(noopBinder)) - conn := env.dial(ctx, t, l.Dialer(), noopBinder, false) - - params := &protocol.ExecuteCommandParams{ - Command: command, - } - var res interface{} - err := conn.Call(ctx, "workspace/executeCommand", params).Await(ctx, &res) - if err != nil { - t.Fatal(err) - } - if !caught { - t.Errorf("workspace/executeCommand was not intercepted") - } -} diff --git a/gopls/internal/lsp/lsprpc/goenv.go b/gopls/internal/lsp/lsprpc/goenv.go deleted file mode 100644 index b7717844f17..00000000000 --- a/gopls/internal/lsp/lsprpc/goenv.go +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package lsprpc - -import ( - "context" - "encoding/json" - "fmt" - "os" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/gocommand" - jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" -) - -func GoEnvMiddleware() (Middleware, error) { - return BindHandler(func(delegate jsonrpc2_v2.Handler) jsonrpc2_v2.Handler { - return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { - if req.Method == "initialize" { - if err := addGoEnvToInitializeRequestV2(ctx, req); err != nil { - event.Error(ctx, "adding go env to initialize", err) - } - } - return delegate.Handle(ctx, req) - }) - }), nil -} - -func addGoEnvToInitializeRequestV2(ctx context.Context, req *jsonrpc2_v2.Request) error { - var params protocol.ParamInitialize - if err := json.Unmarshal(req.Params, ¶ms); err != nil { - return err - } - var opts map[string]interface{} - switch v := params.InitializationOptions.(type) { - case nil: - opts = make(map[string]interface{}) - case map[string]interface{}: - opts = v - default: - return fmt.Errorf("unexpected type for InitializationOptions: %T", v) - } - envOpt, ok := opts["env"] - if !ok { - envOpt = make(map[string]interface{}) - } - env, ok := envOpt.(map[string]interface{}) - if !ok { - return fmt.Errorf("env option is %T, expected a map", envOpt) - } - goenv, err := getGoEnv(ctx, env) - if err != nil { - return err - } - // We don't want to propagate GOWORK unless explicitly set since that could mess with - // path inference during cmd/go invocations, see golang/go#51825. 
- _, goworkSet := os.LookupEnv("GOWORK") - for govar, value := range goenv { - if govar == "GOWORK" && !goworkSet { - continue - } - env[govar] = value - } - opts["env"] = env - params.InitializationOptions = opts - raw, err := json.Marshal(params) - if err != nil { - return fmt.Errorf("marshaling updated options: %v", err) - } - req.Params = json.RawMessage(raw) - return nil -} - -func getGoEnv(ctx context.Context, env map[string]interface{}) (map[string]string, error) { - var runEnv []string - for k, v := range env { - runEnv = append(runEnv, fmt.Sprintf("%s=%s", k, v)) - } - runner := gocommand.Runner{} - output, err := runner.Run(ctx, gocommand.Invocation{ - Verb: "env", - Args: []string{"-json"}, - Env: runEnv, - }) - if err != nil { - return nil, err - } - envmap := make(map[string]string) - if err := json.Unmarshal(output.Bytes(), &envmap); err != nil { - return nil, err - } - return envmap, nil -} diff --git a/gopls/internal/lsp/lsprpc/goenv_test.go b/gopls/internal/lsp/lsprpc/goenv_test.go deleted file mode 100644 index 3030ef34dfc..00000000000 --- a/gopls/internal/lsp/lsprpc/goenv_test.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsprpc_test - -import ( - "context" - "testing" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/testenv" - - . 
"golang.org/x/tools/gopls/internal/lsp/lsprpc" -) - -type initServer struct { - protocol.Server - - params *protocol.ParamInitialize -} - -func (s *initServer) Initialize(ctx context.Context, params *protocol.ParamInitialize) (*protocol.InitializeResult, error) { - s.params = params - return &protocol.InitializeResult{}, nil -} - -func TestGoEnvMiddleware(t *testing.T) { - testenv.NeedsTool(t, "go") - - ctx := context.Background() - - server := &initServer{} - env := new(TestEnv) - defer env.Shutdown(t) - l, _ := env.serve(ctx, t, staticServerBinder(server)) - mw, err := GoEnvMiddleware() - if err != nil { - t.Fatal(err) - } - binder := mw(NewForwardBinder(l.Dialer())) - l, _ = env.serve(ctx, t, binder) - conn := env.dial(ctx, t, l.Dialer(), noopBinder, true) - dispatch := protocol.ServerDispatcherV2(conn) - initParams := &protocol.ParamInitialize{} - initParams.InitializationOptions = map[string]interface{}{ - "env": map[string]interface{}{ - "GONOPROXY": "example.com", - }, - } - if _, err := dispatch.Initialize(ctx, initParams); err != nil { - t.Fatal(err) - } - - if server.params == nil { - t.Fatalf("initialize params are unset") - } - envOpts := server.params.InitializationOptions.(map[string]interface{})["env"].(map[string]interface{}) - - // Check for an arbitrary Go variable. It should be set. - if _, ok := envOpts["GOPRIVATE"]; !ok { - t.Errorf("Go environment variable GOPRIVATE unset in initialization options") - } - // Check that the variable present in our user config was not overwritten. - if got, want := envOpts["GONOPROXY"], "example.com"; got != want { - t.Errorf("GONOPROXY=%q, want %q", got, want) - } -} diff --git a/gopls/internal/lsp/lsprpc/middleware.go b/gopls/internal/lsp/lsprpc/middleware.go deleted file mode 100644 index 50089cde7dc..00000000000 --- a/gopls/internal/lsp/lsprpc/middleware.go +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsprpc - -import ( - "context" - "encoding/json" - "fmt" - "sync" - - "golang.org/x/tools/internal/event" - jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" -) - -// Metadata holds arbitrary data transferred between jsonrpc2 peers. -type Metadata map[string]interface{} - -// PeerInfo holds information about a peering between jsonrpc2 servers. -type PeerInfo struct { - // RemoteID is the identity of the current server on its peer. - RemoteID int64 - - // LocalID is the identity of the peer on the server. - LocalID int64 - - // IsClient reports whether the peer is a client. If false, the peer is a - // server. - IsClient bool - - // Metadata holds arbitrary information provided by the peer. - Metadata Metadata -} - -// Handshaker handles both server and client handshaking over jsonrpc2. To -// instrument server-side handshaking, use Handshaker.Middleware. To instrument -// client-side handshaking, call Handshaker.ClientHandshake for any new -// client-side connections. -type Handshaker struct { - // Metadata will be shared with peers via handshaking. - Metadata Metadata - - mu sync.Mutex - prevID int64 - peers map[int64]PeerInfo -} - -// Peers returns the peer info this handshaker knows about by way of either the -// server-side handshake middleware, or client-side handshakes. -func (h *Handshaker) Peers() []PeerInfo { - h.mu.Lock() - defer h.mu.Unlock() - - var c []PeerInfo - for _, v := range h.peers { - c = append(c, v) - } - return c -} - -// Middleware is a jsonrpc2 middleware function to augment connection binding -// to handle the handshake method, and record disconnections. 
-func (h *Handshaker) Middleware(inner jsonrpc2_v2.Binder) jsonrpc2_v2.Binder { - return BinderFunc(func(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { - opts := inner.Bind(ctx, conn) - - localID := h.nextID() - info := &PeerInfo{ - RemoteID: localID, - Metadata: h.Metadata, - } - - // Wrap the delegated handler to accept the handshake. - delegate := opts.Handler - opts.Handler = jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { - if req.Method == handshakeMethod { - var peerInfo PeerInfo - if err := json.Unmarshal(req.Params, &peerInfo); err != nil { - return nil, fmt.Errorf("%w: unmarshaling client info: %v", jsonrpc2_v2.ErrInvalidParams, err) - } - peerInfo.LocalID = localID - peerInfo.IsClient = true - h.recordPeer(peerInfo) - return info, nil - } - return delegate.Handle(ctx, req) - }) - - // Record the dropped client. - go h.cleanupAtDisconnect(conn, localID) - - return opts - }) -} - -// ClientHandshake performs a client-side handshake with the server at the -// other end of conn, recording the server's peer info and watching for conn's -// disconnection. 
-func (h *Handshaker) ClientHandshake(ctx context.Context, conn *jsonrpc2_v2.Connection) { - localID := h.nextID() - info := &PeerInfo{ - RemoteID: localID, - Metadata: h.Metadata, - } - - call := conn.Call(ctx, handshakeMethod, info) - var serverInfo PeerInfo - if err := call.Await(ctx, &serverInfo); err != nil { - event.Error(ctx, "performing handshake", err) - return - } - serverInfo.LocalID = localID - h.recordPeer(serverInfo) - - go h.cleanupAtDisconnect(conn, localID) -} - -func (h *Handshaker) nextID() int64 { - h.mu.Lock() - defer h.mu.Unlock() - - h.prevID++ - return h.prevID -} - -func (h *Handshaker) cleanupAtDisconnect(conn *jsonrpc2_v2.Connection, peerID int64) { - conn.Wait() - - h.mu.Lock() - defer h.mu.Unlock() - delete(h.peers, peerID) -} - -func (h *Handshaker) recordPeer(info PeerInfo) { - h.mu.Lock() - defer h.mu.Unlock() - if h.peers == nil { - h.peers = make(map[int64]PeerInfo) - } - h.peers[info.LocalID] = info -} diff --git a/gopls/internal/lsp/lsprpc/middleware_test.go b/gopls/internal/lsp/lsprpc/middleware_test.go deleted file mode 100644 index c528eae5c62..00000000000 --- a/gopls/internal/lsp/lsprpc/middleware_test.go +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsprpc_test - -import ( - "context" - "errors" - "fmt" - "testing" - "time" - - . 
"golang.org/x/tools/gopls/internal/lsp/lsprpc" - jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" -) - -var noopBinder = BinderFunc(func(context.Context, *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { - return jsonrpc2_v2.ConnectionOptions{} -}) - -func TestHandshakeMiddleware(t *testing.T) { - sh := &Handshaker{ - Metadata: Metadata{ - "answer": 42, - }, - } - ctx := context.Background() - env := new(TestEnv) - defer env.Shutdown(t) - l, _ := env.serve(ctx, t, sh.Middleware(noopBinder)) - conn := env.dial(ctx, t, l.Dialer(), noopBinder, false) - ch := &Handshaker{ - Metadata: Metadata{ - "question": 6 * 9, - }, - } - - check := func(connected bool) error { - clients := sh.Peers() - servers := ch.Peers() - want := 0 - if connected { - want = 1 - } - if got := len(clients); got != want { - return fmt.Errorf("got %d clients on the server, want %d", got, want) - } - if got := len(servers); got != want { - return fmt.Errorf("got %d servers on the client, want %d", got, want) - } - if !connected { - return nil - } - client := clients[0] - server := servers[0] - if _, ok := client.Metadata["question"]; !ok { - return errors.New("no client metadata") - } - if _, ok := server.Metadata["answer"]; !ok { - return errors.New("no server metadata") - } - if client.LocalID != server.RemoteID { - return fmt.Errorf("client.LocalID == %d, server.PeerID == %d", client.LocalID, server.RemoteID) - } - if client.RemoteID != server.LocalID { - return fmt.Errorf("client.PeerID == %d, server.LocalID == %d", client.RemoteID, server.LocalID) - } - return nil - } - - if err := check(false); err != nil { - t.Fatalf("before handshake: %v", err) - } - ch.ClientHandshake(ctx, conn) - if err := check(true); err != nil { - t.Fatalf("after handshake: %v", err) - } - conn.Close() - // Wait for up to ~2s for connections to get cleaned up. 
- delay := 25 * time.Millisecond - for retries := 3; retries >= 0; retries-- { - time.Sleep(delay) - err := check(false) - if err == nil { - return - } - if retries == 0 { - t.Fatalf("after closing connection: %v", err) - } - delay *= 4 - } -} diff --git a/gopls/internal/lsp/mod/code_lens.go b/gopls/internal/lsp/mod/code_lens.go deleted file mode 100644 index b93ac44f132..00000000000 --- a/gopls/internal/lsp/mod/code_lens.go +++ /dev/null @@ -1,191 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package mod - -import ( - "context" - "fmt" - "os" - "path/filepath" - - "golang.org/x/mod/modfile" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" -) - -// LensFuncs returns the supported lensFuncs for go.mod files. -func LensFuncs() map[command.Command]source.LensFunc { - return map[command.Command]source.LensFunc{ - command.UpgradeDependency: upgradeLenses, - command.Tidy: tidyLens, - command.Vendor: vendorLens, - command.RunGovulncheck: vulncheckLenses, - } -} - -func upgradeLenses(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) ([]protocol.CodeLens, error) { - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil || pm.File == nil { - return nil, err - } - uri := protocol.URIFromSpanURI(fh.URI()) - reset, err := command.NewResetGoModDiagnosticsCommand("Reset go.mod diagnostics", command.ResetGoModDiagnosticsArgs{URIArg: command.URIArg{URI: uri}}) - if err != nil { - return nil, err - } - // Put the `Reset go.mod diagnostics` codelens on the module statement. - modrng, err := moduleStmtRange(fh, pm) - if err != nil { - return nil, err - } - lenses := []protocol.CodeLens{{Range: modrng, Command: &reset}} - if len(pm.File.Require) == 0 { - // Nothing to upgrade. 
- return lenses, nil - } - var requires []string - for _, req := range pm.File.Require { - requires = append(requires, req.Mod.Path) - } - checkUpgrade, err := command.NewCheckUpgradesCommand("Check for upgrades", command.CheckUpgradesArgs{ - URI: uri, - Modules: requires, - }) - if err != nil { - return nil, err - } - upgradeTransitive, err := command.NewUpgradeDependencyCommand("Upgrade transitive dependencies", command.DependencyArgs{ - URI: uri, - AddRequire: false, - GoCmdArgs: []string{"-d", "-u", "-t", "./..."}, - }) - if err != nil { - return nil, err - } - upgradeDirect, err := command.NewUpgradeDependencyCommand("Upgrade direct dependencies", command.DependencyArgs{ - URI: uri, - AddRequire: false, - GoCmdArgs: append([]string{"-d"}, requires...), - }) - if err != nil { - return nil, err - } - - // Put the upgrade code lenses above the first require block or statement. - rng, err := firstRequireRange(fh, pm) - if err != nil { - return nil, err - } - - return append(lenses, []protocol.CodeLens{ - {Range: rng, Command: &checkUpgrade}, - {Range: rng, Command: &upgradeTransitive}, - {Range: rng, Command: &upgradeDirect}, - }...), nil -} - -func tidyLens(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) ([]protocol.CodeLens, error) { - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil || pm.File == nil { - return nil, err - } - uri := protocol.URIFromSpanURI(fh.URI()) - cmd, err := command.NewTidyCommand("Run go mod tidy", command.URIArgs{URIs: []protocol.DocumentURI{uri}}) - if err != nil { - return nil, err - } - rng, err := moduleStmtRange(fh, pm) - if err != nil { - return nil, err - } - return []protocol.CodeLens{{ - Range: rng, - Command: &cmd, - }}, nil -} - -func vendorLens(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) ([]protocol.CodeLens, error) { - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil || pm.File == nil { - return nil, err - } - if len(pm.File.Require) == 0 { - // Nothing to vendor. 
- return nil, nil - } - rng, err := moduleStmtRange(fh, pm) - if err != nil { - return nil, err - } - title := "Create vendor directory" - uri := protocol.URIFromSpanURI(fh.URI()) - cmd, err := command.NewVendorCommand(title, command.URIArg{URI: uri}) - if err != nil { - return nil, err - } - // Change the message depending on whether or not the module already has a - // vendor directory. - vendorDir := filepath.Join(filepath.Dir(fh.URI().Filename()), "vendor") - if info, _ := os.Stat(vendorDir); info != nil && info.IsDir() { - title = "Sync vendor directory" - } - return []protocol.CodeLens{{Range: rng, Command: &cmd}}, nil -} - -func moduleStmtRange(fh source.FileHandle, pm *source.ParsedModule) (protocol.Range, error) { - if pm.File == nil || pm.File.Module == nil || pm.File.Module.Syntax == nil { - return protocol.Range{}, fmt.Errorf("no module statement in %s", fh.URI()) - } - syntax := pm.File.Module.Syntax - return pm.Mapper.OffsetRange(syntax.Start.Byte, syntax.End.Byte) -} - -// firstRequireRange returns the range for the first "require" in the given -// go.mod file. This is either a require block or an individual require line. 
-func firstRequireRange(fh source.FileHandle, pm *source.ParsedModule) (protocol.Range, error) { - if len(pm.File.Require) == 0 { - return protocol.Range{}, fmt.Errorf("no requires in the file %s", fh.URI()) - } - var start, end modfile.Position - for _, stmt := range pm.File.Syntax.Stmt { - if b, ok := stmt.(*modfile.LineBlock); ok && len(b.Token) == 1 && b.Token[0] == "require" { - start, end = b.Span() - break - } - } - - firstRequire := pm.File.Require[0].Syntax - if start.Byte == 0 || firstRequire.Start.Byte < start.Byte { - start, end = firstRequire.Start, firstRequire.End - } - return pm.Mapper.OffsetRange(start.Byte, end.Byte) -} - -func vulncheckLenses(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) ([]protocol.CodeLens, error) { - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil || pm.File == nil { - return nil, err - } - // Place the codelenses near the module statement. - // A module may not have the require block, - // but vulnerabilities can exist in standard libraries. - uri := protocol.URIFromSpanURI(fh.URI()) - rng, err := moduleStmtRange(fh, pm) - if err != nil { - return nil, err - } - - vulncheck, err := command.NewRunGovulncheckCommand("Run govulncheck", command.VulncheckArgs{ - URI: uri, - Pattern: "./...", - }) - if err != nil { - return nil, err - } - return []protocol.CodeLens{ - {Range: rng, Command: &vulncheck}, - }, nil -} diff --git a/gopls/internal/lsp/mod/diagnostics.go b/gopls/internal/lsp/mod/diagnostics.go deleted file mode 100644 index 9f901206988..00000000000 --- a/gopls/internal/lsp/mod/diagnostics.go +++ /dev/null @@ -1,559 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package mod provides core features related to go.mod file -// handling for use by Go editors and tools. 
-package mod - -import ( - "context" - "fmt" - "runtime" - "sort" - "strings" - "sync" - - "golang.org/x/mod/modfile" - "golang.org/x/mod/semver" - "golang.org/x/sync/errgroup" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/gopls/internal/vulncheck/govulncheck" - "golang.org/x/tools/internal/event" -) - -// Diagnostics returns diagnostics from parsing the modules in the workspace. -func Diagnostics(ctx context.Context, snapshot source.Snapshot) (map[span.URI][]*source.Diagnostic, error) { - ctx, done := event.Start(ctx, "mod.Diagnostics", source.SnapshotLabels(snapshot)...) - defer done() - - return collectDiagnostics(ctx, snapshot, ModParseDiagnostics) -} - -// Diagnostics returns diagnostics from running go mod tidy. -func TidyDiagnostics(ctx context.Context, snapshot source.Snapshot) (map[span.URI][]*source.Diagnostic, error) { - ctx, done := event.Start(ctx, "mod.Diagnostics", source.SnapshotLabels(snapshot)...) - defer done() - - return collectDiagnostics(ctx, snapshot, ModTidyDiagnostics) -} - -// UpgradeDiagnostics returns upgrade diagnostics for the modules in the -// workspace with known upgrades. -func UpgradeDiagnostics(ctx context.Context, snapshot source.Snapshot) (map[span.URI][]*source.Diagnostic, error) { - ctx, done := event.Start(ctx, "mod.UpgradeDiagnostics", source.SnapshotLabels(snapshot)...) - defer done() - - return collectDiagnostics(ctx, snapshot, ModUpgradeDiagnostics) -} - -// VulnerabilityDiagnostics returns vulnerability diagnostics for the active modules in the -// workspace with known vulnerabilities. -func VulnerabilityDiagnostics(ctx context.Context, snapshot source.Snapshot) (map[span.URI][]*source.Diagnostic, error) { - ctx, done := event.Start(ctx, "mod.VulnerabilityDiagnostics", source.SnapshotLabels(snapshot)...) 
- defer done() - - return collectDiagnostics(ctx, snapshot, ModVulnerabilityDiagnostics) -} - -func collectDiagnostics(ctx context.Context, snapshot source.Snapshot, diagFn func(context.Context, source.Snapshot, source.FileHandle) ([]*source.Diagnostic, error)) (map[span.URI][]*source.Diagnostic, error) { - g, ctx := errgroup.WithContext(ctx) - cpulimit := runtime.GOMAXPROCS(0) - g.SetLimit(cpulimit) - - var mu sync.Mutex - reports := make(map[span.URI][]*source.Diagnostic) - - for _, uri := range snapshot.ModFiles() { - uri := uri - g.Go(func() error { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return err - } - diagnostics, err := diagFn(ctx, snapshot, fh) - if err != nil { - return err - } - for _, d := range diagnostics { - mu.Lock() - reports[d.URI] = append(reports[fh.URI()], d) - mu.Unlock() - } - return nil - }) - } - - if err := g.Wait(); err != nil { - return nil, err - } - return reports, nil -} - -// ModParseDiagnostics reports diagnostics from parsing the mod file. -func ModParseDiagnostics(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) (diagnostics []*source.Diagnostic, err error) { - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil { - if pm == nil || len(pm.ParseErrors) == 0 { - return nil, err - } - return pm.ParseErrors, nil - } - return nil, nil -} - -// ModTidyDiagnostics reports diagnostics from running go mod tidy. 
-func ModTidyDiagnostics(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) (diagnostics []*source.Diagnostic, err error) { - pm, err := snapshot.ParseMod(ctx, fh) // memoized - if err != nil { - return nil, nil // errors reported by ModDiagnostics above - } - - tidied, err := snapshot.ModTidy(ctx, pm) - if err != nil && !source.IsNonFatalGoModError(err) { - event.Error(ctx, fmt.Sprintf("tidy: diagnosing %s", pm.URI), err) - } - if err == nil { - for _, d := range tidied.Diagnostics { - if d.URI != fh.URI() { - continue - } - diagnostics = append(diagnostics, d) - } - } - return diagnostics, nil -} - -// ModUpgradeDiagnostics adds upgrade quick fixes for individual modules if the upgrades -// are recorded in the view. -func ModUpgradeDiagnostics(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) (upgradeDiagnostics []*source.Diagnostic, err error) { - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil { - // Don't return an error if there are parse error diagnostics to be shown, but also do not - // continue since we won't be able to show the upgrade diagnostics. - if pm != nil && len(pm.ParseErrors) != 0 { - return nil, nil - } - return nil, err - } - - upgrades := snapshot.View().ModuleUpgrades(fh.URI()) - for _, req := range pm.File.Require { - ver, ok := upgrades[req.Mod.Path] - if !ok || req.Mod.Version == ver { - continue - } - rng, err := pm.Mapper.OffsetRange(req.Syntax.Start.Byte, req.Syntax.End.Byte) - if err != nil { - return nil, err - } - // Upgrade to the exact version we offer the user, not the most recent. 
- title := fmt.Sprintf("%s%v", upgradeCodeActionPrefix, ver) - cmd, err := command.NewUpgradeDependencyCommand(title, command.DependencyArgs{ - URI: protocol.URIFromSpanURI(fh.URI()), - AddRequire: false, - GoCmdArgs: []string{req.Mod.Path + "@" + ver}, - }) - if err != nil { - return nil, err - } - upgradeDiagnostics = append(upgradeDiagnostics, &source.Diagnostic{ - URI: fh.URI(), - Range: rng, - Severity: protocol.SeverityInformation, - Source: source.UpgradeNotification, - Message: fmt.Sprintf("%v can be upgraded", req.Mod.Path), - SuggestedFixes: []source.SuggestedFix{source.SuggestedFixFromCommand(cmd, protocol.QuickFix)}, - }) - } - - return upgradeDiagnostics, nil -} - -const upgradeCodeActionPrefix = "Upgrade to " - -// ModVulnerabilityDiagnostics adds diagnostics for vulnerabilities in individual modules -// if the vulnerability is recorded in the view. -func ModVulnerabilityDiagnostics(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) (vulnDiagnostics []*source.Diagnostic, err error) { - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil { - // Don't return an error if there are parse error diagnostics to be shown, but also do not - // continue since we won't be able to show the vulnerability diagnostics. 
- if pm != nil && len(pm.ParseErrors) != 0 { - return nil, nil - } - return nil, err - } - - diagSource := source.Govulncheck - vs := snapshot.View().Vulnerabilities(fh.URI())[fh.URI()] - if vs == nil && snapshot.Options().Vulncheck == source.ModeVulncheckImports { - vs, err = snapshot.ModVuln(ctx, fh.URI()) - if err != nil { - return nil, err - } - diagSource = source.Vulncheck - } - if vs == nil || len(vs.Findings) == 0 { - return nil, nil - } - - suggestRunOrResetGovulncheck, err := suggestGovulncheckAction(diagSource == source.Govulncheck, fh.URI()) - if err != nil { - // must not happen - return nil, err // TODO: bug report - } - vulnsByModule := make(map[string][]*govulncheck.Finding) - - for _, finding := range vs.Findings { - if vuln, typ := foundVuln(finding); typ == vulnCalled || typ == vulnImported { - vulnsByModule[vuln.Module] = append(vulnsByModule[vuln.Module], finding) - } - } - for _, req := range pm.File.Require { - mod := req.Mod.Path - findings := vulnsByModule[mod] - if len(findings) == 0 { - continue - } - // note: req.Syntax is the line corresponding to 'require', which means - // req.Syntax.Start can point to the beginning of the "require" keyword - // for a single line require (e.g. "require golang.org/x/mod v0.0.0"). - start := req.Syntax.Start.Byte - if len(req.Syntax.Token) == 3 { - start += len("require ") - } - rng, err := pm.Mapper.OffsetRange(start, req.Syntax.End.Byte) - if err != nil { - return nil, err - } - // Map affecting vulns to 'warning' level diagnostics, - // others to 'info' level diagnostics. - // Fixes will include only the upgrades for warning level diagnostics. - var warningFixes, infoFixes []source.SuggestedFix - var warningSet, infoSet = map[string]bool{}, map[string]bool{} - for _, finding := range findings { - // It is possible that the source code was changed since the last - // govulncheck run and information in the `vulns` info is stale. 
- // For example, imagine that a user is in the middle of updating - // problematic modules detected by the govulncheck run by applying - // quick fixes. Stale diagnostics can be confusing and prevent the - // user from quickly locating the next module to fix. - // Ideally we should rerun the analysis with the updated module - // dependencies or any other code changes, but we are not yet - // in the position of automatically triggering the analysis - // (govulncheck can take a while). We also don't know exactly what - // part of source code was changed since `vulns` was computed. - // As a heuristic, we assume that a user upgrades the affecting - // module to the version with the fix or the latest one, and if the - // version in the require statement is equal to or higher than the - // fixed version, skip generating a diagnostic about the vulnerability. - // Eventually, the user has to rerun govulncheck. - if finding.FixedVersion != "" && semver.IsValid(req.Mod.Version) && semver.Compare(finding.FixedVersion, req.Mod.Version) <= 0 { - continue - } - switch _, typ := foundVuln(finding); typ { - case vulnImported: - infoSet[finding.OSV] = true - case vulnCalled: - warningSet[finding.OSV] = true - } - // Upgrade to the exact version we offer the user, not the most recent. - if fixedVersion := finding.FixedVersion; semver.IsValid(fixedVersion) && semver.Compare(req.Mod.Version, fixedVersion) < 0 { - cmd, err := getUpgradeCodeAction(fh, req, fixedVersion) - if err != nil { - return nil, err // TODO: bug report - } - sf := source.SuggestedFixFromCommand(cmd, protocol.QuickFix) - switch _, typ := foundVuln(finding); typ { - case vulnImported: - infoFixes = append(infoFixes, sf) - case vulnCalled: - warningFixes = append(warningFixes, sf) - } - } - } - - if len(warningSet) == 0 && len(infoSet) == 0 { - continue - } - // Remove affecting osvs from the non-affecting osv list if any. 
- if len(warningSet) > 0 { - for k := range infoSet { - if warningSet[k] { - delete(infoSet, k) - } - } - } - // Add an upgrade for module@latest. - // TODO(suzmue): verify if latest is the same as fixedVersion. - latest, err := getUpgradeCodeAction(fh, req, "latest") - if err != nil { - return nil, err // TODO: bug report - } - sf := source.SuggestedFixFromCommand(latest, protocol.QuickFix) - if len(warningFixes) > 0 { - warningFixes = append(warningFixes, sf) - } - if len(infoFixes) > 0 { - infoFixes = append(infoFixes, sf) - } - if len(warningSet) > 0 { - warning := sortedKeys(warningSet) - warningFixes = append(warningFixes, suggestRunOrResetGovulncheck) - vulnDiagnostics = append(vulnDiagnostics, &source.Diagnostic{ - URI: fh.URI(), - Range: rng, - Severity: protocol.SeverityWarning, - Source: diagSource, - Message: getVulnMessage(req.Mod.Path, warning, true, diagSource == source.Govulncheck), - SuggestedFixes: warningFixes, - }) - } - if len(infoSet) > 0 { - info := sortedKeys(infoSet) - infoFixes = append(infoFixes, suggestRunOrResetGovulncheck) - vulnDiagnostics = append(vulnDiagnostics, &source.Diagnostic{ - URI: fh.URI(), - Range: rng, - Severity: protocol.SeverityInformation, - Source: diagSource, - Message: getVulnMessage(req.Mod.Path, info, false, diagSource == source.Govulncheck), - SuggestedFixes: infoFixes, - }) - } - } - - // TODO(hyangah): place this diagnostic on the `go` directive or `toolchain` directive - // after https://go.dev/issue/57001. - const diagnoseStdLib = false - - // If diagnosing the stdlib, add standard library vulnerability diagnostics - // on the module declaration. - // - // Only proceed if we have a valid module declaration on which to position - // the diagnostics. - if diagnoseStdLib && pm.File.Module != nil && pm.File.Module.Syntax != nil { - // Add standard library vulnerabilities. 
- stdlibVulns := vulnsByModule["stdlib"] - if len(stdlibVulns) == 0 { - return vulnDiagnostics, nil - } - - // Put the standard library diagnostic on the module declaration. - rng, err := pm.Mapper.OffsetRange(pm.File.Module.Syntax.Start.Byte, pm.File.Module.Syntax.End.Byte) - if err != nil { - return vulnDiagnostics, nil // TODO: bug report - } - - var warningSet, infoSet = map[string]bool{}, map[string]bool{} - for _, finding := range stdlibVulns { - switch _, typ := foundVuln(finding); typ { - case vulnImported: - infoSet[finding.OSV] = true - case vulnCalled: - warningSet[finding.OSV] = true - } - } - if len(warningSet) > 0 { - warning := sortedKeys(warningSet) - fixes := []source.SuggestedFix{suggestRunOrResetGovulncheck} - vulnDiagnostics = append(vulnDiagnostics, &source.Diagnostic{ - URI: fh.URI(), - Range: rng, - Severity: protocol.SeverityWarning, - Source: diagSource, - Message: getVulnMessage("go", warning, true, diagSource == source.Govulncheck), - SuggestedFixes: fixes, - }) - - // remove affecting osvs from the non-affecting osv list if any. - for k := range infoSet { - if warningSet[k] { - delete(infoSet, k) - } - } - } - if len(infoSet) > 0 { - info := sortedKeys(infoSet) - fixes := []source.SuggestedFix{suggestRunOrResetGovulncheck} - vulnDiagnostics = append(vulnDiagnostics, &source.Diagnostic{ - URI: fh.URI(), - Range: rng, - Severity: protocol.SeverityInformation, - Source: diagSource, - Message: getVulnMessage("go", info, false, diagSource == source.Govulncheck), - SuggestedFixes: fixes, - }) - } - } - - return vulnDiagnostics, nil -} - -type vulnFindingType int - -const ( - vulnUnknown vulnFindingType = iota - vulnCalled - vulnImported - vulnRequired -) - -// foundVuln returns the frame info describing discovered vulnerable symbol/package/module -// and how this vulnerability affects the analyzed package or module. 
-func foundVuln(finding *govulncheck.Finding) (*govulncheck.Frame, vulnFindingType) { - // finding.Trace is sorted from the imported vulnerable symbol to - // the entry point in the callstack. - // If Function is set, then Package must be set. Module will always be set. - // If Function is set it was found in the call graph, otherwise if Package is set - // it was found in the import graph, otherwise it was found in the require graph. - // See the documentation of govulncheck.Finding. - if len(finding.Trace) == 0 { // this shouldn't happen, but just in case... - return nil, vulnUnknown - } - vuln := finding.Trace[0] - if vuln.Package == "" { - return vuln, vulnRequired - } - if vuln.Function == "" { - return vuln, vulnImported - } - return vuln, vulnCalled -} - -func sortedKeys(m map[string]bool) []string { - ret := make([]string, 0, len(m)) - for k := range m { - ret = append(ret, k) - } - sort.Strings(ret) - return ret -} - -// suggestGovulncheckAction returns a code action that suggests either run govulncheck -// for more accurate investigation (if the present vulncheck diagnostics are based on -// analysis less accurate than govulncheck) or reset the existing govulncheck result -// (if the present vulncheck diagnostics are already based on govulncheck run). 
-func suggestGovulncheckAction(fromGovulncheck bool, uri span.URI) (source.SuggestedFix, error) { - if fromGovulncheck { - resetVulncheck, err := command.NewResetGoModDiagnosticsCommand("Reset govulncheck result", command.ResetGoModDiagnosticsArgs{ - URIArg: command.URIArg{URI: protocol.DocumentURI(uri)}, - DiagnosticSource: string(source.Govulncheck), - }) - if err != nil { - return source.SuggestedFix{}, err - } - return source.SuggestedFixFromCommand(resetVulncheck, protocol.QuickFix), nil - } - vulncheck, err := command.NewRunGovulncheckCommand("Run govulncheck to verify", command.VulncheckArgs{ - URI: protocol.DocumentURI(uri), - Pattern: "./...", - }) - if err != nil { - return source.SuggestedFix{}, err - } - return source.SuggestedFixFromCommand(vulncheck, protocol.QuickFix), nil -} - -func getVulnMessage(mod string, vulns []string, used, fromGovulncheck bool) string { - var b strings.Builder - if used { - switch len(vulns) { - case 1: - fmt.Fprintf(&b, "%v has a vulnerability used in the code: %v.", mod, vulns[0]) - default: - fmt.Fprintf(&b, "%v has vulnerabilities used in the code: %v.", mod, strings.Join(vulns, ", ")) - } - } else { - if fromGovulncheck { - switch len(vulns) { - case 1: - fmt.Fprintf(&b, "%v has a vulnerability %v that is not used in the code.", mod, vulns[0]) - default: - fmt.Fprintf(&b, "%v has known vulnerabilities %v that are not used in the code.", mod, strings.Join(vulns, ", ")) - } - } else { - switch len(vulns) { - case 1: - fmt.Fprintf(&b, "%v has a vulnerability %v.", mod, vulns[0]) - default: - fmt.Fprintf(&b, "%v has known vulnerabilities %v.", mod, strings.Join(vulns, ", ")) - } - } - } - return b.String() -} - -// href returns the url for the vulnerability information. -// Eventually we should retrieve the url embedded in the osv.Entry. -// While vuln.go.dev is under development, this always returns -// the page in pkg.go.dev. 
-func href(vulnID string) string { - return fmt.Sprintf("https://pkg.go.dev/vuln/%s", vulnID) -} - -func getUpgradeCodeAction(fh source.FileHandle, req *modfile.Require, version string) (protocol.Command, error) { - cmd, err := command.NewUpgradeDependencyCommand(upgradeTitle(version), command.DependencyArgs{ - URI: protocol.URIFromSpanURI(fh.URI()), - AddRequire: false, - GoCmdArgs: []string{req.Mod.Path + "@" + version}, - }) - if err != nil { - return protocol.Command{}, err - } - return cmd, nil -} - -func upgradeTitle(fixedVersion string) string { - title := fmt.Sprintf("%s%v", upgradeCodeActionPrefix, fixedVersion) - return title -} - -// SelectUpgradeCodeActions takes a list of code actions for a required module -// and returns a more selective list of upgrade code actions, -// where the code actions have been deduped. Code actions unrelated to upgrade -// are deduplicated by the name. -func SelectUpgradeCodeActions(actions []protocol.CodeAction) []protocol.CodeAction { - if len(actions) <= 1 { - return actions // return early if no sorting necessary - } - var versionedUpgrade, latestUpgrade, resetAction protocol.CodeAction - var chosenVersionedUpgrade string - var selected []protocol.CodeAction - - seenTitles := make(map[string]bool) - - for _, action := range actions { - if strings.HasPrefix(action.Title, upgradeCodeActionPrefix) { - if v := getUpgradeVersion(action); v == "latest" && latestUpgrade.Title == "" { - latestUpgrade = action - } else if versionedUpgrade.Title == "" || semver.Compare(v, chosenVersionedUpgrade) > 0 { - chosenVersionedUpgrade = v - versionedUpgrade = action - } - } else if strings.HasPrefix(action.Title, "Reset govulncheck") { - resetAction = action - } else if !seenTitles[action.Command.Title] { - seenTitles[action.Command.Title] = true - selected = append(selected, action) - } - } - if versionedUpgrade.Title != "" { - selected = append(selected, versionedUpgrade) - } - if latestUpgrade.Title != "" { - selected = 
append(selected, latestUpgrade) - } - if resetAction.Title != "" { - selected = append(selected, resetAction) - } - return selected -} - -func getUpgradeVersion(p protocol.CodeAction) string { - return strings.TrimPrefix(p.Title, upgradeCodeActionPrefix) -} diff --git a/gopls/internal/lsp/mod/format.go b/gopls/internal/lsp/mod/format.go deleted file mode 100644 index daa12dac9a4..00000000000 --- a/gopls/internal/lsp/mod/format.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package mod - -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/event" -) - -func Format(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) ([]protocol.TextEdit, error) { - ctx, done := event.Start(ctx, "mod.Format") - defer done() - - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil { - return nil, err - } - formatted, err := pm.File.Format() - if err != nil { - return nil, err - } - // Calculate the edits to be made due to the change. - diffs := snapshot.Options().ComputeEdits(string(pm.Mapper.Content), string(formatted)) - return source.ToProtocolEdits(pm.Mapper, diffs) -} diff --git a/gopls/internal/lsp/mod/hover.go b/gopls/internal/lsp/mod/hover.go deleted file mode 100644 index b39993b2924..00000000000 --- a/gopls/internal/lsp/mod/hover.go +++ /dev/null @@ -1,378 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package mod - -import ( - "bytes" - "context" - "fmt" - "sort" - "strings" - - "golang.org/x/mod/modfile" - "golang.org/x/mod/semver" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/vulncheck" - "golang.org/x/tools/gopls/internal/vulncheck/govulncheck" - "golang.org/x/tools/gopls/internal/vulncheck/osv" - "golang.org/x/tools/internal/event" -) - -func Hover(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, position protocol.Position) (*protocol.Hover, error) { - var found bool - for _, uri := range snapshot.ModFiles() { - if fh.URI() == uri { - found = true - break - } - } - - // We only provide hover information for the view's go.mod files. - if !found { - return nil, nil - } - - ctx, done := event.Start(ctx, "mod.Hover") - defer done() - - // Get the position of the cursor. - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil { - return nil, fmt.Errorf("getting modfile handle: %w", err) - } - offset, err := pm.Mapper.PositionOffset(position) - if err != nil { - return nil, fmt.Errorf("computing cursor position: %w", err) - } - - // If the cursor position is on a module statement - if hover, ok := hoverOnModuleStatement(ctx, pm, offset, snapshot, fh); ok { - return hover, nil - } - return hoverOnRequireStatement(ctx, pm, offset, snapshot, fh) -} - -func hoverOnRequireStatement(ctx context.Context, pm *source.ParsedModule, offset int, snapshot source.Snapshot, fh source.FileHandle) (*protocol.Hover, error) { - // Confirm that the cursor is at the position of a require statement. - var req *modfile.Require - var startOffset, endOffset int - for _, r := range pm.File.Require { - dep := []byte(r.Mod.Path) - s, e := r.Syntax.Start.Byte, r.Syntax.End.Byte - i := bytes.Index(pm.Mapper.Content[s:e], dep) - if i == -1 { - continue - } - // Shift the start position to the location of the - // dependency within the require statement. 
- startOffset, endOffset = s+i, e - if startOffset <= offset && offset <= endOffset { - req = r - break - } - } - // TODO(hyangah): find position for info about vulnerabilities in Go - - // The cursor position is not on a require statement. - if req == nil { - return nil, nil - } - - // Get the vulnerability info. - fromGovulncheck := true - vs := snapshot.View().Vulnerabilities(fh.URI())[fh.URI()] - if vs == nil && snapshot.Options().Vulncheck == source.ModeVulncheckImports { - var err error - vs, err = snapshot.ModVuln(ctx, fh.URI()) - if err != nil { - return nil, err - } - fromGovulncheck = false - } - affecting, nonaffecting, osvs := lookupVulns(vs, req.Mod.Path, req.Mod.Version) - - // Get the `go mod why` results for the given file. - why, err := snapshot.ModWhy(ctx, fh) - if err != nil { - return nil, err - } - explanation, ok := why[req.Mod.Path] - if !ok { - return nil, nil - } - - // Get the range to highlight for the hover. - // TODO(hyangah): adjust the hover range to include the version number - // to match the diagnostics' range. 
- rng, err := pm.Mapper.OffsetRange(startOffset, endOffset) - if err != nil { - return nil, err - } - options := snapshot.Options() - isPrivate := snapshot.View().IsGoPrivatePath(req.Mod.Path) - header := formatHeader(req.Mod.Path, options) - explanation = formatExplanation(explanation, req, options, isPrivate) - vulns := formatVulnerabilities(affecting, nonaffecting, osvs, options, fromGovulncheck) - - return &protocol.Hover{ - Contents: protocol.MarkupContent{ - Kind: options.PreferredContentFormat, - Value: header + vulns + explanation, - }, - Range: rng, - }, nil -} - -func hoverOnModuleStatement(ctx context.Context, pm *source.ParsedModule, offset int, snapshot source.Snapshot, fh source.FileHandle) (*protocol.Hover, bool) { - module := pm.File.Module - if module == nil { - return nil, false // no module stmt - } - if offset < module.Syntax.Start.Byte || offset > module.Syntax.End.Byte { - return nil, false // cursor not in module stmt - } - - rng, err := pm.Mapper.OffsetRange(module.Syntax.Start.Byte, module.Syntax.End.Byte) - if err != nil { - return nil, false - } - fromGovulncheck := true - vs := snapshot.View().Vulnerabilities(fh.URI())[fh.URI()] - - if vs == nil && snapshot.Options().Vulncheck == source.ModeVulncheckImports { - vs, err = snapshot.ModVuln(ctx, fh.URI()) - if err != nil { - return nil, false - } - fromGovulncheck = false - } - modpath := "stdlib" - goVersion := snapshot.View().GoVersionString() - affecting, nonaffecting, osvs := lookupVulns(vs, modpath, goVersion) - options := snapshot.Options() - vulns := formatVulnerabilities(affecting, nonaffecting, osvs, options, fromGovulncheck) - - return &protocol.Hover{ - Contents: protocol.MarkupContent{ - Kind: options.PreferredContentFormat, - Value: vulns, - }, - Range: rng, - }, true -} - -func formatHeader(modpath string, options *source.Options) string { - var b strings.Builder - // Write the heading as an H3. 
- b.WriteString("#### " + modpath) - if options.PreferredContentFormat == protocol.Markdown { - b.WriteString("\n\n") - } else { - b.WriteRune('\n') - } - return b.String() -} - -func lookupVulns(vulns *vulncheck.Result, modpath, version string) (affecting, nonaffecting []*govulncheck.Finding, osvs map[string]*osv.Entry) { - if vulns == nil || len(vulns.Entries) == 0 { - return nil, nil, nil - } - for _, finding := range vulns.Findings { - vuln, typ := foundVuln(finding) - if vuln.Module != modpath { - continue - } - // It is possible that the source code was changed since the last - // govulncheck run and information in the `vulns` info is stale. - // For example, imagine that a user is in the middle of updating - // problematic modules detected by the govulncheck run by applying - // quick fixes. Stale diagnostics can be confusing and prevent the - // user from quickly locating the next module to fix. - // Ideally we should rerun the analysis with the updated module - // dependencies or any other code changes, but we are not yet - // in the position of automatically triggering the analysis - // (govulncheck can take a while). We also don't know exactly what - // part of source code was changed since `vulns` was computed. - // As a heuristic, we assume that a user upgrades the affecting - // module to the version with the fix or the latest one, and if the - // version in the require statement is equal to or higher than the - // fixed version, skip the vulnerability information in the hover. - // Eventually, the user has to rerun govulncheck. - if finding.FixedVersion != "" && semver.IsValid(version) && semver.Compare(finding.FixedVersion, version) <= 0 { - continue - } - switch typ { - case vulnCalled: - affecting = append(affecting, finding) - case vulnImported: - nonaffecting = append(nonaffecting, finding) - } - } - - // Remove affecting elements from nonaffecting. 
- // An OSV entry can appear in both lists if an OSV entry covers - // multiple packages imported but not all vulnerable symbols are used. - // The current wording of hover message doesn't clearly - // present this case well IMO, so let's skip reporting nonaffecting. - if len(affecting) > 0 && len(nonaffecting) > 0 { - affectingSet := map[string]bool{} - for _, f := range affecting { - affectingSet[f.OSV] = true - } - n := 0 - for _, v := range nonaffecting { - if !affectingSet[v.OSV] { - nonaffecting[n] = v - n++ - } - } - nonaffecting = nonaffecting[:n] - } - sort.Slice(nonaffecting, func(i, j int) bool { return nonaffecting[i].OSV < nonaffecting[j].OSV }) - sort.Slice(affecting, func(i, j int) bool { return affecting[i].OSV < affecting[j].OSV }) - return affecting, nonaffecting, vulns.Entries -} - -func fixedVersion(fixed string) string { - if fixed == "" { - return "No fix is available." - } - return "Fixed in " + fixed + "." -} - -func formatVulnerabilities(affecting, nonaffecting []*govulncheck.Finding, osvs map[string]*osv.Entry, options *source.Options, fromGovulncheck bool) string { - if len(osvs) == 0 || (len(affecting) == 0 && len(nonaffecting) == 0) { - return "" - } - byOSV := func(findings []*govulncheck.Finding) map[string][]*govulncheck.Finding { - m := make(map[string][]*govulncheck.Finding) - for _, f := range findings { - m[f.OSV] = append(m[f.OSV], f) - } - return m - } - affectingByOSV := byOSV(affecting) - nonaffectingByOSV := byOSV(nonaffecting) - - // TODO(hyangah): can we use go templates to generate hover messages? - // Then, we can use a different template for markdown case. 
- useMarkdown := options.PreferredContentFormat == protocol.Markdown - - var b strings.Builder - - if len(affectingByOSV) > 0 { - // TODO(hyangah): make the message more eyecatching (icon/codicon/color) - if len(affectingByOSV) == 1 { - fmt.Fprintf(&b, "\n**WARNING:** Found %d reachable vulnerability.\n", len(affectingByOSV)) - } else { - fmt.Fprintf(&b, "\n**WARNING:** Found %d reachable vulnerabilities.\n", len(affectingByOSV)) - } - } - for id, findings := range affectingByOSV { - fix := fixedVersion(findings[0].FixedVersion) - pkgs := vulnerablePkgsInfo(findings, useMarkdown) - osvEntry := osvs[id] - - if useMarkdown { - fmt.Fprintf(&b, "- [**%v**](%v) %v%v\n%v\n", id, href(id), osvEntry.Summary, pkgs, fix) - } else { - fmt.Fprintf(&b, " - [%v] %v (%v) %v%v\n", id, osvEntry.Summary, href(id), pkgs, fix) - } - } - if len(nonaffecting) > 0 { - if fromGovulncheck { - fmt.Fprintf(&b, "\n**Note:** The project imports packages with known vulnerabilities, but does not call the vulnerable code.\n") - } else { - fmt.Fprintf(&b, "\n**Note:** The project imports packages with known vulnerabilities. 
Use `govulncheck` to check if the project uses vulnerable symbols.\n") - } - } - for k, findings := range nonaffectingByOSV { - fix := fixedVersion(findings[0].FixedVersion) - pkgs := vulnerablePkgsInfo(findings, useMarkdown) - osvEntry := osvs[k] - - if useMarkdown { - fmt.Fprintf(&b, "- [%v](%v) %v%v\n%v\n", k, href(k), osvEntry.Summary, pkgs, fix) - } else { - fmt.Fprintf(&b, " - [%v] %v (%v) %v\n%v\n", k, osvEntry.Summary, href(k), pkgs, fix) - } - } - b.WriteString("\n") - return b.String() -} - -func vulnerablePkgsInfo(findings []*govulncheck.Finding, useMarkdown bool) string { - var b strings.Builder - seen := map[string]bool{} - for _, f := range findings { - p := f.Trace[0].Package - if !seen[p] { - seen[p] = true - if useMarkdown { - b.WriteString("\n * `") - } else { - b.WriteString("\n ") - } - b.WriteString(p) - if useMarkdown { - b.WriteString("`") - } - } - } - return b.String() -} - -func formatExplanation(text string, req *modfile.Require, options *source.Options, isPrivate bool) string { - text = strings.TrimSuffix(text, "\n") - splt := strings.Split(text, "\n") - length := len(splt) - - var b strings.Builder - - // If the explanation is 2 lines, then it is of the form: - // # golang.org/x/text/encoding - // (main module does not need package golang.org/x/text/encoding) - if length == 2 { - b.WriteString(splt[1]) - return b.String() - } - - imp := splt[length-1] // import path - reference := imp - // See golang/go#36998: don't link to modules matching GOPRIVATE. 
- if !isPrivate && options.PreferredContentFormat == protocol.Markdown { - target := imp - if strings.ToLower(options.LinkTarget) == "pkg.go.dev" { - target = strings.Replace(target, req.Mod.Path, req.Mod.String(), 1) - } - reference = fmt.Sprintf("[%s](%s)", imp, source.BuildLink(options.LinkTarget, target, "")) - } - b.WriteString("This module is necessary because " + reference + " is imported in") - - // If the explanation is 3 lines, then it is of the form: - // # golang.org/x/tools - // modtest - // golang.org/x/tools/go/packages - if length == 3 { - msg := fmt.Sprintf(" `%s`.", splt[1]) - b.WriteString(msg) - return b.String() - } - - // If the explanation is more than 3 lines, then it is of the form: - // # golang.org/x/text/language - // rsc.io/quote - // rsc.io/sampler - // golang.org/x/text/language - b.WriteString(":\n```text") - dash := "" - for _, imp := range splt[1 : length-1] { - dash += "-" - b.WriteString("\n" + dash + " " + imp) - } - b.WriteString("\n```") - return b.String() -} diff --git a/gopls/internal/lsp/prompt_test.go b/gopls/internal/lsp/prompt_test.go deleted file mode 100644 index d268d1f3a0c..00000000000 --- a/gopls/internal/lsp/prompt_test.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package lsp - -import ( - "path/filepath" - "sync" - "sync/atomic" - "testing" -) - -func TestAcquireFileLock(t *testing.T) { - name := filepath.Join(t.TempDir(), "config.json") - - const concurrency = 100 - var acquired int32 - var releasers [concurrency]func() - defer func() { - for _, r := range releasers { - if r != nil { - r() - } - } - }() - - var wg sync.WaitGroup - for i := range releasers { - i := i - wg.Add(1) - go func() { - defer wg.Done() - - release, ok, err := acquireLockFile(name) - if err != nil { - t.Errorf("Acquire failed: %v", err) - return - } - if ok { - atomic.AddInt32(&acquired, 1) - releasers[i] = release - } - }() - } - - wg.Wait() - - if acquired != 1 { - t.Errorf("Acquire succeeded %d times, expected exactly 1", acquired) - } -} - -func TestReleaseAndAcquireFileLock(t *testing.T) { - name := filepath.Join(t.TempDir(), "config.json") - - acquire := func() (func(), bool) { - t.Helper() - release, ok, err := acquireLockFile(name) - if err != nil { - t.Fatal(err) - } - return release, ok - } - - release, ok := acquire() - if !ok { - t.Fatal("failed to Acquire") - } - if release2, ok := acquire(); ok { - release() - release2() - t.Fatalf("Acquire succeeded unexpectedly") - } - - release() - release3, ok := acquire() - release3() - if !ok { - t.Fatalf("failed to Acquire") - } -} diff --git a/gopls/internal/lsp/protocol/context.go b/gopls/internal/lsp/protocol/context.go deleted file mode 100644 index a9ef48d0f0b..00000000000 --- a/gopls/internal/lsp/protocol/context.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package protocol - -import ( - "bytes" - "context" - - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/core" - "golang.org/x/tools/internal/event/export" - "golang.org/x/tools/internal/event/label" - "golang.org/x/tools/internal/xcontext" -) - -type contextKey int - -const ( - clientKey = contextKey(iota) -) - -func WithClient(ctx context.Context, client Client) context.Context { - return context.WithValue(ctx, clientKey, client) -} - -func LogEvent(ctx context.Context, ev core.Event, lm label.Map, mt MessageType) context.Context { - client, ok := ctx.Value(clientKey).(Client) - if !ok { - return ctx - } - buf := &bytes.Buffer{} - p := export.Printer{} - p.WriteEvent(buf, ev, lm) - msg := &LogMessageParams{Type: mt, Message: buf.String()} - // Handle messages generated via event.Error, which won't have a level Label. - if event.IsError(ev) { - msg.Type = Error - } - // TODO(adonovan): the goroutine here could cause log - // messages to be delivered out of order! Use a queue. - go client.LogMessage(xcontext.Detach(ctx), msg) - return ctx -} diff --git a/gopls/internal/lsp/protocol/generate/generate.go b/gopls/internal/lsp/protocol/generate/generate.go deleted file mode 100644 index 0496b7d060c..00000000000 --- a/gopls/internal/lsp/protocol/generate/generate.go +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build go1.19 -// +build go1.19 - -package main - -import ( - "bytes" - "fmt" - "log" - "strings" -) - -// a newType is a type that needs a name and a definition -// These are the various types that the json specification doesn't name -type newType struct { - name string - properties Properties // for struct/literal types - items []*Type // for other types ("and", "tuple") - line int - kind string // Or, And, Tuple, Lit, Map - typ *Type -} - -func generateDoc(out *bytes.Buffer, doc string) { - if doc == "" { - return - } - - if !strings.Contains(doc, "\n") { - fmt.Fprintf(out, "// %s\n", doc) - return - } - var list bool - for _, line := range strings.Split(doc, "\n") { - // Lists in metaModel.json start with a dash. - // To make a go doc list they have to be preceded - // by a blank line, and indented. - // (see type TextDccumentFilter in protocol.go) - if len(line) > 0 && line[0] == '-' { - if !list { - list = true - fmt.Fprintf(out, "//\n") - } - fmt.Fprintf(out, "// %s\n", line) - } else { - if len(line) == 0 { - list = false - } - fmt.Fprintf(out, "// %s\n", line) - } - } -} - -// decide if a property is optional, and if it needs a * -// return ",omitempty" if it is optional, and "*" if it needs a pointer -func propStar(name string, t NameType, gotype string) (string, string) { - var opt, star string - if t.Optional { - star = "*" - opt = ",omitempty" - } - if strings.HasPrefix(gotype, "[]") || strings.HasPrefix(gotype, "map[") { - star = "" // passed by reference, so no need for * - } else { - switch gotype { - case "bool", "uint32", "int32", "string", "interface{}": - star = "" // gopls compatibility if t.Optional - } - } - ostar, oopt := star, opt - if newStar, ok := goplsStar[prop{name, t.Name}]; ok { - switch newStar { - case nothing: - star, opt = "", "" - case wantStar: - star, opt = "*", "" - case wantOpt: - star, opt = "", ",omitempty" - case wantOptStar: - star, opt = "*", ",omitempty" - } - if star == ostar && opt == oopt { // no change - 
log.Printf("goplsStar[ {%q, %q} ](%d) useless %s/%s %s/%s", name, t.Name, t.Line, ostar, star, oopt, opt) - } - usedGoplsStar[prop{name, t.Name}] = true - } - - return opt, star -} - -func goName(s string) string { - // Go naming conventions - if strings.HasSuffix(s, "Id") { - s = s[:len(s)-len("Id")] + "ID" - } else if strings.HasSuffix(s, "Uri") { - s = s[:len(s)-3] + "URI" - } else if s == "uri" { - s = "URI" - } else if s == "id" { - s = "ID" - } - - // renames for temporary GOPLS compatibility - if news := goplsType[s]; news != "" { - usedGoplsType[s] = true - s = news - } - // Names beginning _ are not exported - if strings.HasPrefix(s, "_") { - s = strings.Replace(s, "_", "X", 1) - } - if s != "string" { // base types are unchanged (textDocuemnt/diagnostic) - // Title is deprecated, but a) s is only one word, b) replacement is too heavy-weight - s = strings.Title(s) - } - return s -} diff --git a/gopls/internal/lsp/protocol/generate/main.go b/gopls/internal/lsp/protocol/generate/main.go deleted file mode 100644 index 6ac5813e6df..00000000000 --- a/gopls/internal/lsp/protocol/generate/main.go +++ /dev/null @@ -1,390 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.19 -// +build go1.19 - -// The generate command generates Go declarations from VSCode's -// description of the Language Server Protocol. -// -// To run it, type 'go generate' in the parent (protocol) directory. -package main - -// see https://github.com/golang/go/issues/61217 for discussion of an issue - -import ( - "bytes" - "encoding/json" - "flag" - "fmt" - "go/format" - "log" - "os" - "os/exec" - "path/filepath" - "strings" -) - -const vscodeRepo = "https://github.com/microsoft/vscode-languageserver-node" - -// lspGitRef names a branch or tag in vscodeRepo. -// It implicitly determines the protocol version of the LSP used by gopls. 
-// For example, tag release/protocol/3.17.3 of the repo defines protocol version 3.17.0. -// (Point releases are reflected in the git tag version even when they are cosmetic -// and don't change the protocol.) -var lspGitRef = "release/protocol/3.17.4-next.2" - -var ( - repodir = flag.String("d", "", "directory containing clone of "+vscodeRepo) - outputdir = flag.String("o", ".", "output directory") - // PJW: not for real code - cmpdir = flag.String("c", "", "directory of earlier code") - doboth = flag.String("b", "", "generate and compare") - lineNumbers = flag.Bool("l", false, "add line numbers to generated output") -) - -func main() { - log.SetFlags(log.Lshortfile) // log file name and line number, not time - flag.Parse() - - processinline() -} - -func processinline() { - // A local repository may be specified during debugging. - // The default behavior is to download the canonical version. - if *repodir == "" { - tmpdir, err := os.MkdirTemp("", "") - if err != nil { - log.Fatal(err) - } - defer os.RemoveAll(tmpdir) // ignore error - - // Clone the repository. 
- cmd := exec.Command("git", "clone", "--quiet", "--depth=1", "-c", "advice.detachedHead=false", vscodeRepo, "--branch="+lspGitRef, "--single-branch", tmpdir) - cmd.Stdout = os.Stderr - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - log.Fatal(err) - } - - *repodir = tmpdir - } else { - lspGitRef = fmt.Sprintf("(not git, local dir %s)", *repodir) - } - - model := parse(filepath.Join(*repodir, "protocol/metaModel.json")) - - findTypeNames(model) - generateOutput(model) - - fileHdr = fileHeader(model) - - // write the files - writeclient() - writeserver() - writeprotocol() - writejsons() - - checkTables() -} - -// common file header for output files -var fileHdr string - -func writeclient() { - out := new(bytes.Buffer) - fmt.Fprintln(out, fileHdr) - out.WriteString( - `import ( - "context" - "encoding/json" - - "golang.org/x/tools/internal/jsonrpc2" -) -`) - out.WriteString("type Client interface {\n") - for _, k := range cdecls.keys() { - out.WriteString(cdecls[k]) - } - out.WriteString("}\n\n") - out.WriteString("func clientDispatch(ctx context.Context, client Client, reply jsonrpc2.Replier, r jsonrpc2.Request) (bool, error) {\n") - out.WriteString("\tswitch r.Method() {\n") - for _, k := range ccases.keys() { - out.WriteString(ccases[k]) - } - out.WriteString(("\tdefault:\n\t\treturn false, nil\n\t}\n}\n\n")) - for _, k := range cfuncs.keys() { - out.WriteString(cfuncs[k]) - } - - x, err := format.Source(out.Bytes()) - if err != nil { - os.WriteFile("/tmp/a.go", out.Bytes(), 0644) - log.Fatalf("tsclient.go: %v", err) - } - - if err := os.WriteFile(filepath.Join(*outputdir, "tsclient.go"), x, 0644); err != nil { - log.Fatalf("%v writing tsclient.go", err) - } -} - -func writeserver() { - out := new(bytes.Buffer) - fmt.Fprintln(out, fileHdr) - out.WriteString( - `import ( - "context" - "encoding/json" - - "golang.org/x/tools/internal/jsonrpc2" -) -`) - out.WriteString("type Server interface {\n") - for _, k := range sdecls.keys() { - 
out.WriteString(sdecls[k]) - } - out.WriteString(` NonstandardRequest(ctx context.Context, method string, params interface{}) (interface{}, error) -} - -func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, r jsonrpc2.Request) (bool, error) { - switch r.Method() { -`) - for _, k := range scases.keys() { - out.WriteString(scases[k]) - } - out.WriteString(("\tdefault:\n\t\treturn false, nil\n\t}\n}\n\n")) - for _, k := range sfuncs.keys() { - out.WriteString(sfuncs[k]) - } - out.WriteString(`func (s *serverDispatcher) NonstandardRequest(ctx context.Context, method string, params interface{}) (interface{}, error) { - var result interface{} - if err := s.sender.Call(ctx, method, params, &result); err != nil { - return nil, err - } - return result, nil -} -`) - - x, err := format.Source(out.Bytes()) - if err != nil { - os.WriteFile("/tmp/a.go", out.Bytes(), 0644) - log.Fatalf("tsserver.go: %v", err) - } - - if err := os.WriteFile(filepath.Join(*outputdir, "tsserver.go"), x, 0644); err != nil { - log.Fatalf("%v writing tsserver.go", err) - } -} - -func writeprotocol() { - out := new(bytes.Buffer) - fmt.Fprintln(out, fileHdr) - out.WriteString("import \"encoding/json\"\n\n") - - // The followiing are unneeded, but make the new code a superset of the old - hack := func(newer, existing string) { - if _, ok := types[existing]; !ok { - log.Fatalf("types[%q] not found", existing) - } - types[newer] = strings.Replace(types[existing], existing, newer, 1) - } - hack("ConfigurationParams", "ParamConfiguration") - hack("InitializeParams", "ParamInitialize") - hack("PreviousResultId", "PreviousResultID") - hack("WorkspaceFoldersServerCapabilities", "WorkspaceFolders5Gn") - hack("_InitializeParams", "XInitializeParams") - // and some aliases to make the new code contain the old - types["PrepareRename2Gn"] = "type PrepareRename2Gn = Msg_PrepareRename2Gn // (alias) line 13927\n" - types["PrepareRenameResult"] = "type PrepareRenameResult = Msg_PrepareRename2Gn 
// (alias) line 13927\n" - for _, k := range types.keys() { - if k == "WatchKind" { - types[k] = "type WatchKind = uint32 // line 13505" // strict gopls compatibility needs the '=' - } - out.WriteString(types[k]) - } - - out.WriteString("\nconst (\n") - for _, k := range consts.keys() { - out.WriteString(consts[k]) - } - out.WriteString(")\n\n") - x, err := format.Source(out.Bytes()) - if err != nil { - os.WriteFile("/tmp/a.go", out.Bytes(), 0644) - log.Fatalf("tsprotocol.go: %v", err) - } - if err := os.WriteFile(filepath.Join(*outputdir, "tsprotocol.go"), x, 0644); err != nil { - log.Fatalf("%v writing tsprotocol.go", err) - } -} - -func writejsons() { - out := new(bytes.Buffer) - fmt.Fprintln(out, fileHdr) - out.WriteString("import \"encoding/json\"\n\n") - out.WriteString("import \"fmt\"\n") - - out.WriteString(` -// UnmarshalError indicates that a JSON value did not conform to -// one of the expected cases of an LSP union type. -type UnmarshalError struct { - msg string -} - -func (e UnmarshalError) Error() string { - return e.msg -} -`) - - for _, k := range jsons.keys() { - out.WriteString(jsons[k]) - } - x, err := format.Source(out.Bytes()) - if err != nil { - os.WriteFile("/tmp/a.go", out.Bytes(), 0644) - log.Fatalf("tsjson.go: %v", err) - } - if err := os.WriteFile(filepath.Join(*outputdir, "tsjson.go"), x, 0644); err != nil { - log.Fatalf("%v writing tsjson.go", err) - } -} - -// create the common file header for the output files -func fileHeader(model Model) string { - fname := filepath.Join(*repodir, ".git", "HEAD") - buf, err := os.ReadFile(fname) - if err != nil { - log.Fatal(err) - } - buf = bytes.TrimSpace(buf) - var githash string - if len(buf) == 40 { - githash = string(buf[:40]) - } else if bytes.HasPrefix(buf, []byte("ref: ")) { - fname = filepath.Join(*repodir, ".git", string(buf[5:])) - buf, err = os.ReadFile(fname) - if err != nil { - log.Fatal(err) - } - githash = string(buf[:40]) - } else { - log.Fatalf("githash cannot be recovered from 
%s", fname) - } - - format := `// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated for LSP. DO NOT EDIT. - -package protocol - -// Code generated from %[1]s at ref %[2]s (hash %[3]s). -// %[4]s/blob/%[2]s/%[1]s -// LSP metaData.version = %[5]s. - -` - return fmt.Sprintf(format, - "protocol/metaModel.json", // 1 - lspGitRef, // 2 - githash, // 3 - vscodeRepo, // 4 - model.Version.Version) // 5 -} - -func parse(fname string) Model { - buf, err := os.ReadFile(fname) - if err != nil { - log.Fatal(err) - } - buf = addLineNumbers(buf) - var model Model - if err := json.Unmarshal(buf, &model); err != nil { - log.Fatal(err) - } - return model -} - -// Type.Value has to be treated specially for literals and maps -func (t *Type) UnmarshalJSON(data []byte) error { - // First unmarshal only the unambiguous fields. - var x struct { - Kind string `json:"kind"` - Items []*Type `json:"items"` - Element *Type `json:"element"` - Name string `json:"name"` - Key *Type `json:"key"` - Value any `json:"value"` - Line int `json:"line"` - } - if err := json.Unmarshal(data, &x); err != nil { - return err - } - *t = Type{ - Kind: x.Kind, - Items: x.Items, - Element: x.Element, - Name: x.Name, - Value: x.Value, - Line: x.Line, - } - - // Then unmarshal the 'value' field based on the kind. - // This depends on Unmarshal ignoring fields it doesn't know about. 
- switch x.Kind { - case "map": - var x struct { - Key *Type `json:"key"` - Value *Type `json:"value"` - } - if err := json.Unmarshal(data, &x); err != nil { - return fmt.Errorf("Type.kind=map: %v", err) - } - t.Key = x.Key - t.Value = x.Value - - case "literal": - var z struct { - Value ParseLiteral `json:"value"` - } - - if err := json.Unmarshal(data, &z); err != nil { - return fmt.Errorf("Type.kind=literal: %v", err) - } - t.Value = z.Value - - case "base", "reference", "array", "and", "or", "tuple", - "stringLiteral": - // no-op. never seen integerLiteral or booleanLiteral. - - default: - return fmt.Errorf("cannot decode Type.kind %q: %s", x.Kind, data) - } - return nil -} - -// which table entries were not used -func checkTables() { - for k := range disambiguate { - if !usedDisambiguate[k] { - log.Printf("disambiguate[%v] unused", k) - } - } - for k := range renameProp { - if !usedRenameProp[k] { - log.Printf("renameProp {%q, %q} unused", k[0], k[1]) - } - } - for k := range goplsStar { - if !usedGoplsStar[k] { - log.Printf("goplsStar {%q, %q} unused", k[0], k[1]) - } - } - for k := range goplsType { - if !usedGoplsType[k] { - log.Printf("unused goplsType[%q]->%s", k, goplsType[k]) - } - } -} diff --git a/gopls/internal/lsp/protocol/generate/main_test.go b/gopls/internal/lsp/protocol/generate/main_test.go deleted file mode 100644 index 5f336690687..00000000000 --- a/gopls/internal/lsp/protocol/generate/main_test.go +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build go1.19 -// +build go1.19 - -package main - -import ( - "encoding/json" - "fmt" - "log" - "os" - "testing" -) - -// These tests require the result of -//"git clone https://github.com/microsoft/vscode-languageserver-node" in the HOME directory - -// this is not a test, but a way to get code coverage, -// (in vscode, just run the test with "go.coverOnSingleTest": true) -func TestAll(t *testing.T) { - t.Skip("needs vscode-languageserver-node repository") - *lineNumbers = true - log.SetFlags(log.Lshortfile) - main() -} - -// check that the parsed file includes all the information -// from the json file. This test will fail if the spec -// introduces new fields. (one can test this test by -// commenting out the version field in Model.) -func TestParseContents(t *testing.T) { - t.Skip("needs vscode-languageserver-node repository") - log.SetFlags(log.Lshortfile) - - // compute our parse of the specification - dir := os.Getenv("HOME") + "/vscode-languageserver-node" - fname := dir + "/protocol/metaModel.json" - v := parse(fname) - out, err := json.Marshal(v) - if err != nil { - t.Fatal(err) - } - var our interface{} - if err := json.Unmarshal(out, &our); err != nil { - t.Fatal(err) - } - - // process the json file - buf, err := os.ReadFile(fname) - if err != nil { - t.Fatalf("could not read metaModel.json: %v", err) - } - var raw interface{} - if err := json.Unmarshal(buf, &raw); err != nil { - t.Fatal(err) - } - - // convert to strings showing the fields - them := flatten(raw) - us := flatten(our) - - // everything in them should be in us - lesser := make(sortedMap[bool]) - for _, s := range them { - lesser[s] = true - } - greater := make(sortedMap[bool]) // set of fields we have - for _, s := range us { - greater[s] = true - } - for _, k := range lesser.keys() { // set if fields they have - if !greater[k] { - t.Errorf("missing %s", k) - } - } -} - -// flatten(nil) = "nil" -// flatten(v string) = fmt.Sprintf("%q", v) -// flatten(v float64)= fmt.Sprintf("%g", 
v) -// flatten(v bool) = fmt.Sprintf("%v", v) -// flatten(v []any) = []string{"[0]"flatten(v[0]), "[1]"flatten(v[1]), ...} -// flatten(v map[string]any) = {"key1": flatten(v["key1"]), "key2": flatten(v["key2"]), ...} -func flatten(x any) []string { - switch v := x.(type) { - case nil: - return []string{"nil"} - case string: - return []string{fmt.Sprintf("%q", v)} - case float64: - return []string{fmt.Sprintf("%g", v)} - case bool: - return []string{fmt.Sprintf("%v", v)} - case []any: - var ans []string - for i, x := range v { - idx := fmt.Sprintf("[%.3d]", i) - for _, s := range flatten(x) { - ans = append(ans, idx+s) - } - } - return ans - case map[string]any: - var ans []string - for k, x := range v { - idx := fmt.Sprintf("%q:", k) - for _, s := range flatten(x) { - ans = append(ans, idx+s) - } - } - return ans - default: - log.Fatalf("unexpected type %T", x) - return nil - } -} diff --git a/gopls/internal/lsp/protocol/generate/tables.go b/gopls/internal/lsp/protocol/generate/tables.go deleted file mode 100644 index aded1973a46..00000000000 --- a/gopls/internal/lsp/protocol/generate/tables.go +++ /dev/null @@ -1,341 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.19 -// +build go1.19 - -package main - -import "log" - -// prop combines the name of a property with the name of the structure it is in. -type prop [2]string - -const ( - nothing = iota - wantStar - wantOpt - wantOptStar -) - -// goplsStar records the optionality of each field in the protocol. -// The comments are vague hints as to why removing the line is not trivial. 
-// A.B.C.D means that one of B or C would change to a pointer -// so a test or initialization would be needed -var goplsStar = map[prop]int{ - {"ClientCapabilities", "textDocument"}: wantOpt, // A.B.C.D at fake/editor.go:255 - {"ClientCapabilities", "window"}: wantOpt, // regtest failures - {"ClientCapabilities", "workspace"}: wantOpt, // regtest failures - {"CodeAction", "kind"}: wantOpt, // A.B.C.D - - {"CodeActionClientCapabilities", "codeActionLiteralSupport"}: wantOpt, // regtest failures - - {"CompletionClientCapabilities", "completionItem"}: wantOpt, // A.B.C.D - {"CompletionClientCapabilities", "insertTextMode"}: wantOpt, // A.B.C.D - {"CompletionItem", "kind"}: wantOpt, // need temporary variables - {"CompletionParams", "context"}: wantOpt, // needs nil checks - - {"Diagnostic", "severity"}: wantOpt, // nil checks or more careful thought - {"DidSaveTextDocumentParams", "text"}: wantOptStar, // capabilities_test.go:112 logic - {"DocumentHighlight", "kind"}: wantOpt, // need temporary variables - {"Hover", "range"}: wantOpt, // complex expressions - {"InlayHint", "kind"}: wantOpt, // temporary variables - - {"Lit_CompletionClientCapabilities_completionItem", "tagSupport"}: nothing, // A.B.C. - {"Lit_SemanticTokensClientCapabilities_requests", "full"}: nothing, // A.B.C.D - {"Lit_SemanticTokensClientCapabilities_requests", "range"}: nothing, // A.B.C.D - {"Lit_SemanticTokensClientCapabilities_requests_full_Item1", "delta"}: nothing, // A.B.C.D - {"Lit_SemanticTokensOptions_full_Item1", "delta"}: nothing, // A.B.C. 
- - {"Lit_TextDocumentContentChangeEvent_Item0", "range"}: wantStar, // == nil test - - {"TextDocumentClientCapabilities", "codeAction"}: wantOpt, // A.B.C.D - {"TextDocumentClientCapabilities", "completion"}: wantOpt, // A.B.C.D - {"TextDocumentClientCapabilities", "documentSymbol"}: wantOpt, // A.B.C.D - {"TextDocumentClientCapabilities", "publishDiagnostics"}: wantOpt, //A.B.C.D - {"TextDocumentClientCapabilities", "semanticTokens"}: wantOpt, // A.B.C.D - {"TextDocumentSyncOptions", "change"}: wantOpt, // &constant - {"WorkDoneProgressParams", "workDoneToken"}: wantOpt, // regtest - {"WorkspaceClientCapabilities", "didChangeConfiguration"}: wantOpt, // A.B.C.D - {"WorkspaceClientCapabilities", "didChangeWatchedFiles"}: wantOpt, // A.B.C.D -} - -// keep track of which entries in goplsStar are used -var usedGoplsStar = make(map[prop]bool) - -// For gopls compatibility, use a different, typically more restrictive, type for some fields. -var renameProp = map[prop]string{ - {"CancelParams", "id"}: "interface{}", - {"Command", "arguments"}: "[]json.RawMessage", - {"CompletionItem", "textEdit"}: "TextEdit", - {"Diagnostic", "code"}: "interface{}", - {"Diagnostic", "data"}: "json.RawMessage", // delay unmarshalling quickfixes - - {"DocumentDiagnosticReportPartialResult", "relatedDocuments"}: "map[DocumentURI]interface{}", - - {"ExecuteCommandParams", "arguments"}: "[]json.RawMessage", - {"FoldingRange", "kind"}: "string", - {"Hover", "contents"}: "MarkupContent", - {"InlayHint", "label"}: "[]InlayHintLabelPart", - - {"RelatedFullDocumentDiagnosticReport", "relatedDocuments"}: "map[DocumentURI]interface{}", - {"RelatedUnchangedDocumentDiagnosticReport", "relatedDocuments"}: "map[DocumentURI]interface{}", - - // PJW: this one is tricky. 
- {"ServerCapabilities", "codeActionProvider"}: "interface{}", - - {"ServerCapabilities", "inlayHintProvider"}: "interface{}", - // slightly tricky - {"ServerCapabilities", "renameProvider"}: "interface{}", - // slightly tricky - {"ServerCapabilities", "semanticTokensProvider"}: "interface{}", - // slightly tricky - {"ServerCapabilities", "textDocumentSync"}: "interface{}", - {"TextDocumentEdit", "edits"}: "[]TextEdit", - {"TextDocumentSyncOptions", "save"}: "SaveOptions", - {"WorkspaceEdit", "documentChanges"}: "[]DocumentChanges", -} - -// which entries of renameProp were used -var usedRenameProp = make(map[prop]bool) - -type adjust struct { - prefix, suffix string -} - -// disambiguate specifies prefixes or suffixes to add to all values of -// some enum types to avoid name conflicts -var disambiguate = map[string]adjust{ - "CodeActionTriggerKind": {"CodeAction", ""}, - "CompletionItemKind": {"", "Completion"}, - "CompletionItemTag": {"Compl", ""}, - "DiagnosticSeverity": {"Severity", ""}, - "DocumentDiagnosticReportKind": {"Diagnostic", ""}, - "FileOperationPatternKind": {"", "Pattern"}, - "InlineCompletionTriggerKind": {"Inline", ""}, - "InsertTextFormat": {"", "TextFormat"}, - "SemanticTokenModifiers": {"Mod", ""}, - "SemanticTokenTypes": {"", "Type"}, - "SignatureHelpTriggerKind": {"Sig", ""}, - "SymbolTag": {"", "Symbol"}, - "WatchKind": {"Watch", ""}, -} - -// which entries of disambiguate got used -var usedDisambiguate = make(map[string]bool) - -// for gopls compatibility, replace generated type names with existing ones -var goplsType = map[string]string{ - "And_RegOpt_textDocument_colorPresentation": "WorkDoneProgressOptionsAndTextDocumentRegistrationOptions", - "ConfigurationParams": "ParamConfiguration", - "DocumentDiagnosticParams": "string", - "DocumentDiagnosticReport": "string", - "DocumentUri": "DocumentURI", - "InitializeParams": "ParamInitialize", - "LSPAny": "interface{}", - - "Lit_CodeActionClientCapabilities_codeActionLiteralSupport": 
"PCodeActionLiteralSupportPCodeAction", - "Lit_CodeActionClientCapabilities_codeActionLiteralSupport_codeActionKind": "FCodeActionKindPCodeActionLiteralSupport", - - "Lit_CodeActionClientCapabilities_resolveSupport": "PResolveSupportPCodeAction", - "Lit_CodeAction_disabled": "PDisabledMsg_textDocument_codeAction", - "Lit_CompletionClientCapabilities_completionItem": "PCompletionItemPCompletion", - "Lit_CompletionClientCapabilities_completionItemKind": "PCompletionItemKindPCompletion", - - "Lit_CompletionClientCapabilities_completionItem_insertTextModeSupport": "FInsertTextModeSupportPCompletionItem", - - "Lit_CompletionClientCapabilities_completionItem_resolveSupport": "FResolveSupportPCompletionItem", - "Lit_CompletionClientCapabilities_completionItem_tagSupport": "FTagSupportPCompletionItem", - - "Lit_CompletionClientCapabilities_completionList": "PCompletionListPCompletion", - "Lit_CompletionList_itemDefaults": "PItemDefaultsMsg_textDocument_completion", - "Lit_CompletionList_itemDefaults_editRange_Item1": "FEditRangePItemDefaults", - "Lit_CompletionOptions_completionItem": "PCompletionItemPCompletionProvider", - "Lit_DocumentSymbolClientCapabilities_symbolKind": "PSymbolKindPDocumentSymbol", - "Lit_DocumentSymbolClientCapabilities_tagSupport": "PTagSupportPDocumentSymbol", - "Lit_FoldingRangeClientCapabilities_foldingRange": "PFoldingRangePFoldingRange", - "Lit_FoldingRangeClientCapabilities_foldingRangeKind": "PFoldingRangeKindPFoldingRange", - "Lit_GeneralClientCapabilities_staleRequestSupport": "PStaleRequestSupportPGeneral", - "Lit_InitializeResult_serverInfo": "PServerInfoMsg_initialize", - "Lit_InlayHintClientCapabilities_resolveSupport": "PResolveSupportPInlayHint", - "Lit_MarkedString_Item1": "Msg_MarkedString", - "Lit_NotebookDocumentChangeEvent_cells": "PCellsPChange", - "Lit_NotebookDocumentChangeEvent_cells_structure": "FStructurePCells", - "Lit_NotebookDocumentFilter_Item0": "Msg_NotebookDocumentFilter", - - 
"Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item0": "PNotebookSelectorPNotebookDocumentSync", - - "Lit_PrepareRenameResult_Item1": "Msg_PrepareRename2Gn", - - "Lit_PublishDiagnosticsClientCapabilities_tagSupport": "PTagSupportPPublishDiagnostics", - "Lit_SemanticTokensClientCapabilities_requests": "PRequestsPSemanticTokens", - "Lit_SemanticTokensClientCapabilities_requests_full_Item1": "FFullPRequests", - "Lit_SemanticTokensClientCapabilities_requests_range_Item1": "FRangePRequests", - - "Lit_SemanticTokensOptions_full_Item1": "PFullESemanticTokensOptions", - "Lit_SemanticTokensOptions_range_Item1": "PRangeESemanticTokensOptions", - "Lit_ServerCapabilities_workspace": "Workspace6Gn", - - "Lit_ShowMessageRequestClientCapabilities_messageActionItem": "PMessageActionItemPShowMessage", - "Lit_SignatureHelpClientCapabilities_signatureInformation": "PSignatureInformationPSignatureHelp", - - "Lit_SignatureHelpClientCapabilities_signatureInformation_parameterInformation": "FParameterInformationPSignatureInformation", - - "Lit_TextDocumentContentChangeEvent_Item0": "Msg_TextDocumentContentChangeEvent", - "Lit_TextDocumentFilter_Item0": "Msg_TextDocumentFilter", - "Lit_TextDocumentFilter_Item1": "Msg_TextDocumentFilter", - "Lit_WorkspaceEditClientCapabilities_changeAnnotationSupport": "PChangeAnnotationSupportPWorkspaceEdit", - "Lit_WorkspaceSymbolClientCapabilities_resolveSupport": "PResolveSupportPSymbol", - "Lit_WorkspaceSymbolClientCapabilities_symbolKind": "PSymbolKindPSymbol", - "Lit_WorkspaceSymbolClientCapabilities_tagSupport": "PTagSupportPSymbol", - "Lit_WorkspaceSymbol_location_Item1": "PLocationMsg_workspace_symbol", - "Lit__InitializeParams_clientInfo": "Msg_XInitializeParams_clientInfo", - "Or_CompletionList_itemDefaults_editRange": "OrFEditRangePItemDefaults", - "Or_Declaration": "[]Location", - "Or_DidChangeConfigurationRegistrationOptions_section": "OrPSection_workspace_didChangeConfiguration", - "Or_GlobPattern": "string", - 
"Or_InlayHintLabelPart_tooltip": "OrPTooltipPLabel", - "Or_InlayHint_tooltip": "OrPTooltip_textDocument_inlayHint", - "Or_LSPAny": "interface{}", - "Or_NotebookDocumentFilter": "Msg_NotebookDocumentFilter", - "Or_NotebookDocumentSyncOptions_notebookSelector_Elem": "PNotebookSelectorPNotebookDocumentSync", - - "Or_NotebookDocumentSyncOptions_notebookSelector_Elem_Item0_notebook": "OrFNotebookPNotebookSelector", - - "Or_ParameterInformation_documentation": "string", - "Or_ParameterInformation_label": "string", - "Or_PrepareRenameResult": "Msg_PrepareRename2Gn", - "Or_ProgressToken": "interface{}", - "Or_Result_textDocument_completion": "CompletionList", - "Or_Result_textDocument_declaration": "Or_textDocument_declaration", - "Or_Result_textDocument_definition": "[]Location", - "Or_Result_textDocument_documentSymbol": "[]interface{}", - "Or_Result_textDocument_implementation": "[]Location", - "Or_Result_textDocument_semanticTokens_full_delta": "interface{}", - "Or_Result_textDocument_typeDefinition": "[]Location", - "Or_Result_workspace_symbol": "[]SymbolInformation", - "Or_TextDocumentContentChangeEvent": "Msg_TextDocumentContentChangeEvent", - "Or_TextDocumentFilter": "Msg_TextDocumentFilter", - "Or_WorkspaceFoldersServerCapabilities_changeNotifications": "string", - "Or_WorkspaceSymbol_location": "OrPLocation_workspace_symbol", - "PrepareRenameResult": "PrepareRename2Gn", - "Tuple_ParameterInformation_label_Item1": "UIntCommaUInt", - "WorkspaceFoldersServerCapabilities": "WorkspaceFolders5Gn", - "[]LSPAny": "[]interface{}", - "[]Or_NotebookDocumentSyncOptions_notebookSelector_Elem": "[]PNotebookSelectorPNotebookDocumentSync", - "[]Or_Result_textDocument_codeAction_Item0_Elem": "[]CodeAction", - "[]PreviousResultId": "[]PreviousResultID", - "[]uinteger": "[]uint32", - "boolean": "bool", - "decimal": "float64", - "integer": "int32", - "map[DocumentUri][]TextEdit": "map[DocumentURI][]TextEdit", - "uinteger": "uint32", -} - -var usedGoplsType = make(map[string]bool) - 
-// methodNames is a map from the method to the name of the function that handles it -var methodNames = map[string]string{ - "$/cancelRequest": "CancelRequest", - "$/logTrace": "LogTrace", - "$/progress": "Progress", - "$/setTrace": "SetTrace", - "callHierarchy/incomingCalls": "IncomingCalls", - "callHierarchy/outgoingCalls": "OutgoingCalls", - "client/registerCapability": "RegisterCapability", - "client/unregisterCapability": "UnregisterCapability", - "codeAction/resolve": "ResolveCodeAction", - "codeLens/resolve": "ResolveCodeLens", - "completionItem/resolve": "ResolveCompletionItem", - "documentLink/resolve": "ResolveDocumentLink", - "exit": "Exit", - "initialize": "Initialize", - "initialized": "Initialized", - "inlayHint/resolve": "Resolve", - "notebookDocument/didChange": "DidChangeNotebookDocument", - "notebookDocument/didClose": "DidCloseNotebookDocument", - "notebookDocument/didOpen": "DidOpenNotebookDocument", - "notebookDocument/didSave": "DidSaveNotebookDocument", - "shutdown": "Shutdown", - "telemetry/event": "Event", - "textDocument/codeAction": "CodeAction", - "textDocument/codeLens": "CodeLens", - "textDocument/colorPresentation": "ColorPresentation", - "textDocument/completion": "Completion", - "textDocument/declaration": "Declaration", - "textDocument/definition": "Definition", - "textDocument/diagnostic": "Diagnostic", - "textDocument/didChange": "DidChange", - "textDocument/didClose": "DidClose", - "textDocument/didOpen": "DidOpen", - "textDocument/didSave": "DidSave", - "textDocument/documentColor": "DocumentColor", - "textDocument/documentHighlight": "DocumentHighlight", - "textDocument/documentLink": "DocumentLink", - "textDocument/documentSymbol": "DocumentSymbol", - "textDocument/foldingRange": "FoldingRange", - "textDocument/formatting": "Formatting", - "textDocument/hover": "Hover", - "textDocument/implementation": "Implementation", - "textDocument/inlayHint": "InlayHint", - "textDocument/inlineCompletion": "InlineCompletion", - 
"textDocument/inlineValue": "InlineValue", - "textDocument/linkedEditingRange": "LinkedEditingRange", - "textDocument/moniker": "Moniker", - "textDocument/onTypeFormatting": "OnTypeFormatting", - "textDocument/prepareCallHierarchy": "PrepareCallHierarchy", - "textDocument/prepareRename": "PrepareRename", - "textDocument/prepareTypeHierarchy": "PrepareTypeHierarchy", - "textDocument/publishDiagnostics": "PublishDiagnostics", - "textDocument/rangeFormatting": "RangeFormatting", - "textDocument/rangesFormatting": "RangesFormatting", - "textDocument/references": "References", - "textDocument/rename": "Rename", - "textDocument/selectionRange": "SelectionRange", - "textDocument/semanticTokens/full": "SemanticTokensFull", - "textDocument/semanticTokens/full/delta": "SemanticTokensFullDelta", - "textDocument/semanticTokens/range": "SemanticTokensRange", - "textDocument/signatureHelp": "SignatureHelp", - "textDocument/typeDefinition": "TypeDefinition", - "textDocument/willSave": "WillSave", - "textDocument/willSaveWaitUntil": "WillSaveWaitUntil", - "typeHierarchy/subtypes": "Subtypes", - "typeHierarchy/supertypes": "Supertypes", - "window/logMessage": "LogMessage", - "window/showDocument": "ShowDocument", - "window/showMessage": "ShowMessage", - "window/showMessageRequest": "ShowMessageRequest", - "window/workDoneProgress/cancel": "WorkDoneProgressCancel", - "window/workDoneProgress/create": "WorkDoneProgressCreate", - "workspace/applyEdit": "ApplyEdit", - "workspace/codeLens/refresh": "CodeLensRefresh", - "workspace/configuration": "Configuration", - "workspace/diagnostic": "DiagnosticWorkspace", - "workspace/diagnostic/refresh": "DiagnosticRefresh", - "workspace/didChangeConfiguration": "DidChangeConfiguration", - "workspace/didChangeWatchedFiles": "DidChangeWatchedFiles", - "workspace/didChangeWorkspaceFolders": "DidChangeWorkspaceFolders", - "workspace/didCreateFiles": "DidCreateFiles", - "workspace/didDeleteFiles": "DidDeleteFiles", - "workspace/didRenameFiles": 
"DidRenameFiles", - "workspace/executeCommand": "ExecuteCommand", - "workspace/inlayHint/refresh": "InlayHintRefresh", - "workspace/inlineValue/refresh": "InlineValueRefresh", - "workspace/semanticTokens/refresh": "SemanticTokensRefresh", - "workspace/symbol": "Symbol", - "workspace/willCreateFiles": "WillCreateFiles", - "workspace/willDeleteFiles": "WillDeleteFiles", - "workspace/willRenameFiles": "WillRenameFiles", - "workspace/workspaceFolders": "WorkspaceFolders", - "workspaceSymbol/resolve": "ResolveWorkspaceSymbol", -} - -func methodName(method string) string { - ans := methodNames[method] - if ans == "" { - log.Fatalf("unknown method %q", method) - } - return ans -} diff --git a/gopls/internal/lsp/protocol/generate/types.go b/gopls/internal/lsp/protocol/generate/types.go deleted file mode 100644 index 0d01ae43cb1..00000000000 --- a/gopls/internal/lsp/protocol/generate/types.go +++ /dev/null @@ -1,170 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build go1.19 -// +build go1.19 - -package main - -import ( - "fmt" - "sort" -) - -// Model contains the parsed version of the spec -type Model struct { - Version Metadata `json:"metaData"` - Requests []*Request `json:"requests"` - Notifications []*Notification `json:"notifications"` - Structures []*Structure `json:"structures"` - Enumerations []*Enumeration `json:"enumerations"` - TypeAliases []*TypeAlias `json:"typeAliases"` - Line int `json:"line"` -} - -// Metadata is information about the version of the spec -type Metadata struct { - Version string `json:"version"` - Line int `json:"line"` -} - -// A Request is the parsed version of an LSP request -type Request struct { - Documentation string `json:"documentation"` - ErrorData *Type `json:"errorData"` - Direction string `json:"messageDirection"` - Method string `json:"method"` - Params *Type `json:"params"` - PartialResult *Type `json:"partialResult"` - Proposed bool `json:"proposed"` - RegistrationMethod string `json:"registrationMethod"` - RegistrationOptions *Type `json:"registrationOptions"` - Result *Type `json:"result"` - Since string `json:"since"` - Line int `json:"line"` -} - -// A Notificatin is the parsed version of an LSP notification -type Notification struct { - Documentation string `json:"documentation"` - Direction string `json:"messageDirection"` - Method string `json:"method"` - Params *Type `json:"params"` - Proposed bool `json:"proposed"` - RegistrationMethod string `json:"registrationMethod"` - RegistrationOptions *Type `json:"registrationOptions"` - Since string `json:"since"` - Line int `json:"line"` -} - -// A Structure is the parsed version of an LSP structure from the spec -type Structure struct { - Documentation string `json:"documentation"` - Extends []*Type `json:"extends"` - Mixins []*Type `json:"mixins"` - Name string `json:"name"` - Properties []NameType `json:"properties"` - Proposed bool `json:"proposed"` - Since string `json:"since"` - Line int `json:"line"` -} - -// 
An enumeration is the parsed version of an LSP enumeration from the spec -type Enumeration struct { - Documentation string `json:"documentation"` - Name string `json:"name"` - Proposed bool `json:"proposed"` - Since string `json:"since"` - SupportsCustomValues bool `json:"supportsCustomValues"` - Type *Type `json:"type"` - Values []NameValue `json:"values"` - Line int `json:"line"` -} - -// A TypeAlias is the parsed version of an LSP type alias from the spec -type TypeAlias struct { - Documentation string `json:"documentation"` - Deprecated string `json:"deprecated"` - Name string `json:"name"` - Proposed bool `json:"proposed"` - Since string `json:"since"` - Type *Type `json:"type"` - Line int `json:"line"` -} - -// A NameValue describes an enumeration constant -type NameValue struct { - Documentation string `json:"documentation"` - Name string `json:"name"` - Proposed bool `json:"proposed"` - Since string `json:"since"` - Value any `json:"value"` // number or string - Line int `json:"line"` -} - -// A Type is the parsed version of an LSP type from the spec, -// or a Type the code constructs -type Type struct { - Kind string `json:"kind"` // -- which kind goes with which field -- - Items []*Type `json:"items"` // "and", "or", "tuple" - Element *Type `json:"element"` // "array" - Name string `json:"name"` // "base", "reference" - Key *Type `json:"key"` // "map" - Value any `json:"value"` // "map", "stringLiteral", "literal" - Line int `json:"line"` // JSON source line -} - -// ParsedLiteral is Type.Value when Type.Kind is "literal" -type ParseLiteral struct { - Properties `json:"properties"` -} - -// A NameType represents the name and type of a structure element -type NameType struct { - Name string `json:"name"` - Type *Type `json:"type"` - Optional bool `json:"optional"` - Documentation string `json:"documentation"` - Deprecated string `json:"deprecated"` - Since string `json:"since"` - Proposed bool `json:"proposed"` - Line int `json:"line"` -} - -// Properties 
are the collection of structure fields -type Properties []NameType - -// addLineNumbers adds a "line" field to each object in the JSON. -func addLineNumbers(buf []byte) []byte { - var ans []byte - // In the specification .json file, the delimiter '{' is - // always followed by a newline. There are other {s embedded in strings. - // json.Token does not return \n, or :, or , so using it would - // require parsing the json to reconstruct the missing information. - for linecnt, i := 1, 0; i < len(buf); i++ { - ans = append(ans, buf[i]) - switch buf[i] { - case '{': - if buf[i+1] == '\n' { - ans = append(ans, fmt.Sprintf(`"line": %d, `, linecnt)...) - // warning: this would fail if the spec file had - // `"value": {\n}`, but it does not, as comma is a separator. - } - case '\n': - linecnt++ - } - } - return ans -} - -type sortedMap[T any] map[string]T - -func (s sortedMap[T]) keys() []string { - var keys []string - for k := range s { - keys = append(keys, k) - } - sort.Strings(keys) - return keys -} diff --git a/gopls/internal/lsp/protocol/mapper.go b/gopls/internal/lsp/protocol/mapper.go deleted file mode 100644 index 9e683d9c787..00000000000 --- a/gopls/internal/lsp/protocol/mapper.go +++ /dev/null @@ -1,529 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package protocol - -// This file defines Mapper, which wraps a file content buffer -// ([]byte) and provides efficient conversion between every kind of -// position representation. -// -// gopls uses four main representations of position: -// -// 1. byte offsets, e.g. (start, end int), starting from zero. -// -// 2. go/token notation. 
Use these types when interacting directly -// with the go/* syntax packages: -// -// token.Pos -// token.FileSet -// token.File -// -// Because File.Offset and File.Pos panic on invalid inputs, -// we do not call them directly and instead use the safetoken package -// for these conversions. This is enforced by a static check. -// -// Beware also that the methods of token.File have two bugs for which -// safetoken contains workarounds: -// - #57490, whereby the parser may create ast.Nodes during error -// recovery whose computed positions are out of bounds (EOF+1). -// - #41029, whereby the wrong line number is returned for the EOF position. -// -// 3. the span package. -// -// span.Point = (line, col8, offset). -// span.Span = (uri URI, start, end span.Point) -// -// Line and column are 1-based. -// Columns are measured in bytes (UTF-8 codes). -// All fields are optional. -// -// These types are useful as intermediate conversions of validated -// ranges (though MappedRange is superior as it is self contained -// and universally convertible). Since their fields are optional -// they are also useful for parsing user-provided positions (e.g. in -// the CLI) before we have access to file contents. -// -// 4. protocol, the LSP RPC message format. -// -// protocol.Position = (Line, Character uint32) -// protocol.Range = (start, end Position) -// protocol.Location = (URI, protocol.Range) -// -// Line and Character are 0-based. -// Characters (columns) are measured in UTF-16 codes. -// -// protocol.Mapper holds the (URI, Content) of a file, enabling -// efficient mapping between byte offsets, span ranges, and -// protocol ranges. -// -// protocol.MappedRange holds a protocol.Mapper and valid (start, -// end int) byte offsets, enabling infallible, efficient conversion -// to any other format. 
- -import ( - "bytes" - "fmt" - "go/ast" - "go/token" - "path/filepath" - "sort" - "strings" - "sync" - "unicode/utf8" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/span" -) - -// A Mapper wraps the content of a file and provides mapping -// between byte offsets and notations of position such as: -// -// - (line, col8) pairs, where col8 is a 1-based UTF-8 column number -// (bytes), as used by the go/token and span packages. -// -// - (line, col16) pairs, where col16 is a 1-based UTF-16 column -// number, as used by the LSP protocol. -// -// All conversion methods are named "FromTo", where From and To are the two types. -// For example, the PointPosition method converts from a Point to a Position. -// -// Mapper does not intrinsically depend on go/token-based -// representations. Use safetoken to map between token.Pos <=> byte -// offsets, or the convenience methods such as PosPosition, -// NodePosition, or NodeRange. -// -// See overview comments at top of this file. -type Mapper struct { - URI span.URI - Content []byte - - // Line-number information is requested only for a tiny - // fraction of Mappers, so we compute it lazily. - // Call initLines() before accessing fields below. - linesOnce sync.Once - lineStart []int // byte offset of start of ith line (0-based); last=EOF iff \n-terminated - nonASCII bool - - // TODO(adonovan): adding an extra lineStart entry for EOF - // might simplify every method that accesses it. Try it out. -} - -// NewMapper creates a new mapper for the given URI and content. -func NewMapper(uri span.URI, content []byte) *Mapper { - return &Mapper{URI: uri, Content: content} -} - -// initLines populates the lineStart table. 
-func (m *Mapper) initLines() { - m.linesOnce.Do(func() { - nlines := bytes.Count(m.Content, []byte("\n")) - m.lineStart = make([]int, 1, nlines+1) // initially []int{0} - for offset, b := range m.Content { - if b == '\n' { - m.lineStart = append(m.lineStart, offset+1) - } - if b >= utf8.RuneSelf { - m.nonASCII = true - } - } - }) -} - -// -- conversions from span (UTF-8) domain -- - -// SpanLocation converts a (UTF-8) span to a protocol (UTF-16) range. -// Precondition: the URIs of SpanLocation and Mapper match. -func (m *Mapper) SpanLocation(s span.Span) (Location, error) { - rng, err := m.SpanRange(s) - if err != nil { - return Location{}, err - } - return m.RangeLocation(rng), nil -} - -// SpanRange converts a (UTF-8) span to a protocol (UTF-16) range. -// Precondition: the URIs of Span and Mapper match. -func (m *Mapper) SpanRange(s span.Span) (Range, error) { - // Assert that we aren't using the wrong mapper. - // We check only the base name, and case insensitively, - // because we can't assume clean paths, no symbolic links, - // case-sensitive directories. The authoritative answer - // requires querying the file system, and we don't want - // to do that. - if !strings.EqualFold(filepath.Base(string(m.URI)), filepath.Base(string(s.URI()))) { - return Range{}, bug.Errorf("mapper is for file %q instead of %q", m.URI, s.URI()) - } - start, err := m.PointPosition(s.Start()) - if err != nil { - return Range{}, fmt.Errorf("start: %w", err) - } - end, err := m.PointPosition(s.End()) - if err != nil { - return Range{}, fmt.Errorf("end: %w", err) - } - return Range{Start: start, End: end}, nil -} - -// PointPosition converts a valid span (UTF-8) point to a protocol (UTF-16) position. 
-func (m *Mapper) PointPosition(p span.Point) (Position, error) { - if p.HasPosition() { - line, col8 := p.Line()-1, p.Column()-1 // both 0-based - m.initLines() - if line >= len(m.lineStart) { - return Position{}, fmt.Errorf("line number %d out of range (max %d)", line, len(m.lineStart)) - } - offset := m.lineStart[line] - end := offset + col8 - - // Validate column. - if end > len(m.Content) { - return Position{}, fmt.Errorf("column is beyond end of file") - } else if line+1 < len(m.lineStart) && end >= m.lineStart[line+1] { - return Position{}, fmt.Errorf("column is beyond end of line") - } - - char := UTF16Len(m.Content[offset:end]) - return Position{Line: uint32(line), Character: uint32(char)}, nil - } - if p.HasOffset() { - return m.OffsetPosition(p.Offset()) - } - return Position{}, fmt.Errorf("point has neither offset nor line/column") -} - -// -- conversions from byte offsets -- - -// OffsetLocation converts a byte-offset interval to a protocol (UTF-16) location. -func (m *Mapper) OffsetLocation(start, end int) (Location, error) { - rng, err := m.OffsetRange(start, end) - if err != nil { - return Location{}, err - } - return m.RangeLocation(rng), nil -} - -// OffsetRange converts a byte-offset interval to a protocol (UTF-16) range. -func (m *Mapper) OffsetRange(start, end int) (Range, error) { - if start > end { - return Range{}, fmt.Errorf("start offset (%d) > end (%d)", start, end) - } - startPosition, err := m.OffsetPosition(start) - if err != nil { - return Range{}, fmt.Errorf("start: %v", err) - } - endPosition, err := m.OffsetPosition(end) - if err != nil { - return Range{}, fmt.Errorf("end: %v", err) - } - return Range{Start: startPosition, End: endPosition}, nil -} - -// OffsetSpan converts a byte-offset interval to a (UTF-8) span. -// The resulting span contains line, column, and offset information. 
-func (m *Mapper) OffsetSpan(start, end int) (span.Span, error) { - if start > end { - return span.Span{}, fmt.Errorf("start offset (%d) > end (%d)", start, end) - } - startPoint, err := m.OffsetPoint(start) - if err != nil { - return span.Span{}, fmt.Errorf("start: %v", err) - } - endPoint, err := m.OffsetPoint(end) - if err != nil { - return span.Span{}, fmt.Errorf("end: %v", err) - } - return span.New(m.URI, startPoint, endPoint), nil -} - -// OffsetPosition converts a byte offset to a protocol (UTF-16) position. -func (m *Mapper) OffsetPosition(offset int) (Position, error) { - if !(0 <= offset && offset <= len(m.Content)) { - return Position{}, fmt.Errorf("invalid offset %d (want 0-%d)", offset, len(m.Content)) - } - // No error may be returned after this point, - // even if the offset does not fall at a rune boundary. - // (See panic in MappedRange.Range reachable.) - - line, col16 := m.lineCol16(offset) - return Position{Line: uint32(line), Character: uint32(col16)}, nil -} - -// lineCol16 converts a valid byte offset to line and UTF-16 column numbers, both 0-based. -func (m *Mapper) lineCol16(offset int) (int, int) { - line, start, cr := m.line(offset) - var col16 int - if m.nonASCII { - col16 = UTF16Len(m.Content[start:offset]) - } else { - col16 = offset - start - } - if cr { - col16-- // retreat from \r at line end - } - return line, col16 -} - -// lineCol8 converts a valid byte offset to line and UTF-8 column numbers, both 0-based. -func (m *Mapper) lineCol8(offset int) (int, int) { - line, start, cr := m.line(offset) - col8 := offset - start - if cr { - col8-- // retreat from \r at line end - } - return line, col8 -} - -// line returns: -// - the 0-based index of the line that encloses the (valid) byte offset; -// - the start offset of that line; and -// - whether the offset denotes a carriage return (\r) at line end. -func (m *Mapper) line(offset int) (int, int, bool) { - m.initLines() - // In effect, binary search returns a 1-based result. 
- line := sort.Search(len(m.lineStart), func(i int) bool { - return offset < m.lineStart[i] - }) - - // Adjustment for line-endings: \r|\n is the same as |\r\n. - var eol int - if line == len(m.lineStart) { - eol = len(m.Content) // EOF - } else { - eol = m.lineStart[line] - 1 - } - cr := offset == eol && offset > 0 && m.Content[offset-1] == '\r' - - line-- // 0-based - - return line, m.lineStart[line], cr -} - -// OffsetPoint converts a byte offset to a span (UTF-8) point. -// The resulting point contains line, column, and offset information. -func (m *Mapper) OffsetPoint(offset int) (span.Point, error) { - if !(0 <= offset && offset <= len(m.Content)) { - return span.Point{}, fmt.Errorf("invalid offset %d (want 0-%d)", offset, len(m.Content)) - } - line, col8 := m.lineCol8(offset) - return span.NewPoint(line+1, col8+1, offset), nil -} - -// OffsetMappedRange returns a MappedRange for the given byte offsets. -// A MappedRange can be converted to any other form. -func (m *Mapper) OffsetMappedRange(start, end int) (MappedRange, error) { - if !(0 <= start && start <= end && end <= len(m.Content)) { - return MappedRange{}, fmt.Errorf("invalid offsets (%d, %d) (file %s has size %d)", start, end, m.URI, len(m.Content)) - } - return MappedRange{m, start, end}, nil -} - -// -- conversions from protocol (UTF-16) domain -- - -// LocationSpan converts a protocol (UTF-16) Location to a (UTF-8) span. -// Precondition: the URIs of Location and Mapper match. -func (m *Mapper) LocationSpan(l Location) (span.Span, error) { - // TODO(adonovan): check that l.URI matches m.URI. - return m.RangeSpan(l.Range) -} - -// RangeSpan converts a protocol (UTF-16) range to a (UTF-8) span. -// The resulting span has valid Positions and Offsets. 
-func (m *Mapper) RangeSpan(r Range) (span.Span, error) { - start, end, err := m.RangeOffsets(r) - if err != nil { - return span.Span{}, err - } - return m.OffsetSpan(start, end) -} - -// RangeOffsets converts a protocol (UTF-16) range to start/end byte offsets. -func (m *Mapper) RangeOffsets(r Range) (int, int, error) { - start, err := m.PositionOffset(r.Start) - if err != nil { - return 0, 0, err - } - end, err := m.PositionOffset(r.End) - if err != nil { - return 0, 0, err - } - return start, end, nil -} - -// PositionOffset converts a protocol (UTF-16) position to a byte offset. -func (m *Mapper) PositionOffset(p Position) (int, error) { - m.initLines() - - // Validate line number. - if p.Line > uint32(len(m.lineStart)) { - return 0, fmt.Errorf("line number %d out of range 0-%d", p.Line, len(m.lineStart)) - } else if p.Line == uint32(len(m.lineStart)) { - if p.Character == 0 { - return len(m.Content), nil // EOF - } - return 0, fmt.Errorf("column is beyond end of file") - } - - offset := m.lineStart[p.Line] - content := m.Content[offset:] // rest of file from start of enclosing line - - // Advance bytes up to the required number of UTF-16 codes. - col8 := 0 - for col16 := 0; col16 < int(p.Character); col16++ { - r, sz := utf8.DecodeRune(content) - if sz == 0 { - return 0, fmt.Errorf("column is beyond end of file") - } - if r == '\n' { - return 0, fmt.Errorf("column is beyond end of line") - } - if sz == 1 && r == utf8.RuneError { - return 0, fmt.Errorf("buffer contains invalid UTF-8 text") - } - content = content[sz:] - - if r >= 0x10000 { - col16++ // rune was encoded by a pair of surrogate UTF-16 codes - - if col16 == int(p.Character) { - break // requested position is in the middle of a rune - } - } - col8 += sz - } - return offset + col8, nil -} - -// PositionPoint converts a protocol (UTF-16) position to a span (UTF-8) point. -// The resulting point has a valid Position and Offset. 
-func (m *Mapper) PositionPoint(p Position) (span.Point, error) { - offset, err := m.PositionOffset(p) - if err != nil { - return span.Point{}, err - } - line, col8 := m.lineCol8(offset) - - return span.NewPoint(line+1, col8+1, offset), nil -} - -// -- go/token domain convenience methods -- - -// PosPosition converts a token pos to a protocol (UTF-16) position. -func (m *Mapper) PosPosition(tf *token.File, pos token.Pos) (Position, error) { - offset, err := safetoken.Offset(tf, pos) - if err != nil { - return Position{}, err - } - return m.OffsetPosition(offset) -} - -// PosLocation converts a token range to a protocol (UTF-16) location. -func (m *Mapper) PosLocation(tf *token.File, start, end token.Pos) (Location, error) { - startOffset, endOffset, err := safetoken.Offsets(tf, start, end) - if err != nil { - return Location{}, err - } - rng, err := m.OffsetRange(startOffset, endOffset) - if err != nil { - return Location{}, err - } - return m.RangeLocation(rng), nil -} - -// PosRange converts a token range to a protocol (UTF-16) range. -func (m *Mapper) PosRange(tf *token.File, start, end token.Pos) (Range, error) { - startOffset, endOffset, err := safetoken.Offsets(tf, start, end) - if err != nil { - return Range{}, err - } - return m.OffsetRange(startOffset, endOffset) -} - -// NodeRange converts a syntax node range to a protocol (UTF-16) range. -func (m *Mapper) NodeRange(tf *token.File, node ast.Node) (Range, error) { - return m.PosRange(tf, node.Pos(), node.End()) -} - -// RangeLocation pairs a protocol Range with its URI, in a Location. -func (m *Mapper) RangeLocation(rng Range) Location { - return Location{URI: URIFromSpanURI(m.URI), Range: rng} -} - -// PosMappedRange returns a MappedRange for the given token.Pos range. 
-func (m *Mapper) PosMappedRange(tf *token.File, start, end token.Pos) (MappedRange, error) { - startOffset, endOffset, err := safetoken.Offsets(tf, start, end) - if err != nil { - return MappedRange{}, nil - } - return m.OffsetMappedRange(startOffset, endOffset) -} - -// NodeMappedRange returns a MappedRange for the given node range. -func (m *Mapper) NodeMappedRange(tf *token.File, node ast.Node) (MappedRange, error) { - return m.PosMappedRange(tf, node.Pos(), node.End()) -} - -// -- MappedRange -- - -// A MappedRange represents a valid byte-offset range of a file. -// Through its Mapper it can be converted into other forms such -// as protocol.Range or span.Span. -// -// Construct one by calling Mapper.OffsetMappedRange with start/end offsets. -// From the go/token domain, call safetoken.Offsets first, -// or use a helper such as ParsedGoFile.MappedPosRange. -// -// Two MappedRanges produced the same Mapper are equal if and only if they -// denote the same range. Two MappedRanges produced by different Mappers -// are unequal even when they represent the same range of the same file. -type MappedRange struct { - Mapper *Mapper - start, end int // valid byte offsets: 0 <= start <= end <= len(Mapper.Content) -} - -// Offsets returns the (start, end) byte offsets of this range. -func (mr MappedRange) Offsets() (start, end int) { return mr.start, mr.end } - -// -- convenience functions -- - -// URI returns the URI of the range's file. -func (mr MappedRange) URI() span.URI { - return mr.Mapper.URI -} - -// Range returns the range in protocol (UTF-16) form. -func (mr MappedRange) Range() Range { - rng, err := mr.Mapper.OffsetRange(mr.start, mr.end) - if err != nil { - panic(err) // can't happen - } - return rng -} - -// Location returns the range in protocol location (UTF-16) form. -func (mr MappedRange) Location() Location { - return mr.Mapper.RangeLocation(mr.Range()) -} - -// Span returns the range in span (UTF-8) form. 
-func (mr MappedRange) Span() span.Span { - spn, err := mr.Mapper.OffsetSpan(mr.start, mr.end) - if err != nil { - panic(err) // can't happen - } - return spn -} - -// String formats the range in span (UTF-8) notation. -func (mr MappedRange) String() string { - return fmt.Sprint(mr.Span()) -} - -// LocationTextDocumentPositionParams converts its argument to its result. -func LocationTextDocumentPositionParams(loc Location) TextDocumentPositionParams { - return TextDocumentPositionParams{ - TextDocument: TextDocumentIdentifier{URI: loc.URI}, - Position: loc.Range.Start, - } -} diff --git a/gopls/internal/lsp/protocol/span.go b/gopls/internal/lsp/protocol/span.go deleted file mode 100644 index 5e1a7dab207..00000000000 --- a/gopls/internal/lsp/protocol/span.go +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package protocol - -import ( - "fmt" - "unicode/utf8" - - "golang.org/x/tools/gopls/internal/span" -) - -func URIFromSpanURI(uri span.URI) DocumentURI { - return DocumentURI(uri) // simple conversion -} - -func URIFromPath(path string) DocumentURI { - return URIFromSpanURI(span.URIFromPath(path)) // normalizing conversion -} - -func (u DocumentURI) SpanURI() span.URI { - return span.URIFromURI(string(u)) // normalizing conversion -} - -// CompareLocation defines a three-valued comparison over locations, -// lexicographically ordered by (URI, Range). -func CompareLocation(x, y Location) int { - if x.URI != y.URI { - if x.URI < y.URI { - return -1 - } else { - return +1 - } - } - return CompareRange(x.Range, y.Range) -} - -// CompareRange returns -1 if a is before b, 0 if a == b, and 1 if a is after b. -// -// A range a is defined to be 'before' b if a.Start is before b.Start, or -// a.Start == b.Start and a.End is before b.End. 
-func CompareRange(a, b Range) int { - if r := ComparePosition(a.Start, b.Start); r != 0 { - return r - } - return ComparePosition(a.End, b.End) -} - -// ComparePosition returns -1 if a is before b, 0 if a == b, and 1 if a is after b. -func ComparePosition(a, b Position) int { - if a.Line != b.Line { - if a.Line < b.Line { - return -1 - } else { - return +1 - } - } - if a.Character != b.Character { - if a.Character < b.Character { - return -1 - } else { - return +1 - } - } - return 0 -} - -func Intersect(a, b Range) bool { - if a.Start.Line > b.End.Line || a.End.Line < b.Start.Line { - return false - } - return !((a.Start.Line == b.End.Line) && a.Start.Character > b.End.Character || - (a.End.Line == b.Start.Line) && a.End.Character < b.Start.Character) -} - -// Format implements fmt.Formatter. -// -// Note: Formatter is implemented instead of Stringer (presumably) for -// performance reasons, though it is not clear that it matters in practice. -func (r Range) Format(f fmt.State, _ rune) { - fmt.Fprintf(f, "%v-%v", r.Start, r.End) -} - -// Format implements fmt.Formatter. -// -// See Range.Format for discussion of why the Formatter interface is -// implemented rather than Stringer. -func (p Position) Format(f fmt.State, _ rune) { - fmt.Fprintf(f, "%v:%v", p.Line, p.Character) -} - -// -- implementation helpers -- - -// UTF16Len returns the number of codes in the UTF-16 transcoding of s. -func UTF16Len(s []byte) int { - var n int - for len(s) > 0 { - n++ - - // Fast path for ASCII. - if s[0] < 0x80 { - s = s[1:] - continue - } - - r, size := utf8.DecodeRune(s) - if r >= 0x10000 { - n++ // surrogate pair - } - s = s[size:] - } - return n -} diff --git a/gopls/internal/lsp/references.go b/gopls/internal/lsp/references.go deleted file mode 100644 index d3d36235697..00000000000 --- a/gopls/internal/lsp/references.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/template" - "golang.org/x/tools/gopls/internal/telemetry" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -func (s *Server) references(ctx context.Context, params *protocol.ReferenceParams) (_ []protocol.Location, rerr error) { - recordLatency := telemetry.StartLatencyTimer("references") - defer func() { - recordLatency(ctx, rerr) - }() - - ctx, done := event.Start(ctx, "lsp.Server.references", tag.URI.Of(params.TextDocument.URI)) - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.UnknownKind) - defer release() - if !ok { - return nil, err - } - if snapshot.FileKind(fh) == source.Tmpl { - return template.References(ctx, snapshot, fh, params) - } - return source.References(ctx, snapshot, fh, params.Position, params.Context.IncludeDeclaration) -} diff --git a/gopls/internal/lsp/regtest/doc.go b/gopls/internal/lsp/regtest/doc.go deleted file mode 100644 index 4f4c7c020ba..00000000000 --- a/gopls/internal/lsp/regtest/doc.go +++ /dev/null @@ -1,157 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package regtest provides a framework for writing gopls regression tests. -// -// User reported regressions are often expressed in terms of editor -// interactions. For example: "When I open my editor in this directory, -// navigate to this file, and change this line, I get a diagnostic that doesn't -// make sense". In these cases reproducing, diagnosing, and writing a test to -// protect against this regression can be difficult. 
-// -// The regtest package provides an API for developers to express these types of -// user interactions in ordinary Go tests, validate them, and run them in a -// variety of execution modes. -// -// # Test package setup -// -// The regression test package uses a couple of uncommon patterns to reduce -// boilerplate in test bodies. First, it is intended to be imported as "." so -// that helpers do not need to be qualified. Second, it requires some setup -// that is currently implemented in the regtest.Main function, which must be -// invoked by TestMain. Therefore, a minimal regtest testing package looks -// like this: -// -// package lsptests -// -// import ( -// "fmt" -// "testing" -// -// "golang.org/x/tools/gopls/internal/hooks" -// . "golang.org/x/tools/gopls/internal/lsp/regtest" -// ) -// -// func TestMain(m *testing.M) { -// Main(m, hooks.Options) -// } -// -// # Writing a simple regression test -// -// To run a regression test use the regtest.Run function, which accepts a -// txtar-encoded archive defining the initial workspace state. This function -// sets up the workspace in a temporary directory, creates a fake text editor, -// starts gopls, and initializes an LSP session. It then invokes the provided -// test function with an *Env handle encapsulating the newly created -// environment. Because gopls may be run in various modes (as a sidecar or -// daemon process, with different settings), the test runner may perform this -// process multiple times, re-running the test function each time with a new -// environment. -// -// func TestOpenFile(t *testing.T) { -// const files = ` -// -- go.mod -- -// module mod.com -// -// go 1.12 -// -- foo.go -- -// package foo -// ` -// Run(t, files, func(t *testing.T, env *Env) { -// env.OpenFile("foo.go") -// }) -// } -// -// # Configuring Regtest Execution -// -// The regtest package exposes several options that affect the setup process -// described above. 
To use these options, use the WithOptions function: -// -// WithOptions(opts...).Run(...) -// -// See options.go for a full list of available options. -// -// # Operating on editor state -// -// To operate on editor state within the test body, the Env type provides -// access to the workspace directory (Env.SandBox), text editor (Env.Editor), -// LSP server (Env.Server), and 'awaiter' (Env.Awaiter). -// -// In most cases, operations on these primitive building blocks of the -// regression test environment expect a Context (which should be a child of -// env.Ctx), and return an error. To avoid boilerplate, the Env exposes a set -// of wrappers in wrappers.go for use in scripting: -// -// env.CreateBuffer("c/c.go", "") -// env.EditBuffer("c/c.go", fake.Edit{ -// Text: `package c`, -// }) -// -// These wrappers thread through Env.Ctx, and call t.Fatal on any errors. -// -// # Expressing expectations -// -// The general pattern for a regression test is to script interactions with the -// fake editor and sandbox, and assert that gopls behaves correctly after each -// state change. Unfortunately, this is complicated by the fact that state -// changes are communicated to gopls via unidirectional client->server -// notifications (didOpen, didChange, etc.), and resulting gopls behavior such -// as diagnostics, logs, or messages is communicated back via server->client -// notifications. Therefore, within regression tests we must be able to say "do -// this, and then eventually gopls should do that". To achieve this, the -// regtest package provides a framework for expressing conditions that must -// eventually be met, in terms of the Expectation type. 
-// -// To express the assertion that "eventually gopls must meet these -// expectations", use env.Await(...): -// -// env.RegexpReplace("x/x.go", `package x`, `package main`) -// env.Await(env.DiagnosticAtRegexp("x/main.go", `fmt`)) -// -// Await evaluates the provided expectations atomically, whenever the client -// receives a state-changing notification from gopls. See expectation.go for a -// full list of available expectations. -// -// A fundamental problem with this model is that if gopls never meets the -// provided expectations, the test runner will hang until the test timeout -// (which defaults to 10m). There are two ways to work around this poor -// behavior: -// -// 1. Use a precondition to define precisely when we expect conditions to be -// met. Gopls provides the OnceMet(precondition, expectations...) pattern -// to express ("once this precondition is met, the following expectations -// must all hold"). To instrument preconditions, gopls uses verbose -// progress notifications to inform the client about ongoing work (see -// CompletedWork). The most common precondition is to wait for gopls to be -// done processing all change notifications, for which the regtest package -// provides the AfterChange helper. For example: -// -// // We expect diagnostics to be cleared after gopls is done processing the -// // didSave notification. -// env.SaveBuffer("a/go.mod") -// env.AfterChange(EmptyDiagnostics("a/go.mod")) -// -// 2. Set a shorter timeout during development, if you expect to be breaking -// tests. By setting the environment variable GOPLS_REGTEST_TIMEOUT=5s, -// regression tests will time out after 5 seconds. -// -// # Tips & Tricks -// -// Here are some tips and tricks for working with regression tests: -// -// 1. Set the environment variable GOPLS_REGTEST_TIMEOUT=5s during development. -// 2. Run tests with -short. This will only run regression tests in the -// default gopls execution mode. -// 3. Use capture groups to narrow regexp positions. 
All regular-expression -// based positions (such as DiagnosticAtRegexp) will match the position of -// the first capture group, if any are provided. This can be used to -// identify a specific position in the code for a pattern that may occur in -// multiple places. For example `var (mu) sync.Mutex` matches the position -// of "mu" within the variable declaration. -// 4. Read diagnostics into a variable to implement more complicated -// assertions about diagnostic state in the editor. To do this, use the -// pattern OnceMet(precondition, ReadDiagnostics("file.go", &d)) to capture -// the current diagnostics as soon as the precondition is met. This is -// preferable to accessing the diagnostics directly, as it avoids races. -package regtest diff --git a/gopls/internal/lsp/regtest/marker.go b/gopls/internal/lsp/regtest/marker.go deleted file mode 100644 index 45ecc74e41a..00000000000 --- a/gopls/internal/lsp/regtest/marker.go +++ /dev/null @@ -1,2366 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package regtest - -import ( - "bytes" - "context" - "encoding/json" - "flag" - "fmt" - "go/token" - "go/types" - "io/fs" - "log" - "os" - "path" - "path/filepath" - "reflect" - "regexp" - "runtime" - "sort" - "strings" - "testing" - - "github.com/google/go-cmp/cmp" - "golang.org/x/tools/go/expect" - "golang.org/x/tools/gopls/internal/hooks" - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/debug" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/lsprpc" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/tests" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" - "golang.org/x/tools/internal/diff" - "golang.org/x/tools/internal/jsonrpc2" - "golang.org/x/tools/internal/jsonrpc2/servertest" - "golang.org/x/tools/internal/testenv" - "golang.org/x/tools/txtar" -) - -var update = flag.Bool("update", false, "if set, update test data during marker tests") - -// RunMarkerTests runs "marker" tests in the given test data directory. -// (In practice: ../../regtest/marker/testdata) -// -// Use this command to run the tests: -// -// $ go test ./gopls/internal/regtest/marker [-update] -// -// A marker test uses the '//@' marker syntax of the x/tools/go/expect package -// to annotate source code with various information such as locations and -// arguments of LSP operations to be executed by the test. The syntax following -// '@' is parsed as a comma-separated list of ordinary Go function calls, for -// example -// -// //@foo(a, "b", 3),bar(0) -// -// and delegates to a corresponding function to perform LSP-related operations. -// See the Marker types documentation below for a list of supported markers. -// -// Each call argument is converted to the type of the corresponding parameter of -// the designated function. 
The conversion logic may use the surrounding context, -// such as the position or nearby text. See the Argument conversion section below -// for the full set of special conversions. As a special case, the blank -// identifier '_' is treated as the zero value of the parameter type. -// -// The test runner collects test cases by searching the given directory for -// files with the .txt extension. Each file is interpreted as a txtar archive, -// which is extracted to a temporary directory. The relative path to the .txt -// file is used as the subtest name. The preliminary section of the file -// (before the first archive entry) is a free-form comment. -// -// These tests were inspired by (and in many places copied from) a previous -// iteration of the marker tests built on top of the packagestest framework. -// Key design decisions motivating this reimplementation are as follows: -// - The old tests had a single global session, causing interaction at a -// distance and several awkward workarounds. -// - The old tests could not be safely parallelized, because certain tests -// manipulated the server options -// - Relatedly, the old tests did not have a logic grouping of assertions into -// a single unit, resulting in clusters of files serving clusters of -// entangled assertions. -// - The old tests used locations in the source as test names and as the -// identity of golden content, meaning that a single edit could change the -// name of an arbitrary number of subtests, and making it difficult to -// manually edit golden content. -// - The old tests did not hew closely to LSP concepts, resulting in, for -// example, each marker implementation doing its own position -// transformations, and inventing its own mechanism for configuration. -// - The old tests had an ad-hoc session initialization process. The regtest -// environment has had more time devoted to its initialization, and has a -// more convenient API. 
-// - The old tests lacked documentation, and often had failures that were hard -// to understand. By starting from scratch, we can revisit these aspects. -// -// # Special files -// -// There are several types of file within the test archive that are given special -// treatment by the test runner: -// - "skip": the presence of this file causes the test to be skipped, with -// the file content used as the skip message. -// - "flags": this file is treated as a whitespace-separated list of flags -// that configure the MarkerTest instance. Supported flags: -// -min_go=go1.18 sets the minimum Go version for the test; -// -cgo requires that CGO_ENABLED is set and the cgo tool is available -// -write_sumfile=a,b,c instructs the test runner to generate go.sum files -// in these directories before running the test. -// -skip_goos=a,b,c instructs the test runner to skip the test for the -// listed GOOS values. -// -ignore_extra_diags suppresses errors for unmatched diagnostics -// TODO(rfindley): using build constraint expressions for -skip_goos would -// be clearer. -// -filter_builtins=false disables the filtering of builtins from -// completion results. -// -filter_keywords=false disables the filtering of keywords from -// completion results. -// TODO(rfindley): support flag values containing whitespace. -// - "settings.json": this file is parsed as JSON, and used as the -// session configuration (see gopls/doc/settings.md) -// - "capabilities.json": this file is parsed as JSON client capabilities, -// and applied as an overlay over the default editor client capabilities. -// see https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#clientCapabilities -// for more details. -// - "env": this file is parsed as a list of VAR=VALUE fields specifying the -// editor environment. 
-// - Golden files: Within the archive, file names starting with '@' are -// treated as "golden" content, and are not written to disk, but instead are -// made available to test methods expecting an argument of type *Golden, -// using the identifier following '@'. For example, if the first parameter of -// Foo were of type *Golden, the test runner would convert the identifier a -// in the call @foo(a, "b", 3) into a *Golden by collecting golden file -// data starting with "@a/". -// - proxy files: any file starting with proxy/ is treated as a Go proxy -// file. If present, these files are written to a separate temporary -// directory and GOPROXY is set to file://. -// -// # Marker types -// -// Markers are of two kinds. A few are "value markers" (e.g. @item), which are -// processed in a first pass and each computes a value that may be referred to -// by name later. Most are "action markers", which are processed in a second -// pass and take some action such as testing an LSP operation; they may refer -// to values computed by value markers. -// -// The following markers are supported within marker tests: -// -// - acceptcompletion(location, label, golden): specifies that accepting the -// completion candidate produced at the given location with provided label -// results in the given golden state. -// -// - codeaction(start, end, kind, golden, ...titles): specifies a code action -// to request for the given range. To support multi-line ranges, the range -// is defined to be between start.Start and end.End. The golden directory -// contains changed file content after the code action is applied. -// If titles are provided, they are used to filter the matching code -// action. -// -// TODO(rfindley): consolidate with codeactionedit, via a @loc2 marker that -// allows binding multi-line locations. -// -// - codeactionedit(range, kind, golden, ...titles): a shorter form of -// codeaction. 
Invokes a code action of the given kind for the given -// in-line range, and compares the resulting formatted unified *edits* -// (notably, not the full file content) with the golden directory. -// -// - codeactionerr(start, end, kind, wantError): specifies a codeaction that -// fails with an error that matches the expectation. -// -// - codelens(location, title): specifies that a codelens is expected at the -// given location, with given title. Must be used in conjunction with -// @codelenses. -// -// - codelenses(): specifies that textDocument/codeLens should be run for the -// current document, with results compared to the @codelens annotations in -// the current document. -// -// - complete(location, ...items): specifies expected completion results at -// the given location. Must be used in conjunction with @item. -// -// - diag(location, regexp): specifies an expected diagnostic matching the -// given regexp at the given location. The test runner requires -// a 1:1 correspondence between observed diagnostics and diag annotations. -// The diagnostics source and kind fields are ignored, to reduce fuss. -// -// The specified location must match the start position of the diagnostic, -// but end positions are ignored. -// -// TODO(adonovan): in the older marker framework, the annotation asserted -// two additional fields (source="compiler", kind="error"). Restore them? -// -// - def(src, dst location): perform a textDocument/definition request at -// the src location, and check the result points to the dst location. -// -// - documentLink(golden): asserts that textDocument/documentLink returns -// links as described by the golden file. -// -// - foldingrange(golden): perform a textDocument/foldingRange for the -// current document, and compare with the golden content, which is the -// original source annotated with numbered tags delimiting the resulting -// ranges (e.g. <1 kind="..."> ... ). 
-// -// - format(golden): perform a textDocument/format request for the enclosing -// file, and compare against the named golden file. If the formatting -// request succeeds, the golden file must contain the resulting formatted -// source. If the formatting request fails, the golden file must contain -// the error message. -// -// - highlight(src location, dsts ...location): makes a -// textDocument/highlight request at the given src location, which should -// highlight the provided dst locations. -// -// - hover(src, dst location, g Golden): perform a textDocument/hover at the -// src location, and checks that the result is the dst location, with hover -// content matching "hover.md" in the golden data g. -// -// - implementations(src location, want ...location): makes a -// textDocument/implementation query at the src location and -// checks that the resulting set of locations matches want. -// -// - item(label, details, kind): defines a completion item with the provided -// fields. This information is not positional, and therefore @item markers -// may occur anywhere in the source. Used in conjunction with @complete, -// snippet, or rank. -// -// TODO(rfindley): rethink whether floating @item annotations are the best -// way to specify completion results. -// -// - loc(name, location): specifies the name for a location in the source. These -// locations may be referenced by other markers. -// -// - preparerename(src, spn, placeholder): asserts that a textDocument/prepareRename -// request at the src location expands to the spn location, with given -// placeholder. If placeholder is "", this is treated as a negative -// assertion and prepareRename should return nil. -// -// - rename(location, new, golden): specifies a renaming of the -// identifier at the specified location to the new name. -// The golden directory contains the transformed files. 
-// -// - renameerr(location, new, wantError): specifies a renaming that -// fails with an error that matches the expectation. -// -// - signature(location, label, active): specifies that -// signatureHelp at the given location should match the provided string, with -// the active parameter (an index) highlighted. -// -// - suggestedfix(location, regexp, kind, golden): like diag, the location and -// regexp identify an expected diagnostic. This diagnostic must -// to have exactly one associated code action of the specified kind. -// This action is executed for its editing effects on the source files. -// Like rename, the golden directory contains the expected transformed files. -// -// - rank(location, ...completionItem): executes a textDocument/completion -// request at the given location, and verifies that each expected -// completion item occurs in the results, in the expected order. Other -// unexpected completion items may occur in the results. -// TODO(rfindley): this exists for compatibility with the old marker tests. -// Replace this with rankl, and rename. -// -// - rankl(location, ...label): like rank, but only cares about completion -// item labels. -// -// - refs(location, want ...location): executes a textDocument/references -// request at the first location and asserts that the result is the set of -// 'want' locations. The first want location must be the declaration -// (assumedly unique). -// -// - snippet(location, completionItem, snippet): executes a -// textDocument/completion request at the location, and searches for a -// result with label matching that of the provided completion item -// (TODO(rfindley): accept a label rather than a completion item). Check -// the the result snippet matches the provided snippet. -// -// - symbol(golden): makes a textDocument/documentSymbol request -// for the enclosing file, formats the response with one symbol -// per line, sorts it, and compares against the named golden file. 
-// Each line is of the form: -// -// dotted.symbol.name kind "detail" +n lines -// -// where the "+n lines" part indicates that the declaration spans -// several lines. The test otherwise makes no attempt to check -// location information. There is no point to using more than one -// @symbol marker in a given file. -// -// - workspacesymbol(query, golden): makes a workspace/symbol request for the -// given query, formats the response with one symbol per line, and compares -// against the named golden file. As workspace symbols are by definition a -// workspace-wide request, the location of the workspace symbol marker does -// not matter. Each line is of the form: -// -// location name kind -// -// # Argument conversion -// -// Marker arguments are first parsed by the go/expect package, which accepts -// the following tokens as defined by the Go spec: -// - string, int64, float64, and rune literals -// - true and false -// - nil -// - identifiers (type expect.Identifier) -// - regular expressions, denoted the two tokens re"abc" (type *regexp.Regexp) -// -// These values are passed as arguments to the corresponding parameter of the -// test function. Additional value conversions may occur for these argument -> -// parameter type pairs: -// - string->regexp: the argument is parsed as a regular expressions. -// - string->location: the argument is converted to the location of the first -// instance of the argument in the partial line preceding the note. -// - regexp->location: the argument is converted to the location of the first -// match for the argument in the partial line preceding the note. If the -// regular expression contains exactly one subgroup, the position of the -// subgroup is used rather than the position of the submatch. -// - name->location: the argument is replaced by the named location. -// - name->Golden: the argument is used to look up golden content prefixed by -// @. 
-// - {string,regexp,identifier}->wantError: a wantError type specifies -// an expected error message, either in the form of a substring that -// must be present, a regular expression that it must match, or an -// identifier (e.g. foo) such that the archive entry @foo -// exists and contains the exact expected error. -// -// # Example -// -// Here is a complete example: -// -// -- a.go -- -// package a -// -// const abc = 0x2a //@hover("b", "abc", abc),hover(" =", "abc", abc) -// -- @abc/hover.md -- -// ```go -// const abc untyped int = 42 -// ``` -// -// @hover("b", "abc", abc),hover(" =", "abc", abc) -// -// In this example, the @hover annotation tells the test runner to run the -// hoverMarker function, which has parameters: -// -// (mark marker, src, dsc protocol.Location, g *Golden). -// -// The first argument holds the test context, including fake editor with open -// files, and sandboxed directory. -// -// Argument converters translate the "b" and "abc" arguments into locations by -// interpreting each one as a regular expression and finding the location of -// its first match on the preceding portion of the line, and the abc identifier -// into a dictionary of golden content containing "hover.md". Then the -// hoverMarker method executes a textDocument/hover LSP request at the src -// position, and ensures the result spans "abc", with the markdown content from -// hover.md. (Note that the markdown content includes the expect annotation as -// the doc comment.) -// -// The next hover on the same line asserts the same result, but initiates the -// hover immediately after "abc" in the source. This tests that we find the -// preceding identifier when hovering. -// -// # Updating golden files -// -// To update golden content in the test archive, it is easier to regenerate -// content automatically rather than edit it by hand. To do this, run the -// tests with the -update flag. Only tests that actually run will be updated. 
-// -// In some cases, golden content will vary by Go version (for example, gopls -// produces different markdown at Go versions before the 1.19 go/doc update). -// By convention, the golden content in test archives should match the output -// at Go tip. Each test function can normalize golden content for older Go -// versions. -// -// Note that -update does not cause missing @diag or @loc markers to be added. -// -// # TODO -// -// This API is a work-in-progress, as we migrate existing marker tests from -// internal/lsp/tests. -// -// Remaining TODO: -// - reorganize regtest packages (and rename to just 'test'?) -// - Rename the files .txtar. -// - Provide some means by which locations in the standard library -// (or builtin.go) can be named, so that, for example, we can we -// can assert that MyError implements the built-in error type. -// - If possible, improve handling for optional arguments. Rather than have -// multiple variations of a marker, it would be nice to support a more -// flexible signature: can codeaction, codeactionedit, codeactionerr, and -// suggestedfix be consolidated? -// -// Existing marker tests (in ../testdata) to port: -// - CallHierarchy -// - SemanticTokens -// - SuggestedFixes -// - InlayHints -// - Renames -// - SelectionRanges -func RunMarkerTests(t *testing.T, dir string) { - // The marker tests must be able to run go/packages.Load. - testenv.NeedsGoPackages(t) - - tests, err := loadMarkerTests(dir) - if err != nil { - t.Fatal(err) - } - - // Opt: use a shared cache. - cache := cache.New(nil) - - for _, test := range tests { - test := test - t.Run(test.name, func(t *testing.T) { - t.Parallel() - if test.skipReason != "" { - t.Skip(test.skipReason) - } - for _, goos := range test.skipGOOS { - if runtime.GOOS == goos { - t.Skipf("skipping on %s due to -skip_goos", runtime.GOOS) - } - } - - // TODO(rfindley): it may be more useful to have full support for build - // constraints. 
- if test.minGoVersion != "" { - var go1point int - if _, err := fmt.Sscanf(test.minGoVersion, "go1.%d", &go1point); err != nil { - t.Fatalf("parsing -min_go version: %v", err) - } - testenv.NeedsGo1Point(t, go1point) - } - if test.cgo { - testenv.NeedsTool(t, "cgo") - } - config := fake.EditorConfig{ - Settings: test.settings, - CapabilitiesJSON: test.capabilities, - Env: test.env, - } - if _, ok := config.Settings["diagnosticsDelay"]; !ok { - if config.Settings == nil { - config.Settings = make(map[string]any) - } - config.Settings["diagnosticsDelay"] = "10ms" - } - // inv: config.Settings != nil - - run := &markerTestRun{ - test: test, - env: newEnv(t, cache, test.files, test.proxyFiles, test.writeGoSum, config), - settings: config.Settings, - values: make(map[expect.Identifier]any), - diags: make(map[protocol.Location][]protocol.Diagnostic), - extraNotes: make(map[protocol.DocumentURI]map[string][]*expect.Note), - } - // TODO(rfindley): make it easier to clean up the regtest environment. - defer run.env.Editor.Shutdown(context.Background()) // ignore error - defer run.env.Sandbox.Close() // ignore error - - // Open all files so that we operate consistently with LSP clients, and - // (pragmatically) so that we have a Mapper available via the fake - // editor. - // - // This also allows avoiding mutating the editor state in tests. - for file := range test.files { - run.env.OpenFile(file) - } - // Wait for the didOpen notifications to be processed, then collect - // diagnostics. 
- var diags map[string]*protocol.PublishDiagnosticsParams - run.env.AfterChange(ReadAllDiagnostics(&diags)) - for path, params := range diags { - uri := run.env.Sandbox.Workdir.URI(path) - for _, diag := range params.Diagnostics { - loc := protocol.Location{ - URI: uri, - Range: protocol.Range{ - Start: diag.Range.Start, - End: diag.Range.Start, // ignore end positions - }, - } - run.diags[loc] = append(run.diags[loc], diag) - } - } - - var markers []marker - for _, note := range test.notes { - mark := marker{run: run, note: note} - if fn, ok := valueMarkerFuncs[note.Name]; ok { - fn(mark) - } else if _, ok := actionMarkerFuncs[note.Name]; ok { - markers = append(markers, mark) // save for later - } else { - uri := mark.uri() - if run.extraNotes[uri] == nil { - run.extraNotes[uri] = make(map[string][]*expect.Note) - } - run.extraNotes[uri][note.Name] = append(run.extraNotes[uri][note.Name], note) - } - } - - // Invoke each remaining marker in the test. - for _, mark := range markers { - actionMarkerFuncs[mark.note.Name](mark) - } - - // Any remaining (un-eliminated) diagnostics are an error. - if !test.ignoreExtraDiags { - for loc, diags := range run.diags { - for _, diag := range diags { - t.Errorf("%s: unexpected diagnostic: %q", run.fmtLoc(loc), diag.Message) - } - } - } - - // TODO(rfindley): use these for whole-file marker tests. - for uri, extras := range run.extraNotes { - for name, extra := range extras { - if len(extra) > 0 { - t.Errorf("%s: %d unused %q markers", run.env.Sandbox.Workdir.URIToPath(uri), len(extra), name) - } - } - } - - formatted, err := formatTest(test) - if err != nil { - t.Errorf("formatTest: %v", err) - } else if *update { - filename := filepath.Join(dir, test.name) - if err := os.WriteFile(filename, formatted, 0644); err != nil { - t.Error(err) - } - } else { - // On go 1.19 and later, verify that the testdata has not changed. - // - // On earlier Go versions, the golden test data varies due to different - // markdown escaping. 
- // - // Only check this if the test hasn't already failed, otherwise we'd - // report duplicate mismatches of golden data. - if testenv.Go1Point() >= 19 && !t.Failed() { - // Otherwise, verify that formatted content matches. - if diff := compare.NamedText("formatted", "on-disk", string(formatted), string(test.content)); diff != "" { - t.Errorf("formatted test does not match on-disk content:\n%s", diff) - } - } - } - }) - } - - if abs, err := filepath.Abs(dir); err == nil && t.Failed() { - t.Logf("(Filenames are relative to %s.)", abs) - } -} - -// A marker holds state for the execution of a single @marker -// annotation in the source. -type marker struct { - run *markerTestRun - note *expect.Note -} - -// server returns the LSP server for the marker test run. -func (m marker) server() protocol.Server { - return m.run.env.Editor.Server -} - -// errorf reports an error with a prefix indicating the position of the marker note. -// -// It formats the error message using mark.sprintf. -func (mark marker) errorf(format string, args ...any) { - msg := mark.sprintf(format, args...) - // TODO(adonovan): consider using fmt.Fprintf(os.Stderr)+t.Fail instead of - // t.Errorf to avoid reporting uninteresting positions in the Go source of - // the driver. However, this loses the order of stderr wrt "FAIL: TestFoo" - // subtest dividers. - mark.run.env.T.Errorf("%s: %s", mark.run.fmtPos(mark.note.Pos), msg) -} - -// valueMarkerFunc returns a wrapper around a function that allows it to be -// called during the processing of value markers (e.g. @value(v, 123)) with marker -// arguments converted to function parameters. The provided function's first -// parameter must be of type 'marker', and it must return a value. -// -// Unlike action markers, which are executed for actions such as test -// assertions, value markers are all evaluated first, and each computes -// a value that is recorded by its identifier, which is the marker's first -// argument. 
These values may be referred to from an action marker by -// this identifier, e.g. @action(... , v, ...). -// -// For example, given a fn with signature -// -// func(mark marker, label, details, kind string) CompletionItem -// -// The result of valueMarkerFunc can associated with @item notes, and invoked -// as follows: -// -// //@item(FooCompletion, "Foo", "func() int", "func") -// -// The provided fn should not mutate the test environment. -func valueMarkerFunc(fn any) func(marker) { - ftype := reflect.TypeOf(fn) - if ftype.NumIn() == 0 || ftype.In(0) != markerType { - panic(fmt.Sprintf("value marker function %#v must accept marker as its first argument", ftype)) - } - if ftype.NumOut() != 1 { - panic(fmt.Sprintf("value marker function %#v must have exactly 1 result", ftype)) - } - - return func(mark marker) { - if len(mark.note.Args) == 0 || !is[expect.Identifier](mark.note.Args[0]) { - mark.errorf("first argument to a value marker function must be an identifier") - return - } - id := mark.note.Args[0].(expect.Identifier) - if alt, ok := mark.run.values[id]; ok { - mark.errorf("%s already declared as %T", id, alt) - return - } - args := append([]any{mark}, mark.note.Args[1:]...) - argValues, err := convertArgs(mark, ftype, args) - if err != nil { - mark.errorf("converting args: %v", err) - return - } - results := reflect.ValueOf(fn).Call(argValues) - mark.run.values[id] = results[0].Interface() - } -} - -// actionMarkerFunc returns a wrapper around a function that allows it to be -// called during the processing of action markers (e.g. @action("abc", 123)) -// with marker arguments converted to function parameters. The provided -// function's first parameter must be of type 'marker', and it must not return -// any values. -// -// The provided fn should not mutate the test environment. 
-func actionMarkerFunc(fn any) func(marker) { - ftype := reflect.TypeOf(fn) - if ftype.NumIn() == 0 || ftype.In(0) != markerType { - panic(fmt.Sprintf("action marker function %#v must accept marker as its first argument", ftype)) - } - if ftype.NumOut() != 0 { - panic(fmt.Sprintf("action marker function %#v cannot have results", ftype)) - } - - return func(mark marker) { - args := append([]any{mark}, mark.note.Args...) - argValues, err := convertArgs(mark, ftype, args) - if err != nil { - mark.errorf("converting args: %v", err) - return - } - reflect.ValueOf(fn).Call(argValues) - } -} - -func convertArgs(mark marker, ftype reflect.Type, args []any) ([]reflect.Value, error) { - var ( - argValues []reflect.Value - pnext int // next param index - p reflect.Type // current param - ) - for i, arg := range args { - if i < ftype.NumIn() { - p = ftype.In(pnext) - pnext++ - } else if p == nil || !ftype.IsVariadic() { - // The actual number of arguments expected by the mark varies, depending - // on whether this is a value marker or an action marker. - // - // Since this error indicates a bug, probably OK to have an imprecise - // error message here. - return nil, fmt.Errorf("too many arguments to %s", mark.note.Name) - } - elemType := p - if ftype.IsVariadic() && pnext == ftype.NumIn() { - elemType = p.Elem() - } - var v reflect.Value - if id, ok := arg.(expect.Identifier); ok && id == "_" { - v = reflect.Zero(elemType) - } else { - a, err := convert(mark, arg, elemType) - if err != nil { - return nil, err - } - v = reflect.ValueOf(a) - } - argValues = append(argValues, v) - } - // Check that we have sufficient arguments. If the function is variadic, we - // do not need arguments for the final parameter. - if pnext < ftype.NumIn()-1 || pnext == ftype.NumIn()-1 && !ftype.IsVariadic() { - // Same comment as above: OK to be vague here. - return nil, fmt.Errorf("not enough arguments to %s", mark.note.Name) - } - return argValues, nil -} - -// is reports whether arg is a T. 
-func is[T any](arg any) bool { - _, ok := arg.(T) - return ok -} - -// Supported value marker functions. See [valueMarkerFunc] for more details. -var valueMarkerFuncs = map[string]func(marker){ - "loc": valueMarkerFunc(locMarker), - "item": valueMarkerFunc(completionItemMarker), -} - -// Supported action marker functions. See [actionMarkerFunc] for more details. -var actionMarkerFuncs = map[string]func(marker){ - "acceptcompletion": actionMarkerFunc(acceptCompletionMarker), - "codeaction": actionMarkerFunc(codeActionMarker), - "codeactionedit": actionMarkerFunc(codeActionEditMarker), - "codeactionerr": actionMarkerFunc(codeActionErrMarker), - "codelenses": actionMarkerFunc(codeLensesMarker), - "complete": actionMarkerFunc(completeMarker), - "def": actionMarkerFunc(defMarker), - "diag": actionMarkerFunc(diagMarker), - "documentlink": actionMarkerFunc(documentLinkMarker), - "foldingrange": actionMarkerFunc(foldingRangeMarker), - "format": actionMarkerFunc(formatMarker), - "highlight": actionMarkerFunc(highlightMarker), - "hover": actionMarkerFunc(hoverMarker), - "implementation": actionMarkerFunc(implementationMarker), - "preparerename": actionMarkerFunc(prepareRenameMarker), - "rank": actionMarkerFunc(rankMarker), - "rankl": actionMarkerFunc(ranklMarker), - "refs": actionMarkerFunc(refsMarker), - "rename": actionMarkerFunc(renameMarker), - "renameerr": actionMarkerFunc(renameErrMarker), - "signature": actionMarkerFunc(signatureMarker), - "snippet": actionMarkerFunc(snippetMarker), - "suggestedfix": actionMarkerFunc(suggestedfixMarker), - "symbol": actionMarkerFunc(symbolMarker), - "typedef": actionMarkerFunc(typedefMarker), - "workspacesymbol": actionMarkerFunc(workspaceSymbolMarker), -} - -// markerTest holds all the test data extracted from a test txtar archive. -// -// See the documentation for RunMarkerTests for more information on the archive -// format. 
-type markerTest struct { - name string // relative path to the txtar file in the testdata dir - fset *token.FileSet // fileset used for parsing notes - content []byte // raw test content - archive *txtar.Archive // original test archive - settings map[string]any // gopls settings - capabilities []byte // content of capabilities.json file - env map[string]string // editor environment - proxyFiles map[string][]byte // proxy content - files map[string][]byte // data files from the archive (excluding special files) - notes []*expect.Note // extracted notes from data files - golden map[expect.Identifier]*Golden // extracted golden content, by identifier name - - skipReason string // the skip reason extracted from the "skip" archive file - flags []string // flags extracted from the special "flags" archive file. - - // Parsed flags values. - minGoVersion string - cgo bool - writeGoSum []string // comma separated dirs to write go sum for - skipGOOS []string // comma separated GOOS values to skip - ignoreExtraDiags bool - filterBuiltins bool - filterKeywords bool -} - -// flagSet returns the flagset used for parsing the special "flags" file in the -// test archive. 
-func (t *markerTest) flagSet() *flag.FlagSet { - flags := flag.NewFlagSet(t.name, flag.ContinueOnError) - flags.StringVar(&t.minGoVersion, "min_go", "", "if set, the minimum go1.X version required for this test") - flags.BoolVar(&t.cgo, "cgo", false, "if set, requires cgo (both the cgo tool and CGO_ENABLED=1)") - flags.Var((*stringListValue)(&t.writeGoSum), "write_sumfile", "if set, write the sumfile for these directories") - flags.Var((*stringListValue)(&t.skipGOOS), "skip_goos", "if set, skip this test on these GOOS values") - flags.BoolVar(&t.ignoreExtraDiags, "ignore_extra_diags", false, "if set, suppress errors for unmatched diagnostics") - flags.BoolVar(&t.filterBuiltins, "filter_builtins", true, "if set, filter builtins from completion results") - flags.BoolVar(&t.filterKeywords, "filter_keywords", true, "if set, filter keywords from completion results") - return flags -} - -// stringListValue implements flag.Value. -type stringListValue []string - -func (l *stringListValue) Set(s string) error { - if s != "" { - for _, d := range strings.Split(s, ",") { - *l = append(*l, strings.TrimSpace(d)) - } - } - return nil -} - -func (l stringListValue) String() string { - return strings.Join([]string(l), ",") -} - -func (t *markerTest) getGolden(id expect.Identifier) *Golden { - golden, ok := t.golden[id] - // If there was no golden content for this identifier, we must create one - // to handle the case where -update is set: we need a place to store - // the updated content. - if !ok { - golden = &Golden{id: id} - - // TODO(adonovan): the separation of markerTest (the - // static aspects) from markerTestRun (the dynamic - // ones) is evidently bogus because here we modify - // markerTest during execution. Let's merge the two. - t.golden[id] = golden - } - return golden -} - -// Golden holds extracted golden content for a single @ prefix. -// -// When -update is set, golden captures the updated golden contents for later -// writing. 
-type Golden struct { - id expect.Identifier - data map[string][]byte // key "" => @id itself - updated map[string][]byte -} - -// Get returns golden content for the given name, which corresponds to the -// relative path following the golden prefix @/. For example, to access -// the content of @foo/path/to/result.json from the Golden associated with -// @foo, name should be "path/to/result.json". -// -// If -update is set, the given update function will be called to get the -// updated golden content that should be written back to testdata. -// -// Marker functions must use this method instead of accessing data entries -// directly otherwise the -update operation will delete those entries. -// -// TODO(rfindley): rethink the logic here. We may want to separate Get and Set, -// and not delete golden content that isn't set. -func (g *Golden) Get(t testing.TB, name string, updated []byte) ([]byte, bool) { - if existing, ok := g.updated[name]; ok { - // Multiple tests may reference the same golden data, but if they do they - // must agree about its expected content. - if diff := compare.NamedText("existing", "updated", string(existing), string(updated)); diff != "" { - t.Errorf("conflicting updates for golden data %s/%s:\n%s", g.id, name, diff) - } - } - if g.updated == nil { - g.updated = make(map[string][]byte) - } - g.updated[name] = updated - if *update { - return updated, true - } - - res, ok := g.data[name] - return res, ok -} - -// loadMarkerTests walks the given dir looking for .txt files, which it -// interprets as a txtar archive. -// -// See the documentation for RunMarkerTests for more details on the test data -// archive. 
-func loadMarkerTests(dir string) ([]*markerTest, error) { - var tests []*markerTest - err := filepath.WalkDir(dir, func(path string, _ fs.DirEntry, err error) error { - if strings.HasSuffix(path, ".txt") { - content, err := os.ReadFile(path) - if err != nil { - return err - } - - name := strings.TrimPrefix(path, dir+string(filepath.Separator)) - test, err := loadMarkerTest(name, content) - if err != nil { - return fmt.Errorf("%s: %v", path, err) - } - tests = append(tests, test) - } - return err - }) - return tests, err -} - -func loadMarkerTest(name string, content []byte) (*markerTest, error) { - archive := txtar.Parse(content) - if len(archive.Files) == 0 { - return nil, fmt.Errorf("txtar file has no '-- filename --' sections") - } - if bytes.Contains(archive.Comment, []byte("\n-- ")) { - // This check is conservative, but the comment is only a comment. - return nil, fmt.Errorf("ill-formed '-- filename --' header in comment") - } - test := &markerTest{ - name: name, - fset: token.NewFileSet(), - content: content, - archive: archive, - files: make(map[string][]byte), - golden: make(map[expect.Identifier]*Golden), - } - for _, file := range archive.Files { - switch { - case file.Name == "skip": - reason := strings.ReplaceAll(string(file.Data), "\n", " ") - reason = strings.TrimSpace(reason) - test.skipReason = reason - - case file.Name == "flags": - test.flags = strings.Fields(string(file.Data)) - - case file.Name == "settings.json": - if err := json.Unmarshal(file.Data, &test.settings); err != nil { - return nil, err - } - - case file.Name == "capabilities.json": - test.capabilities = file.Data // lazily unmarshalled by the editor - - case file.Name == "env": - test.env = make(map[string]string) - fields := strings.Fields(string(file.Data)) - for _, field := range fields { - key, value, ok := strings.Cut(field, "=") - if !ok { - return nil, fmt.Errorf("env vars must be formatted as var=value, got %q", field) - } - test.env[key] = value - } - - case 
strings.HasPrefix(file.Name, "@"): // golden content - idstring, name, _ := strings.Cut(file.Name[len("@"):], "/") - id := expect.Identifier(idstring) - // Note that a file.Name of just "@id" gives (id, name) = ("id", ""). - if _, ok := test.golden[id]; !ok { - test.golden[id] = &Golden{ - id: id, - data: make(map[string][]byte), - } - } - test.golden[id].data[name] = file.Data - - case strings.HasPrefix(file.Name, "proxy/"): - name := file.Name[len("proxy/"):] - if test.proxyFiles == nil { - test.proxyFiles = make(map[string][]byte) - } - test.proxyFiles[name] = file.Data - - default: // ordinary file content - notes, err := expect.Parse(test.fset, file.Name, file.Data) - if err != nil { - return nil, fmt.Errorf("parsing notes in %q: %v", file.Name, err) - } - - // Reject common misspelling: "// @mark". - // TODO(adonovan): permit "// @" within a string. Detect multiple spaces. - if i := bytes.Index(file.Data, []byte("// @")); i >= 0 { - line := 1 + bytes.Count(file.Data[:i], []byte("\n")) - return nil, fmt.Errorf("%s:%d: unwanted space before marker (// @)", file.Name, line) - } - - test.notes = append(test.notes, notes...) - test.files[file.Name] = file.Data - } - - // Print a warning if we see what looks like "-- filename --" - // without the second "--". It's not necessarily wrong, - // but it should almost never appear in our test inputs. - if bytes.Contains(file.Data, []byte("\n-- ")) { - log.Printf("ill-formed '-- filename --' header in %s?", file.Name) - } - } - - // Parse flags after loading files, as they may have been set by the "flags" - // file. - if err := test.flagSet().Parse(test.flags); err != nil { - return nil, fmt.Errorf("parsing flags: %v", err) - } - - return test, nil -} - -// formatTest formats the test as a txtar archive. 
-func formatTest(test *markerTest) ([]byte, error) { - arch := &txtar.Archive{ - Comment: test.archive.Comment, - } - - updatedGolden := make(map[string][]byte) - for id, g := range test.golden { - for name, data := range g.updated { - filename := "@" + path.Join(string(id), name) // name may be "" - updatedGolden[filename] = data - } - } - - // Preserve the original ordering of archive files. - for _, file := range test.archive.Files { - switch file.Name { - // Preserve configuration files exactly as they were. They must have parsed - // if we got this far. - case "skip", "flags", "settings.json", "capabilities.json", "env": - arch.Files = append(arch.Files, file) - default: - if _, ok := test.files[file.Name]; ok { // ordinary file - arch.Files = append(arch.Files, file) - } else if strings.HasPrefix(file.Name, "proxy/") { // proxy file - arch.Files = append(arch.Files, file) - } else if data, ok := updatedGolden[file.Name]; ok { // golden file - arch.Files = append(arch.Files, txtar.File{Name: file.Name, Data: data}) - delete(updatedGolden, file.Name) - } - } - } - - // ...followed by any new golden files. - var newGoldenFiles []txtar.File - for filename, data := range updatedGolden { - // TODO(rfindley): it looks like this implicitly removes trailing newlines - // from golden content. Is there any way to fix that? Perhaps we should - // just make the diff tolerant of missing newlines? - newGoldenFiles = append(newGoldenFiles, txtar.File{Name: filename, Data: data}) - } - // Sort new golden files lexically. - sort.Slice(newGoldenFiles, func(i, j int) bool { - return newGoldenFiles[i].Name < newGoldenFiles[j].Name - }) - arch.Files = append(arch.Files, newGoldenFiles...) - - return txtar.Format(arch), nil -} - -// newEnv creates a new environment for a marker test. -// -// TODO(rfindley): simplify and refactor the construction of testing -// environments across regtests, marker tests, and benchmarks. 
-func newEnv(t *testing.T, cache *cache.Cache, files, proxyFiles map[string][]byte, writeGoSum []string, config fake.EditorConfig) *Env { - sandbox, err := fake.NewSandbox(&fake.SandboxConfig{ - RootDir: t.TempDir(), - Files: files, - ProxyFiles: proxyFiles, - }) - if err != nil { - t.Fatal(err) - } - - for _, dir := range writeGoSum { - if err := sandbox.RunGoCommand(context.Background(), dir, "list", []string{"-mod=mod", "..."}, []string{"GOWORK=off"}, true); err != nil { - t.Fatal(err) - } - } - - // Put a debug instance in the context to prevent logging to stderr. - // See associated TODO in runner.go: we should revisit this pattern. - ctx := context.Background() - ctx = debug.WithInstance(ctx, "", "off") - - awaiter := NewAwaiter(sandbox.Workdir) - ss := lsprpc.NewStreamServer(cache, false, hooks.Options) - server := servertest.NewPipeServer(ss, jsonrpc2.NewRawStream) - const skipApplyEdits = true // capture edits but don't apply them - editor, err := fake.NewEditor(sandbox, config).Connect(ctx, server, awaiter.Hooks(), skipApplyEdits) - if err != nil { - sandbox.Close() // ignore error - t.Fatal(err) - } - if err := awaiter.Await(ctx, InitialWorkspaceLoad); err != nil { - sandbox.Close() // ignore error - t.Fatal(err) - } - return &Env{ - T: t, - Ctx: ctx, - Editor: editor, - Sandbox: sandbox, - Awaiter: awaiter, - } -} - -// A markerTestRun holds the state of one run of a marker test archive. -type markerTestRun struct { - test *markerTest - env *Env - settings map[string]any - - // Collected information. - // Each @diag/@suggestedfix marker eliminates an entry from diags. - values map[expect.Identifier]any - diags map[protocol.Location][]protocol.Diagnostic // diagnostics by position; location end == start - - // Notes that weren't associated with a top-level marker func. They may be - // consumed by another marker (e.g. @codelenses collects @codelens markers). - // Any notes that aren't consumed are flagged as an error. 
- extraNotes map[protocol.DocumentURI]map[string][]*expect.Note -} - -// sprintf returns a formatted string after applying pre-processing to -// arguments of the following types: -// - token.Pos: formatted using (*markerTestRun).fmtPos -// - protocol.Location: formatted using (*markerTestRun).fmtLoc -func (c *marker) sprintf(format string, args ...any) string { - if false { - _ = fmt.Sprintf(format, args...) // enable vet printf checker - } - var args2 []any - for _, arg := range args { - switch arg := arg.(type) { - case token.Pos: - args2 = append(args2, c.run.fmtPos(arg)) - case protocol.Location: - args2 = append(args2, c.run.fmtLoc(arg)) - default: - args2 = append(args2, arg) - } - } - return fmt.Sprintf(format, args2...) -} - -// uri returns the URI of the file containing the marker. -func (mark marker) uri() protocol.DocumentURI { - return mark.run.env.Sandbox.Workdir.URI(mark.run.test.fset.File(mark.note.Pos).Name()) -} - -// path returns the relative path to the file containing the marker. -func (mark marker) path() string { - return mark.run.env.Sandbox.Workdir.RelPath(mark.run.test.fset.File(mark.note.Pos).Name()) -} - -// fmtLoc formats the given pos in the context of the test, using -// archive-relative paths for files and including the line number in the full -// archive file. -func (run *markerTestRun) fmtPos(pos token.Pos) string { - file := run.test.fset.File(pos) - if file == nil { - run.env.T.Errorf("position %d not in test fileset", pos) - return "" - } - m, err := run.env.Editor.Mapper(file.Name()) - if err != nil { - run.env.T.Errorf("%s", err) - return "" - } - loc, err := m.PosLocation(file, pos, pos) - if err != nil { - run.env.T.Errorf("Mapper(%s).PosLocation failed: %v", file.Name(), err) - } - return run.fmtLoc(loc) -} - -// fmtLoc formats the given location in the context of the test, using -// archive-relative paths for files and including the line number in the full -// archive file. 
-func (run *markerTestRun) fmtLoc(loc protocol.Location) string { - formatted := run.fmtLocDetails(loc, true) - if formatted == "" { - run.env.T.Errorf("unable to find %s in test archive", loc) - return "" - } - return formatted -} - -// See fmtLoc. If includeTxtPos is not set, the position in the full archive -// file is omitted. -// -// If the location cannot be found within the archive, fmtLocDetails returns "". -func (run *markerTestRun) fmtLocDetails(loc protocol.Location, includeTxtPos bool) string { - if loc == (protocol.Location{}) { - return "" - } - lines := bytes.Count(run.test.archive.Comment, []byte("\n")) - var name string - for _, f := range run.test.archive.Files { - lines++ // -- separator -- - uri := run.env.Sandbox.Workdir.URI(f.Name) - if uri == loc.URI { - name = f.Name - break - } - lines += bytes.Count(f.Data, []byte("\n")) - } - if name == "" { - return "" - } - m, err := run.env.Editor.Mapper(name) - if err != nil { - run.env.T.Errorf("internal error: %v", err) - return "" - } - s, err := m.LocationSpan(loc) - if err != nil { - run.env.T.Errorf("error formatting location %s: %v", loc, err) - return "" - } - - innerSpan := fmt.Sprintf("%d:%d", s.Start().Line(), s.Start().Column()) // relative to the embedded file - outerSpan := fmt.Sprintf("%d:%d", lines+s.Start().Line(), s.Start().Column()) // relative to the archive file - if s.Start() != s.End() { - if s.End().Line() == s.Start().Line() { - innerSpan += fmt.Sprintf("-%d", s.End().Column()) - outerSpan += fmt.Sprintf("-%d", s.End().Column()) - } else { - innerSpan += fmt.Sprintf("-%d:%d", s.End().Line(), s.End().Column()) - innerSpan += fmt.Sprintf("-%d:%d", lines+s.End().Line(), s.End().Column()) - } - } - - if includeTxtPos { - return fmt.Sprintf("%s:%s (%s:%s)", name, innerSpan, run.test.name, outerSpan) - } else { - return fmt.Sprintf("%s:%s", name, innerSpan) - } -} - -// ---- converters ---- - -// converter is the signature of argument converters. 
-// A converter should return an error rather than calling marker.errorf(). -type converter func(marker, any) (any, error) - -// Types with special conversions. -var ( - goldenType = reflect.TypeOf(&Golden{}) - locationType = reflect.TypeOf(protocol.Location{}) - markerType = reflect.TypeOf(marker{}) - regexpType = reflect.TypeOf(®exp.Regexp{}) - wantErrorType = reflect.TypeOf(wantError{}) -) - -func convert(mark marker, arg any, paramType reflect.Type) (any, error) { - if paramType == goldenType { - id, ok := arg.(expect.Identifier) - if !ok { - return nil, fmt.Errorf("invalid input type %T: golden key must be an identifier", arg) - } - return mark.run.test.getGolden(id), nil - } - if id, ok := arg.(expect.Identifier); ok { - if arg, ok := mark.run.values[id]; ok { - if !reflect.TypeOf(arg).AssignableTo(paramType) { - return nil, fmt.Errorf("cannot convert %v (%T) to %s", arg, arg, paramType) - } - return arg, nil - } - } - if reflect.TypeOf(arg).AssignableTo(paramType) { - return arg, nil // no conversion required - } - switch paramType { - case locationType: - return convertLocation(mark, arg) - case wantErrorType: - return convertWantError(mark, arg) - default: - return nil, fmt.Errorf("cannot convert %v (%T) to %s", arg, arg, paramType) - } -} - -// convertLocation converts a string or regexp argument into the protocol -// location corresponding to the first position of the string (or first match -// of the regexp) in the line preceding the note. 
-func convertLocation(mark marker, arg any) (protocol.Location, error) { - switch arg := arg.(type) { - case string: - startOff, preceding, m, err := linePreceding(mark.run, mark.note.Pos) - if err != nil { - return protocol.Location{}, err - } - idx := bytes.Index(preceding, []byte(arg)) - if idx < 0 { - return protocol.Location{}, fmt.Errorf("substring %q not found in %q", arg, preceding) - } - off := startOff + idx - return m.OffsetLocation(off, off+len(arg)) - case *regexp.Regexp: - return findRegexpInLine(mark.run, mark.note.Pos, arg) - default: - return protocol.Location{}, fmt.Errorf("cannot convert argument type %T to location (must be a string to match the preceding line)", arg) - } -} - -// findRegexpInLine searches the partial line preceding pos for a match for the -// regular expression re, returning a location spanning the first match. If re -// contains exactly one subgroup, the position of this subgroup match is -// returned rather than the position of the full match. -func findRegexpInLine(run *markerTestRun, pos token.Pos, re *regexp.Regexp) (protocol.Location, error) { - startOff, preceding, m, err := linePreceding(run, pos) - if err != nil { - return protocol.Location{}, err - } - - matches := re.FindSubmatchIndex(preceding) - if len(matches) == 0 { - return protocol.Location{}, fmt.Errorf("no match for regexp %q found in %q", re, string(preceding)) - } - var start, end int - switch len(matches) { - case 2: - // no subgroups: return the range of the regexp expression - start, end = matches[0], matches[1] - case 4: - // one subgroup: return its range - start, end = matches[2], matches[3] - default: - return protocol.Location{}, fmt.Errorf("invalid location regexp %q: expect either 0 or 1 subgroups, got %d", re, len(matches)/2-1) - } - - return m.OffsetLocation(start+startOff, end+startOff) -} - -func linePreceding(run *markerTestRun, pos token.Pos) (int, []byte, *protocol.Mapper, error) { - file := run.test.fset.File(pos) - posn := 
safetoken.Position(file, pos) - lineStart := file.LineStart(posn.Line) - startOff, endOff, err := safetoken.Offsets(file, lineStart, pos) - if err != nil { - return 0, nil, nil, err - } - m, err := run.env.Editor.Mapper(file.Name()) - if err != nil { - return 0, nil, nil, err - } - return startOff, m.Content[startOff:endOff], m, nil -} - -// convertWantError converts a string, regexp, or identifier -// argument into a wantError. The string is a substring of the -// expected error, the regexp is a pattern than matches the expected -// error, and the identifier is a golden file containing the expected -// error. -func convertWantError(mark marker, arg any) (wantError, error) { - switch arg := arg.(type) { - case string: - return wantError{substr: arg}, nil - case *regexp.Regexp: - return wantError{pattern: arg}, nil - case expect.Identifier: - golden := mark.run.test.getGolden(arg) - return wantError{golden: golden}, nil - default: - return wantError{}, fmt.Errorf("cannot convert %T to wantError (want: string, regexp, or identifier)", arg) - } -} - -// A wantError represents an expectation of a specific error message. -// -// It may be indicated in one of three ways, in 'expect' notation: -// - an identifier 'foo', to compare with the contents of the golden section @foo; -// - a pattern expression re"ab.*c", to match against a regular expression; -// - a string literal "abc", to check for a substring. -type wantError struct { - golden *Golden - pattern *regexp.Regexp - substr string -} - -func (we wantError) String() string { - if we.golden != nil { - return fmt.Sprintf("error from @%s entry", we.golden.id) - } else if we.pattern != nil { - return fmt.Sprintf("error matching %#q", we.pattern) - } else { - return fmt.Sprintf("error with substring %q", we.substr) - } -} - -// check asserts that 'err' matches the wantError's expectations. 
-func (we wantError) check(mark marker, err error) { - if err == nil { - mark.errorf("@%s succeeded unexpectedly, want %v", mark.note.Name, we) - return - } - got := err.Error() - - if we.golden != nil { - // Error message must match @id golden file. - wantBytes, ok := we.golden.Get(mark.run.env.T, "", []byte(got)) - if !ok { - mark.errorf("@%s: missing @%s entry", mark.note.Name, we.golden.id) - return - } - want := strings.TrimSpace(string(wantBytes)) - if got != want { - // (ignore leading/trailing space) - mark.errorf("@%s failed with wrong error: got:\n%s\nwant:\n%s\ndiff:\n%s", - mark.note.Name, got, want, compare.Text(want, got)) - } - - } else if we.pattern != nil { - // Error message must match regular expression pattern. - if !we.pattern.MatchString(got) { - mark.errorf("got error %q, does not match pattern %#q", got, we.pattern) - } - - } else if !strings.Contains(got, we.substr) { - // Error message must contain expected substring. - mark.errorf("got error %q, want substring %q", got, we.substr) - } -} - -// checkChangedFiles compares the files changed by an operation with their expected (golden) state. -func checkChangedFiles(mark marker, changed map[string][]byte, golden *Golden) { - // Check changed files match expectations. - for filename, got := range changed { - if want, ok := golden.Get(mark.run.env.T, filename, got); !ok { - mark.errorf("%s: unexpected change to file %s; got:\n%s", - mark.note.Name, filename, got) - - } else if string(got) != string(want) { - mark.errorf("%s: wrong file content for %s: got:\n%s\nwant:\n%s\ndiff:\n%s", - mark.note.Name, filename, got, want, - compare.Bytes(want, got)) - } - } - - // Report unmet expectations. 
- for filename := range golden.data { - if _, ok := changed[filename]; !ok { - want, _ := golden.Get(mark.run.env.T, filename, nil) - mark.errorf("%s: missing change to file %s; want:\n%s", - mark.note.Name, filename, want) - } - } -} - -// checkDiffs computes unified diffs for each changed file, and compares with -// the diff content stored in the given golden directory. -func checkDiffs(mark marker, changed map[string][]byte, golden *Golden) { - diffs := make(map[string]string) - for name, after := range changed { - before := mark.run.env.FileContent(name) - edits := diff.Strings(before, string(after)) - d, err := diff.ToUnified("before", "after", before, edits, 0) - if err != nil { - // Can't happen: edits are consistent. - log.Fatalf("internal error in diff.ToUnified: %v", err) - } - diffs[name] = d - } - // Check changed files match expectations. - for filename, got := range diffs { - if want, ok := golden.Get(mark.run.env.T, filename, []byte(got)); !ok { - mark.errorf("%s: unexpected change to file %s; got diff:\n%s", - mark.note.Name, filename, got) - - } else if got != string(want) { - mark.errorf("%s: wrong diff for %s:\n\ngot:\n%s\n\nwant:\n%s\n", - mark.note.Name, filename, got, want) - } - } - // Report unmet expectations. - for filename := range golden.data { - if _, ok := changed[filename]; !ok { - want, _ := golden.Get(mark.run.env.T, filename, nil) - mark.errorf("%s: missing change to file %s; want:\n%s", - mark.note.Name, filename, want) - } - } -} - -// ---- marker functions ---- - -// TODO(rfindley): consolidate documentation of these markers. They are already -// documented above, so much of the documentation here is redundant. - -// completionItem is a simplified summary of a completion item. 
-type completionItem struct { - Label, Detail, Kind, Documentation string -} - -func completionItemMarker(mark marker, label string, other ...string) completionItem { - if len(other) > 3 { - mark.errorf("too many arguments to @item: expect at most 4") - } - item := completionItem{ - Label: label, - } - if len(other) > 0 { - item.Detail = other[0] - } - if len(other) > 1 { - item.Kind = other[1] - } - if len(other) > 2 { - item.Documentation = other[2] - } - return item -} - -func rankMarker(mark marker, src protocol.Location, items ...completionItem) { - list := mark.run.env.Completion(src) - var got []string - // Collect results that are present in items, preserving their order. - for _, g := range list.Items { - for _, w := range items { - if g.Label == w.Label { - got = append(got, g.Label) - break - } - } - } - var want []string - for _, w := range items { - want = append(want, w.Label) - } - if diff := cmp.Diff(want, got); diff != "" { - mark.errorf("completion rankings do not match (-want +got):\n%s", diff) - } -} - -func ranklMarker(mark marker, src protocol.Location, labels ...string) { - list := mark.run.env.Completion(src) - var got []string - // Collect results that are present in items, preserving their order. 
- for _, g := range list.Items { - for _, label := range labels { - if g.Label == label { - got = append(got, g.Label) - break - } - } - } - if diff := cmp.Diff(labels, got); diff != "" { - mark.errorf("completion rankings do not match (-want +got):\n%s", diff) - } -} - -func snippetMarker(mark marker, src protocol.Location, item completionItem, want string) { - list := mark.run.env.Completion(src) - var ( - found bool - got string - all []string // for errors - ) - items := filterBuiltinsAndKeywords(mark, list.Items) - for _, i := range items { - all = append(all, i.Label) - if i.Label == item.Label { - found = true - if i.TextEdit != nil { - got = i.TextEdit.NewText - } - break - } - } - if !found { - mark.errorf("no completion item found matching %s (got: %v)", item.Label, all) - return - } - if got != want { - mark.errorf("snippets do not match: got %q, want %q", got, want) - } -} - -// completeMarker implements the @complete marker, running -// textDocument/completion at the given src location and asserting that the -// results match the expected results. -func completeMarker(mark marker, src protocol.Location, want ...completionItem) { - list := mark.run.env.Completion(src) - items := filterBuiltinsAndKeywords(mark, list.Items) - var got []completionItem - for i, item := range items { - simplified := completionItem{ - Label: item.Label, - Detail: item.Detail, - Kind: fmt.Sprint(item.Kind), - } - if item.Documentation != nil { - switch v := item.Documentation.Value.(type) { - case string: - simplified.Documentation = v - case protocol.MarkupContent: - simplified.Documentation = strings.TrimSpace(v.Value) // trim newlines - } - } - // Support short-hand notation: if Detail, Kind, or Documentation are omitted from the - // item, don't match them. 
- if i < len(want) { - if want[i].Detail == "" { - simplified.Detail = "" - } - if want[i].Kind == "" { - simplified.Kind = "" - } - if want[i].Documentation == "" { - simplified.Documentation = "" - } - } - got = append(got, simplified) - } - if len(want) == 0 { - want = nil // got is nil if empty - } - if diff := cmp.Diff(want, got); diff != "" { - mark.errorf("Completion(...) returned unexpect results (-want +got):\n%s", diff) - } -} - -// filterBuiltinsAndKeywords filters out builtins and keywords from completion -// results. -// -// It over-approximates, and does not detect if builtins are shadowed. -func filterBuiltinsAndKeywords(mark marker, items []protocol.CompletionItem) []protocol.CompletionItem { - keep := 0 - for _, item := range items { - if mark.run.test.filterKeywords && item.Kind == protocol.KeywordCompletion { - continue - } - if mark.run.test.filterBuiltins && types.Universe.Lookup(item.Label) != nil { - continue - } - items[keep] = item - keep++ - } - return items[:keep] -} - -// acceptCompletionMarker implements the @acceptCompletion marker, running -// textDocument/completion at the given src location and accepting the -// candidate with the given label. The resulting source must match the provided -// golden content. -func acceptCompletionMarker(mark marker, src protocol.Location, label string, golden *Golden) { - list := mark.run.env.Completion(src) - var selected *protocol.CompletionItem - for _, item := range list.Items { - if item.Label == label { - selected = &item - break - } - } - if selected == nil { - mark.errorf("Completion(...) 
did not return an item labeled %q", label) - return - } - filename := mark.path() - mapper, err := mark.run.env.Editor.Mapper(filename) - if err != nil { - mark.errorf("Editor.Mapper(%s) failed: %v", filename, err) - return - } - - patched, _, err := source.ApplyProtocolEdits(mapper, append([]protocol.TextEdit{ - *selected.TextEdit, - }, selected.AdditionalTextEdits...)) - - if err != nil { - mark.errorf("ApplyProtocolEdits failed: %v", err) - return - } - changes := map[string][]byte{filename: patched} - // Check the file state. - checkChangedFiles(mark, changes, golden) -} - -// defMarker implements the @def marker, running textDocument/definition at -// the given src location and asserting that there is exactly one resulting -// location, matching dst. -// -// TODO(rfindley): support a variadic destination set. -func defMarker(mark marker, src, dst protocol.Location) { - got := mark.run.env.GoToDefinition(src) - if got != dst { - mark.errorf("definition location does not match:\n\tgot: %s\n\twant %s", - mark.run.fmtLoc(got), mark.run.fmtLoc(dst)) - } -} - -func typedefMarker(mark marker, src, dst protocol.Location) { - got := mark.run.env.TypeDefinition(src) - if got != dst { - mark.errorf("type definition location does not match:\n\tgot: %s\n\twant %s", - mark.run.fmtLoc(got), mark.run.fmtLoc(dst)) - } -} - -func foldingRangeMarker(mark marker, g *Golden) { - env := mark.run.env - ranges, err := mark.server().FoldingRange(env.Ctx, &protocol.FoldingRangeParams{ - TextDocument: protocol.TextDocumentIdentifier{URI: mark.uri()}, - }) - if err != nil { - mark.errorf("foldingRange failed: %v", err) - return - } - var edits []protocol.TextEdit - insert := func(line, char uint32, text string) { - pos := protocol.Position{Line: line, Character: char} - edits = append(edits, protocol.TextEdit{ - Range: protocol.Range{ - Start: pos, - End: pos, - }, - NewText: text, - }) - } - for i, rng := range ranges { - insert(rng.StartLine, rng.StartCharacter, fmt.Sprintf("<%d 
kind=%q>", i, rng.Kind)) - insert(rng.EndLine, rng.EndCharacter, fmt.Sprintf("", i)) - } - filename := mark.path() - mapper, err := env.Editor.Mapper(filename) - if err != nil { - mark.errorf("Editor.Mapper(%s) failed: %v", filename, err) - return - } - got, _, err := source.ApplyProtocolEdits(mapper, edits) - if err != nil { - mark.errorf("ApplyProtocolEdits failed: %v", err) - return - } - want, _ := g.Get(mark.run.env.T, "", got) - if diff := compare.Bytes(want, got); diff != "" { - mark.errorf("foldingRange mismatch:\n%s", diff) - } -} - -// formatMarker implements the @format marker. -func formatMarker(mark marker, golden *Golden) { - edits, err := mark.server().Formatting(mark.run.env.Ctx, &protocol.DocumentFormattingParams{ - TextDocument: protocol.TextDocumentIdentifier{URI: mark.uri()}, - }) - var got []byte - if err != nil { - got = []byte(err.Error() + "\n") // all golden content is newline terminated - } else { - env := mark.run.env - filename := mark.path() - mapper, err := env.Editor.Mapper(filename) - if err != nil { - mark.errorf("Editor.Mapper(%s) failed: %v", filename, err) - } - - got, _, err = source.ApplyProtocolEdits(mapper, edits) - if err != nil { - mark.errorf("ApplyProtocolEdits failed: %v", err) - return - } - } - - compareGolden(mark, "format", got, golden) -} - -func highlightMarker(mark marker, src protocol.Location, dsts ...protocol.Location) { - highlights := mark.run.env.DocumentHighlight(src) - var got []protocol.Range - for _, h := range highlights { - got = append(got, h.Range) - } - - var want []protocol.Range - for _, d := range dsts { - want = append(want, d.Range) - } - - sortRanges := func(s []protocol.Range) { - sort.Slice(s, func(i, j int) bool { - return protocol.CompareRange(s[i], s[j]) < 0 - }) - } - - sortRanges(got) - sortRanges(want) - - if diff := cmp.Diff(want, got); diff != "" { - mark.errorf("DocumentHighlight(%v) mismatch (-want +got):\n%s", src, diff) - } -} - -// hoverMarker implements the @hover marker, 
running textDocument/hover at the -// given src location and asserting that the resulting hover is over the dst -// location (typically a span surrounding src), and that the markdown content -// matches the golden content. -func hoverMarker(mark marker, src, dst protocol.Location, golden *Golden) { - content, gotDst := mark.run.env.Hover(src) - if gotDst != dst { - mark.errorf("hover location does not match:\n\tgot: %s\n\twant %s)", mark.run.fmtLoc(gotDst), mark.run.fmtLoc(dst)) - } - gotMD := "" - if content != nil { - gotMD = content.Value - } - wantMD := "" - if golden != nil { - wantBytes, _ := golden.Get(mark.run.env.T, "hover.md", []byte(gotMD)) - wantMD = string(wantBytes) - } - // Normalize newline termination: archive files can't express non-newline - // terminated files. - if strings.HasSuffix(wantMD, "\n") && !strings.HasSuffix(gotMD, "\n") { - gotMD += "\n" - } - if diff := tests.DiffMarkdown(wantMD, gotMD); diff != "" { - mark.errorf("hover markdown mismatch (-want +got):\n%s", diff) - } -} - -// locMarker implements the @loc marker. It is executed before other -// markers, so that locations are available. -func locMarker(mark marker, loc protocol.Location) protocol.Location { return loc } - -// diagMarker implements the @diag marker. It eliminates diagnostics from -// the observed set in mark.test. -func diagMarker(mark marker, loc protocol.Location, re *regexp.Regexp) { - if _, ok := removeDiagnostic(mark, loc, re); !ok { - mark.errorf("no diagnostic at %v matches %q", loc, re) - } -} - -// removeDiagnostic looks for a diagnostic matching loc at the given position. -// -// If found, it returns (diag, true), and eliminates the matched diagnostic -// from the unmatched set. -// -// If not found, it returns (protocol.Diagnostic{}, false). -func removeDiagnostic(mark marker, loc protocol.Location, re *regexp.Regexp) (protocol.Diagnostic, bool) { - loc.Range.End = loc.Range.Start // diagnostics ignore end position. 
- diags := mark.run.diags[loc] - for i, diag := range diags { - if re.MatchString(diag.Message) { - mark.run.diags[loc] = append(diags[:i], diags[i+1:]...) - return diag, true - } - } - return protocol.Diagnostic{}, false -} - -// renameMarker implements the @rename(location, new, golden) marker. -func renameMarker(mark marker, loc protocol.Location, newName string, golden *Golden) { - changed, err := rename(mark.run.env, loc, newName) - if err != nil { - mark.errorf("rename failed: %v. (Use @renameerr for expected errors.)", err) - return - } - checkChangedFiles(mark, changed, golden) -} - -// renameErrMarker implements the @renamererr(location, new, error) marker. -func renameErrMarker(mark marker, loc protocol.Location, newName string, wantErr wantError) { - _, err := rename(mark.run.env, loc, newName) - wantErr.check(mark, err) -} - -func signatureMarker(mark marker, src protocol.Location, label string, active int64) { - got := mark.run.env.SignatureHelp(src) - if label == "" { - if got != nil && len(got.Signatures) > 0 { - mark.errorf("signatureHelp = %v, want 0 signatures", got) - } - return - } - if got == nil || len(got.Signatures) != 1 { - mark.errorf("signatureHelp = %v, want exactly 1 signature", got) - return - } - if got := got.Signatures[0].Label; got != label { - mark.errorf("signatureHelp: got label %q, want %q", got, label) - } - if got := int64(got.ActiveParameter); got != active { - mark.errorf("signatureHelp: got active parameter %d, want %d", got, active) - } -} - -// rename returns the new contents of the files that would be modified -// by renaming the identifier at loc to newName. -func rename(env *Env, loc protocol.Location, newName string) (map[string][]byte, error) { - // We call Server.Rename directly, instead of - // env.Editor.Rename(env.Ctx, loc, newName) - // to isolate Rename from PrepareRename, and because we don't - // want to modify the file system in a scenario with multiple - // @rename markers. 
- - editMap, err := env.Editor.Server.Rename(env.Ctx, &protocol.RenameParams{ - TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, - Position: loc.Range.Start, - NewName: string(newName), - }) - if err != nil { - return nil, err - } - - fileChanges := make(map[string][]byte) - if err := applyDocumentChanges(env, editMap.DocumentChanges, fileChanges); err != nil { - return nil, fmt.Errorf("applying document changes: %v", err) - } - return fileChanges, nil -} - -// applyDocumentChanges applies the given document changes to the editor buffer -// content, recording the resulting contents in the fileChanges map. It is an -// error for a change to an edit a file that is already present in the -// fileChanges map. -func applyDocumentChanges(env *Env, changes []protocol.DocumentChanges, fileChanges map[string][]byte) error { - getMapper := func(path string) (*protocol.Mapper, error) { - if _, ok := fileChanges[path]; ok { - return nil, fmt.Errorf("internal error: %s is already edited", path) - } - return env.Editor.Mapper(path) - } - - for _, change := range changes { - if change.RenameFile != nil { - // rename - oldFile := env.Sandbox.Workdir.URIToPath(change.RenameFile.OldURI) - mapper, err := getMapper(oldFile) - if err != nil { - return err - } - newFile := env.Sandbox.Workdir.URIToPath(change.RenameFile.NewURI) - fileChanges[newFile] = mapper.Content - } else { - // edit - filename := env.Sandbox.Workdir.URIToPath(change.TextDocumentEdit.TextDocument.URI) - mapper, err := getMapper(filename) - if err != nil { - return err - } - patched, _, err := source.ApplyProtocolEdits(mapper, change.TextDocumentEdit.Edits) - if err != nil { - return err - } - fileChanges[filename] = patched - } - } - - return nil -} - -func codeActionMarker(mark marker, start, end protocol.Location, actionKind string, g *Golden, titles ...string) { - // Request the range from start.Start to end.End. - loc := start - loc.Range.End = end.Range.End - - // Apply the fix it suggests. 
- changed, err := codeAction(mark.run.env, loc.URI, loc.Range, actionKind, nil, titles) - if err != nil { - mark.errorf("codeAction failed: %v", err) - return - } - - // Check the file state. - checkChangedFiles(mark, changed, g) -} - -func codeActionEditMarker(mark marker, loc protocol.Location, actionKind string, g *Golden, titles ...string) { - changed, err := codeAction(mark.run.env, loc.URI, loc.Range, actionKind, nil, titles) - if err != nil { - mark.errorf("codeAction failed: %v", err) - return - } - - checkDiffs(mark, changed, g) -} - -func codeActionErrMarker(mark marker, start, end protocol.Location, actionKind string, wantErr wantError) { - loc := start - loc.Range.End = end.Range.End - _, err := codeAction(mark.run.env, loc.URI, loc.Range, actionKind, nil, nil) - wantErr.check(mark, err) -} - -// codeLensesMarker runs the @codelenses() marker, collecting @codelens marks -// in the current file and comparing with the result of the -// textDocument/codeLens RPC. -func codeLensesMarker(mark marker) { - type codeLens struct { - Range protocol.Range - Title string - } - - lenses := mark.run.env.CodeLens(mark.path()) - var got []codeLens - for _, lens := range lenses { - title := "" - if lens.Command != nil { - title = lens.Command.Title - } - got = append(got, codeLens{lens.Range, title}) - } - - var want []codeLens - mark.consumeExtraNotes("codelens", actionMarkerFunc(func(_ marker, loc protocol.Location, title string) { - want = append(want, codeLens{loc.Range, title}) - })) - - for _, s := range [][]codeLens{got, want} { - sort.Slice(s, func(i, j int) bool { - li, lj := s[i], s[j] - if c := protocol.CompareRange(li.Range, lj.Range); c != 0 { - return c < 0 - } - return li.Title < lj.Title - }) - } - - if diff := cmp.Diff(want, got); diff != "" { - mark.errorf("codelenses: unexpected diff (-want +got):\n%s", diff) - } -} - -func documentLinkMarker(mark marker, g *Golden) { - var b bytes.Buffer - links := mark.run.env.DocumentLink(mark.path()) - for _, l := 
range links { - if l.Target == nil { - mark.errorf("%s: nil link target", l.Range) - continue - } - loc := protocol.Location{URI: mark.uri(), Range: l.Range} - fmt.Fprintln(&b, mark.run.fmtLocDetails(loc, false), *l.Target) - } - - compareGolden(mark, "documentLink", b.Bytes(), g) -} - -// consumeExtraNotes runs the provided func for each extra note with the given -// name, and deletes all matching notes. -func (mark marker) consumeExtraNotes(name string, f func(marker)) { - uri := mark.uri() - notes := mark.run.extraNotes[uri][name] - delete(mark.run.extraNotes[uri], name) - - for _, note := range notes { - f(marker{run: mark.run, note: note}) - } -} - -// suggestedfixMarker implements the @suggestedfix(location, regexp, -// kind, golden) marker. It acts like @diag(location, regexp), to set -// the expectation of a diagnostic, but then it applies the first code -// action of the specified kind suggested by the matched diagnostic. -func suggestedfixMarker(mark marker, loc protocol.Location, re *regexp.Regexp, golden *Golden) { - loc.Range.End = loc.Range.Start // diagnostics ignore end position. - // Find and remove the matching diagnostic. - diag, ok := removeDiagnostic(mark, loc, re) - if !ok { - mark.errorf("no diagnostic at %v matches %q", loc, re) - return - } - - // Apply the fix it suggests. - changed, err := codeAction(mark.run.env, loc.URI, diag.Range, "quickfix", &diag, nil) - if err != nil { - mark.errorf("suggestedfix failed: %v. (Use @suggestedfixerr for expected errors.)", err) - return - } - - // Check the file state. - checkDiffs(mark, changed, golden) -} - -// codeAction executes a textDocument/codeAction request for the specified -// location and kind. If diag is non-nil, it is used as the code action -// context. -// -// The resulting map contains resulting file contents after the code action is -// applied. Currently, this function does not support code actions that return -// edits directly; it only supports code action commands. 
-func codeAction(env *Env, uri protocol.DocumentURI, rng protocol.Range, actionKind string, diag *protocol.Diagnostic, titles []string) (map[string][]byte, error) { - changes, err := codeActionChanges(env, uri, rng, actionKind, diag, titles) - if err != nil { - return nil, err - } - fileChanges := make(map[string][]byte) - if err := applyDocumentChanges(env, changes, fileChanges); err != nil { - return nil, fmt.Errorf("applying document changes: %v", err) - } - return fileChanges, nil -} - -// codeActionChanges executes a textDocument/codeAction request for the -// specified location and kind, and captures the resulting document changes. -// If diag is non-nil, it is used as the code action context. -// If titles is non-empty, the code action title must be present among the provided titles. -func codeActionChanges(env *Env, uri protocol.DocumentURI, rng protocol.Range, actionKind string, diag *protocol.Diagnostic, titles []string) ([]protocol.DocumentChanges, error) { - // Request all code actions that apply to the diagnostic. - // (The protocol supports filtering using Context.Only={actionKind} - // but we can give a better error if we don't filter.) - params := &protocol.CodeActionParams{ - TextDocument: protocol.TextDocumentIdentifier{URI: uri}, - Range: rng, - Context: protocol.CodeActionContext{ - Only: nil, // => all kinds - }, - } - if diag != nil { - params.Context.Diagnostics = []protocol.Diagnostic{*diag} - } - - actions, err := env.Editor.Server.CodeAction(env.Ctx, params) - if err != nil { - return nil, err - } - - // Find the sole candidates CodeAction of the specified kind (e.g. refactor.rewrite). 
- var candidates []protocol.CodeAction - for _, act := range actions { - if act.Kind == protocol.CodeActionKind(actionKind) { - if len(titles) > 0 { - for _, f := range titles { - if act.Title == f { - candidates = append(candidates, act) - break - } - } - } else { - candidates = append(candidates, act) - } - } - } - if len(candidates) != 1 { - for _, act := range actions { - env.T.Logf("found CodeAction Kind=%s Title=%q", act.Kind, act.Title) - } - return nil, fmt.Errorf("found %d CodeActions of kind %s matching filters %v for this diagnostic, want 1", len(candidates), actionKind, titles) - } - action := candidates[0] - - // Apply the codeAction. - // - // Spec: - // "If a code action provides an edit and a command, first the edit is - // executed and then the command." - // An action may specify an edit and/or a command, to be - // applied in that order. But since applyDocumentChanges(env, - // action.Edit.DocumentChanges) doesn't compose, for now we - // assert that actions return one or the other. - if action.Edit != nil { - if action.Edit.Changes != nil { - env.T.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Edit.Changes", action.Kind, action.Title) - } - if action.Edit.DocumentChanges != nil { - if action.Command != nil { - env.T.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Command", action.Kind, action.Title) - } - return action.Edit.DocumentChanges, nil - } - } - - if action.Command != nil { - // This is a typical CodeAction command: - // - // Title: "Implement error" - // Command: gopls.apply_fix - // Arguments: [{"Fix":"stub_methods","URI":".../a.go","Range":...}}] - // - // The client makes an ExecuteCommand RPC to the server, - // which dispatches it to the ApplyFix handler. - // ApplyFix dispatches to the "stub_methods" suggestedfix hook (the meat). - // The server then makes an ApplyEdit RPC to the client, - // whose Awaiter hook gathers the edits instead of applying them. 
- - _ = env.Awaiter.takeDocumentChanges() // reset (assuming Env is confined to this thread) - - if _, err := env.Editor.Server.ExecuteCommand(env.Ctx, &protocol.ExecuteCommandParams{ - Command: action.Command.Command, - Arguments: action.Command.Arguments, - }); err != nil { - return nil, err - } - return env.Awaiter.takeDocumentChanges(), nil - } - - return nil, nil -} - -// TODO(adonovan): suggestedfixerr - -// refsMarker implements the @refs marker. -func refsMarker(mark marker, src protocol.Location, want ...protocol.Location) { - refs := func(includeDeclaration bool, want []protocol.Location) error { - got, err := mark.server().References(mark.run.env.Ctx, &protocol.ReferenceParams{ - TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src), - Context: protocol.ReferenceContext{ - IncludeDeclaration: includeDeclaration, - }, - }) - if err != nil { - return err - } - - return compareLocations(mark, got, want) - } - - for _, includeDeclaration := range []bool{false, true} { - // Ignore first 'want' location if we didn't request the declaration. - // TODO(adonovan): don't assume a single declaration: - // there may be >1 if corresponding methods are considered. - want := want - if !includeDeclaration && len(want) > 0 { - want = want[1:] - } - if err := refs(includeDeclaration, want); err != nil { - mark.errorf("refs(includeDeclaration=%t) failed: %v", - includeDeclaration, err) - } - } -} - -// implementationMarker implements the @implementation marker. 
-func implementationMarker(mark marker, src protocol.Location, want ...protocol.Location) { - got, err := mark.server().Implementation(mark.run.env.Ctx, &protocol.ImplementationParams{ - TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src), - }) - if err != nil { - mark.errorf("implementation at %s failed: %v", src, err) - return - } - if err := compareLocations(mark, got, want); err != nil { - mark.errorf("implementation: %v", err) - } -} - -func prepareRenameMarker(mark marker, src, spn protocol.Location, placeholder string) { - params := &protocol.PrepareRenameParams{ - TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src), - } - got, err := mark.run.env.Editor.Server.PrepareRename(mark.run.env.Ctx, params) - if err != nil { - mark.run.env.T.Fatal(err) - } - if placeholder == "" { - if got != nil { - mark.errorf("PrepareRename(...) = %v, want nil", got) - } - return - } - want := &protocol.PrepareRename2Gn{Range: spn.Range, Placeholder: placeholder} - if diff := cmp.Diff(want, got); diff != "" { - mark.errorf("mismatching PrepareRename result:\n%s", diff) - } -} - -// symbolMarker implements the @symbol marker. -func symbolMarker(mark marker, golden *Golden) { - // Retrieve information about all symbols in this file. - symbols, err := mark.server().DocumentSymbol(mark.run.env.Ctx, &protocol.DocumentSymbolParams{ - TextDocument: protocol.TextDocumentIdentifier{URI: mark.uri()}, - }) - if err != nil { - mark.errorf("DocumentSymbol request failed: %v", err) - return - } - - // Format symbols one per line, sorted (in effect) by first column, a dotted name. - var lines []string - for _, symbol := range symbols { - // Each result element is a union of (legacy) - // SymbolInformation and (new) DocumentSymbol, - // so we ascertain which one and then transcode. 
- data, err := json.Marshal(symbol) - if err != nil { - mark.run.env.T.Fatal(err) - } - if _, ok := symbol.(map[string]any)["location"]; ok { - // This case is not reached because Editor initialization - // enables HierarchicalDocumentSymbolSupport. - // TODO(adonovan): test this too. - var sym protocol.SymbolInformation - if err := json.Unmarshal(data, &sym); err != nil { - mark.run.env.T.Fatal(err) - } - mark.errorf("fake Editor doesn't support SymbolInformation") - - } else { - var sym protocol.DocumentSymbol // new hierarchical hotness - if err := json.Unmarshal(data, &sym); err != nil { - mark.run.env.T.Fatal(err) - } - - // Print each symbol in the response tree. - var visit func(sym protocol.DocumentSymbol, prefix []string) - visit = func(sym protocol.DocumentSymbol, prefix []string) { - var out strings.Builder - out.WriteString(strings.Join(prefix, ".")) - fmt.Fprintf(&out, " %q", sym.Detail) - if delta := sym.Range.End.Line - sym.Range.Start.Line; delta > 0 { - fmt.Fprintf(&out, " +%d lines", delta) - } - lines = append(lines, out.String()) - - for _, child := range sym.Children { - visit(child, append(prefix, child.Name)) - } - } - visit(sym, []string{sym.Name}) - } - } - sort.Strings(lines) - lines = append(lines, "") // match trailing newline in .txtar file - got := []byte(strings.Join(lines, "\n")) - - // Compare with golden. - want, ok := golden.Get(mark.run.env.T, "", got) - if !ok { - mark.errorf("%s: missing golden file @%s", mark.note.Name, golden.id) - } else if diff := cmp.Diff(string(got), string(want)); diff != "" { - mark.errorf("%s: unexpected output: got:\n%s\nwant:\n%s\ndiff:\n%s", - mark.note.Name, got, want, diff) - } -} - -// compareLocations returns an error message if got and want are not -// the same set of locations. The marker is used only for fmtLoc. 
-func compareLocations(mark marker, got, want []protocol.Location) error { - toStrings := func(locs []protocol.Location) []string { - strs := make([]string, len(locs)) - for i, loc := range locs { - strs[i] = mark.run.fmtLoc(loc) - } - sort.Strings(strs) - return strs - } - if diff := cmp.Diff(toStrings(want), toStrings(got)); diff != "" { - return fmt.Errorf("incorrect result locations: (got %d, want %d):\n%s", - len(got), len(want), diff) - } - return nil -} - -func workspaceSymbolMarker(mark marker, query string, golden *Golden) { - params := &protocol.WorkspaceSymbolParams{ - Query: query, - } - - gotSymbols, err := mark.server().Symbol(mark.run.env.Ctx, params) - if err != nil { - mark.errorf("Symbol(%q) failed: %v", query, err) - return - } - var got bytes.Buffer - for _, s := range gotSymbols { - // Omit the txtar position of the symbol location; otherwise edits to the - // txtar archive lead to unexpected failures. - loc := mark.run.fmtLocDetails(s.Location, false) - // TODO(rfindley): can we do better here, by detecting if the location is - // relative to GOROOT? - if loc == "" { - loc = "" - } - fmt.Fprintf(&got, "%s %s %s\n", loc, s.Name, s.Kind) - } - - compareGolden(mark, fmt.Sprintf("Symbol(%q)", query), got.Bytes(), golden) -} - -// compareGolden compares the content of got with that of g.Get(""), reporting -// errors on any mismatch. -// -// TODO(rfindley): use this helper in more places. -func compareGolden(mark marker, op string, got []byte, g *Golden) { - want, ok := g.Get(mark.run.env.T, "", got) - if !ok { - mark.errorf("missing golden file @%s", g.id) - return - } - if diff := compare.Bytes(want, got); diff != "" { - mark.errorf("%s mismatch:\n%s", op, diff) - } -} diff --git a/gopls/internal/lsp/regtest/options.go b/gopls/internal/lsp/regtest/options.go deleted file mode 100644 index 7084d621f81..00000000000 --- a/gopls/internal/lsp/regtest/options.go +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package regtest - -import ( - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" -) - -type runConfig struct { - editor fake.EditorConfig - sandbox fake.SandboxConfig - modes Mode - skipHooks bool -} - -func defaultConfig() runConfig { - return runConfig{ - editor: fake.EditorConfig{ - Settings: map[string]interface{}{ - // Shorten the diagnostic delay to speed up test execution (else we'd add - // the default delay to each assertion about diagnostics) - "diagnosticsDelay": "10ms", - }, - }, - } -} - -// A RunOption augments the behavior of the test runner. -type RunOption interface { - set(*runConfig) -} - -type optionSetter func(*runConfig) - -func (f optionSetter) set(opts *runConfig) { - f(opts) -} - -// ProxyFiles configures a file proxy using the given txtar-encoded string. -func ProxyFiles(txt string) RunOption { - return optionSetter(func(opts *runConfig) { - opts.sandbox.ProxyFiles = fake.UnpackTxt(txt) - }) -} - -// Modes configures the execution modes that the test should run in. -// -// By default, modes are configured by the test runner. If this option is set, -// it overrides the set of default modes and the test runs in exactly these -// modes. -func Modes(modes Mode) RunOption { - return optionSetter(func(opts *runConfig) { - if opts.modes != 0 { - panic("modes set more than once") - } - opts.modes = modes - }) -} - -// WindowsLineEndings configures the editor to use windows line endings. -func WindowsLineEndings() RunOption { - return optionSetter(func(opts *runConfig) { - opts.editor.WindowsLineEndings = true - }) -} - -// ClientName sets the LSP client name. -func ClientName(name string) RunOption { - return optionSetter(func(opts *runConfig) { - opts.editor.ClientName = name - }) -} - -// Settings sets user-provided configuration for the LSP server. 
-// -// As a special case, the env setting must not be provided via Settings: use -// EnvVars instead. -type Settings map[string]interface{} - -func (s Settings) set(opts *runConfig) { - if opts.editor.Settings == nil { - opts.editor.Settings = make(map[string]interface{}) - } - for k, v := range s { - opts.editor.Settings[k] = v - } -} - -// WorkspaceFolders configures the workdir-relative workspace folders to send -// to the LSP server. By default the editor sends a single workspace folder -// corresponding to the workdir root. To explicitly configure no workspace -// folders, use WorkspaceFolders with no arguments. -func WorkspaceFolders(relFolders ...string) RunOption { - if len(relFolders) == 0 { - // Use an empty non-nil slice to signal explicitly no folders. - relFolders = []string{} - } - return optionSetter(func(opts *runConfig) { - opts.editor.WorkspaceFolders = relFolders - }) -} - -// EnvVars sets environment variables for the LSP session. When applying these -// variables to the session, the special string $SANDBOX_WORKDIR is replaced by -// the absolute path to the sandbox working directory. -type EnvVars map[string]string - -func (e EnvVars) set(opts *runConfig) { - if opts.editor.Env == nil { - opts.editor.Env = make(map[string]string) - } - for k, v := range e { - opts.editor.Env[k] = v - } -} - -// InGOPATH configures the workspace working directory to be GOPATH, rather -// than a separate working directory for use with modules. -func InGOPATH() RunOption { - return optionSetter(func(opts *runConfig) { - opts.sandbox.InGoPath = true - }) -} - -// MessageResponder configures the editor to respond to -// window/showMessageRequest messages using the provided function. 
-func MessageResponder(f func(*protocol.ShowMessageRequestParams) (*protocol.MessageActionItem, error)) RunOption { - return optionSetter(func(opts *runConfig) { - opts.editor.MessageResponder = f - }) -} diff --git a/gopls/internal/lsp/regtest/regtest.go b/gopls/internal/lsp/regtest/regtest.go deleted file mode 100644 index 6e14b916766..00000000000 --- a/gopls/internal/lsp/regtest/regtest.go +++ /dev/null @@ -1,156 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package regtest - -import ( - "context" - "flag" - "fmt" - "os" - "runtime" - "testing" - "time" - - "golang.org/x/tools/gopls/internal/lsp/cmd" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/gocommand" - "golang.org/x/tools/internal/memoize" - "golang.org/x/tools/internal/testenv" - "golang.org/x/tools/internal/tool" -) - -var ( - runSubprocessTests = flag.Bool("enable_gopls_subprocess_tests", false, "run regtests against a gopls subprocess") - goplsBinaryPath = flag.String("gopls_test_binary", "", "path to the gopls binary for use as a remote, for use with the -enable_gopls_subprocess_tests flag") - regtestTimeout = flag.Duration("regtest_timeout", defaultRegtestTimeout(), "if nonzero, default timeout for each regtest; defaults to GOPLS_REGTEST_TIMEOUT") - skipCleanup = flag.Bool("regtest_skip_cleanup", false, "whether to skip cleaning up temp directories") - printGoroutinesOnFailure = flag.Bool("regtest_print_goroutines", false, "whether to print goroutines info on failure") - printLogs = flag.Bool("regtest_print_logs", false, "whether to print LSP logs") -) - -func defaultRegtestTimeout() time.Duration { - s := os.Getenv("GOPLS_REGTEST_TIMEOUT") - if s == "" { - return 0 - } - d, err := time.ParseDuration(s) - if err != nil { - fmt.Fprintf(os.Stderr, "invalid GOPLS_REGTEST_TIMEOUT %q: %v\n", s, err) - os.Exit(2) - } - return d -} - -var runner 
*Runner - -type regtestRunner interface { - Run(t *testing.T, files string, f TestFunc) -} - -func Run(t *testing.T, files string, f TestFunc) { - runner.Run(t, files, f) -} - -func WithOptions(opts ...RunOption) configuredRunner { - return configuredRunner{opts: opts} -} - -type configuredRunner struct { - opts []RunOption -} - -func (r configuredRunner) Run(t *testing.T, files string, f TestFunc) { - runner.Run(t, files, f, r.opts...) -} - -type RunMultiple []struct { - Name string - Runner regtestRunner -} - -func (r RunMultiple) Run(t *testing.T, files string, f TestFunc) { - for _, runner := range r { - t.Run(runner.Name, func(t *testing.T) { - runner.Runner.Run(t, files, f) - }) - } -} - -// DefaultModes returns the default modes to run for each regression test (they -// may be reconfigured by the tests themselves). -func DefaultModes() Mode { - modes := Default - if !testing.Short() { - modes |= Experimental | Forwarded - } - if *runSubprocessTests { - modes |= SeparateProcess - } - return modes -} - -// Main sets up and tears down the shared regtest state. -func Main(m *testing.M, hook func(*source.Options)) { - // golang/go#54461: enable additional debugging around hanging Go commands. - gocommand.DebugHangingGoCommands = true - - // If this magic environment variable is set, run gopls instead of the test - // suite. See the documentation for runTestAsGoplsEnvvar for more details. - if os.Getenv(runTestAsGoplsEnvvar) == "true" { - tool.Main(context.Background(), cmd.New("gopls", "", nil, hook), os.Args[1:]) - os.Exit(0) - } - - if !testenv.HasExec() { - fmt.Printf("skipping all tests: exec not supported on %s/%s\n", runtime.GOOS, runtime.GOARCH) - os.Exit(0) - } - testenv.ExitIfSmallMachine() - - // Disable GOPACKAGESDRIVER, as it can cause spurious test failures. 
- os.Setenv("GOPACKAGESDRIVER", "off") - - flag.Parse() - - runner = &Runner{ - DefaultModes: DefaultModes(), - Timeout: *regtestTimeout, - PrintGoroutinesOnFailure: *printGoroutinesOnFailure, - SkipCleanup: *skipCleanup, - OptionsHook: hook, - store: memoize.NewStore(memoize.NeverEvict), - } - - runner.goplsPath = *goplsBinaryPath - if runner.goplsPath == "" { - var err error - runner.goplsPath, err = os.Executable() - if err != nil { - panic(fmt.Sprintf("finding test binary path: %v", err)) - } - } - - dir, err := os.MkdirTemp("", "gopls-regtest-") - if err != nil { - panic(fmt.Errorf("creating regtest temp directory: %v", err)) - } - runner.tempDir = dir - - var code int - defer func() { - if err := runner.Close(); err != nil { - fmt.Fprintf(os.Stderr, "closing test runner: %v\n", err) - // Regtest cleanup is broken in go1.12 and earlier, and sometimes flakes on - // Windows due to file locking, but this is OK for our CI. - // - // Fail on go1.13+, except for windows and android which have shutdown problems. - if testenv.Go1Point() >= 13 && runtime.GOOS != "windows" && runtime.GOOS != "android" { - os.Exit(1) - } - } - os.Exit(code) - }() - code = m.Run() -} diff --git a/gopls/internal/lsp/rename.go b/gopls/internal/lsp/rename.go deleted file mode 100644 index 40c475aef67..00000000000 --- a/gopls/internal/lsp/rename.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package lsp - -import ( - "context" - "path/filepath" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -func (s *Server) rename(ctx context.Context, params *protocol.RenameParams) (*protocol.WorkspaceEdit, error) { - ctx, done := event.Start(ctx, "lsp.Server.rename", tag.URI.Of(params.TextDocument.URI)) - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.Go) - defer release() - if !ok { - return nil, err - } - // Because we don't handle directory renaming within source.Rename, source.Rename returns - // boolean value isPkgRenaming to determine whether an DocumentChanges of type RenameFile should - // be added to the return protocol.WorkspaceEdit value. - edits, isPkgRenaming, err := source.Rename(ctx, snapshot, fh, params.Position, params.NewName) - if err != nil { - return nil, err - } - - docChanges := []protocol.DocumentChanges{} // must be a slice - for uri, e := range edits { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - docChanges = append(docChanges, documentChanges(fh, e)...) - } - if isPkgRenaming { - // Update the last component of the file's enclosing directory. - oldBase := filepath.Dir(fh.URI().Filename()) - newURI := filepath.Join(filepath.Dir(oldBase), params.NewName) - docChanges = append(docChanges, protocol.DocumentChanges{ - RenameFile: &protocol.RenameFile{ - Kind: "rename", - OldURI: protocol.URIFromPath(oldBase), - NewURI: protocol.URIFromPath(newURI), - }, - }) - } - return &protocol.WorkspaceEdit{ - DocumentChanges: docChanges, - }, nil -} - -// prepareRename implements the textDocument/prepareRename handler. It may -// return (nil, nil) if there is no rename at the cursor position, but it is -// not desirable to display an error to the user. 
-// -// TODO(rfindley): why wouldn't we want to show an error to the user, if the -// user initiated a rename request at the cursor? -func (s *Server) prepareRename(ctx context.Context, params *protocol.PrepareRenameParams) (*protocol.PrepareRename2Gn, error) { - ctx, done := event.Start(ctx, "lsp.Server.prepareRename", tag.URI.Of(params.TextDocument.URI)) - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.Go) - defer release() - if !ok { - return nil, err - } - // Do not return errors here, as it adds clutter. - // Returning a nil result means there is not a valid rename. - item, usererr, err := source.PrepareRename(ctx, snapshot, fh, params.Position) - if err != nil { - // Return usererr here rather than err, to avoid cluttering the UI with - // internal error details. - return nil, usererr - } - return &protocol.PrepareRename2Gn{ - Range: item.Range, - Placeholder: item.Text, - }, nil -} diff --git a/gopls/internal/lsp/reset_golden.sh b/gopls/internal/lsp/reset_golden.sh deleted file mode 100755 index ff7f4d08208..00000000000 --- a/gopls/internal/lsp/reset_golden.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash -# -# Copyright 2022 The Go Authors. All rights reserved. -# Use of this source code is governed by a BSD-style -# license that can be found in the LICENSE file. -# -# Updates the *.golden files ... to match the tests' current behavior. - -set -eu - -GO117BIN="go1.17.9" - -command -v $GO117BIN >/dev/null 2>&1 || { - go install golang.org/dl/$GO117BIN@latest - $GO117BIN download -} - -find ./internal/lsp/testdata -name *.golden ! -name summary*.txt.golden -delete -# Here we intentionally do not run the ./internal/lsp/source tests with -# -golden. Eventually these tests will be deleted, and in the meantime they are -# redundant with the ./internal/lsp tests. 
-# -# Note: go1.17.9 tests must be run *before* go tests, as by convention the -# golden output should match the output of gopls built with the most recent -# version of Go. If output differs at 1.17, tests must be tolerant of the 1.17 -# output. -$GO117BIN test ./internal/lsp -golden -go test ./internal/lsp -golden -$GO117BIN test ./test -golden -go test ./test -golden diff --git a/gopls/internal/lsp/semantic.go b/gopls/internal/lsp/semantic.go deleted file mode 100644 index 5adf1cc4449..00000000000 --- a/gopls/internal/lsp/semantic.go +++ /dev/null @@ -1,1052 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "bytes" - "context" - "errors" - "fmt" - "go/ast" - "go/token" - "go/types" - "log" - "path/filepath" - "sort" - "strings" - "time" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/template" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" - "golang.org/x/tools/internal/typeparams" -) - -// The LSP says that errors for the semantic token requests should only be returned -// for exceptions (a word not otherwise defined). This code treats a too-large file -// as an exception. On parse errors, the code does what it can. 
- -// reject full semantic token requests for large files -const maxFullFileSize int = 100000 - -// to control comprehensive logging of decisions (gopls semtok foo.go > /dev/null shows log output) -// semDebug should NEVER be true in checked-in code -const semDebug = false - -func (s *Server) semanticTokensFull(ctx context.Context, params *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) { - ctx, done := event.Start(ctx, "lsp.Server.semanticTokensFull", tag.URI.Of(params.TextDocument.URI)) - defer done() - - ret, err := s.computeSemanticTokens(ctx, params.TextDocument, nil) - return ret, err -} - -func (s *Server) semanticTokensRange(ctx context.Context, params *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) { - ctx, done := event.Start(ctx, "lsp.Server.semanticTokensRange", tag.URI.Of(params.TextDocument.URI)) - defer done() - - ret, err := s.computeSemanticTokens(ctx, params.TextDocument, ¶ms.Range) - return ret, err -} - -func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocumentIdentifier, rng *protocol.Range) (*protocol.SemanticTokens, error) { - ans := protocol.SemanticTokens{ - Data: []uint32{}, - } - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, td.URI, source.UnknownKind) - defer release() - if !ok { - return nil, err - } - if !snapshot.Options().SemanticTokens { - // return an error, so if the option changes - // the client won't remember the wrong answer - return nil, fmt.Errorf("semantictokens are disabled") - } - kind := snapshot.FileKind(fh) - if kind == source.Tmpl { - // this is a little cumbersome to avoid both exporting 'encoded' and its methods - // and to avoid import cycles - e := &encoded{ - ctx: ctx, - metadataSource: snapshot, - rng: rng, - tokTypes: snapshot.Options().SemanticTypes, - tokMods: snapshot.Options().SemanticMods, - } - add := func(line, start uint32, len uint32) { - e.add(line, start, len, tokMacro, nil) - } - data := func() []uint32 { - return 
e.Data() - } - return template.SemanticTokens(ctx, snapshot, fh.URI(), add, data) - } - if kind != source.Go { - return nil, nil - } - pkg, pgf, err := source.NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - - if rng == nil && len(pgf.Src) > maxFullFileSize { - err := fmt.Errorf("semantic tokens: file %s too large for full (%d>%d)", - fh.URI().Filename(), len(pgf.Src), maxFullFileSize) - return nil, err - } - e := &encoded{ - ctx: ctx, - metadataSource: snapshot, - pgf: pgf, - rng: rng, - ti: pkg.GetTypesInfo(), - pkg: pkg, - fset: pkg.FileSet(), - tokTypes: snapshot.Options().SemanticTypes, - tokMods: snapshot.Options().SemanticMods, - noStrings: snapshot.Options().NoSemanticString, - noNumbers: snapshot.Options().NoSemanticNumber, - } - if err := e.init(); err != nil { - // e.init should never return an error, unless there's some - // seemingly impossible race condition - return nil, err - } - e.semantics() - ans.Data = e.Data() - // For delta requests, but we've never seen any. 
- ans.ResultID = fmt.Sprintf("%v", time.Now()) - return &ans, nil -} - -func (e *encoded) semantics() { - f := e.pgf.File - // may not be in range, but harmless - e.token(f.Package, len("package"), tokKeyword, nil) - e.token(f.Name.NamePos, len(f.Name.Name), tokNamespace, nil) - inspect := func(n ast.Node) bool { - return e.inspector(n) - } - for _, d := range f.Decls { - // only look at the decls that overlap the range - start, end := d.Pos(), d.End() - if end <= e.start || start >= e.end { - continue - } - ast.Inspect(d, inspect) - } - for _, cg := range f.Comments { - for _, c := range cg.List { - if strings.HasPrefix(c.Text, "//go:") { - e.godirective(c) - continue - } - if !strings.Contains(c.Text, "\n") { - e.token(c.Pos(), len(c.Text), tokComment, nil) - continue - } - e.multiline(c.Pos(), c.End(), c.Text, tokComment) - } - } -} - -type tokenType string - -const ( - tokNamespace tokenType = "namespace" - tokType tokenType = "type" - tokInterface tokenType = "interface" - tokTypeParam tokenType = "typeParameter" - tokParameter tokenType = "parameter" - tokVariable tokenType = "variable" - tokMethod tokenType = "method" - tokFunction tokenType = "function" - tokKeyword tokenType = "keyword" - tokComment tokenType = "comment" - tokString tokenType = "string" - tokNumber tokenType = "number" - tokOperator tokenType = "operator" - - tokMacro tokenType = "macro" // for templates -) - -func (e *encoded) token(start token.Pos, leng int, typ tokenType, mods []string) { - if !start.IsValid() { - // This is not worth reporting. TODO(pjw): does it still happen? - return - } - if start >= e.end || start+token.Pos(leng) <= e.start { - return - } - // want a line and column from start (in LSP coordinates). Ignore line directives. 
- lspRange, err := e.pgf.PosRange(start, start+token.Pos(leng)) - if err != nil { - event.Error(e.ctx, "failed to convert to range", err) - return - } - if lspRange.End.Line != lspRange.Start.Line { - // this happens if users are typing at the end of the file, but report nothing - return - } - // token is all on one line - length := lspRange.End.Character - lspRange.Start.Character - e.add(lspRange.Start.Line, lspRange.Start.Character, length, typ, mods) -} - -func (e *encoded) add(line, start uint32, len uint32, tok tokenType, mod []string) { - x := semItem{line, start, len, tok, mod} - e.items = append(e.items, x) -} - -// semItem represents a token found walking the parse tree -type semItem struct { - line, start uint32 - len uint32 - typeStr tokenType - mods []string -} - -type encoded struct { - // the generated data - items []semItem - - noStrings bool - noNumbers bool - - ctx context.Context - // metadataSource is used to resolve imports - metadataSource source.MetadataSource - tokTypes, tokMods []string - pgf *source.ParsedGoFile - rng *protocol.Range - ti *types.Info - pkg source.Package - fset *token.FileSet - // allowed starting and ending token.Pos, set by init - // used to avoid looking at declarations not in range - start, end token.Pos - // path from the root of the parse tree, used for debugging - stack []ast.Node -} - -// convert the stack to a string, for debugging -func (e *encoded) strStack() string { - msg := []string{"["} - for i := len(e.stack) - 1; i >= 0; i-- { - s := e.stack[i] - msg = append(msg, fmt.Sprintf("%T", s)[5:]) - } - if len(e.stack) > 0 { - loc := e.stack[len(e.stack)-1].Pos() - if _, err := safetoken.Offset(e.pgf.Tok, loc); err != nil { - msg = append(msg, fmt.Sprintf("invalid position %v for %s", loc, e.pgf.URI)) - } else { - add := safetoken.Position(e.pgf.Tok, loc) - nm := filepath.Base(add.Filename) - msg = append(msg, fmt.Sprintf("(%s:%d,col:%d)", nm, add.Line, add.Column)) - } - } - msg = append(msg, "]") - return 
strings.Join(msg, " ") -} - -// find the line in the source -func (e *encoded) srcLine(x ast.Node) string { - file := e.pgf.Tok - line := safetoken.Line(file, x.Pos()) - start, err := safetoken.Offset(file, file.LineStart(line)) - if err != nil { - return "" - } - end := start - for ; end < len(e.pgf.Src) && e.pgf.Src[end] != '\n'; end++ { - - } - ans := e.pgf.Src[start:end] - return string(ans) -} - -func (e *encoded) inspector(n ast.Node) bool { - pop := func() { - e.stack = e.stack[:len(e.stack)-1] - } - if n == nil { - pop() - return true - } - e.stack = append(e.stack, n) - switch x := n.(type) { - case *ast.ArrayType: - case *ast.AssignStmt: - e.token(x.TokPos, len(x.Tok.String()), tokOperator, nil) - case *ast.BasicLit: - if strings.Contains(x.Value, "\n") { - // has to be a string. - e.multiline(x.Pos(), x.End(), x.Value, tokString) - break - } - ln := len(x.Value) - what := tokNumber - if x.Kind == token.STRING { - what = tokString - } - e.token(x.Pos(), ln, what, nil) - case *ast.BinaryExpr: - e.token(x.OpPos, len(x.Op.String()), tokOperator, nil) - case *ast.BlockStmt: - case *ast.BranchStmt: - e.token(x.TokPos, len(x.Tok.String()), tokKeyword, nil) - // There's no semantic encoding for labels - case *ast.CallExpr: - if x.Ellipsis != token.NoPos { - e.token(x.Ellipsis, len("..."), tokOperator, nil) - } - case *ast.CaseClause: - iam := "case" - if x.List == nil { - iam = "default" - } - e.token(x.Case, len(iam), tokKeyword, nil) - case *ast.ChanType: - // chan | chan <- | <- chan - switch { - case x.Arrow == token.NoPos: - e.token(x.Begin, len("chan"), tokKeyword, nil) - case x.Arrow == x.Begin: - e.token(x.Arrow, 2, tokOperator, nil) - pos := e.findKeyword("chan", x.Begin+2, x.Value.Pos()) - e.token(pos, len("chan"), tokKeyword, nil) - case x.Arrow != x.Begin: - e.token(x.Begin, len("chan"), tokKeyword, nil) - e.token(x.Arrow, 2, tokOperator, nil) - } - case *ast.CommClause: - iam := len("case") - if x.Comm == nil { - iam = len("default") - } - 
e.token(x.Case, iam, tokKeyword, nil) - case *ast.CompositeLit: - case *ast.DeclStmt: - case *ast.DeferStmt: - e.token(x.Defer, len("defer"), tokKeyword, nil) - case *ast.Ellipsis: - e.token(x.Ellipsis, len("..."), tokOperator, nil) - case *ast.EmptyStmt: - case *ast.ExprStmt: - case *ast.Field: - case *ast.FieldList: - case *ast.ForStmt: - e.token(x.For, len("for"), tokKeyword, nil) - case *ast.FuncDecl: - case *ast.FuncLit: - case *ast.FuncType: - if x.Func != token.NoPos { - e.token(x.Func, len("func"), tokKeyword, nil) - } - case *ast.GenDecl: - e.token(x.TokPos, len(x.Tok.String()), tokKeyword, nil) - case *ast.GoStmt: - e.token(x.Go, len("go"), tokKeyword, nil) - case *ast.Ident: - e.ident(x) - case *ast.IfStmt: - e.token(x.If, len("if"), tokKeyword, nil) - if x.Else != nil { - // x.Body.End() or x.Body.End()+1, not that it matters - pos := e.findKeyword("else", x.Body.End(), x.Else.Pos()) - e.token(pos, len("else"), tokKeyword, nil) - } - case *ast.ImportSpec: - e.importSpec(x) - pop() - return false - case *ast.IncDecStmt: - e.token(x.TokPos, len(x.Tok.String()), tokOperator, nil) - case *ast.IndexExpr: - case *typeparams.IndexListExpr: - case *ast.InterfaceType: - e.token(x.Interface, len("interface"), tokKeyword, nil) - case *ast.KeyValueExpr: - case *ast.LabeledStmt: - case *ast.MapType: - e.token(x.Map, len("map"), tokKeyword, nil) - case *ast.ParenExpr: - case *ast.RangeStmt: - e.token(x.For, len("for"), tokKeyword, nil) - // x.TokPos == token.NoPos is legal (for range foo {}) - offset := x.TokPos - if offset == token.NoPos { - offset = x.For - } - pos := e.findKeyword("range", offset, x.X.Pos()) - e.token(pos, len("range"), tokKeyword, nil) - case *ast.ReturnStmt: - e.token(x.Return, len("return"), tokKeyword, nil) - case *ast.SelectStmt: - e.token(x.Select, len("select"), tokKeyword, nil) - case *ast.SelectorExpr: - case *ast.SendStmt: - e.token(x.Arrow, len("<-"), tokOperator, nil) - case *ast.SliceExpr: - case *ast.StarExpr: - e.token(x.Star, 
len("*"), tokOperator, nil) - case *ast.StructType: - e.token(x.Struct, len("struct"), tokKeyword, nil) - case *ast.SwitchStmt: - e.token(x.Switch, len("switch"), tokKeyword, nil) - case *ast.TypeAssertExpr: - if x.Type == nil { - pos := e.findKeyword("type", x.Lparen, x.Rparen) - e.token(pos, len("type"), tokKeyword, nil) - } - case *ast.TypeSpec: - case *ast.TypeSwitchStmt: - e.token(x.Switch, len("switch"), tokKeyword, nil) - case *ast.UnaryExpr: - e.token(x.OpPos, len(x.Op.String()), tokOperator, nil) - case *ast.ValueSpec: - // things only seen with parsing or type errors, so ignore them - case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt: - return true - // not going to see these - case *ast.File, *ast.Package: - e.unexpected(fmt.Sprintf("implement %T %s", x, safetoken.Position(e.pgf.Tok, x.Pos()))) - // other things we knowingly ignore - case *ast.Comment, *ast.CommentGroup: - pop() - return false - default: - e.unexpected(fmt.Sprintf("failed to implement %T", x)) - } - return true -} - -func (e *encoded) ident(x *ast.Ident) { - if e.ti == nil { - what, mods := e.unkIdent(x) - if what != "" { - e.token(x.Pos(), len(x.String()), what, mods) - } - if semDebug { - log.Printf(" nil %s/nil/nil %q %v %s", x.String(), what, mods, e.strStack()) - } - return - } - def := e.ti.Defs[x] - if def != nil { - what, mods := e.definitionFor(x, def) - if what != "" { - e.token(x.Pos(), len(x.String()), what, mods) - } - if semDebug { - log.Printf(" for %s/%T/%T got %s %v (%s)", x.String(), def, def.Type(), what, mods, e.strStack()) - } - return - } - use := e.ti.Uses[x] - tok := func(pos token.Pos, lng int, tok tokenType, mods []string) { - e.token(pos, lng, tok, mods) - q := "nil" - if use != nil { - q = fmt.Sprintf("%T", use.Type()) - } - if semDebug { - log.Printf(" use %s/%T/%s got %s %v (%s)", x.String(), use, q, tok, mods, e.strStack()) - } - } - - switch y := use.(type) { - case nil: - what, mods := e.unkIdent(x) - if what != "" { - tok(x.Pos(), len(x.String()), what, 
mods) - } else if semDebug { - // tok() wasn't called, so didn't log - log.Printf(" nil %s/%T/nil %q %v (%s)", x.String(), use, what, mods, e.strStack()) - } - return - case *types.Builtin: - tok(x.NamePos, len(x.Name), tokFunction, []string{"defaultLibrary"}) - case *types.Const: - mods := []string{"readonly"} - tt := y.Type() - if _, ok := tt.(*types.Basic); ok { - tok(x.Pos(), len(x.String()), tokVariable, mods) - break - } - if ttx, ok := tt.(*types.Named); ok { - if x.String() == "iota" { - e.unexpected(fmt.Sprintf("iota:%T", ttx)) - } - if _, ok := ttx.Underlying().(*types.Basic); ok { - tok(x.Pos(), len(x.String()), tokVariable, mods) - break - } - e.unexpected(fmt.Sprintf("%q/%T", x.String(), tt)) - } - // can this happen? Don't think so - e.unexpected(fmt.Sprintf("%s %T %#v", x.String(), tt, tt)) - case *types.Func: - tok(x.Pos(), len(x.Name), tokFunction, nil) - case *types.Label: - // nothing to map it to - case *types.Nil: - // nil is a predeclared identifier - tok(x.Pos(), len("nil"), tokVariable, []string{"readonly", "defaultLibrary"}) - case *types.PkgName: - tok(x.Pos(), len(x.Name), tokNamespace, nil) - case *types.TypeName: // could be a tokTpeParam - var mods []string - if _, ok := y.Type().(*types.Basic); ok { - mods = []string{"defaultLibrary"} - } else if _, ok := y.Type().(*typeparams.TypeParam); ok { - tok(x.Pos(), len(x.String()), tokTypeParam, mods) - break - } - tok(x.Pos(), len(x.String()), tokType, mods) - case *types.Var: - if isSignature(y) { - tok(x.Pos(), len(x.Name), tokFunction, nil) - } else if e.isParam(use.Pos()) { - // variable, unless use.pos is the pos of a Field in an ancestor FuncDecl - // or FuncLit and then it's a parameter - tok(x.Pos(), len(x.Name), tokParameter, nil) - } else { - tok(x.Pos(), len(x.Name), tokVariable, nil) - } - - default: - // can't happen - if use == nil { - msg := fmt.Sprintf("%#v %#v %#v", x, e.ti.Defs[x], e.ti.Uses[x]) - e.unexpected(msg) - } - if use.Type() != nil { - 
e.unexpected(fmt.Sprintf("%s %T/%T,%#v", x.String(), use, use.Type(), use)) - } else { - e.unexpected(fmt.Sprintf("%s %T", x.String(), use)) - } - } -} - -func (e *encoded) isParam(pos token.Pos) bool { - for i := len(e.stack) - 1; i >= 0; i-- { - switch n := e.stack[i].(type) { - case *ast.FuncDecl: - for _, f := range n.Type.Params.List { - for _, id := range f.Names { - if id.Pos() == pos { - return true - } - } - } - case *ast.FuncLit: - for _, f := range n.Type.Params.List { - for _, id := range f.Names { - if id.Pos() == pos { - return true - } - } - } - } - } - return false -} - -func isSignature(use types.Object) bool { - if _, ok := use.(*types.Var); !ok { - return false - } - v := use.Type() - if v == nil { - return false - } - if _, ok := v.(*types.Signature); ok { - return true - } - return false -} - -// both e.ti.Defs and e.ti.Uses are nil. use the parse stack. -// a lot of these only happen when the package doesn't compile -// but in that case it is all best-effort from the parse tree -func (e *encoded) unkIdent(x *ast.Ident) (tokenType, []string) { - def := []string{"definition"} - n := len(e.stack) - 2 // parent of Ident - if n < 0 { - e.unexpected("no stack?") - return "", nil - } - switch nd := e.stack[n].(type) { - case *ast.BinaryExpr, *ast.UnaryExpr, *ast.ParenExpr, *ast.StarExpr, - *ast.IncDecStmt, *ast.SliceExpr, *ast.ExprStmt, *ast.IndexExpr, - *ast.ReturnStmt, *ast.ChanType, *ast.SendStmt, - *ast.ForStmt, // possibly incomplete - *ast.IfStmt, /* condition */ - *ast.KeyValueExpr: // either key or value - return tokVariable, nil - case *typeparams.IndexListExpr: - return tokVariable, nil - case *ast.Ellipsis: - return tokType, nil - case *ast.CaseClause: - if n-2 >= 0 { - if _, ok := e.stack[n-2].(*ast.TypeSwitchStmt); ok { - return tokType, nil - } - } - return tokVariable, nil - case *ast.ArrayType: - if x == nd.Len { - // or maybe a Type Param, but we can't just from the parse tree - return tokVariable, nil - } else { - return tokType, 
nil - } - case *ast.MapType: - return tokType, nil - case *ast.CallExpr: - if x == nd.Fun { - return tokFunction, nil - } - return tokVariable, nil - case *ast.SwitchStmt: - return tokVariable, nil - case *ast.TypeAssertExpr: - if x == nd.X { - return tokVariable, nil - } else if x == nd.Type { - return tokType, nil - } - case *ast.ValueSpec: - for _, p := range nd.Names { - if p == x { - return tokVariable, def - } - } - for _, p := range nd.Values { - if p == x { - return tokVariable, nil - } - } - return tokType, nil - case *ast.SelectorExpr: // e.ti.Selections[nd] is nil, so no help - if n-1 >= 0 { - if ce, ok := e.stack[n-1].(*ast.CallExpr); ok { - // ... CallExpr SelectorExpr Ident (_.x()) - if ce.Fun == nd && nd.Sel == x { - return tokFunction, nil - } - } - } - return tokVariable, nil - case *ast.AssignStmt: - for _, p := range nd.Lhs { - // x := ..., or x = ... - if p == x { - if nd.Tok != token.DEFINE { - def = nil - } - return tokVariable, def // '_' in _ = ... - } - } - // RHS, = x - return tokVariable, nil - case *ast.TypeSpec: // it's a type if it is either the Name or the Type - if x == nd.Type { - def = nil - } - return tokType, def - case *ast.Field: - // ident could be type in a field, or a method in an interface type, or a variable - if x == nd.Type { - return tokType, nil - } - if n-2 >= 0 { - _, okit := e.stack[n-2].(*ast.InterfaceType) - _, okfl := e.stack[n-1].(*ast.FieldList) - if okit && okfl { - return tokMethod, def - } - } - return tokVariable, nil - case *ast.LabeledStmt, *ast.BranchStmt: - // nothing to report - case *ast.CompositeLit: - if nd.Type == x { - return tokType, nil - } - return tokVariable, nil - case *ast.RangeStmt: - if nd.Tok != token.DEFINE { - def = nil - } - return tokVariable, def - case *ast.FuncDecl: - return tokFunction, def - default: - msg := fmt.Sprintf("%T undexpected: %s %s%q", nd, x.Name, e.strStack(), e.srcLine(x)) - e.unexpected(msg) - } - return "", nil -} - -func isDeprecated(n *ast.CommentGroup) bool { 
- if n == nil { - return false - } - for _, c := range n.List { - if strings.HasPrefix(c.Text, "// Deprecated") { - return true - } - } - return false -} - -func (e *encoded) definitionFor(x *ast.Ident, def types.Object) (tokenType, []string) { - // PJW: def == types.Label? probably a nothing - // PJW: look into replacing these syntactic tests with types more generally - mods := []string{"definition"} - for i := len(e.stack) - 1; i >= 0; i-- { - s := e.stack[i] - switch y := s.(type) { - case *ast.AssignStmt, *ast.RangeStmt: - if x.Name == "_" { - return "", nil // not really a variable - } - return tokVariable, mods - case *ast.GenDecl: - if isDeprecated(y.Doc) { - mods = append(mods, "deprecated") - } - if y.Tok == token.CONST { - mods = append(mods, "readonly") - } - return tokVariable, mods - case *ast.FuncDecl: - // If x is immediately under a FuncDecl, it is a function or method - if i == len(e.stack)-2 { - if isDeprecated(y.Doc) { - mods = append(mods, "deprecated") - } - if y.Recv != nil { - return tokMethod, mods - } - return tokFunction, mods - } - // if x < ... < FieldList < FuncDecl, this is the receiver, a variable - // PJW: maybe not. it might be a typeparameter in the type of the receiver - if _, ok := e.stack[i+1].(*ast.FieldList); ok { - if _, ok := def.(*types.TypeName); ok { - return tokTypeParam, mods - } - return tokVariable, nil - } - // if x < ... < FieldList < FuncType < FuncDecl, this is a param - return tokParameter, mods - case *ast.FuncType: // is it in the TypeParams? - if isTypeParam(x, y) { - return tokTypeParam, mods - } - return tokParameter, mods - case *ast.InterfaceType: - return tokMethod, mods - case *ast.TypeSpec: - // GenDecl/Typespec/FuncType/FieldList/Field/Ident - // (type A func(b uint64)) (err error) - // b and err should not be tokType, but tokVaraible - // and in GenDecl/TpeSpec/StructType/FieldList/Field/Ident - // (type A struct{b uint64} - // but on type B struct{C}), C is a type, but is not being defined. 
- // GenDecl/TypeSpec/FieldList/Field/Ident is a typeParam - if _, ok := e.stack[i+1].(*ast.FieldList); ok { - return tokTypeParam, mods - } - fldm := e.stack[len(e.stack)-2] - if fld, ok := fldm.(*ast.Field); ok { - // if len(fld.names) == 0 this is a tokType, being used - if len(fld.Names) == 0 { - return tokType, nil - } - return tokVariable, mods - } - return tokType, mods - } - } - // can't happen - msg := fmt.Sprintf("failed to find the decl for %s", safetoken.Position(e.pgf.Tok, x.Pos())) - e.unexpected(msg) - return "", []string{""} -} - -func isTypeParam(x *ast.Ident, y *ast.FuncType) bool { - tp := typeparams.ForFuncType(y) - if tp == nil { - return false - } - for _, p := range tp.List { - for _, n := range p.Names { - if x == n { - return true - } - } - } - return false -} - -func (e *encoded) multiline(start, end token.Pos, val string, tok tokenType) { - f := e.fset.File(start) - // the hard part is finding the lengths of lines. include the \n - leng := func(line int) int { - n := f.LineStart(line) - if line >= f.LineCount() { - return f.Size() - int(n) - } - return int(f.LineStart(line+1) - n) - } - spos := safetoken.StartPosition(e.fset, start) - epos := safetoken.EndPosition(e.fset, end) - sline := spos.Line - eline := epos.Line - // first line is from spos.Column to end - e.token(start, leng(sline)-spos.Column, tok, nil) // leng(sline)-1 - (spos.Column-1) - for i := sline + 1; i < eline; i++ { - // intermediate lines are from 1 to end - e.token(f.LineStart(i), leng(i)-1, tok, nil) // avoid the newline - } - // last line is from 1 to epos.Column - e.token(f.LineStart(eline), epos.Column-1, tok, nil) // columns are 1-based -} - -// findKeyword finds a keyword rather than guessing its location -func (e *encoded) findKeyword(keyword string, start, end token.Pos) token.Pos { - offset := int(start) - e.pgf.Tok.Base() - last := int(end) - e.pgf.Tok.Base() - buf := e.pgf.Src - idx := bytes.Index(buf[offset:last], []byte(keyword)) - if idx != -1 { - return 
start + token.Pos(idx) - } - //(in unparsable programs: type _ <-<-chan int) - e.unexpected(fmt.Sprintf("not found:%s %v", keyword, safetoken.StartPosition(e.fset, start))) - return token.NoPos -} - -func (e *encoded) init() error { - if e.rng != nil { - var err error - e.start, e.end, err = e.pgf.RangePos(*e.rng) - if err != nil { - return fmt.Errorf("range span (%w) error for %s", err, e.pgf.File.Name) - } - } else { - tok := e.pgf.Tok - e.start, e.end = tok.Pos(0), tok.Pos(tok.Size()) // entire file - } - return nil -} - -func (e *encoded) Data() []uint32 { - // binary operators, at least, will be out of order - sort.Slice(e.items, func(i, j int) bool { - if e.items[i].line != e.items[j].line { - return e.items[i].line < e.items[j].line - } - return e.items[i].start < e.items[j].start - }) - typeMap, modMap := e.maps() - // each semantic token needs five values - // (see Integer Encoding for Tokens in the LSP spec) - x := make([]uint32, 5*len(e.items)) - var j int - var last semItem - for i := 0; i < len(e.items); i++ { - item := e.items[i] - typ, ok := typeMap[item.typeStr] - if !ok { - continue // client doesn't want typeStr - } - if item.typeStr == tokString && e.noStrings { - continue - } - if item.typeStr == tokNumber && e.noNumbers { - continue - } - if j == 0 { - x[0] = e.items[0].line - } else { - x[j] = item.line - last.line - } - x[j+1] = item.start - if j > 0 && x[j] == 0 { - x[j+1] = item.start - last.start - } - x[j+2] = item.len - x[j+3] = uint32(typ) - mask := 0 - for _, s := range item.mods { - // modMap[s] is 0 if the client doesn't want this modifier - mask |= modMap[s] - } - x[j+4] = uint32(mask) - j += 5 - last = item - } - return x[:j] -} - -func (e *encoded) importSpec(d *ast.ImportSpec) { - // a local package name or the last component of the Path - if d.Name != nil { - nm := d.Name.String() - if nm != "_" && nm != "." { - e.token(d.Name.Pos(), len(nm), tokNamespace, nil) - } - return // don't mark anything for . 
or _ - } - importPath := source.UnquoteImportPath(d) - if importPath == "" { - return - } - // Import strings are implementation defined. Try to match with parse information. - depID := e.pkg.Metadata().DepsByImpPath[importPath] - if depID == "" { - return - } - depMD := e.metadataSource.Metadata(depID) - if depMD == nil { - // unexpected, but impact is that maybe some import is not colored - return - } - // Check whether the original literal contains the package's declared name. - j := strings.LastIndex(d.Path.Value, string(depMD.Name)) - if j == -1 { - // Package name does not match import path, so there is nothing to report. - return - } - // Report virtual declaration at the position of the substring. - start := d.Path.Pos() + token.Pos(j) - e.token(start, len(depMD.Name), tokNamespace, nil) -} - -// log unexpected state -func (e *encoded) unexpected(msg string) { - if semDebug { - panic(msg) - } - event.Error(e.ctx, e.strStack(), errors.New(msg)) -} - -// SemType returns a string equivalent of the type, for gopls semtok -func SemType(n int) string { - tokTypes := SemanticTypes() - tokMods := SemanticModifiers() - if n >= 0 && n < len(tokTypes) { - return tokTypes[n] - } - // not found for some reason - return fmt.Sprintf("?%d[%d,%d]?", n, len(tokTypes), len(tokMods)) -} - -// SemMods returns the []string equivalent of the mods, for gopls semtok. 
-func SemMods(n int) []string { - tokMods := SemanticModifiers() - mods := []string{} - for i := 0; i < len(tokMods); i++ { - if (n & (1 << uint(i))) != 0 { - mods = append(mods, tokMods[i]) - } - } - return mods -} - -func (e *encoded) maps() (map[tokenType]int, map[string]int) { - tmap := make(map[tokenType]int) - mmap := make(map[string]int) - for i, t := range e.tokTypes { - tmap[tokenType(t)] = i - } - for i, m := range e.tokMods { - mmap[m] = 1 << uint(i) // go 1.12 compatibility - } - return tmap, mmap -} - -// SemanticTypes to use in case there is no client, as in the command line, or tests -func SemanticTypes() []string { - return semanticTypes[:] -} - -// SemanticModifiers to use in case there is no client. -func SemanticModifiers() []string { - return semanticModifiers[:] -} - -var ( - semanticTypes = [...]string{ - "namespace", "type", "class", "enum", "interface", - "struct", "typeParameter", "parameter", "variable", "property", "enumMember", - "event", "function", "method", "macro", "keyword", "modifier", "comment", - "string", "number", "regexp", "operator", - } - semanticModifiers = [...]string{ - "declaration", "definition", "readonly", "static", - "deprecated", "abstract", "async", "modification", "documentation", "defaultLibrary", - } -) - -var godirectives = map[string]struct{}{ - // https://pkg.go.dev/cmd/compile - "noescape": {}, - "uintptrescapes": {}, - "noinline": {}, - "norace": {}, - "nosplit": {}, - "linkname": {}, - - // https://pkg.go.dev/go/build - "build": {}, - "binary-only-package": {}, - "embed": {}, -} - -// Tokenize godirective at the start of the comment c, if any, and the surrounding comment. -// If there is any failure, emits the entire comment as a tokComment token. -// Directives are highlighted as-is, even if used incorrectly. Typically there are -// dedicated analyzers that will warn about misuse. -func (e *encoded) godirective(c *ast.Comment) { - // First check if '//go:directive args...' is a valid directive. 
- directive, args, _ := strings.Cut(c.Text, " ") - kind, _ := stringsCutPrefix(directive, "//go:") - if _, ok := godirectives[kind]; !ok { - // Unknown go: directive. - e.token(c.Pos(), len(c.Text), tokComment, nil) - return - } - - // Make the 'go:directive' part stand out, the rest is comments. - e.token(c.Pos(), len("//"), tokComment, nil) - - directiveStart := c.Pos() + token.Pos(len("//")) - e.token(directiveStart, len(directive[len("//"):]), tokNamespace, nil) - - if len(args) > 0 { - tailStart := c.Pos() + token.Pos(len(directive)+len(" ")) - e.token(tailStart, len(args), tokComment, nil) - } -} - -// Go 1.20 strings.CutPrefix. -func stringsCutPrefix(s, prefix string) (after string, found bool) { - if !strings.HasPrefix(s, prefix) { - return s, false - } - return s[len(prefix):], true -} diff --git a/gopls/internal/lsp/server.go b/gopls/internal/lsp/server.go deleted file mode 100644 index a236779962f..00000000000 --- a/gopls/internal/lsp/server.go +++ /dev/null @@ -1,205 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:generate go run ./helper -d protocol/tsserver.go -o server_gen.go -u . - -// Package lsp implements LSP for gopls. -package lsp - -import ( - "context" - "fmt" - "os" - "sync" - - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/progress" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/jsonrpc2" -) - -const concurrentAnalyses = 1 - -// NewServer creates an LSP server and binds it to handle incoming client -// messages on the supplied stream. 
-func NewServer(session *cache.Session, client protocol.ClientCloser, options *source.Options) *Server { - return &Server{ - diagnostics: map[span.URI]*fileReports{}, - gcOptimizationDetails: make(map[source.PackageID]struct{}), - watchedGlobPatterns: nil, // empty - changedFiles: make(map[span.URI]struct{}), - session: session, - client: client, - diagnosticsSema: make(chan struct{}, concurrentAnalyses), - progress: progress.NewTracker(client), - options: options, - } -} - -type serverState int - -const ( - serverCreated = serverState(iota) - serverInitializing // set once the server has received "initialize" request - serverInitialized // set once the server has received "initialized" request - serverShutDown -) - -func (s serverState) String() string { - switch s { - case serverCreated: - return "created" - case serverInitializing: - return "initializing" - case serverInitialized: - return "initialized" - case serverShutDown: - return "shutDown" - } - return fmt.Sprintf("(unknown state: %d)", int(s)) -} - -// Server implements the protocol.Server interface. -type Server struct { - client protocol.ClientCloser - - stateMu sync.Mutex - state serverState - // notifications generated before serverInitialized - notifications []*protocol.ShowMessageParams - - session *cache.Session - - tempDir string - - // changedFiles tracks files for which there has been a textDocument/didChange. - changedFilesMu sync.Mutex - changedFiles map[span.URI]struct{} - - // folders is only valid between initialize and initialized, and holds the - // set of folders to build views for when we are ready - pendingFolders []protocol.WorkspaceFolder - - // watchedGlobPatterns is the set of glob patterns that we have requested - // the client watch on disk. It will be updated as the set of directories - // that the server should watch changes. - // The map field may be reassigned but the map is immutable. 
- watchedGlobPatternsMu sync.Mutex - watchedGlobPatterns map[string]struct{} - watchRegistrationCount int - - diagnosticsMu sync.Mutex - diagnostics map[span.URI]*fileReports - - // gcOptimizationDetails describes the packages for which we want - // optimization details to be included in the diagnostics. The key is the - // ID of the package. - gcOptimizationDetailsMu sync.Mutex - gcOptimizationDetails map[source.PackageID]struct{} - - // diagnosticsSema limits the concurrency of diagnostics runs, which can be - // expensive. - diagnosticsSema chan struct{} - - progress *progress.Tracker - - // When the workspace fails to load, we show its status through a progress - // report with an error message. - criticalErrorStatusMu sync.Mutex - criticalErrorStatus *progress.WorkDone - - // Track an ongoing CPU profile created with the StartProfile command and - // terminated with the StopProfile command. - ongoingProfileMu sync.Mutex - ongoingProfile *os.File // if non-nil, an ongoing profile is writing to this file - - // Track most recently requested options. - optionsMu sync.Mutex - options *source.Options -} - -func (s *Server) workDoneProgressCancel(ctx context.Context, params *protocol.WorkDoneProgressCancelParams) error { - ctx, done := event.Start(ctx, "lsp.Server.workDoneProgressCancel") - defer done() - - return s.progress.Cancel(params.Token) -} - -func (s *Server) nonstandardRequest(ctx context.Context, method string, params interface{}) (interface{}, error) { - ctx, done := event.Start(ctx, "lsp.Server.nonstandardRequest") - defer done() - - switch method { - case "gopls/diagnoseFiles": - paramMap := params.(map[string]interface{}) - // TODO(adonovan): opt: parallelize FileDiagnostics(URI...), either - // by calling it in multiple goroutines or, better, by making - // the relevant APIs accept a set of URIs/packages. 
- for _, file := range paramMap["files"].([]interface{}) { - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, protocol.DocumentURI(file.(string)), source.UnknownKind) - defer release() - if !ok { - return nil, err - } - - fileID, diagnostics, err := s.diagnoseFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - if err := s.client.PublishDiagnostics(ctx, &protocol.PublishDiagnosticsParams{ - URI: protocol.URIFromSpanURI(fh.URI()), - Diagnostics: toProtocolDiagnostics(diagnostics), - Version: fileID.Version(), - }); err != nil { - return nil, err - } - } - if err := s.client.PublishDiagnostics(ctx, &protocol.PublishDiagnosticsParams{ - URI: "gopls://diagnostics-done", - }); err != nil { - return nil, err - } - return struct{}{}, nil - } - return nil, notImplemented(method) -} - -// fileDiagnostics reports diagnostics in the specified file, -// as used by the "gopls check" or "gopls fix" commands. -// -// TODO(adonovan): opt: this function is called in a loop from the -// "gopls/diagnoseFiles" nonstandard request handler. It would be more -// efficient to compute the set of packages and TypeCheck and -// Analyze them all at once. Or instead support textDocument/diagnostic -// (golang/go#60122). 
-func (s *Server) diagnoseFile(ctx context.Context, snapshot source.Snapshot, uri span.URI) (source.FileHandle, []*source.Diagnostic, error) { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, nil, err - } - pkg, _, err := source.NarrowestPackageForFile(ctx, snapshot, uri) - if err != nil { - return nil, nil, err - } - pkgDiags, err := pkg.DiagnosticsForFile(ctx, snapshot, uri) - if err != nil { - return nil, nil, err - } - adiags, err := source.Analyze(ctx, snapshot, map[source.PackageID]unit{pkg.Metadata().ID: {}}, nil /* progress tracker */) - if err != nil { - return nil, nil, err - } - var td, ad []*source.Diagnostic // combine load/parse/type + analysis diagnostics - source.CombineDiagnostics(pkgDiags, adiags[uri], &td, &ad) - s.storeDiagnostics(snapshot, uri, typeCheckSource, td, true) - s.storeDiagnostics(snapshot, uri, analysisSource, ad, true) - return fh, append(td, ad...), nil -} - -func notImplemented(method string) error { - return fmt.Errorf("%w: %q not yet implemented", jsonrpc2.ErrMethodNotFound, method) -} diff --git a/gopls/internal/lsp/server_gen.go b/gopls/internal/lsp/server_gen.go deleted file mode 100644 index 7ed9190b789..00000000000 --- a/gopls/internal/lsp/server_gen.go +++ /dev/null @@ -1,309 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -// Code generated by gopls/internal/lsp/helper. DO NOT EDIT. 
- -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/protocol" -) - -func (s *Server) CodeAction(ctx context.Context, params *protocol.CodeActionParams) ([]protocol.CodeAction, error) { - return s.codeAction(ctx, params) -} - -func (s *Server) CodeLens(ctx context.Context, params *protocol.CodeLensParams) ([]protocol.CodeLens, error) { - return s.codeLens(ctx, params) -} - -func (s *Server) ColorPresentation(context.Context, *protocol.ColorPresentationParams) ([]protocol.ColorPresentation, error) { - return nil, notImplemented("ColorPresentation") -} - -func (s *Server) Completion(ctx context.Context, params *protocol.CompletionParams) (*protocol.CompletionList, error) { - return s.completion(ctx, params) -} - -func (s *Server) Declaration(context.Context, *protocol.DeclarationParams) (*protocol.Or_textDocument_declaration, error) { - return nil, notImplemented("Declaration") -} - -func (s *Server) Definition(ctx context.Context, params *protocol.DefinitionParams) ([]protocol.Location, error) { - return s.definition(ctx, params) -} - -func (s *Server) Diagnostic(context.Context, *string) (*string, error) { - return nil, notImplemented("Diagnostic") -} - -func (s *Server) DiagnosticWorkspace(context.Context, *protocol.WorkspaceDiagnosticParams) (*protocol.WorkspaceDiagnosticReport, error) { - return nil, notImplemented("DiagnosticWorkspace") -} - -func (s *Server) DidChange(ctx context.Context, params *protocol.DidChangeTextDocumentParams) error { - return s.didChange(ctx, params) -} - -func (s *Server) DidChangeConfiguration(ctx context.Context, _gen *protocol.DidChangeConfigurationParams) error { - return s.didChangeConfiguration(ctx, _gen) -} - -func (s *Server) DidChangeNotebookDocument(context.Context, *protocol.DidChangeNotebookDocumentParams) error { - return notImplemented("DidChangeNotebookDocument") -} - -func (s *Server) DidChangeWatchedFiles(ctx context.Context, params *protocol.DidChangeWatchedFilesParams) error { - return 
s.didChangeWatchedFiles(ctx, params) -} - -func (s *Server) DidChangeWorkspaceFolders(ctx context.Context, params *protocol.DidChangeWorkspaceFoldersParams) error { - return s.didChangeWorkspaceFolders(ctx, params) -} - -func (s *Server) DidClose(ctx context.Context, params *protocol.DidCloseTextDocumentParams) error { - return s.didClose(ctx, params) -} - -func (s *Server) DidCloseNotebookDocument(context.Context, *protocol.DidCloseNotebookDocumentParams) error { - return notImplemented("DidCloseNotebookDocument") -} - -func (s *Server) DidCreateFiles(context.Context, *protocol.CreateFilesParams) error { - return notImplemented("DidCreateFiles") -} - -func (s *Server) DidDeleteFiles(context.Context, *protocol.DeleteFilesParams) error { - return notImplemented("DidDeleteFiles") -} - -func (s *Server) DidOpen(ctx context.Context, params *protocol.DidOpenTextDocumentParams) error { - return s.didOpen(ctx, params) -} - -func (s *Server) DidOpenNotebookDocument(context.Context, *protocol.DidOpenNotebookDocumentParams) error { - return notImplemented("DidOpenNotebookDocument") -} - -func (s *Server) DidRenameFiles(context.Context, *protocol.RenameFilesParams) error { - return notImplemented("DidRenameFiles") -} - -func (s *Server) DidSave(ctx context.Context, params *protocol.DidSaveTextDocumentParams) error { - return s.didSave(ctx, params) -} - -func (s *Server) DidSaveNotebookDocument(context.Context, *protocol.DidSaveNotebookDocumentParams) error { - return notImplemented("DidSaveNotebookDocument") -} - -func (s *Server) DocumentColor(context.Context, *protocol.DocumentColorParams) ([]protocol.ColorInformation, error) { - return nil, notImplemented("DocumentColor") -} - -func (s *Server) DocumentHighlight(ctx context.Context, params *protocol.DocumentHighlightParams) ([]protocol.DocumentHighlight, error) { - return s.documentHighlight(ctx, params) -} - -func (s *Server) DocumentLink(ctx context.Context, params *protocol.DocumentLinkParams) ([]protocol.DocumentLink, 
error) { - return s.documentLink(ctx, params) -} - -func (s *Server) DocumentSymbol(ctx context.Context, params *protocol.DocumentSymbolParams) ([]interface{}, error) { - return s.documentSymbol(ctx, params) -} - -func (s *Server) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCommandParams) (interface{}, error) { - return s.executeCommand(ctx, params) -} - -func (s *Server) Exit(ctx context.Context) error { - return s.exit(ctx) -} - -func (s *Server) FoldingRange(ctx context.Context, params *protocol.FoldingRangeParams) ([]protocol.FoldingRange, error) { - return s.foldingRange(ctx, params) -} - -func (s *Server) Formatting(ctx context.Context, params *protocol.DocumentFormattingParams) ([]protocol.TextEdit, error) { - return s.formatting(ctx, params) -} - -func (s *Server) Hover(ctx context.Context, params *protocol.HoverParams) (*protocol.Hover, error) { - return s.hover(ctx, params) -} - -func (s *Server) Implementation(ctx context.Context, params *protocol.ImplementationParams) ([]protocol.Location, error) { - return s.implementation(ctx, params) -} - -func (s *Server) IncomingCalls(ctx context.Context, params *protocol.CallHierarchyIncomingCallsParams) ([]protocol.CallHierarchyIncomingCall, error) { - return s.incomingCalls(ctx, params) -} - -func (s *Server) Initialize(ctx context.Context, params *protocol.ParamInitialize) (*protocol.InitializeResult, error) { - return s.initialize(ctx, params) -} - -func (s *Server) Initialized(ctx context.Context, params *protocol.InitializedParams) error { - return s.initialized(ctx, params) -} - -func (s *Server) InlayHint(ctx context.Context, params *protocol.InlayHintParams) ([]protocol.InlayHint, error) { - return s.inlayHint(ctx, params) -} - -func (s *Server) InlineCompletion(context.Context, *protocol.InlineCompletionParams) (*protocol.Or_Result_textDocument_inlineCompletion, error) { - return nil, notImplemented("InlineCompletion") -} - -func (s *Server) InlineValue(context.Context, 
*protocol.InlineValueParams) ([]protocol.InlineValue, error) { - return nil, notImplemented("InlineValue") -} - -func (s *Server) LinkedEditingRange(context.Context, *protocol.LinkedEditingRangeParams) (*protocol.LinkedEditingRanges, error) { - return nil, notImplemented("LinkedEditingRange") -} - -func (s *Server) Moniker(context.Context, *protocol.MonikerParams) ([]protocol.Moniker, error) { - return nil, notImplemented("Moniker") -} - -func (s *Server) NonstandardRequest(ctx context.Context, method string, params interface{}) (interface{}, error) { - return s.nonstandardRequest(ctx, method, params) -} - -func (s *Server) OnTypeFormatting(context.Context, *protocol.DocumentOnTypeFormattingParams) ([]protocol.TextEdit, error) { - return nil, notImplemented("OnTypeFormatting") -} - -func (s *Server) OutgoingCalls(ctx context.Context, params *protocol.CallHierarchyOutgoingCallsParams) ([]protocol.CallHierarchyOutgoingCall, error) { - return s.outgoingCalls(ctx, params) -} - -func (s *Server) PrepareCallHierarchy(ctx context.Context, params *protocol.CallHierarchyPrepareParams) ([]protocol.CallHierarchyItem, error) { - return s.prepareCallHierarchy(ctx, params) -} - -func (s *Server) PrepareRename(ctx context.Context, params *protocol.PrepareRenameParams) (*protocol.PrepareRename2Gn, error) { - return s.prepareRename(ctx, params) -} - -func (s *Server) PrepareTypeHierarchy(context.Context, *protocol.TypeHierarchyPrepareParams) ([]protocol.TypeHierarchyItem, error) { - return nil, notImplemented("PrepareTypeHierarchy") -} - -func (s *Server) Progress(context.Context, *protocol.ProgressParams) error { - return notImplemented("Progress") -} - -func (s *Server) RangeFormatting(context.Context, *protocol.DocumentRangeFormattingParams) ([]protocol.TextEdit, error) { - return nil, notImplemented("RangeFormatting") -} - -func (s *Server) RangesFormatting(context.Context, *protocol.DocumentRangesFormattingParams) ([]protocol.TextEdit, error) { - return nil, 
notImplemented("RangesFormatting") -} - -func (s *Server) References(ctx context.Context, params *protocol.ReferenceParams) ([]protocol.Location, error) { - return s.references(ctx, params) -} - -func (s *Server) Rename(ctx context.Context, params *protocol.RenameParams) (*protocol.WorkspaceEdit, error) { - return s.rename(ctx, params) -} - -func (s *Server) Resolve(context.Context, *protocol.InlayHint) (*protocol.InlayHint, error) { - return nil, notImplemented("Resolve") -} - -func (s *Server) ResolveCodeAction(context.Context, *protocol.CodeAction) (*protocol.CodeAction, error) { - return nil, notImplemented("ResolveCodeAction") -} - -func (s *Server) ResolveCodeLens(context.Context, *protocol.CodeLens) (*protocol.CodeLens, error) { - return nil, notImplemented("ResolveCodeLens") -} - -func (s *Server) ResolveCompletionItem(context.Context, *protocol.CompletionItem) (*protocol.CompletionItem, error) { - return nil, notImplemented("ResolveCompletionItem") -} - -func (s *Server) ResolveDocumentLink(context.Context, *protocol.DocumentLink) (*protocol.DocumentLink, error) { - return nil, notImplemented("ResolveDocumentLink") -} - -func (s *Server) ResolveWorkspaceSymbol(context.Context, *protocol.WorkspaceSymbol) (*protocol.WorkspaceSymbol, error) { - return nil, notImplemented("ResolveWorkspaceSymbol") -} - -func (s *Server) SelectionRange(ctx context.Context, params *protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) { - return s.selectionRange(ctx, params) -} - -func (s *Server) SemanticTokensFull(ctx context.Context, params *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) { - return s.semanticTokensFull(ctx, params) -} - -func (s *Server) SemanticTokensFullDelta(context.Context, *protocol.SemanticTokensDeltaParams) (interface{}, error) { - return nil, notImplemented("SemanticTokensFullDelta") -} - -func (s *Server) SemanticTokensRange(ctx context.Context, params *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, 
error) { - return s.semanticTokensRange(ctx, params) -} - -func (s *Server) SetTrace(context.Context, *protocol.SetTraceParams) error { - return notImplemented("SetTrace") -} - -func (s *Server) Shutdown(ctx context.Context) error { - return s.shutdown(ctx) -} - -func (s *Server) SignatureHelp(ctx context.Context, params *protocol.SignatureHelpParams) (*protocol.SignatureHelp, error) { - return s.signatureHelp(ctx, params) -} - -func (s *Server) Subtypes(context.Context, *protocol.TypeHierarchySubtypesParams) ([]protocol.TypeHierarchyItem, error) { - return nil, notImplemented("Subtypes") -} - -func (s *Server) Supertypes(context.Context, *protocol.TypeHierarchySupertypesParams) ([]protocol.TypeHierarchyItem, error) { - return nil, notImplemented("Supertypes") -} - -func (s *Server) Symbol(ctx context.Context, params *protocol.WorkspaceSymbolParams) ([]protocol.SymbolInformation, error) { - return s.symbol(ctx, params) -} - -func (s *Server) TypeDefinition(ctx context.Context, params *protocol.TypeDefinitionParams) ([]protocol.Location, error) { - return s.typeDefinition(ctx, params) -} - -func (s *Server) WillCreateFiles(context.Context, *protocol.CreateFilesParams) (*protocol.WorkspaceEdit, error) { - return nil, notImplemented("WillCreateFiles") -} - -func (s *Server) WillDeleteFiles(context.Context, *protocol.DeleteFilesParams) (*protocol.WorkspaceEdit, error) { - return nil, notImplemented("WillDeleteFiles") -} - -func (s *Server) WillRenameFiles(context.Context, *protocol.RenameFilesParams) (*protocol.WorkspaceEdit, error) { - return nil, notImplemented("WillRenameFiles") -} - -func (s *Server) WillSave(context.Context, *protocol.WillSaveTextDocumentParams) error { - return notImplemented("WillSave") -} - -func (s *Server) WillSaveWaitUntil(context.Context, *protocol.WillSaveTextDocumentParams) ([]protocol.TextEdit, error) { - return nil, notImplemented("WillSaveWaitUntil") -} - -func (s *Server) WorkDoneProgressCancel(ctx context.Context, params 
*protocol.WorkDoneProgressCancelParams) error { - return s.workDoneProgressCancel(ctx, params) -} diff --git a/gopls/internal/lsp/signature_help.go b/gopls/internal/lsp/signature_help.go deleted file mode 100644 index b0249ebbc3f..00000000000 --- a/gopls/internal/lsp/signature_help.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -func (s *Server) signatureHelp(ctx context.Context, params *protocol.SignatureHelpParams) (*protocol.SignatureHelp, error) { - ctx, done := event.Start(ctx, "lsp.Server.signatureHelp", tag.URI.Of(params.TextDocument.URI)) - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.Go) - defer release() - if !ok { - return nil, err - } - info, activeParameter, err := source.SignatureHelp(ctx, snapshot, fh, params.Position) - if err != nil { - event.Error(ctx, "no signature help", err, tag.Position.Of(params.Position)) - return nil, nil // sic? There could be many reasons for failure. - } - return &protocol.SignatureHelp{ - Signatures: []protocol.SignatureInformation{*info}, - ActiveParameter: uint32(activeParameter), - }, nil -} diff --git a/gopls/internal/lsp/source/add_import.go b/gopls/internal/lsp/source/add_import.go deleted file mode 100644 index cd8ec7ab70b..00000000000 --- a/gopls/internal/lsp/source/add_import.go +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package source - -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/imports" -) - -// AddImport adds a single import statement to the given file -func AddImport(ctx context.Context, snapshot Snapshot, fh FileHandle, importPath string) ([]protocol.TextEdit, error) { - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, err - } - return ComputeOneImportFixEdits(snapshot, pgf, &imports.ImportFix{ - StmtInfo: imports.ImportInfo{ - ImportPath: importPath, - }, - FixType: imports.AddImport, - }) -} diff --git a/gopls/internal/lsp/source/call_hierarchy.go b/gopls/internal/lsp/source/call_hierarchy.go deleted file mode 100644 index 8faf4251eee..00000000000 --- a/gopls/internal/lsp/source/call_hierarchy.go +++ /dev/null @@ -1,311 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package source - -import ( - "context" - "errors" - "fmt" - "go/ast" - "go/token" - "go/types" - "path/filepath" - - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -// PrepareCallHierarchy returns an array of CallHierarchyItem for a file and the position within the file. 
-func PrepareCallHierarchy(ctx context.Context, snapshot Snapshot, fh FileHandle, pp protocol.Position) ([]protocol.CallHierarchyItem, error) { - ctx, done := event.Start(ctx, "source.PrepareCallHierarchy") - defer done() - - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - pos, err := pgf.PositionPos(pp) - if err != nil { - return nil, err - } - - _, obj, _ := referencedObject(pkg, pgf, pos) - if obj == nil { - return nil, nil - } - - if _, ok := obj.Type().Underlying().(*types.Signature); !ok { - return nil, nil - } - - declLoc, err := mapPosition(ctx, pkg.FileSet(), snapshot, obj.Pos(), adjustedObjEnd(obj)) - if err != nil { - return nil, err - } - rng := declLoc.Range - - callHierarchyItem := protocol.CallHierarchyItem{ - Name: obj.Name(), - Kind: protocol.Function, - Tags: []protocol.SymbolTag{}, - Detail: fmt.Sprintf("%s • %s", obj.Pkg().Path(), filepath.Base(declLoc.URI.SpanURI().Filename())), - URI: declLoc.URI, - Range: rng, - SelectionRange: rng, - } - return []protocol.CallHierarchyItem{callHierarchyItem}, nil -} - -// IncomingCalls returns an array of CallHierarchyIncomingCall for a file and the position within the file. -func IncomingCalls(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) ([]protocol.CallHierarchyIncomingCall, error) { - ctx, done := event.Start(ctx, "source.IncomingCalls") - defer done() - - refs, err := references(ctx, snapshot, fh, pos, false) - if err != nil { - if errors.Is(err, ErrNoIdentFound) || errors.Is(err, errNoObjectFound) { - return nil, nil - } - return nil, err - } - - // Group references by their enclosing function declaration. 
- incomingCalls := make(map[protocol.Location]*protocol.CallHierarchyIncomingCall) - for _, ref := range refs { - callItem, err := enclosingNodeCallItem(ctx, snapshot, ref.pkgPath, ref.location) - if err != nil { - event.Error(ctx, "error getting enclosing node", err, tag.Method.Of(string(ref.pkgPath))) - continue - } - loc := protocol.Location{ - URI: callItem.URI, - Range: callItem.Range, - } - call, ok := incomingCalls[loc] - if !ok { - call = &protocol.CallHierarchyIncomingCall{From: callItem} - incomingCalls[loc] = call - } - call.FromRanges = append(call.FromRanges, ref.location.Range) - } - - // Flatten the map of pointers into a slice of values. - incomingCallItems := make([]protocol.CallHierarchyIncomingCall, 0, len(incomingCalls)) - for _, callItem := range incomingCalls { - incomingCallItems = append(incomingCallItems, *callItem) - } - return incomingCallItems, nil -} - -// enclosingNodeCallItem creates a CallHierarchyItem representing the function call at loc. -func enclosingNodeCallItem(ctx context.Context, snapshot Snapshot, pkgPath PackagePath, loc protocol.Location) (protocol.CallHierarchyItem, error) { - // Parse the file containing the reference. - fh, err := snapshot.ReadFile(ctx, loc.URI.SpanURI()) - if err != nil { - return protocol.CallHierarchyItem{}, err - } - // TODO(adonovan): opt: before parsing, trim the bodies of functions - // that don't contain the reference, using either a scanner-based - // implementation such as https://go.dev/play/p/KUrObH1YkX8 - // (~31% speedup), or a byte-oriented implementation (2x speedup). - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return protocol.CallHierarchyItem{}, err - } - start, end, err := pgf.RangePos(loc.Range) - if err != nil { - return protocol.CallHierarchyItem{}, err - } - - // Find the enclosing function, if any, and the number of func literals in between. 
- var funcDecl *ast.FuncDecl - var funcLit *ast.FuncLit // innermost function literal - var litCount int - path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) -outer: - for _, node := range path { - switch n := node.(type) { - case *ast.FuncDecl: - funcDecl = n - break outer - case *ast.FuncLit: - litCount++ - if litCount > 1 { - continue - } - funcLit = n - } - } - - nameIdent := path[len(path)-1].(*ast.File).Name - kind := protocol.Package - if funcDecl != nil { - nameIdent = funcDecl.Name - kind = protocol.Function - } - - nameStart, nameEnd := nameIdent.Pos(), nameIdent.End() - if funcLit != nil { - nameStart, nameEnd = funcLit.Type.Func, funcLit.Type.Params.Pos() - kind = protocol.Function - } - rng, err := pgf.PosRange(nameStart, nameEnd) - if err != nil { - return protocol.CallHierarchyItem{}, err - } - - name := nameIdent.Name - for i := 0; i < litCount; i++ { - name += ".func()" - } - - return protocol.CallHierarchyItem{ - Name: name, - Kind: kind, - Tags: []protocol.SymbolTag{}, - Detail: fmt.Sprintf("%s • %s", pkgPath, filepath.Base(fh.URI().Filename())), - URI: loc.URI, - Range: rng, - SelectionRange: rng, - }, nil -} - -// OutgoingCalls returns an array of CallHierarchyOutgoingCall for a file and the position within the file. -func OutgoingCalls(ctx context.Context, snapshot Snapshot, fh FileHandle, pp protocol.Position) ([]protocol.CallHierarchyOutgoingCall, error) { - ctx, done := event.Start(ctx, "source.OutgoingCalls") - defer done() - - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - pos, err := pgf.PositionPos(pp) - if err != nil { - return nil, err - } - - _, obj, _ := referencedObject(pkg, pgf, pos) - if obj == nil { - return nil, nil - } - - if _, ok := obj.Type().Underlying().(*types.Signature); !ok { - return nil, nil - } - - // Skip builtins. 
- if obj.Pkg() == nil { - return nil, nil - } - - if !obj.Pos().IsValid() { - return nil, bug.Errorf("internal error: object %s.%s missing position", obj.Pkg().Path(), obj.Name()) - } - - declFile := pkg.FileSet().File(obj.Pos()) - if declFile == nil { - return nil, bug.Errorf("file not found for %d", obj.Pos()) - } - - uri := span.URIFromPath(declFile.Name()) - offset, err := safetoken.Offset(declFile, obj.Pos()) - if err != nil { - return nil, err - } - - // Use TypecheckFull as we want to inspect the body of the function declaration. - declPkg, declPGF, err := NarrowestPackageForFile(ctx, snapshot, uri) - if err != nil { - return nil, err - } - - declPos, err := safetoken.Pos(declPGF.Tok, offset) - if err != nil { - return nil, err - } - - declNode, _, _ := findDeclInfo([]*ast.File{declPGF.File}, declPos) - if declNode == nil { - // TODO(rfindley): why don't we return an error here, or even bug.Errorf? - return nil, nil - // return nil, bug.Errorf("failed to find declaration for object %s.%s", obj.Pkg().Path(), obj.Name()) - } - - type callRange struct { - start, end token.Pos - } - callRanges := []callRange{} - ast.Inspect(declNode, func(n ast.Node) bool { - if call, ok := n.(*ast.CallExpr); ok { - var start, end token.Pos - switch n := call.Fun.(type) { - case *ast.SelectorExpr: - start, end = n.Sel.NamePos, call.Lparen - case *ast.Ident: - start, end = n.NamePos, call.Lparen - case *ast.FuncLit: - // while we don't add the function literal as an 'outgoing' call - // we still want to traverse into it - return true - default: - // ignore any other kind of call expressions - // for ex: direct function literal calls since that's not an 'outgoing' call - return false - } - callRanges = append(callRanges, callRange{start: start, end: end}) - } - return true - }) - - outgoingCalls := map[token.Pos]*protocol.CallHierarchyOutgoingCall{} - for _, callRange := range callRanges { - _, obj, _ := referencedObject(declPkg, declPGF, callRange.start) - if obj == nil { - 
continue - } - - // ignore calls to builtin functions - if obj.Pkg() == nil { - continue - } - - outgoingCall, ok := outgoingCalls[obj.Pos()] - if !ok { - loc, err := mapPosition(ctx, declPkg.FileSet(), snapshot, obj.Pos(), obj.Pos()+token.Pos(len(obj.Name()))) - if err != nil { - return nil, err - } - outgoingCall = &protocol.CallHierarchyOutgoingCall{ - To: protocol.CallHierarchyItem{ - Name: obj.Name(), - Kind: protocol.Function, - Tags: []protocol.SymbolTag{}, - Detail: fmt.Sprintf("%s • %s", obj.Pkg().Path(), filepath.Base(loc.URI.SpanURI().Filename())), - URI: loc.URI, - Range: loc.Range, - SelectionRange: loc.Range, - }, - } - outgoingCalls[obj.Pos()] = outgoingCall - } - - rng, err := declPGF.PosRange(callRange.start, callRange.end) - if err != nil { - return nil, err - } - outgoingCall.FromRanges = append(outgoingCall.FromRanges, rng) - } - - outgoingCallItems := make([]protocol.CallHierarchyOutgoingCall, 0, len(outgoingCalls)) - for _, callItem := range outgoingCalls { - outgoingCallItems = append(outgoingCallItems, *callItem) - } - return outgoingCallItems, nil -} diff --git a/gopls/internal/lsp/source/code_lens.go b/gopls/internal/lsp/source/code_lens.go deleted file mode 100644 index c46bbad68fe..00000000000 --- a/gopls/internal/lsp/source/code_lens.go +++ /dev/null @@ -1,248 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package source - -import ( - "context" - "go/ast" - "go/token" - "go/types" - "path/filepath" - "regexp" - "strings" - - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" -) - -type LensFunc func(context.Context, Snapshot, FileHandle) ([]protocol.CodeLens, error) - -// LensFuncs returns the supported lensFuncs for Go files. 
-func LensFuncs() map[command.Command]LensFunc { - return map[command.Command]LensFunc{ - command.Generate: goGenerateCodeLens, - command.Test: runTestCodeLens, - command.RegenerateCgo: regenerateCgoLens, - command.GCDetails: toggleDetailsCodeLens, - } -} - -var ( - testRe = regexp.MustCompile("^Test[^a-z]") - benchmarkRe = regexp.MustCompile("^Benchmark[^a-z]") -) - -func runTestCodeLens(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.CodeLens, error) { - var codeLens []protocol.CodeLens - - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - fns, err := TestsAndBenchmarks(pkg, pgf) - if err != nil { - return nil, err - } - puri := protocol.URIFromSpanURI(fh.URI()) - for _, fn := range fns.Tests { - cmd, err := command.NewTestCommand("run test", puri, []string{fn.Name}, nil) - if err != nil { - return nil, err - } - rng := protocol.Range{Start: fn.Rng.Start, End: fn.Rng.Start} - codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: &cmd}) - } - - for _, fn := range fns.Benchmarks { - cmd, err := command.NewTestCommand("run benchmark", puri, nil, []string{fn.Name}) - if err != nil { - return nil, err - } - rng := protocol.Range{Start: fn.Rng.Start, End: fn.Rng.Start} - codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: &cmd}) - } - - if len(fns.Benchmarks) > 0 { - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, err - } - // add a code lens to the top of the file which runs all benchmarks in the file - rng, err := pgf.PosRange(pgf.File.Package, pgf.File.Package) - if err != nil { - return nil, err - } - var benches []string - for _, fn := range fns.Benchmarks { - benches = append(benches, fn.Name) - } - cmd, err := command.NewTestCommand("run file benchmarks", puri, nil, benches) - if err != nil { - return nil, err - } - codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: &cmd}) - } - return codeLens, nil -} - -type 
TestFn struct { - Name string - Rng protocol.Range -} - -type TestFns struct { - Tests []TestFn - Benchmarks []TestFn -} - -func TestsAndBenchmarks(pkg Package, pgf *ParsedGoFile) (TestFns, error) { - var out TestFns - - if !strings.HasSuffix(pgf.URI.Filename(), "_test.go") { - return out, nil - } - - for _, d := range pgf.File.Decls { - fn, ok := d.(*ast.FuncDecl) - if !ok { - continue - } - - rng, err := pgf.NodeRange(fn) - if err != nil { - return out, err - } - - if matchTestFunc(fn, pkg, testRe, "T") { - out.Tests = append(out.Tests, TestFn{fn.Name.Name, rng}) - } - - if matchTestFunc(fn, pkg, benchmarkRe, "B") { - out.Benchmarks = append(out.Benchmarks, TestFn{fn.Name.Name, rng}) - } - } - - return out, nil -} - -func matchTestFunc(fn *ast.FuncDecl, pkg Package, nameRe *regexp.Regexp, paramID string) bool { - // Make sure that the function name matches a test function. - if !nameRe.MatchString(fn.Name.Name) { - return false - } - info := pkg.GetTypesInfo() - if info == nil { - return false - } - obj := info.ObjectOf(fn.Name) - if obj == nil { - return false - } - sig, ok := obj.Type().(*types.Signature) - if !ok { - return false - } - // Test functions should have only one parameter. 
- if sig.Params().Len() != 1 { - return false - } - - // Check the type of the only parameter - paramTyp, ok := sig.Params().At(0).Type().(*types.Pointer) - if !ok { - return false - } - named, ok := paramTyp.Elem().(*types.Named) - if !ok { - return false - } - namedObj := named.Obj() - if namedObj.Pkg().Path() != "testing" { - return false - } - return namedObj.Id() == paramID -} - -func goGenerateCodeLens(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.CodeLens, error) { - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, err - } - const ggDirective = "//go:generate" - for _, c := range pgf.File.Comments { - for _, l := range c.List { - if !strings.HasPrefix(l.Text, ggDirective) { - continue - } - rng, err := pgf.PosRange(l.Pos(), l.Pos()+token.Pos(len(ggDirective))) - if err != nil { - return nil, err - } - dir := protocol.URIFromSpanURI(span.URIFromPath(filepath.Dir(fh.URI().Filename()))) - nonRecursiveCmd, err := command.NewGenerateCommand("run go generate", command.GenerateArgs{Dir: dir, Recursive: false}) - if err != nil { - return nil, err - } - recursiveCmd, err := command.NewGenerateCommand("run go generate ./...", command.GenerateArgs{Dir: dir, Recursive: true}) - if err != nil { - return nil, err - } - return []protocol.CodeLens{ - {Range: rng, Command: &recursiveCmd}, - {Range: rng, Command: &nonRecursiveCmd}, - }, nil - - } - } - return nil, nil -} - -func regenerateCgoLens(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.CodeLens, error) { - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, err - } - var c *ast.ImportSpec - for _, imp := range pgf.File.Imports { - if imp.Path.Value == `"C"` { - c = imp - } - } - if c == nil { - return nil, nil - } - rng, err := pgf.NodeRange(c) - if err != nil { - return nil, err - } - puri := protocol.URIFromSpanURI(fh.URI()) - cmd, err := command.NewRegenerateCgoCommand("regenerate cgo definitions", 
command.URIArg{URI: puri}) - if err != nil { - return nil, err - } - return []protocol.CodeLens{{Range: rng, Command: &cmd}}, nil -} - -func toggleDetailsCodeLens(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.CodeLens, error) { - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, err - } - if !pgf.File.Package.IsValid() { - // Without a package name we have nowhere to put the codelens, so give up. - return nil, nil - } - rng, err := pgf.PosRange(pgf.File.Package, pgf.File.Package) - if err != nil { - return nil, err - } - puri := protocol.URIFromSpanURI(fh.URI()) - cmd, err := command.NewGCDetailsCommand("Toggle gc annotation details", puri) - if err != nil { - return nil, err - } - return []protocol.CodeLens{{Range: rng, Command: &cmd}}, nil -} diff --git a/gopls/internal/lsp/source/comment.go b/gopls/internal/lsp/source/comment.go deleted file mode 100644 index beed328ae86..00000000000 --- a/gopls/internal/lsp/source/comment.go +++ /dev/null @@ -1,384 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.19 -// +build !go1.19 - -package source - -import ( - "bytes" - "io" - "regexp" - "strings" - "unicode" - "unicode/utf8" -) - -// CommentToMarkdown converts comment text to formatted markdown. -// The comment was prepared by DocReader, -// so it is known not to have leading, trailing blank lines -// nor to have trailing spaces at the end of lines. -// The comment markers have already been removed. -// -// Each line is converted into a markdown line and empty lines are just converted to -// newlines. Heading are prefixed with `### ` to make it a markdown heading. -// -// A span of indented lines retains a 4 space prefix block, with the common indent -// prefix removed unless empty, in which case it will be converted to a newline. 
-// -// URLs in the comment text are converted into links. -func CommentToMarkdown(text string, _ *Options) string { - buf := &bytes.Buffer{} - commentToMarkdown(buf, text) - return buf.String() -} - -var ( - mdNewline = []byte("\n") - mdHeader = []byte("### ") - mdIndent = []byte(" ") - mdLinkStart = []byte("[") - mdLinkDiv = []byte("](") - mdLinkEnd = []byte(")") -) - -func commentToMarkdown(w io.Writer, text string) { - blocks := blocks(text) - for i, b := range blocks { - switch b.op { - case opPara: - for _, line := range b.lines { - emphasize(w, line, true) - } - case opHead: - // The header block can consist of only one line. - // However, check the number of lines, just in case. - if len(b.lines) == 0 { - // Skip this block. - continue - } - header := b.lines[0] - - w.Write(mdHeader) - commentEscape(w, header, true) - // Header doesn't end with \n unlike the lines of other blocks. - w.Write(mdNewline) - case opPre: - for _, line := range b.lines { - if isBlank(line) { - w.Write(mdNewline) - continue - } - w.Write(mdIndent) - w.Write([]byte(line)) - } - } - - if i < len(blocks)-1 { - w.Write(mdNewline) - } - } -} - -const ( - ulquo = "“" - urquo = "”" -) - -var ( - markdownEscape = regexp.MustCompile(`([\\\x60*{}[\]()#+\-.!_>~|"$%&'\/:;<=?@^])`) - - unicodeQuoteReplacer = strings.NewReplacer("``", ulquo, "''", urquo) -) - -// commentEscape escapes comment text for markdown. If nice is set, -// also turn double ` and ' into “ and ”. 
-func commentEscape(w io.Writer, text string, nice bool) { - if nice { - text = convertQuotes(text) - } - text = escapeRegex(text) - w.Write([]byte(text)) -} - -func convertQuotes(text string) string { - return unicodeQuoteReplacer.Replace(text) -} - -func escapeRegex(text string) string { - return markdownEscape.ReplaceAllString(text, `\$1`) -} - -func emphasize(w io.Writer, line string, nice bool) { - for { - m := matchRx.FindStringSubmatchIndex(line) - if m == nil { - break - } - // m >= 6 (two parenthesized sub-regexps in matchRx, 1st one is urlRx) - - // write text before match - commentEscape(w, line[0:m[0]], nice) - - // adjust match for URLs - match := line[m[0]:m[1]] - if strings.Contains(match, "://") { - m0, m1 := m[0], m[1] - for _, s := range []string{"()", "{}", "[]"} { - open, close := s[:1], s[1:] // E.g., "(" and ")" - // require opening parentheses before closing parentheses (#22285) - if i := strings.Index(match, close); i >= 0 && i < strings.Index(match, open) { - m1 = m0 + i - match = line[m0:m1] - } - // require balanced pairs of parentheses (#5043) - for i := 0; strings.Count(match, open) != strings.Count(match, close) && i < 10; i++ { - m1 = strings.LastIndexAny(line[:m1], s) - match = line[m0:m1] - } - } - if m1 != m[1] { - // redo matching with shortened line for correct indices - m = matchRx.FindStringSubmatchIndex(line[:m[0]+len(match)]) - } - } - - // Following code has been modified from go/doc since words is always - // nil. 
All html formatting has also been transformed into markdown formatting - - // analyze match - url := "" - if m[2] >= 0 { - url = match - } - - // write match - if len(url) > 0 { - w.Write(mdLinkStart) - } - - commentEscape(w, match, nice) - - if len(url) > 0 { - w.Write(mdLinkDiv) - w.Write([]byte(urlReplacer.Replace(url))) - w.Write(mdLinkEnd) - } - - // advance - line = line[m[1]:] - } - commentEscape(w, line, nice) -} - -// Everything from here on is a copy of go/doc/comment.go - -const ( - // Regexp for Go identifiers - identRx = `[\pL_][\pL_0-9]*` - - // Regexp for URLs - // Match parens, and check later for balance - see #5043, #22285 - // Match .,:;?! within path, but not at end - see #18139, #16565 - // This excludes some rare yet valid urls ending in common punctuation - // in order to allow sentences ending in URLs. - - // protocol (required) e.g. http - protoPart = `(https?|ftp|file|gopher|mailto|nntp)` - // host (required) e.g. www.example.com or [::1]:8080 - hostPart = `([a-zA-Z0-9_@\-.\[\]:]+)` - // path+query+fragment (optional) e.g. 
/path/index.html?q=foo#bar - pathPart = `([.,:;?!]*[a-zA-Z0-9$'()*+&#=@~_/\-\[\]%])*` - - urlRx = protoPart + `://` + hostPart + pathPart -) - -var ( - matchRx = regexp.MustCompile(`(` + urlRx + `)|(` + identRx + `)`) - urlReplacer = strings.NewReplacer(`(`, `\(`, `)`, `\)`) -) - -func indentLen(s string) int { - i := 0 - for i < len(s) && (s[i] == ' ' || s[i] == '\t') { - i++ - } - return i -} - -func isBlank(s string) bool { - return len(s) == 0 || (len(s) == 1 && s[0] == '\n') -} - -func commonPrefix(a, b string) string { - i := 0 - for i < len(a) && i < len(b) && a[i] == b[i] { - i++ - } - return a[0:i] -} - -func unindent(block []string) { - if len(block) == 0 { - return - } - - // compute maximum common white prefix - prefix := block[0][0:indentLen(block[0])] - for _, line := range block { - if !isBlank(line) { - prefix = commonPrefix(prefix, line) - } - } - n := len(prefix) - - // remove - for i, line := range block { - if !isBlank(line) { - block[i] = line[n:] - } - } -} - -// heading returns the trimmed line if it passes as a section heading; -// otherwise it returns the empty string. -func heading(line string) string { - line = strings.TrimSpace(line) - if len(line) == 0 { - return "" - } - - // a heading must start with an uppercase letter - r, _ := utf8.DecodeRuneInString(line) - if !unicode.IsLetter(r) || !unicode.IsUpper(r) { - return "" - } - - // it must end in a letter or digit: - r, _ = utf8.DecodeLastRuneInString(line) - if !unicode.IsLetter(r) && !unicode.IsDigit(r) { - return "" - } - - // exclude lines with illegal characters. we allow "()," - if strings.ContainsAny(line, ";:!?+*/=[]{}_^°&§~%#@<\">\\") { - return "" - } - - // allow "'" for possessive "'s" only - for b := line; ; { - i := strings.IndexRune(b, '\'') - if i < 0 { - break - } - if i+1 >= len(b) || b[i+1] != 's' || (i+2 < len(b) && b[i+2] != ' ') { - return "" // not followed by "s " - } - b = b[i+2:] - } - - // allow "." 
when followed by non-space - for b := line; ; { - i := strings.IndexRune(b, '.') - if i < 0 { - break - } - if i+1 >= len(b) || b[i+1] == ' ' { - return "" // not followed by non-space - } - b = b[i+1:] - } - - return line -} - -type op int - -const ( - opPara op = iota - opHead - opPre -) - -type block struct { - op op - lines []string -} - -func blocks(text string) []block { - var ( - out []block - para []string - - lastWasBlank = false - lastWasHeading = false - ) - - close := func() { - if para != nil { - out = append(out, block{opPara, para}) - para = nil - } - } - - lines := strings.SplitAfter(text, "\n") - unindent(lines) - for i := 0; i < len(lines); { - line := lines[i] - if isBlank(line) { - // close paragraph - close() - i++ - lastWasBlank = true - continue - } - if indentLen(line) > 0 { - // close paragraph - close() - - // count indented or blank lines - j := i + 1 - for j < len(lines) && (isBlank(lines[j]) || indentLen(lines[j]) > 0) { - j++ - } - // but not trailing blank lines - for j > i && isBlank(lines[j-1]) { - j-- - } - pre := lines[i:j] - i = j - - unindent(pre) - - // put those lines in a pre block - out = append(out, block{opPre, pre}) - lastWasHeading = false - continue - } - - if lastWasBlank && !lastWasHeading && i+2 < len(lines) && - isBlank(lines[i+1]) && !isBlank(lines[i+2]) && indentLen(lines[i+2]) == 0 { - // current line is non-blank, surrounded by blank lines - // and the next non-blank line is not indented: this - // might be a heading. 
- if head := heading(line); head != "" { - close() - out = append(out, block{opHead, []string{head}}) - i += 2 - lastWasHeading = true - continue - } - } - - // open paragraph - lastWasBlank = false - lastWasHeading = false - para = append(para, lines[i]) - i++ - } - close() - - return out -} diff --git a/gopls/internal/lsp/source/comment_go118_test.go b/gopls/internal/lsp/source/comment_go118_test.go deleted file mode 100644 index 60bd14b9fc8..00000000000 --- a/gopls/internal/lsp/source/comment_go118_test.go +++ /dev/null @@ -1,371 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.19 -// +build !go1.19 - -package source - -import ( - "bytes" - "reflect" - "strings" - "testing" -) - -// This file is a copy of go/doc/comment_test.go with the exception for -// the test cases for TestEmphasize and TestCommentEscape - -var headingTests = []struct { - line string - ok bool -}{ - {"Section", true}, - {"A typical usage", true}, - {"ΔΛΞ is Greek", true}, - {"Foo 42", true}, - {"", false}, - {"section", false}, - {"A typical usage:", false}, - {"This code:", false}, - {"δ is Greek", false}, - {"Foo §", false}, - {"Fermat's Last Sentence", true}, - {"Fermat's", true}, - {"'sX", false}, - {"Ted 'Too' Bar", false}, - {"Use n+m", false}, - {"Scanning:", false}, - {"N:M", false}, -} - -func TestIsHeading(t *testing.T) { - for _, tt := range headingTests { - if h := heading(tt.line); (len(h) > 0) != tt.ok { - t.Errorf("isHeading(%q) = %v, want %v", tt.line, h, tt.ok) - } - } -} - -var blocksTests = []struct { - in string - out []block - text string -}{ - { - in: `Para 1. -Para 1 line 2. - -Para 2. - -Section - -Para 3. - - pre - pre1 - -Para 4. - - pre - pre1 - - pre2 - -Para 5. - - - pre - - - pre1 - pre2 - -Para 6. 
- pre - pre2 -`, - out: []block{ - {opPara, []string{"Para 1.\n", "Para 1 line 2.\n"}}, - {opPara, []string{"Para 2.\n"}}, - {opHead, []string{"Section"}}, - {opPara, []string{"Para 3.\n"}}, - {opPre, []string{"pre\n", "pre1\n"}}, - {opPara, []string{"Para 4.\n"}}, - {opPre, []string{"pre\n", "pre1\n", "\n", "pre2\n"}}, - {opPara, []string{"Para 5.\n"}}, - {opPre, []string{"pre\n", "\n", "\n", "pre1\n", "pre2\n"}}, - {opPara, []string{"Para 6.\n"}}, - {opPre, []string{"pre\n", "pre2\n"}}, - }, - text: `. Para 1. Para 1 line 2. - -. Para 2. - - -. Section - -. Para 3. - -$ pre -$ pre1 - -. Para 4. - -$ pre -$ pre1 - -$ pre2 - -. Para 5. - -$ pre - - -$ pre1 -$ pre2 - -. Para 6. - -$ pre -$ pre2 -`, - }, - { - in: "Para.\n\tshould not be ``escaped''", - out: []block{ - {opPara, []string{"Para.\n"}}, - {opPre, []string{"should not be ``escaped''"}}, - }, - text: ". Para.\n\n$ should not be ``escaped''", - }, - { - in: "// A very long line of 46 char for line wrapping.", - out: []block{ - {opPara, []string{"// A very long line of 46 char for line wrapping."}}, - }, - text: `. // A very long line of 46 char for line -. // wrapping. -`, - }, - { - in: `/* A very long line of 46 char for line wrapping. -A very long line of 46 char for line wrapping. */`, - out: []block{ - {opPara, []string{"/* A very long line of 46 char for line wrapping.\n", "A very long line of 46 char for line wrapping. */"}}, - }, - text: `. /* A very long line of 46 char for line -. wrapping. A very long line of 46 char -. for line wrapping. 
*/ -`, - }, -} - -func TestBlocks(t *testing.T) { - for i, tt := range blocksTests { - b := blocks(tt.in) - if !reflect.DeepEqual(b, tt.out) { - t.Errorf("#%d: mismatch\nhave: %v\nwant: %v", i, b, tt.out) - } - } -} - -// This has been modified from go/doc to use markdown links instead of html ones -// and use markdown escaping instead oh html -var emphasizeTests = []struct { - in, out string -}{ - {"", ""}, - {"http://[::1]:8080/foo.txt", `[http\:\/\/\[\:\:1\]\:8080\/foo\.txt](http://[::1]:8080/foo.txt)`}, - {"before (https://www.google.com) after", `before \([https\:\/\/www\.google\.com](https://www.google.com)\) after`}, - {"before https://www.google.com:30/x/y/z:b::c. After", `before [https\:\/\/www\.google\.com\:30\/x\/y\/z\:b\:\:c](https://www.google.com:30/x/y/z:b::c)\. After`}, - {"http://www.google.com/path/:;!-/?query=%34b#093124", `[http\:\/\/www\.google\.com\/path\/\:\;\!\-\/\?query\=\%34b\#093124](http://www.google.com/path/:;!-/?query=%34b#093124)`}, - {"http://www.google.com/path/:;!-/?query=%34bar#093124", `[http\:\/\/www\.google\.com\/path\/\:\;\!\-\/\?query\=\%34bar\#093124](http://www.google.com/path/:;!-/?query=%34bar#093124)`}, - {"http://www.google.com/index.html!%20After", `[http\:\/\/www\.google\.com\/index\.html](http://www.google.com/index.html)\! 
After`}, - {"http://www.google.com/", `[http\:\/\/www\.google\.com\/](http://www.google.com/)`}, - {"https://www.google.com/", `[https\:\/\/www\.google\.com\/](https://www.google.com/)`}, - {"http://www.google.com/path.", `[http\:\/\/www\.google\.com\/path](http://www.google.com/path)\.`}, - {"http://en.wikipedia.org/wiki/Camellia_(cipher)", `[http\:\/\/en\.wikipedia\.org\/wiki\/Camellia\_\(cipher\)](http://en.wikipedia.org/wiki/Camellia_\(cipher\))`}, - {"(http://www.google.com/)", `\([http\:\/\/www\.google\.com\/](http://www.google.com/)\)`}, - {"http://gmail.com)/", `[http\:\/\/gmail\.com](http://gmail.com)\)`}, - {"((http://gmail.com))", `\(\([http\:\/\/gmail\.com](http://gmail.com)\)\)`}, - {"http://gmail.com ((http://gmail.com)) ()", `[http\:\/\/gmail\.com](http://gmail.com) \(\([http\:\/\/gmail\.com](http://gmail.com)\)\) \(\)`}, - {"Foo bar http://example.com/ quux!", `Foo bar [http\:\/\/example\.com\/](http://example.com/) quux\!`}, - {"Hello http://example.com/%2f/ /world.", `Hello [http\:\/\/example\.com\/\%2f\/](http://example.com/%2f/) \/world\.`}, - {"Lorem http: ipsum //host/path", `Lorem http\: ipsum \/\/host\/path`}, - {"javascript://is/not/linked", `javascript\:\/\/is\/not\/linked`}, - {"http://foo/", `[http\:\/\/foo](http://foo)`}, - {"art by [[https://www.example.com/person/][Person Name]]", `art by \[\[[https\:\/\/www\.example\.com\/person\/](https://www.example.com/person/)\]\[Person Name\]\]`}, - {"please visit (http://golang.org/)", `please visit \([http\:\/\/golang\.org\/](http://golang.org/)\)`}, - {"please visit http://golang.org/hello())", `please visit [http\:\/\/golang\.org\/hello\(\)](http://golang.org/hello\(\))\)`}, - {"http://git.qemu.org/?p=qemu.git;a=blob;f=qapi-schema.json;hb=HEAD", `[http\:\/\/git\.qemu\.org\/\?p\=qemu\.git\;a\=blob\;f\=qapi\-schema\.json\;hb\=HEAD](http://git.qemu.org/?p=qemu.git;a=blob;f=qapi-schema.json;hb=HEAD)`}, - 
{"https://foo.bar/bal/x(])", `[https\:\/\/foo\.bar\/bal\/x\(](https://foo.bar/bal/x\()\]\)`}, - {"foo [ http://bar(])", `foo \[ [http\:\/\/bar\(](http://bar\()\]\)`}, -} - -func TestEmphasize(t *testing.T) { - for i, tt := range emphasizeTests { - var buf bytes.Buffer - emphasize(&buf, tt.in, true) - out := buf.String() - if out != tt.out { - t.Errorf("#%d: mismatch\nhave: %v\nwant: %v", i, out, tt.out) - } - } -} - -func TestCommentEscape(t *testing.T) { - //ldquo -> ulquo and rdquo -> urquo - commentTests := []struct { - in, out string - }{ - {"typically invoked as ``go tool asm'',", "typically invoked as " + ulquo + "go tool asm" + urquo + ","}, - {"For more detail, run ``go help test'' and ``go help testflag''", "For more detail, run " + ulquo + "go help test" + urquo + " and " + ulquo + "go help testflag" + urquo}} - for i, tt := range commentTests { - var buf strings.Builder - commentEscape(&buf, tt.in, true) - out := buf.String() - if out != tt.out { - t.Errorf("#%d: mismatch\nhave: %q\nwant: %q", i, out, tt.out) - } - } -} - -func TestCommentToMarkdown(t *testing.T) { - tests := []struct { - in, out string - }{ - { - in: "F declaration.\n", - out: "F declaration\\.\n", - }, - { - in: ` -F declaration. Lorem ipsum dolor sit amet. -Etiam mattis eros at orci mollis molestie. -`, - out: ` -F declaration\. Lorem ipsum dolor sit amet\. -Etiam mattis eros at orci mollis molestie\. -`, - }, - { - in: ` -F declaration. - -Lorem ipsum dolor sit amet. -Sed id dui turpis. - - - - -Aenean tempus velit non auctor eleifend. -Aenean efficitur a sem id ultricies. - - -Phasellus efficitur mauris et viverra bibendum. -`, - out: ` -F declaration\. - -Lorem ipsum dolor sit amet\. -Sed id dui turpis\. - -Aenean tempus velit non auctor eleifend\. -Aenean efficitur a sem id ultricies\. - -Phasellus efficitur mauris et viverra bibendum\. -`, - }, - { - in: ` -F declaration. - -Aenean tempus velit non auctor eleifend. 
- -Section - -Lorem ipsum dolor sit amet, consectetur adipiscing elit. - - func foo() {} - - - func bar() {} - -Fusce lorem lacus. - - func foo() {} - - func bar() {} - -Maecenas in lobortis lectus. - - func foo() {} - - func bar() {} - -Phasellus efficitur mauris et viverra bibendum. -`, - out: ` -F declaration\. - -Aenean tempus velit non auctor eleifend\. - -### Section - -Lorem ipsum dolor sit amet, consectetur adipiscing elit\. - - func foo() {} - - - func bar() {} - -Fusce lorem lacus\. - - func foo() {} - - func bar() {} - -Maecenas in lobortis lectus\. - - func foo() {} - - func bar() {} - -Phasellus efficitur mauris et viverra bibendum\. -`, - }, - { - in: ` -F declaration. - - func foo() { - fmt.Println("foo") - } - func bar() { - fmt.Println("bar") - } -`, - out: ` -F declaration\. - - func foo() { - fmt.Println("foo") - } - func bar() { - fmt.Println("bar") - } -`, - }, - } - for i, tt := range tests { - // Comments start with new lines for better readability. So, we should trim them. - tt.in = strings.TrimPrefix(tt.in, "\n") - tt.out = strings.TrimPrefix(tt.out, "\n") - - if out := CommentToMarkdown(tt.in, nil); out != tt.out { - t.Errorf("#%d: mismatch\nhave: %q\nwant: %q", i, out, tt.out) - } - } -} diff --git a/gopls/internal/lsp/source/comment_go119.go b/gopls/internal/lsp/source/comment_go119.go deleted file mode 100644 index c379a4a4faa..00000000000 --- a/gopls/internal/lsp/source/comment_go119.go +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.19 -// +build go1.19 - -package source - -// Starting with go1.19, the formatting of comments has changed, and there -// is a new package (go/doc/comment) for processing them. -// As long as gopls has to compile under earlier versions, tests -// have to pass with both the old and new code, which produce -// slightly different results. 
(cmd/test/definition.go, source/comment_test.go, -// and source/source_test.go) Each of the test files checks the results -// with a function, tests.CheckSameMarkdown, that accepts both the old and the new -// results. (The old code escapes many characters the new code does not, -// and the new code sometimes adds a blank line.) - -// When gopls no longer needs to compile with go1.18, the old comment.go should -// be replaced by this file, the golden test files should be updated. -// (and checkSameMarkdown() could be replaced by a simple comparison.) - -import ( - "fmt" - "go/doc/comment" -) - -// CommentToMarkdown converts comment text to formatted markdown. -// The comment was prepared by DocReader, -// so it is known not to have leading, trailing blank lines -// nor to have trailing spaces at the end of lines. -// The comment markers have already been removed. -func CommentToMarkdown(text string, options *Options) string { - var p comment.Parser - doc := p.Parse(text) - var pr comment.Printer - // The default produces {#Hdr-...} tags for headings. - // vscode displays thems, which is undesirable. - // The godoc for comment.Printer says the tags - // avoid a security problem. - pr.HeadingID = func(*comment.Heading) string { return "" } - pr.DocLinkURL = func(link *comment.DocLink) string { - msg := fmt.Sprintf("https://%s/%s", options.LinkTarget, link.ImportPath) - if link.Name != "" { - msg += "#" - if link.Recv != "" { - msg += link.Recv + "." - } - msg += link.Name - } - return msg - } - easy := pr.Markdown(doc) - return string(easy) -} diff --git a/gopls/internal/lsp/source/completion/completion.go b/gopls/internal/lsp/source/completion/completion.go deleted file mode 100644 index 4044d8446fd..00000000000 --- a/gopls/internal/lsp/source/completion/completion.go +++ /dev/null @@ -1,3279 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// Package completion provides core functionality for code completion in Go -// editors and tools. -package completion - -import ( - "context" - "fmt" - "go/ast" - "go/constant" - "go/parser" - "go/printer" - "go/scanner" - "go/token" - "go/types" - "math" - "sort" - "strconv" - "strings" - "sync" - "sync/atomic" - "time" - "unicode" - - "golang.org/x/sync/errgroup" - "golang.org/x/tools/go/ast/astutil" - goplsastutil "golang.org/x/tools/gopls/internal/astutil" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/snippet" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/fuzzy" - "golang.org/x/tools/internal/imports" - "golang.org/x/tools/internal/typeparams" -) - -// A CompletionItem represents a possible completion suggested by the algorithm. -type CompletionItem struct { - - // Invariant: CompletionItem does not refer to syntax or types. - - // Label is the primary text the user sees for this completion item. - Label string - - // Detail is supplemental information to present to the user. - // This often contains the type or return type of the completion item. - Detail string - - // InsertText is the text to insert if this item is selected. - // Any of the prefix that has already been typed is not trimmed. - // The insert text does not contain snippets. - InsertText string - - Kind protocol.CompletionItemKind - Tags []protocol.CompletionItemTag - Deprecated bool // Deprecated, prefer Tags if available - - // An optional array of additional TextEdits that are applied when - // selecting this completion. - // - // Additional text edits should be used to change text unrelated to the current cursor position - // (for example adding an import statement at the top of the file if the completion item will - // insert an unqualified type). 
- AdditionalTextEdits []protocol.TextEdit - - // Depth is how many levels were searched to find this completion. - // For example when completing "foo<>", "fooBar" is depth 0, and - // "fooBar.Baz" is depth 1. - Depth int - - // Score is the internal relevance score. - // A higher score indicates that this completion item is more relevant. - Score float64 - - // snippet is the LSP snippet for the completion item. The LSP - // specification contains details about LSP snippets. For example, a - // snippet for a function with the following signature: - // - // func foo(a, b, c int) - // - // would be: - // - // foo(${1:a int}, ${2: b int}, ${3: c int}) - // - // If Placeholders is false in the CompletionOptions, the above - // snippet would instead be: - // - // foo(${1:}) - snippet *snippet.Builder - - // Documentation is the documentation for the completion item. - Documentation string - - // isSlice reports whether the underlying type of the object - // from which this candidate was derived is a slice. - // (Used to complete append() calls.) - isSlice bool -} - -// completionOptions holds completion specific configuration. -type completionOptions struct { - unimported bool - documentation bool - fullDocumentation bool - placeholders bool - literal bool - snippets bool - postfix bool - matcher source.Matcher - budget time.Duration - completeFunctionCalls bool -} - -// Snippet is a convenience returns the snippet if available, otherwise -// the InsertText. -// used for an item, depending on if the callee wants placeholders or not. -func (i *CompletionItem) Snippet() string { - if i.snippet != nil { - return i.snippet.String() - } - return i.InsertText -} - -// Scoring constants are used for weighting the relevance of different candidates. -const ( - // stdScore is the base score for all completion items. - stdScore float64 = 1.0 - - // highScore indicates a very relevant completion item. 
- highScore float64 = 10.0 - - // lowScore indicates an irrelevant or not useful completion item. - lowScore float64 = 0.01 -) - -// matcher matches a candidate's label against the user input. The -// returned score reflects the quality of the match. A score of zero -// indicates no match, and a score of one means a perfect match. -type matcher interface { - Score(candidateLabel string) (score float32) -} - -// prefixMatcher implements case sensitive prefix matching. -type prefixMatcher string - -func (pm prefixMatcher) Score(candidateLabel string) float32 { - if strings.HasPrefix(candidateLabel, string(pm)) { - return 1 - } - return -1 -} - -// insensitivePrefixMatcher implements case insensitive prefix matching. -type insensitivePrefixMatcher string - -func (ipm insensitivePrefixMatcher) Score(candidateLabel string) float32 { - if strings.HasPrefix(strings.ToLower(candidateLabel), string(ipm)) { - return 1 - } - return -1 -} - -// completer contains the necessary information for a single completion request. -type completer struct { - snapshot source.Snapshot - pkg source.Package - qf types.Qualifier // for qualifying typed expressions - mq source.MetadataQualifier // for syntactic qualifying - opts *completionOptions - - // completionContext contains information about the trigger for this - // completion request. - completionContext completionContext - - // fh is a handle to the file associated with this completion request. - fh source.FileHandle - - // filename is the name of the file associated with this completion request. - filename string - - // file is the AST of the file associated with this completion request. - file *ast.File - - // (tokFile, pos) is the position at which the request was triggered. - tokFile *token.File - pos token.Pos - - // path is the path of AST nodes enclosing the position. - path []ast.Node - - // seen is the map that ensures we do not return duplicate results. 
- seen map[types.Object]bool - - // items is the list of completion items returned. - items []CompletionItem - - // completionCallbacks is a list of callbacks to collect completions that - // require expensive operations. This includes operations where we search - // through the entire module cache. - completionCallbacks []func(context.Context, *imports.Options) error - - // surrounding describes the identifier surrounding the position. - surrounding *Selection - - // inference contains information we've inferred about ideal - // candidates such as the candidate's type. - inference candidateInference - - // enclosingFunc contains information about the function enclosing - // the position. - enclosingFunc *funcInfo - - // enclosingCompositeLiteral contains information about the composite literal - // enclosing the position. - enclosingCompositeLiteral *compLitInfo - - // deepState contains the current state of our deep completion search. - deepState deepCompletionState - - // matcher matches the candidates against the surrounding prefix. - matcher matcher - - // methodSetCache caches the types.NewMethodSet call, which is relatively - // expensive and can be called many times for the same type while searching - // for deep completions. - methodSetCache map[methodSetKey]*types.MethodSet - - // mapper converts the positions in the file from which the completion originated. - mapper *protocol.Mapper - - // startTime is when we started processing this completion request. It does - // not include any time the request spent in the queue. - // - // Note: in CL 503016, startTime move to *after* type checking, but it was - // subsequently determined that it was better to keep setting it *before* - // type checking, so that the completion budget best approximates the user - // experience. See golang/go#62665 for more details. - startTime time.Time - - // scopes contains all scopes defined by nodes in our path, - // including nil values for nodes that don't defined a scope. 
It - // also includes our package scope and the universal scope at the - // end. - scopes []*types.Scope -} - -// funcInfo holds info about a function object. -type funcInfo struct { - // sig is the function declaration enclosing the position. - sig *types.Signature - - // body is the function's body. - body *ast.BlockStmt -} - -type compLitInfo struct { - // cl is the *ast.CompositeLit enclosing the position. - cl *ast.CompositeLit - - // clType is the type of cl. - clType types.Type - - // kv is the *ast.KeyValueExpr enclosing the position, if any. - kv *ast.KeyValueExpr - - // inKey is true if we are certain the position is in the key side - // of a key-value pair. - inKey bool - - // maybeInFieldName is true if inKey is false and it is possible - // we are completing a struct field name. For example, - // "SomeStruct{<>}" will be inKey=false, but maybeInFieldName=true - // because we _could_ be completing a field name. - maybeInFieldName bool -} - -type importInfo struct { - importPath string - name string -} - -type methodSetKey struct { - typ types.Type - addressable bool -} - -type completionContext struct { - // triggerCharacter is the character used to trigger completion at current - // position, if any. - triggerCharacter string - - // triggerKind is information about how a completion was triggered. - triggerKind protocol.CompletionTriggerKind - - // commentCompletion is true if we are completing a comment. - commentCompletion bool - - // packageCompletion is true if we are completing a package name. - packageCompletion bool -} - -// A Selection represents the cursor position and surrounding identifier. 
-type Selection struct { - content string - tokFile *token.File - start, end, cursor token.Pos // relative to rng.TokFile - mapper *protocol.Mapper -} - -func (p Selection) Range() (protocol.Range, error) { - return p.mapper.PosRange(p.tokFile, p.start, p.end) -} - -func (p Selection) Prefix() string { - return p.content[:p.cursor-p.start] -} - -func (p Selection) Suffix() string { - return p.content[p.cursor-p.start:] -} - -func (c *completer) setSurrounding(ident *ast.Ident) { - if c.surrounding != nil { - return - } - if !(ident.Pos() <= c.pos && c.pos <= ident.End()) { - return - } - - c.surrounding = &Selection{ - content: ident.Name, - cursor: c.pos, - // Overwrite the prefix only. - tokFile: c.tokFile, - start: ident.Pos(), - end: ident.End(), - mapper: c.mapper, - } - - c.setMatcherFromPrefix(c.surrounding.Prefix()) -} - -func (c *completer) setMatcherFromPrefix(prefix string) { - switch c.opts.matcher { - case source.Fuzzy: - c.matcher = fuzzy.NewMatcher(prefix) - case source.CaseSensitive: - c.matcher = prefixMatcher(prefix) - default: - c.matcher = insensitivePrefixMatcher(strings.ToLower(prefix)) - } -} - -func (c *completer) getSurrounding() *Selection { - if c.surrounding == nil { - c.surrounding = &Selection{ - content: "", - cursor: c.pos, - tokFile: c.tokFile, - start: c.pos, - end: c.pos, - mapper: c.mapper, - } - } - return c.surrounding -} - -// candidate represents a completion candidate. -type candidate struct { - // obj is the types.Object to complete to. - // TODO(adonovan): eliminate dependence on go/types throughout this struct. - obj types.Object - - // score is used to rank candidates. - score float64 - - // name is the deep object name path, e.g. "foo.bar" - name string - - // detail is additional information about this item. If not specified, - // defaults to type string for the object. - detail string - - // path holds the path from the search root (excluding the candidate - // itself) for a deep candidate. 
- path []types.Object - - // pathInvokeMask is a bit mask tracking whether each entry in path - // should be formatted with "()" (i.e. whether it is a function - // invocation). - pathInvokeMask uint16 - - // mods contains modifications that should be applied to the - // candidate when inserted. For example, "foo" may be inserted as - // "*foo" or "foo()". - mods []typeModKind - - // addressable is true if a pointer can be taken to the candidate. - addressable bool - - // convertTo is a type that this candidate should be cast to. For - // example, if convertTo is float64, "foo" should be formatted as - // "float64(foo)". - convertTo types.Type - - // imp is the import that needs to be added to this package in order - // for this candidate to be valid. nil if no import needed. - imp *importInfo -} - -func (c candidate) hasMod(mod typeModKind) bool { - for _, m := range c.mods { - if m == mod { - return true - } - } - return false -} - -// ErrIsDefinition is an error that informs the user they got no -// completions because they tried to complete the name of a new object -// being defined. -type ErrIsDefinition struct { - objStr string -} - -func (e ErrIsDefinition) Error() string { - msg := "this is a definition" - if e.objStr != "" { - msg += " of " + e.objStr - } - return msg -} - -// Completion returns a list of possible candidates for completion, given a -// a file and a position. -// -// The selection is computed based on the preceding identifier and can be used by -// the client to score the quality of the completion. For instance, some clients -// may tolerate imperfect matches as valid completion results, since users may make typos. 
-func Completion(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, protoPos protocol.Position, protoContext protocol.CompletionContext) ([]CompletionItem, *Selection, error) { - ctx, done := event.Start(ctx, "completion.Completion") - defer done() - - startTime := time.Now() - - pkg, pgf, err := source.NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil || pgf.File.Package == token.NoPos { - // If we can't parse this file or find position for the package - // keyword, it may be missing a package declaration. Try offering - // suggestions for the package declaration. - // Note that this would be the case even if the keyword 'package' is - // present but no package name exists. - items, surrounding, innerErr := packageClauseCompletions(ctx, snapshot, fh, protoPos) - if innerErr != nil { - // return the error for GetParsedFile since it's more relevant in this situation. - return nil, nil, fmt.Errorf("getting file %s for Completion: %w (package completions: %v)", fh.URI(), err, innerErr) - } - return items, surrounding, nil - } - - pos, err := pgf.PositionPos(protoPos) - if err != nil { - return nil, nil, err - } - // Completion is based on what precedes the cursor. - // Find the path to the position before pos. - path, _ := astutil.PathEnclosingInterval(pgf.File, pos-1, pos-1) - if path == nil { - return nil, nil, fmt.Errorf("cannot find node enclosing position") - } - - // Check if completion at this position is valid. If not, return early. - switch n := path[0].(type) { - case *ast.BasicLit: - // Skip completion inside literals except for ImportSpec - if len(path) > 1 { - if _, ok := path[1].(*ast.ImportSpec); ok { - break - } - } - return nil, nil, nil - case *ast.CallExpr: - if n.Ellipsis.IsValid() && pos > n.Ellipsis && pos <= n.Ellipsis+token.Pos(len("...")) { - // Don't offer completions inside or directly after "...". For - // example, don't offer completions at "<>" in "foo(bar...<>"). 
- return nil, nil, nil - } - case *ast.Ident: - // reject defining identifiers - if obj, ok := pkg.GetTypesInfo().Defs[n]; ok { - if v, ok := obj.(*types.Var); ok && v.IsField() && v.Embedded() { - // An anonymous field is also a reference to a type. - } else if pgf.File.Name == n { - // Don't skip completions if Ident is for package name. - break - } else { - objStr := "" - if obj != nil { - qual := types.RelativeTo(pkg.GetTypes()) - objStr = types.ObjectString(obj, qual) - } - ans, sel := definition(path, obj, pgf) - if ans != nil { - sort.Slice(ans, func(i, j int) bool { - return ans[i].Score > ans[j].Score - }) - return ans, sel, nil - } - return nil, nil, ErrIsDefinition{objStr: objStr} - } - } - } - - // Collect all surrounding scopes, innermost first. - scopes := source.CollectScopes(pkg.GetTypesInfo(), path, pos) - scopes = append(scopes, pkg.GetTypes().Scope(), types.Universe) - - opts := snapshot.Options() - c := &completer{ - pkg: pkg, - snapshot: snapshot, - qf: source.Qualifier(pgf.File, pkg.GetTypes(), pkg.GetTypesInfo()), - mq: source.MetadataQualifierForFile(snapshot, pgf.File, pkg.Metadata()), - completionContext: completionContext{ - triggerCharacter: protoContext.TriggerCharacter, - triggerKind: protoContext.TriggerKind, - }, - fh: fh, - filename: fh.URI().Filename(), - tokFile: pgf.Tok, - file: pgf.File, - path: path, - pos: pos, - seen: make(map[types.Object]bool), - enclosingFunc: enclosingFunction(path, pkg.GetTypesInfo()), - enclosingCompositeLiteral: enclosingCompositeLiteral(path, pos, pkg.GetTypesInfo()), - deepState: deepCompletionState{ - enabled: opts.DeepCompletion, - }, - opts: &completionOptions{ - matcher: opts.Matcher, - unimported: opts.CompleteUnimported, - documentation: opts.CompletionDocumentation && opts.HoverKind != source.NoDocumentation, - fullDocumentation: opts.HoverKind == source.FullDocumentation, - placeholders: opts.UsePlaceholders, - literal: opts.LiteralCompletions && opts.InsertTextFormat == 
protocol.SnippetTextFormat, - budget: opts.CompletionBudget, - snippets: opts.InsertTextFormat == protocol.SnippetTextFormat, - postfix: opts.ExperimentalPostfixCompletions, - completeFunctionCalls: opts.CompleteFunctionCalls, - }, - // default to a matcher that always matches - matcher: prefixMatcher(""), - methodSetCache: make(map[methodSetKey]*types.MethodSet), - mapper: pgf.Mapper, - startTime: startTime, - scopes: scopes, - } - - ctx, cancel := context.WithCancel(ctx) - defer cancel() - - // Compute the deadline for this operation. Deadline is relative to the - // search operation, not the entire completion RPC, as the work up until this - // point depends significantly on how long it took to type-check, which in - // turn depends on the timing of the request relative to other operations on - // the snapshot. Including that work in the budget leads to inconsistent - // results (and realistically, if type-checking took 200ms already, the user - // is unlikely to be significantly more bothered by e.g. another 100ms of - // search). - // - // Don't overload the context with this deadline, as we don't want to - // conflate user cancellation (=fail the operation) with our time limit - // (=stop searching and succeed with partial results). - var deadline *time.Time - if c.opts.budget > 0 { - d := startTime.Add(c.opts.budget) - deadline = &d - } - - if surrounding := c.containingIdent(pgf.Src); surrounding != nil { - c.setSurrounding(surrounding) - } - - c.inference = expectedCandidate(ctx, c) - - err = c.collectCompletions(ctx) - if err != nil { - return nil, nil, err - } - - // Deep search collected candidates and their members for more candidates. - c.deepSearch(ctx, 1, deadline) - - // At this point we have a sufficiently complete set of results, and want to - // return as close to the completion budget as possible. Previously, we - // avoided cancelling the context because it could result in partial results - // for e.g. struct fields. 
At this point, we have a minimal valid set of - // candidates, and so truncating due to context cancellation is acceptable. - if c.opts.budget > 0 { - timeoutDuration := time.Until(c.startTime.Add(c.opts.budget)) - ctx, cancel = context.WithTimeout(ctx, timeoutDuration) - defer cancel() - } - - for _, callback := range c.completionCallbacks { - if deadline == nil || time.Now().Before(*deadline) { - if err := c.snapshot.RunProcessEnvFunc(ctx, callback); err != nil { - return nil, nil, err - } - } - } - - // Search candidates populated by expensive operations like - // unimportedMembers etc. for more completion items. - c.deepSearch(ctx, 0, deadline) - - // Statement candidates offer an entire statement in certain contexts, as - // opposed to a single object. Add statement candidates last because they - // depend on other candidates having already been collected. - c.addStatementCandidates() - - c.sortItems() - return c.items, c.getSurrounding(), nil -} - -// collectCompletions adds possible completion candidates to either the deep -// search queue or completion items directly for different completion contexts. -func (c *completer) collectCompletions(ctx context.Context) error { - // Inside import blocks, return completions for unimported packages. - for _, importSpec := range c.file.Imports { - if !(importSpec.Path.Pos() <= c.pos && c.pos <= importSpec.Path.End()) { - continue - } - return c.populateImportCompletions(importSpec) - } - - // Inside comments, offer completions for the name of the relevant symbol. - for _, comment := range c.file.Comments { - if comment.Pos() < c.pos && c.pos <= comment.End() { - c.populateCommentCompletions(ctx, comment) - return nil - } - } - - // Struct literals are handled entirely separately. - if c.wantStructFieldCompletions() { - // If we are definitely completing a struct field name, deep completions - // don't make sense. 
- if c.enclosingCompositeLiteral.inKey { - c.deepState.enabled = false - } - return c.structLiteralFieldName(ctx) - } - - if lt := c.wantLabelCompletion(); lt != labelNone { - c.labels(lt) - return nil - } - - if c.emptySwitchStmt() { - // Empty switch statements only admit "default" and "case" keywords. - c.addKeywordItems(map[string]bool{}, highScore, CASE, DEFAULT) - return nil - } - - switch n := c.path[0].(type) { - case *ast.Ident: - if c.file.Name == n { - return c.packageNameCompletions(ctx, c.fh.URI(), n) - } else if sel, ok := c.path[1].(*ast.SelectorExpr); ok && sel.Sel == n { - // Is this the Sel part of a selector? - return c.selector(ctx, sel) - } - return c.lexical(ctx) - // The function name hasn't been typed yet, but the parens are there: - // recv.‸(arg) - case *ast.TypeAssertExpr: - // Create a fake selector expression. - // - // The name "_" is the convention used by go/parser to represent phantom - // selectors. - sel := &ast.Ident{NamePos: n.X.End() + token.Pos(len(".")), Name: "_"} - return c.selector(ctx, &ast.SelectorExpr{X: n.X, Sel: sel}) - case *ast.SelectorExpr: - return c.selector(ctx, n) - // At the file scope, only keywords are allowed. - case *ast.BadDecl, *ast.File: - c.addKeywordCompletions() - default: - // fallback to lexical completions - return c.lexical(ctx) - } - - return nil -} - -// containingIdent returns the *ast.Ident containing pos, if any. It -// synthesizes an *ast.Ident to allow completion in the face of -// certain syntax errors. -func (c *completer) containingIdent(src []byte) *ast.Ident { - // In the normal case, our leaf AST node is the identifier being completed. 
- if ident, ok := c.path[0].(*ast.Ident); ok { - return ident - } - - pos, tkn, lit := c.scanToken(src) - if !pos.IsValid() { - return nil - } - - fakeIdent := &ast.Ident{Name: lit, NamePos: pos} - - if _, isBadDecl := c.path[0].(*ast.BadDecl); isBadDecl { - // You don't get *ast.Idents at the file level, so look for bad - // decls and use the manually extracted token. - return fakeIdent - } else if c.emptySwitchStmt() { - // Only keywords are allowed in empty switch statements. - // *ast.Idents are not parsed, so we must use the manually - // extracted token. - return fakeIdent - } else if tkn.IsKeyword() { - // Otherwise, manually extract the prefix if our containing token - // is a keyword. This improves completion after an "accidental - // keyword", e.g. completing to "variance" in "someFunc(var<>)". - return fakeIdent - } - - return nil -} - -// scanToken scans pgh's contents for the token containing pos. -func (c *completer) scanToken(contents []byte) (token.Pos, token.Token, string) { - tok := c.pkg.FileSet().File(c.pos) - - var s scanner.Scanner - s.Init(tok, contents, nil, 0) - for { - tknPos, tkn, lit := s.Scan() - if tkn == token.EOF || tknPos >= c.pos { - return token.NoPos, token.ILLEGAL, "" - } - - if len(lit) > 0 && tknPos <= c.pos && c.pos <= tknPos+token.Pos(len(lit)) { - return tknPos, tkn, lit - } - } -} - -func (c *completer) sortItems() { - sort.SliceStable(c.items, func(i, j int) bool { - // Sort by score first. - if c.items[i].Score != c.items[j].Score { - return c.items[i].Score > c.items[j].Score - } - - // Then sort by label so order stays consistent. This also has the - // effect of preferring shorter candidates. - return c.items[i].Label < c.items[j].Label - }) -} - -// emptySwitchStmt reports whether pos is in an empty switch or select -// statement. 
-func (c *completer) emptySwitchStmt() bool { - block, ok := c.path[0].(*ast.BlockStmt) - if !ok || len(block.List) > 0 || len(c.path) == 1 { - return false - } - - switch c.path[1].(type) { - case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: - return true - default: - return false - } -} - -// populateImportCompletions yields completions for an import path around the cursor. -// -// Completions are suggested at the directory depth of the given import path so -// that we don't overwhelm the user with a large list of possibilities. As an -// example, a completion for the prefix "golang" results in "golang.org/". -// Completions for "golang.org/" yield its subdirectories -// (i.e. "golang.org/x/"). The user is meant to accept completion suggestions -// until they reach a complete import path. -func (c *completer) populateImportCompletions(searchImport *ast.ImportSpec) error { - if !strings.HasPrefix(searchImport.Path.Value, `"`) { - return nil - } - - // deepSearch is not valuable for import completions. - c.deepState.enabled = false - - importPath := searchImport.Path.Value - - // Extract the text between the quotes (if any) in an import spec. - // prefix is the part of import path before the cursor. - prefixEnd := c.pos - searchImport.Path.Pos() - prefix := strings.Trim(importPath[:prefixEnd], `"`) - - // The number of directories in the import path gives us the depth at - // which to search. - depth := len(strings.Split(prefix, "/")) - 1 - - content := importPath - start, end := searchImport.Path.Pos(), searchImport.Path.End() - namePrefix, nameSuffix := `"`, `"` - // If a starting quote is present, adjust surrounding to either after the - // cursor or after the first slash (/), except if cursor is at the starting - // quote. Otherwise we provide a completion including the starting quote. 
- if strings.HasPrefix(importPath, `"`) && c.pos > searchImport.Path.Pos() { - content = content[1:] - start++ - if depth > 0 { - // Adjust textEdit start to replacement range. For ex: if current - // path was "golang.or/x/to<>ols/internal/", where <> is the cursor - // position, start of the replacement range would be after - // "golang.org/x/". - path := strings.SplitAfter(prefix, "/") - numChars := len(strings.Join(path[:len(path)-1], "")) - content = content[numChars:] - start += token.Pos(numChars) - } - namePrefix = "" - } - - // We won't provide an ending quote if one is already present, except if - // cursor is after the ending quote but still in import spec. This is - // because cursor has to be in our textEdit range. - if strings.HasSuffix(importPath, `"`) && c.pos < searchImport.Path.End() { - end-- - content = content[:len(content)-1] - nameSuffix = "" - } - - c.surrounding = &Selection{ - content: content, - cursor: c.pos, - tokFile: c.tokFile, - start: start, - end: end, - mapper: c.mapper, - } - - seenImports := make(map[string]struct{}) - for _, importSpec := range c.file.Imports { - if importSpec.Path.Value == importPath { - continue - } - seenImportPath, err := strconv.Unquote(importSpec.Path.Value) - if err != nil { - return err - } - seenImports[seenImportPath] = struct{}{} - } - - var mu sync.Mutex // guard c.items locally, since searchImports is called in parallel - seen := make(map[string]struct{}) - searchImports := func(pkg imports.ImportFix) { - path := pkg.StmtInfo.ImportPath - if _, ok := seenImports[path]; ok { - return - } - - // Any package path containing fewer directories than the search - // prefix is not a match. - pkgDirList := strings.Split(path, "/") - if len(pkgDirList) < depth+1 { - return - } - pkgToConsider := strings.Join(pkgDirList[:depth+1], "/") - - name := pkgDirList[depth] - // if we're adding an opening quote to completion too, set name to full - // package path since we'll need to overwrite that range. 
- if namePrefix == `"` { - name = pkgToConsider - } - - score := pkg.Relevance - if len(pkgDirList)-1 == depth { - score *= highScore - } else { - // For incomplete package paths, add a terminal slash to indicate that the - // user should keep triggering completions. - name += "/" - pkgToConsider += "/" - } - - if _, ok := seen[pkgToConsider]; ok { - return - } - seen[pkgToConsider] = struct{}{} - - mu.Lock() - defer mu.Unlock() - - name = namePrefix + name + nameSuffix - obj := types.NewPkgName(0, nil, name, types.NewPackage(pkgToConsider, name)) - c.deepState.enqueue(candidate{ - obj: obj, - detail: fmt.Sprintf("%q", pkgToConsider), - score: score, - }) - } - - c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error { - return imports.GetImportPaths(ctx, searchImports, prefix, c.filename, c.pkg.GetTypes().Name(), opts.Env) - }) - return nil -} - -// populateCommentCompletions yields completions for comments preceding or in declarations. -func (c *completer) populateCommentCompletions(ctx context.Context, comment *ast.CommentGroup) { - // If the completion was triggered by a period, ignore it. These types of - // completions will not be useful in comments. - if c.completionContext.triggerCharacter == "." { - return - } - - // Using the comment position find the line after - file := c.pkg.FileSet().File(comment.End()) - if file == nil { - return - } - - // Deep completion doesn't work properly in comments since we don't - // have a type object to complete further. - c.deepState.enabled = false - c.completionContext.commentCompletion = true - - // Documentation isn't useful in comments, since it might end up being the - // comment itself. 
- c.opts.documentation = false - - commentLine := safetoken.Line(file, comment.End()) - - // comment is valid, set surrounding as word boundaries around cursor - c.setSurroundingForComment(comment) - - // Using the next line pos, grab and parse the exported symbol on that line - for _, n := range c.file.Decls { - declLine := safetoken.Line(file, n.Pos()) - // if the comment is not in, directly above or on the same line as a declaration - if declLine != commentLine && declLine != commentLine+1 && - !(n.Pos() <= comment.Pos() && comment.End() <= n.End()) { - continue - } - switch node := n.(type) { - // handle const, vars, and types - case *ast.GenDecl: - for _, spec := range node.Specs { - switch spec := spec.(type) { - case *ast.ValueSpec: - for _, name := range spec.Names { - if name.String() == "_" { - continue - } - obj := c.pkg.GetTypesInfo().ObjectOf(name) - c.deepState.enqueue(candidate{obj: obj, score: stdScore}) - } - case *ast.TypeSpec: - // add TypeSpec fields to completion - switch typeNode := spec.Type.(type) { - case *ast.StructType: - c.addFieldItems(ctx, typeNode.Fields) - case *ast.FuncType: - c.addFieldItems(ctx, typeNode.Params) - c.addFieldItems(ctx, typeNode.Results) - case *ast.InterfaceType: - c.addFieldItems(ctx, typeNode.Methods) - } - - if spec.Name.String() == "_" { - continue - } - - obj := c.pkg.GetTypesInfo().ObjectOf(spec.Name) - // Type name should get a higher score than fields but not highScore by default - // since field near a comment cursor gets a highScore - score := stdScore * 1.1 - // If type declaration is on the line after comment, give it a highScore. 
- if declLine == commentLine+1 { - score = highScore - } - - c.deepState.enqueue(candidate{obj: obj, score: score}) - } - } - // handle functions - case *ast.FuncDecl: - c.addFieldItems(ctx, node.Recv) - c.addFieldItems(ctx, node.Type.Params) - c.addFieldItems(ctx, node.Type.Results) - - // collect receiver struct fields - if node.Recv != nil { - for _, fields := range node.Recv.List { - for _, name := range fields.Names { - obj := c.pkg.GetTypesInfo().ObjectOf(name) - if obj == nil { - continue - } - - recvType := obj.Type().Underlying() - if ptr, ok := recvType.(*types.Pointer); ok { - recvType = ptr.Elem() - } - recvStruct, ok := recvType.Underlying().(*types.Struct) - if !ok { - continue - } - for i := 0; i < recvStruct.NumFields(); i++ { - field := recvStruct.Field(i) - c.deepState.enqueue(candidate{obj: field, score: lowScore}) - } - } - } - } - - if node.Name.String() == "_" { - continue - } - - obj := c.pkg.GetTypesInfo().ObjectOf(node.Name) - if obj == nil || obj.Pkg() != nil && obj.Pkg() != c.pkg.GetTypes() { - continue - } - - c.deepState.enqueue(candidate{obj: obj, score: highScore}) - } - } -} - -// sets word boundaries surrounding a cursor for a comment -func (c *completer) setSurroundingForComment(comments *ast.CommentGroup) { - var cursorComment *ast.Comment - for _, comment := range comments.List { - if c.pos >= comment.Pos() && c.pos <= comment.End() { - cursorComment = comment - break - } - } - // if cursor isn't in the comment - if cursorComment == nil { - return - } - - // index of cursor in comment text - cursorOffset := int(c.pos - cursorComment.Pos()) - start, end := cursorOffset, cursorOffset - for start > 0 && isValidIdentifierChar(cursorComment.Text[start-1]) { - start-- - } - for end < len(cursorComment.Text) && isValidIdentifierChar(cursorComment.Text[end]) { - end++ - } - - c.surrounding = &Selection{ - content: cursorComment.Text[start:end], - cursor: c.pos, - tokFile: c.tokFile, - start: token.Pos(int(cursorComment.Slash) + start), - 
end: token.Pos(int(cursorComment.Slash) + end), - mapper: c.mapper, - } - c.setMatcherFromPrefix(c.surrounding.Prefix()) -} - -// isValidIdentifierChar returns true if a byte is a valid go identifier -// character, i.e. unicode letter or digit or underscore. -func isValidIdentifierChar(char byte) bool { - charRune := rune(char) - return unicode.In(charRune, unicode.Letter, unicode.Digit) || char == '_' -} - -// adds struct fields, interface methods, function declaration fields to completion -func (c *completer) addFieldItems(ctx context.Context, fields *ast.FieldList) { - if fields == nil { - return - } - - cursor := c.surrounding.cursor - for _, field := range fields.List { - for _, name := range field.Names { - if name.String() == "_" { - continue - } - obj := c.pkg.GetTypesInfo().ObjectOf(name) - if obj == nil { - continue - } - - // if we're in a field comment/doc, score that field as more relevant - score := stdScore - if field.Comment != nil && field.Comment.Pos() <= cursor && cursor <= field.Comment.End() { - score = highScore - } else if field.Doc != nil && field.Doc.Pos() <= cursor && cursor <= field.Doc.End() { - score = highScore - } - - c.deepState.enqueue(candidate{obj: obj, score: score}) - } - } -} - -func (c *completer) wantStructFieldCompletions() bool { - clInfo := c.enclosingCompositeLiteral - if clInfo == nil { - return false - } - - return clInfo.isStruct() && (clInfo.inKey || clInfo.maybeInFieldName) -} - -func (c *completer) wantTypeName() bool { - return !c.completionContext.commentCompletion && c.inference.typeName.wantTypeName -} - -// See https://golang.org/issue/36001. Unimported completions are expensive. -const ( - maxUnimportedPackageNames = 5 - unimportedMemberTarget = 100 -) - -// selector finds completions for the specified selector expression. -func (c *completer) selector(ctx context.Context, sel *ast.SelectorExpr) error { - c.inference.objChain = objChain(c.pkg.GetTypesInfo(), sel.X) - - // True selector? 
- if tv, ok := c.pkg.GetTypesInfo().Types[sel.X]; ok { - c.methodsAndFields(tv.Type, tv.Addressable(), nil, c.deepState.enqueue) - c.addPostfixSnippetCandidates(ctx, sel) - return nil - } - - id, ok := sel.X.(*ast.Ident) - if !ok { - return nil - } - - // Treat sel as a qualified identifier. - var filter func(*source.Metadata) bool - needImport := false - if pkgName, ok := c.pkg.GetTypesInfo().Uses[id].(*types.PkgName); ok { - // Qualified identifier with import declaration. - imp := pkgName.Imported() - - // Known direct dependency? Expand using type information. - if _, ok := c.pkg.Metadata().DepsByPkgPath[source.PackagePath(imp.Path())]; ok { - c.packageMembers(imp, stdScore, nil, c.deepState.enqueue) - return nil - } - - // Imported declaration with missing type information. - // Fall through to shallow completion of unimported package members. - // Match candidate packages by path. - filter = func(m *source.Metadata) bool { - return strings.TrimPrefix(string(m.PkgPath), "vendor/") == imp.Path() - } - } else { - // Qualified identifier without import declaration. - // Match candidate packages by name. - filter = func(m *source.Metadata) bool { - return string(m.Name) == id.Name - } - needImport = true - } - - // Search unimported packages. - if !c.opts.unimported { - return nil // feature disabled - } - - // The deep completion algorithm is exceedingly complex and - // deeply coupled to the now obsolete notions that all - // token.Pos values can be interpreted by as a single FileSet - // belonging to the Snapshot and that all types.Object values - // are canonicalized by a single types.Importer mapping. - // These invariants are no longer true now that gopls uses - // an incremental approach, parsing and type-checking each - // package separately. - // - // Consequently, completion of symbols defined in packages that - // are not currently imported by the query file cannot use the - // deep completion machinery which is based on type information. 
- // Instead it must use only syntax information from a quick - // parse of top-level declarations (but not function bodies). - // - // TODO(adonovan): rewrite the deep completion machinery to - // not assume global Pos/Object realms and then use export - // data instead of the quick parse approach taken here. - - // First, we search among packages in the forward transitive - // closure of the workspace. - // We'll use a fast parse to extract package members - // from those that match the name/path criterion. - all, err := c.snapshot.AllMetadata(ctx) - if err != nil { - return err - } - known := make(map[source.PackagePath]*source.Metadata) - for _, m := range all { - if m.Name == "main" { - continue // not importable - } - if m.IsIntermediateTestVariant() { - continue - } - // The only test variant we admit is "p [p.test]" - // when we are completing within "p_test [p.test]", - // as in that case we would like to offer completions - // of the test variants' additional symbols. - if m.ForTest != "" && c.pkg.Metadata().PkgPath != m.ForTest+"_test" { - continue - } - if !filter(m) { - continue - } - // Prefer previous entry unless this one is its test variant. - if m.ForTest != "" || known[m.PkgPath] == nil { - known[m.PkgPath] = m - } - } - - paths := make([]string, 0, len(known)) - for path := range known { - paths = append(paths, string(path)) - } - - // Rank import paths as goimports would. - var relevances map[string]float64 - if len(paths) > 0 { - if err := c.snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error { - var err error - relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths) - return err - }); err != nil { - return err - } - sort.Slice(paths, func(i, j int) bool { - return relevances[paths[i]] > relevances[paths[j]] - }) - } - - // quickParse does a quick parse of a single file of package m, - // extracts exported package members and adds candidates to c.items. 
- // TODO(rfindley): synchronizing access to c here does not feel right. - // Consider adding a concurrency-safe API for completer. - var cMu sync.Mutex // guards c.items and c.matcher - var enough int32 // atomic bool - quickParse := func(uri span.URI, m *source.Metadata) error { - if atomic.LoadInt32(&enough) != 0 { - return nil - } - - fh, err := c.snapshot.ReadFile(ctx, uri) - if err != nil { - return err - } - content, err := fh.Content() - if err != nil { - return err - } - path := string(m.PkgPath) - forEachPackageMember(content, func(tok token.Token, id *ast.Ident, fn *ast.FuncDecl) { - if atomic.LoadInt32(&enough) != 0 { - return - } - - if !id.IsExported() { - return - } - - cMu.Lock() - score := c.matcher.Score(id.Name) - cMu.Unlock() - - if sel.Sel.Name != "_" && score == 0 { - return // not a match; avoid constructing the completion item below - } - - // The only detail is the kind and package: `var (from "example.com/foo")` - // TODO(adonovan): pretty-print FuncDecl.FuncType or TypeSpec.Type? - // TODO(adonovan): should this score consider the actual c.matcher.Score - // of the item? How does this compare with the deepState.enqueue path? - item := CompletionItem{ - Label: id.Name, - Detail: fmt.Sprintf("%s (from %q)", strings.ToLower(tok.String()), m.PkgPath), - InsertText: id.Name, - Score: float64(score) * unimportedScore(relevances[path]), - } - switch tok { - case token.FUNC: - item.Kind = protocol.FunctionCompletion - case token.VAR: - item.Kind = protocol.VariableCompletion - case token.CONST: - item.Kind = protocol.ConstantCompletion - case token.TYPE: - // Without types, we can't distinguish Class from Interface. - item.Kind = protocol.ClassCompletion - } - - if needImport { - imp := &importInfo{importPath: path} - if imports.ImportPathToAssumedName(path) != string(m.Name) { - imp.name = string(m.Name) - } - item.AdditionalTextEdits, _ = c.importEdits(imp) - } - - // For functions, add a parameter snippet. 
- if fn != nil { - paramList := func(list *ast.FieldList) []string { - var params []string - if list != nil { - var cfg printer.Config // slight overkill - param := func(name string, typ ast.Expr) { - var buf strings.Builder - buf.WriteString(name) - buf.WriteByte(' ') - cfg.Fprint(&buf, token.NewFileSet(), typ) - params = append(params, buf.String()) - } - - for _, field := range list.List { - if field.Names != nil { - for _, name := range field.Names { - param(name.Name, field.Type) - } - } else { - param("_", field.Type) - } - } - } - return params - } - - tparams := paramList(fn.Type.TypeParams) - params := paramList(fn.Type.Params) - var sn snippet.Builder - c.functionCallSnippet(id.Name, tparams, params, &sn) - item.snippet = &sn - } - - cMu.Lock() - c.items = append(c.items, item) - if len(c.items) >= unimportedMemberTarget { - atomic.StoreInt32(&enough, 1) - } - cMu.Unlock() - }) - return nil - } - - // Extract the package-level candidates using a quick parse. - var g errgroup.Group - for _, path := range paths { - m := known[source.PackagePath(path)] - for _, uri := range m.CompiledGoFiles { - uri := uri - g.Go(func() error { - return quickParse(uri, m) - }) - } - } - if err := g.Wait(); err != nil { - return err - } - - // In addition, we search in the module cache using goimports. - ctx, cancel := context.WithCancel(ctx) - var mu sync.Mutex - add := func(pkgExport imports.PackageExport) { - if ignoreUnimportedCompletion(pkgExport.Fix) { - return - } - - mu.Lock() - defer mu.Unlock() - // TODO(adonovan): what if the actual package has a vendor/ prefix? - if _, ok := known[source.PackagePath(pkgExport.Fix.StmtInfo.ImportPath)]; ok { - return // We got this one above. - } - - // Continue with untyped proposals. 
- pkg := types.NewPackage(pkgExport.Fix.StmtInfo.ImportPath, pkgExport.Fix.IdentName) - for _, export := range pkgExport.Exports { - score := unimportedScore(pkgExport.Fix.Relevance) - c.deepState.enqueue(candidate{ - obj: types.NewVar(0, pkg, export, nil), - score: score, - imp: &importInfo{ - importPath: pkgExport.Fix.StmtInfo.ImportPath, - name: pkgExport.Fix.StmtInfo.Name, - }, - }) - } - if len(c.items) >= unimportedMemberTarget { - cancel() - } - } - - c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error { - defer cancel() - return imports.GetPackageExports(ctx, add, id.Name, c.filename, c.pkg.GetTypes().Name(), opts.Env) - }) - return nil -} - -// unimportedScore returns a score for an unimported package that is generally -// lower than other candidates. -func unimportedScore(relevance float64) float64 { - return (stdScore + .1*relevance) / 2 -} - -func (c *completer) packageMembers(pkg *types.Package, score float64, imp *importInfo, cb func(candidate)) { - scope := pkg.Scope() - for _, name := range scope.Names() { - obj := scope.Lookup(name) - cb(candidate{ - obj: obj, - score: score, - imp: imp, - addressable: isVar(obj), - }) - } -} - -// ignoreUnimportedCompletion reports whether an unimported completion -// resulting in the given import should be ignored. -func ignoreUnimportedCompletion(fix *imports.ImportFix) bool { - // golang/go#60062: don't add unimported completion to golang.org/toolchain. - return fix != nil && strings.HasPrefix(fix.StmtInfo.ImportPath, "golang.org/toolchain") -} - -func (c *completer) methodsAndFields(typ types.Type, addressable bool, imp *importInfo, cb func(candidate)) { - mset := c.methodSetCache[methodSetKey{typ, addressable}] - if mset == nil { - if addressable && !types.IsInterface(typ) && !isPointer(typ) { - // Add methods of *T, which includes methods with receiver T. - mset = types.NewMethodSet(types.NewPointer(typ)) - } else { - // Add methods of T. 
- mset = types.NewMethodSet(typ) - } - c.methodSetCache[methodSetKey{typ, addressable}] = mset - } - - if isStarTestingDotF(typ) && addressable { - // is that a sufficient test? (or is more care needed?) - if c.fuzz(typ, mset, imp, cb, c.pkg.FileSet()) { - return - } - } - - for i := 0; i < mset.Len(); i++ { - cb(candidate{ - obj: mset.At(i).Obj(), - score: stdScore, - imp: imp, - addressable: addressable || isPointer(typ), - }) - } - - // Add fields of T. - eachField(typ, func(v *types.Var) { - cb(candidate{ - obj: v, - score: stdScore - 0.01, - imp: imp, - addressable: addressable || isPointer(typ), - }) - }) -} - -// isStarTestingDotF reports whether typ is *testing.F. -func isStarTestingDotF(typ types.Type) bool { - ptr, _ := typ.(*types.Pointer) - if ptr == nil { - return false - } - named, _ := ptr.Elem().(*types.Named) - if named == nil { - return false - } - obj := named.Obj() - // obj.Pkg is nil for the error type. - return obj != nil && obj.Pkg() != nil && obj.Pkg().Path() == "testing" && obj.Name() == "F" -} - -// lexical finds completions in the lexical environment. -func (c *completer) lexical(ctx context.Context) error { - var ( - builtinIota = types.Universe.Lookup("iota") - builtinNil = types.Universe.Lookup("nil") - - // TODO(rfindley): only allow "comparable" where it is valid (in constraint - // position or embedded in interface declarations). - // builtinComparable = types.Universe.Lookup("comparable") - ) - - // Track seen variables to avoid showing completions for shadowed variables. - // This works since we look at scopes from innermost to outermost. - seen := make(map[string]struct{}) - - // Process scopes innermost first. - for i, scope := range c.scopes { - if scope == nil { - continue - } - - Names: - for _, name := range scope.Names() { - declScope, obj := scope.LookupParent(name, c.pos) - if declScope != scope { - continue // Name was declared in some enclosing scope, or not at all. 
- } - - // If obj's type is invalid, find the AST node that defines the lexical block - // containing the declaration of obj. Don't resolve types for packages. - if !isPkgName(obj) && !typeIsValid(obj.Type()) { - // Match the scope to its ast.Node. If the scope is the package scope, - // use the *ast.File as the starting node. - var node ast.Node - if i < len(c.path) { - node = c.path[i] - } else if i == len(c.path) { // use the *ast.File for package scope - node = c.path[i-1] - } - if node != nil { - if resolved := resolveInvalid(c.pkg.FileSet(), obj, node, c.pkg.GetTypesInfo()); resolved != nil { - obj = resolved - } - } - } - - // Don't use LHS of decl in RHS. - for _, ident := range enclosingDeclLHS(c.path) { - if obj.Pos() == ident.Pos() { - continue Names - } - } - - // Don't suggest "iota" outside of const decls. - if obj == builtinIota && !c.inConstDecl() { - continue - } - - // Rank outer scopes lower than inner. - score := stdScore * math.Pow(.99, float64(i)) - - // Dowrank "nil" a bit so it is ranked below more interesting candidates. - if obj == builtinNil { - score /= 2 - } - - // If we haven't already added a candidate for an object with this name. - if _, ok := seen[obj.Name()]; !ok { - seen[obj.Name()] = struct{}{} - c.deepState.enqueue(candidate{ - obj: obj, - score: score, - addressable: isVar(obj), - }) - } - } - } - - if c.inference.objType != nil { - if named, _ := source.Deref(c.inference.objType).(*types.Named); named != nil { - // If we expected a named type, check the type's package for - // completion items. This is useful when the current file hasn't - // imported the type's package yet. - - if named.Obj() != nil && named.Obj().Pkg() != nil { - pkg := named.Obj().Pkg() - - // Make sure the package name isn't already in use by another - // object, and that this file doesn't import the package yet. - // TODO(adonovan): what if pkg.Path has vendor/ prefix? 
- if _, ok := seen[pkg.Name()]; !ok && pkg != c.pkg.GetTypes() && !alreadyImports(c.file, source.ImportPath(pkg.Path())) { - seen[pkg.Name()] = struct{}{} - obj := types.NewPkgName(0, nil, pkg.Name(), pkg) - imp := &importInfo{ - importPath: pkg.Path(), - } - if imports.ImportPathToAssumedName(pkg.Path()) != pkg.Name() { - imp.name = pkg.Name() - } - c.deepState.enqueue(candidate{ - obj: obj, - score: stdScore, - imp: imp, - }) - } - } - } - } - - if c.opts.unimported { - if err := c.unimportedPackages(ctx, seen); err != nil { - return err - } - } - - if c.inference.typeName.isTypeParam { - // If we are completing a type param, offer each structural type. - // This ensures we suggest "[]int" and "[]float64" for a constraint - // with type union "[]int | []float64". - if t, _ := c.inference.objType.(*types.Interface); t != nil { - terms, _ := typeparams.InterfaceTermSet(t) - for _, term := range terms { - c.injectType(ctx, term.Type()) - } - } - } else { - c.injectType(ctx, c.inference.objType) - } - - // Add keyword completion items appropriate in the current context. - c.addKeywordCompletions() - - return nil -} - -// injectType manufactures candidates based on the given type. This is -// intended for types not discoverable via lexical search, such as -// composite and/or generic types. For example, if the type is "[]int", -// this method makes sure you get candidates "[]int{}" and "[]int" -// (the latter applies when completing a type name). -func (c *completer) injectType(ctx context.Context, t types.Type) { - if t == nil { - return - } - - t = source.Deref(t) - - // If we have an expected type and it is _not_ a named type, handle - // it specially. Non-named types like "[]int" will never be - // considered via a lexical search, so we need to directly inject - // them. Also allow generic types since lexical search does not - // infer instantiated versions of them. 
- if named, _ := t.(*types.Named); named == nil || typeparams.ForNamed(named).Len() > 0 { - // If our expected type is "[]int", this will add a literal - // candidate of "[]int{}". - c.literal(ctx, t, nil) - - if _, isBasic := t.(*types.Basic); !isBasic { - // If we expect a non-basic type name (e.g. "[]int"), hack up - // a named type whose name is literally "[]int". This allows - // us to reuse our object based completion machinery. - fakeNamedType := candidate{ - obj: types.NewTypeName(token.NoPos, nil, types.TypeString(t, c.qf), t), - score: stdScore, - } - // Make sure the type name matches before considering - // candidate. This cuts down on useless candidates. - if c.matchingTypeName(&fakeNamedType) { - c.deepState.enqueue(fakeNamedType) - } - } - } -} - -func (c *completer) unimportedPackages(ctx context.Context, seen map[string]struct{}) error { - var prefix string - if c.surrounding != nil { - prefix = c.surrounding.Prefix() - } - - // Don't suggest unimported packages if we have absolutely nothing - // to go on. - if prefix == "" { - return nil - } - - count := 0 - - // Search the forward transitive closure of the workspace. - all, err := c.snapshot.AllMetadata(ctx) - if err != nil { - return err - } - pkgNameByPath := make(map[source.PackagePath]string) - var paths []string // actually PackagePaths - for _, m := range all { - if m.ForTest != "" { - continue // skip all test variants - } - if m.Name == "main" { - continue // main is non-importable - } - if !strings.HasPrefix(string(m.Name), prefix) { - continue // not a match - } - paths = append(paths, string(m.PkgPath)) - pkgNameByPath[m.PkgPath] = string(m.Name) - } - - // Rank candidates using goimports' algorithm. 
- var relevances map[string]float64 - if len(paths) != 0 { - if err := c.snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error { - var err error - relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths) - return err - }); err != nil { - return err - } - } - sort.Slice(paths, func(i, j int) bool { - if relevances[paths[i]] != relevances[paths[j]] { - return relevances[paths[i]] > relevances[paths[j]] - } - - // Fall back to lexical sort to keep truncated set of candidates - // in a consistent order. - return paths[i] < paths[j] - }) - - for _, path := range paths { - name := pkgNameByPath[source.PackagePath(path)] - if _, ok := seen[name]; ok { - continue - } - imp := &importInfo{ - importPath: path, - } - if imports.ImportPathToAssumedName(path) != name { - imp.name = name - } - if count >= maxUnimportedPackageNames { - return nil - } - c.deepState.enqueue(candidate{ - // Pass an empty *types.Package to disable deep completions. - obj: types.NewPkgName(0, nil, name, types.NewPackage(path, name)), - score: unimportedScore(relevances[path]), - imp: imp, - }) - count++ - } - - ctx, cancel := context.WithCancel(ctx) - - var mu sync.Mutex - add := func(pkg imports.ImportFix) { - if ignoreUnimportedCompletion(&pkg) { - return - } - mu.Lock() - defer mu.Unlock() - if _, ok := seen[pkg.IdentName]; ok { - return - } - if _, ok := relevances[pkg.StmtInfo.ImportPath]; ok { - return - } - - if count >= maxUnimportedPackageNames { - cancel() - return - } - - // Do not add the unimported packages to seen, since we can have - // multiple packages of the same name as completion suggestions, since - // only one will be chosen. 
- obj := types.NewPkgName(0, nil, pkg.IdentName, types.NewPackage(pkg.StmtInfo.ImportPath, pkg.IdentName)) - c.deepState.enqueue(candidate{ - obj: obj, - score: unimportedScore(pkg.Relevance), - imp: &importInfo{ - importPath: pkg.StmtInfo.ImportPath, - name: pkg.StmtInfo.Name, - }, - }) - count++ - } - c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error { - defer cancel() - return imports.GetAllCandidates(ctx, add, prefix, c.filename, c.pkg.GetTypes().Name(), opts.Env) - }) - return nil -} - -// alreadyImports reports whether f has an import with the specified path. -func alreadyImports(f *ast.File, path source.ImportPath) bool { - for _, s := range f.Imports { - if source.UnquoteImportPath(s) == path { - return true - } - } - return false -} - -func (c *completer) inConstDecl() bool { - for _, n := range c.path { - if decl, ok := n.(*ast.GenDecl); ok && decl.Tok == token.CONST { - return true - } - } - return false -} - -// structLiteralFieldName finds completions for struct field names inside a struct literal. -func (c *completer) structLiteralFieldName(ctx context.Context) error { - clInfo := c.enclosingCompositeLiteral - - // Mark fields of the composite literal that have already been set, - // except for the current field. 
- addedFields := make(map[*types.Var]bool) - for _, el := range clInfo.cl.Elts { - if kvExpr, ok := el.(*ast.KeyValueExpr); ok { - if clInfo.kv == kvExpr { - continue - } - - if key, ok := kvExpr.Key.(*ast.Ident); ok { - if used, ok := c.pkg.GetTypesInfo().Uses[key]; ok { - if usedVar, ok := used.(*types.Var); ok { - addedFields[usedVar] = true - } - } - } - } - } - - deltaScore := 0.0001 - switch t := clInfo.clType.(type) { - case *types.Struct: - for i := 0; i < t.NumFields(); i++ { - field := t.Field(i) - if !addedFields[field] { - c.deepState.enqueue(candidate{ - obj: field, - score: highScore - float64(i)*deltaScore, - }) - } - } - - // Add lexical completions if we aren't certain we are in the key part of a - // key-value pair. - if clInfo.maybeInFieldName { - return c.lexical(ctx) - } - default: - return c.lexical(ctx) - } - - return nil -} - -func (cl *compLitInfo) isStruct() bool { - _, ok := cl.clType.(*types.Struct) - return ok -} - -// enclosingCompositeLiteral returns information about the composite literal enclosing the -// position. -func enclosingCompositeLiteral(path []ast.Node, pos token.Pos, info *types.Info) *compLitInfo { - for _, n := range path { - switch n := n.(type) { - case *ast.CompositeLit: - // The enclosing node will be a composite literal if the user has just - // opened the curly brace (e.g. &x{<>) or the completion request is triggered - // from an already completed composite literal expression (e.g. &x{foo: 1, <>}) - // - // The position is not part of the composite literal unless it falls within the - // curly braces (e.g. "foo.Foo<>Struct{}"). - if !(n.Lbrace < pos && pos <= n.Rbrace) { - // Keep searching since we may yet be inside a composite literal. - // For example "Foo{B: Ba<>{}}". 
- break - } - - tv, ok := info.Types[n] - if !ok { - return nil - } - - clInfo := compLitInfo{ - cl: n, - clType: source.Deref(tv.Type).Underlying(), - } - - var ( - expr ast.Expr - hasKeys bool - ) - for _, el := range n.Elts { - // Remember the expression that the position falls in, if any. - if el.Pos() <= pos && pos <= el.End() { - expr = el - } - - if kv, ok := el.(*ast.KeyValueExpr); ok { - hasKeys = true - // If expr == el then we know the position falls in this expression, - // so also record kv as the enclosing *ast.KeyValueExpr. - if expr == el { - clInfo.kv = kv - break - } - } - } - - if clInfo.kv != nil { - // If in a *ast.KeyValueExpr, we know we are in the key if the position - // is to the left of the colon (e.g. "Foo{F<>: V}". - clInfo.inKey = pos <= clInfo.kv.Colon - } else if hasKeys { - // If we aren't in a *ast.KeyValueExpr but the composite literal has - // other *ast.KeyValueExprs, we must be on the key side of a new - // *ast.KeyValueExpr (e.g. "Foo{F: V, <>}"). - clInfo.inKey = true - } else { - switch clInfo.clType.(type) { - case *types.Struct: - if len(n.Elts) == 0 { - // If the struct literal is empty, next could be a struct field - // name or an expression (e.g. "Foo{<>}" could become "Foo{F:}" - // or "Foo{someVar}"). - clInfo.maybeInFieldName = true - } else if len(n.Elts) == 1 { - // If there is one expression and the position is in that expression - // and the expression is an identifier, we may be writing a field - // name or an expression (e.g. "Foo{F<>}"). - _, clInfo.maybeInFieldName = expr.(*ast.Ident) - } - case *types.Map: - // If we aren't in a *ast.KeyValueExpr we must be adding a new key - // to the map. - clInfo.inKey = true - } - } - - return &clInfo - default: - if breaksExpectedTypeInference(n, pos) { - return nil - } - } - } - - return nil -} - -// enclosingFunction returns the signature and body of the function -// enclosing the given position. 
-func enclosingFunction(path []ast.Node, info *types.Info) *funcInfo { - for _, node := range path { - switch t := node.(type) { - case *ast.FuncDecl: - if obj, ok := info.Defs[t.Name]; ok { - return &funcInfo{ - sig: obj.Type().(*types.Signature), - body: t.Body, - } - } - case *ast.FuncLit: - if typ, ok := info.Types[t]; ok { - if sig, _ := typ.Type.(*types.Signature); sig == nil { - // golang/go#49397: it should not be possible, but we somehow arrived - // here with a non-signature type, most likely due to AST mangling - // such that node.Type is not a FuncType. - return nil - } - return &funcInfo{ - sig: typ.Type.(*types.Signature), - body: t.Body, - } - } - } - } - return nil -} - -func (c *completer) expectedCompositeLiteralType() types.Type { - clInfo := c.enclosingCompositeLiteral - switch t := clInfo.clType.(type) { - case *types.Slice: - if clInfo.inKey { - return types.Typ[types.UntypedInt] - } - return t.Elem() - case *types.Array: - if clInfo.inKey { - return types.Typ[types.UntypedInt] - } - return t.Elem() - case *types.Map: - if clInfo.inKey { - return t.Key() - } - return t.Elem() - case *types.Struct: - // If we are completing a key (i.e. field name), there is no expected type. - if clInfo.inKey { - return nil - } - - // If we are in a key-value pair, but not in the key, then we must be on the - // value side. The expected type of the value will be determined from the key. - if clInfo.kv != nil { - if key, ok := clInfo.kv.Key.(*ast.Ident); ok { - for i := 0; i < t.NumFields(); i++ { - if field := t.Field(i); field.Name() == key.Name { - return field.Type() - } - } - } - } else { - // If we aren't in a key-value pair and aren't in the key, we must be using - // implicit field names. - - // The order of the literal fields must match the order in the struct definition. - // Find the element that the position belongs to and suggest that field's type. 
- if i := exprAtPos(c.pos, clInfo.cl.Elts); i < t.NumFields() { - return t.Field(i).Type() - } - } - } - return nil -} - -// typeMod represents an operator that changes the expected type. -type typeMod struct { - mod typeModKind - arrayLen int64 -} - -type typeModKind int - -const ( - dereference typeModKind = iota // pointer indirection: "*" - reference // adds level of pointer: "&" for values, "*" for type names - chanRead // channel read operator: "<-" - sliceType // make a slice type: "[]" in "[]int" - arrayType // make an array type: "[2]" in "[2]int" - invoke // make a function call: "()" in "foo()" - takeSlice // take slice of array: "[:]" in "foo[:]" - takeDotDotDot // turn slice into variadic args: "..." in "foo..." - index // index into slice/array: "[0]" in "foo[0]" -) - -type objKind int - -const ( - kindAny objKind = 0 - kindArray objKind = 1 << iota - kindSlice - kindChan - kindMap - kindStruct - kindString - kindInt - kindBool - kindBytes - kindPtr - kindFloat - kindComplex - kindError - kindStringer - kindFunc -) - -// penalizedObj represents an object that should be disfavored as a -// completion candidate. -type penalizedObj struct { - // objChain is the full "chain", e.g. "foo.bar().baz" becomes - // []types.Object{foo, bar, baz}. - objChain []types.Object - // penalty is score penalty in the range (0, 1). - penalty float64 -} - -// candidateInference holds information we have inferred about a type that can be -// used at the current position. -type candidateInference struct { - // objType is the desired type of an object used at the query position. - objType types.Type - - // objKind is a mask of expected kinds of types such as "map", "slice", etc. - objKind objKind - - // variadic is true if we are completing the initial variadic - // parameter. 
For example: - // append([]T{}, <>) // objType=T variadic=true - // append([]T{}, T{}, <>) // objType=T variadic=false - variadic bool - - // modifiers are prefixes such as "*", "&" or "<-" that influence how - // a candidate type relates to the expected type. - modifiers []typeMod - - // convertibleTo is a type our candidate type must be convertible to. - convertibleTo types.Type - - // typeName holds information about the expected type name at - // position, if any. - typeName typeNameInference - - // assignees are the types that would receive a function call's - // results at the position. For example: - // - // foo := 123 - // foo, bar := <> - // - // at "<>", the assignees are [int, ]. - assignees []types.Type - - // variadicAssignees is true if we could be completing an inner - // function call that fills out an outer function call's variadic - // params. For example: - // - // func foo(int, ...string) {} - // - // foo(<>) // variadicAssignees=true - // foo(bar<>) // variadicAssignees=true - // foo(bar, baz<>) // variadicAssignees=false - variadicAssignees bool - - // penalized holds expressions that should be disfavored as - // candidates. For example, it tracks expressions already used in a - // switch statement's other cases. Each expression is tracked using - // its entire object "chain" allowing differentiation between - // "a.foo" and "b.foo" when "a" and "b" are the same type. - penalized []penalizedObj - - // objChain contains the chain of objects representing the - // surrounding *ast.SelectorExpr. For example, if we are completing - // "foo.bar.ba<>", objChain will contain []types.Object{foo, bar}. - objChain []types.Object -} - -// typeNameInference holds information about the expected type name at -// position. -type typeNameInference struct { - // wantTypeName is true if we expect the name of a type. - wantTypeName bool - - // modifiers are prefixes such as "*", "&" or "<-" that influence how - // a candidate type relates to the expected type. 
- modifiers []typeMod - - // assertableFrom is a type that must be assertable to our candidate type. - assertableFrom types.Type - - // wantComparable is true if we want a comparable type. - wantComparable bool - - // seenTypeSwitchCases tracks types that have already been used by - // the containing type switch. - seenTypeSwitchCases []types.Type - - // compLitType is true if we are completing a composite literal type - // name, e.g "foo<>{}". - compLitType bool - - // isTypeParam is true if we are completing a type instantiation parameter - isTypeParam bool -} - -// expectedCandidate returns information about the expected candidate -// for an expression at the query position. -func expectedCandidate(ctx context.Context, c *completer) (inf candidateInference) { - inf.typeName = expectTypeName(c) - - if c.enclosingCompositeLiteral != nil { - inf.objType = c.expectedCompositeLiteralType() - } - -Nodes: - for i, node := range c.path { - switch node := node.(type) { - case *ast.BinaryExpr: - // Determine if query position comes from left or right of op. - e := node.X - if c.pos < node.OpPos { - e = node.Y - } - if tv, ok := c.pkg.GetTypesInfo().Types[e]; ok { - switch node.Op { - case token.LAND, token.LOR: - // Don't infer "bool" type for "&&" or "||". Often you want - // to compose a boolean expression from non-boolean - // candidates. - default: - inf.objType = tv.Type - } - break Nodes - } - case *ast.AssignStmt: - // Only rank completions if you are on the right side of the token. - if c.pos > node.TokPos { - i := exprAtPos(c.pos, node.Rhs) - if i >= len(node.Lhs) { - i = len(node.Lhs) - 1 - } - if tv, ok := c.pkg.GetTypesInfo().Types[node.Lhs[i]]; ok { - inf.objType = tv.Type - } - - // If we have a single expression on the RHS, record the LHS - // assignees so we can favor multi-return function calls with - // matching result values. 
- if len(node.Rhs) <= 1 { - for _, lhs := range node.Lhs { - inf.assignees = append(inf.assignees, c.pkg.GetTypesInfo().TypeOf(lhs)) - } - } else { - // Otherwise, record our single assignee, even if its type is - // not available. We use this info to downrank functions - // with the wrong number of result values. - inf.assignees = append(inf.assignees, c.pkg.GetTypesInfo().TypeOf(node.Lhs[i])) - } - } - return inf - case *ast.ValueSpec: - if node.Type != nil && c.pos > node.Type.End() { - inf.objType = c.pkg.GetTypesInfo().TypeOf(node.Type) - } - return inf - case *ast.CallExpr: - // Only consider CallExpr args if position falls between parens. - if node.Lparen < c.pos && c.pos <= node.Rparen { - // For type conversions like "int64(foo)" we can only infer our - // desired type is convertible to int64. - if typ := typeConversion(node, c.pkg.GetTypesInfo()); typ != nil { - inf.convertibleTo = typ - break Nodes - } - - sig, _ := c.pkg.GetTypesInfo().Types[node.Fun].Type.(*types.Signature) - - if sig != nil && typeparams.ForSignature(sig).Len() > 0 { - // If we are completing a generic func call, re-check the call expression. - // This allows type param inference to work in cases like: - // - // func foo[T any](T) {} - // foo[int](<>) // <- get "int" completions instead of "T" - // - // TODO: remove this after https://go.dev/issue/52503 - info := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)} - types.CheckExpr(c.pkg.FileSet(), c.pkg.GetTypes(), node.Fun.Pos(), node.Fun, info) - sig, _ = info.Types[node.Fun].Type.(*types.Signature) - } - - if sig != nil { - inf = c.expectedCallParamType(inf, node, sig) - } - - if funIdent, ok := node.Fun.(*ast.Ident); ok { - obj := c.pkg.GetTypesInfo().ObjectOf(funIdent) - - if obj != nil && obj.Parent() == types.Universe { - // Defer call to builtinArgType so we can provide it the - // inferred type from its parent node. 
- defer func() { - inf = c.builtinArgType(obj, node, inf) - inf.objKind = c.builtinArgKind(ctx, obj, node) - }() - - // The expected type of builtin arguments like append() is - // the expected type of the builtin call itself. For - // example: - // - // var foo []int = append(<>) - // - // To find the expected type at <> we "skip" the append() - // node and get the expected type one level up, which is - // []int. - continue Nodes - } - } - - return inf - } - case *ast.ReturnStmt: - if c.enclosingFunc != nil { - sig := c.enclosingFunc.sig - // Find signature result that corresponds to our return statement. - if resultIdx := exprAtPos(c.pos, node.Results); resultIdx < len(node.Results) { - if resultIdx < sig.Results().Len() { - inf.objType = sig.Results().At(resultIdx).Type() - } - } - } - return inf - case *ast.CaseClause: - if swtch, ok := findSwitchStmt(c.path[i+1:], c.pos, node).(*ast.SwitchStmt); ok { - if tv, ok := c.pkg.GetTypesInfo().Types[swtch.Tag]; ok { - inf.objType = tv.Type - - // Record which objects have already been used in the case - // statements so we don't suggest them again. - for _, cc := range swtch.Body.List { - for _, caseExpr := range cc.(*ast.CaseClause).List { - // Don't record the expression we are currently completing. - if caseExpr.Pos() < c.pos && c.pos <= caseExpr.End() { - continue - } - - if objs := objChain(c.pkg.GetTypesInfo(), caseExpr); len(objs) > 0 { - inf.penalized = append(inf.penalized, penalizedObj{objChain: objs, penalty: 0.1}) - } - } - } - } - } - return inf - case *ast.SliceExpr: - // Make sure position falls within the brackets (e.g. "foo[a:<>]"). - if node.Lbrack < c.pos && c.pos <= node.Rbrack { - inf.objType = types.Typ[types.UntypedInt] - } - return inf - case *ast.IndexExpr: - // Make sure position falls within the brackets (e.g. "foo[<>]"). 
- if node.Lbrack < c.pos && c.pos <= node.Rbrack { - if tv, ok := c.pkg.GetTypesInfo().Types[node.X]; ok { - switch t := tv.Type.Underlying().(type) { - case *types.Map: - inf.objType = t.Key() - case *types.Slice, *types.Array: - inf.objType = types.Typ[types.UntypedInt] - } - - if ct := expectedConstraint(tv.Type, 0); ct != nil { - inf.objType = ct - inf.typeName.wantTypeName = true - inf.typeName.isTypeParam = true - } - } - } - return inf - case *typeparams.IndexListExpr: - if node.Lbrack < c.pos && c.pos <= node.Rbrack { - if tv, ok := c.pkg.GetTypesInfo().Types[node.X]; ok { - if ct := expectedConstraint(tv.Type, exprAtPos(c.pos, node.Indices)); ct != nil { - inf.objType = ct - inf.typeName.wantTypeName = true - inf.typeName.isTypeParam = true - } - } - } - return inf - case *ast.SendStmt: - // Make sure we are on right side of arrow (e.g. "foo <- <>"). - if c.pos > node.Arrow+1 { - if tv, ok := c.pkg.GetTypesInfo().Types[node.Chan]; ok { - if ch, ok := tv.Type.Underlying().(*types.Chan); ok { - inf.objType = ch.Elem() - } - } - } - return inf - case *ast.RangeStmt: - if source.NodeContains(node.X, c.pos) { - inf.objKind |= kindSlice | kindArray | kindMap | kindString - if node.Value == nil { - inf.objKind |= kindChan - } - } - return inf - case *ast.StarExpr: - inf.modifiers = append(inf.modifiers, typeMod{mod: dereference}) - case *ast.UnaryExpr: - switch node.Op { - case token.AND: - inf.modifiers = append(inf.modifiers, typeMod{mod: reference}) - case token.ARROW: - inf.modifiers = append(inf.modifiers, typeMod{mod: chanRead}) - } - case *ast.DeferStmt, *ast.GoStmt: - inf.objKind |= kindFunc - return inf - default: - if breaksExpectedTypeInference(node, c.pos) { - return inf - } - } - } - - return inf -} - -func (c *completer) expectedCallParamType(inf candidateInference, node *ast.CallExpr, sig *types.Signature) candidateInference { - numParams := sig.Params().Len() - if numParams == 0 { - return inf - } - - exprIdx := exprAtPos(c.pos, node.Args) - - // 
If we have one or zero arg expressions, we may be - // completing to a function call that returns multiple - // values, in turn getting passed in to the surrounding - // call. Record the assignees so we can favor function - // calls that return matching values. - if len(node.Args) <= 1 && exprIdx == 0 { - for i := 0; i < sig.Params().Len(); i++ { - inf.assignees = append(inf.assignees, sig.Params().At(i).Type()) - } - - // Record that we may be completing into variadic parameters. - inf.variadicAssignees = sig.Variadic() - } - - // Make sure not to run past the end of expected parameters. - if exprIdx >= numParams { - inf.objType = sig.Params().At(numParams - 1).Type() - } else { - inf.objType = sig.Params().At(exprIdx).Type() - } - - if sig.Variadic() && exprIdx >= (numParams-1) { - // If we are completing a variadic param, deslice the variadic type. - inf.objType = deslice(inf.objType) - // Record whether we are completing the initial variadic param. - inf.variadic = exprIdx == numParams-1 && len(node.Args) <= numParams - - // Check if we can infer object kind from printf verb. - inf.objKind |= printfArgKind(c.pkg.GetTypesInfo(), node, exprIdx) - } - - // If our expected type is an uninstantiated generic type param, - // swap to the constraint which will do a decent job filtering - // candidates. - if tp, _ := inf.objType.(*typeparams.TypeParam); tp != nil { - inf.objType = tp.Constraint() - } - - return inf -} - -func expectedConstraint(t types.Type, idx int) types.Type { - var tp *typeparams.TypeParamList - if named, _ := t.(*types.Named); named != nil { - tp = typeparams.ForNamed(named) - } else if sig, _ := t.Underlying().(*types.Signature); sig != nil { - tp = typeparams.ForSignature(sig) - } - if tp == nil || idx >= tp.Len() { - return nil - } - return tp.At(idx).Constraint() -} - -// objChain decomposes e into a chain of objects if possible. For -// example, "foo.bar().baz" will yield []types.Object{foo, bar, baz}. 
-// If any part can't be turned into an object, return nil. -func objChain(info *types.Info, e ast.Expr) []types.Object { - var objs []types.Object - - for e != nil { - switch n := e.(type) { - case *ast.Ident: - obj := info.ObjectOf(n) - if obj == nil { - return nil - } - objs = append(objs, obj) - e = nil - case *ast.SelectorExpr: - obj := info.ObjectOf(n.Sel) - if obj == nil { - return nil - } - objs = append(objs, obj) - e = n.X - case *ast.CallExpr: - if len(n.Args) > 0 { - return nil - } - e = n.Fun - default: - return nil - } - } - - // Reverse order so the layout matches the syntactic order. - for i := 0; i < len(objs)/2; i++ { - objs[i], objs[len(objs)-1-i] = objs[len(objs)-1-i], objs[i] - } - - return objs -} - -// applyTypeModifiers applies the list of type modifiers to a type. -// It returns nil if the modifiers could not be applied. -func (ci candidateInference) applyTypeModifiers(typ types.Type, addressable bool) types.Type { - for _, mod := range ci.modifiers { - switch mod.mod { - case dereference: - // For every "*" indirection operator, remove a pointer layer - // from candidate type. - if ptr, ok := typ.Underlying().(*types.Pointer); ok { - typ = ptr.Elem() - } else { - return nil - } - case reference: - // For every "&" address operator, add another pointer layer to - // candidate type, if the candidate is addressable. - if addressable { - typ = types.NewPointer(typ) - } else { - return nil - } - case chanRead: - // For every "<-" operator, remove a layer of channelness. - if ch, ok := typ.(*types.Chan); ok { - typ = ch.Elem() - } else { - return nil - } - } - } - - return typ -} - -// applyTypeNameModifiers applies the list of type modifiers to a type name. 
-func (ci candidateInference) applyTypeNameModifiers(typ types.Type) types.Type { - for _, mod := range ci.typeName.modifiers { - switch mod.mod { - case reference: - typ = types.NewPointer(typ) - case arrayType: - typ = types.NewArray(typ, mod.arrayLen) - case sliceType: - typ = types.NewSlice(typ) - } - } - return typ -} - -// matchesVariadic returns true if we are completing a variadic -// parameter and candType is a compatible slice type. -func (ci candidateInference) matchesVariadic(candType types.Type) bool { - return ci.variadic && ci.objType != nil && assignableTo(candType, types.NewSlice(ci.objType)) -} - -// findSwitchStmt returns an *ast.CaseClause's corresponding *ast.SwitchStmt or -// *ast.TypeSwitchStmt. path should start from the case clause's first ancestor. -func findSwitchStmt(path []ast.Node, pos token.Pos, c *ast.CaseClause) ast.Stmt { - // Make sure position falls within a "case <>:" clause. - if exprAtPos(pos, c.List) >= len(c.List) { - return nil - } - // A case clause is always nested within a block statement in a switch statement. - if len(path) < 2 { - return nil - } - if _, ok := path[0].(*ast.BlockStmt); !ok { - return nil - } - switch s := path[1].(type) { - case *ast.SwitchStmt: - return s - case *ast.TypeSwitchStmt: - return s - default: - return nil - } -} - -// breaksExpectedTypeInference reports if an expression node's type is unrelated -// to its child expression node types. For example, "Foo{Bar: x.Baz(<>)}" should -// expect a function argument, not a composite literal value. -func breaksExpectedTypeInference(n ast.Node, pos token.Pos) bool { - switch n := n.(type) { - case *ast.CompositeLit: - // Doesn't break inference if pos is in type name. - // For example: "Foo<>{Bar: 123}" - return !source.NodeContains(n.Type, pos) - case *ast.CallExpr: - // Doesn't break inference if pos is in func name. 
- // For example: "Foo<>(123)" - return !source.NodeContains(n.Fun, pos) - case *ast.FuncLit, *ast.IndexExpr, *ast.SliceExpr: - return true - default: - return false - } -} - -// expectTypeName returns information about the expected type name at position. -func expectTypeName(c *completer) typeNameInference { - var inf typeNameInference - -Nodes: - for i, p := range c.path { - switch n := p.(type) { - case *ast.FieldList: - // Expect a type name if pos is in a FieldList. This applies to - // FuncType params/results, FuncDecl receiver, StructType, and - // InterfaceType. We don't need to worry about the field name - // because completion bails out early if pos is in an *ast.Ident - // that defines an object. - inf.wantTypeName = true - break Nodes - case *ast.CaseClause: - // Expect type names in type switch case clauses. - if swtch, ok := findSwitchStmt(c.path[i+1:], c.pos, n).(*ast.TypeSwitchStmt); ok { - // The case clause types must be assertable from the type switch parameter. - ast.Inspect(swtch.Assign, func(n ast.Node) bool { - if ta, ok := n.(*ast.TypeAssertExpr); ok { - inf.assertableFrom = c.pkg.GetTypesInfo().TypeOf(ta.X) - return false - } - return true - }) - inf.wantTypeName = true - - // Track the types that have already been used in this - // switch's case statements so we don't recommend them. - for _, e := range swtch.Body.List { - for _, typeExpr := range e.(*ast.CaseClause).List { - // Skip if type expression contains pos. We don't want to - // count it as already used if the user is completing it. - if typeExpr.Pos() < c.pos && c.pos <= typeExpr.End() { - continue - } - - if t := c.pkg.GetTypesInfo().TypeOf(typeExpr); t != nil { - inf.seenTypeSwitchCases = append(inf.seenTypeSwitchCases, t) - } - } - } - - break Nodes - } - return typeNameInference{} - case *ast.TypeAssertExpr: - // Expect type names in type assert expressions. - if n.Lparen < c.pos && c.pos <= n.Rparen { - // The type in parens must be assertable from the expression type. 
- inf.assertableFrom = c.pkg.GetTypesInfo().TypeOf(n.X) - inf.wantTypeName = true - break Nodes - } - return typeNameInference{} - case *ast.StarExpr: - inf.modifiers = append(inf.modifiers, typeMod{mod: reference}) - case *ast.CompositeLit: - // We want a type name if position is in the "Type" part of a - // composite literal (e.g. "Foo<>{}"). - if n.Type != nil && n.Type.Pos() <= c.pos && c.pos <= n.Type.End() { - inf.wantTypeName = true - inf.compLitType = true - - if i < len(c.path)-1 { - // Track preceding "&" operator. Technically it applies to - // the composite literal and not the type name, but if - // affects our type completion nonetheless. - if u, ok := c.path[i+1].(*ast.UnaryExpr); ok && u.Op == token.AND { - inf.modifiers = append(inf.modifiers, typeMod{mod: reference}) - } - } - } - break Nodes - case *ast.ArrayType: - // If we are inside the "Elt" part of an array type, we want a type name. - if n.Elt.Pos() <= c.pos && c.pos <= n.Elt.End() { - inf.wantTypeName = true - if n.Len == nil { - // No "Len" expression means a slice type. - inf.modifiers = append(inf.modifiers, typeMod{mod: sliceType}) - } else { - // Try to get the array type using the constant value of "Len". - tv, ok := c.pkg.GetTypesInfo().Types[n.Len] - if ok && tv.Value != nil && tv.Value.Kind() == constant.Int { - if arrayLen, ok := constant.Int64Val(tv.Value); ok { - inf.modifiers = append(inf.modifiers, typeMod{mod: arrayType, arrayLen: arrayLen}) - } - } - } - - // ArrayTypes can be nested, so keep going if our parent is an - // ArrayType. - if i < len(c.path)-1 { - if _, ok := c.path[i+1].(*ast.ArrayType); ok { - continue Nodes - } - } - - break Nodes - } - case *ast.MapType: - inf.wantTypeName = true - if n.Key != nil { - inf.wantComparable = source.NodeContains(n.Key, c.pos) - } else { - // If the key is empty, assume we are completing the key if - // pos is directly after the "map[". 
- inf.wantComparable = c.pos == n.Pos()+token.Pos(len("map[")) - } - break Nodes - case *ast.ValueSpec: - inf.wantTypeName = source.NodeContains(n.Type, c.pos) - break Nodes - case *ast.TypeSpec: - inf.wantTypeName = source.NodeContains(n.Type, c.pos) - default: - if breaksExpectedTypeInference(p, c.pos) { - return typeNameInference{} - } - } - } - - return inf -} - -func (c *completer) fakeObj(T types.Type) *types.Var { - return types.NewVar(token.NoPos, c.pkg.GetTypes(), "", T) -} - -// derivableTypes iterates types you can derive from t. For example, -// from "foo" we might derive "&foo", and "foo()". -func derivableTypes(t types.Type, addressable bool, f func(t types.Type, addressable bool, mod typeModKind) bool) bool { - switch t := t.Underlying().(type) { - case *types.Signature: - // If t is a func type with a single result, offer the result type. - if t.Results().Len() == 1 && f(t.Results().At(0).Type(), false, invoke) { - return true - } - case *types.Array: - if f(t.Elem(), true, index) { - return true - } - // Try converting array to slice. - if f(types.NewSlice(t.Elem()), false, takeSlice) { - return true - } - case *types.Pointer: - if f(t.Elem(), false, dereference) { - return true - } - case *types.Slice: - if f(t.Elem(), true, index) { - return true - } - case *types.Map: - if f(t.Elem(), false, index) { - return true - } - case *types.Chan: - if f(t.Elem(), false, chanRead) { - return true - } - } - - // Check if c is addressable and a pointer to c matches our type inference. - if addressable && f(types.NewPointer(t), false, reference) { - return true - } - - return false -} - -// anyCandType reports whether f returns true for any candidate type -// derivable from c. It searches up to three levels of type -// modification. For example, given "foo" we could discover "***foo" -// or "*foo()". 
-func (c *candidate) anyCandType(f func(t types.Type, addressable bool) bool) bool { - if c.obj == nil || c.obj.Type() == nil { - return false - } - - const maxDepth = 3 - - var searchTypes func(t types.Type, addressable bool, mods []typeModKind) bool - searchTypes = func(t types.Type, addressable bool, mods []typeModKind) bool { - if f(t, addressable) { - if len(mods) > 0 { - newMods := make([]typeModKind, len(mods)+len(c.mods)) - copy(newMods, mods) - copy(newMods[len(mods):], c.mods) - c.mods = newMods - } - return true - } - - if len(mods) == maxDepth { - return false - } - - return derivableTypes(t, addressable, func(t types.Type, addressable bool, mod typeModKind) bool { - return searchTypes(t, addressable, append(mods, mod)) - }) - } - - return searchTypes(c.obj.Type(), c.addressable, make([]typeModKind, 0, maxDepth)) -} - -// matchingCandidate reports whether cand matches our type inferences. -// It mutates cand's score in certain cases. -func (c *completer) matchingCandidate(cand *candidate) bool { - if c.completionContext.commentCompletion { - return false - } - - // Bail out early if we are completing a field name in a composite literal. - if v, ok := cand.obj.(*types.Var); ok && v.IsField() && c.wantStructFieldCompletions() { - return true - } - - if isTypeName(cand.obj) { - return c.matchingTypeName(cand) - } else if c.wantTypeName() { - // If we want a type, a non-type object never matches. - return false - } - - if c.inference.candTypeMatches(cand) { - return true - } - - candType := cand.obj.Type() - if candType == nil { - return false - } - - if sig, ok := candType.Underlying().(*types.Signature); ok { - if c.inference.assigneesMatch(cand, sig) { - // Invoke the candidate if its results are multi-assignable. - cand.mods = append(cand.mods, invoke) - return true - } - } - - // Default to invoking *types.Func candidates. This is so function - // completions in an empty statement (or other cases with no expected type) - // are invoked by default. 
- if isFunc(cand.obj) { - cand.mods = append(cand.mods, invoke) - } - - return false -} - -// candTypeMatches reports whether cand makes a good completion -// candidate given the candidate inference. cand's score may be -// mutated to downrank the candidate in certain situations. -func (ci *candidateInference) candTypeMatches(cand *candidate) bool { - var ( - expTypes = make([]types.Type, 0, 2) - variadicType types.Type - ) - if ci.objType != nil { - expTypes = append(expTypes, ci.objType) - - if ci.variadic { - variadicType = types.NewSlice(ci.objType) - expTypes = append(expTypes, variadicType) - } - } - - return cand.anyCandType(func(candType types.Type, addressable bool) bool { - // Take into account any type modifiers on the expected type. - candType = ci.applyTypeModifiers(candType, addressable) - if candType == nil { - return false - } - - if ci.convertibleTo != nil && convertibleTo(candType, ci.convertibleTo) { - return true - } - - for _, expType := range expTypes { - if isEmptyInterface(expType) { - continue - } - - matches := ci.typeMatches(expType, candType) - if !matches { - // If candType doesn't otherwise match, consider if we can - // convert candType directly to expType. - if considerTypeConversion(candType, expType, cand.path) { - cand.convertTo = expType - // Give a major score penalty so we always prefer directly - // assignable candidates, all else equal. - cand.score *= 0.5 - return true - } - - continue - } - - if expType == variadicType { - cand.mods = append(cand.mods, takeDotDotDot) - } - - // Lower candidate score for untyped conversions. This avoids - // ranking untyped constants above candidates with an exact type - // match. Don't lower score of builtin constants, e.g. "true". - if isUntyped(candType) && !types.Identical(candType, expType) && cand.obj.Parent() != types.Universe { - // Bigger penalty for deep completions into other packages to - // avoid random constants from other packages popping up all - // the time. 
- if len(cand.path) > 0 && isPkgName(cand.path[0]) { - cand.score *= 0.5 - } else { - cand.score *= 0.75 - } - } - - return true - } - - // If we don't have a specific expected type, fall back to coarser - // object kind checks. - if ci.objType == nil || isEmptyInterface(ci.objType) { - // If we were able to apply type modifiers to our candidate type, - // count that as a match. For example: - // - // var foo chan int - // <-fo<> - // - // We were able to apply the "<-" type modifier to "foo", so "foo" - // matches. - if len(ci.modifiers) > 0 { - return true - } - - // If we didn't have an exact type match, check if our object kind - // matches. - if ci.kindMatches(candType) { - if ci.objKind == kindFunc { - cand.mods = append(cand.mods, invoke) - } - return true - } - } - - return false - }) -} - -// considerTypeConversion returns true if we should offer a completion -// automatically converting "from" to "to". -func considerTypeConversion(from, to types.Type, path []types.Object) bool { - // Don't offer to convert deep completions from other packages. - // Otherwise there are many random package level consts/vars that - // pop up as candidates all the time. - if len(path) > 0 && isPkgName(path[0]) { - return false - } - - if _, ok := from.(*typeparams.TypeParam); ok { - return false - } - - if !convertibleTo(from, to) { - return false - } - - // Don't offer to convert ints to strings since that probably - // doesn't do what the user wants. - if isBasicKind(from, types.IsInteger) && isBasicKind(to, types.IsString) { - return false - } - - return true -} - -// typeMatches reports whether an object of candType makes a good -// completion candidate given the expected type expType. -func (ci *candidateInference) typeMatches(expType, candType types.Type) bool { - // Handle untyped values specially since AssignableTo gives false negatives - // for them (see https://golang.org/issue/32146). 
- if candBasic, ok := candType.Underlying().(*types.Basic); ok { - if expBasic, ok := expType.Underlying().(*types.Basic); ok { - // Note that the candidate and/or the expected can be untyped. - // In "fo<> == 100" the expected type is untyped, and the - // candidate could also be an untyped constant. - - // Sort by is_untyped and then by is_int to simplify below logic. - a, b := candBasic.Info(), expBasic.Info() - if a&types.IsUntyped == 0 || (b&types.IsInteger > 0 && b&types.IsUntyped > 0) { - a, b = b, a - } - - // If at least one is untyped... - if a&types.IsUntyped > 0 { - switch { - // Untyped integers are compatible with floats. - case a&types.IsInteger > 0 && b&types.IsFloat > 0: - return true - - // Check if their constant kind (bool|int|float|complex|string) matches. - // This doesn't take into account the constant value, so there will be some - // false positives due to integer sign and overflow. - case a&types.IsConstType == b&types.IsConstType: - return true - } - } - } - } - - // AssignableTo covers the case where the types are equal, but also handles - // cases like assigning a concrete type to an interface type. - return assignableTo(candType, expType) -} - -// kindMatches reports whether candType's kind matches our expected -// kind (e.g. slice, map, etc.). -func (ci *candidateInference) kindMatches(candType types.Type) bool { - return ci.objKind > 0 && ci.objKind&candKind(candType) > 0 -} - -// assigneesMatch reports whether an invocation of sig matches the -// number and type of any assignees. -func (ci *candidateInference) assigneesMatch(cand *candidate, sig *types.Signature) bool { - if len(ci.assignees) == 0 { - return false - } - - // Uniresult functions are always usable and are handled by the - // normal, non-assignees type matching logic. - if sig.Results().Len() == 1 { - return false - } - - // Don't prefer completing into func(...interface{}) calls since all - // functions would match. 
- if ci.variadicAssignees && len(ci.assignees) == 1 && isEmptyInterface(deslice(ci.assignees[0])) { - return false - } - - var numberOfResultsCouldMatch bool - if ci.variadicAssignees { - numberOfResultsCouldMatch = sig.Results().Len() >= len(ci.assignees)-1 - } else { - numberOfResultsCouldMatch = sig.Results().Len() == len(ci.assignees) - } - - // If our signature doesn't return the right number of values, it's - // not a match, so downrank it. For example: - // - // var foo func() (int, int) - // a, b, c := <> // downrank "foo()" since it only returns two values - if !numberOfResultsCouldMatch { - cand.score /= 2 - return false - } - - // If at least one assignee has a valid type, and all valid - // assignees match the corresponding sig result value, the signature - // is a match. - allMatch := false - for i := 0; i < sig.Results().Len(); i++ { - var assignee types.Type - - // If we are completing into variadic parameters, deslice the - // expected variadic type. - if ci.variadicAssignees && i >= len(ci.assignees)-1 { - assignee = ci.assignees[len(ci.assignees)-1] - if elem := deslice(assignee); elem != nil { - assignee = elem - } - } else { - assignee = ci.assignees[i] - } - - if assignee == nil || assignee == types.Typ[types.Invalid] { - continue - } - - allMatch = ci.typeMatches(assignee, sig.Results().At(i).Type()) - if !allMatch { - break - } - } - return allMatch -} - -func (c *completer) matchingTypeName(cand *candidate) bool { - if !c.wantTypeName() { - return false - } - - typeMatches := func(candType types.Type) bool { - // Take into account any type name modifier prefixes. - candType = c.inference.applyTypeNameModifiers(candType) - - if from := c.inference.typeName.assertableFrom; from != nil { - // Don't suggest the starting type in type assertions. For example, - // if "foo" is an io.Writer, don't suggest "foo.(io.Writer)". 
- if types.Identical(from, candType) { - return false - } - - if intf, ok := from.Underlying().(*types.Interface); ok { - if !types.AssertableTo(intf, candType) { - return false - } - } - } - - if c.inference.typeName.wantComparable && !types.Comparable(candType) { - return false - } - - // Skip this type if it has already been used in another type - // switch case. - for _, seen := range c.inference.typeName.seenTypeSwitchCases { - if types.Identical(candType, seen) { - return false - } - } - - // We can expect a type name and have an expected type in cases like: - // - // var foo []int - // foo = []i<> - // - // Where our expected type is "[]int", and we expect a type name. - if c.inference.objType != nil { - return assignableTo(candType, c.inference.objType) - } - - // Default to saying any type name is a match. - return true - } - - t := cand.obj.Type() - - if typeMatches(t) { - return true - } - - if !types.IsInterface(t) && typeMatches(types.NewPointer(t)) { - if c.inference.typeName.compLitType { - // If we are completing a composite literal type as in - // "foo<>{}", to make a pointer we must prepend "&". - cand.mods = append(cand.mods, reference) - } else { - // If we are completing a normal type name such as "foo<>", to - // make a pointer we must prepend "*". - cand.mods = append(cand.mods, dereference) - } - return true - } - - return false -} - -var ( - // "interface { Error() string }" (i.e. error) - errorIntf = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) - - // "interface { String() string }" (i.e. fmt.Stringer) - stringerIntf = types.NewInterfaceType([]*types.Func{ - types.NewFunc(token.NoPos, nil, "String", types.NewSignature( - nil, - nil, - types.NewTuple(types.NewParam(token.NoPos, nil, "", types.Typ[types.String])), - false, - )), - }, nil).Complete() - - byteType = types.Universe.Lookup("byte").Type() -) - -// candKind returns the objKind of candType, if any. 
-func candKind(candType types.Type) objKind { - var kind objKind - - switch t := candType.Underlying().(type) { - case *types.Array: - kind |= kindArray - if t.Elem() == byteType { - kind |= kindBytes - } - case *types.Slice: - kind |= kindSlice - if t.Elem() == byteType { - kind |= kindBytes - } - case *types.Chan: - kind |= kindChan - case *types.Map: - kind |= kindMap - case *types.Pointer: - kind |= kindPtr - - // Some builtins handle array pointers as arrays, so just report a pointer - // to an array as an array. - if _, isArray := t.Elem().Underlying().(*types.Array); isArray { - kind |= kindArray - } - case *types.Basic: - switch info := t.Info(); { - case info&types.IsString > 0: - kind |= kindString - case info&types.IsInteger > 0: - kind |= kindInt - case info&types.IsFloat > 0: - kind |= kindFloat - case info&types.IsComplex > 0: - kind |= kindComplex - case info&types.IsBoolean > 0: - kind |= kindBool - } - case *types.Signature: - return kindFunc - } - - if types.Implements(candType, errorIntf) { - kind |= kindError - } - - if types.Implements(candType, stringerIntf) { - kind |= kindStringer - } - - return kind -} - -// innermostScope returns the innermost scope for c.pos. -func (c *completer) innermostScope() *types.Scope { - for _, s := range c.scopes { - if s != nil { - return s - } - } - return nil -} - -// isSlice reports whether the object's underlying type is a slice. -func isSlice(obj types.Object) bool { - if obj != nil && obj.Type() != nil { - if _, ok := obj.Type().Underlying().(*types.Slice); ok { - return true - } - } - return false -} - -// forEachPackageMember calls f(tok, id, fn) for each package-level -// TYPE/VAR/CONST/FUNC declaration in the Go source file, based on a -// quick partial parse. fn is non-nil only for function declarations. -// The AST position information is garbage. 
-func forEachPackageMember(content []byte, f func(tok token.Token, id *ast.Ident, fn *ast.FuncDecl)) { - purged := goplsastutil.PurgeFuncBodies(content) - file, _ := parser.ParseFile(token.NewFileSet(), "", purged, 0) - for _, decl := range file.Decls { - switch decl := decl.(type) { - case *ast.GenDecl: - for _, spec := range decl.Specs { - switch spec := spec.(type) { - case *ast.ValueSpec: // var/const - for _, id := range spec.Names { - f(decl.Tok, id, nil) - } - case *ast.TypeSpec: - f(decl.Tok, spec.Name, nil) - } - } - case *ast.FuncDecl: - if decl.Recv == nil { - f(token.FUNC, decl.Name, decl) - } - } - } -} diff --git a/gopls/internal/lsp/source/completion/definition.go b/gopls/internal/lsp/source/completion/definition.go deleted file mode 100644 index d7f51f0029c..00000000000 --- a/gopls/internal/lsp/source/completion/definition.go +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package completion - -import ( - "go/ast" - "go/types" - "strings" - "unicode" - "unicode/utf8" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/snippet" - "golang.org/x/tools/gopls/internal/lsp/source" -) - -// some function definitions in test files can be completed -// So far, TestFoo(t *testing.T), TestMain(m *testing.M) -// BenchmarkFoo(b *testing.B), FuzzFoo(f *testing.F) - -// path[0] is known to be *ast.Ident -func definition(path []ast.Node, obj types.Object, pgf *source.ParsedGoFile) ([]CompletionItem, *Selection) { - if _, ok := obj.(*types.Func); !ok { - return nil, nil // not a function at all - } - if !strings.HasSuffix(pgf.URI.Filename(), "_test.go") { - return nil, nil // not a test file - } - - name := path[0].(*ast.Ident).Name - if len(name) == 0 { - // can't happen - return nil, nil - } - start := path[0].Pos() - end := path[0].End() - sel := &Selection{ - content: "", - cursor: start, - tokFile: pgf.Tok, - start: start, - end: end, - mapper: pgf.Mapper, - } - var ans []CompletionItem - var hasParens bool - n, ok := path[1].(*ast.FuncDecl) - if !ok { - return nil, nil // can't happen - } - if n.Recv != nil { - return nil, nil // a method, not a function - } - t := n.Type.Params - if t.Closing != t.Opening { - hasParens = true - } - - // Always suggest TestMain, if possible - if strings.HasPrefix("TestMain", name) { - if hasParens { - ans = append(ans, defItem("TestMain", obj)) - } else { - ans = append(ans, defItem("TestMain(m *testing.M)", obj)) - } - } - - // If a snippet is possible, suggest it - if strings.HasPrefix("Test", name) { - if hasParens { - ans = append(ans, defItem("Test", obj)) - } else { - ans = append(ans, defSnippet("Test", "(t *testing.T)", obj)) - } - return ans, sel - } else if strings.HasPrefix("Benchmark", name) { - if hasParens { - ans = append(ans, defItem("Benchmark", obj)) - } else { - ans = append(ans, defSnippet("Benchmark", "(b *testing.B)", obj)) - } - return ans, sel 
- } else if strings.HasPrefix("Fuzz", name) { - if hasParens { - ans = append(ans, defItem("Fuzz", obj)) - } else { - ans = append(ans, defSnippet("Fuzz", "(f *testing.F)", obj)) - } - return ans, sel - } - - // Fill in the argument for what the user has already typed - if got := defMatches(name, "Test", path, "(t *testing.T)"); got != "" { - ans = append(ans, defItem(got, obj)) - } else if got := defMatches(name, "Benchmark", path, "(b *testing.B)"); got != "" { - ans = append(ans, defItem(got, obj)) - } else if got := defMatches(name, "Fuzz", path, "(f *testing.F)"); got != "" { - ans = append(ans, defItem(got, obj)) - } - return ans, sel -} - -// defMatches returns text for defItem, never for defSnippet -func defMatches(name, pat string, path []ast.Node, arg string) string { - if !strings.HasPrefix(name, pat) { - return "" - } - c, _ := utf8.DecodeRuneInString(name[len(pat):]) - if unicode.IsLower(c) { - return "" - } - fd, ok := path[1].(*ast.FuncDecl) - if !ok { - // we don't know what's going on - return "" - } - fp := fd.Type.Params - if len(fp.List) > 0 { - // signature already there, nothing to suggest - return "" - } - if fp.Opening != fp.Closing { - // nothing: completion works on words, not easy to insert arg - return "" - } - // suggesting signature too - return name + arg -} - -func defSnippet(prefix, suffix string, obj types.Object) CompletionItem { - var sn snippet.Builder - sn.WriteText(prefix) - sn.WritePlaceholder(func(b *snippet.Builder) { b.WriteText("Xxx") }) - sn.WriteText(suffix + " {\n\t") - sn.WriteFinalTabstop() - sn.WriteText("\n}") - return CompletionItem{ - Label: prefix + "Xxx" + suffix, - Detail: "tab, type the rest of the name, then tab", - Kind: protocol.FunctionCompletion, - Depth: 0, - Score: 10, - snippet: &sn, - Documentation: prefix + " test function", - isSlice: isSlice(obj), - } -} -func defItem(val string, obj types.Object) CompletionItem { - return CompletionItem{ - Label: val, - InsertText: val, - Kind: 
protocol.FunctionCompletion, - Depth: 0, - Score: 9, // prefer the snippets when available - Documentation: "complete the function name", - isSlice: isSlice(obj), - } -} diff --git a/gopls/internal/lsp/source/completion/format.go b/gopls/internal/lsp/source/completion/format.go deleted file mode 100644 index 89c5cb4ae97..00000000000 --- a/gopls/internal/lsp/source/completion/format.go +++ /dev/null @@ -1,345 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "context" - "errors" - "fmt" - "go/ast" - "go/doc" - "go/types" - "strings" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/snippet" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/imports" - "golang.org/x/tools/internal/typeparams" -) - -var ( - errNoMatch = errors.New("not a surrounding match") - errLowScore = errors.New("not a high scoring candidate") -) - -// item formats a candidate to a CompletionItem. -func (c *completer) item(ctx context.Context, cand candidate) (CompletionItem, error) { - obj := cand.obj - - // if the object isn't a valid match against the surrounding, return early. - matchScore := c.matcher.Score(cand.name) - if matchScore <= 0 { - return CompletionItem{}, errNoMatch - } - cand.score *= float64(matchScore) - - // Ignore deep candidates that won't be in the MaxDeepCompletions anyway. - if len(cand.path) != 0 && !c.deepState.isHighScore(cand.score) { - return CompletionItem{}, errLowScore - } - - // Handle builtin types separately. 
- if obj.Parent() == types.Universe { - return c.formatBuiltin(ctx, cand) - } - - var ( - label = cand.name - detail = types.TypeString(obj.Type(), c.qf) - insert = label - kind = protocol.TextCompletion - snip snippet.Builder - protocolEdits []protocol.TextEdit - ) - if obj.Type() == nil { - detail = "" - } - if isTypeName(obj) && c.wantTypeParams() { - x := cand.obj.(*types.TypeName) - if named, ok := x.Type().(*types.Named); ok { - tp := typeparams.ForNamed(named) - label += source.FormatTypeParams(tp) - insert = label // maintain invariant above (label == insert) - } - } - - snip.WriteText(insert) - - switch obj := obj.(type) { - case *types.TypeName: - detail, kind = source.FormatType(obj.Type(), c.qf) - case *types.Const: - kind = protocol.ConstantCompletion - case *types.Var: - if _, ok := obj.Type().(*types.Struct); ok { - detail = "struct{...}" // for anonymous structs - } else if obj.IsField() { - var err error - detail, err = source.FormatVarType(ctx, c.snapshot, c.pkg, obj, c.qf, c.mq) - if err != nil { - return CompletionItem{}, err - } - } - if obj.IsField() { - kind = protocol.FieldCompletion - c.structFieldSnippet(cand, detail, &snip) - } else { - kind = protocol.VariableCompletion - } - if obj.Type() == nil { - break - } - case *types.Func: - sig, ok := obj.Type().Underlying().(*types.Signature) - if !ok { - break - } - kind = protocol.FunctionCompletion - if sig != nil && sig.Recv() != nil { - kind = protocol.MethodCompletion - } - case *types.PkgName: - kind = protocol.ModuleCompletion - detail = fmt.Sprintf("%q", obj.Imported().Path()) - case *types.Label: - kind = protocol.ConstantCompletion - detail = "label" - } - - var prefix string - for _, mod := range cand.mods { - switch mod { - case reference: - prefix = "&" + prefix - case dereference: - prefix = "*" + prefix - case chanRead: - prefix = "<-" + prefix - } - } - - var ( - suffix string - funcType = obj.Type() - ) -Suffixes: - for _, mod := range cand.mods { - switch mod { - case invoke: 
- if sig, ok := funcType.Underlying().(*types.Signature); ok { - s, err := source.NewSignature(ctx, c.snapshot, c.pkg, sig, nil, c.qf, c.mq) - if err != nil { - return CompletionItem{}, err - } - c.functionCallSnippet("", s.TypeParams(), s.Params(), &snip) - if sig.Results().Len() == 1 { - funcType = sig.Results().At(0).Type() - } - detail = "func" + s.Format() - } - - if !c.opts.snippets { - // Without snippets the candidate will not include "()". Don't - // add further suffixes since they will be invalid. For - // example, with snippets "foo()..." would become "foo..." - // without snippets if we added the dotDotDot. - break Suffixes - } - case takeSlice: - suffix += "[:]" - case takeDotDotDot: - suffix += "..." - case index: - snip.WriteText("[") - snip.WritePlaceholder(nil) - snip.WriteText("]") - } - } - - // If this candidate needs an additional import statement, - // add the additional text edits needed. - if cand.imp != nil { - addlEdits, err := c.importEdits(cand.imp) - - if err != nil { - return CompletionItem{}, err - } - - protocolEdits = append(protocolEdits, addlEdits...) - if kind != protocol.ModuleCompletion { - if detail != "" { - detail += " " - } - detail += fmt.Sprintf("(from %q)", cand.imp.importPath) - } - } - - if cand.convertTo != nil { - typeName := types.TypeString(cand.convertTo, c.qf) - - switch t := cand.convertTo.(type) { - // We need extra parens when casting to these types. For example, - // we need "(*int)(foo)", not "*int(foo)". - case *types.Pointer, *types.Signature: - typeName = "(" + typeName + ")" - case *types.Basic: - // If the types are incompatible (as determined by typeMatches), then we - // must need a conversion here. However, if the target type is untyped, - // don't suggest converting to e.g. "untyped float" (golang/go#62141). 
- if t.Info()&types.IsUntyped != 0 { - typeName = types.TypeString(types.Default(cand.convertTo), c.qf) - } - } - - prefix = typeName + "(" + prefix - suffix = ")" - } - - if prefix != "" { - // If we are in a selector, add an edit to place prefix before selector. - if sel := enclosingSelector(c.path, c.pos); sel != nil { - edits, err := c.editText(sel.Pos(), sel.Pos(), prefix) - if err != nil { - return CompletionItem{}, err - } - protocolEdits = append(protocolEdits, edits...) - } else { - // If there is no selector, just stick the prefix at the start. - insert = prefix + insert - snip.PrependText(prefix) - } - } - - if suffix != "" { - insert += suffix - snip.WriteText(suffix) - } - - detail = strings.TrimPrefix(detail, "untyped ") - // override computed detail with provided detail, if something is provided. - if cand.detail != "" { - detail = cand.detail - } - item := CompletionItem{ - Label: label, - InsertText: insert, - AdditionalTextEdits: protocolEdits, - Detail: detail, - Kind: kind, - Score: cand.score, - Depth: len(cand.path), - snippet: &snip, - isSlice: isSlice(obj), - } - // If the user doesn't want documentation for completion items. - if !c.opts.documentation { - return item, nil - } - pos := safetoken.StartPosition(c.pkg.FileSet(), obj.Pos()) - - // We ignore errors here, because some types, like "unsafe" or "error", - // may not have valid positions that we can use to get documentation. - if !pos.IsValid() { - return item, nil - } - - comment, err := source.HoverDocForObject(ctx, c.snapshot, c.pkg.FileSet(), obj) - if err != nil { - event.Error(ctx, fmt.Sprintf("failed to find Hover for %q", obj.Name()), err) - return item, nil - } - if c.opts.fullDocumentation { - item.Documentation = comment.Text() - } else { - item.Documentation = doc.Synopsis(comment.Text()) - } - // The desired pattern is `^// Deprecated`, but the prefix has been removed - // TODO(rfindley): It doesn't look like this does the right thing for - // multi-line comments. 
- if strings.HasPrefix(comment.Text(), "Deprecated") { - if c.snapshot.Options().CompletionTags { - item.Tags = []protocol.CompletionItemTag{protocol.ComplDeprecated} - } else if c.snapshot.Options().CompletionDeprecated { - item.Deprecated = true - } - } - - return item, nil -} - -// importEdits produces the text edits necessary to add the given import to the current file. -func (c *completer) importEdits(imp *importInfo) ([]protocol.TextEdit, error) { - if imp == nil { - return nil, nil - } - - pgf, err := c.pkg.File(span.URIFromPath(c.filename)) - if err != nil { - return nil, err - } - - return source.ComputeOneImportFixEdits(c.snapshot, pgf, &imports.ImportFix{ - StmtInfo: imports.ImportInfo{ - ImportPath: imp.importPath, - Name: imp.name, - }, - // IdentName is unused on this path and is difficult to get. - FixType: imports.AddImport, - }) -} - -func (c *completer) formatBuiltin(ctx context.Context, cand candidate) (CompletionItem, error) { - obj := cand.obj - item := CompletionItem{ - Label: obj.Name(), - InsertText: obj.Name(), - Score: cand.score, - } - switch obj.(type) { - case *types.Const: - item.Kind = protocol.ConstantCompletion - case *types.Builtin: - item.Kind = protocol.FunctionCompletion - sig, err := source.NewBuiltinSignature(ctx, c.snapshot, obj.Name()) - if err != nil { - return CompletionItem{}, err - } - item.Detail = "func" + sig.Format() - item.snippet = &snippet.Builder{} - c.functionCallSnippet(obj.Name(), sig.TypeParams(), sig.Params(), item.snippet) - case *types.TypeName: - if types.IsInterface(obj.Type()) { - item.Kind = protocol.InterfaceCompletion - } else { - item.Kind = protocol.ClassCompletion - } - case *types.Nil: - item.Kind = protocol.VariableCompletion - } - return item, nil -} - -// decide if the type params (if any) should be part of the completion -// which only possible for types.Named and types.Signature -// (so far, only in receivers, e.g.; func (s *GENERIC[K, V])..., which is a types.Named) -func (c *completer) 
wantTypeParams() bool { - // Need to be lexically in a receiver, and a child of an IndexListExpr - // (but IndexListExpr only exists with go1.18) - start := c.path[0].Pos() - for i, nd := range c.path { - if fd, ok := nd.(*ast.FuncDecl); ok { - if i > 0 && fd.Recv != nil && start < fd.Recv.End() { - return true - } else { - return false - } - } - } - return false -} diff --git a/gopls/internal/lsp/source/completion/package.go b/gopls/internal/lsp/source/completion/package.go deleted file mode 100644 index 00bf0518c64..00000000000 --- a/gopls/internal/lsp/source/completion/package.go +++ /dev/null @@ -1,351 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "bytes" - "context" - "errors" - "fmt" - "go/ast" - "go/parser" - "go/scanner" - "go/token" - "go/types" - "path/filepath" - "strings" - "unicode" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/fuzzy" -) - -// packageClauseCompletions offers completions for a package declaration when -// one is not present in the given file. -func packageClauseCompletions(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, position protocol.Position) ([]CompletionItem, *Selection, error) { - // We know that the AST for this file will be empty due to the missing - // package declaration, but parse it anyway to get a mapper. - // TODO(adonovan): opt: there's no need to parse just to get a mapper. 
- pgf, err := snapshot.ParseGo(ctx, fh, source.ParseFull) - if err != nil { - return nil, nil, err - } - - offset, err := pgf.Mapper.PositionOffset(position) - if err != nil { - return nil, nil, err - } - surrounding, err := packageCompletionSurrounding(pgf, offset) - if err != nil { - return nil, nil, fmt.Errorf("invalid position for package completion: %w", err) - } - - packageSuggestions, err := packageSuggestions(ctx, snapshot, fh.URI(), "") - if err != nil { - return nil, nil, err - } - - var items []CompletionItem - for _, pkg := range packageSuggestions { - insertText := fmt.Sprintf("package %s", pkg.name) - items = append(items, CompletionItem{ - Label: insertText, - Kind: protocol.ModuleCompletion, - InsertText: insertText, - Score: pkg.score, - }) - } - - return items, surrounding, nil -} - -// packageCompletionSurrounding returns surrounding for package completion if a -// package completions can be suggested at a given cursor offset. A valid location -// for package completion is above any declarations or import statements. -func packageCompletionSurrounding(pgf *source.ParsedGoFile, offset int) (*Selection, error) { - m := pgf.Mapper - // If the file lacks a package declaration, the parser will return an empty - // AST. As a work-around, try to parse an expression from the file contents. - fset := token.NewFileSet() - expr, _ := parser.ParseExprFrom(fset, m.URI.Filename(), pgf.Src, parser.Mode(0)) - if expr == nil { - return nil, fmt.Errorf("unparseable file (%s)", m.URI) - } - tok := fset.File(expr.Pos()) - cursor := tok.Pos(offset) - - // If we were able to parse out an identifier as the first expression from - // the file, it may be the beginning of a package declaration ("pack "). - // We can offer package completions if the cursor is in the identifier. 
- if name, ok := expr.(*ast.Ident); ok { - if cursor >= name.Pos() && cursor <= name.End() { - if !strings.HasPrefix(PACKAGE, name.Name) { - return nil, fmt.Errorf("cursor in non-matching ident") - } - return &Selection{ - content: name.Name, - cursor: cursor, - tokFile: tok, - start: name.Pos(), - end: name.End(), - mapper: m, - }, nil - } - } - - // The file is invalid, but it contains an expression that we were able to - // parse. We will use this expression to construct the cursor's - // "surrounding". - - // First, consider the possibility that we have a valid "package" keyword - // with an empty package name ("package "). "package" is parsed as an - // *ast.BadDecl since it is a keyword. This logic would allow "package" to - // appear on any line of the file as long as it's the first code expression - // in the file. - lines := strings.Split(string(pgf.Src), "\n") - cursorLine := safetoken.Line(tok, cursor) - if cursorLine <= 0 || cursorLine > len(lines) { - return nil, fmt.Errorf("invalid line number") - } - if safetoken.StartPosition(fset, expr.Pos()).Line == cursorLine { - words := strings.Fields(lines[cursorLine-1]) - if len(words) > 0 && words[0] == PACKAGE { - content := PACKAGE - // Account for spaces if there are any. - if len(words) > 1 { - content += " " - } - - start := expr.Pos() - end := token.Pos(int(expr.Pos()) + len(content) + 1) - // We have verified that we have a valid 'package' keyword as our - // first expression. Ensure that cursor is in this keyword or - // otherwise fallback to the general case. - if cursor >= start && cursor <= end { - return &Selection{ - content: content, - cursor: cursor, - tokFile: tok, - start: start, - end: end, - mapper: m, - }, nil - } - } - } - - // If the cursor is after the start of the expression, no package - // declaration will be valid. - if cursor > expr.Pos() { - return nil, fmt.Errorf("cursor after expression") - } - - // If the cursor is in a comment, don't offer any completions. 
- if cursorInComment(tok, cursor, m.Content) { - return nil, fmt.Errorf("cursor in comment") - } - - // The surrounding range in this case is the cursor. - return &Selection{ - content: "", - tokFile: tok, - start: cursor, - end: cursor, - cursor: cursor, - mapper: m, - }, nil -} - -func cursorInComment(file *token.File, cursor token.Pos, src []byte) bool { - var s scanner.Scanner - s.Init(file, src, func(_ token.Position, _ string) {}, scanner.ScanComments) - for { - pos, tok, lit := s.Scan() - if pos <= cursor && cursor <= token.Pos(int(pos)+len(lit)) { - return tok == token.COMMENT - } - if tok == token.EOF { - break - } - } - return false -} - -// packageNameCompletions returns name completions for a package clause using -// the current name as prefix. -func (c *completer) packageNameCompletions(ctx context.Context, fileURI span.URI, name *ast.Ident) error { - cursor := int(c.pos - name.NamePos) - if cursor < 0 || cursor > len(name.Name) { - return errors.New("cursor is not in package name identifier") - } - - c.completionContext.packageCompletion = true - - prefix := name.Name[:cursor] - packageSuggestions, err := packageSuggestions(ctx, c.snapshot, fileURI, prefix) - if err != nil { - return err - } - - for _, pkg := range packageSuggestions { - c.deepState.enqueue(pkg) - } - return nil -} - -// packageSuggestions returns a list of packages from workspace packages that -// have the given prefix and are used in the same directory as the given -// file. This also includes test packages for these packages (_test) and -// the directory name itself. 
-func packageSuggestions(ctx context.Context, snapshot source.Snapshot, fileURI span.URI, prefix string) (packages []candidate, err error) { - active, err := snapshot.WorkspaceMetadata(ctx) - if err != nil { - return nil, err - } - - toCandidate := func(name string, score float64) candidate { - obj := types.NewPkgName(0, nil, name, types.NewPackage("", name)) - return candidate{obj: obj, name: name, detail: name, score: score} - } - - matcher := fuzzy.NewMatcher(prefix) - - // Always try to suggest a main package - defer func() { - if score := float64(matcher.Score("main")); score > 0 { - packages = append(packages, toCandidate("main", score*lowScore)) - } - }() - - dirPath := filepath.Dir(fileURI.Filename()) - dirName := filepath.Base(dirPath) - if !isValidDirName(dirName) { - return packages, nil - } - pkgName := convertDirNameToPkgName(dirName) - - seenPkgs := make(map[source.PackageName]struct{}) - - // The `go` command by default only allows one package per directory but we - // support multiple package suggestions since gopls is build system agnostic. - for _, m := range active { - if m.Name == "main" || m.Name == "" { - continue - } - if _, ok := seenPkgs[m.Name]; ok { - continue - } - - // Only add packages that are previously used in the current directory. - var relevantPkg bool - for _, uri := range m.CompiledGoFiles { - if filepath.Dir(uri.Filename()) == dirPath { - relevantPkg = true - break - } - } - if !relevantPkg { - continue - } - - // Add a found package used in current directory as a high relevance - // suggestion and the test package for it as a medium relevance - // suggestion. 
- if score := float64(matcher.Score(string(m.Name))); score > 0 { - packages = append(packages, toCandidate(string(m.Name), score*highScore)) - } - seenPkgs[m.Name] = struct{}{} - - testPkgName := m.Name + "_test" - if _, ok := seenPkgs[testPkgName]; ok || strings.HasSuffix(string(m.Name), "_test") { - continue - } - if score := float64(matcher.Score(string(testPkgName))); score > 0 { - packages = append(packages, toCandidate(string(testPkgName), score*stdScore)) - } - seenPkgs[testPkgName] = struct{}{} - } - - // Add current directory name as a low relevance suggestion. - if _, ok := seenPkgs[pkgName]; !ok { - if score := float64(matcher.Score(string(pkgName))); score > 0 { - packages = append(packages, toCandidate(string(pkgName), score*lowScore)) - } - - testPkgName := pkgName + "_test" - if score := float64(matcher.Score(string(testPkgName))); score > 0 { - packages = append(packages, toCandidate(string(testPkgName), score*lowScore)) - } - } - - return packages, nil -} - -// isValidDirName checks whether the passed directory name can be used in -// a package path. Requirements for a package path can be found here: -// https://golang.org/ref/mod#go-mod-file-ident. -func isValidDirName(dirName string) bool { - if dirName == "" { - return false - } - - for i, ch := range dirName { - if isLetter(ch) || isDigit(ch) { - continue - } - if i == 0 { - // Directory name can start only with '_'. '.' is not allowed in module paths. - // '-' and '~' are not allowed because elements of package paths must be - // safe command-line arguments. - if ch == '_' { - continue - } - } else { - // Modules path elements can't end with '.' - if isAllowedPunctuation(ch) && (i != len(dirName)-1 || ch != '.') { - continue - } - } - - return false - } - return true -} - -// convertDirNameToPkgName converts a valid directory name to a valid package name. -// It leaves only letters and digits. All letters are mapped to lower case. 
-func convertDirNameToPkgName(dirName string) source.PackageName { - var buf bytes.Buffer - for _, ch := range dirName { - switch { - case isLetter(ch): - buf.WriteRune(unicode.ToLower(ch)) - - case buf.Len() != 0 && isDigit(ch): - buf.WriteRune(ch) - } - } - return source.PackageName(buf.String()) -} - -// isLetter and isDigit allow only ASCII characters because -// "Each path element is a non-empty string made of up ASCII letters, -// ASCII digits, and limited ASCII punctuation" -// (see https://golang.org/ref/mod#go-mod-file-ident). - -func isLetter(ch rune) bool { - return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' -} - -func isDigit(ch rune) bool { - return '0' <= ch && ch <= '9' -} - -func isAllowedPunctuation(ch rune) bool { - return ch == '_' || ch == '-' || ch == '~' || ch == '.' -} diff --git a/gopls/internal/lsp/source/completion/postfix_snippets.go b/gopls/internal/lsp/source/completion/postfix_snippets.go deleted file mode 100644 index a10004993b2..00000000000 --- a/gopls/internal/lsp/source/completion/postfix_snippets.go +++ /dev/null @@ -1,481 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - "log" - "reflect" - "strings" - "sync" - "text/template" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/snippet" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/imports" -) - -// Postfix snippets are artificial methods that allow the user to -// compose common operations in an "argument oriented" fashion. For -// example, instead of "sort.Slice(someSlice, ...)" a user can expand -// "someSlice.sort!". - -// postfixTmpl represents a postfix snippet completion candidate. 
-type postfixTmpl struct { - // label is the completion candidate's label presented to the user. - label string - - // details is passed along to the client as the candidate's details. - details string - - // body is the template text. See postfixTmplArgs for details on the - // facilities available to the template. - body string - - tmpl *template.Template -} - -// postfixTmplArgs are the template execution arguments available to -// the postfix snippet templates. -type postfixTmplArgs struct { - // StmtOK is true if it is valid to replace the selector with a - // statement. For example: - // - // func foo() { - // bar.sort! // statement okay - // - // someMethod(bar.sort!) // statement not okay - // } - StmtOK bool - - // X is the textual SelectorExpr.X. For example, when completing - // "foo.bar.print!", "X" is "foo.bar". - X string - - // Obj is the types.Object of SelectorExpr.X, if any. - Obj types.Object - - // Type is the type of "foo.bar" in "foo.bar.print!". - Type types.Type - - scope *types.Scope - snip snippet.Builder - importIfNeeded func(pkgPath string, scope *types.Scope) (name string, edits []protocol.TextEdit, err error) - edits []protocol.TextEdit - qf types.Qualifier - varNames map[string]bool -} - -var postfixTmpls = []postfixTmpl{{ - label: "sort", - details: "sort.Slice()", - body: `{{if and (eq .Kind "slice") .StmtOK -}} -{{.Import "sort"}}.Slice({{.X}}, func({{.VarName nil "i"}}, {{.VarName nil "j"}} int) bool { - {{.Cursor}} -}) -{{- end}}`, -}, { - label: "last", - details: "s[len(s)-1]", - body: `{{if and (eq .Kind "slice") .Obj -}} -{{.X}}[len({{.X}})-1] -{{- end}}`, -}, { - label: "reverse", - details: "reverse slice", - body: `{{if and (eq .Kind "slice") .StmtOK -}} -{{$i := .VarName nil "i"}}{{$j := .VarName nil "j" -}} -for {{$i}}, {{$j}} := 0, len({{.X}})-1; {{$i}} < {{$j}}; {{$i}}, {{$j}} = {{$i}}+1, {{$j}}-1 { - {{.X}}[{{$i}}], {{.X}}[{{$j}}] = {{.X}}[{{$j}}], {{.X}}[{{$i}}] -} -{{end}}`, -}, { - label: "range", - details: "range 
over slice", - body: `{{if and (eq .Kind "slice") .StmtOK -}} -for {{.VarName nil "i"}}, {{.VarName .ElemType "v"}} := range {{.X}} { - {{.Cursor}} -} -{{- end}}`, -}, { - label: "append", - details: "append and re-assign slice", - body: `{{if and (eq .Kind "slice") .StmtOK .Obj -}} -{{.X}} = append({{.X}}, {{.Cursor}}) -{{- end}}`, -}, { - label: "append", - details: "append to slice", - body: `{{if and (eq .Kind "slice") (not .StmtOK) -}} -append({{.X}}, {{.Cursor}}) -{{- end}}`, -}, { - label: "copy", - details: "duplicate slice", - body: `{{if and (eq .Kind "slice") .StmtOK .Obj -}} -{{$v := (.VarName nil (printf "%sCopy" .X))}}{{$v}} := make([]{{.TypeName .ElemType}}, len({{.X}})) -copy({{$v}}, {{.X}}) -{{end}}`, -}, { - label: "range", - details: "range over map", - body: `{{if and (eq .Kind "map") .StmtOK -}} -for {{.VarName .KeyType "k"}}, {{.VarName .ElemType "v"}} := range {{.X}} { - {{.Cursor}} -} -{{- end}}`, -}, { - label: "clear", - details: "clear map contents", - body: `{{if and (eq .Kind "map") .StmtOK -}} -{{$k := (.VarName .KeyType "k")}}for {{$k}} := range {{.X}} { - delete({{.X}}, {{$k}}) -} -{{end}}`, -}, { - label: "keys", - details: "create slice of keys", - body: `{{if and (eq .Kind "map") .StmtOK -}} -{{$keysVar := (.VarName nil "keys")}}{{$keysVar}} := make([]{{.TypeName .KeyType}}, 0, len({{.X}})) -{{$k := (.VarName .KeyType "k")}}for {{$k}} := range {{.X}} { - {{$keysVar}} = append({{$keysVar}}, {{$k}}) -} -{{end}}`, -}, { - label: "range", - details: "range over channel", - body: `{{if and (eq .Kind "chan") .StmtOK -}} -for {{.VarName .ElemType "e"}} := range {{.X}} { - {{.Cursor}} -} -{{- end}}`, -}, { - label: "var", - details: "assign to variables", - body: `{{if and (eq .Kind "tuple") .StmtOK -}} -{{$a := .}}{{range $i, $v := .Tuple}}{{if $i}}, {{end}}{{$a.VarName $v.Type $v.Name}}{{end}} := {{.X}} -{{- end}}`, -}, { - label: "var", - details: "assign to variable", - body: `{{if and (ne .Kind "tuple") .StmtOK -}} -{{.VarName .Type 
""}} := {{.X}} -{{- end}}`, -}, { - label: "print", - details: "print to stdout", - body: `{{if and (ne .Kind "tuple") .StmtOK -}} -{{.Import "fmt"}}.Printf("{{.EscapeQuotes .X}}: %v\n", {{.X}}) -{{- end}}`, -}, { - label: "print", - details: "print to stdout", - body: `{{if and (eq .Kind "tuple") .StmtOK -}} -{{.Import "fmt"}}.Println({{.X}}) -{{- end}}`, -}, { - label: "split", - details: "split string", - body: `{{if (eq (.TypeName .Type) "string") -}} -{{.Import "strings"}}.Split({{.X}}, "{{.Cursor}}") -{{- end}}`, -}, { - label: "join", - details: "join string slice", - body: `{{if and (eq .Kind "slice") (eq (.TypeName .ElemType) "string") -}} -{{.Import "strings"}}.Join({{.X}}, "{{.Cursor}}") -{{- end}}`, -}, { - label: "ifnotnil", - details: "if expr != nil", - body: `{{if and (or (eq .Kind "pointer") (eq .Kind "chan") (eq .Kind "signature") (eq .Kind "interface") (eq .Kind "map") (eq .Kind "slice")) .StmtOK -}} -if {{.X}} != nil {{"{"}} - {{.Cursor}} -{{"}"}} -{{- end}}`, -}} - -// Cursor indicates where the client's cursor should end up after the -// snippet is done. -func (a *postfixTmplArgs) Cursor() string { - a.snip.WriteFinalTabstop() - return "" -} - -// Import makes sure the package corresponding to path is imported, -// returning the identifier to use to refer to the package. -func (a *postfixTmplArgs) Import(path string) (string, error) { - name, edits, err := a.importIfNeeded(path, a.scope) - if err != nil { - return "", fmt.Errorf("couldn't import %q: %w", path, err) - } - a.edits = append(a.edits, edits...) - - return name, nil -} - -func (a *postfixTmplArgs) EscapeQuotes(v string) string { - return strings.ReplaceAll(v, `"`, `\\"`) -} - -// ElemType returns the Elem() type of xType, if applicable. -func (a *postfixTmplArgs) ElemType() types.Type { - if e, _ := a.Type.(interface{ Elem() types.Type }); e != nil { - return e.Elem() - } - return nil -} - -// Kind returns the underlying kind of type, e.g. "slice", "struct", -// etc. 
-func (a *postfixTmplArgs) Kind() string { - t := reflect.TypeOf(a.Type.Underlying()) - return strings.ToLower(strings.TrimPrefix(t.String(), "*types.")) -} - -// KeyType returns the type of X's key. KeyType panics if X is not a -// map. -func (a *postfixTmplArgs) KeyType() types.Type { - return a.Type.Underlying().(*types.Map).Key() -} - -// Tuple returns the tuple result vars if X is a call expression. -func (a *postfixTmplArgs) Tuple() []*types.Var { - tuple, _ := a.Type.(*types.Tuple) - if tuple == nil { - return nil - } - - typs := make([]*types.Var, 0, tuple.Len()) - for i := 0; i < tuple.Len(); i++ { - typs = append(typs, tuple.At(i)) - } - return typs -} - -// TypeName returns the textual representation of type t. -func (a *postfixTmplArgs) TypeName(t types.Type) (string, error) { - if t == nil || t == types.Typ[types.Invalid] { - return "", fmt.Errorf("invalid type: %v", t) - } - return types.TypeString(t, a.qf), nil -} - -// VarName returns a suitable variable name for the type t. If t -// implements the error interface, "err" is used. If t is not a named -// type then nonNamedDefault is used. Otherwise a name is made by -// abbreviating the type name. If the resultant name is already in -// scope, an integer is appended to make a unique name. -func (a *postfixTmplArgs) VarName(t types.Type, nonNamedDefault string) string { - if t == nil { - t = types.Typ[types.Invalid] - } - - var name string - // go/types predicates are undefined on types.Typ[types.Invalid]. 
- if !types.Identical(t, types.Typ[types.Invalid]) && types.Implements(t, errorIntf) { - name = "err" - } else if _, isNamed := source.Deref(t).(*types.Named); !isNamed { - name = nonNamedDefault - } - - if name == "" { - name = types.TypeString(t, func(p *types.Package) string { - return "" - }) - name = abbreviateTypeName(name) - } - - if dot := strings.LastIndex(name, "."); dot > -1 { - name = name[dot+1:] - } - - uniqueName := name - for i := 2; ; i++ { - if s, _ := a.scope.LookupParent(uniqueName, token.NoPos); s == nil && !a.varNames[uniqueName] { - break - } - uniqueName = fmt.Sprintf("%s%d", name, i) - } - - a.varNames[uniqueName] = true - - return uniqueName -} - -func (c *completer) addPostfixSnippetCandidates(ctx context.Context, sel *ast.SelectorExpr) { - if !c.opts.postfix { - return - } - - initPostfixRules() - - if sel == nil || sel.Sel == nil { - return - } - - selType := c.pkg.GetTypesInfo().TypeOf(sel.X) - if selType == nil { - return - } - - // Skip empty tuples since there is no value to operate on. - if tuple, ok := selType.Underlying().(*types.Tuple); ok && tuple == nil { - return - } - - tokFile := c.pkg.FileSet().File(c.pos) - - // Only replace sel with a statement if sel is already a statement. - var stmtOK bool - for i, n := range c.path { - if n == sel && i < len(c.path)-1 { - switch p := c.path[i+1].(type) { - case *ast.ExprStmt: - stmtOK = true - case *ast.AssignStmt: - // In cases like: - // - // foo.<> - // bar = 123 - // - // detect that "foo." makes up the entire statement since the - // apparent selector spans lines. - stmtOK = safetoken.Line(tokFile, c.pos) < safetoken.Line(tokFile, p.TokPos) - } - break - } - } - - scope := c.pkg.GetTypes().Scope().Innermost(c.pos) - if scope == nil { - return - } - - // afterDot is the position after selector dot, e.g. "|" in - // "foo.|print". 
- afterDot := sel.Sel.Pos() - - // We must detect dangling selectors such as: - // - // foo.<> - // bar - // - // and adjust afterDot so that we don't mistakenly delete the - // newline thinking "bar" is part of our selector. - if startLine := safetoken.Line(tokFile, sel.Pos()); startLine != safetoken.Line(tokFile, afterDot) { - if safetoken.Line(tokFile, c.pos) != startLine { - return - } - afterDot = c.pos - } - - for _, rule := range postfixTmpls { - // When completing foo.print<>, "print" is naturally overwritten, - // but we need to also remove "foo." so the snippet has a clean - // slate. - edits, err := c.editText(sel.Pos(), afterDot, "") - if err != nil { - event.Error(ctx, "error calculating postfix edits", err) - return - } - - tmplArgs := postfixTmplArgs{ - X: source.FormatNode(c.pkg.FileSet(), sel.X), - StmtOK: stmtOK, - Obj: exprObj(c.pkg.GetTypesInfo(), sel.X), - Type: selType, - qf: c.qf, - importIfNeeded: c.importIfNeeded, - scope: scope, - varNames: make(map[string]bool), - } - - // Feed the template straight into the snippet builder. This - // allows templates to build snippets as they are executed. 
- err = rule.tmpl.Execute(&tmplArgs.snip, &tmplArgs) - if err != nil { - event.Error(ctx, "error executing postfix template", err) - continue - } - - if strings.TrimSpace(tmplArgs.snip.String()) == "" { - continue - } - - score := c.matcher.Score(rule.label) - if score <= 0 { - continue - } - - c.items = append(c.items, CompletionItem{ - Label: rule.label + "!", - Detail: rule.details, - Score: float64(score) * 0.01, - Kind: protocol.SnippetCompletion, - snippet: &tmplArgs.snip, - AdditionalTextEdits: append(edits, tmplArgs.edits...), - }) - } -} - -var postfixRulesOnce sync.Once - -func initPostfixRules() { - postfixRulesOnce.Do(func() { - var idx int - for _, rule := range postfixTmpls { - var err error - rule.tmpl, err = template.New("postfix_snippet").Parse(rule.body) - if err != nil { - log.Panicf("error parsing postfix snippet template: %v", err) - } - postfixTmpls[idx] = rule - idx++ - } - postfixTmpls = postfixTmpls[:idx] - }) -} - -// importIfNeeded returns the package identifier and any necessary -// edits to import package pkgPath. -func (c *completer) importIfNeeded(pkgPath string, scope *types.Scope) (string, []protocol.TextEdit, error) { - defaultName := imports.ImportPathToAssumedName(pkgPath) - - // Check if file already imports pkgPath. - for _, s := range c.file.Imports { - // TODO(adonovan): what if pkgPath has a vendor/ suffix? - // This may be the cause of go.dev/issue/56291. - if source.UnquoteImportPath(s) == source.ImportPath(pkgPath) { - if s.Name == nil { - return defaultName, nil, nil - } - if s.Name.Name != "_" { - return s.Name.Name, nil, nil - } - } - } - - // Give up if the package's name is already in use by another object. 
- if _, obj := scope.LookupParent(defaultName, token.NoPos); obj != nil { - return "", nil, fmt.Errorf("import name %q of %q already in use", defaultName, pkgPath) - } - - edits, err := c.importEdits(&importInfo{ - importPath: pkgPath, - }) - if err != nil { - return "", nil, err - } - - return defaultName, edits, nil -} diff --git a/gopls/internal/lsp/source/completion/util.go b/gopls/internal/lsp/source/completion/util.go deleted file mode 100644 index 4b6ec09a092..00000000000 --- a/gopls/internal/lsp/source/completion/util.go +++ /dev/null @@ -1,344 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/diff" - "golang.org/x/tools/internal/typeparams" -) - -// exprAtPos returns the index of the expression containing pos. -func exprAtPos(pos token.Pos, args []ast.Expr) int { - for i, expr := range args { - if expr.Pos() <= pos && pos <= expr.End() { - return i - } - } - return len(args) -} - -// eachField invokes fn for each field that can be selected from a -// value of type T. -func eachField(T types.Type, fn func(*types.Var)) { - // TODO(adonovan): this algorithm doesn't exclude ambiguous - // selections that match more than one field/method. - // types.NewSelectionSet should do that for us. 
- - // for termination on recursive types - var seen typeutil.Map - - var visit func(T types.Type) - visit = func(T types.Type) { - if T, ok := source.Deref(T).Underlying().(*types.Struct); ok { - if seen.At(T) != nil { - return - } - - for i := 0; i < T.NumFields(); i++ { - f := T.Field(i) - fn(f) - if f.Anonymous() { - seen.Set(T, true) - visit(f.Type()) - } - } - } - } - visit(T) -} - -// typeIsValid reports whether typ doesn't contain any Invalid types. -func typeIsValid(typ types.Type) bool { - // Check named types separately, because we don't want - // to call Underlying() on them to avoid problems with recursive types. - if _, ok := typ.(*types.Named); ok { - return true - } - - switch typ := typ.Underlying().(type) { - case *types.Basic: - return typ.Kind() != types.Invalid - case *types.Array: - return typeIsValid(typ.Elem()) - case *types.Slice: - return typeIsValid(typ.Elem()) - case *types.Pointer: - return typeIsValid(typ.Elem()) - case *types.Map: - return typeIsValid(typ.Key()) && typeIsValid(typ.Elem()) - case *types.Chan: - return typeIsValid(typ.Elem()) - case *types.Signature: - return typeIsValid(typ.Params()) && typeIsValid(typ.Results()) - case *types.Tuple: - for i := 0; i < typ.Len(); i++ { - if !typeIsValid(typ.At(i).Type()) { - return false - } - } - return true - case *types.Struct, *types.Interface: - // Don't bother checking structs, interfaces for validity. - return true - default: - return false - } -} - -// resolveInvalid traverses the node of the AST that defines the scope -// containing the declaration of obj, and attempts to find a user-friendly -// name for its invalid type. The resulting Object and its Type are fake. 
-func resolveInvalid(fset *token.FileSet, obj types.Object, node ast.Node, info *types.Info) types.Object { - var resultExpr ast.Expr - ast.Inspect(node, func(node ast.Node) bool { - switch n := node.(type) { - case *ast.ValueSpec: - for _, name := range n.Names { - if info.Defs[name] == obj { - resultExpr = n.Type - } - } - return false - case *ast.Field: // This case handles parameters and results of a FuncDecl or FuncLit. - for _, name := range n.Names { - if info.Defs[name] == obj { - resultExpr = n.Type - } - } - return false - default: - return true - } - }) - // Construct a fake type for the object and return a fake object with this type. - typename := source.FormatNode(fset, resultExpr) - typ := types.NewNamed(types.NewTypeName(token.NoPos, obj.Pkg(), typename, nil), types.Typ[types.Invalid], nil) - return types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), typ) -} - -func isPointer(T types.Type) bool { - _, ok := T.(*types.Pointer) - return ok -} - -func isVar(obj types.Object) bool { - _, ok := obj.(*types.Var) - return ok -} - -func isTypeName(obj types.Object) bool { - _, ok := obj.(*types.TypeName) - return ok -} - -func isFunc(obj types.Object) bool { - _, ok := obj.(*types.Func) - return ok -} - -func isEmptyInterface(T types.Type) bool { - intf, _ := T.(*types.Interface) - return intf != nil && intf.NumMethods() == 0 && typeparams.IsMethodSet(intf) -} - -func isUntyped(T types.Type) bool { - if basic, ok := T.(*types.Basic); ok { - return basic.Info()&types.IsUntyped > 0 - } - return false -} - -func isPkgName(obj types.Object) bool { - _, ok := obj.(*types.PkgName) - return ok -} - -func isASTFile(n ast.Node) bool { - _, ok := n.(*ast.File) - return ok -} - -func deslice(T types.Type) types.Type { - if slice, ok := T.Underlying().(*types.Slice); ok { - return slice.Elem() - } - return nil -} - -// isSelector returns the enclosing *ast.SelectorExpr when pos is in the -// selector. 
-func enclosingSelector(path []ast.Node, pos token.Pos) *ast.SelectorExpr { - if len(path) == 0 { - return nil - } - - if sel, ok := path[0].(*ast.SelectorExpr); ok { - return sel - } - - if _, ok := path[0].(*ast.Ident); ok && len(path) > 1 { - if sel, ok := path[1].(*ast.SelectorExpr); ok && pos >= sel.Sel.Pos() { - return sel - } - } - - return nil -} - -// enclosingDeclLHS returns LHS idents from containing value spec or -// assign statement. -func enclosingDeclLHS(path []ast.Node) []*ast.Ident { - for _, n := range path { - switch n := n.(type) { - case *ast.ValueSpec: - return n.Names - case *ast.AssignStmt: - ids := make([]*ast.Ident, 0, len(n.Lhs)) - for _, e := range n.Lhs { - if id, ok := e.(*ast.Ident); ok { - ids = append(ids, id) - } - } - return ids - } - } - - return nil -} - -// exprObj returns the types.Object associated with the *ast.Ident or -// *ast.SelectorExpr e. -func exprObj(info *types.Info, e ast.Expr) types.Object { - var ident *ast.Ident - switch expr := e.(type) { - case *ast.Ident: - ident = expr - case *ast.SelectorExpr: - ident = expr.Sel - default: - return nil - } - - return info.ObjectOf(ident) -} - -// typeConversion returns the type being converted to if call is a type -// conversion expression. -func typeConversion(call *ast.CallExpr, info *types.Info) types.Type { - // Type conversion (e.g. "float64(foo)"). - if fun, _ := exprObj(info, call.Fun).(*types.TypeName); fun != nil { - return fun.Type() - } - - return nil -} - -// fieldsAccessible returns whether s has at least one field accessible by p. -func fieldsAccessible(s *types.Struct, p *types.Package) bool { - for i := 0; i < s.NumFields(); i++ { - f := s.Field(i) - if f.Exported() || f.Pkg() == p { - return true - } - } - return false -} - -// prevStmt returns the statement that precedes the statement containing pos. 
-// For example: -// -// foo := 1 -// bar(1 + 2<>) -// -// If "<>" is pos, prevStmt returns "foo := 1" -func prevStmt(pos token.Pos, path []ast.Node) ast.Stmt { - var blockLines []ast.Stmt - for i := 0; i < len(path) && blockLines == nil; i++ { - switch n := path[i].(type) { - case *ast.BlockStmt: - blockLines = n.List - case *ast.CommClause: - blockLines = n.Body - case *ast.CaseClause: - blockLines = n.Body - } - } - - for i := len(blockLines) - 1; i >= 0; i-- { - if blockLines[i].End() < pos { - return blockLines[i] - } - } - - return nil -} - -// formatZeroValue produces Go code representing the zero value of T. It -// returns the empty string if T is invalid. -func formatZeroValue(T types.Type, qf types.Qualifier) string { - switch u := T.Underlying().(type) { - case *types.Basic: - switch { - case u.Info()&types.IsNumeric > 0: - return "0" - case u.Info()&types.IsString > 0: - return `""` - case u.Info()&types.IsBoolean > 0: - return "false" - default: - return "" - } - case *types.Pointer, *types.Interface, *types.Chan, *types.Map, *types.Slice, *types.Signature: - return "nil" - default: - return types.TypeString(T, qf) + "{}" - } -} - -// isBasicKind returns whether t is a basic type of kind k. -func isBasicKind(t types.Type, k types.BasicInfo) bool { - b, _ := t.Underlying().(*types.Basic) - return b != nil && b.Info()&k > 0 -} - -func (c *completer) editText(from, to token.Pos, newText string) ([]protocol.TextEdit, error) { - start, end, err := safetoken.Offsets(c.tokFile, from, to) - if err != nil { - return nil, err // can't happen: from/to came from c - } - return source.ToProtocolEdits(c.mapper, []diff.Edit{{ - Start: start, - End: end, - New: newText, - }}) -} - -// assignableTo is like types.AssignableTo, but returns false if -// either type is invalid. 
-func assignableTo(x, to types.Type) bool { - if x == types.Typ[types.Invalid] || to == types.Typ[types.Invalid] { - return false - } - - return types.AssignableTo(x, to) -} - -// convertibleTo is like types.ConvertibleTo, but returns false if -// either type is invalid. -func convertibleTo(x, to types.Type) bool { - if x == types.Typ[types.Invalid] || to == types.Typ[types.Invalid] { - return false - } - - return types.ConvertibleTo(x, to) -} diff --git a/gopls/internal/lsp/source/definition.go b/gopls/internal/lsp/source/definition.go deleted file mode 100644 index 60f101899b1..00000000000 --- a/gopls/internal/lsp/source/definition.go +++ /dev/null @@ -1,261 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package source - -import ( - "context" - "errors" - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" -) - -// Definition handles the textDocument/definition request for Go files. -func Definition(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) ([]protocol.Location, error) { - ctx, done := event.Start(ctx, "source.Definition") - defer done() - - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - pos, err := pgf.PositionPos(position) - if err != nil { - return nil, err - } - - // Handle the case where the cursor is in an import. - importLocations, err := importDefinition(ctx, snapshot, pkg, pgf, pos) - if err != nil { - return nil, err - } - if len(importLocations) > 0 { - return importLocations, nil - } - - // Handle the case where the cursor is in the package name. - // We use "<= End" to accept a query immediately after the package name. 
- if pgf.File != nil && pgf.File.Name.Pos() <= pos && pos <= pgf.File.Name.End() { - // If there's no package documentation, just use current file. - declFile := pgf - for _, pgf := range pkg.CompiledGoFiles() { - if pgf.File.Name != nil && pgf.File.Doc != nil { - declFile = pgf - break - } - } - loc, err := declFile.NodeLocation(declFile.File.Name) - if err != nil { - return nil, err - } - return []protocol.Location{loc}, nil - } - - // Handle the case where the cursor is in a linkname directive. - locations, err := LinknameDefinition(ctx, snapshot, pgf.Mapper, position) - if !errors.Is(err, ErrNoLinkname) { - return locations, err - } - - // Handle the case where the cursor is in an embed directive. - locations, err = EmbedDefinition(pgf.Mapper, position) - if !errors.Is(err, ErrNoEmbed) { - return locations, err - } - - // The general case: the cursor is on an identifier. - _, obj, _ := referencedObject(pkg, pgf, pos) - if obj == nil { - return nil, nil - } - - // Handle objects with no position: builtin, unsafe. - if !obj.Pos().IsValid() { - var pgf *ParsedGoFile - if obj.Parent() == types.Universe { - // pseudo-package "builtin" - builtinPGF, err := snapshot.BuiltinFile(ctx) - if err != nil { - return nil, err - } - pgf = builtinPGF - - } else if obj.Pkg() == types.Unsafe { - // package "unsafe" - unsafe := snapshot.Metadata("unsafe") - if unsafe == nil { - return nil, fmt.Errorf("no metadata for package 'unsafe'") - } - uri := unsafe.GoFiles[0] - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - pgf, err = snapshot.ParseGo(ctx, fh, ParseFull&^SkipObjectResolution) - if err != nil { - return nil, err - } - - } else { - return nil, bug.Errorf("internal error: no position for %v", obj.Name()) - } - // Inv: pgf ∈ {builtin,unsafe}.go - - // Use legacy (go/ast) object resolution. - astObj := pgf.File.Scope.Lookup(obj.Name()) - if astObj == nil { - // Every built-in should have documentation syntax. 
- return nil, bug.Errorf("internal error: no object for %s", obj.Name()) - } - decl, ok := astObj.Decl.(ast.Node) - if !ok { - return nil, bug.Errorf("internal error: no declaration for %s", obj.Name()) - } - loc, err := pgf.PosLocation(decl.Pos(), decl.Pos()+token.Pos(len(obj.Name()))) - if err != nil { - return nil, err - } - return []protocol.Location{loc}, nil - } - - // Finally, map the object position. - loc, err := mapPosition(ctx, pkg.FileSet(), snapshot, obj.Pos(), adjustedObjEnd(obj)) - if err != nil { - return nil, err - } - return []protocol.Location{loc}, nil -} - -// referencedObject returns the identifier and object referenced at the -// specified position, which must be within the file pgf, for the purposes of -// definition/hover/call hierarchy operations. It returns a nil object if no -// object was found at the given position. -// -// If the returned identifier is a type-switch implicit (i.e. the x in x := -// e.(type)), the third result will be the type of the expression being -// switched on (the type of e in the example). This facilitates workarounds for -// limitations of the go/types API, which does not report an object for the -// identifier x. -// -// For embedded fields, referencedObject returns the type name object rather -// than the var (field) object. -// -// TODO(rfindley): this function exists to preserve the pre-existing behavior -// of source.Identifier. Eliminate this helper in favor of sharing -// functionality with objectsAt, after choosing suitable primitives. -func referencedObject(pkg Package, pgf *ParsedGoFile, pos token.Pos) (*ast.Ident, types.Object, types.Type) { - path := pathEnclosingObjNode(pgf.File, pos) - if len(path) == 0 { - return nil, nil, nil - } - var obj types.Object - info := pkg.GetTypesInfo() - switch n := path[0].(type) { - case *ast.Ident: - obj = info.ObjectOf(n) - // If n is the var's declaring ident in a type switch - // [i.e. the x in x := foo.(type)], it will not have an object. 
In this - // case, set obj to the first implicit object (if any), and return the type - // of the expression being switched on. - // - // The type switch may have no case clauses and thus no - // implicit objects; this is a type error ("unused x"), - if obj == nil { - if implicits, typ := typeSwitchImplicits(info, path); len(implicits) > 0 { - return n, implicits[0], typ - } - } - - // If the original position was an embedded field, we want to jump - // to the field's type definition, not the field's definition. - if v, ok := obj.(*types.Var); ok && v.Embedded() { - // types.Info.Uses contains the embedded field's *types.TypeName. - if typeName := info.Uses[n]; typeName != nil { - obj = typeName - } - } - return n, obj, nil - } - return nil, nil, nil -} - -// importDefinition returns locations defining a package referenced by the -// import spec containing pos. -// -// If pos is not inside an import spec, it returns nil, nil. -func importDefinition(ctx context.Context, s Snapshot, pkg Package, pgf *ParsedGoFile, pos token.Pos) ([]protocol.Location, error) { - var imp *ast.ImportSpec - for _, spec := range pgf.File.Imports { - // We use "<= End" to accept a query immediately after an ImportSpec. 
- if spec.Path.Pos() <= pos && pos <= spec.Path.End() { - imp = spec - } - } - if imp == nil { - return nil, nil - } - - importPath := UnquoteImportPath(imp) - impID := pkg.Metadata().DepsByImpPath[importPath] - if impID == "" { - return nil, fmt.Errorf("failed to resolve import %q", importPath) - } - impMetadata := s.Metadata(impID) - if impMetadata == nil { - return nil, fmt.Errorf("missing information for package %q", impID) - } - - var locs []protocol.Location - for _, f := range impMetadata.CompiledGoFiles { - fh, err := s.ReadFile(ctx, f) - if err != nil { - if ctx.Err() != nil { - return nil, ctx.Err() - } - continue - } - pgf, err := s.ParseGo(ctx, fh, ParseHeader) - if err != nil { - if ctx.Err() != nil { - return nil, ctx.Err() - } - continue - } - loc, err := pgf.NodeLocation(pgf.File) - if err != nil { - return nil, err - } - locs = append(locs, loc) - } - - if len(locs) == 0 { - return nil, fmt.Errorf("package %q has no readable files", impID) // incl. unsafe - } - - return locs, nil -} - -// TODO(rfindley): avoid the duplicate column mapping here, by associating a -// column mapper with each file handle. -func mapPosition(ctx context.Context, fset *token.FileSet, s FileSource, start, end token.Pos) (protocol.Location, error) { - file := fset.File(start) - uri := span.URIFromPath(file.Name()) - fh, err := s.ReadFile(ctx, uri) - if err != nil { - return protocol.Location{}, err - } - content, err := fh.Content() - if err != nil { - return protocol.Location{}, err - } - m := protocol.NewMapper(fh.URI(), content) - return m.PosLocation(file, start, end) -} diff --git a/gopls/internal/lsp/source/diagnostics.go b/gopls/internal/lsp/source/diagnostics.go deleted file mode 100644 index ff41c570ddd..00000000000 --- a/gopls/internal/lsp/source/diagnostics.go +++ /dev/null @@ -1,187 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package source - -import ( - "context" - "encoding/json" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/progress" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" -) - -type SuggestedFix struct { - Title string - Edits map[span.URI][]protocol.TextEdit - Command *protocol.Command - ActionKind protocol.CodeActionKind -} - -// Analyze reports go/analysis-framework diagnostics in the specified package. -// -// If the provided tracker is non-nil, it may be used to provide notifications -// of the ongoing analysis pass. -func Analyze(ctx context.Context, snapshot Snapshot, pkgIDs map[PackageID]unit, tracker *progress.Tracker) (map[span.URI][]*Diagnostic, error) { - // Exit early if the context has been canceled. This also protects us - // from a race on Options, see golang/go#36699. - if ctx.Err() != nil { - return nil, ctx.Err() - } - - options := snapshot.Options() - categories := []map[string]*Analyzer{ - options.DefaultAnalyzers, - options.StaticcheckAnalyzers, - options.TypeErrorAnalyzers, - } - - var analyzers []*Analyzer - for _, cat := range categories { - for _, a := range cat { - analyzers = append(analyzers, a) - } - } - - analysisDiagnostics, err := snapshot.Analyze(ctx, pkgIDs, analyzers, tracker) - if err != nil { - return nil, err - } - - // Report diagnostics and errors from root analyzers. - reports := make(map[span.URI][]*Diagnostic) - for _, diag := range analysisDiagnostics { - reports[diag.URI] = append(reports[diag.URI], diag) - } - return reports, nil -} - -// CombineDiagnostics combines and filters list/parse/type diagnostics from -// tdiags with adiags, and appends the two lists to *outT and *outA, -// respectively. -// -// Type-error analyzers produce diagnostics that are redundant -// with type checker diagnostics, but more detailed (e.g. fixes). 
-// Rather than report two diagnostics for the same problem, -// we combine them by augmenting the type-checker diagnostic -// and discarding the analyzer diagnostic. -// -// If an analysis diagnostic has the same range and message as -// a list/parse/type diagnostic, the suggested fix information -// (et al) of the latter is merged into a copy of the former. -// This handles the case where a type-error analyzer suggests -// a fix to a type error, and avoids duplication. -// -// The use of out-slices, though irregular, allows the caller to -// easily choose whether to keep the results separate or combined. -// -// The arguments are not modified. -func CombineDiagnostics(tdiags []*Diagnostic, adiags []*Diagnostic, outT, outA *[]*Diagnostic) { - - // Build index of (list+parse+)type errors. - type key struct { - Range protocol.Range - message string - } - index := make(map[key]int) // maps (Range,Message) to index in tdiags slice - for i, diag := range tdiags { - index[key{diag.Range, diag.Message}] = i - } - - // Filter out analysis diagnostics that match type errors, - // retaining their suggested fix (etc) fields. - for _, diag := range adiags { - if i, ok := index[key{diag.Range, diag.Message}]; ok { - copy := *tdiags[i] - copy.SuggestedFixes = diag.SuggestedFixes - copy.Tags = diag.Tags - tdiags[i] = © - continue - } - - *outA = append(*outA, diag) - } - - *outT = append(*outT, tdiags...) -} - -// quickFixesJSON is a JSON-serializable list of quick fixes -// to be saved in the protocol.Diagnostic.Data field. -type quickFixesJSON struct { - // TODO(rfindley): pack some sort of identifier here for later - // lookup/validation? - Fixes []protocol.CodeAction -} - -// BundleQuickFixes attempts to bundle sd.SuggestedFixes into the -// sd.BundledFixes field, so that it can be round-tripped through the client. -// It returns false if the quick-fixes cannot be bundled. 
-func BundleQuickFixes(sd *Diagnostic) bool { - if len(sd.SuggestedFixes) == 0 { - return true - } - var actions []protocol.CodeAction - for _, fix := range sd.SuggestedFixes { - if fix.Edits != nil { - // For now, we only support bundled code actions that execute commands. - // - // In order to cleanly support bundled edits, we'd have to guarantee that - // the edits were generated on the current snapshot. But this naively - // implies that every fix would have to include a snapshot ID, which - // would require us to republish all diagnostics on each new snapshot. - // - // TODO(rfindley): in order to avoid this additional chatter, we'd need - // to build some sort of registry or other mechanism on the snapshot to - // check whether a diagnostic is still valid. - return false - } - action := protocol.CodeAction{ - Title: fix.Title, - Kind: fix.ActionKind, - Command: fix.Command, - } - actions = append(actions, action) - } - fixes := quickFixesJSON{ - Fixes: actions, - } - data, err := json.Marshal(fixes) - if err != nil { - bug.Reportf("marshalling quick fixes: %v", err) - return false - } - msg := json.RawMessage(data) - sd.BundledFixes = &msg - return true -} - -// BundledQuickFixes extracts any bundled codeActions from the -// diag.Data field. -func BundledQuickFixes(diag protocol.Diagnostic) []protocol.CodeAction { - if diag.Data == nil { - return nil - } - var fix quickFixesJSON - if err := json.Unmarshal(*diag.Data, &fix); err != nil { - bug.Reportf("unmarshalling quick fix: %v", err) - return nil - } - - var actions []protocol.CodeAction - for _, action := range fix.Fixes { - // See BundleQuickFixes: for now we only support bundling commands. - if action.Edit != nil { - bug.Reportf("bundled fix %q includes workspace edits", action.Title) - continue - } - // associate the action with the incoming diagnostic - // (Note that this does not mutate the fix.Fixes slice). 
- action.Diagnostics = []protocol.Diagnostic{diag} - actions = append(actions, action) - } - - return actions -} diff --git a/gopls/internal/lsp/source/embeddirective.go b/gopls/internal/lsp/source/embeddirective.go deleted file mode 100644 index d4e85d7add2..00000000000 --- a/gopls/internal/lsp/source/embeddirective.go +++ /dev/null @@ -1,195 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package source - -import ( - "errors" - "fmt" - "io/fs" - "path/filepath" - "strconv" - "strings" - "unicode" - "unicode/utf8" - - "golang.org/x/tools/gopls/internal/lsp/protocol" -) - -// ErrNoEmbed is returned by EmbedDefinition when no embed -// directive is found at a particular position. -// As such it indicates that other definitions could be worth checking. -var ErrNoEmbed = errors.New("no embed directive found") - -var errStopWalk = errors.New("stop walk") - -// EmbedDefinition finds a file matching the embed directive at pos in the mapped file. -// If there is no embed directive at pos, returns ErrNoEmbed. -// If multiple files match the embed pattern, one is picked at random. -func EmbedDefinition(m *protocol.Mapper, pos protocol.Position) ([]protocol.Location, error) { - pattern, _ := parseEmbedDirective(m, pos) - if pattern == "" { - return nil, ErrNoEmbed - } - - // Find the first matching file. 
- var match string - dir := filepath.Dir(m.URI.Filename()) - err := filepath.WalkDir(dir, func(abs string, d fs.DirEntry, e error) error { - if e != nil { - return e - } - rel, err := filepath.Rel(dir, abs) - if err != nil { - return err - } - ok, err := filepath.Match(pattern, rel) - if err != nil { - return err - } - if ok && !d.IsDir() { - match = abs - return errStopWalk - } - return nil - }) - if err != nil && !errors.Is(err, errStopWalk) { - return nil, err - } - if match == "" { - return nil, fmt.Errorf("%q does not match any files in %q", pattern, dir) - } - - loc := protocol.Location{ - URI: protocol.URIFromPath(match), - Range: protocol.Range{ - Start: protocol.Position{Line: 0, Character: 0}, - }, - } - return []protocol.Location{loc}, nil -} - -// parseEmbedDirective attempts to parse a go:embed directive argument at pos. -// If successful it return the directive argument and its range, else zero values are returned. -func parseEmbedDirective(m *protocol.Mapper, pos protocol.Position) (string, protocol.Range) { - lineStart, err := m.PositionOffset(protocol.Position{Line: pos.Line, Character: 0}) - if err != nil { - return "", protocol.Range{} - } - lineEnd, err := m.PositionOffset(protocol.Position{Line: pos.Line + 1, Character: 0}) - if err != nil { - return "", protocol.Range{} - } - - text := string(m.Content[lineStart:lineEnd]) - if !strings.HasPrefix(text, "//go:embed") { - return "", protocol.Range{} - } - text = text[len("//go:embed"):] - offset := lineStart + len("//go:embed") - - // Find the first pattern in text that covers the offset of the pos we are looking for. - findOffset, err := m.PositionOffset(pos) - if err != nil { - return "", protocol.Range{} - } - patterns, err := parseGoEmbed(text, offset) - if err != nil { - return "", protocol.Range{} - } - for _, p := range patterns { - if p.startOffset <= findOffset && findOffset <= p.endOffset { - // Found our match. 
- rng, err := m.OffsetRange(p.startOffset, p.endOffset) - if err != nil { - return "", protocol.Range{} - } - return p.pattern, rng - } - } - - return "", protocol.Range{} -} - -type fileEmbed struct { - pattern string - startOffset int - endOffset int -} - -// parseGoEmbed patterns that come after the directive. -// -// Copied and adapted from go/build/read.go. -// Replaced token.Position with start/end offset (including quotes if present). -func parseGoEmbed(args string, offset int) ([]fileEmbed, error) { - trimBytes := func(n int) { - offset += n - args = args[n:] - } - trimSpace := func() { - trim := strings.TrimLeftFunc(args, unicode.IsSpace) - trimBytes(len(args) - len(trim)) - } - - var list []fileEmbed - for trimSpace(); args != ""; trimSpace() { - var path string - pathOffset := offset - Switch: - switch args[0] { - default: - i := len(args) - for j, c := range args { - if unicode.IsSpace(c) { - i = j - break - } - } - path = args[:i] - trimBytes(i) - - case '`': - var ok bool - path, _, ok = strings.Cut(args[1:], "`") - if !ok { - return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args) - } - trimBytes(1 + len(path) + 1) - - case '"': - i := 1 - for ; i < len(args); i++ { - if args[i] == '\\' { - i++ - continue - } - if args[i] == '"' { - q, err := strconv.Unquote(args[:i+1]) - if err != nil { - return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args[:i+1]) - } - path = q - trimBytes(i + 1) - break Switch - } - } - if i >= len(args) { - return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args) - } - } - - if args != "" { - r, _ := utf8.DecodeRuneInString(args) - if !unicode.IsSpace(r) { - return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args) - } - } - list = append(list, fileEmbed{ - pattern: path, - startOffset: pathOffset, - endOffset: offset, - }) - } - return list, nil -} diff --git a/gopls/internal/lsp/source/fix.go b/gopls/internal/lsp/source/fix.go deleted file mode 100644 index 
f024de03949..00000000000 --- a/gopls/internal/lsp/source/fix.go +++ /dev/null @@ -1,195 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package source - -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/analysis/embeddirective" - "golang.org/x/tools/gopls/internal/lsp/analysis/fillstruct" - "golang.org/x/tools/gopls/internal/lsp/analysis/undeclaredname" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/imports" -) - -type ( - // SuggestedFixFunc is a function used to get the suggested fixes for a given - // gopls command, some of which are provided by go/analysis.Analyzers. Some of - // the analyzers in internal/lsp/analysis are not efficient enough to include - // suggested fixes with their diagnostics, so we have to compute them - // separately. Such analyzers should provide a function with a signature of - // SuggestedFixFunc. - // - // The returned FileSet must map all token.Pos found in the suggested text - // edits. - SuggestedFixFunc func(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng protocol.Range) (*token.FileSet, *analysis.SuggestedFix, error) - singleFileFixFunc func(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) -) - -// These strings identify kinds of suggested fix, both in Analyzer.Fix -// and in the ApplyFix subcommand (see ExecuteCommand and ApplyFixArgs.Fix). 
-const ( - FillStruct = "fill_struct" - StubMethods = "stub_methods" - UndeclaredName = "undeclared_name" - ExtractVariable = "extract_variable" - ExtractFunction = "extract_function" - ExtractMethod = "extract_method" - InlineCall = "inline_call" - InvertIfCondition = "invert_if_condition" - AddEmbedImport = "add_embed_import" -) - -// suggestedFixes maps a suggested fix command id to its handler. -var suggestedFixes = map[string]SuggestedFixFunc{ - FillStruct: singleFile(fillstruct.SuggestedFix), - UndeclaredName: singleFile(undeclaredname.SuggestedFix), - ExtractVariable: singleFile(extractVariable), - InlineCall: inlineCall, - ExtractFunction: singleFile(extractFunction), - ExtractMethod: singleFile(extractMethod), - InvertIfCondition: singleFile(invertIfCondition), - StubMethods: stubSuggestedFixFunc, - AddEmbedImport: addEmbedImport, -} - -// singleFile calls analyzers that expect inputs for a single file -func singleFile(sf singleFileFixFunc) SuggestedFixFunc { - return func(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng protocol.Range) (*token.FileSet, *analysis.SuggestedFix, error) { - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, nil, err - } - start, end, err := pgf.RangePos(pRng) - if err != nil { - return nil, nil, err - } - fix, err := sf(pkg.FileSet(), start, end, pgf.Src, pgf.File, pkg.GetTypes(), pkg.GetTypesInfo()) - return pkg.FileSet(), fix, err - } -} - -func SuggestedFixFromCommand(cmd protocol.Command, kind protocol.CodeActionKind) SuggestedFix { - return SuggestedFix{ - Title: cmd.Title, - Command: &cmd, - ActionKind: kind, - } -} - -// ApplyFix applies the command's suggested fix to the given file and -// range, returning the resulting edits. 
-func ApplyFix(ctx context.Context, fix string, snapshot Snapshot, fh FileHandle, pRng protocol.Range) ([]protocol.TextDocumentEdit, error) { - handler, ok := suggestedFixes[fix] - if !ok { - return nil, fmt.Errorf("no suggested fix function for %s", fix) - } - fset, suggestion, err := handler(ctx, snapshot, fh, pRng) - if err != nil { - return nil, err - } - if suggestion == nil { - return nil, nil - } - editsPerFile := map[span.URI]*protocol.TextDocumentEdit{} - for _, edit := range suggestion.TextEdits { - tokFile := fset.File(edit.Pos) - if tokFile == nil { - return nil, bug.Errorf("no file for edit position") - } - end := edit.End - if !end.IsValid() { - end = edit.Pos - } - fh, err := snapshot.ReadFile(ctx, span.URIFromPath(tokFile.Name())) - if err != nil { - return nil, err - } - te, ok := editsPerFile[fh.URI()] - if !ok { - te = &protocol.TextDocumentEdit{ - TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{ - Version: fh.Version(), - TextDocumentIdentifier: protocol.TextDocumentIdentifier{ - URI: protocol.URIFromSpanURI(fh.URI()), - }, - }, - } - editsPerFile[fh.URI()] = te - } - content, err := fh.Content() - if err != nil { - return nil, err - } - m := protocol.NewMapper(fh.URI(), content) - rng, err := m.PosRange(tokFile, edit.Pos, end) - if err != nil { - return nil, err - } - te.Edits = append(te.Edits, protocol.TextEdit{ - Range: rng, - NewText: string(edit.NewText), - }) - } - var edits []protocol.TextDocumentEdit - for _, edit := range editsPerFile { - edits = append(edits, *edit) - } - return edits, nil -} - -// fixedByImportingEmbed returns true if diag can be fixed by addEmbedImport. -func fixedByImportingEmbed(diag *Diagnostic) bool { - if diag == nil { - return false - } - return diag.Message == embeddirective.MissingImportMessage -} - -// addEmbedImport adds a missing embed "embed" import with blank name. 
-func addEmbedImport(ctx context.Context, snapshot Snapshot, fh FileHandle, _ protocol.Range) (*token.FileSet, *analysis.SuggestedFix, error) { - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, nil, fmt.Errorf("narrow pkg: %w", err) - } - - // Like source.AddImport, but with _ as Name and using our pgf. - protoEdits, err := ComputeOneImportFixEdits(snapshot, pgf, &imports.ImportFix{ - StmtInfo: imports.ImportInfo{ - ImportPath: "embed", - Name: "_", - }, - FixType: imports.AddImport, - }) - if err != nil { - return nil, nil, fmt.Errorf("compute edits: %w", err) - } - - var edits []analysis.TextEdit - for _, e := range protoEdits { - start, end, err := pgf.RangePos(e.Range) - if err != nil { - return nil, nil, fmt.Errorf("map range: %w", err) - } - edits = append(edits, analysis.TextEdit{ - Pos: start, - End: end, - NewText: []byte(e.NewText), - }) - } - - fix := &analysis.SuggestedFix{ - Message: "Add embed import", - TextEdits: edits, - } - return pkg.FileSet(), fix, nil -} diff --git a/gopls/internal/lsp/source/folding_range.go b/gopls/internal/lsp/source/folding_range.go deleted file mode 100644 index 9f63c77a4f9..00000000000 --- a/gopls/internal/lsp/source/folding_range.go +++ /dev/null @@ -1,194 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package source - -import ( - "context" - "go/ast" - "go/token" - "sort" - "strings" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" -) - -// FoldingRangeInfo holds range and kind info of folding for an ast.Node -type FoldingRangeInfo struct { - MappedRange protocol.MappedRange - Kind protocol.FoldingRangeKind -} - -// FoldingRange gets all of the folding range for f. 
-func FoldingRange(ctx context.Context, snapshot Snapshot, fh FileHandle, lineFoldingOnly bool) (ranges []*FoldingRangeInfo, err error) { - // TODO(suzmue): consider limiting the number of folding ranges returned, and - // implement a way to prioritize folding ranges in that case. - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, err - } - - // With parse errors, we wouldn't be able to produce accurate folding info. - // LSP protocol (3.16) currently does not have a way to handle this case - // (https://github.com/microsoft/language-server-protocol/issues/1200). - // We cannot return an error either because we are afraid some editors - // may not handle errors nicely. As a workaround, we now return an empty - // result and let the client handle this case by double check the file - // contents (i.e. if the file is not empty and the folding range result - // is empty, raise an internal error). - if pgf.ParseErr != nil { - return nil, nil - } - - // Get folding ranges for comments separately as they are not walked by ast.Inspect. - ranges = append(ranges, commentsFoldingRange(pgf)...) - - visit := func(n ast.Node) bool { - rng := foldingRangeFunc(pgf, n, lineFoldingOnly) - if rng != nil { - ranges = append(ranges, rng) - } - return true - } - // Walk the ast and collect folding ranges. - ast.Inspect(pgf.File, visit) - - sort.Slice(ranges, func(i, j int) bool { - irng := ranges[i].MappedRange.Range() - jrng := ranges[j].MappedRange.Range() - return protocol.CompareRange(irng, jrng) < 0 - }) - - return ranges, nil -} - -// foldingRangeFunc calculates the line folding range for ast.Node n -func foldingRangeFunc(pgf *ParsedGoFile, n ast.Node, lineFoldingOnly bool) *FoldingRangeInfo { - // TODO(suzmue): include trailing empty lines before the closing - // parenthesis/brace. 
- var kind protocol.FoldingRangeKind - var start, end token.Pos - switch n := n.(type) { - case *ast.BlockStmt: - // Fold between positions of or lines between "{" and "}". - var startList, endList token.Pos - if num := len(n.List); num != 0 { - startList, endList = n.List[0].Pos(), n.List[num-1].End() - } - start, end = validLineFoldingRange(pgf.Tok, n.Lbrace, n.Rbrace, startList, endList, lineFoldingOnly) - case *ast.CaseClause: - // Fold from position of ":" to end. - start, end = n.Colon+1, n.End() - case *ast.CommClause: - // Fold from position of ":" to end. - start, end = n.Colon+1, n.End() - case *ast.CallExpr: - // Fold from position of "(" to position of ")". - start, end = n.Lparen+1, n.Rparen - case *ast.FieldList: - // Fold between positions of or lines between opening parenthesis/brace and closing parenthesis/brace. - var startList, endList token.Pos - if num := len(n.List); num != 0 { - startList, endList = n.List[0].Pos(), n.List[num-1].End() - } - start, end = validLineFoldingRange(pgf.Tok, n.Opening, n.Closing, startList, endList, lineFoldingOnly) - case *ast.GenDecl: - // If this is an import declaration, set the kind to be protocol.Imports. - if n.Tok == token.IMPORT { - kind = protocol.Imports - } - // Fold between positions of or lines between "(" and ")". - var startSpecs, endSpecs token.Pos - if num := len(n.Specs); num != 0 { - startSpecs, endSpecs = n.Specs[0].Pos(), n.Specs[num-1].End() - } - start, end = validLineFoldingRange(pgf.Tok, n.Lparen, n.Rparen, startSpecs, endSpecs, lineFoldingOnly) - case *ast.BasicLit: - // Fold raw string literals from position of "`" to position of "`". - if n.Kind == token.STRING && len(n.Value) >= 2 && n.Value[0] == '`' && n.Value[len(n.Value)-1] == '`' { - start, end = n.Pos(), n.End() - } - case *ast.CompositeLit: - // Fold between positions of or lines between "{" and "}". 
- var startElts, endElts token.Pos - if num := len(n.Elts); num != 0 { - startElts, endElts = n.Elts[0].Pos(), n.Elts[num-1].End() - } - start, end = validLineFoldingRange(pgf.Tok, n.Lbrace, n.Rbrace, startElts, endElts, lineFoldingOnly) - } - - // Check that folding positions are valid. - if !start.IsValid() || !end.IsValid() { - return nil - } - // in line folding mode, do not fold if the start and end lines are the same. - if lineFoldingOnly && safetoken.Line(pgf.Tok, start) == safetoken.Line(pgf.Tok, end) { - return nil - } - mrng, err := pgf.PosMappedRange(start, end) - if err != nil { - bug.Errorf("%w", err) // can't happen - } - return &FoldingRangeInfo{ - MappedRange: mrng, - Kind: kind, - } -} - -// validLineFoldingRange returns start and end token.Pos for folding range if the range is valid. -// returns token.NoPos otherwise, which fails token.IsValid check -func validLineFoldingRange(tokFile *token.File, open, close, start, end token.Pos, lineFoldingOnly bool) (token.Pos, token.Pos) { - if lineFoldingOnly { - if !open.IsValid() || !close.IsValid() { - return token.NoPos, token.NoPos - } - - // Don't want to fold if the start/end is on the same line as the open/close - // as an example, the example below should *not* fold: - // var x = [2]string{"d", - // "e" } - if safetoken.Line(tokFile, open) == safetoken.Line(tokFile, start) || - safetoken.Line(tokFile, close) == safetoken.Line(tokFile, end) { - return token.NoPos, token.NoPos - } - - return open + 1, end - } - return open + 1, close -} - -// commentsFoldingRange returns the folding ranges for all comment blocks in file. -// The folding range starts at the end of the first line of the comment block, and ends at the end of the -// comment block and has kind protocol.Comment. 
-func commentsFoldingRange(pgf *ParsedGoFile) (comments []*FoldingRangeInfo) { - tokFile := pgf.Tok - for _, commentGrp := range pgf.File.Comments { - startGrpLine, endGrpLine := safetoken.Line(tokFile, commentGrp.Pos()), safetoken.Line(tokFile, commentGrp.End()) - if startGrpLine == endGrpLine { - // Don't fold single line comments. - continue - } - - firstComment := commentGrp.List[0] - startPos, endLinePos := firstComment.Pos(), firstComment.End() - startCmmntLine, endCmmntLine := safetoken.Line(tokFile, startPos), safetoken.Line(tokFile, endLinePos) - if startCmmntLine != endCmmntLine { - // If the first comment spans multiple lines, then we want to have the - // folding range start at the end of the first line. - endLinePos = token.Pos(int(startPos) + len(strings.Split(firstComment.Text, "\n")[0])) - } - mrng, err := pgf.PosMappedRange(endLinePos, commentGrp.End()) - if err != nil { - bug.Errorf("%w", err) // can't happen - } - comments = append(comments, &FoldingRangeInfo{ - // Fold from the end of the first line comment to the end of the comment block. - MappedRange: mrng, - Kind: protocol.Comment, - }) - } - return comments -} diff --git a/gopls/internal/lsp/source/format.go b/gopls/internal/lsp/source/format.go deleted file mode 100644 index 6eed4cb9d0b..00000000000 --- a/gopls/internal/lsp/source/format.go +++ /dev/null @@ -1,388 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package source provides core features for use by Go editors and tools. 
-package source - -import ( - "bytes" - "context" - "fmt" - "go/ast" - "go/format" - "go/parser" - "go/token" - "strings" - "text/scanner" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/internal/diff" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/imports" - "golang.org/x/tools/internal/tokeninternal" -) - -// Format formats a file with a given range. -func Format(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.TextEdit, error) { - ctx, done := event.Start(ctx, "source.Format") - defer done() - - // Generated files shouldn't be edited. So, don't format them - if IsGenerated(ctx, snapshot, fh.URI()) { - return nil, fmt.Errorf("can't format %q: file is generated", fh.URI().Filename()) - } - - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, err - } - // Even if this file has parse errors, it might still be possible to format it. - // Using format.Node on an AST with errors may result in code being modified. - // Attempt to format the source of this file instead. - if pgf.ParseErr != nil { - formatted, err := formatSource(ctx, fh) - if err != nil { - return nil, err - } - return computeTextEdits(ctx, snapshot, pgf, string(formatted)) - } - - // format.Node changes slightly from one release to another, so the version - // of Go used to build the LSP server will determine how it formats code. - // This should be acceptable for all users, who likely be prompted to rebuild - // the LSP server on each Go release. - buf := &bytes.Buffer{} - fset := tokeninternal.FileSetFor(pgf.Tok) - if err := format.Node(buf, fset, pgf.File); err != nil { - return nil, err - } - formatted := buf.String() - - // Apply additional formatting, if any is supported. Currently, the only - // supported additional formatter is gofumpt. 
- if format := snapshot.Options().GofumptFormat; snapshot.Options().Gofumpt && format != nil { - // gofumpt can customize formatting based on language version and module - // path, if available. - // - // Try to derive this information, but fall-back on the default behavior. - // - // TODO: under which circumstances can we fail to find module information? - // Can this, for example, result in inconsistent formatting across saves, - // due to pending calls to packages.Load? - var langVersion, modulePath string - meta, err := NarrowestMetadataForFile(ctx, snapshot, fh.URI()) - if err == nil { - if mi := meta.Module; mi != nil { - langVersion = mi.GoVersion - modulePath = mi.Path - } - } - b, err := format(ctx, langVersion, modulePath, buf.Bytes()) - if err != nil { - return nil, err - } - formatted = string(b) - } - return computeTextEdits(ctx, snapshot, pgf, formatted) -} - -func formatSource(ctx context.Context, fh FileHandle) ([]byte, error) { - _, done := event.Start(ctx, "source.formatSource") - defer done() - - data, err := fh.Content() - if err != nil { - return nil, err - } - return format.Source(data) -} - -type ImportFix struct { - Fix *imports.ImportFix - Edits []protocol.TextEdit -} - -// AllImportsFixes formats f for each possible fix to the imports. -// In addition to returning the result of applying all edits, -// it returns a list of fixes that could be applied to the file, with the -// corresponding TextEdits that would be needed to apply that fix. 
-func AllImportsFixes(ctx context.Context, snapshot Snapshot, pgf *ParsedGoFile) (allFixEdits []protocol.TextEdit, editsPerFix []*ImportFix, err error) { - ctx, done := event.Start(ctx, "source.AllImportsFixes") - defer done() - - if err := snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error { - allFixEdits, editsPerFix, err = computeImportEdits(ctx, snapshot, pgf, opts) - return err - }); err != nil { - return nil, nil, fmt.Errorf("AllImportsFixes: %v", err) - } - return allFixEdits, editsPerFix, nil -} - -// computeImportEdits computes a set of edits that perform one or all of the -// necessary import fixes. -func computeImportEdits(ctx context.Context, snapshot Snapshot, pgf *ParsedGoFile, options *imports.Options) (allFixEdits []protocol.TextEdit, editsPerFix []*ImportFix, err error) { - filename := pgf.URI.Filename() - - // Build up basic information about the original file. - allFixes, err := imports.FixImports(ctx, filename, pgf.Src, options) - if err != nil { - return nil, nil, err - } - - allFixEdits, err = computeFixEdits(snapshot, pgf, options, allFixes) - if err != nil { - return nil, nil, err - } - - // Apply all of the import fixes to the file. - // Add the edits for each fix to the result. - for _, fix := range allFixes { - edits, err := computeFixEdits(snapshot, pgf, options, []*imports.ImportFix{fix}) - if err != nil { - return nil, nil, err - } - editsPerFix = append(editsPerFix, &ImportFix{ - Fix: fix, - Edits: edits, - }) - } - return allFixEdits, editsPerFix, nil -} - -// ComputeOneImportFixEdits returns text edits for a single import fix. -func ComputeOneImportFixEdits(snapshot Snapshot, pgf *ParsedGoFile, fix *imports.ImportFix) ([]protocol.TextEdit, error) { - options := &imports.Options{ - LocalPrefix: snapshot.Options().Local, - // Defaults. 
- AllErrors: true, - Comments: true, - Fragment: true, - FormatOnly: false, - TabIndent: true, - TabWidth: 8, - } - return computeFixEdits(snapshot, pgf, options, []*imports.ImportFix{fix}) -} - -func computeFixEdits(snapshot Snapshot, pgf *ParsedGoFile, options *imports.Options, fixes []*imports.ImportFix) ([]protocol.TextEdit, error) { - // trim the original data to match fixedData - left, err := importPrefix(pgf.Src) - if err != nil { - return nil, err - } - extra := !strings.Contains(left, "\n") // one line may have more than imports - if extra { - left = string(pgf.Src) - } - if len(left) > 0 && left[len(left)-1] != '\n' { - left += "\n" - } - // Apply the fixes and re-parse the file so that we can locate the - // new imports. - flags := parser.ImportsOnly - if extra { - // used all of origData above, use all of it here too - flags = 0 - } - fixedData, err := imports.ApplyFixes(fixes, "", pgf.Src, options, flags) - if err != nil { - return nil, err - } - if fixedData == nil || fixedData[len(fixedData)-1] != '\n' { - fixedData = append(fixedData, '\n') // ApplyFixes may miss the newline, go figure. - } - edits := snapshot.Options().ComputeEdits(left, string(fixedData)) - return protocolEditsFromSource([]byte(left), edits) -} - -// importPrefix returns the prefix of the given file content through the final -// import statement. If there are no imports, the prefix is the package -// statement and any comment groups below it. 
-func importPrefix(src []byte) (string, error) { - fset := token.NewFileSet() - // do as little parsing as possible - f, err := parser.ParseFile(fset, "", src, parser.ImportsOnly|parser.ParseComments) - if err != nil { // This can happen if 'package' is misspelled - return "", fmt.Errorf("importPrefix: failed to parse: %s", err) - } - tok := fset.File(f.Pos()) - var importEnd int - for _, d := range f.Decls { - if x, ok := d.(*ast.GenDecl); ok && x.Tok == token.IMPORT { - if e, err := safetoken.Offset(tok, d.End()); err != nil { - return "", fmt.Errorf("importPrefix: %s", err) - } else if e > importEnd { - importEnd = e - } - } - } - - maybeAdjustToLineEnd := func(pos token.Pos, isCommentNode bool) int { - offset, err := safetoken.Offset(tok, pos) - if err != nil { - return -1 - } - - // Don't go past the end of the file. - if offset > len(src) { - offset = len(src) - } - // The go/ast package does not account for different line endings, and - // specifically, in the text of a comment, it will strip out \r\n line - // endings in favor of \n. To account for these differences, we try to - // return a position on the next line whenever possible. - switch line := safetoken.Line(tok, tok.Pos(offset)); { - case line < tok.LineCount(): - nextLineOffset, err := safetoken.Offset(tok, tok.LineStart(line+1)) - if err != nil { - return -1 - } - // If we found a position that is at the end of a line, move the - // offset to the start of the next line. - if offset+1 == nextLineOffset { - offset = nextLineOffset - } - case isCommentNode, offset+1 == tok.Size(): - // If the last line of the file is a comment, or we are at the end - // of the file, the prefix is the entire file. 
- offset = len(src) - } - return offset - } - if importEnd == 0 { - pkgEnd := f.Name.End() - importEnd = maybeAdjustToLineEnd(pkgEnd, false) - } - for _, cgroup := range f.Comments { - for _, c := range cgroup.List { - if end, err := safetoken.Offset(tok, c.End()); err != nil { - return "", err - } else if end > importEnd { - startLine := safetoken.Position(tok, c.Pos()).Line - endLine := safetoken.Position(tok, c.End()).Line - - // Work around golang/go#41197 by checking if the comment might - // contain "\r", and if so, find the actual end position of the - // comment by scanning the content of the file. - startOffset, err := safetoken.Offset(tok, c.Pos()) - if err != nil { - return "", err - } - if startLine != endLine && bytes.Contains(src[startOffset:], []byte("\r")) { - if commentEnd := scanForCommentEnd(src[startOffset:]); commentEnd > 0 { - end = startOffset + commentEnd - } - } - importEnd = maybeAdjustToLineEnd(tok.Pos(end), true) - } - } - } - if importEnd > len(src) { - importEnd = len(src) - } - return string(src[:importEnd]), nil -} - -// scanForCommentEnd returns the offset of the end of the multi-line comment -// at the start of the given byte slice. -func scanForCommentEnd(src []byte) int { - var s scanner.Scanner - s.Init(bytes.NewReader(src)) - s.Mode ^= scanner.SkipComments - - t := s.Scan() - if t == scanner.Comment { - return s.Pos().Offset - } - return 0 -} - -func computeTextEdits(ctx context.Context, snapshot Snapshot, pgf *ParsedGoFile, formatted string) ([]protocol.TextEdit, error) { - _, done := event.Start(ctx, "source.computeTextEdits") - defer done() - - edits := snapshot.Options().ComputeEdits(string(pgf.Src), formatted) - return ToProtocolEdits(pgf.Mapper, edits) -} - -// protocolEditsFromSource converts text edits to LSP edits using the original -// source. 
-func protocolEditsFromSource(src []byte, edits []diff.Edit) ([]protocol.TextEdit, error) { - m := protocol.NewMapper("", src) - var result []protocol.TextEdit - for _, edit := range edits { - rng, err := m.OffsetRange(edit.Start, edit.End) - if err != nil { - return nil, err - } - - if rng.Start == rng.End && edit.New == "" { - // Degenerate case, which may result from a diff tool wanting to delete - // '\r' in line endings. Filter it out. - continue - } - result = append(result, protocol.TextEdit{ - Range: rng, - NewText: edit.New, - }) - } - return result, nil -} - -// ToProtocolEdits converts diff.Edits to a non-nil slice of LSP TextEdits. -// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textEditArray -func ToProtocolEdits(m *protocol.Mapper, edits []diff.Edit) ([]protocol.TextEdit, error) { - // LSP doesn't require TextEditArray to be sorted: - // this is the receiver's concern. But govim, and perhaps - // other clients have historically relied on the order. - edits = append([]diff.Edit(nil), edits...) - diff.SortEdits(edits) - - result := make([]protocol.TextEdit, len(edits)) - for i, edit := range edits { - rng, err := m.OffsetRange(edit.Start, edit.End) - if err != nil { - return nil, err - } - result[i] = protocol.TextEdit{ - Range: rng, - NewText: edit.New, - } - } - return result, nil -} - -// FromProtocolEdits converts LSP TextEdits to diff.Edits. 
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textEditArray -func FromProtocolEdits(m *protocol.Mapper, edits []protocol.TextEdit) ([]diff.Edit, error) { - if edits == nil { - return nil, nil - } - result := make([]diff.Edit, len(edits)) - for i, edit := range edits { - start, end, err := m.RangeOffsets(edit.Range) - if err != nil { - return nil, err - } - result[i] = diff.Edit{ - Start: start, - End: end, - New: edit.NewText, - } - } - return result, nil -} - -// ApplyProtocolEdits applies the patch (edits) to m.Content and returns the result. -// It also returns the edits converted to diff-package form. -func ApplyProtocolEdits(m *protocol.Mapper, edits []protocol.TextEdit) ([]byte, []diff.Edit, error) { - diffEdits, err := FromProtocolEdits(m, edits) - if err != nil { - return nil, nil, err - } - out, err := diff.ApplyBytes(m.Content, diffEdits) - return out, diffEdits, err -} diff --git a/gopls/internal/lsp/source/highlight.go b/gopls/internal/lsp/source/highlight.go deleted file mode 100644 index adfc659e20c..00000000000 --- a/gopls/internal/lsp/source/highlight.go +++ /dev/null @@ -1,480 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package source - -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/event" -) - -func Highlight(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) ([]protocol.Range, error) { - ctx, done := event.Start(ctx, "source.Highlight") - defer done() - - // We always want fully parsed files for highlight, regardless - // of whether the file belongs to a workspace package. 
- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, fmt.Errorf("getting package for Highlight: %w", err) - } - - pos, err := pgf.PositionPos(position) - if err != nil { - return nil, err - } - path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos) - if len(path) == 0 { - return nil, fmt.Errorf("no enclosing position found for %v:%v", position.Line, position.Character) - } - // If start == end for astutil.PathEnclosingInterval, the 1-char interval - // following start is used instead. As a result, we might not get an exact - // match so we should check the 1-char interval to the left of the passed - // in position to see if that is an exact match. - if _, ok := path[0].(*ast.Ident); !ok { - if p, _ := astutil.PathEnclosingInterval(pgf.File, pos-1, pos-1); p != nil { - switch p[0].(type) { - case *ast.Ident, *ast.SelectorExpr: - path = p // use preceding ident/selector - } - } - } - result, err := highlightPath(path, pgf.File, pkg.GetTypesInfo()) - if err != nil { - return nil, err - } - var ranges []protocol.Range - for rng := range result { - rng, err := pgf.PosRange(rng.start, rng.end) - if err != nil { - return nil, err - } - ranges = append(ranges, rng) - } - return ranges, nil -} - -func highlightPath(path []ast.Node, file *ast.File, info *types.Info) (map[posRange]struct{}, error) { - result := make(map[posRange]struct{}) - switch node := path[0].(type) { - case *ast.BasicLit: - // Import path string literal? - if len(path) > 1 { - if imp, ok := path[1].(*ast.ImportSpec); ok { - highlight := func(n ast.Node) { - result[posRange{start: n.Pos(), end: n.End()}] = struct{}{} - } - - // Highlight the import itself... - highlight(imp) - - // ...and all references to it in the file. 
- if pkgname, ok := ImportedPkgName(info, imp); ok { - ast.Inspect(file, func(n ast.Node) bool { - if id, ok := n.(*ast.Ident); ok && - info.Uses[id] == pkgname { - highlight(id) - } - return true - }) - } - return result, nil - } - } - highlightFuncControlFlow(path, result) - case *ast.ReturnStmt, *ast.FuncDecl, *ast.FuncType: - highlightFuncControlFlow(path, result) - case *ast.Ident: - // Check if ident is inside return or func decl. - highlightFuncControlFlow(path, result) - highlightIdentifier(node, file, info, result) - case *ast.ForStmt, *ast.RangeStmt: - highlightLoopControlFlow(path, info, result) - case *ast.SwitchStmt: - highlightSwitchFlow(path, info, result) - case *ast.BranchStmt: - // BREAK can exit a loop, switch or select, while CONTINUE exit a loop so - // these need to be handled separately. They can also be embedded in any - // other loop/switch/select if they have a label. TODO: add support for - // GOTO and FALLTHROUGH as well. - switch node.Tok { - case token.BREAK: - if node.Label != nil { - highlightLabeledFlow(path, info, node, result) - } else { - highlightUnlabeledBreakFlow(path, info, result) - } - case token.CONTINUE: - if node.Label != nil { - highlightLabeledFlow(path, info, node, result) - } else { - highlightLoopControlFlow(path, info, result) - } - } - default: - // If the cursor is in an unidentified area, return empty results. - return nil, nil - } - return result, nil -} - -type posRange struct { - start, end token.Pos -} - -func highlightFuncControlFlow(path []ast.Node, result map[posRange]struct{}) { - var enclosingFunc ast.Node - var returnStmt *ast.ReturnStmt - var resultsList *ast.FieldList - inReturnList := false - -Outer: - // Reverse walk the path till we get to the func block. 
- for i, n := range path { - switch node := n.(type) { - case *ast.KeyValueExpr: - // If cursor is in a key: value expr, we don't want control flow highlighting - return - case *ast.CallExpr: - // If cursor is an arg in a callExpr, we don't want control flow highlighting. - if i > 0 { - for _, arg := range node.Args { - if arg == path[i-1] { - return - } - } - } - case *ast.Field: - inReturnList = true - case *ast.FuncLit: - enclosingFunc = n - resultsList = node.Type.Results - break Outer - case *ast.FuncDecl: - enclosingFunc = n - resultsList = node.Type.Results - break Outer - case *ast.ReturnStmt: - returnStmt = node - // If the cursor is not directly in a *ast.ReturnStmt, then - // we need to know if it is within one of the values that is being returned. - inReturnList = inReturnList || path[0] != returnStmt - } - } - // Cursor is not in a function. - if enclosingFunc == nil { - return - } - // If the cursor is on a "return" or "func" keyword, we should highlight all of the exit - // points of the function, including the "return" and "func" keywords. - highlightAllReturnsAndFunc := path[0] == returnStmt || path[0] == enclosingFunc - switch path[0].(type) { - case *ast.Ident, *ast.BasicLit: - // Cursor is in an identifier and not in a return statement or in the results list. - if returnStmt == nil && !inReturnList { - return - } - case *ast.FuncType: - highlightAllReturnsAndFunc = true - } - // The user's cursor may be within the return statement of a function, - // or within the result section of a function's signature. - // index := -1 - var nodes []ast.Node - if returnStmt != nil { - for _, n := range returnStmt.Results { - nodes = append(nodes, n) - } - } else if resultsList != nil { - for _, n := range resultsList.List { - nodes = append(nodes, n) - } - } - _, index := nodeAtPos(nodes, path[0].Pos()) - - // Highlight the correct argument in the function declaration return types. 
- if resultsList != nil && -1 < index && index < len(resultsList.List) { - rng := posRange{ - start: resultsList.List[index].Pos(), - end: resultsList.List[index].End(), - } - result[rng] = struct{}{} - } - // Add the "func" part of the func declaration. - if highlightAllReturnsAndFunc { - r := posRange{ - start: enclosingFunc.Pos(), - end: enclosingFunc.Pos() + token.Pos(len("func")), - } - result[r] = struct{}{} - } - ast.Inspect(enclosingFunc, func(n ast.Node) bool { - // Don't traverse any other functions. - switch n.(type) { - case *ast.FuncDecl, *ast.FuncLit: - return enclosingFunc == n - } - ret, ok := n.(*ast.ReturnStmt) - if !ok { - return true - } - var toAdd ast.Node - // Add the entire return statement, applies when highlight the word "return" or "func". - if highlightAllReturnsAndFunc { - toAdd = n - } - // Add the relevant field within the entire return statement. - if -1 < index && index < len(ret.Results) { - toAdd = ret.Results[index] - } - if toAdd != nil { - result[posRange{start: toAdd.Pos(), end: toAdd.End()}] = struct{}{} - } - return false - }) -} - -// highlightUnlabeledBreakFlow highlights the innermost enclosing for/range/switch or swlect -func highlightUnlabeledBreakFlow(path []ast.Node, info *types.Info, result map[posRange]struct{}) { - // Reverse walk the path until we find closest loop, select, or switch. - for _, n := range path { - switch n.(type) { - case *ast.ForStmt, *ast.RangeStmt: - highlightLoopControlFlow(path, info, result) - return // only highlight the innermost statement - case *ast.SwitchStmt: - highlightSwitchFlow(path, info, result) - return - case *ast.SelectStmt: - // TODO: add highlight when breaking a select. - return - } - } -} - -// highlightLabeledFlow highlights the enclosing labeled for, range, -// or switch statement denoted by a labeled break or continue stmt. 
-func highlightLabeledFlow(path []ast.Node, info *types.Info, stmt *ast.BranchStmt, result map[posRange]struct{}) { - use := info.Uses[stmt.Label] - if use == nil { - return - } - for _, n := range path { - if label, ok := n.(*ast.LabeledStmt); ok && info.Defs[label.Label] == use { - switch label.Stmt.(type) { - case *ast.ForStmt, *ast.RangeStmt: - highlightLoopControlFlow([]ast.Node{label.Stmt, label}, info, result) - case *ast.SwitchStmt: - highlightSwitchFlow([]ast.Node{label.Stmt, label}, info, result) - } - return - } - } -} - -func labelFor(path []ast.Node) *ast.Ident { - if len(path) > 1 { - if n, ok := path[1].(*ast.LabeledStmt); ok { - return n.Label - } - } - return nil -} - -func highlightLoopControlFlow(path []ast.Node, info *types.Info, result map[posRange]struct{}) { - var loop ast.Node - var loopLabel *ast.Ident - stmtLabel := labelFor(path) -Outer: - // Reverse walk the path till we get to the for loop. - for i := range path { - switch n := path[i].(type) { - case *ast.ForStmt, *ast.RangeStmt: - loopLabel = labelFor(path[i:]) - - if stmtLabel == nil || loopLabel == stmtLabel { - loop = n - break Outer - } - } - } - if loop == nil { - return - } - - // Add the for statement. - rng := posRange{ - start: loop.Pos(), - end: loop.Pos() + token.Pos(len("for")), - } - result[rng] = struct{}{} - - // Traverse AST to find branch statements within the same for-loop. - ast.Inspect(loop, func(n ast.Node) bool { - switch n.(type) { - case *ast.ForStmt, *ast.RangeStmt: - return loop == n - case *ast.SwitchStmt, *ast.SelectStmt: - return false - } - b, ok := n.(*ast.BranchStmt) - if !ok { - return true - } - if b.Label == nil || info.Uses[b.Label] == info.Defs[loopLabel] { - result[posRange{start: b.Pos(), end: b.End()}] = struct{}{} - } - return true - }) - - // Find continue statements in the same loop or switches/selects. 
- ast.Inspect(loop, func(n ast.Node) bool { - switch n.(type) { - case *ast.ForStmt, *ast.RangeStmt: - return loop == n - } - - if n, ok := n.(*ast.BranchStmt); ok && n.Tok == token.CONTINUE { - result[posRange{start: n.Pos(), end: n.End()}] = struct{}{} - } - return true - }) - - // We don't need to check other for loops if we aren't looking for labeled statements. - if loopLabel == nil { - return - } - - // Find labeled branch statements in any loop. - ast.Inspect(loop, func(n ast.Node) bool { - b, ok := n.(*ast.BranchStmt) - if !ok { - return true - } - // statement with labels that matches the loop - if b.Label != nil && info.Uses[b.Label] == info.Defs[loopLabel] { - result[posRange{start: b.Pos(), end: b.End()}] = struct{}{} - } - return true - }) -} - -func highlightSwitchFlow(path []ast.Node, info *types.Info, result map[posRange]struct{}) { - var switchNode ast.Node - var switchNodeLabel *ast.Ident - stmtLabel := labelFor(path) -Outer: - // Reverse walk the path till we get to the switch statement. - for i := range path { - switch n := path[i].(type) { - case *ast.SwitchStmt: - switchNodeLabel = labelFor(path[i:]) - if stmtLabel == nil || switchNodeLabel == stmtLabel { - switchNode = n - break Outer - } - } - } - // Cursor is not in a switch statement - if switchNode == nil { - return - } - - // Add the switch statement. - rng := posRange{ - start: switchNode.Pos(), - end: switchNode.Pos() + token.Pos(len("switch")), - } - result[rng] = struct{}{} - - // Traverse AST to find break statements within the same switch. 
- ast.Inspect(switchNode, func(n ast.Node) bool { - switch n.(type) { - case *ast.SwitchStmt: - return switchNode == n - case *ast.ForStmt, *ast.RangeStmt, *ast.SelectStmt: - return false - } - - b, ok := n.(*ast.BranchStmt) - if !ok || b.Tok != token.BREAK { - return true - } - - if b.Label == nil || info.Uses[b.Label] == info.Defs[switchNodeLabel] { - result[posRange{start: b.Pos(), end: b.End()}] = struct{}{} - } - return true - }) - - // We don't need to check other switches if we aren't looking for labeled statements. - if switchNodeLabel == nil { - return - } - - // Find labeled break statements in any switch - ast.Inspect(switchNode, func(n ast.Node) bool { - b, ok := n.(*ast.BranchStmt) - if !ok || b.Tok != token.BREAK { - return true - } - - if b.Label != nil && info.Uses[b.Label] == info.Defs[switchNodeLabel] { - result[posRange{start: b.Pos(), end: b.End()}] = struct{}{} - } - - return true - }) -} - -func highlightIdentifier(id *ast.Ident, file *ast.File, info *types.Info, result map[posRange]struct{}) { - highlight := func(n ast.Node) { - result[posRange{start: n.Pos(), end: n.End()}] = struct{}{} - } - - // obj may be nil if the Ident is undefined. - // In this case, the behavior expected by tests is - // to match other undefined Idents of the same name. - obj := info.ObjectOf(id) - - ast.Inspect(file, func(n ast.Node) bool { - switch n := n.(type) { - case *ast.Ident: - if n.Name == id.Name && info.ObjectOf(n) == obj { - highlight(n) - } - - case *ast.ImportSpec: - pkgname, ok := ImportedPkgName(info, n) - if ok && pkgname == obj { - if n.Name != nil { - highlight(n.Name) - } else { - highlight(n) - } - } - } - return true - }) -} - -// ImportedPkgName returns the PkgName object declared by an ImportSpec. -// TODO(adonovan): make this a method of types.Info. 
-func ImportedPkgName(info *types.Info, imp *ast.ImportSpec) (*types.PkgName, bool) { - var obj types.Object - if imp.Name != nil { - obj = info.Defs[imp.Name] - } else { - obj = info.Implicits[imp] - } - pkgname, ok := obj.(*types.PkgName) - return pkgname, ok -} diff --git a/gopls/internal/lsp/source/hover.go b/gopls/internal/lsp/source/hover.go deleted file mode 100644 index 3c58074aff0..00000000000 --- a/gopls/internal/lsp/source/hover.go +++ /dev/null @@ -1,1069 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package source - -import ( - "context" - "encoding/json" - "fmt" - "go/ast" - "go/constant" - "go/doc" - "go/format" - "go/token" - "go/types" - "io/fs" - "path/filepath" - "strconv" - "strings" - "time" - "unicode/utf8" - - "golang.org/x/text/unicode/runenames" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/tokeninternal" - "golang.org/x/tools/internal/typeparams" -) - -// HoverJSON contains information used by hover. It is also the JSON returned -// for the "structured" hover format -type HoverJSON struct { - // Synopsis is a single sentence synopsis of the symbol's documentation. - Synopsis string `json:"synopsis"` - - // FullDocumentation is the symbol's full documentation. - FullDocumentation string `json:"fullDocumentation"` - - // Signature is the symbol's signature. - Signature string `json:"signature"` - - // SingleLine is a single line describing the symbol. - // This is recommended only for use in clients that show a single line for hover. 
- SingleLine string `json:"singleLine"` - - // SymbolName is the human-readable name to use for the symbol in links. - SymbolName string `json:"symbolName"` - - // LinkPath is the pkg.go.dev link for the given symbol. - // For example, the "go/ast" part of "pkg.go.dev/go/ast#Node". - LinkPath string `json:"linkPath"` - - // LinkAnchor is the pkg.go.dev link anchor for the given symbol. - // For example, the "Node" part of "pkg.go.dev/go/ast#Node". - LinkAnchor string `json:"linkAnchor"` -} - -// Hover implements the "textDocument/hover" RPC for Go files. -func Hover(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) (*protocol.Hover, error) { - ctx, done := event.Start(ctx, "source.Hover") - defer done() - - rng, h, err := hover(ctx, snapshot, fh, position) - if err != nil { - return nil, err - } - if h == nil { - return nil, nil - } - hover, err := formatHover(h, snapshot.Options()) - if err != nil { - return nil, err - } - return &protocol.Hover{ - Contents: protocol.MarkupContent{ - Kind: snapshot.Options().PreferredContentFormat, - Value: hover, - }, - Range: rng, - }, nil -} - -// hover computes hover information at the given position. If we do not support -// hovering at the position, it returns _, nil, nil: an error is only returned -// if the position is valid but we fail to compute hover information. -func hover(ctx context.Context, snapshot Snapshot, fh FileHandle, pp protocol.Position) (protocol.Range, *HoverJSON, error) { - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return protocol.Range{}, nil, err - } - pos, err := pgf.PositionPos(pp) - if err != nil { - return protocol.Range{}, nil, err - } - - // Handle hovering over import paths, which do not have an associated - // identifier. - for _, spec := range pgf.File.Imports { - // We are inclusive of the end point here to allow hovering when the cursor - // is just after the import path. 
- if spec.Path.Pos() <= pos && pos <= spec.Path.End() { - return hoverImport(ctx, snapshot, pkg, pgf, spec) - } - } - - // Handle hovering over the package name, which does not have an associated - // object. - // As with import paths, we allow hovering just after the package name. - if pgf.File.Name != nil && pgf.File.Name.Pos() <= pos && pos <= pgf.File.Name.Pos() { - return hoverPackageName(pkg, pgf) - } - - // Handle hovering over (non-import-path) literals. - if path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos); len(path) > 0 { - if lit, _ := path[0].(*ast.BasicLit); lit != nil { - return hoverLit(pgf, lit, pos) - } - } - - // Handle hovering over embed directive argument. - pattern, embedRng := parseEmbedDirective(pgf.Mapper, pp) - if pattern != "" { - return hoverEmbed(fh, embedRng, pattern) - } - - // Handle linkname directive by overriding what to look for. - var linkedRange *protocol.Range // range referenced by linkname directive, or nil - if pkgPath, name, offset := parseLinkname(pgf.Mapper, pp); pkgPath != "" && name != "" { - // rng covering 2nd linkname argument: pkgPath.name. - rng, err := pgf.PosRange(pgf.Tok.Pos(offset), pgf.Tok.Pos(offset+len(pkgPath)+len(".")+len(name))) - if err != nil { - return protocol.Range{}, nil, fmt.Errorf("range over linkname arg: %w", err) - } - linkedRange = &rng - - pkg, pgf, pos, err = findLinkname(ctx, snapshot, PackagePath(pkgPath), name) - if err != nil { - return protocol.Range{}, nil, fmt.Errorf("find linkname: %w", err) - } - } - - // The general case: compute hover information for the object referenced by - // the identifier at pos. - ident, obj, selectedType := referencedObject(pkg, pgf, pos) - if obj == nil || ident == nil { - return protocol.Range{}, nil, nil // no object to hover - } - - // Unless otherwise specified, rng covers the ident being hovered. 
- var rng protocol.Range - if linkedRange != nil { - rng = *linkedRange - } else { - rng, err = pgf.NodeRange(ident) - if err != nil { - return protocol.Range{}, nil, err - } - } - - // By convention, we qualify hover information relative to the package - // from which the request originated. - qf := Qualifier(pgf.File, pkg.GetTypes(), pkg.GetTypesInfo()) - - // Handle type switch identifiers as a special case, since they don't have an - // object. - // - // There's not much useful information to provide. - if selectedType != nil { - fakeObj := types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), selectedType) - signature := types.ObjectString(fakeObj, qf) - return rng, &HoverJSON{ - Signature: signature, - SingleLine: signature, - SymbolName: fakeObj.Name(), - }, nil - } - - // Handle builtins, which don't have a package or position. - if obj.Pkg() == nil { - h, err := hoverBuiltin(ctx, snapshot, obj) - return rng, h, err - } - - // For all other objects, consider the full syntax of their declaration in - // order to correctly compute their documentation, signature, and link. - declPGF, declPos, err := parseFull(ctx, snapshot, pkg.FileSet(), obj.Pos()) - if err != nil { - return protocol.Range{}, nil, fmt.Errorf("re-parsing declaration of %s: %v", obj.Name(), err) - } - decl, spec, field := findDeclInfo([]*ast.File{declPGF.File}, declPos) - comment := chooseDocComment(decl, spec, field) - docText := comment.Text() - - // By default, types.ObjectString provides a reasonable signature. - signature := objectString(obj, qf, declPos, declPGF.Tok, spec) - singleLineSignature := signature - - // TODO(rfindley): we could do much better for inferred signatures. 
- if inferred := inferredSignature(pkg.GetTypesInfo(), ident); inferred != nil { - if s := inferredSignatureString(obj, qf, inferred); s != "" { - signature = s - } - } - - // For "objects defined by a type spec", the signature produced by - // objectString is insufficient: - // (1) large structs are formatted poorly, with no newlines - // (2) we lose inline comments - // - // Furthermore, we include a summary of their method set. - // - // TODO(rfindley): this should use FormatVarType to get proper qualification - // of identifiers, and we should revisit the formatting of method set. - _, isTypeName := obj.(*types.TypeName) - _, isTypeParam := obj.Type().(*typeparams.TypeParam) - if isTypeName && !isTypeParam { - spec, ok := spec.(*ast.TypeSpec) - if !ok { - return protocol.Range{}, nil, bug.Errorf("type name %q without type spec", obj.Name()) - } - spec2 := *spec - // Don't duplicate comments when formatting type specs. - spec2.Doc = nil - spec2.Comment = nil - var b strings.Builder - b.WriteString("type ") - fset := tokeninternal.FileSetFor(declPGF.Tok) - if err := format.Node(&b, fset, &spec2); err != nil { - return protocol.Range{}, nil, err - } - - // Display the declared methods accessible from the identifier. - // - // (The format.Node call above displays any struct fields, public - // or private, in syntactic form. We choose not to recursively - // enumerate any fields and methods promoted from them.) - if !types.IsInterface(obj.Type()) { - sep := "\n\n" - for _, m := range typeutil.IntuitiveMethodSet(obj.Type(), nil) { - // Show direct methods that are either exported, or defined in the - // current package. - if (m.Obj().Exported() || m.Obj().Pkg() == pkg.GetTypes()) && len(m.Index()) == 1 { - b.WriteString(sep) - sep = "\n" - b.WriteString(types.ObjectString(m.Obj(), qf)) - } - } - } - signature = b.String() - } - - // Compute link data (on pkg.go.dev or other documentation host). - // - // If linkPath is empty, the symbol is not linkable. 
- var ( - linkName string // => link title, always non-empty - linkPath string // => link path - anchor string // link anchor - linkMeta *Metadata // metadata for the linked package - ) - { - linkMeta = findFileInDeps(snapshot, pkg.Metadata(), declPGF.URI) - if linkMeta == nil { - return protocol.Range{}, nil, bug.Errorf("no metadata for %s", declPGF.URI) - } - - // For package names, we simply link to their imported package. - if pkgName, ok := obj.(*types.PkgName); ok { - linkName = pkgName.Name() - linkPath = pkgName.Imported().Path() - impID := linkMeta.DepsByPkgPath[PackagePath(pkgName.Imported().Path())] - linkMeta = snapshot.Metadata(impID) - if linkMeta == nil { - return protocol.Range{}, nil, bug.Errorf("no metadata for %s", declPGF.URI) - } - } else { - // For all others, check whether the object is in the package scope, or - // an exported field or method of an object in the package scope. - // - // We try to match pkgsite's heuristics for what is linkable, and what is - // not. - var recv types.Object - switch obj := obj.(type) { - case *types.Func: - sig := obj.Type().(*types.Signature) - if sig.Recv() != nil { - tname := typeToObject(sig.Recv().Type()) - if tname != nil { // beware typed nil - recv = tname - } - } - case *types.Var: - if obj.IsField() { - if spec, ok := spec.(*ast.TypeSpec); ok { - typeName := spec.Name - scopeObj, _ := obj.Pkg().Scope().Lookup(typeName.Name).(*types.TypeName) - if scopeObj != nil { - if st, _ := scopeObj.Type().Underlying().(*types.Struct); st != nil { - for i := 0; i < st.NumFields(); i++ { - if obj == st.Field(i) { - recv = scopeObj - } - } - } - } - } - } - } - - // Even if the object is not available in package documentation, it may - // be embedded in a documented receiver. Detect this by searching - // enclosing selector expressions. - // - // TODO(rfindley): pkgsite doesn't document fields from embedding, just - // methods. 
- if recv == nil || !recv.Exported() { - path := pathEnclosingObjNode(pgf.File, pos) - if enclosing := searchForEnclosing(pkg.GetTypesInfo(), path); enclosing != nil { - recv = enclosing - } else { - recv = nil // note: just recv = ... could result in a typed nil. - } - } - - pkg := obj.Pkg() - if recv != nil { - linkName = fmt.Sprintf("(%s.%s).%s", pkg.Name(), recv.Name(), obj.Name()) - if obj.Exported() && recv.Exported() && pkg.Scope().Lookup(recv.Name()) == recv { - linkPath = pkg.Path() - anchor = fmt.Sprintf("%s.%s", recv.Name(), obj.Name()) - } - } else { - linkName = fmt.Sprintf("%s.%s", pkg.Name(), obj.Name()) - if obj.Exported() && pkg.Scope().Lookup(obj.Name()) == obj { - linkPath = pkg.Path() - anchor = obj.Name() - } - } - } - } - - if snapshot.View().IsGoPrivatePath(linkPath) || linkMeta.ForTest != "" { - linkPath = "" - } else if linkMeta.Module != nil && linkMeta.Module.Version != "" { - mod := linkMeta.Module - linkPath = strings.Replace(linkPath, mod.Path, mod.Path+"@"+mod.Version, 1) - } - - return rng, &HoverJSON{ - Synopsis: doc.Synopsis(docText), - FullDocumentation: docText, - SingleLine: singleLineSignature, - SymbolName: linkName, - Signature: signature, - LinkPath: linkPath, - LinkAnchor: anchor, - }, nil -} - -// hoverBuiltin computes hover information when hovering over a builtin -// identifier. -func hoverBuiltin(ctx context.Context, snapshot Snapshot, obj types.Object) (*HoverJSON, error) { - // TODO(rfindley): link to the correct version of Go documentation. - builtin, err := snapshot.BuiltinFile(ctx) - if err != nil { - return nil, err - } - - if obj.Name() == "Error" { - signature := obj.String() - return &HoverJSON{ - Signature: signature, - SingleLine: signature, - // TODO(rfindley): these are better than the current behavior. 
- // SymbolName: "(error).Error", - // LinkPath: "builtin", - // LinkAnchor: "error.Error", - }, nil - } - - builtinObj := builtin.File.Scope.Lookup(obj.Name()) - if builtinObj == nil { - // All builtins should have a declaration in the builtin file. - return nil, bug.Errorf("no builtin object for %s", obj.Name()) - } - node, _ := builtinObj.Decl.(ast.Node) - if node == nil { - return nil, bug.Errorf("no declaration for %s", obj.Name()) - } - - var comment *ast.CommentGroup - path, _ := astutil.PathEnclosingInterval(builtin.File, node.Pos(), node.End()) - for _, n := range path { - switch n := n.(type) { - case *ast.GenDecl: - // Separate documentation and signature. - comment = n.Doc - node2 := *n - node2.Doc = nil - node = &node2 - case *ast.FuncDecl: - // Ditto. - comment = n.Doc - node2 := *n - node2.Doc = nil - node = &node2 - } - } - - signature := FormatNodeFile(builtin.Tok, node) - // Replace fake types with their common equivalent. - // TODO(rfindley): we should instead use obj.Type(), which would have the - // *actual* types of the builtin call. - signature = replacer.Replace(signature) - - docText := comment.Text() - return &HoverJSON{ - Synopsis: doc.Synopsis(docText), - FullDocumentation: docText, - Signature: signature, - SingleLine: obj.String(), - SymbolName: obj.Name(), - LinkPath: "builtin", - LinkAnchor: obj.Name(), - }, nil -} - -// hoverImport computes hover information when hovering over the import path of -// imp in the file pgf of pkg. 
-// -// If we do not have metadata for the hovered import, it returns _ -func hoverImport(ctx context.Context, snapshot Snapshot, pkg Package, pgf *ParsedGoFile, imp *ast.ImportSpec) (protocol.Range, *HoverJSON, error) { - rng, err := pgf.NodeRange(imp.Path) - if err != nil { - return protocol.Range{}, nil, err - } - - importPath := UnquoteImportPath(imp) - if importPath == "" { - return protocol.Range{}, nil, fmt.Errorf("invalid import path") - } - impID := pkg.Metadata().DepsByImpPath[importPath] - if impID == "" { - return protocol.Range{}, nil, fmt.Errorf("no package data for import %q", importPath) - } - impMetadata := snapshot.Metadata(impID) - if impMetadata == nil { - return protocol.Range{}, nil, bug.Errorf("failed to resolve import ID %q", impID) - } - - // Find the first file with a package doc comment. - var comment *ast.CommentGroup - for _, f := range impMetadata.CompiledGoFiles { - fh, err := snapshot.ReadFile(ctx, f) - if err != nil { - if ctx.Err() != nil { - return protocol.Range{}, nil, ctx.Err() - } - continue - } - pgf, err := snapshot.ParseGo(ctx, fh, ParseHeader) - if err != nil { - if ctx.Err() != nil { - return protocol.Range{}, nil, ctx.Err() - } - continue - } - if pgf.File.Doc != nil { - comment = pgf.File.Doc - break - } - } - - docText := comment.Text() - return rng, &HoverJSON{ - Synopsis: doc.Synopsis(docText), - FullDocumentation: docText, - }, nil -} - -// hoverPackageName computes hover information for the package name of the file -// pgf in pkg. 
-func hoverPackageName(pkg Package, pgf *ParsedGoFile) (protocol.Range, *HoverJSON, error) { - var comment *ast.CommentGroup - for _, pgf := range pkg.CompiledGoFiles() { - if pgf.File.Doc != nil { - comment = pgf.File.Doc - break - } - } - rng, err := pgf.NodeRange(pgf.File.Name) - if err != nil { - return protocol.Range{}, nil, err - } - docText := comment.Text() - return rng, &HoverJSON{ - Synopsis: doc.Synopsis(docText), - FullDocumentation: docText, - // Note: including a signature is redundant, since the cursor is already on the - // package name. - }, nil -} - -// hoverLit computes hover information when hovering over the basic literal lit -// in the file pgf. The provided pos must be the exact position of the cursor, -// as it is used to extract the hovered rune in strings. -// -// For example, hovering over "\u2211" in "foo \u2211 bar" yields: -// -// '∑', U+2211, N-ARY SUMMATION -func hoverLit(pgf *ParsedGoFile, lit *ast.BasicLit, pos token.Pos) (protocol.Range, *HoverJSON, error) { - var ( - value string // if non-empty, a constant value to format in hover - r rune // if non-zero, format a description of this rune in hover - start, end token.Pos // hover span - ) - // Extract a rune from the current position. - // 'Ω', "...Ω...", or 0x03A9 => 'Ω', U+03A9, GREEK CAPITAL LETTER OMEGA - switch lit.Kind { - case token.CHAR: - s, err := strconv.Unquote(lit.Value) - if err != nil { - // If the conversion fails, it's because of an invalid syntax, therefore - // there is no rune to be found. - return protocol.Range{}, nil, nil - } - r, _ = utf8.DecodeRuneInString(s) - if r == utf8.RuneError { - return protocol.Range{}, nil, fmt.Errorf("rune error") - } - start, end = lit.Pos(), lit.End() - - case token.INT: - // Short literals (e.g. 99 decimal, 07 octal) are uninteresting. 
- if len(lit.Value) < 3 { - return protocol.Range{}, nil, nil - } - - v := constant.MakeFromLiteral(lit.Value, lit.Kind, 0) - if v.Kind() != constant.Int { - return protocol.Range{}, nil, nil - } - - switch lit.Value[:2] { - case "0x", "0X": - // As a special case, try to recognize hexadecimal literals as runes if - // they are within the range of valid unicode values. - if v, ok := constant.Int64Val(v); ok && v > 0 && v <= utf8.MaxRune && utf8.ValidRune(rune(v)) { - r = rune(v) - } - fallthrough - case "0o", "0O", "0b", "0B": - // Format the decimal value of non-decimal literals. - value = v.ExactString() - start, end = lit.Pos(), lit.End() - default: - return protocol.Range{}, nil, nil - } - - case token.STRING: - // It's a string, scan only if it contains a unicode escape sequence under or before the - // current cursor position. - litOffset, err := safetoken.Offset(pgf.Tok, lit.Pos()) - if err != nil { - return protocol.Range{}, nil, err - } - offset, err := safetoken.Offset(pgf.Tok, pos) - if err != nil { - return protocol.Range{}, nil, err - } - for i := offset - litOffset; i > 0; i-- { - // Start at the cursor position and search backward for the beginning of a rune escape sequence. - rr, _ := utf8.DecodeRuneInString(lit.Value[i:]) - if rr == utf8.RuneError { - return protocol.Range{}, nil, fmt.Errorf("rune error") - } - if rr == '\\' { - // Got the beginning, decode it. - var tail string - r, _, tail, err = strconv.UnquoteChar(lit.Value[i:], '"') - if err != nil { - // If the conversion fails, it's because of an invalid syntax, - // therefore is no rune to be found. - return protocol.Range{}, nil, nil - } - // Only the rune escape sequence part of the string has to be highlighted, recompute the range. 
- runeLen := len(lit.Value) - (int(i) + len(tail)) - start = token.Pos(int(lit.Pos()) + int(i)) - end = token.Pos(int(start) + runeLen) - break - } - } - } - - if value == "" && r == 0 { // nothing to format - return protocol.Range{}, nil, nil - } - - rng, err := pgf.PosRange(start, end) - if err != nil { - return protocol.Range{}, nil, err - } - - var b strings.Builder - if value != "" { - b.WriteString(value) - } - if r != 0 { - runeName := runenames.Name(r) - if len(runeName) > 0 && runeName[0] == '<' { - // Check if the rune looks like an HTML tag. If so, trim the surrounding <> - // characters to work around https://github.com/microsoft/vscode/issues/124042. - runeName = strings.TrimRight(runeName[1:], ">") - } - if b.Len() > 0 { - b.WriteString(", ") - } - if strconv.IsPrint(r) { - fmt.Fprintf(&b, "'%c', ", r) - } - fmt.Fprintf(&b, "U+%04X, %s", r, runeName) - } - hover := b.String() - return rng, &HoverJSON{ - Synopsis: hover, - FullDocumentation: hover, - }, nil -} - -// hoverEmbed computes hover information for a filepath.Match pattern. -// Assumes that the pattern is relative to the location of fh. -func hoverEmbed(fh FileHandle, rng protocol.Range, pattern string) (protocol.Range, *HoverJSON, error) { - s := &strings.Builder{} - - dir := filepath.Dir(fh.URI().Filename()) - var matches []string - err := filepath.WalkDir(dir, func(abs string, d fs.DirEntry, e error) error { - if e != nil { - return e - } - rel, err := filepath.Rel(dir, abs) - if err != nil { - return err - } - ok, err := filepath.Match(pattern, rel) - if err != nil { - return err - } - if ok && !d.IsDir() { - matches = append(matches, rel) - } - return nil - }) - if err != nil { - return protocol.Range{}, nil, err - } - - for _, m := range matches { - // TODO: Renders each file as separate markdown paragraphs. - // If forcing (a single) newline is possible it might be more clear. 
- fmt.Fprintf(s, "%s\n\n", m) - } - - json := &HoverJSON{ - Signature: fmt.Sprintf("Embedding %q", pattern), - Synopsis: s.String(), - FullDocumentation: s.String(), - } - return rng, json, nil -} - -// inferredSignatureString is a wrapper around the types.ObjectString function -// that adds more information to inferred signatures. It will return an empty string -// if the passed types.Object is not a signature. -func inferredSignatureString(obj types.Object, qf types.Qualifier, inferred *types.Signature) string { - // If the signature type was inferred, prefer the inferred signature with a - // comment showing the generic signature. - if sig, _ := obj.Type().(*types.Signature); sig != nil && typeparams.ForSignature(sig).Len() > 0 && inferred != nil { - obj2 := types.NewFunc(obj.Pos(), obj.Pkg(), obj.Name(), inferred) - str := types.ObjectString(obj2, qf) - // Try to avoid overly long lines. - if len(str) > 60 { - str += "\n" - } else { - str += " " - } - str += "// " + types.TypeString(sig, qf) - return str - } - return "" -} - -// objectString is a wrapper around the types.ObjectString function. -// It handles adding more information to the object string. -// If spec is non-nil, it may be used to format additional declaration -// syntax, and file must be the token.File describing its positions. -func objectString(obj types.Object, qf types.Qualifier, declPos token.Pos, file *token.File, spec ast.Spec) string { - str := types.ObjectString(obj, qf) - - switch obj := obj.(type) { - case *types.Const: - var ( - declaration = obj.Val().String() // default formatted declaration - comment = "" // if non-empty, a clarifying comment - ) - - // Try to use the original declaration. - switch obj.Val().Kind() { - case constant.String: - // Usually the original declaration of a string doesn't carry much information. - // Also strings can be very long. So, just use the constant's value. 
- - default: - if spec, _ := spec.(*ast.ValueSpec); spec != nil { - for i, name := range spec.Names { - if declPos == name.Pos() { - if i < len(spec.Values) { - originalDeclaration := FormatNodeFile(file, spec.Values[i]) - if originalDeclaration != declaration { - comment = declaration - declaration = originalDeclaration - } - } - break - } - } - } - } - - // Special formatting cases. - switch typ := obj.Type().(type) { - case *types.Named: - // Try to add a formatted duration as an inline comment. - pkg := typ.Obj().Pkg() - if pkg.Path() == "time" && typ.Obj().Name() == "Duration" { - if d, ok := constant.Int64Val(obj.Val()); ok { - comment = time.Duration(d).String() - } - } - } - if comment == declaration { - comment = "" - } - - str += " = " + declaration - if comment != "" { - str += " // " + comment - } - } - return str -} - -// HoverDocForObject returns the best doc comment for obj (for which -// fset provides file/line information). -// -// TODO(rfindley): there appears to be zero(!) tests for this functionality. 
-func HoverDocForObject(ctx context.Context, snapshot Snapshot, fset *token.FileSet, obj types.Object) (*ast.CommentGroup, error) { - if _, isTypeName := obj.(*types.TypeName); isTypeName { - if _, isTypeParam := obj.Type().(*typeparams.TypeParam); isTypeParam { - return nil, nil - } - } - - pgf, pos, err := parseFull(ctx, snapshot, fset, obj.Pos()) - if err != nil { - return nil, fmt.Errorf("re-parsing: %v", err) - } - - decl, spec, field := findDeclInfo([]*ast.File{pgf.File}, pos) - return chooseDocComment(decl, spec, field), nil -} - -func chooseDocComment(decl ast.Decl, spec ast.Spec, field *ast.Field) *ast.CommentGroup { - if field != nil { - if field.Doc != nil { - return field.Doc - } - if field.Comment != nil { - return field.Comment - } - return nil - } - switch decl := decl.(type) { - case *ast.FuncDecl: - return decl.Doc - case *ast.GenDecl: - switch spec := spec.(type) { - case *ast.ValueSpec: - if spec.Doc != nil { - return spec.Doc - } - if decl.Doc != nil { - return decl.Doc - } - return spec.Comment - case *ast.TypeSpec: - if spec.Doc != nil { - return spec.Doc - } - if decl.Doc != nil { - return decl.Doc - } - return spec.Comment - } - } - return nil -} - -// parseFull fully parses the file corresponding to position pos (for -// which fset provides file/line information). -// -// It returns the resulting ParsedGoFile as well as new pos contained in the -// parsed file. 
-func parseFull(ctx context.Context, snapshot Snapshot, fset *token.FileSet, pos token.Pos) (*ParsedGoFile, token.Pos, error) { - f := fset.File(pos) - if f == nil { - return nil, 0, bug.Errorf("internal error: no file for position %d", pos) - } - - uri := span.URIFromPath(f.Name()) - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, 0, err - } - - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, 0, err - } - - offset, err := safetoken.Offset(f, pos) - if err != nil { - return nil, 0, bug.Errorf("offset out of bounds in %q", uri) - } - - fullPos, err := safetoken.Pos(pgf.Tok, offset) - if err != nil { - return nil, 0, err - } - - return pgf, fullPos, nil -} - -func formatHover(h *HoverJSON, options *Options) (string, error) { - signature := formatSignature(h, options) - - switch options.HoverKind { - case SingleLine: - return h.SingleLine, nil - case NoDocumentation: - return signature, nil - case Structured: - b, err := json.Marshal(h) - if err != nil { - return "", err - } - return string(b), nil - } - - link := formatLink(h, options) - doc := formatDoc(h, options) - - var b strings.Builder - parts := []string{signature, doc, link} - for i, el := range parts { - if el != "" { - b.WriteString(el) - - // If any elements of the remainder of the list are non-empty, - // write an extra newline. 
- if anyNonEmpty(parts[i+1:]) { - if options.PreferredContentFormat == protocol.Markdown { - b.WriteString("\n\n") - } else { - b.WriteRune('\n') - } - } - } - } - return b.String(), nil -} - -func formatSignature(h *HoverJSON, options *Options) string { - signature := h.Signature - if signature != "" && options.PreferredContentFormat == protocol.Markdown { - signature = fmt.Sprintf("```go\n%s\n```", signature) - } - return signature -} - -func formatLink(h *HoverJSON, options *Options) string { - if !options.LinksInHover || options.LinkTarget == "" || h.LinkPath == "" { - return "" - } - plainLink := BuildLink(options.LinkTarget, h.LinkPath, h.LinkAnchor) - switch options.PreferredContentFormat { - case protocol.Markdown: - return fmt.Sprintf("[`%s` on %s](%s)", h.SymbolName, options.LinkTarget, plainLink) - case protocol.PlainText: - return "" - default: - return plainLink - } -} - -// BuildLink constructs a URL with the given target, path, and anchor. -func BuildLink(target, path, anchor string) string { - link := fmt.Sprintf("https://%s/%s", target, path) - if anchor == "" { - return link - } - return link + "#" + anchor -} - -func formatDoc(h *HoverJSON, options *Options) string { - var doc string - switch options.HoverKind { - case SynopsisDocumentation: - doc = h.Synopsis - case FullDocumentation: - doc = h.FullDocumentation - } - if options.PreferredContentFormat == protocol.Markdown { - return CommentToMarkdown(doc, options) - } - return doc -} - -func anyNonEmpty(x []string) bool { - for _, el := range x { - if el != "" { - return true - } - } - return false -} - -// findDeclInfo returns the syntax nodes involved in the declaration of the -// types.Object with position pos, searching the given list of file syntax -// trees. -// -// Pos may be the position of the name-defining identifier in a FuncDecl, -// ValueSpec, TypeSpec, Field, or as a special case the position of -// Ellipsis.Elt in an ellipsis field. 
-// -// If found, the resulting decl, spec, and field will be the inner-most -// instance of each node type surrounding pos. -// -// If field is non-nil, pos is the position of a field Var. If field is nil and -// spec is non-nil, pos is the position of a Var, Const, or TypeName object. If -// both field and spec are nil and decl is non-nil, pos is the position of a -// Func object. -// -// It returns a nil decl if no object-defining node is found at pos. -// -// TODO(rfindley): this function has tricky semantics, and may be worth unit -// testing and/or refactoring. -func findDeclInfo(files []*ast.File, pos token.Pos) (decl ast.Decl, spec ast.Spec, field *ast.Field) { - // panic(found{}) breaks off the traversal and - // causes the function to return normally. - type found struct{} - defer func() { - switch x := recover().(type) { - case nil: - case found: - default: - panic(x) - } - }() - - // Visit the files in search of the node at pos. - stack := make([]ast.Node, 0, 20) - // Allocate the closure once, outside the loop. - f := func(n ast.Node) bool { - if n != nil { - stack = append(stack, n) // push - } else { - stack = stack[:len(stack)-1] // pop - return false - } - - // Skip subtrees (incl. files) that don't contain the search point. - if !(n.Pos() <= pos && pos < n.End()) { - return false - } - - switch n := n.(type) { - case *ast.Field: - findEnclosingDeclAndSpec := func() { - for i := len(stack) - 1; i >= 0; i-- { - switch n := stack[i].(type) { - case ast.Spec: - spec = n - case ast.Decl: - decl = n - return - } - } - } - - // Check each field name since you can have - // multiple names for the same type expression. - for _, id := range n.Names { - if id.Pos() == pos { - field = n - findEnclosingDeclAndSpec() - panic(found{}) - } - } - - // Check *ast.Field itself. This handles embedded - // fields which have no associated *ast.Ident name. 
- if n.Pos() == pos { - field = n - findEnclosingDeclAndSpec() - panic(found{}) - } - - // Also check "X" in "...X". This makes it easy to format variadic - // signature params properly. - // - // TODO(rfindley): I don't understand this comment. How does finding the - // field in this case make it easier to format variadic signature params? - if ell, ok := n.Type.(*ast.Ellipsis); ok && ell.Elt != nil && ell.Elt.Pos() == pos { - field = n - findEnclosingDeclAndSpec() - panic(found{}) - } - - case *ast.FuncDecl: - if n.Name.Pos() == pos { - decl = n - panic(found{}) - } - - case *ast.GenDecl: - for _, s := range n.Specs { - switch s := s.(type) { - case *ast.TypeSpec: - if s.Name.Pos() == pos { - decl = n - spec = s - panic(found{}) - } - case *ast.ValueSpec: - for _, id := range s.Names { - if id.Pos() == pos { - decl = n - spec = s - panic(found{}) - } - } - } - } - } - return true - } - for _, file := range files { - ast.Inspect(file, f) - } - - return nil, nil, nil -} diff --git a/gopls/internal/lsp/source/implementation.go b/gopls/internal/lsp/source/implementation.go deleted file mode 100644 index d9eb814099b..00000000000 --- a/gopls/internal/lsp/source/implementation.go +++ /dev/null @@ -1,495 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package source - -import ( - "context" - "errors" - "fmt" - "go/ast" - "go/token" - "go/types" - "reflect" - "sort" - "strings" - "sync" - - "golang.org/x/sync/errgroup" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/source/methodsets" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" -) - -// This file defines the new implementation of the 'implementation' -// operator that does not require type-checker data structures for an -// unbounded number of packages. -// -// TODO(adonovan): -// - Audit to ensure robustness in face of type errors. -// - Eliminate false positives due to 'tricky' cases of the global algorithm. -// - Ensure we have test coverage of: -// type aliases -// nil, PkgName, Builtin (all errors) -// any (empty result) -// method of unnamed interface type (e.g. var x interface { f() }) -// (the global algorithm may find implementations of this type -// but will not include it in the index.) - -// Implementation returns a new sorted array of locations of -// declarations of types that implement (or are implemented by) the -// type referred to at the given position. -// -// If the position denotes a method, the computation is applied to its -// receiver type and then its corresponding methods are returned. -func Implementation(ctx context.Context, snapshot Snapshot, f FileHandle, pp protocol.Position) ([]protocol.Location, error) { - ctx, done := event.Start(ctx, "source.Implementation") - defer done() - - locs, err := implementations(ctx, snapshot, f, pp) - if err != nil { - return nil, err - } - - // Sort and de-duplicate locations. 
- sort.Slice(locs, func(i, j int) bool { - return protocol.CompareLocation(locs[i], locs[j]) < 0 - }) - out := locs[:0] - for _, loc := range locs { - if len(out) == 0 || out[len(out)-1] != loc { - out = append(out, loc) - } - } - locs = out - - return locs, nil -} - -func implementations(ctx context.Context, snapshot Snapshot, fh FileHandle, pp protocol.Position) ([]protocol.Location, error) { - obj, pkg, err := implementsObj(ctx, snapshot, fh.URI(), pp) - if err != nil { - return nil, err - } - - var localPkgs []Package - if obj.Pos().IsValid() { // no local package for error or error.Error - declPosn := safetoken.StartPosition(pkg.FileSet(), obj.Pos()) - // Type-check the declaring package (incl. variants) for use - // by the "local" search, which uses type information to - // enumerate all types within the package that satisfy the - // query type, even those defined local to a function. - declURI := span.URIFromPath(declPosn.Filename) - declMetas, err := snapshot.MetadataForFile(ctx, declURI) - if err != nil { - return nil, err - } - RemoveIntermediateTestVariants(&declMetas) - if len(declMetas) == 0 { - return nil, fmt.Errorf("no packages for file %s", declURI) - } - ids := make([]PackageID, len(declMetas)) - for i, m := range declMetas { - ids[i] = m.ID - } - localPkgs, err = snapshot.TypeCheck(ctx, ids...) - if err != nil { - return nil, err - } - } - - // Is the selected identifier a type name or method? - // (For methods, report the corresponding method names.) - var queryType types.Type - var queryMethodID string - switch obj := obj.(type) { - case *types.TypeName: - queryType = obj.Type() - case *types.Func: - // For methods, use the receiver type, which may be anonymous. 
- if recv := obj.Type().(*types.Signature).Recv(); recv != nil { - queryType = recv.Type() - queryMethodID = obj.Id() - } - } - if queryType == nil { - return nil, bug.Errorf("%s is not a type or method", obj.Name()) // should have been handled by implementsObj - } - - // Compute the method-set fingerprint used as a key to the global search. - key, hasMethods := methodsets.KeyOf(queryType) - if !hasMethods { - // A type with no methods yields an empty result. - // (No point reporting that every type satisfies 'any'.) - return nil, nil - } - - // The global search needs to look at every package in the - // forward transitive closure of the workspace; see package - // ./methodsets. - // - // For now we do all the type checking before beginning the search. - // TODO(adonovan): opt: search in parallel topological order - // so that we can overlap index lookup with typechecking. - // I suspect a number of algorithms on the result of TypeCheck could - // be optimized by being applied as soon as each package is available. - globalMetas, err := snapshot.AllMetadata(ctx) - if err != nil { - return nil, err - } - RemoveIntermediateTestVariants(&globalMetas) - globalIDs := make([]PackageID, 0, len(globalMetas)) - - var pkgPath PackagePath - if obj.Pkg() != nil { // nil for error - pkgPath = PackagePath(obj.Pkg().Path()) - } - for _, m := range globalMetas { - if m.PkgPath == pkgPath { - continue // declaring package is handled by local implementation - } - globalIDs = append(globalIDs, m.ID) - } - indexes, err := snapshot.MethodSets(ctx, globalIDs...) - if err != nil { - return nil, fmt.Errorf("querying method sets: %v", err) - } - - // Search local and global packages in parallel. 
- var ( - group errgroup.Group - locsMu sync.Mutex - locs []protocol.Location - ) - // local search - for _, localPkg := range localPkgs { - localPkg := localPkg - group.Go(func() error { - localLocs, err := localImplementations(ctx, snapshot, localPkg, queryType, queryMethodID) - if err != nil { - return err - } - locsMu.Lock() - locs = append(locs, localLocs...) - locsMu.Unlock() - return nil - }) - } - // global search - for _, index := range indexes { - index := index - group.Go(func() error { - for _, res := range index.Search(key, queryMethodID) { - loc := res.Location - // Map offsets to protocol.Locations in parallel (may involve I/O). - group.Go(func() error { - ploc, err := offsetToLocation(ctx, snapshot, loc.Filename, loc.Start, loc.End) - if err != nil { - return err - } - locsMu.Lock() - locs = append(locs, ploc) - locsMu.Unlock() - return nil - }) - } - return nil - }) - } - if err := group.Wait(); err != nil { - return nil, err - } - - return locs, nil -} - -// offsetToLocation converts an offset-based position to a protocol.Location, -// which requires reading the file. -func offsetToLocation(ctx context.Context, snapshot Snapshot, filename string, start, end int) (protocol.Location, error) { - uri := span.URIFromPath(filename) - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return protocol.Location{}, err // cancelled, perhaps - } - content, err := fh.Content() - if err != nil { - return protocol.Location{}, err // nonexistent or deleted ("can't happen") - } - m := protocol.NewMapper(uri, content) - return m.OffsetLocation(start, end) -} - -// implementsObj returns the object to query for implementations, which is a -// type name or method. -// -// The returned Package is the narrowest package containing ppos, which is the -// package using the resulting obj but not necessarily the declaring package. 
-func implementsObj(ctx context.Context, snapshot Snapshot, uri span.URI, ppos protocol.Position) (types.Object, Package, error) { - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, uri) - if err != nil { - return nil, nil, err - } - pos, err := pgf.PositionPos(ppos) - if err != nil { - return nil, nil, err - } - - // This function inherits the limitation of its predecessor in - // requiring the selection to be an identifier (of a type or - // method). But there's no fundamental reason why one could - // not pose this query about any selected piece of syntax that - // has a type and thus a method set. - // (If LSP was more thorough about passing text selections as - // intervals to queries, you could ask about the method set of a - // subexpression such as x.f().) - - // TODO(adonovan): simplify: use objectsAt? - path := pathEnclosingObjNode(pgf.File, pos) - if path == nil { - return nil, nil, ErrNoIdentFound - } - id, ok := path[0].(*ast.Ident) - if !ok { - return nil, nil, ErrNoIdentFound - } - - // Is the object a type or method? Reject other kinds. - obj := pkg.GetTypesInfo().Uses[id] - if obj == nil { - // Check uses first (unlike ObjectOf) so that T in - // struct{T} is treated as a reference to a type, - // not a declaration of a field. - obj = pkg.GetTypesInfo().Defs[id] - } - switch obj := obj.(type) { - case *types.TypeName: - // ok - case *types.Func: - if obj.Type().(*types.Signature).Recv() == nil { - return nil, nil, fmt.Errorf("%s is a function, not a method", id.Name) - } - case nil: - return nil, nil, fmt.Errorf("%s denotes unknown object", id.Name) - default: - // e.g. *types.Var -> "var". 
- kind := strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types.")) - return nil, nil, fmt.Errorf("%s is a %s, not a type", id.Name, kind) - } - - return obj, pkg, nil -} - -// localImplementations searches within pkg for declarations of all -// types that are assignable to/from the query type, and returns a new -// unordered array of their locations. -// -// If methodID is non-empty, the function instead returns the location -// of each type's method (if any) of that ID. -// -// ("Local" refers to the search within the same package, but this -// function's results may include type declarations that are local to -// a function body. The global search index excludes such types -// because reliably naming such types is hard.) -func localImplementations(ctx context.Context, snapshot Snapshot, pkg Package, queryType types.Type, methodID string) ([]protocol.Location, error) { - queryType = methodsets.EnsurePointer(queryType) - - // Scan through all type declarations in the syntax. - var locs []protocol.Location - var methodLocs []methodsets.Location - for _, pgf := range pkg.CompiledGoFiles() { - ast.Inspect(pgf.File, func(n ast.Node) bool { - spec, ok := n.(*ast.TypeSpec) - if !ok { - return true // not a type declaration - } - def := pkg.GetTypesInfo().Defs[spec.Name] - if def == nil { - return true // "can't happen" for types - } - if def.(*types.TypeName).IsAlias() { - return true // skip type aliases to avoid duplicate reporting - } - candidateType := methodsets.EnsurePointer(def.Type()) - - // The historical behavior enshrined by this - // function rejects cases where both are - // (nontrivial) interface types? - // That seems like useful information. - // TODO(adonovan): UX: report I/I pairs too? - // The same question appears in the global algorithm (methodsets). - if !concreteImplementsIntf(candidateType, queryType) { - return true // not assignable - } - - // Ignore types with empty method sets. 
- // (No point reporting that every type satisfies 'any'.) - mset := types.NewMethodSet(candidateType) - if mset.Len() == 0 { - return true - } - - if methodID == "" { - // Found matching type. - locs = append(locs, mustLocation(pgf, spec.Name)) - return true - } - - // Find corresponding method. - // - // We can't use LookupFieldOrMethod because it requires - // the methodID's types.Package, which we don't know. - // We could recursively search pkg.Imports for it, - // but it's easier to walk the method set. - for i := 0; i < mset.Len(); i++ { - method := mset.At(i).Obj() - if method.Id() == methodID { - posn := safetoken.StartPosition(pkg.FileSet(), method.Pos()) - methodLocs = append(methodLocs, methodsets.Location{ - Filename: posn.Filename, - Start: posn.Offset, - End: posn.Offset + len(method.Name()), - }) - break - } - } - return true - }) - } - - // Finally convert method positions to protocol form by reading the files. - for _, mloc := range methodLocs { - loc, err := offsetToLocation(ctx, snapshot, mloc.Filename, mloc.Start, mloc.End) - if err != nil { - return nil, err - } - locs = append(locs, loc) - } - - // Special case: for types that satisfy error, report builtin.go (see #59527). - if types.Implements(queryType, errorInterfaceType) { - loc, err := errorLocation(ctx, snapshot) - if err != nil { - return nil, err - } - locs = append(locs, loc) - } - - return locs, nil -} - -var errorInterfaceType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) - -// errorLocation returns the location of the 'error' type in builtin.go. 
-func errorLocation(ctx context.Context, snapshot Snapshot) (protocol.Location, error) { - pgf, err := snapshot.BuiltinFile(ctx) - if err != nil { - return protocol.Location{}, err - } - for _, decl := range pgf.File.Decls { - if decl, ok := decl.(*ast.GenDecl); ok { - for _, spec := range decl.Specs { - if spec, ok := spec.(*ast.TypeSpec); ok && spec.Name.Name == "error" { - return pgf.NodeLocation(spec.Name) - } - } - } - } - return protocol.Location{}, fmt.Errorf("built-in error type not found") -} - -// concreteImplementsIntf returns true if a is an interface type implemented by -// concrete type b, or vice versa. -func concreteImplementsIntf(a, b types.Type) bool { - aIsIntf, bIsIntf := types.IsInterface(a), types.IsInterface(b) - - // Make sure exactly one is an interface type. - if aIsIntf == bIsIntf { - return false - } - - // Rearrange if needed so "a" is the concrete type. - if aIsIntf { - a, b = b, a - } - - // TODO(adonovan): this should really use GenericAssignableTo - // to report (e.g.) "ArrayList[T] implements List[T]", but - // GenericAssignableTo doesn't work correctly on pointers to - // generic named types. Thus the legacy implementation and the - // "local" part of implementations fail to report generics. - // The global algorithm based on subsets does the right thing. - return types.AssignableTo(a, b) -} - -var ( - // TODO(adonovan): why do various RPC handlers related to - // IncomingCalls return (nil, nil) on the protocol in response - // to this error? That seems like a violation of the protocol. - // Is it perhaps a workaround for VSCode behavior? - errNoObjectFound = errors.New("no object found") -) - -// pathEnclosingObjNode returns the AST path to the object-defining -// node associated with pos. "Object-defining" means either an -// *ast.Ident mapped directly to a types.Object or an ast.Node mapped -// implicitly to a types.Object. 
-func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node { - var ( - path []ast.Node - found bool - ) - - ast.Inspect(f, func(n ast.Node) bool { - if found { - return false - } - - if n == nil { - path = path[:len(path)-1] - return false - } - - path = append(path, n) - - switch n := n.(type) { - case *ast.Ident: - // Include the position directly after identifier. This handles - // the common case where the cursor is right after the - // identifier the user is currently typing. Previously we - // handled this by calling astutil.PathEnclosingInterval twice, - // once for "pos" and once for "pos-1". - found = n.Pos() <= pos && pos <= n.End() - case *ast.ImportSpec: - if n.Path.Pos() <= pos && pos < n.Path.End() { - found = true - // If import spec has a name, add name to path even though - // position isn't in the name. - if n.Name != nil { - path = append(path, n.Name) - } - } - case *ast.StarExpr: - // Follow star expressions to the inner identifier. - if pos == n.Star { - pos = n.X.Pos() - } - } - - return !found - }) - - if len(path) == 0 { - return nil - } - - // Reverse path so leaf is first element. - for i := 0; i < len(path)/2; i++ { - path[i], path[len(path)-1-i] = path[len(path)-1-i], path[i] - } - - return path -} diff --git a/gopls/internal/lsp/source/inlay_hint.go b/gopls/internal/lsp/source/inlay_hint.go deleted file mode 100644 index 9a77d16a093..00000000000 --- a/gopls/internal/lsp/source/inlay_hint.go +++ /dev/null @@ -1,394 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package source - -import ( - "context" - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - "strings" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/typeparams" -) - -const ( - maxLabelLength = 28 -) - -type InlayHintFunc func(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint - -type Hint struct { - Name string - Doc string - Run InlayHintFunc -} - -const ( - ParameterNames = "parameterNames" - AssignVariableTypes = "assignVariableTypes" - ConstantValues = "constantValues" - RangeVariableTypes = "rangeVariableTypes" - CompositeLiteralTypes = "compositeLiteralTypes" - CompositeLiteralFieldNames = "compositeLiteralFields" - FunctionTypeParameters = "functionTypeParameters" -) - -var AllInlayHints = map[string]*Hint{ - AssignVariableTypes: { - Name: AssignVariableTypes, - Doc: "Enable/disable inlay hints for variable types in assign statements:\n```go\n\ti/* int*/, j/* int*/ := 0, len(r)-1\n```", - Run: assignVariableTypes, - }, - ParameterNames: { - Name: ParameterNames, - Doc: "Enable/disable inlay hints for parameter names:\n```go\n\tparseInt(/* str: */ \"123\", /* radix: */ 8)\n```", - Run: parameterNames, - }, - ConstantValues: { - Name: ConstantValues, - Doc: "Enable/disable inlay hints for constant values:\n```go\n\tconst (\n\t\tKindNone Kind = iota/* = 0*/\n\t\tKindPrint/* = 1*/\n\t\tKindPrintf/* = 2*/\n\t\tKindErrorf/* = 3*/\n\t)\n```", - Run: constantValues, - }, - RangeVariableTypes: { - Name: RangeVariableTypes, - Doc: "Enable/disable inlay hints for variable types in range statements:\n```go\n\tfor k/* int*/, v/* string*/ := range []string{} {\n\t\tfmt.Println(k, v)\n\t}\n```", - Run: rangeVariableTypes, - }, - CompositeLiteralTypes: { - Name: CompositeLiteralTypes, - Doc: "Enable/disable inlay hints for composite literal types:\n```go\n\tfor _, c := range []struct {\n\t\tin, want string\n\t}{\n\t\t/*struct{ 
in string; want string }*/{\"Hello, world\", \"dlrow ,olleH\"},\n\t}\n```", - Run: compositeLiteralTypes, - }, - CompositeLiteralFieldNames: { - Name: CompositeLiteralFieldNames, - Doc: "Enable/disable inlay hints for composite literal field names:\n```go\n\t{/*in: */\"Hello, world\", /*want: */\"dlrow ,olleH\"}\n```", - Run: compositeLiteralFields, - }, - FunctionTypeParameters: { - Name: FunctionTypeParameters, - Doc: "Enable/disable inlay hints for implicit type parameters on generic functions:\n```go\n\tmyFoo/*[int, string]*/(1, \"hello\")\n```", - Run: funcTypeParams, - }, -} - -func InlayHint(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng protocol.Range) ([]protocol.InlayHint, error) { - ctx, done := event.Start(ctx, "source.InlayHint") - defer done() - - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, fmt.Errorf("getting file for InlayHint: %w", err) - } - - // Collect a list of the inlay hints that are enabled. - inlayHintOptions := snapshot.Options().InlayHintOptions - var enabledHints []InlayHintFunc - for hint, enabled := range inlayHintOptions.Hints { - if !enabled { - continue - } - if h, ok := AllInlayHints[hint]; ok { - enabledHints = append(enabledHints, h.Run) - } - } - if len(enabledHints) == 0 { - return nil, nil - } - - info := pkg.GetTypesInfo() - q := Qualifier(pgf.File, pkg.GetTypes(), info) - - // Set the range to the full file if the range is not valid. - start, end := pgf.File.Pos(), pgf.File.End() - if pRng.Start.Line < pRng.End.Line || pRng.Start.Character < pRng.End.Character { - // Adjust start and end for the specified range. - var err error - start, end, err = pgf.RangePos(pRng) - if err != nil { - return nil, err - } - } - - var hints []protocol.InlayHint - ast.Inspect(pgf.File, func(node ast.Node) bool { - // If not in range, we can stop looking. 
- if node == nil || node.End() < start || node.Pos() > end { - return false - } - for _, fn := range enabledHints { - hints = append(hints, fn(node, pgf.Mapper, pgf.Tok, info, &q)...) - } - return true - }) - return hints, nil -} - -func parameterNames(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { - callExpr, ok := node.(*ast.CallExpr) - if !ok { - return nil - } - signature, ok := info.TypeOf(callExpr.Fun).(*types.Signature) - if !ok { - return nil - } - - var hints []protocol.InlayHint - for i, v := range callExpr.Args { - start, err := m.PosPosition(tf, v.Pos()) - if err != nil { - continue - } - params := signature.Params() - // When a function has variadic params, we skip args after - // params.Len(). - if i > params.Len()-1 { - break - } - param := params.At(i) - // param.Name is empty for built-ins like append - if param.Name() == "" { - continue - } - // Skip the parameter name hint if the arg matches - // the parameter name. - if i, ok := v.(*ast.Ident); ok && i.Name == param.Name() { - continue - } - - label := param.Name() - if signature.Variadic() && i == params.Len()-1 { - label = label + "..." 
- } - hints = append(hints, protocol.InlayHint{ - Position: start, - Label: buildLabel(label + ":"), - Kind: protocol.Parameter, - PaddingRight: true, - }) - } - return hints -} - -func funcTypeParams(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { - ce, ok := node.(*ast.CallExpr) - if !ok { - return nil - } - id, ok := ce.Fun.(*ast.Ident) - if !ok { - return nil - } - inst := typeparams.GetInstances(info)[id] - if inst.TypeArgs == nil { - return nil - } - start, err := m.PosPosition(tf, id.End()) - if err != nil { - return nil - } - var args []string - for i := 0; i < inst.TypeArgs.Len(); i++ { - args = append(args, inst.TypeArgs.At(i).String()) - } - if len(args) == 0 { - return nil - } - return []protocol.InlayHint{{ - Position: start, - Label: buildLabel("[" + strings.Join(args, ", ") + "]"), - Kind: protocol.Type, - }} -} - -func assignVariableTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint { - stmt, ok := node.(*ast.AssignStmt) - if !ok || stmt.Tok != token.DEFINE { - return nil - } - - var hints []protocol.InlayHint - for _, v := range stmt.Lhs { - if h := variableType(v, m, tf, info, q); h != nil { - hints = append(hints, *h) - } - } - return hints -} - -func rangeVariableTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint { - rStmt, ok := node.(*ast.RangeStmt) - if !ok { - return nil - } - var hints []protocol.InlayHint - if h := variableType(rStmt.Key, m, tf, info, q); h != nil { - hints = append(hints, *h) - } - if h := variableType(rStmt.Value, m, tf, info, q); h != nil { - hints = append(hints, *h) - } - return hints -} - -func variableType(e ast.Expr, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) *protocol.InlayHint { - typ := info.TypeOf(e) - if typ == nil { - return nil - } - end, err := m.PosPosition(tf, e.End()) - if err != nil 
{ - return nil - } - return &protocol.InlayHint{ - Position: end, - Label: buildLabel(types.TypeString(typ, *q)), - Kind: protocol.Type, - PaddingLeft: true, - } -} - -func constantValues(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { - genDecl, ok := node.(*ast.GenDecl) - if !ok || genDecl.Tok != token.CONST { - return nil - } - - var hints []protocol.InlayHint - for _, v := range genDecl.Specs { - spec, ok := v.(*ast.ValueSpec) - if !ok { - continue - } - end, err := m.PosPosition(tf, v.End()) - if err != nil { - continue - } - // Show hints when values are missing or at least one value is not - // a basic literal. - showHints := len(spec.Values) == 0 - checkValues := len(spec.Names) == len(spec.Values) - var values []string - for i, w := range spec.Names { - obj, ok := info.ObjectOf(w).(*types.Const) - if !ok || obj.Val().Kind() == constant.Unknown { - return nil - } - if checkValues { - switch spec.Values[i].(type) { - case *ast.BadExpr: - return nil - case *ast.BasicLit: - default: - if obj.Val().Kind() != constant.Bool { - showHints = true - } - } - } - values = append(values, fmt.Sprintf("%v", obj.Val())) - } - if !showHints || len(values) == 0 { - continue - } - hints = append(hints, protocol.InlayHint{ - Position: end, - Label: buildLabel("= " + strings.Join(values, ", ")), - PaddingLeft: true, - }) - } - return hints -} - -func compositeLiteralFields(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { - compLit, ok := node.(*ast.CompositeLit) - if !ok { - return nil - } - typ := info.TypeOf(compLit) - if typ == nil { - return nil - } - if t, ok := typ.(*types.Pointer); ok { - typ = t.Elem() - } - strct, ok := typ.Underlying().(*types.Struct) - if !ok { - return nil - } - - var hints []protocol.InlayHint - var allEdits []protocol.TextEdit - for i, v := range compLit.Elts { - if _, ok := v.(*ast.KeyValueExpr); !ok { - start, err := 
m.PosPosition(tf, v.Pos()) - if err != nil { - continue - } - if i > strct.NumFields()-1 { - break - } - hints = append(hints, protocol.InlayHint{ - Position: start, - Label: buildLabel(strct.Field(i).Name() + ":"), - Kind: protocol.Parameter, - PaddingRight: true, - }) - allEdits = append(allEdits, protocol.TextEdit{ - Range: protocol.Range{Start: start, End: start}, - NewText: strct.Field(i).Name() + ": ", - }) - } - } - // It is not allowed to have a mix of keyed and unkeyed fields, so - // have the text edits add keys to all fields. - for i := range hints { - hints[i].TextEdits = allEdits - } - return hints -} - -func compositeLiteralTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint { - compLit, ok := node.(*ast.CompositeLit) - if !ok { - return nil - } - typ := info.TypeOf(compLit) - if typ == nil { - return nil - } - if compLit.Type != nil { - return nil - } - prefix := "" - if t, ok := typ.(*types.Pointer); ok { - typ = t.Elem() - prefix = "&" - } - // The type for this composite literal is implicit, add an inlay hint. - start, err := m.PosPosition(tf, compLit.Lbrace) - if err != nil { - return nil - } - return []protocol.InlayHint{{ - Position: start, - Label: buildLabel(fmt.Sprintf("%s%s", prefix, types.TypeString(typ, *q))), - Kind: protocol.Type, - }} -} - -func buildLabel(s string) []protocol.InlayHintLabelPart { - label := protocol.InlayHintLabelPart{ - Value: s, - } - if len(s) > maxLabelLength+len("...") { - label.Value = s[:maxLabelLength] + "..." - } - return []protocol.InlayHintLabelPart{label} -} diff --git a/gopls/internal/lsp/source/inline.go b/gopls/internal/lsp/source/inline.go deleted file mode 100644 index da3e8e5ae0c..00000000000 --- a/gopls/internal/lsp/source/inline.go +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package source - -// This file defines the refactor.inline code action. - -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - "runtime/debug" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/diff" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/refactor/inline" -) - -// EnclosingStaticCall returns the innermost function call enclosing -// the selected range, along with the callee. -func EnclosingStaticCall(pkg Package, pgf *ParsedGoFile, rng protocol.Range) (*ast.CallExpr, *types.Func, error) { - start, end, err := pgf.RangePos(rng) - if err != nil { - return nil, nil, err - } - path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) - - var call *ast.CallExpr -loop: - for _, n := range path { - switch n := n.(type) { - case *ast.FuncLit: - break loop - case *ast.CallExpr: - call = n - break loop - } - } - if call == nil { - return nil, nil, fmt.Errorf("no enclosing call") - } - if safetoken.Line(pgf.Tok, call.Lparen) != safetoken.Line(pgf.Tok, start) { - return nil, nil, fmt.Errorf("enclosing call is not on this line") - } - fn := typeutil.StaticCallee(pkg.GetTypesInfo(), call) - if fn == nil { - return nil, nil, fmt.Errorf("not a static call to a Go function") - } - return call, fn, nil -} - -func inlineCall(ctx context.Context, snapshot Snapshot, fh FileHandle, rng protocol.Range) (_ *token.FileSet, _ *analysis.SuggestedFix, err error) { - // Find enclosing static call. 
- callerPkg, callerPGF, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, nil, err - } - call, fn, err := EnclosingStaticCall(callerPkg, callerPGF, rng) - if err != nil { - return nil, nil, err - } - - // Locate callee by file/line and analyze it. - calleePosn := safetoken.StartPosition(callerPkg.FileSet(), fn.Pos()) - calleePkg, calleePGF, err := NarrowestPackageForFile(ctx, snapshot, span.URIFromPath(calleePosn.Filename)) - if err != nil { - return nil, nil, err - } - var calleeDecl *ast.FuncDecl - for _, decl := range calleePGF.File.Decls { - if decl, ok := decl.(*ast.FuncDecl); ok { - posn := safetoken.StartPosition(calleePkg.FileSet(), decl.Name.Pos()) - if posn.Line == calleePosn.Line && posn.Column == calleePosn.Column { - calleeDecl = decl - break - } - } - } - if calleeDecl == nil { - return nil, nil, fmt.Errorf("can't find callee") - } - - // The inliner assumes that input is well-typed, - // but that is frequently not the case within gopls. - // Until we are able to harden the inliner, - // report panics as errors to avoid crashing the server. - bad := func(p Package) bool { return len(p.GetParseErrors())+len(p.GetTypeErrors()) > 0 } - if bad(calleePkg) || bad(callerPkg) { - defer func() { - if x := recover(); x != nil { - err = bug.Errorf("inlining failed unexpectedly: %v\nstack: %v", - x, debug.Stack()) - } - }() - } - - // Users can consult the gopls event log to see - // why a particular inlining strategy was chosen. - logf := logger(ctx, "inliner", snapshot.Options().VerboseOutput) - - callee, err := inline.AnalyzeCallee(logf, calleePkg.FileSet(), calleePkg.GetTypes(), calleePkg.GetTypesInfo(), calleeDecl, calleePGF.Src) - if err != nil { - return nil, nil, err - } - - // Inline the call. 
- caller := &inline.Caller{ - Fset: callerPkg.FileSet(), - Types: callerPkg.GetTypes(), - Info: callerPkg.GetTypesInfo(), - File: callerPGF.File, - Call: call, - Content: callerPGF.Src, - } - - got, err := inline.Inline(logf, caller, callee) - if err != nil { - return nil, nil, err - } - - // Suggest the fix. - return callerPkg.FileSet(), &analysis.SuggestedFix{ - Message: fmt.Sprintf("inline call of %v", callee), - TextEdits: diffToTextEdits(callerPGF.Tok, diff.Bytes(callerPGF.Src, got)), - }, nil -} - -// TODO(adonovan): change the inliner to instead accept an io.Writer. -func logger(ctx context.Context, name string, verbose bool) func(format string, args ...any) { - if verbose { - return func(format string, args ...any) { - event.Log(ctx, name+": "+fmt.Sprintf(format, args...)) - } - } else { - return func(string, ...any) {} - } -} diff --git a/gopls/internal/lsp/source/options.go b/gopls/internal/lsp/source/options.go deleted file mode 100644 index 74f9bed8d00..00000000000 --- a/gopls/internal/lsp/source/options.go +++ /dev/null @@ -1,1830 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package source - -import ( - "context" - "fmt" - "io" - "path/filepath" - "regexp" - "runtime" - "strings" - "sync" - "time" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/appends" - "golang.org/x/tools/go/analysis/passes/asmdecl" - "golang.org/x/tools/go/analysis/passes/assign" - "golang.org/x/tools/go/analysis/passes/atomic" - "golang.org/x/tools/go/analysis/passes/atomicalign" - "golang.org/x/tools/go/analysis/passes/bools" - "golang.org/x/tools/go/analysis/passes/buildtag" - "golang.org/x/tools/go/analysis/passes/cgocall" - "golang.org/x/tools/go/analysis/passes/composite" - "golang.org/x/tools/go/analysis/passes/copylock" - "golang.org/x/tools/go/analysis/passes/deepequalerrors" - "golang.org/x/tools/go/analysis/passes/defers" - "golang.org/x/tools/go/analysis/passes/directive" - "golang.org/x/tools/go/analysis/passes/errorsas" - "golang.org/x/tools/go/analysis/passes/fieldalignment" - "golang.org/x/tools/go/analysis/passes/httpresponse" - "golang.org/x/tools/go/analysis/passes/ifaceassert" - "golang.org/x/tools/go/analysis/passes/loopclosure" - "golang.org/x/tools/go/analysis/passes/lostcancel" - "golang.org/x/tools/go/analysis/passes/nilfunc" - "golang.org/x/tools/go/analysis/passes/nilness" - "golang.org/x/tools/go/analysis/passes/printf" - "golang.org/x/tools/go/analysis/passes/shadow" - "golang.org/x/tools/go/analysis/passes/shift" - "golang.org/x/tools/go/analysis/passes/slog" - "golang.org/x/tools/go/analysis/passes/sortslice" - "golang.org/x/tools/go/analysis/passes/stdmethods" - "golang.org/x/tools/go/analysis/passes/stringintconv" - "golang.org/x/tools/go/analysis/passes/structtag" - "golang.org/x/tools/go/analysis/passes/testinggoroutine" - "golang.org/x/tools/go/analysis/passes/tests" - "golang.org/x/tools/go/analysis/passes/timeformat" - "golang.org/x/tools/go/analysis/passes/unmarshal" - "golang.org/x/tools/go/analysis/passes/unreachable" - "golang.org/x/tools/go/analysis/passes/unsafeptr" - 
"golang.org/x/tools/go/analysis/passes/unusedresult" - "golang.org/x/tools/go/analysis/passes/unusedwrite" - "golang.org/x/tools/gopls/internal/lsp/analysis/deprecated" - "golang.org/x/tools/gopls/internal/lsp/analysis/embeddirective" - "golang.org/x/tools/gopls/internal/lsp/analysis/fillreturns" - "golang.org/x/tools/gopls/internal/lsp/analysis/fillstruct" - "golang.org/x/tools/gopls/internal/lsp/analysis/infertypeargs" - "golang.org/x/tools/gopls/internal/lsp/analysis/nonewvars" - "golang.org/x/tools/gopls/internal/lsp/analysis/noresultvalues" - "golang.org/x/tools/gopls/internal/lsp/analysis/simplifycompositelit" - "golang.org/x/tools/gopls/internal/lsp/analysis/simplifyrange" - "golang.org/x/tools/gopls/internal/lsp/analysis/simplifyslice" - "golang.org/x/tools/gopls/internal/lsp/analysis/stubmethods" - "golang.org/x/tools/gopls/internal/lsp/analysis/undeclaredname" - "golang.org/x/tools/gopls/internal/lsp/analysis/unusedparams" - "golang.org/x/tools/gopls/internal/lsp/analysis/unusedvariable" - "golang.org/x/tools/gopls/internal/lsp/analysis/useany" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/diff" - "golang.org/x/tools/internal/diff/myers" -) - -var ( - optionsOnce sync.Once - defaultOptions *Options -) - -// DefaultOptions is the options that are used for Gopls execution independent -// of any externally provided configuration (LSP initialization, command -// invocation, etc.). 
-func DefaultOptions(overrides ...func(*Options)) *Options { - optionsOnce.Do(func() { - var commands []string - for _, c := range command.Commands { - commands = append(commands, c.ID()) - } - defaultOptions = &Options{ - ClientOptions: ClientOptions{ - InsertTextFormat: protocol.PlainTextTextFormat, - PreferredContentFormat: protocol.Markdown, - ConfigurationSupported: true, - DynamicConfigurationSupported: true, - DynamicRegistrationSemanticTokensSupported: true, - DynamicWatchedFilesSupported: true, - LineFoldingOnly: false, - HierarchicalDocumentSymbolSupport: true, - }, - ServerOptions: ServerOptions{ - SupportedCodeActions: map[FileKind]map[protocol.CodeActionKind]bool{ - Go: { - protocol.SourceFixAll: true, - protocol.SourceOrganizeImports: true, - protocol.QuickFix: true, - protocol.RefactorRewrite: true, - protocol.RefactorInline: true, - protocol.RefactorExtract: true, - }, - Mod: { - protocol.SourceOrganizeImports: true, - protocol.QuickFix: true, - }, - Work: {}, - Sum: {}, - Tmpl: {}, - }, - SupportedCommands: commands, - }, - UserOptions: UserOptions{ - BuildOptions: BuildOptions{ - ExpandWorkspaceToModule: true, - MemoryMode: ModeNormal, - DirectoryFilters: []string{"-**/node_modules"}, - TemplateExtensions: []string{}, - StandaloneTags: []string{"ignore"}, - }, - UIOptions: UIOptions{ - DiagnosticOptions: DiagnosticOptions{ - Annotations: map[Annotation]bool{ - Bounds: true, - Escape: true, - Inline: true, - Nil: true, - }, - Vulncheck: ModeVulncheckOff, - DiagnosticsDelay: 1 * time.Second, - DiagnosticsTrigger: DiagnosticsOnEdit, - AnalysisProgressReporting: true, - }, - InlayHintOptions: InlayHintOptions{}, - DocumentationOptions: DocumentationOptions{ - HoverKind: FullDocumentation, - LinkTarget: "pkg.go.dev", - LinksInHover: true, - }, - NavigationOptions: NavigationOptions{ - ImportShortcut: BothShortcuts, - SymbolMatcher: SymbolFastFuzzy, - SymbolStyle: DynamicSymbols, - SymbolScope: AllSymbolScope, - }, - CompletionOptions: 
CompletionOptions{ - Matcher: Fuzzy, - CompletionBudget: 100 * time.Millisecond, - ExperimentalPostfixCompletions: true, - CompleteFunctionCalls: true, - }, - Codelenses: map[string]bool{ - string(command.Generate): true, - string(command.RegenerateCgo): true, - string(command.Tidy): true, - string(command.GCDetails): false, - string(command.UpgradeDependency): true, - string(command.Vendor): true, - // TODO(hyangah): enable command.RunGovulncheck. - }, - }, - }, - InternalOptions: InternalOptions{ - LiteralCompletions: true, - TempModfile: true, - CompleteUnimported: true, - CompletionDocumentation: true, - DeepCompletion: true, - ChattyDiagnostics: true, - NewDiff: "new", - SubdirWatchPatterns: SubdirWatchPatternsAuto, - ReportAnalysisProgressAfter: 5 * time.Second, - TelemetryPrompt: false, - LinkifyShowMessage: false, - }, - Hooks: Hooks{ - // TODO(adonovan): switch to new diff.Strings implementation. - ComputeEdits: myers.ComputeEdits, - URLRegexp: urlRegexp(), - DefaultAnalyzers: defaultAnalyzers(), - TypeErrorAnalyzers: typeErrorAnalyzers(), - ConvenienceAnalyzers: convenienceAnalyzers(), - StaticcheckAnalyzers: map[string]*Analyzer{}, - GoDiff: true, - }, - } - }) - options := defaultOptions.Clone() - for _, override := range overrides { - if override != nil { - override(options) - } - } - return options -} - -// Options holds various configuration that affects Gopls execution, organized -// by the nature or origin of the settings. -type Options struct { - ClientOptions - ServerOptions - UserOptions - InternalOptions - Hooks -} - -// IsAnalyzerEnabled reports whether an analyzer with the given name is -// enabled. -// -// TODO(rfindley): refactor to simplify this function. We no longer need the -// different categories of analyzer. 
-func (opts *Options) IsAnalyzerEnabled(name string) bool { - for _, amap := range []map[string]*Analyzer{opts.DefaultAnalyzers, opts.TypeErrorAnalyzers, opts.ConvenienceAnalyzers, opts.StaticcheckAnalyzers} { - for _, analyzer := range amap { - if analyzer.Analyzer.Name == name && analyzer.IsEnabled(opts) { - return true - } - } - } - return false -} - -// ClientOptions holds LSP-specific configuration that is provided by the -// client. -type ClientOptions struct { - ClientInfo *protocol.Msg_XInitializeParams_clientInfo - InsertTextFormat protocol.InsertTextFormat - ConfigurationSupported bool - DynamicConfigurationSupported bool - DynamicRegistrationSemanticTokensSupported bool - DynamicWatchedFilesSupported bool - PreferredContentFormat protocol.MarkupKind - LineFoldingOnly bool - HierarchicalDocumentSymbolSupport bool - SemanticTypes []string - SemanticMods []string - RelatedInformationSupported bool - CompletionTags bool - CompletionDeprecated bool - SupportedResourceOperations []protocol.ResourceOperationKind -} - -// ServerOptions holds LSP-specific configuration that is provided by the -// server. -type ServerOptions struct { - SupportedCodeActions map[FileKind]map[protocol.CodeActionKind]bool - SupportedCommands []string -} - -type BuildOptions struct { - // BuildFlags is the set of flags passed on to the build system when invoked. - // It is applied to queries like `go list`, which is used when discovering files. - // The most common use is to set `-tags`. - BuildFlags []string - - // Env adds environment variables to external commands run by `gopls`, most notably `go list`. - Env map[string]string - - // DirectoryFilters can be used to exclude unwanted directories from the - // workspace. By default, all directories are included. Filters are an - // operator, `+` to include and `-` to exclude, followed by a path prefix - // relative to the workspace folder. 
They are evaluated in order, and - // the last filter that applies to a path controls whether it is included. - // The path prefix can be empty, so an initial `-` excludes everything. - // - // DirectoryFilters also supports the `**` operator to match 0 or more directories. - // - // Examples: - // - // Exclude node_modules at current depth: `-node_modules` - // - // Exclude node_modules at any depth: `-**/node_modules` - // - // Include only project_a: `-` (exclude everything), `+project_a` - // - // Include only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules` - DirectoryFilters []string - - // TemplateExtensions gives the extensions of file names that are treateed - // as template files. (The extension - // is the part of the file name after the final dot.) - TemplateExtensions []string - - // MemoryMode controls the tradeoff `gopls` makes between memory usage and - // correctness. - // - // Values other than `Normal` are untested and may break in surprising ways. - MemoryMode MemoryMode `status:"experimental"` - - // ExpandWorkspaceToModule instructs `gopls` to adjust the scope of the - // workspace to find the best available module root. `gopls` first looks for - // a go.mod file in any parent directory of the workspace folder, expanding - // the scope to that directory if it exists. If no viable parent directory is - // found, gopls will check if there is exactly one child directory containing - // a go.mod file, narrowing the scope to that directory if it exists. - ExpandWorkspaceToModule bool `status:"experimental"` - - // AllowModfileModifications disables -mod=readonly, allowing imports from - // out-of-scope modules. This option will eventually be removed. - AllowModfileModifications bool `status:"experimental"` - - // AllowImplicitNetworkAccess disables GOPROXY=off, allowing implicit module - // downloads rather than requiring user action. This option will eventually - // be removed. 
- AllowImplicitNetworkAccess bool `status:"experimental"` - - // StandaloneTags specifies a set of build constraints that identify - // individual Go source files that make up the entire main package of an - // executable. - // - // A common example of standalone main files is the convention of using the - // directive `//go:build ignore` to denote files that are not intended to be - // included in any package, for example because they are invoked directly by - // the developer using `go run`. - // - // Gopls considers a file to be a standalone main file if and only if it has - // package name "main" and has a build directive of the exact form - // "//go:build tag" or "// +build tag", where tag is among the list of tags - // configured by this setting. Notably, if the build constraint is more - // complicated than a simple tag (such as the composite constraint - // `//go:build tag && go1.18`), the file is not considered to be a standalone - // main file. - // - // This setting is only supported when gopls is built with Go 1.16 or later. - StandaloneTags []string -} - -type UIOptions struct { - DocumentationOptions - CompletionOptions - NavigationOptions - DiagnosticOptions - InlayHintOptions - - // Codelenses overrides the enabled/disabled state of code lenses. See the - // "Code Lenses" section of the - // [Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#code-lenses) - // for the list of supported lenses. - // - // Example Usage: - // - // ```json5 - // "gopls": { - // ... - // "codelenses": { - // "generate": false, // Don't show the `go generate` lens. - // "gc_details": true // Show a code lens toggling the display of gc's choices. - // } - // ... - // } - // ``` - Codelenses map[string]bool - - // SemanticTokens controls whether the LSP server will send - // semantic tokens to the client. 
- SemanticTokens bool `status:"experimental"` - - // NoSemanticString turns off the sending of the semantic token 'string' - NoSemanticString bool `status:"experimental"` - - // NoSemanticNumber turns off the sending of the semantic token 'number' - NoSemanticNumber bool `status:"experimental"` -} - -type CompletionOptions struct { - // Placeholders enables placeholders for function parameters or struct - // fields in completion responses. - UsePlaceholders bool - - // CompletionBudget is the soft latency goal for completion requests. Most - // requests finish in a couple milliseconds, but in some cases deep - // completions can take much longer. As we use up our budget we - // dynamically reduce the search scope to ensure we return timely - // results. Zero means unlimited. - CompletionBudget time.Duration `status:"debug"` - - // Matcher sets the algorithm that is used when calculating completion - // candidates. - Matcher Matcher `status:"advanced"` - - // ExperimentalPostfixCompletions enables artificial method snippets - // such as "someSlice.sort!". - ExperimentalPostfixCompletions bool `status:"experimental"` - - // CompleteFunctionCalls enables function call completion. - // - // When completing a statement, or when a function return type matches the - // expected of the expression being completed, completion may suggest call - // expressions (i.e. may include parentheses). - CompleteFunctionCalls bool -} - -type DocumentationOptions struct { - // HoverKind controls the information that appears in the hover text. - // SingleLine and Structured are intended for use only by authors of editor plugins. - HoverKind HoverKind - - // LinkTarget controls where documentation links go. - // It might be one of: - // - // * `"godoc.org"` - // * `"pkg.go.dev"` - // - // If company chooses to use its own `godoc.org`, its address can be used as well. - // - // Modules matching the GOPRIVATE environment variable will not have - // documentation links in hover. 
- LinkTarget string - - // LinksInHover toggles the presence of links to documentation in hover. - LinksInHover bool -} - -type FormattingOptions struct { - // Local is the equivalent of the `goimports -local` flag, which puts - // imports beginning with this string after third-party packages. It should - // be the prefix of the import path whose imports should be grouped - // separately. - Local string - - // Gofumpt indicates if we should run gofumpt formatting. - Gofumpt bool -} - -type DiagnosticOptions struct { - // Analyses specify analyses that the user would like to enable or disable. - // A map of the names of analysis passes that should be enabled/disabled. - // A full list of analyzers that gopls uses can be found in - // [analyzers.md](https://github.com/golang/tools/blob/master/gopls/doc/analyzers.md). - // - // Example Usage: - // - // ```json5 - // ... - // "analyses": { - // "unreachable": false, // Disable the unreachable analyzer. - // "unusedparams": true // Enable the unusedparams analyzer. - // } - // ... - // ``` - Analyses map[string]bool - - // Staticcheck enables additional analyses from staticcheck.io. - // These analyses are documented on - // [Staticcheck's website](https://staticcheck.io/docs/checks/). - Staticcheck bool `status:"experimental"` - - // Annotations specifies the various kinds of optimization diagnostics - // that should be reported by the gc_details command. - Annotations map[Annotation]bool `status:"experimental"` - - // Vulncheck enables vulnerability scanning. - Vulncheck VulncheckMode `status:"experimental"` - - // DiagnosticsDelay controls the amount of time that gopls waits - // after the most recent file modification before computing deep diagnostics. - // Simple diagnostics (parsing and type-checking) are always run immediately - // on recently modified packages. - // - // This option must be set to a valid duration string, for example `"250ms"`. 
- DiagnosticsDelay time.Duration `status:"advanced"` - - // DiagnosticsTrigger controls when to run diagnostics. - DiagnosticsTrigger DiagnosticsTrigger `status:"experimental"` - - // AnalysisProgressReporting controls whether gopls sends progress - // notifications when construction of its index of analysis facts is taking a - // long time. Cancelling these notifications will cancel the indexing task, - // though it will restart after the next change in the workspace. - // - // When a package is opened for the first time and heavyweight analyses such as - // staticcheck are enabled, it can take a while to construct the index of - // analysis facts for all its dependencies. The index is cached in the - // filesystem, so subsequent analysis should be faster. - AnalysisProgressReporting bool -} - -type InlayHintOptions struct { - // Hints specify inlay hints that users want to see. A full list of hints - // that gopls uses can be found in - // [inlayHints.md](https://github.com/golang/tools/blob/master/gopls/doc/inlayHints.md). - Hints map[string]bool `status:"experimental"` -} - -type NavigationOptions struct { - // ImportShortcut specifies whether import statements should link to - // documentation or go to definitions. - ImportShortcut ImportShortcut - - // SymbolMatcher sets the algorithm that is used when finding workspace symbols. - SymbolMatcher SymbolMatcher `status:"advanced"` - - // SymbolStyle controls how symbols are qualified in symbol responses. - // - // Example Usage: - // - // ```json5 - // "gopls": { - // ... - // "symbolStyle": "Dynamic", - // ... - // } - // ``` - SymbolStyle SymbolStyle `status:"advanced"` - - // SymbolScope controls which packages are searched for workspace/symbol - // requests. The default value, "workspace", searches only workspace - // packages. The legacy behavior, "all", causes all loaded packages to be - // searched, including dependencies; this is more expensive and may return - // unwanted results. 
- SymbolScope SymbolScope -} - -// UserOptions holds custom Gopls configuration (not part of the LSP) that is -// modified by the client. -type UserOptions struct { - BuildOptions - UIOptions - FormattingOptions - - // VerboseOutput enables additional debug logging. - VerboseOutput bool `status:"debug"` -} - -// EnvSlice returns Env as a slice of k=v strings. -func (u *UserOptions) EnvSlice() []string { - var result []string - for k, v := range u.Env { - result = append(result, fmt.Sprintf("%v=%v", k, v)) - } - return result -} - -// SetEnvSlice sets Env from a slice of k=v strings. -func (u *UserOptions) SetEnvSlice(env []string) { - u.Env = map[string]string{} - for _, kv := range env { - split := strings.SplitN(kv, "=", 2) - if len(split) != 2 { - continue - } - u.Env[split[0]] = split[1] - } -} - -// DiffFunction is the type for a function that produces a set of edits that -// convert from the before content to the after content. -type DiffFunction func(before, after string) []diff.Edit - -// Hooks contains configuration that is provided to the Gopls command by the -// main package. -type Hooks struct { - // LicensesText holds third party licenses for software used by gopls. - LicensesText string - - // GoDiff is used in gopls/hooks to get Myers' diff - GoDiff bool - - // Whether staticcheck is supported. - StaticcheckSupported bool - - // ComputeEdits is used to compute edits between file versions. - ComputeEdits DiffFunction - - // URLRegexp is used to find potential URLs in comments/strings. - // - // Not all matches are shown to the user: if the matched URL is not detected - // as valid, it will be skipped. - URLRegexp *regexp.Regexp - - // GofumptFormat allows the gopls module to wire-in a call to - // gofumpt/format.Source. langVersion and modulePath are used for some - // Gofumpt formatting rules -- see the Gofumpt documentation for details. 
- GofumptFormat func(ctx context.Context, langVersion, modulePath string, src []byte) ([]byte, error) - - DefaultAnalyzers map[string]*Analyzer - TypeErrorAnalyzers map[string]*Analyzer - ConvenienceAnalyzers map[string]*Analyzer - StaticcheckAnalyzers map[string]*Analyzer -} - -// InternalOptions contains settings that are not intended for use by the -// average user. These may be settings used by tests or outdated settings that -// will soon be deprecated. Some of these settings may not even be configurable -// by the user. -// -// TODO(rfindley): even though these settings are not intended for -// modification, some of them should be surfaced in our documentation. -type InternalOptions struct { - // LiteralCompletions controls whether literal candidates such as - // "&someStruct{}" are offered. Tests disable this flag to simplify - // their expected values. - // - // TODO(rfindley): this is almost unnecessary now. Remove it along with the - // old marker tests. - LiteralCompletions bool - - // VerboseWorkDoneProgress controls whether the LSP server should send - // progress reports for all work done outside the scope of an RPC. - // Used by the regression tests. - VerboseWorkDoneProgress bool - - // The following options were previously available to users, but they - // really shouldn't be configured by anyone other than "power users". - - // CompletionDocumentation enables documentation with completion results. - CompletionDocumentation bool - - // CompleteUnimported enables completion for packages that you do not - // currently import. - CompleteUnimported bool - - // DeepCompletion enables the ability to return completions from deep - // inside relevant entities, rather than just the locally accessible ones. 
- // - // Consider this example: - // - // ```go - // package main - // - // import "fmt" - // - // type wrapString struct { - // str string - // } - // - // func main() { - // x := wrapString{"hello world"} - // fmt.Printf(<>) - // } - // ``` - // - // At the location of the `<>` in this program, deep completion would suggest - // the result `x.str`. - DeepCompletion bool - - // TempModfile controls the use of the -modfile flag in Go 1.14. - TempModfile bool - - // ShowBugReports causes a message to be shown when the first bug is reported - // on the server. - // This option applies only during initialization. - ShowBugReports bool - - // NewDiff controls the choice of the new diff implementation. It can be - // 'new', 'old', or 'both', which is the default. 'both' computes diffs with - // both algorithms, checks that the new algorithm has worked, and write some - // summary statistics to a file in os.TmpDir(). - NewDiff string - - // ChattyDiagnostics controls whether to report file diagnostics for each - // file change. If unset, gopls only reports diagnostics when they change, or - // when a file is opened or closed. - ChattyDiagnostics bool - - // SubdirWatchPatterns configures the file watching glob patterns registered - // by gopls. - // - // Some clients (namely VS Code) do not send workspace/didChangeWatchedFile - // notifications for files contained in a directory when that directory is - // deleted: - // https://github.com/microsoft/vscode/issues/109754 - // - // In this case, gopls would miss important notifications about deleted - // packages. To work around this, gopls registers a watch pattern for each - // directory containing Go files. - // - // Unfortunately, other clients experience performance problems with this - // many watch patterns, so there is no single behavior that works well for - // all clients. - // - // The "subdirWatchPatterns" setting allows configuring this behavior. 
Its - // default value of "auto" attempts to guess the correct behavior based on - // the client name. We'd love to avoid this specialization, but as described - // above there is no single value that works for all clients. - // - // If any LSP client does not behave well with the default value (for - // example, if like VS Code it drops file notifications), please file an - // issue. - SubdirWatchPatterns SubdirWatchPatterns - - // ReportAnalysisProgressAfter sets the duration for gopls to wait before starting - // progress reporting for ongoing go/analysis passes. - // - // It is intended to be used for testing only. - ReportAnalysisProgressAfter time.Duration - - // TelemetryPrompt controls whether gopls prompts about enabling Go telemetry. - // - // Once the prompt is answered, gopls doesn't ask again, but TelemetryPrompt - // can prevent the question from ever being asked in the first place. - TelemetryPrompt bool - - // LinkifyShowMessage controls whether the client wants gopls - // to linkify links in showMessage. e.g. [go.dev](https://go.dev). - LinkifyShowMessage bool -} - -type SubdirWatchPatterns string - -const ( - SubdirWatchPatternsOn SubdirWatchPatterns = "on" - SubdirWatchPatternsOff SubdirWatchPatterns = "off" - SubdirWatchPatternsAuto SubdirWatchPatterns = "auto" -) - -type ImportShortcut string - -const ( - BothShortcuts ImportShortcut = "Both" - LinkShortcut ImportShortcut = "Link" - DefinitionShortcut ImportShortcut = "Definition" -) - -func (s ImportShortcut) ShowLinks() bool { - return s == BothShortcuts || s == LinkShortcut -} - -func (s ImportShortcut) ShowDefinition() bool { - return s == BothShortcuts || s == DefinitionShortcut -} - -type Matcher string - -const ( - Fuzzy Matcher = "Fuzzy" - CaseInsensitive Matcher = "CaseInsensitive" - CaseSensitive Matcher = "CaseSensitive" -) - -// A SymbolMatcher controls the matching of symbols for workspace/symbol -// requests. 
-type SymbolMatcher string - -const ( - SymbolFuzzy SymbolMatcher = "Fuzzy" - SymbolFastFuzzy SymbolMatcher = "FastFuzzy" - SymbolCaseInsensitive SymbolMatcher = "CaseInsensitive" - SymbolCaseSensitive SymbolMatcher = "CaseSensitive" -) - -// A SymbolStyle controls the formatting of symbols in workspace/symbol results. -type SymbolStyle string - -const ( - // PackageQualifiedSymbols is package qualified symbols i.e. - // "pkg.Foo.Field". - PackageQualifiedSymbols SymbolStyle = "Package" - // FullyQualifiedSymbols is fully qualified symbols, i.e. - // "path/to/pkg.Foo.Field". - FullyQualifiedSymbols SymbolStyle = "Full" - // DynamicSymbols uses whichever qualifier results in the highest scoring - // match for the given symbol query. Here a "qualifier" is any "/" or "." - // delimited suffix of the fully qualified symbol. i.e. "to/pkg.Foo.Field" or - // just "Foo.Field". - DynamicSymbols SymbolStyle = "Dynamic" -) - -// A SymbolScope controls the search scope for workspace/symbol requests. -type SymbolScope string - -const ( - // WorkspaceSymbolScope matches symbols in workspace packages only. - WorkspaceSymbolScope SymbolScope = "workspace" - // AllSymbolScope matches symbols in any loaded package, including - // dependencies. - AllSymbolScope SymbolScope = "all" -) - -type HoverKind string - -const ( - SingleLine HoverKind = "SingleLine" - NoDocumentation HoverKind = "NoDocumentation" - SynopsisDocumentation HoverKind = "SynopsisDocumentation" - FullDocumentation HoverKind = "FullDocumentation" - - // Structured is an experimental setting that returns a structured hover format. - // This format separates the signature from the documentation, so that the client - // can do more manipulation of these fields. - // - // This should only be used by clients that support this behavior. 
- Structured HoverKind = "Structured" -) - -type MemoryMode string - -const ( - ModeNormal MemoryMode = "Normal" - // In DegradeClosed mode, `gopls` will collect less information about - // packages without open files. As a result, features like Find - // References and Rename will miss results in such packages. - ModeDegradeClosed MemoryMode = "DegradeClosed" -) - -type VulncheckMode string - -const ( - // Disable vulnerability analysis. - ModeVulncheckOff VulncheckMode = "Off" - // In Imports mode, `gopls` will report vulnerabilities that affect packages - // directly and indirectly used by the analyzed main module. - ModeVulncheckImports VulncheckMode = "Imports" - - // TODO: VulncheckRequire, VulncheckCallgraph -) - -type DiagnosticsTrigger string - -const ( - // Trigger diagnostics on file edit and save. (default) - DiagnosticsOnEdit DiagnosticsTrigger = "Edit" - // Trigger diagnostics only on file save. Events like initial workspace load - // or configuration change will still trigger diagnostics. - DiagnosticsOnSave DiagnosticsTrigger = "Save" - // TODO: support "Manual"? -) - -type OptionResults []OptionResult - -type OptionResult struct { - Name string - Value interface{} - Error error -} - -func SetOptions(options *Options, opts interface{}) OptionResults { - var results OptionResults - switch opts := opts.(type) { - case nil: - case map[string]interface{}: - // If the user's settings contains "allExperiments", set that first, - // and then let them override individual settings independently. - var enableExperiments bool - for name, value := range opts { - if b, ok := value.(bool); name == "allExperiments" && ok && b { - enableExperiments = true - options.EnableAllExperiments() - } - } - seen := map[string]struct{}{} - for name, value := range opts { - results = append(results, options.set(name, value, seen)) - } - // Finally, enable any experimental features that are specified in - // maps, which allows users to individually toggle them on or off. 
- if enableExperiments { - options.enableAllExperimentMaps() - } - default: - results = append(results, OptionResult{ - Value: opts, - Error: fmt.Errorf("Invalid options type %T", opts), - }) - } - return results -} - -func (o *Options) ForClientCapabilities(clientName *protocol.Msg_XInitializeParams_clientInfo, caps protocol.ClientCapabilities) { - o.ClientInfo = clientName - // Check if the client supports snippets in completion items. - if caps.Workspace.WorkspaceEdit != nil { - o.SupportedResourceOperations = caps.Workspace.WorkspaceEdit.ResourceOperations - } - if c := caps.TextDocument.Completion; c.CompletionItem.SnippetSupport { - o.InsertTextFormat = protocol.SnippetTextFormat - } - // Check if the client supports configuration messages. - o.ConfigurationSupported = caps.Workspace.Configuration - o.DynamicConfigurationSupported = caps.Workspace.DidChangeConfiguration.DynamicRegistration - o.DynamicRegistrationSemanticTokensSupported = caps.TextDocument.SemanticTokens.DynamicRegistration - o.DynamicWatchedFilesSupported = caps.Workspace.DidChangeWatchedFiles.DynamicRegistration - - // Check which types of content format are supported by this client. - if hover := caps.TextDocument.Hover; hover != nil && len(hover.ContentFormat) > 0 { - o.PreferredContentFormat = hover.ContentFormat[0] - } - // Check if the client supports only line folding. - - if fr := caps.TextDocument.FoldingRange; fr != nil { - o.LineFoldingOnly = fr.LineFoldingOnly - } - // Check if the client supports hierarchical document symbols. - o.HierarchicalDocumentSymbolSupport = caps.TextDocument.DocumentSymbol.HierarchicalDocumentSymbolSupport - - // Client's semantic tokens - o.SemanticTypes = caps.TextDocument.SemanticTokens.TokenTypes - o.SemanticMods = caps.TextDocument.SemanticTokens.TokenModifiers - // we don't need Requests, as we support full functionality - // we don't need Formats, as there is only one, for now - - // Check if the client supports diagnostic related information. 
- o.RelatedInformationSupported = caps.TextDocument.PublishDiagnostics.RelatedInformation - // Check if the client completion support includes tags (preferred) or deprecation - if caps.TextDocument.Completion.CompletionItem.TagSupport.ValueSet != nil { - o.CompletionTags = true - } else if caps.TextDocument.Completion.CompletionItem.DeprecatedSupport { - o.CompletionDeprecated = true - } -} - -func (o *Options) Clone() *Options { - // TODO(rfindley): has this function gone stale? It appears that there are - // settings that are incorrectly cloned here (such as TemplateExtensions). - result := &Options{ - ClientOptions: o.ClientOptions, - InternalOptions: o.InternalOptions, - Hooks: Hooks{ - GoDiff: o.GoDiff, - StaticcheckSupported: o.StaticcheckSupported, - ComputeEdits: o.ComputeEdits, - GofumptFormat: o.GofumptFormat, - URLRegexp: o.URLRegexp, - }, - ServerOptions: o.ServerOptions, - UserOptions: o.UserOptions, - } - // Fully clone any slice or map fields. Only Hooks, ExperimentalOptions, - // and UserOptions can be modified. 
- copyStringMap := func(src map[string]bool) map[string]bool { - dst := make(map[string]bool) - for k, v := range src { - dst[k] = v - } - return dst - } - result.Analyses = copyStringMap(o.Analyses) - result.Codelenses = copyStringMap(o.Codelenses) - - copySlice := func(src []string) []string { - dst := make([]string, len(src)) - copy(dst, src) - return dst - } - result.SetEnvSlice(o.EnvSlice()) - result.BuildFlags = copySlice(o.BuildFlags) - result.DirectoryFilters = copySlice(o.DirectoryFilters) - result.StandaloneTags = copySlice(o.StandaloneTags) - - copyAnalyzerMap := func(src map[string]*Analyzer) map[string]*Analyzer { - dst := make(map[string]*Analyzer) - for k, v := range src { - dst[k] = v - } - return dst - } - result.DefaultAnalyzers = copyAnalyzerMap(o.DefaultAnalyzers) - result.TypeErrorAnalyzers = copyAnalyzerMap(o.TypeErrorAnalyzers) - result.ConvenienceAnalyzers = copyAnalyzerMap(o.ConvenienceAnalyzers) - result.StaticcheckAnalyzers = copyAnalyzerMap(o.StaticcheckAnalyzers) - return result -} - -func (o *Options) AddStaticcheckAnalyzer(a *analysis.Analyzer, enabled bool, severity protocol.DiagnosticSeverity) { - o.StaticcheckAnalyzers[a.Name] = &Analyzer{ - Analyzer: a, - Enabled: enabled, - Severity: severity, - } -} - -// EnableAllExperiments turns on all of the experimental "off-by-default" -// features offered by gopls. Any experimental features specified in maps -// should be enabled in enableAllExperimentMaps. 
-func (o *Options) EnableAllExperiments() { - o.SemanticTokens = true -} - -func (o *Options) enableAllExperimentMaps() { - if _, ok := o.Codelenses[string(command.GCDetails)]; !ok { - o.Codelenses[string(command.GCDetails)] = true - } - if _, ok := o.Codelenses[string(command.RunGovulncheck)]; !ok { - o.Codelenses[string(command.RunGovulncheck)] = true - } - if _, ok := o.Analyses[unusedparams.Analyzer.Name]; !ok { - o.Analyses[unusedparams.Analyzer.Name] = true - } - if _, ok := o.Analyses[unusedvariable.Analyzer.Name]; !ok { - o.Analyses[unusedvariable.Analyzer.Name] = true - } -} - -// validateDirectoryFilter validates if the filter string -// - is not empty -// - start with either + or - -// - doesn't contain currently unsupported glob operators: *, ? -func validateDirectoryFilter(ifilter string) (string, error) { - filter := fmt.Sprint(ifilter) - if filter == "" || (filter[0] != '+' && filter[0] != '-') { - return "", fmt.Errorf("invalid filter %v, must start with + or -", filter) - } - segs := strings.Split(filter[1:], "/") - unsupportedOps := [...]string{"?", "*"} - for _, seg := range segs { - if seg != "**" { - for _, op := range unsupportedOps { - if strings.Contains(seg, op) { - return "", fmt.Errorf("invalid filter %v, operator %v not supported. If you want to have this operator supported, consider filing an issue.", filter, op) - } - } - } - } - - return strings.TrimRight(filepath.FromSlash(filter), "/"), nil -} - -func (o *Options) set(name string, value interface{}, seen map[string]struct{}) OptionResult { - // Flatten the name in case we get options with a hierarchy. 
- split := strings.Split(name, ".") - name = split[len(split)-1] - - result := OptionResult{Name: name, Value: value} - if _, ok := seen[name]; ok { - result.parseErrorf("duplicate configuration for %s", name) - } - seen[name] = struct{}{} - - switch name { - case "env": - menv, ok := value.(map[string]interface{}) - if !ok { - result.parseErrorf("invalid type %T, expect map", value) - break - } - if o.Env == nil { - o.Env = make(map[string]string) - } - for k, v := range menv { - o.Env[k] = fmt.Sprint(v) - } - - case "buildFlags": - // TODO(rfindley): use asStringSlice. - iflags, ok := value.([]interface{}) - if !ok { - result.parseErrorf("invalid type %T, expect list", value) - break - } - flags := make([]string, 0, len(iflags)) - for _, flag := range iflags { - flags = append(flags, fmt.Sprintf("%s", flag)) - } - o.BuildFlags = flags - - case "directoryFilters": - // TODO(rfindley): use asStringSlice. - ifilters, ok := value.([]interface{}) - if !ok { - result.parseErrorf("invalid type %T, expect list", value) - break - } - var filters []string - for _, ifilter := range ifilters { - filter, err := validateDirectoryFilter(fmt.Sprintf("%v", ifilter)) - if err != nil { - result.parseErrorf("%v", err) - return result - } - filters = append(filters, strings.TrimRight(filepath.FromSlash(filter), "/")) - } - o.DirectoryFilters = filters - - case "memoryMode": - if s, ok := result.asOneOf( - string(ModeNormal), - string(ModeDegradeClosed), - ); ok { - o.MemoryMode = MemoryMode(s) - } - case "completionDocumentation": - result.setBool(&o.CompletionDocumentation) - case "usePlaceholders": - result.setBool(&o.UsePlaceholders) - case "deepCompletion": - result.setBool(&o.DeepCompletion) - case "completeUnimported": - result.setBool(&o.CompleteUnimported) - case "completionBudget": - result.setDuration(&o.CompletionBudget) - case "matcher": - if s, ok := result.asOneOf( - string(Fuzzy), - string(CaseSensitive), - string(CaseInsensitive), - ); ok { - o.Matcher = Matcher(s) - 
} - - case "symbolMatcher": - if s, ok := result.asOneOf( - string(SymbolFuzzy), - string(SymbolFastFuzzy), - string(SymbolCaseInsensitive), - string(SymbolCaseSensitive), - ); ok { - o.SymbolMatcher = SymbolMatcher(s) - } - - case "symbolStyle": - if s, ok := result.asOneOf( - string(FullyQualifiedSymbols), - string(PackageQualifiedSymbols), - string(DynamicSymbols), - ); ok { - o.SymbolStyle = SymbolStyle(s) - } - - case "symbolScope": - if s, ok := result.asOneOf( - string(WorkspaceSymbolScope), - string(AllSymbolScope), - ); ok { - o.SymbolScope = SymbolScope(s) - } - - case "hoverKind": - if s, ok := result.asOneOf( - string(NoDocumentation), - string(SingleLine), - string(SynopsisDocumentation), - string(FullDocumentation), - string(Structured), - ); ok { - o.HoverKind = HoverKind(s) - } - - case "linkTarget": - result.setString(&o.LinkTarget) - - case "linksInHover": - result.setBool(&o.LinksInHover) - - case "importShortcut": - if s, ok := result.asOneOf(string(BothShortcuts), string(LinkShortcut), string(DefinitionShortcut)); ok { - o.ImportShortcut = ImportShortcut(s) - } - - case "analyses": - result.setBoolMap(&o.Analyses) - - case "hints": - result.setBoolMap(&o.Hints) - - case "annotations": - result.setAnnotationMap(&o.Annotations) - - case "vulncheck": - if s, ok := result.asOneOf( - string(ModeVulncheckOff), - string(ModeVulncheckImports), - ); ok { - o.Vulncheck = VulncheckMode(s) - } - - case "codelenses", "codelens": - var lensOverrides map[string]bool - result.setBoolMap(&lensOverrides) - if result.Error == nil { - if o.Codelenses == nil { - o.Codelenses = make(map[string]bool) - } - for lens, enabled := range lensOverrides { - o.Codelenses[lens] = enabled - } - } - - // codelens is deprecated, but still works for now. - // TODO(rstambler): Remove this for the gopls/v0.7.0 release. 
- if name == "codelens" { - result.deprecated("codelenses") - } - - case "staticcheck": - if v, ok := result.asBool(); ok { - o.Staticcheck = v - if v && !o.StaticcheckSupported { - result.Error = fmt.Errorf("applying setting %q: staticcheck is not supported at %s;"+ - " rebuild gopls with a more recent version of Go", result.Name, runtime.Version()) - } - } - - case "local": - result.setString(&o.Local) - - case "verboseOutput": - result.setBool(&o.VerboseOutput) - - case "verboseWorkDoneProgress": - result.setBool(&o.VerboseWorkDoneProgress) - - case "tempModfile": - result.softErrorf("gopls setting \"tempModfile\" is deprecated.\nPlease comment on https://go.dev/issue/63537 if this impacts your workflow.") - result.setBool(&o.TempModfile) - - case "showBugReports": - result.setBool(&o.ShowBugReports) - - case "gofumpt": - if v, ok := result.asBool(); ok { - o.Gofumpt = v - if v && o.GofumptFormat == nil { - result.Error = fmt.Errorf("applying setting %q: gofumpt is not supported at %s;"+ - " rebuild gopls with a more recent version of Go", result.Name, runtime.Version()) - } - } - case "completeFunctionCalls": - result.setBool(&o.CompleteFunctionCalls) - - case "semanticTokens": - result.setBool(&o.SemanticTokens) - - case "noSemanticString": - result.setBool(&o.NoSemanticString) - - case "noSemanticNumber": - result.setBool(&o.NoSemanticNumber) - - case "expandWorkspaceToModule": - result.softErrorf("gopls setting \"expandWorkspaceToModule\" is deprecated.\nPlease comment on https://go.dev/issue/63536 if this impacts your workflow.") - result.setBool(&o.ExpandWorkspaceToModule) - - case "experimentalPostfixCompletions": - result.setBool(&o.ExperimentalPostfixCompletions) - - case "experimentalWorkspaceModule": - result.deprecated("") - - case "experimentalTemplateSupport": // TODO(pjw): remove after June 2022 - result.deprecated("") - - case "templateExtensions": - if iexts, ok := value.([]interface{}); ok { - ans := []string{} - for _, x := range iexts { - ans 
= append(ans, fmt.Sprint(x)) - } - o.TemplateExtensions = ans - break - } - if value == nil { - o.TemplateExtensions = nil - break - } - result.parseErrorf("unexpected type %T not []string", value) - - case "experimentalDiagnosticsDelay": - result.deprecated("diagnosticsDelay") - - case "diagnosticsDelay": - result.setDuration(&o.DiagnosticsDelay) - - case "diagnosticsTrigger": - if s, ok := result.asOneOf( - string(DiagnosticsOnEdit), - string(DiagnosticsOnSave), - ); ok { - o.DiagnosticsTrigger = DiagnosticsTrigger(s) - } - - case "analysisProgressReporting": - result.setBool(&o.AnalysisProgressReporting) - - case "experimentalWatchedFileDelay": - result.deprecated("") - - case "experimentalPackageCacheKey": - result.deprecated("") - - case "allowModfileModifications": - result.setBool(&o.AllowModfileModifications) - - case "allowImplicitNetworkAccess": - result.setBool(&o.AllowImplicitNetworkAccess) - - case "experimentalUseInvalidMetadata": - result.deprecated("") - - case "standaloneTags": - result.setStringSlice(&o.StandaloneTags) - - case "allExperiments": - // This setting should be handled before all of the other options are - // processed, so do nothing here. - - case "newDiff": - result.setString(&o.NewDiff) - - case "chattyDiagnostics": - result.setBool(&o.ChattyDiagnostics) - - case "subdirWatchPatterns": - if s, ok := result.asOneOf( - string(SubdirWatchPatternsOn), - string(SubdirWatchPatternsOff), - string(SubdirWatchPatternsAuto), - ); ok { - o.SubdirWatchPatterns = SubdirWatchPatterns(s) - } - - case "reportAnalysisProgressAfter": - result.setDuration(&o.ReportAnalysisProgressAfter) - - case "telemetryPrompt": - result.setBool(&o.TelemetryPrompt) - case "linkifyShowMessage": - result.setBool(&o.LinkifyShowMessage) - - // Replaced settings. 
- case "experimentalDisabledAnalyses": - result.deprecated("analyses") - - case "disableDeepCompletion": - result.deprecated("deepCompletion") - - case "disableFuzzyMatching": - result.deprecated("fuzzyMatching") - - case "wantCompletionDocumentation": - result.deprecated("completionDocumentation") - - case "wantUnimportedCompletions": - result.deprecated("completeUnimported") - - case "fuzzyMatching": - result.deprecated("matcher") - - case "caseSensitiveCompletion": - result.deprecated("matcher") - - // Deprecated settings. - case "wantSuggestedFixes": - result.deprecated("") - - case "noIncrementalSync": - result.deprecated("") - - case "watchFileChanges": - result.deprecated("") - - case "go-diff": - result.deprecated("") - - default: - result.unexpected() - } - return result -} - -// parseErrorf reports an error parsing the current configuration value. -func (r *OptionResult) parseErrorf(msg string, values ...interface{}) { - if false { - _ = fmt.Sprintf(msg, values...) // this causes vet to check this like printf - } - prefix := fmt.Sprintf("parsing setting %q: ", r.Name) - r.Error = fmt.Errorf(prefix+msg, values...) -} - -// A SoftError is an error that does not affect the functionality of gopls. -type SoftError struct { - msg string -} - -func (e *SoftError) Error() string { - return e.msg -} - -// deprecated reports the current setting as deprecated. If 'replacement' is -// non-nil, it is suggested to the user. -func (r *OptionResult) deprecated(replacement string) { - msg := fmt.Sprintf("gopls setting %q is deprecated", r.Name) - if replacement != "" { - msg = fmt.Sprintf("%s, use %q instead", msg, replacement) - } - r.Error = &SoftError{msg} -} - -// softErrorf reports a soft error related to the current option. -func (r *OptionResult) softErrorf(format string, args ...any) { - r.Error = &SoftError{fmt.Sprintf(format, args...)} -} - -// unexpected reports that the current setting is not known to gopls. 
-func (r *OptionResult) unexpected() { - r.Error = fmt.Errorf("unexpected gopls setting %q", r.Name) -} - -func (r *OptionResult) asBool() (bool, bool) { - b, ok := r.Value.(bool) - if !ok { - r.parseErrorf("invalid type %T, expect bool", r.Value) - return false, false - } - return b, true -} - -func (r *OptionResult) setBool(b *bool) { - if v, ok := r.asBool(); ok { - *b = v - } -} - -func (r *OptionResult) setDuration(d *time.Duration) { - if v, ok := r.asString(); ok { - parsed, err := time.ParseDuration(v) - if err != nil { - r.parseErrorf("failed to parse duration %q: %v", v, err) - return - } - *d = parsed - } -} - -func (r *OptionResult) setBoolMap(bm *map[string]bool) { - m := r.asBoolMap() - *bm = m -} - -func (r *OptionResult) setAnnotationMap(bm *map[Annotation]bool) { - all := r.asBoolMap() - if all == nil { - return - } - // Default to everything enabled by default. - m := make(map[Annotation]bool) - for k, enabled := range all { - a, err := asOneOf( - k, - string(Nil), - string(Escape), - string(Inline), - string(Bounds), - ) - if err != nil { - // In case of an error, process any legacy values. 
- switch k { - case "noEscape": - m[Escape] = false - r.parseErrorf(`"noEscape" is deprecated, set "Escape: false" instead`) - case "noNilcheck": - m[Nil] = false - r.parseErrorf(`"noNilcheck" is deprecated, set "Nil: false" instead`) - case "noInline": - m[Inline] = false - r.parseErrorf(`"noInline" is deprecated, set "Inline: false" instead`) - case "noBounds": - m[Bounds] = false - r.parseErrorf(`"noBounds" is deprecated, set "Bounds: false" instead`) - default: - r.parseErrorf("%v", err) - } - continue - } - m[Annotation(a)] = enabled - } - *bm = m -} - -func (r *OptionResult) asBoolMap() map[string]bool { - all, ok := r.Value.(map[string]interface{}) - if !ok { - r.parseErrorf("invalid type %T for map[string]bool option", r.Value) - return nil - } - m := make(map[string]bool) - for a, enabled := range all { - if e, ok := enabled.(bool); ok { - m[a] = e - } else { - r.parseErrorf("invalid type %T for map key %q", enabled, a) - return m - } - } - return m -} - -func (r *OptionResult) asString() (string, bool) { - b, ok := r.Value.(string) - if !ok { - r.parseErrorf("invalid type %T, expect string", r.Value) - return "", false - } - return b, true -} - -func (r *OptionResult) asStringSlice() ([]string, bool) { - iList, ok := r.Value.([]interface{}) - if !ok { - r.parseErrorf("invalid type %T, expect list", r.Value) - return nil, false - } - var list []string - for _, elem := range iList { - s, ok := elem.(string) - if !ok { - r.parseErrorf("invalid element type %T, expect string", elem) - return nil, false - } - list = append(list, s) - } - return list, true -} - -func (r *OptionResult) asOneOf(options ...string) (string, bool) { - s, ok := r.asString() - if !ok { - return "", false - } - s, err := asOneOf(s, options...) 
- if err != nil { - r.parseErrorf("%v", err) - } - return s, err == nil -} - -func asOneOf(str string, options ...string) (string, error) { - lower := strings.ToLower(str) - for _, opt := range options { - if strings.ToLower(opt) == lower { - return opt, nil - } - } - return "", fmt.Errorf("invalid option %q for enum", str) -} - -func (r *OptionResult) setString(s *string) { - if v, ok := r.asString(); ok { - *s = v - } -} - -func (r *OptionResult) setStringSlice(s *[]string) { - if v, ok := r.asStringSlice(); ok { - *s = v - } -} - -func typeErrorAnalyzers() map[string]*Analyzer { - return map[string]*Analyzer{ - fillreturns.Analyzer.Name: { - Analyzer: fillreturns.Analyzer, - // TODO(rfindley): is SourceFixAll even necessary here? Is that not implied? - ActionKind: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, - Enabled: true, - }, - nonewvars.Analyzer.Name: { - Analyzer: nonewvars.Analyzer, - Enabled: true, - }, - noresultvalues.Analyzer.Name: { - Analyzer: noresultvalues.Analyzer, - Enabled: true, - }, - undeclaredname.Analyzer.Name: { - Analyzer: undeclaredname.Analyzer, - Fix: UndeclaredName, - Enabled: true, - }, - unusedvariable.Analyzer.Name: { - Analyzer: unusedvariable.Analyzer, - Enabled: false, - }, - } -} - -// TODO(golang/go#61559): remove convenience analyzers now that they are not -// used from the analysis framework. 
-func convenienceAnalyzers() map[string]*Analyzer { - return map[string]*Analyzer{ - fillstruct.Analyzer.Name: { - Analyzer: fillstruct.Analyzer, - Fix: FillStruct, - Enabled: true, - ActionKind: []protocol.CodeActionKind{protocol.RefactorRewrite}, - }, - stubmethods.Analyzer.Name: { - Analyzer: stubmethods.Analyzer, - Fix: StubMethods, - Enabled: true, - }, - infertypeargs.Analyzer.Name: { - Analyzer: infertypeargs.Analyzer, - Enabled: true, - ActionKind: []protocol.CodeActionKind{protocol.RefactorRewrite}, - }, - } -} - -func defaultAnalyzers() map[string]*Analyzer { - return map[string]*Analyzer{ - // The traditional vet suite: - appends.Analyzer.Name: {Analyzer: appends.Analyzer, Enabled: true}, - asmdecl.Analyzer.Name: {Analyzer: asmdecl.Analyzer, Enabled: true}, - assign.Analyzer.Name: {Analyzer: assign.Analyzer, Enabled: true}, - atomic.Analyzer.Name: {Analyzer: atomic.Analyzer, Enabled: true}, - bools.Analyzer.Name: {Analyzer: bools.Analyzer, Enabled: true}, - buildtag.Analyzer.Name: {Analyzer: buildtag.Analyzer, Enabled: true}, - cgocall.Analyzer.Name: {Analyzer: cgocall.Analyzer, Enabled: true}, - composite.Analyzer.Name: {Analyzer: composite.Analyzer, Enabled: true}, - copylock.Analyzer.Name: {Analyzer: copylock.Analyzer, Enabled: true}, - defers.Analyzer.Name: {Analyzer: defers.Analyzer, Enabled: true}, - deprecated.Analyzer.Name: {Analyzer: deprecated.Analyzer, Enabled: true, Severity: protocol.SeverityHint, Tag: []protocol.DiagnosticTag{protocol.Deprecated}}, - directive.Analyzer.Name: {Analyzer: directive.Analyzer, Enabled: true}, - errorsas.Analyzer.Name: {Analyzer: errorsas.Analyzer, Enabled: true}, - httpresponse.Analyzer.Name: {Analyzer: httpresponse.Analyzer, Enabled: true}, - ifaceassert.Analyzer.Name: {Analyzer: ifaceassert.Analyzer, Enabled: true}, - loopclosure.Analyzer.Name: {Analyzer: loopclosure.Analyzer, Enabled: true}, - lostcancel.Analyzer.Name: {Analyzer: lostcancel.Analyzer, Enabled: true}, - nilfunc.Analyzer.Name: {Analyzer: 
nilfunc.Analyzer, Enabled: true}, - printf.Analyzer.Name: {Analyzer: printf.Analyzer, Enabled: true}, - shift.Analyzer.Name: {Analyzer: shift.Analyzer, Enabled: true}, - slog.Analyzer.Name: {Analyzer: slog.Analyzer, Enabled: true}, - stdmethods.Analyzer.Name: {Analyzer: stdmethods.Analyzer, Enabled: true}, - stringintconv.Analyzer.Name: {Analyzer: stringintconv.Analyzer, Enabled: true}, - structtag.Analyzer.Name: {Analyzer: structtag.Analyzer, Enabled: true}, - tests.Analyzer.Name: {Analyzer: tests.Analyzer, Enabled: true}, - unmarshal.Analyzer.Name: {Analyzer: unmarshal.Analyzer, Enabled: true}, - unreachable.Analyzer.Name: {Analyzer: unreachable.Analyzer, Enabled: true}, - unsafeptr.Analyzer.Name: {Analyzer: unsafeptr.Analyzer, Enabled: true}, - unusedresult.Analyzer.Name: {Analyzer: unusedresult.Analyzer, Enabled: true}, - - // Non-vet analyzers: - atomicalign.Analyzer.Name: {Analyzer: atomicalign.Analyzer, Enabled: true}, - deepequalerrors.Analyzer.Name: {Analyzer: deepequalerrors.Analyzer, Enabled: true}, - fieldalignment.Analyzer.Name: {Analyzer: fieldalignment.Analyzer, Enabled: false}, - nilness.Analyzer.Name: {Analyzer: nilness.Analyzer, Enabled: true}, - shadow.Analyzer.Name: {Analyzer: shadow.Analyzer, Enabled: false}, - sortslice.Analyzer.Name: {Analyzer: sortslice.Analyzer, Enabled: true}, - testinggoroutine.Analyzer.Name: {Analyzer: testinggoroutine.Analyzer, Enabled: true}, - unusedparams.Analyzer.Name: {Analyzer: unusedparams.Analyzer, Enabled: false}, - unusedwrite.Analyzer.Name: {Analyzer: unusedwrite.Analyzer, Enabled: false}, - useany.Analyzer.Name: {Analyzer: useany.Analyzer, Enabled: false}, - timeformat.Analyzer.Name: {Analyzer: timeformat.Analyzer, Enabled: true}, - embeddirective.Analyzer.Name: { - Analyzer: embeddirective.Analyzer, - Enabled: true, - Fix: AddEmbedImport, - fixesDiagnostic: fixedByImportingEmbed, - }, - - // gofmt -s suite: - simplifycompositelit.Analyzer.Name: { - Analyzer: simplifycompositelit.Analyzer, - Enabled: true, - 
ActionKind: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, - }, - simplifyrange.Analyzer.Name: { - Analyzer: simplifyrange.Analyzer, - Enabled: true, - ActionKind: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, - }, - simplifyslice.Analyzer.Name: { - Analyzer: simplifyslice.Analyzer, - Enabled: true, - ActionKind: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, - }, - } -} - -func urlRegexp() *regexp.Regexp { - // Ensure links are matched as full words, not anywhere. - re := regexp.MustCompile(`\b(http|ftp|https)://([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?\b`) - re.Longest() - return re -} - -type APIJSON struct { - Options map[string][]*OptionJSON - Commands []*CommandJSON - Lenses []*LensJSON - Analyzers []*AnalyzerJSON - Hints []*HintJSON -} - -type OptionJSON struct { - Name string - Type string - Doc string - EnumKeys EnumKeys - EnumValues []EnumValue - Default string - Status string - Hierarchy string -} - -func (o *OptionJSON) String() string { - return o.Name -} - -func (o *OptionJSON) Write(w io.Writer) { - fmt.Fprintf(w, "**%v** *%v*\n\n", o.Name, o.Type) - writeStatus(w, o.Status) - enumValues := collectEnums(o) - fmt.Fprintf(w, "%v%v\nDefault: `%v`.\n\n", o.Doc, enumValues, o.Default) -} - -func writeStatus(section io.Writer, status string) { - switch status { - case "": - case "advanced": - fmt.Fprint(section, "**This is an advanced setting and should not be configured by most `gopls` users.**\n\n") - case "debug": - fmt.Fprint(section, "**This setting is for debugging purposes only.**\n\n") - case "experimental": - fmt.Fprint(section, "**This setting is experimental and may be deleted.**\n\n") - default: - fmt.Fprintf(section, "**Status: %s.**\n\n", status) - } -} - -var parBreakRE = regexp.MustCompile("\n{2,}") - -func collectEnums(opt *OptionJSON) string { - var b strings.Builder - write := func(name, doc string) { - if doc != "" { - unbroken := 
parBreakRE.ReplaceAllString(doc, "\\\n") - fmt.Fprintf(&b, "* %s\n", strings.TrimSpace(unbroken)) - } else { - fmt.Fprintf(&b, "* `%s`\n", name) - } - } - if len(opt.EnumValues) > 0 && opt.Type == "enum" { - b.WriteString("\nMust be one of:\n\n") - for _, val := range opt.EnumValues { - write(val.Value, val.Doc) - } - } else if len(opt.EnumKeys.Keys) > 0 && shouldShowEnumKeysInSettings(opt.Name) { - b.WriteString("\nCan contain any of:\n\n") - for _, val := range opt.EnumKeys.Keys { - write(val.Name, val.Doc) - } - } - return b.String() -} - -func shouldShowEnumKeysInSettings(name string) bool { - // These fields have too many possible options to print. - return !(name == "analyses" || name == "codelenses" || name == "hints") -} - -type EnumKeys struct { - ValueType string - Keys []EnumKey -} - -type EnumKey struct { - Name string - Doc string - Default string -} - -type EnumValue struct { - Value string - Doc string -} - -type CommandJSON struct { - Command string - Title string - Doc string - ArgDoc string - ResultDoc string -} - -func (c *CommandJSON) String() string { - return c.Command -} - -func (c *CommandJSON) Write(w io.Writer) { - fmt.Fprintf(w, "### **%v**\nIdentifier: `%v`\n\n%v\n\n", c.Title, c.Command, c.Doc) - if c.ArgDoc != "" { - fmt.Fprintf(w, "Args:\n\n```\n%s\n```\n\n", c.ArgDoc) - } - if c.ResultDoc != "" { - fmt.Fprintf(w, "Result:\n\n```\n%s\n```\n\n", c.ResultDoc) - } -} - -type LensJSON struct { - Lens string - Title string - Doc string -} - -func (l *LensJSON) String() string { - return l.Title -} - -func (l *LensJSON) Write(w io.Writer) { - fmt.Fprintf(w, "%s (%s): %s", l.Title, l.Lens, l.Doc) -} - -type AnalyzerJSON struct { - Name string - Doc string - URL string - Default bool -} - -func (a *AnalyzerJSON) String() string { - return a.Name -} - -func (a *AnalyzerJSON) Write(w io.Writer) { - fmt.Fprintf(w, "%s (%s): %v", a.Name, a.Doc, a.Default) -} - -type HintJSON struct { - Name string - Doc string - Default bool -} - -func (h 
*HintJSON) String() string { - return h.Name -} - -func (h *HintJSON) Write(w io.Writer) { - fmt.Fprintf(w, "%s (%s): %v", h.Name, h.Doc, h.Default) -} diff --git a/gopls/internal/lsp/source/options_test.go b/gopls/internal/lsp/source/options_test.go deleted file mode 100644 index 4fa6ecf15df..00000000000 --- a/gopls/internal/lsp/source/options_test.go +++ /dev/null @@ -1,206 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package source - -import ( - "testing" - "time" -) - -func TestSetOption(t *testing.T) { - tests := []struct { - name string - value interface{} - wantError bool - check func(Options) bool - }{ - { - name: "symbolStyle", - value: "Dynamic", - check: func(o Options) bool { return o.SymbolStyle == DynamicSymbols }, - }, - { - name: "symbolStyle", - value: "", - wantError: true, - check: func(o Options) bool { return o.SymbolStyle == "" }, - }, - { - name: "symbolStyle", - value: false, - wantError: true, - check: func(o Options) bool { return o.SymbolStyle == "" }, - }, - { - name: "symbolMatcher", - value: "caseInsensitive", - check: func(o Options) bool { return o.SymbolMatcher == SymbolCaseInsensitive }, - }, - { - name: "completionBudget", - value: "2s", - check: func(o Options) bool { return o.CompletionBudget == 2*time.Second }, - }, - { - name: "staticcheck", - value: true, - check: func(o Options) bool { return o.Staticcheck == true }, - wantError: true, // o.StaticcheckSupported is unset - }, - { - name: "codelenses", - value: map[string]interface{}{"generate": true}, - check: func(o Options) bool { return o.Codelenses["generate"] }, - }, - { - name: "allExperiments", - value: true, - check: func(o Options) bool { - return true // just confirm that we handle this setting - }, - }, - { - name: "hoverKind", - value: "FullDocumentation", - check: func(o Options) bool { - return o.HoverKind == FullDocumentation - }, - }, 
- { - name: "hoverKind", - value: "NoDocumentation", - check: func(o Options) bool { - return o.HoverKind == NoDocumentation - }, - }, - { - name: "hoverKind", - value: "SingleLine", - check: func(o Options) bool { - return o.HoverKind == SingleLine - }, - }, - { - name: "hoverKind", - value: "Structured", - check: func(o Options) bool { - return o.HoverKind == Structured - }, - }, - { - name: "ui.documentation.hoverKind", - value: "Structured", - check: func(o Options) bool { - return o.HoverKind == Structured - }, - }, - { - name: "matcher", - value: "Fuzzy", - check: func(o Options) bool { - return o.Matcher == Fuzzy - }, - }, - { - name: "matcher", - value: "CaseSensitive", - check: func(o Options) bool { - return o.Matcher == CaseSensitive - }, - }, - { - name: "matcher", - value: "CaseInsensitive", - check: func(o Options) bool { - return o.Matcher == CaseInsensitive - }, - }, - { - name: "env", - value: map[string]interface{}{"testing": "true"}, - check: func(o Options) bool { - v, found := o.Env["testing"] - return found && v == "true" - }, - }, - { - name: "env", - value: []string{"invalid", "input"}, - wantError: true, - check: func(o Options) bool { - return o.Env == nil - }, - }, - { - name: "directoryFilters", - value: []interface{}{"-node_modules", "+project_a"}, - check: func(o Options) bool { - return len(o.DirectoryFilters) == 2 - }, - }, - { - name: "directoryFilters", - value: []interface{}{"invalid"}, - wantError: true, - check: func(o Options) bool { - return len(o.DirectoryFilters) == 0 - }, - }, - { - name: "directoryFilters", - value: []string{"-invalid", "+type"}, - wantError: true, - check: func(o Options) bool { - return len(o.DirectoryFilters) == 0 - }, - }, - { - name: "annotations", - value: map[string]interface{}{ - "Nil": false, - "noBounds": true, - }, - wantError: true, - check: func(o Options) bool { - return !o.Annotations[Nil] && !o.Annotations[Bounds] - }, - }, - { - name: "vulncheck", - value: []interface{}{"invalid"}, - 
wantError: true, - check: func(o Options) bool { - return o.Vulncheck == "" // For invalid value, default to 'off'. - }, - }, - { - name: "vulncheck", - value: "Imports", - check: func(o Options) bool { - return o.Vulncheck == ModeVulncheckImports // For invalid value, default to 'off'. - }, - }, - { - name: "vulncheck", - value: "imports", - check: func(o Options) bool { - return o.Vulncheck == ModeVulncheckImports - }, - }, - } - - for _, test := range tests { - var opts Options - result := opts.set(test.name, test.value, map[string]struct{}{}) - if (result.Error != nil) != test.wantError { - t.Fatalf("Options.set(%q, %v): result.Error = %v, want error: %t", test.name, test.value, result.Error, test.wantError) - } - // TODO: this could be made much better using cmp.Diff, if that becomes - // available in this module. - if !test.check(opts) { - t.Errorf("Options.set(%q, %v): unexpected result %+v", test.name, test.value, opts) - } - } -} diff --git a/gopls/internal/lsp/source/origin.go b/gopls/internal/lsp/source/origin.go deleted file mode 100644 index 8ee467e844e..00000000000 --- a/gopls/internal/lsp/source/origin.go +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.19 -// +build !go1.19 - -package source - -import "go/types" - -// containsOrigin reports whether the provided object set contains an object -// with the same origin as the provided obj (which may be a synthetic object -// created during instantiation). -func containsOrigin(objSet map[types.Object]bool, obj types.Object) bool { - if obj == nil { - return objSet[obj] - } - // In Go 1.18, we can't use the types.Var.Origin and types.Func.Origin methods. 
- for target := range objSet { - if target.Pkg() == obj.Pkg() && target.Pos() == obj.Pos() && target.Name() == obj.Name() { - return true - } - } - return false -} diff --git a/gopls/internal/lsp/source/origin_119.go b/gopls/internal/lsp/source/origin_119.go deleted file mode 100644 index a249ce4b1c5..00000000000 --- a/gopls/internal/lsp/source/origin_119.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.19 -// +build go1.19 - -package source - -import "go/types" - -// containsOrigin reports whether the provided object set contains an object -// with the same origin as the provided obj (which may be a synthetic object -// created during instantiation). -func containsOrigin(objSet map[types.Object]bool, obj types.Object) bool { - objOrigin := origin(obj) - for target := range objSet { - if origin(target) == objOrigin { - return true - } - } - return false -} - -func origin(obj types.Object) types.Object { - switch obj := obj.(type) { - case *types.Var: - return obj.Origin() - case *types.Func: - return obj.Origin() - } - return obj -} diff --git a/gopls/internal/lsp/source/parsemode_go116.go b/gopls/internal/lsp/source/parsemode_go116.go deleted file mode 100644 index fabcb69fac9..00000000000 --- a/gopls/internal/lsp/source/parsemode_go116.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.17 -// +build !go1.17 - -package source - -import "go/parser" - -// The parser.SkipObjectResolution mode flag is not supported before Go 1.17. 
-const SkipObjectResolution parser.Mode = 0 diff --git a/gopls/internal/lsp/source/parsemode_go117.go b/gopls/internal/lsp/source/parsemode_go117.go deleted file mode 100644 index 773a9b64262..00000000000 --- a/gopls/internal/lsp/source/parsemode_go117.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.17 -// +build go1.17 - -package source - -import "go/parser" - -const SkipObjectResolution = parser.SkipObjectResolution diff --git a/gopls/internal/lsp/source/references.go b/gopls/internal/lsp/source/references.go deleted file mode 100644 index 46459dcbec4..00000000000 --- a/gopls/internal/lsp/source/references.go +++ /dev/null @@ -1,692 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package source - -// This file defines the 'references' query based on a serializable -// index constructed during type checking, thus avoiding the need to -// type-check packages at search time. -// -// See the ./xrefs/ subpackage for the index construction and lookup. -// -// This implementation does not intermingle objects from distinct -// calls to TypeCheck. 
- -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - "sort" - "strings" - "sync" - - "golang.org/x/sync/errgroup" - "golang.org/x/tools/go/types/objectpath" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/source/methodsets" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" -) - -// References returns a list of all references (sorted with -// definitions before uses) to the object denoted by the identifier at -// the given file/position, searching the entire workspace. -func References(ctx context.Context, snapshot Snapshot, fh FileHandle, pp protocol.Position, includeDeclaration bool) ([]protocol.Location, error) { - references, err := references(ctx, snapshot, fh, pp, includeDeclaration) - if err != nil { - return nil, err - } - locations := make([]protocol.Location, len(references)) - for i, ref := range references { - locations[i] = ref.location - } - return locations, nil -} - -// A reference describes an identifier that refers to the same -// object as the subject of a References query. -type reference struct { - isDeclaration bool - location protocol.Location - pkgPath PackagePath // of declaring package (same for all elements of the slice) -} - -// references returns a list of all references (sorted with -// definitions before uses) to the object denoted by the identifier at -// the given file/position, searching the entire workspace. -func references(ctx context.Context, snapshot Snapshot, f FileHandle, pp protocol.Position, includeDeclaration bool) ([]reference, error) { - ctx, done := event.Start(ctx, "source.references") - defer done() - - // Is the cursor within the package name declaration? 
- _, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp) - if err != nil { - return nil, err - } - - var refs []reference - if inPackageName { - refs, err = packageReferences(ctx, snapshot, f.URI()) - } else { - refs, err = ordinaryReferences(ctx, snapshot, f.URI(), pp) - } - if err != nil { - return nil, err - } - - sort.Slice(refs, func(i, j int) bool { - x, y := refs[i], refs[j] - if x.isDeclaration != y.isDeclaration { - return x.isDeclaration // decls < refs - } - return protocol.CompareLocation(x.location, y.location) < 0 - }) - - // De-duplicate by location, and optionally remove declarations. - out := refs[:0] - for _, ref := range refs { - if !includeDeclaration && ref.isDeclaration { - continue - } - if len(out) == 0 || out[len(out)-1].location != ref.location { - out = append(out, ref) - } - } - refs = out - - return refs, nil -} - -// packageReferences returns a list of references to the package -// declaration of the specified name and uri by searching among the -// import declarations of all packages that directly import the target -// package. -func packageReferences(ctx context.Context, snapshot Snapshot, uri span.URI) ([]reference, error) { - metas, err := snapshot.MetadataForFile(ctx, uri) - if err != nil { - return nil, err - } - if len(metas) == 0 { - return nil, fmt.Errorf("found no package containing %s", uri) - } - - var refs []reference - - // Find external references to the package declaration - // from each direct import of the package. - // - // The narrowest package is the most broadly imported, - // so we choose it for the external references. - // - // But if the file ends with _test.go then we need to - // find the package it is testing; there's no direct way - // to do that, so pick a file from the same package that - // doesn't end in _test.go and start over. 
- narrowest := metas[0] - if narrowest.ForTest != "" && strings.HasSuffix(string(uri), "_test.go") { - for _, f := range narrowest.CompiledGoFiles { - if !strings.HasSuffix(string(f), "_test.go") { - return packageReferences(ctx, snapshot, f) - } - } - // This package has no non-test files. - // Skip the search for external references. - // (Conceivably one could blank-import an empty package, but why?) - } else { - rdeps, err := snapshot.ReverseDependencies(ctx, narrowest.ID, false) // direct - if err != nil { - return nil, err - } - - // Restrict search to workspace packages. - workspace, err := snapshot.WorkspaceMetadata(ctx) - if err != nil { - return nil, err - } - workspaceMap := make(map[PackageID]*Metadata, len(workspace)) - for _, m := range workspace { - workspaceMap[m.ID] = m - } - - for _, rdep := range rdeps { - if _, ok := workspaceMap[rdep.ID]; !ok { - continue - } - for _, uri := range rdep.CompiledGoFiles { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - f, err := snapshot.ParseGo(ctx, fh, ParseHeader) - if err != nil { - return nil, err - } - for _, imp := range f.File.Imports { - if rdep.DepsByImpPath[UnquoteImportPath(imp)] == narrowest.ID { - refs = append(refs, reference{ - isDeclaration: false, - location: mustLocation(f, imp), - pkgPath: narrowest.PkgPath, - }) - } - } - } - } - } - - // Find internal "references" to the package from - // of each package declaration in the target package itself. - // - // The widest package (possibly a test variant) has the - // greatest number of files and thus we choose it for the - // "internal" references. 
- widest := metas[len(metas)-1] // may include _test.go files - for _, uri := range widest.CompiledGoFiles { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - f, err := snapshot.ParseGo(ctx, fh, ParseHeader) - if err != nil { - return nil, err - } - refs = append(refs, reference{ - isDeclaration: true, // (one of many) - location: mustLocation(f, f.File.Name), - pkgPath: widest.PkgPath, - }) - } - - return refs, nil -} - -// ordinaryReferences computes references for all ordinary objects (not package declarations). -func ordinaryReferences(ctx context.Context, snapshot Snapshot, uri span.URI, pp protocol.Position) ([]reference, error) { - // Strategy: use the reference information computed by the - // type checker to find the declaration. First type-check this - // package to find the declaration, then type check the - // declaring package (which may be different), plus variants, - // to find local (in-package) references. - // Global references are satisfied by the index. - - // Strictly speaking, a wider package could provide a different - // declaration (e.g. because the _test.go files can change the - // meaning of a field or method selection), but the narrower - // package reports the more broadly referenced object. - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, uri) - if err != nil { - return nil, err - } - - // Find the selected object (declaration or reference). - // For struct{T}, we choose the field (Def) over the type (Use). - pos, err := pgf.PositionPos(pp) - if err != nil { - return nil, err - } - candidates, _, err := objectsAt(pkg.GetTypesInfo(), pgf.File, pos) - if err != nil { - return nil, err - } - - // Pick first object arbitrarily. - // The case variables of a type switch have different - // types but that difference is immaterial here. 
- var obj types.Object - for obj = range candidates { - break - } - if obj == nil { - return nil, ErrNoIdentFound // can't happen - } - - // nil, error, error.Error, iota, or other built-in? - if obj.Pkg() == nil { - return nil, fmt.Errorf("references to builtin %q are not supported", obj.Name()) - } - if !obj.Pos().IsValid() { - if obj.Pkg().Path() != "unsafe" { - bug.Reportf("references: object %v has no position", obj) - } - return nil, fmt.Errorf("references to unsafe.%s are not supported", obj.Name()) - } - - // Find metadata of all packages containing the object's defining file. - // This may include the query pkg, and possibly other variants. - declPosn := safetoken.StartPosition(pkg.FileSet(), obj.Pos()) - declURI := span.URIFromPath(declPosn.Filename) - variants, err := snapshot.MetadataForFile(ctx, declURI) - if err != nil { - return nil, err - } - if len(variants) == 0 { - return nil, fmt.Errorf("no packages for file %q", declURI) // can't happen - } - // (variants must include ITVs for reverse dependency computation below.) - - // Is object exported? - // If so, compute scope and targets of the global search. - var ( - globalScope = make(map[PackageID]*Metadata) // (excludes ITVs) - globalTargets map[PackagePath]map[objectpath.Path]unit - expansions = make(map[PackageID]unit) // packages that caused search expansion - ) - // TODO(adonovan): what about generic functions? Need to consider both - // uninstantiated and instantiated. The latter have no objectpath. Use Origin? - if path, err := objectpath.For(obj); err == nil && obj.Exported() { - pkgPath := variants[0].PkgPath // (all variants have same package path) - globalTargets = map[PackagePath]map[objectpath.Path]unit{ - pkgPath: {path: {}}, // primary target - } - - // Compute set of (non-ITV) workspace packages. - // We restrict references to this subset. 
- workspace, err := snapshot.WorkspaceMetadata(ctx) - if err != nil { - return nil, err - } - workspaceMap := make(map[PackageID]*Metadata, len(workspace)) - workspaceIDs := make([]PackageID, 0, len(workspace)) - for _, m := range workspace { - workspaceMap[m.ID] = m - workspaceIDs = append(workspaceIDs, m.ID) - } - - // addRdeps expands the global scope to include the - // reverse dependencies of the specified package. - addRdeps := func(id PackageID, transitive bool) error { - rdeps, err := snapshot.ReverseDependencies(ctx, id, transitive) - if err != nil { - return err - } - for rdepID, rdep := range rdeps { - // Skip non-workspace packages. - // - // This means we also skip any expansion of the - // search that might be caused by a non-workspace - // package, possibly causing us to miss references - // to the expanded target set from workspace packages. - // - // TODO(adonovan): don't skip those expansions. - // The challenge is how to so without type-checking - // a lot of non-workspace packages not covered by - // the initial workspace load. - if _, ok := workspaceMap[rdepID]; !ok { - continue - } - - globalScope[rdepID] = rdep - } - return nil - } - - // How far need we search? - // For package-level objects, we need only search the direct importers. - // For fields and methods, we must search transitively. - transitive := obj.Pkg().Scope().Lookup(obj.Name()) != obj - - // The scope is the union of rdeps of each variant. - // (Each set is disjoint so there's no benefit to - // combining the metadata graph traversals.) - for _, m := range variants { - if err := addRdeps(m.ID, transitive); err != nil { - return nil, err - } - } - - // Is object a method? - // - // If so, expand the search so that the targets include - // all methods that correspond to it through interface - // satisfaction, and the scope includes the rdeps of - // the package that declares each corresponding type. - // - // 'expansions' records the packages that declared - // such types. 
- if recv := effectiveReceiver(obj); recv != nil { - if err := expandMethodSearch(ctx, snapshot, workspaceIDs, obj.(*types.Func), recv, addRdeps, globalTargets, expansions); err != nil { - return nil, err - } - } - } - - // The search functions will call report(loc) for each hit. - var ( - refsMu sync.Mutex - refs []reference - ) - report := func(loc protocol.Location, isDecl bool) { - ref := reference{ - isDeclaration: isDecl, - location: loc, - pkgPath: pkg.Metadata().PkgPath, - } - refsMu.Lock() - refs = append(refs, ref) - refsMu.Unlock() - } - - // Loop over the variants of the declaring package, - // and perform both the local (in-package) and global - // (cross-package) searches, in parallel. - // - // TODO(adonovan): opt: support LSP reference streaming. See: - // - https://github.com/microsoft/vscode-languageserver-node/pull/164 - // - https://github.com/microsoft/language-server-protocol/pull/182 - // - // Careful: this goroutine must not return before group.Wait. - var group errgroup.Group - - // Compute local references for each variant. - // The target objects are identified by (URI, offset). - for _, m := range variants { - // We want the ordinary importable package, - // plus any test-augmented variants, since - // declarations in _test.go files may change - // the reference of a selection, or even a - // field into a method or vice versa. - // - // But we don't need intermediate test variants, - // as their local references will be covered - // already by other variants. - if m.IsIntermediateTestVariant() { - continue - } - m := m - group.Go(func() error { - // TODO(adonovan): opt: batch these TypeChecks. - pkgs, err := snapshot.TypeCheck(ctx, m.ID) - if err != nil { - return err - } - pkg := pkgs[0] - - // Find the declaration of the corresponding - // object in this package based on (URI, offset). 
- pgf, err := pkg.File(declURI) - if err != nil { - return err - } - pos, err := safetoken.Pos(pgf.Tok, declPosn.Offset) - if err != nil { - return err - } - objects, _, err := objectsAt(pkg.GetTypesInfo(), pgf.File, pos) - if err != nil { - return err // unreachable? (probably caught earlier) - } - - // Report the locations of the declaration(s). - // TODO(adonovan): what about for corresponding methods? Add tests. - for _, node := range objects { - report(mustLocation(pgf, node), true) - } - - // Convert targets map to set. - targets := make(map[types.Object]bool) - for obj := range objects { - targets[obj] = true - } - - return localReferences(pkg, targets, true, report) - }) - } - - // Also compute local references within packages that declare - // corresponding methods (see above), which expand the global search. - // The target objects are identified by (PkgPath, objectpath). - for id := range expansions { - id := id - group.Go(func() error { - // TODO(adonovan): opt: batch these TypeChecks. - pkgs, err := snapshot.TypeCheck(ctx, id) - if err != nil { - return err - } - pkg := pkgs[0] - - targets := make(map[types.Object]bool) - for objpath := range globalTargets[pkg.Metadata().PkgPath] { - obj, err := objectpath.Object(pkg.GetTypes(), objpath) - if err != nil { - // No such object, because it was - // declared only in the test variant. - continue - } - targets[obj] = true - } - - // Don't include corresponding types or methods - // since expansions did that already, and we don't - // want (e.g.) concrete -> interface -> concrete. - const correspond = false - return localReferences(pkg, targets, correspond, report) - }) - } - - // Compute global references for selected reverse dependencies. - group.Go(func() error { - var globalIDs []PackageID - for id := range globalScope { - globalIDs = append(globalIDs, id) - } - indexes, err := snapshot.References(ctx, globalIDs...) 
- if err != nil { - return err - } - for _, index := range indexes { - for _, loc := range index.Lookup(globalTargets) { - report(loc, false) - } - } - return nil - }) - - if err := group.Wait(); err != nil { - return nil, err - } - return refs, nil -} - -// expandMethodSearch expands the scope and targets of a global search -// for an exported method to include all methods in the workspace -// that correspond to it through interface satisfaction. -// -// Each package that declares a corresponding type is added to -// expansions so that we can also find local references to the type -// within the package, which of course requires type checking. -// -// The scope is expanded by a sequence of calls (not concurrent) to addRdeps. -// -// recv is the method's effective receiver type, for method-set computations. -func expandMethodSearch(ctx context.Context, snapshot Snapshot, workspaceIDs []PackageID, method *types.Func, recv types.Type, addRdeps func(id PackageID, transitive bool) error, targets map[PackagePath]map[objectpath.Path]unit, expansions map[PackageID]unit) error { - // Compute the method-set fingerprint used as a key to the global search. - key, hasMethods := methodsets.KeyOf(recv) - if !hasMethods { - return bug.Errorf("KeyOf(%s)={} yet %s is a method", recv, method) - } - // Search the methodset index of each package in the workspace. - indexes, err := snapshot.MethodSets(ctx, workspaceIDs...) - if err != nil { - return err - } - var mu sync.Mutex // guards addRdeps, targets, expansions - var group errgroup.Group - for i, index := range indexes { - i := i - index := index - group.Go(func() error { - // Consult index for matching methods. - results := index.Search(key, method.Name()) - if len(results) == 0 { - return nil - } - - // We have discovered one or more corresponding types. - id := workspaceIDs[i] - - mu.Lock() - defer mu.Unlock() - - // Expand global search scope to include rdeps of this pkg. 
- if err := addRdeps(id, true); err != nil { - return err - } - - // Mark this package so that we search within it for - // local references to the additional types/methods. - expansions[id] = unit{} - - // Add each corresponding method the to set of global search targets. - for _, res := range results { - methodPkg := PackagePath(res.PkgPath) - opaths, ok := targets[methodPkg] - if !ok { - opaths = make(map[objectpath.Path]unit) - targets[methodPkg] = opaths - } - opaths[res.ObjectPath] = unit{} - } - return nil - }) - } - return group.Wait() -} - -// localReferences traverses syntax and reports each reference to one -// of the target objects, or (if correspond is set) an object that -// corresponds to one of them via interface satisfaction. -func localReferences(pkg Package, targets map[types.Object]bool, correspond bool, report func(loc protocol.Location, isDecl bool)) error { - // If we're searching for references to a method optionally - // broaden the search to include references to corresponding - // methods of mutually assignable receiver types. - // (We use a slice, but objectsAt never returns >1 methods.) - var methodRecvs []types.Type - var methodName string // name of an arbitrary target, iff a method - if correspond { - for obj := range targets { - if t := effectiveReceiver(obj); t != nil { - methodRecvs = append(methodRecvs, t) - methodName = obj.Name() - } - } - } - - // matches reports whether obj either is or corresponds to a target. - // (Correspondence is defined as usual for interface methods.) - matches := func(obj types.Object) bool { - if containsOrigin(targets, obj) { - return true - } - if methodRecvs != nil && obj.Name() == methodName { - if orecv := effectiveReceiver(obj); orecv != nil { - for _, mrecv := range methodRecvs { - if concreteImplementsIntf(orecv, mrecv) { - return true - } - } - } - } - return false - } - - // Scan through syntax looking for uses of one of the target objects. 
- for _, pgf := range pkg.CompiledGoFiles() { - ast.Inspect(pgf.File, func(n ast.Node) bool { - if id, ok := n.(*ast.Ident); ok { - if obj, ok := pkg.GetTypesInfo().Uses[id]; ok && matches(obj) { - report(mustLocation(pgf, id), false) - } - } - return true - }) - } - return nil -} - -// effectiveReceiver returns the effective receiver type for method-set -// comparisons for obj, if it is a method, or nil otherwise. -func effectiveReceiver(obj types.Object) types.Type { - if fn, ok := obj.(*types.Func); ok { - if recv := fn.Type().(*types.Signature).Recv(); recv != nil { - return methodsets.EnsurePointer(recv.Type()) - } - } - return nil -} - -// objectsAt returns the non-empty set of objects denoted (def or use) -// by the specified position within a file syntax tree, or an error if -// none were found. -// -// The result may contain more than one element because all case -// variables of a type switch appear to be declared at the same -// position. -// -// Each object is mapped to the syntax node that was treated as an -// identifier, which is not always an ast.Ident. The second component -// of the result is the innermost node enclosing pos. -// -// TODO(adonovan): factor in common with referencedObject. -func objectsAt(info *types.Info, file *ast.File, pos token.Pos) (map[types.Object]ast.Node, ast.Node, error) { - path := pathEnclosingObjNode(file, pos) - if path == nil { - return nil, nil, ErrNoIdentFound - } - - targets := make(map[types.Object]ast.Node) - - switch leaf := path[0].(type) { - case *ast.Ident: - // If leaf represents an implicit type switch object or the type - // switch "assign" variable, expand to all of the type switch's - // implicit objects. 
- if implicits, _ := typeSwitchImplicits(info, path); len(implicits) > 0 { - for _, obj := range implicits { - targets[obj] = leaf - } - } else { - // Note: prior to go1.21, go/types issue #60372 causes the position - // a field Var T created for struct{*p.T} to be recorded at the - // start of the field type ("*") not the location of the T. - // This affects references and other gopls operations (issue #60369). - // TODO(adonovan): delete this comment when we drop support for go1.20. - - // For struct{T}, we prefer the defined field Var over the used TypeName. - obj := info.ObjectOf(leaf) - if obj == nil { - return nil, nil, fmt.Errorf("%w for %q", errNoObjectFound, leaf.Name) - } - targets[obj] = leaf - } - case *ast.ImportSpec: - // Look up the implicit *types.PkgName. - obj := info.Implicits[leaf] - if obj == nil { - return nil, nil, fmt.Errorf("%w for import %s", errNoObjectFound, UnquoteImportPath(leaf)) - } - targets[obj] = leaf - } - - if len(targets) == 0 { - return nil, nil, fmt.Errorf("objectAt: internal error: no targets") // can't happen - } - return targets, path[0], nil -} - -// mustLocation reports the location interval a syntax node, -// which must belong to m.File. -// -// Safe for use only by references and implementations. -func mustLocation(pgf *ParsedGoFile, n ast.Node) protocol.Location { - loc, err := pgf.NodeLocation(n) - if err != nil { - panic(err) // can't happen in references or implementations - } - return loc -} diff --git a/gopls/internal/lsp/source/rename.go b/gopls/internal/lsp/source/rename.go deleted file mode 100644 index 17ab2cda815..00000000000 --- a/gopls/internal/lsp/source/rename.go +++ /dev/null @@ -1,1277 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package source - -// TODO(adonovan): -// -// - method of generic concrete type -> arbitrary instances of same -// -// - make satisfy work across packages. -// -// - tests, tests, tests: -// - play with renamings in the k8s tree. -// - generics -// - error cases (e.g. conflicts) -// - renaming a symbol declared in the module cache -// (currently proceeds with half of the renaming!) -// - make sure all tests have both a local and a cross-package analogue. -// - look at coverage -// - special cases: embedded fields, interfaces, test variants, -// function-local things with uppercase names; -// packages with type errors (currently 'satisfy' rejects them), -// package with missing imports; -// -// - measure performance in k8s. -// -// - The original gorename tool assumed well-typedness, but the gopls feature -// does no such check (which actually makes it much more useful). -// Audit to ensure it is safe on ill-typed code. -// -// - Generics support was no doubt buggy before but incrementalization -// may have exacerbated it. If the problem were just about objects, -// defs and uses it would be fairly simple, but type assignability -// comes into play in the 'satisfy' check for method renamings. -// De-instantiating Vector[int] to Vector[T] changes its type. -// We need to come up with a theory for the satisfy check that -// works with generics, and across packages. We currently have no -// simple way to pass types between packages (think: objectpath for -// types), though presumably exportdata could be pressed into service. -// -// - FileID-based de-duplication of edits to different URIs for the same file. 
- -import ( - "context" - "errors" - "fmt" - "go/ast" - "go/token" - "go/types" - "path" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" - - "golang.org/x/mod/modfile" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/types/objectpath" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/diff" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/typeparams" - "golang.org/x/tools/refactor/satisfy" -) - -// A renamer holds state of a single call to renameObj, which renames -// an object (or several coupled objects) within a single type-checked -// syntax package. -type renamer struct { - pkg Package // the syntax package in which the renaming is applied - objsToUpdate map[types.Object]bool // records progress of calls to check - hadConflicts bool - conflicts []string - from, to string - satisfyConstraints map[satisfy.Constraint]bool - msets typeutil.MethodSetCache - changeMethods bool -} - -// A PrepareItem holds the result of a "prepare rename" operation: -// the source range and value of a selected identifier. -type PrepareItem struct { - Range protocol.Range - Text string -} - -// PrepareRename searches for a valid renaming at position pp. -// -// The returned usererr is intended to be displayed to the user to explain why -// the prepare fails. Probably we could eliminate the redundancy in returning -// two errors, but for now this is done defensively. -func PrepareRename(ctx context.Context, snapshot Snapshot, f FileHandle, pp protocol.Position) (_ *PrepareItem, usererr, err error) { - ctx, done := event.Start(ctx, "source.PrepareRename") - defer done() - - // Is the cursor within the package name declaration? 
- if pgf, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp); err != nil { - return nil, err, err - } else if inPackageName { - item, err := prepareRenamePackageName(ctx, snapshot, pgf) - return item, err, err - } - - // Ordinary (non-package) renaming. - // - // Type-check the current package, locate the reference at the position, - // validate the object, and report its name and range. - // - // TODO(adonovan): in all cases below, we return usererr=nil, - // which means we return (nil, nil) at the protocol - // layer. This seems like a bug, or at best an exploitation of - // knowledge of VSCode-specific behavior. Can we avoid that? - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, f.URI()) - if err != nil { - return nil, nil, err - } - pos, err := pgf.PositionPos(pp) - if err != nil { - return nil, nil, err - } - targets, node, err := objectsAt(pkg.GetTypesInfo(), pgf.File, pos) - if err != nil { - return nil, nil, err - } - var obj types.Object - for obj = range targets { - break // pick one arbitrarily - } - if err := checkRenamable(obj); err != nil { - return nil, nil, err - } - rng, err := pgf.NodeRange(node) - if err != nil { - return nil, nil, err - } - if _, isImport := node.(*ast.ImportSpec); isImport { - // We're not really renaming the import path. - rng.End = rng.Start - } - return &PrepareItem{ - Range: rng, - Text: obj.Name(), - }, nil, nil -} - -func prepareRenamePackageName(ctx context.Context, snapshot Snapshot, pgf *ParsedGoFile) (*PrepareItem, error) { - // Does the client support file renaming? - fileRenameSupported := false - for _, op := range snapshot.Options().SupportedResourceOperations { - if op == protocol.Rename { - fileRenameSupported = true - break - } - } - if !fileRenameSupported { - return nil, errors.New("can't rename package: LSP client does not support file renaming") - } - - // Check validity of the metadata for the file's containing package. 
- meta, err := NarrowestMetadataForFile(ctx, snapshot, pgf.URI) - if err != nil { - return nil, err - } - if meta.Name == "main" { - return nil, fmt.Errorf("can't rename package \"main\"") - } - if strings.HasSuffix(string(meta.Name), "_test") { - return nil, fmt.Errorf("can't rename x_test packages") - } - if meta.Module == nil { - return nil, fmt.Errorf("can't rename package: missing module information for package %q", meta.PkgPath) - } - if meta.Module.Path == string(meta.PkgPath) { - return nil, fmt.Errorf("can't rename package: package path %q is the same as module path %q", meta.PkgPath, meta.Module.Path) - } - - // Return the location of the package declaration. - rng, err := pgf.NodeRange(pgf.File.Name) - if err != nil { - return nil, err - } - return &PrepareItem{ - Range: rng, - Text: string(meta.Name), - }, nil -} - -func checkRenamable(obj types.Object) error { - switch obj := obj.(type) { - case *types.Var: - if obj.Embedded() { - return fmt.Errorf("can't rename embedded fields: rename the type directly or name the field") - } - case *types.Builtin, *types.Nil: - return fmt.Errorf("%s is built in and cannot be renamed", obj.Name()) - } - if obj.Pkg() == nil || obj.Pkg().Path() == "unsafe" { - // e.g. error.Error, unsafe.Pointer - return fmt.Errorf("%s is built in and cannot be renamed", obj.Name()) - } - if obj.Name() == "_" { - return errors.New("can't rename \"_\"") - } - return nil -} - -// Rename returns a map of TextEdits for each file modified when renaming a -// given identifier within a package and a boolean value of true for renaming -// package and false otherwise. -func Rename(ctx context.Context, snapshot Snapshot, f FileHandle, pp protocol.Position, newName string) (map[span.URI][]protocol.TextEdit, bool, error) { - ctx, done := event.Start(ctx, "source.Rename") - defer done() - - if !isValidIdentifier(newName) { - return nil, false, fmt.Errorf("invalid identifier to rename: %q", newName) - } - - // Cursor within package name declaration? 
- _, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp) - if err != nil { - return nil, false, err - } - - var editMap map[span.URI][]diff.Edit - if inPackageName { - editMap, err = renamePackageName(ctx, snapshot, f, PackageName(newName)) - } else { - editMap, err = renameOrdinary(ctx, snapshot, f, pp, newName) - } - if err != nil { - return nil, false, err - } - - // Convert edits to protocol form. - result := make(map[span.URI][]protocol.TextEdit) - for uri, edits := range editMap { - // Sort and de-duplicate edits. - // - // Overlapping edits may arise in local renamings (due - // to type switch implicits) and globals ones (due to - // processing multiple package variants). - // - // We assume renaming produces diffs that are all - // replacements (no adjacent insertions that might - // become reordered) and that are either identical or - // non-overlapping. - diff.SortEdits(edits) - filtered := edits[:0] - for i, edit := range edits { - if i == 0 || edit != filtered[len(filtered)-1] { - filtered = append(filtered, edit) - } - } - edits = filtered - - // TODO(adonovan): the logic above handles repeat edits to the - // same file URI (e.g. as a member of package p and p_test) but - // is not sufficient to handle file-system level aliasing arising - // from symbolic or hard links. For that, we should use a - // robustio-FileID-keyed map. - // See https://go.dev/cl/457615 for example. - // This really occurs in practice, e.g. kubernetes has - // vendor/k8s.io/kubectl -> ../../staging/src/k8s.io/kubectl. 
- fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, false, err - } - data, err := fh.Content() - if err != nil { - return nil, false, err - } - m := protocol.NewMapper(uri, data) - protocolEdits, err := ToProtocolEdits(m, edits) - if err != nil { - return nil, false, err - } - result[uri] = protocolEdits - } - - return result, inPackageName, nil -} - -// renameOrdinary renames an ordinary (non-package) name throughout the workspace. -func renameOrdinary(ctx context.Context, snapshot Snapshot, f FileHandle, pp protocol.Position, newName string) (map[span.URI][]diff.Edit, error) { - // Type-check the referring package and locate the object(s). - // - // Unlike NarrowestPackageForFile, this operation prefers the - // widest variant as, for non-exported identifiers, it is the - // only package we need. (In case you're wondering why - // 'references' doesn't also want the widest variant: it - // computes the union across all variants.) - var targets map[types.Object]ast.Node - var pkg Package - { - metas, err := snapshot.MetadataForFile(ctx, f.URI()) - if err != nil { - return nil, err - } - RemoveIntermediateTestVariants(&metas) - if len(metas) == 0 { - return nil, fmt.Errorf("no package metadata for file %s", f.URI()) - } - widest := metas[len(metas)-1] // widest variant may include _test.go files - pkgs, err := snapshot.TypeCheck(ctx, widest.ID) - if err != nil { - return nil, err - } - pkg = pkgs[0] - pgf, err := pkg.File(f.URI()) - if err != nil { - return nil, err // "can't happen" - } - pos, err := pgf.PositionPos(pp) - if err != nil { - return nil, err - } - objects, _, err := objectsAt(pkg.GetTypesInfo(), pgf.File, pos) - if err != nil { - return nil, err - } - targets = objects - } - - // Pick a representative object arbitrarily. - // (All share the same name, pos, and kind.) 
- var obj types.Object - for obj = range targets { - break - } - if obj.Name() == newName { - return nil, fmt.Errorf("old and new names are the same: %s", newName) - } - if err := checkRenamable(obj); err != nil { - return nil, err - } - - // Find objectpath, if object is exported ("" otherwise). - var declObjPath objectpath.Path - if obj.Exported() { - // objectpath.For requires the origin of a generic function or type, not an - // instantiation (a bug?). Unfortunately we can't call Func.Origin as this - // is not available in go/types@go1.18. So we take a scenic route. - // - // Note that unlike Funcs, TypeNames are always canonical (they are "left" - // of the type parameters, unlike methods). - switch obj.(type) { // avoid "obj :=" since cases reassign the var - case *types.TypeName: - if _, ok := obj.Type().(*typeparams.TypeParam); ok { - // As with capitalized function parameters below, type parameters are - // local. - goto skipObjectPath - } - case *types.Func: - obj = funcOrigin(obj.(*types.Func)) - case *types.Var: - // TODO(adonovan): do vars need the origin treatment too? (issue #58462) - - // Function parameter and result vars that are (unusually) - // capitalized are technically exported, even though they - // cannot be referenced, because they may affect downstream - // error messages. But we can safely treat them as local. - // - // This is not merely an optimization: the renameExported - // operation gets confused by such vars. It finds them from - // objectpath, the classifies them as local vars, but as - // they came from export data they lack syntax and the - // correct scope tree (issue #61294). - if !obj.(*types.Var).IsField() && !isPackageLevel(obj) { - goto skipObjectPath - } - } - if path, err := objectpath.For(obj); err == nil { - declObjPath = path - } - skipObjectPath: - } - - // Nonexported? Search locally. 
- if declObjPath == "" { - var objects []types.Object - for obj := range targets { - objects = append(objects, obj) - } - editMap, _, err := renameObjects(newName, pkg, objects...) - return editMap, err - } - - // Exported: search globally. - // - // For exported package-level var/const/func/type objects, the - // search scope is just the direct importers. - // - // For exported fields and methods, the scope is the - // transitive rdeps. (The exportedness of the field's struct - // or method's receiver is irrelevant.) - transitive := false - switch obj.(type) { - case *types.TypeName: - // Renaming an exported package-level type - // requires us to inspect all transitive rdeps - // in the event that the type is embedded. - // - // TODO(adonovan): opt: this is conservative - // but inefficient. Instead, expand the scope - // of the search only if we actually encounter - // an embedding of the type, and only then to - // the rdeps of the embedding package. - if obj.Parent() == obj.Pkg().Scope() { - transitive = true - } - - case *types.Var: - if obj.(*types.Var).IsField() { - transitive = true // field - } - - // TODO(adonovan): opt: process only packages that - // contain a reference (xrefs) to the target field. - - case *types.Func: - if obj.Type().(*types.Signature).Recv() != nil { - transitive = true // method - } - - // It's tempting to optimize by skipping - // packages that don't contain a reference to - // the method in the xrefs index, but we still - // need to apply the satisfy check to those - // packages to find assignment statements that - // might expands the scope of the renaming. - } - - // Type-check all the packages to inspect. - declURI := span.URIFromPath(pkg.FileSet().File(obj.Pos()).Name()) - pkgs, err := typeCheckReverseDependencies(ctx, snapshot, declURI, transitive) - if err != nil { - return nil, err - } - - // Apply the renaming to the (initial) object. 
- declPkgPath := PackagePath(obj.Pkg().Path()) - return renameExported(pkgs, declPkgPath, declObjPath, newName) -} - -// funcOrigin is a go1.18-portable implementation of (*types.Func).Origin. -func funcOrigin(fn *types.Func) *types.Func { - // Method? - if fn.Type().(*types.Signature).Recv() != nil { - return typeparams.OriginMethod(fn) - } - - // Package-level function? - // (Assume the origin has the same position.) - gen := fn.Pkg().Scope().Lookup(fn.Name()) - if gen != nil && gen.Pos() == fn.Pos() { - return gen.(*types.Func) - } - - return fn -} - -// typeCheckReverseDependencies returns the type-checked packages for -// the reverse dependencies of all packages variants containing -// file declURI. The packages are in some topological order. -// -// It includes all variants (even intermediate test variants) for the -// purposes of computing reverse dependencies, but discards ITVs for -// the actual renaming work. -// -// (This neglects obscure edge cases where a _test.go file changes the -// selectors used only in an ITV, but life is short. Also sin must be -// punished.) -func typeCheckReverseDependencies(ctx context.Context, snapshot Snapshot, declURI span.URI, transitive bool) ([]Package, error) { - variants, err := snapshot.MetadataForFile(ctx, declURI) - if err != nil { - return nil, err - } - // variants must include ITVs for the reverse dependency - // computation, but they are filtered out before we typecheck. - allRdeps := make(map[PackageID]*Metadata) - for _, variant := range variants { - rdeps, err := snapshot.ReverseDependencies(ctx, variant.ID, transitive) - if err != nil { - return nil, err - } - allRdeps[variant.ID] = variant // include self - for id, meta := range rdeps { - allRdeps[id] = meta - } - } - var ids []PackageID - for id, meta := range allRdeps { - if meta.IsIntermediateTestVariant() { - continue - } - ids = append(ids, id) - } - - // Sort the packages into some topological order of the - // (unfiltered) metadata graph. 
- SortPostOrder(snapshot, ids) - - // Dependencies must be visited first since they can expand - // the search set. Ideally we would process the (filtered) set - // of packages in the parallel postorder of the snapshot's - // (unfiltered) metadata graph, but this is quite tricky - // without a good graph abstraction. - // - // For now, we visit packages sequentially in order of - // ascending height, like an inverted breadth-first search. - // - // Type checking is by far the dominant cost, so - // overlapping it with renaming may not be worthwhile. - return snapshot.TypeCheck(ctx, ids...) -} - -// SortPostOrder sorts the IDs so that if x depends on y, then y appears before x. -func SortPostOrder(meta MetadataSource, ids []PackageID) { - postorder := make(map[PackageID]int) - order := 0 - var visit func(PackageID) - visit = func(id PackageID) { - if _, ok := postorder[id]; !ok { - postorder[id] = -1 // break recursion - if m := meta.Metadata(id); m != nil { - for _, depID := range m.DepsByPkgPath { - visit(depID) - } - } - order++ - postorder[id] = order - } - } - for _, id := range ids { - visit(id) - } - sort.Slice(ids, func(i, j int) bool { - return postorder[ids[i]] < postorder[ids[j]] - }) -} - -// renameExported renames the object denoted by (pkgPath, objPath) -// within the specified packages, along with any other objects that -// must be renamed as a consequence. The slice of packages must be -// topologically ordered. -func renameExported(pkgs []Package, declPkgPath PackagePath, declObjPath objectpath.Path, newName string) (map[span.URI][]diff.Edit, error) { - - // A target is a name for an object that is stable across types.Packages. - type target struct { - pkg PackagePath - obj objectpath.Path - } - - // Populate the initial set of target objects. - // This set may grow as we discover the consequences of each renaming. 
- // - // TODO(adonovan): strictly, each cone of reverse dependencies - // of a single variant should have its own target map that - // monotonically expands as we go up the import graph, because - // declarations in test files can alter the set of - // package-level names and change the meaning of field and - // method selectors. So if we parallelize the graph - // visitation (see above), we should also compute the targets - // as a union of dependencies. - // - // Or we could decide that the logic below is fast enough not - // to need parallelism. In small measurements so far the - // type-checking step is about 95% and the renaming only 5%. - targets := map[target]bool{{declPkgPath, declObjPath}: true} - - // Apply the renaming operation to each package. - allEdits := make(map[span.URI][]diff.Edit) - for _, pkg := range pkgs { - - // Resolved target objects within package pkg. - var objects []types.Object - for t := range targets { - p := pkg.DependencyTypes(t.pkg) - if p == nil { - continue // indirect dependency of no consequence - } - obj, err := objectpath.Object(p, t.obj) - if err != nil { - // Possibly a method or an unexported type - // that is not reachable through export data? - // See https://github.com/golang/go/issues/60789. - // - // TODO(adonovan): it seems unsatisfactory that Object - // should return an error for a "valid" path. Perhaps - // we should define such paths as invalid and make - // objectpath.For compute reachability? - // Would that be a compatible change? - continue - } - objects = append(objects, obj) - } - if len(objects) == 0 { - continue // no targets of consequence to this package - } - - // Apply the renaming. - editMap, moreObjects, err := renameObjects(newName, pkg, objects...) - if err != nil { - return nil, err - } - - // It is safe to concatenate the edits as they are non-overlapping - // (or identical, in which case they will be de-duped by Rename). 
- for uri, edits := range editMap { - allEdits[uri] = append(allEdits[uri], edits...) - } - - // Expand the search set? - for obj := range moreObjects { - objpath, err := objectpath.For(obj) - if err != nil { - continue // not exported - } - target := target{PackagePath(obj.Pkg().Path()), objpath} - targets[target] = true - - // TODO(adonovan): methods requires dynamic - // programming of the product targets x - // packages as any package might add a new - // target (from a foward dep) as a - // consequence, and any target might imply a - // new set of rdeps. See golang/go#58461. - } - } - - return allEdits, nil -} - -// renamePackageName renames package declarations, imports, and go.mod files. -func renamePackageName(ctx context.Context, s Snapshot, f FileHandle, newName PackageName) (map[span.URI][]diff.Edit, error) { - // Rename the package decl and all imports. - renamingEdits, err := renamePackage(ctx, s, f, newName) - if err != nil { - return nil, err - } - - // Update the last component of the file's enclosing directory. - oldBase := filepath.Dir(f.URI().Filename()) - newPkgDir := filepath.Join(filepath.Dir(oldBase), string(newName)) - - // Update any affected replace directives in go.mod files. - // TODO(adonovan): extract into its own function. - // - // Get all workspace modules. - // TODO(adonovan): should this operate on all go.mod files, - // irrespective of whether they are included in the workspace? 
- modFiles := s.ModFiles() - for _, m := range modFiles { - fh, err := s.ReadFile(ctx, m) - if err != nil { - return nil, err - } - pm, err := s.ParseMod(ctx, fh) - if err != nil { - return nil, err - } - - modFileDir := filepath.Dir(pm.URI.Filename()) - affectedReplaces := []*modfile.Replace{} - - // Check if any replace directives need to be fixed - for _, r := range pm.File.Replace { - if !strings.HasPrefix(r.New.Path, "/") && !strings.HasPrefix(r.New.Path, "./") && !strings.HasPrefix(r.New.Path, "../") { - continue - } - - replacedPath := r.New.Path - if strings.HasPrefix(r.New.Path, "./") || strings.HasPrefix(r.New.Path, "../") { - replacedPath = filepath.Join(modFileDir, r.New.Path) - } - - // TODO: Is there a risk of converting a '\' delimited replacement to a '/' delimited replacement? - if !strings.HasPrefix(filepath.ToSlash(replacedPath)+"/", filepath.ToSlash(oldBase)+"/") { - continue // not affected by the package renaming - } - - affectedReplaces = append(affectedReplaces, r) - } - - if len(affectedReplaces) == 0 { - continue - } - copied, err := modfile.Parse("", pm.Mapper.Content, nil) - if err != nil { - return nil, err - } - - for _, r := range affectedReplaces { - replacedPath := r.New.Path - if strings.HasPrefix(r.New.Path, "./") || strings.HasPrefix(r.New.Path, "../") { - replacedPath = filepath.Join(modFileDir, r.New.Path) - } - - suffix := strings.TrimPrefix(replacedPath, string(oldBase)) - - newReplacedPath, err := filepath.Rel(modFileDir, newPkgDir+suffix) - if err != nil { - return nil, err - } - - newReplacedPath = filepath.ToSlash(newReplacedPath) - - if !strings.HasPrefix(newReplacedPath, "/") && !strings.HasPrefix(newReplacedPath, "../") { - newReplacedPath = "./" + newReplacedPath - } - - if err := copied.AddReplace(r.Old.Path, "", newReplacedPath, ""); err != nil { - return nil, err - } - } - - copied.Cleanup() - newContent, err := copied.Format() - if err != nil { - return nil, err - } - - // Calculate the edits to be made due to the 
change. - edits := s.Options().ComputeEdits(string(pm.Mapper.Content), string(newContent)) - renamingEdits[pm.URI] = append(renamingEdits[pm.URI], edits...) - } - - return renamingEdits, nil -} - -// renamePackage computes all workspace edits required to rename the package -// described by the given metadata, to newName, by renaming its package -// directory. -// -// It updates package clauses and import paths for the renamed package as well -// as any other packages affected by the directory renaming among all packages -// known to the snapshot. -func renamePackage(ctx context.Context, s Snapshot, f FileHandle, newName PackageName) (map[span.URI][]diff.Edit, error) { - if strings.HasSuffix(string(newName), "_test") { - return nil, fmt.Errorf("cannot rename to _test package") - } - - // We need metadata for the relevant package and module paths. - // These should be the same for all packages containing the file. - meta, err := NarrowestMetadataForFile(ctx, s, f.URI()) - if err != nil { - return nil, err - } - - oldPkgPath := meta.PkgPath - if meta.Module == nil { - return nil, fmt.Errorf("cannot rename package: missing module information for package %q", meta.PkgPath) - } - modulePath := PackagePath(meta.Module.Path) - if modulePath == oldPkgPath { - return nil, fmt.Errorf("cannot rename package: module path %q is the same as the package path, so renaming the package directory would have no effect", modulePath) - } - - newPathPrefix := path.Join(path.Dir(string(oldPkgPath)), string(newName)) - - // We must inspect all packages, not just direct importers, - // because we also rename subpackages, which may be unrelated. - // (If the renamed package imports a subpackage it may require - // edits to both its package and import decls.) - allMetadata, err := s.AllMetadata(ctx) - if err != nil { - return nil, err - } - - // Rename package and import declarations in all relevant packages. 
- edits := make(map[span.URI][]diff.Edit) - for _, m := range allMetadata { - // Special case: x_test packages for the renamed package will not have the - // package path as a dir prefix, but still need their package clauses - // renamed. - if m.PkgPath == oldPkgPath+"_test" { - if err := renamePackageClause(ctx, m, s, newName+"_test", edits); err != nil { - return nil, err - } - continue - } - - // Subtle: check this condition before checking for valid module info - // below, because we should not fail this operation if unrelated packages - // lack module info. - if !strings.HasPrefix(string(m.PkgPath)+"/", string(oldPkgPath)+"/") { - continue // not affected by the package renaming - } - - if m.Module == nil { - // This check will always fail under Bazel. - return nil, fmt.Errorf("cannot rename package: missing module information for package %q", m.PkgPath) - } - - if modulePath != PackagePath(m.Module.Path) { - continue // don't edit imports if nested package and renaming package have different module paths - } - - // Renaming a package consists of changing its import path and package name. - suffix := strings.TrimPrefix(string(m.PkgPath), string(oldPkgPath)) - newPath := newPathPrefix + suffix - - pkgName := m.Name - if m.PkgPath == oldPkgPath { - pkgName = PackageName(newName) - - if err := renamePackageClause(ctx, m, s, newName, edits); err != nil { - return nil, err - } - } - - imp := ImportPath(newPath) // TODO(adonovan): what if newPath has vendor/ prefix? - if err := renameImports(ctx, s, m, imp, pkgName, edits); err != nil { - return nil, err - } - } - - return edits, nil -} - -// renamePackageClause computes edits renaming the package clause of files in -// the package described by the given metadata, to newName. -// -// Edits are written into the edits map. 
-func renamePackageClause(ctx context.Context, m *Metadata, snapshot Snapshot, newName PackageName, edits map[span.URI][]diff.Edit) error { - // Rename internal references to the package in the renaming package. - for _, uri := range m.CompiledGoFiles { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return err - } - f, err := snapshot.ParseGo(ctx, fh, ParseHeader) - if err != nil { - return err - } - if f.File.Name == nil { - continue // no package declaration - } - - edit, err := posEdit(f.Tok, f.File.Name.Pos(), f.File.Name.End(), string(newName)) - if err != nil { - return err - } - edits[f.URI] = append(edits[f.URI], edit) - } - - return nil -} - -// renameImports computes the set of edits to imports resulting from renaming -// the package described by the given metadata, to a package with import path -// newPath and name newName. -// -// Edits are written into the edits map. -func renameImports(ctx context.Context, snapshot Snapshot, m *Metadata, newPath ImportPath, newName PackageName, allEdits map[span.URI][]diff.Edit) error { - rdeps, err := snapshot.ReverseDependencies(ctx, m.ID, false) // find direct importers - if err != nil { - return err - } - - // Pass 1: rename import paths in import declarations. - needsTypeCheck := make(map[PackageID][]span.URI) - for _, rdep := range rdeps { - if rdep.IsIntermediateTestVariant() { - continue // for renaming, these variants are redundant - } - - for _, uri := range rdep.CompiledGoFiles { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return err - } - f, err := snapshot.ParseGo(ctx, fh, ParseHeader) - if err != nil { - return err - } - if f.File.Name == nil { - continue // no package declaration - } - for _, imp := range f.File.Imports { - if rdep.DepsByImpPath[UnquoteImportPath(imp)] != m.ID { - continue // not the import we're looking for - } - - // If the import does not explicitly specify - // a local name, then we need to invoke the - // type checker to locate references to update. 
- // - // TODO(adonovan): is this actually true? - // Renaming an import with a local name can still - // cause conflicts: shadowing of built-ins, or of - // package-level decls in the same or another file. - if imp.Name == nil { - needsTypeCheck[rdep.ID] = append(needsTypeCheck[rdep.ID], uri) - } - - // Create text edit for the import path (string literal). - edit, err := posEdit(f.Tok, imp.Path.Pos(), imp.Path.End(), strconv.Quote(string(newPath))) - if err != nil { - return err - } - allEdits[uri] = append(allEdits[uri], edit) - } - } - } - - // If the imported package's name hasn't changed, - // we don't need to rename references within each file. - if newName == m.Name { - return nil - } - - // Pass 2: rename local name (types.PkgName) of imported - // package throughout one or more files of the package. - ids := make([]PackageID, 0, len(needsTypeCheck)) - for id := range needsTypeCheck { - ids = append(ids, id) - } - pkgs, err := snapshot.TypeCheck(ctx, ids...) - if err != nil { - return err - } - for i, id := range ids { - pkg := pkgs[i] - for _, uri := range needsTypeCheck[id] { - f, err := pkg.File(uri) - if err != nil { - return err - } - for _, imp := range f.File.Imports { - if imp.Name != nil { - continue // has explicit local name - } - if rdeps[id].DepsByImpPath[UnquoteImportPath(imp)] != m.ID { - continue // not the import we're looking for - } - - pkgname := pkg.GetTypesInfo().Implicits[imp].(*types.PkgName) - - pkgScope := pkg.GetTypes().Scope() - fileScope := pkg.GetTypesInfo().Scopes[f.File] - - localName := string(newName) - try := 0 - - // Keep trying with fresh names until one succeeds. - // - // TODO(adonovan): fix: this loop is not sufficient to choose a name - // that is guaranteed to be conflict-free; renameObj may still fail. - // So the retry loop should be around renameObj, and we shouldn't - // bother with scopes here. 
- for fileScope.Lookup(localName) != nil || pkgScope.Lookup(localName) != nil { - try++ - localName = fmt.Sprintf("%s%d", newName, try) - } - - // renameObj detects various conflicts, including: - // - new name conflicts with a package-level decl in this file; - // - new name hides a package-level decl in another file that - // is actually referenced in this file; - // - new name hides a built-in that is actually referenced - // in this file; - // - a reference in this file to the old package name would - // become shadowed by an intervening declaration that - // uses the new name. - // It returns the edits if no conflict was detected. - editMap, _, err := renameObjects(localName, pkg, pkgname) - if err != nil { - return err - } - - // If the chosen local package name matches the package's - // new name, delete the change that would have inserted - // an explicit local name, which is always the lexically - // first change. - if localName == string(newName) { - edits, ok := editMap[uri] - if !ok { - return fmt.Errorf("internal error: no changes for %s", uri) - } - diff.SortEdits(edits) - editMap[uri] = edits[1:] - } - for uri, edits := range editMap { - allEdits[uri] = append(allEdits[uri], edits...) - } - } - } - } - return nil -} - -// renameObjects computes the edits to the type-checked syntax package pkg -// required to rename a set of target objects to newName. -// -// It also returns the set of objects that were found (due to -// corresponding methods and embedded fields) to require renaming as a -// consequence of the requested renamings. -// -// It returns an error if the renaming would cause a conflict. 
-func renameObjects(newName string, pkg Package, targets ...types.Object) (map[span.URI][]diff.Edit, map[types.Object]bool, error) { - r := renamer{ - pkg: pkg, - objsToUpdate: make(map[types.Object]bool), - from: targets[0].Name(), - to: newName, - } - - // A renaming initiated at an interface method indicates the - // intention to rename abstract and concrete methods as needed - // to preserve assignability. - // TODO(adonovan): pull this into the caller. - for _, obj := range targets { - if obj, ok := obj.(*types.Func); ok { - recv := obj.Type().(*types.Signature).Recv() - if recv != nil && types.IsInterface(recv.Type().Underlying()) { - r.changeMethods = true - break - } - } - } - - // Check that the renaming of the identifier is ok. - for _, obj := range targets { - r.check(obj) - if len(r.conflicts) > 0 { - // Stop at first error. - return nil, nil, fmt.Errorf("%s", strings.Join(r.conflicts, "\n")) - } - } - - editMap, err := r.update() - if err != nil { - return nil, nil, err - } - - // Remove initial targets so that only 'consequences' remain. - for _, obj := range targets { - delete(r.objsToUpdate, obj) - } - return editMap, r.objsToUpdate, nil -} - -// Rename all references to the target objects. -func (r *renamer) update() (map[span.URI][]diff.Edit, error) { - result := make(map[span.URI][]diff.Edit) - - // shouldUpdate reports whether obj is one of (or an - // instantiation of one of) the target objects. - shouldUpdate := func(obj types.Object) bool { - return containsOrigin(r.objsToUpdate, obj) - } - - // Find all identifiers in the package that define or use a - // renamed object. We iterate over info as it is more efficient - // than calling ast.Inspect for each of r.pkg.CompiledGoFiles(). 
- type item struct { - node ast.Node // Ident, ImportSpec (obj=PkgName), or CaseClause (obj=Var) - obj types.Object - isDef bool - } - var items []item - info := r.pkg.GetTypesInfo() - for id, obj := range info.Uses { - if shouldUpdate(obj) { - items = append(items, item{id, obj, false}) - } - } - for id, obj := range info.Defs { - if shouldUpdate(obj) { - items = append(items, item{id, obj, true}) - } - } - for node, obj := range info.Implicits { - if shouldUpdate(obj) { - switch node.(type) { - case *ast.ImportSpec, *ast.CaseClause: - items = append(items, item{node, obj, true}) - } - } - } - sort.Slice(items, func(i, j int) bool { - return items[i].node.Pos() < items[j].node.Pos() - }) - - // Update each identifier. - for _, item := range items { - pgf, ok := enclosingFile(r.pkg, item.node.Pos()) - if !ok { - bug.Reportf("edit does not belong to syntax of package %q", r.pkg) - continue - } - - // Renaming a types.PkgName may result in the addition or removal of an identifier, - // so we deal with this separately. - if pkgName, ok := item.obj.(*types.PkgName); ok && item.isDef { - edit, err := r.updatePkgName(pgf, pkgName) - if err != nil { - return nil, err - } - result[pgf.URI] = append(result[pgf.URI], edit) - continue - } - - // Workaround the unfortunate lack of a Var object - // for x in "switch x := expr.(type) {}" by adjusting - // the case clause to the switch ident. - // This may result in duplicate edits, but we de-dup later. - if _, ok := item.node.(*ast.CaseClause); ok { - path, _ := astutil.PathEnclosingInterval(pgf.File, item.obj.Pos(), item.obj.Pos()) - item.node = path[0].(*ast.Ident) - } - - // Replace the identifier with r.to. - edit, err := posEdit(pgf.Tok, item.node.Pos(), item.node.End(), r.to) - if err != nil { - return nil, err - } - - result[pgf.URI] = append(result[pgf.URI], edit) - - if !item.isDef { // uses do not have doc comments to update. 
- continue - } - - doc := docComment(pgf, item.node.(*ast.Ident)) - if doc == nil { - continue - } - - // Perform the rename in doc comments declared in the original package. - // go/parser strips out \r\n returns from the comment text, so go - // line-by-line through the comment text to get the correct positions. - docRegexp := regexp.MustCompile(`\b` + r.from + `\b`) // valid identifier => valid regexp - for _, comment := range doc.List { - if isDirective(comment.Text) { - continue - } - // TODO(adonovan): why are we looping over lines? - // Just run the loop body once over the entire multiline comment. - lines := strings.Split(comment.Text, "\n") - tokFile := pgf.Tok - commentLine := safetoken.Line(tokFile, comment.Pos()) - uri := span.URIFromPath(tokFile.Name()) - for i, line := range lines { - lineStart := comment.Pos() - if i > 0 { - lineStart = tokFile.LineStart(commentLine + i) - } - for _, locs := range docRegexp.FindAllIndex([]byte(line), -1) { - edit, err := posEdit(tokFile, lineStart+token.Pos(locs[0]), lineStart+token.Pos(locs[1]), r.to) - if err != nil { - return nil, err // can't happen - } - result[uri] = append(result[uri], edit) - } - } - } - } - - return result, nil -} - -// docComment returns the doc for an identifier within the specified file. -func docComment(pgf *ParsedGoFile, id *ast.Ident) *ast.CommentGroup { - nodes, _ := astutil.PathEnclosingInterval(pgf.File, id.Pos(), id.End()) - for _, node := range nodes { - switch decl := node.(type) { - case *ast.FuncDecl: - return decl.Doc - case *ast.Field: - return decl.Doc - case *ast.GenDecl: - return decl.Doc - // For {Type,Value}Spec, if the doc on the spec is absent, - // search for the enclosing GenDecl - case *ast.TypeSpec: - if decl.Doc != nil { - return decl.Doc - } - case *ast.ValueSpec: - if decl.Doc != nil { - return decl.Doc - } - case *ast.Ident: - case *ast.AssignStmt: - // *ast.AssignStmt doesn't have an associated comment group. 
- // So, we try to find a comment just before the identifier. - - // Try to find a comment group only for short variable declarations (:=). - if decl.Tok != token.DEFINE { - return nil - } - - identLine := safetoken.Line(pgf.Tok, id.Pos()) - for _, comment := range nodes[len(nodes)-1].(*ast.File).Comments { - if comment.Pos() > id.Pos() { - // Comment is after the identifier. - continue - } - - lastCommentLine := safetoken.Line(pgf.Tok, comment.End()) - if lastCommentLine+1 == identLine { - return comment - } - } - default: - return nil - } - } - return nil -} - -// updatePkgName returns the updates to rename a pkgName in the import spec by -// only modifying the package name portion of the import declaration. -func (r *renamer) updatePkgName(pgf *ParsedGoFile, pkgName *types.PkgName) (diff.Edit, error) { - // Modify ImportSpec syntax to add or remove the Name as needed. - path, _ := astutil.PathEnclosingInterval(pgf.File, pkgName.Pos(), pkgName.Pos()) - if len(path) < 2 { - return diff.Edit{}, fmt.Errorf("no path enclosing interval for %s", pkgName.Name()) - } - spec, ok := path[1].(*ast.ImportSpec) - if !ok { - return diff.Edit{}, fmt.Errorf("failed to update PkgName for %s", pkgName.Name()) - } - - newText := "" - if pkgName.Imported().Name() != r.to { - newText = r.to + " " - } - - // Replace the portion (possibly empty) of the spec before the path: - // local "path" or "path" - // -> <- -><- - return posEdit(pgf.Tok, spec.Pos(), spec.Path.Pos(), newText) -} - -// parsePackageNameDecl is a convenience function that parses and -// returns the package name declaration of file fh, and reports -// whether the position ppos lies within it. -// -// Note: also used by references. 
-func parsePackageNameDecl(ctx context.Context, snapshot Snapshot, fh FileHandle, ppos protocol.Position) (*ParsedGoFile, bool, error) { - pgf, err := snapshot.ParseGo(ctx, fh, ParseHeader) - if err != nil { - return nil, false, err - } - // Careful: because we used ParseHeader, - // pgf.Pos(ppos) may be beyond EOF => (0, err). - pos, _ := pgf.PositionPos(ppos) - return pgf, pgf.File.Name.Pos() <= pos && pos <= pgf.File.Name.End(), nil -} - -// enclosingFile returns the CompiledGoFile of pkg that contains the specified position. -func enclosingFile(pkg Package, pos token.Pos) (*ParsedGoFile, bool) { - for _, pgf := range pkg.CompiledGoFiles() { - if pgf.File.Pos() <= pos && pos <= pgf.File.End() { - return pgf, true - } - } - return nil, false -} - -// posEdit returns an edit to replace the (start, end) range of tf with 'new'. -func posEdit(tf *token.File, start, end token.Pos, new string) (diff.Edit, error) { - startOffset, endOffset, err := safetoken.Offsets(tf, start, end) - if err != nil { - return diff.Edit{}, err - } - return diff.Edit{Start: startOffset, End: endOffset, New: new}, nil -} diff --git a/gopls/internal/lsp/source/signature_help.go b/gopls/internal/lsp/source/signature_help.go deleted file mode 100644 index dc45322b864..00000000000 --- a/gopls/internal/lsp/source/signature_help.go +++ /dev/null @@ -1,198 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package source - -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - "strings" - - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/event" -) - -func SignatureHelp(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) (*protocol.SignatureInformation, int, error) { - ctx, done := event.Start(ctx, "source.SignatureHelp") - defer done() - - // We need full type-checking here, as we must type-check function bodies in - // order to provide signature help at the requested position. - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, 0, fmt.Errorf("getting file for SignatureHelp: %w", err) - } - pos, err := pgf.PositionPos(position) - if err != nil { - return nil, 0, err - } - // Find a call expression surrounding the query position. - var callExpr *ast.CallExpr - path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos) - if path == nil { - return nil, 0, fmt.Errorf("cannot find node enclosing position") - } -FindCall: - for _, node := range path { - switch node := node.(type) { - case *ast.CallExpr: - if pos >= node.Lparen && pos <= node.Rparen { - callExpr = node - break FindCall - } - case *ast.FuncLit, *ast.FuncType: - // The user is within an anonymous function, - // which may be the parameter to the *ast.CallExpr. - // Don't show signature help in this case. - return nil, 0, fmt.Errorf("no signature help within a function declaration") - case *ast.BasicLit: - if node.Kind == token.STRING { - return nil, 0, fmt.Errorf("no signature help within a string literal") - } - } - - } - if callExpr == nil || callExpr.Fun == nil { - return nil, 0, fmt.Errorf("cannot find an enclosing function") - } - - qf := Qualifier(pgf.File, pkg.GetTypes(), pkg.GetTypesInfo()) - - // Get the object representing the function, if available. 
- // There is no object in certain cases such as calling a function returned by - // a function (e.g. "foo()()"). - var obj types.Object - switch t := callExpr.Fun.(type) { - case *ast.Ident: - obj = pkg.GetTypesInfo().ObjectOf(t) - case *ast.SelectorExpr: - obj = pkg.GetTypesInfo().ObjectOf(t.Sel) - } - - // Built-in? - if obj != nil && !obj.Pos().IsValid() { - // built-in function? - if obj, ok := obj.(*types.Builtin); ok { - return builtinSignature(ctx, snapshot, callExpr, obj.Name(), pos) - } - - // error.Error? - if fn, ok := obj.(*types.Func); ok && fn.Name() == "Error" { - return &protocol.SignatureInformation{ - Label: "Error()", - Documentation: stringToSigInfoDocumentation("Error returns the error message.", snapshot.Options()), - }, 0, nil - } - - return nil, 0, bug.Errorf("call to unexpected built-in %v (%T)", obj, obj) - } - - // Get the type information for the function being called. - sigType := pkg.GetTypesInfo().TypeOf(callExpr.Fun) - if sigType == nil { - return nil, 0, fmt.Errorf("cannot get type for Fun %[1]T (%[1]v)", callExpr.Fun) - } - - sig, _ := sigType.Underlying().(*types.Signature) - if sig == nil { - return nil, 0, fmt.Errorf("cannot find signature for Fun %[1]T (%[1]v)", callExpr.Fun) - } - - activeParam := activeParameter(callExpr, sig.Params().Len(), sig.Variadic(), pos) - - var ( - name string - comment *ast.CommentGroup - ) - if obj != nil { - d, err := HoverDocForObject(ctx, snapshot, pkg.FileSet(), obj) - if err != nil { - return nil, 0, err - } - name = obj.Name() - comment = d - } else { - name = "func" - } - mq := MetadataQualifierForFile(snapshot, pgf.File, pkg.Metadata()) - s, err := NewSignature(ctx, snapshot, pkg, sig, comment, qf, mq) - if err != nil { - return nil, 0, err - } - paramInfo := make([]protocol.ParameterInformation, 0, len(s.params)) - for _, p := range s.params { - paramInfo = append(paramInfo, protocol.ParameterInformation{Label: p}) - } - return &protocol.SignatureInformation{ - Label: name + s.Format(), - 
Documentation: stringToSigInfoDocumentation(s.doc, snapshot.Options()), - Parameters: paramInfo, - }, activeParam, nil -} - -func builtinSignature(ctx context.Context, snapshot Snapshot, callExpr *ast.CallExpr, name string, pos token.Pos) (*protocol.SignatureInformation, int, error) { - sig, err := NewBuiltinSignature(ctx, snapshot, name) - if err != nil { - return nil, 0, err - } - paramInfo := make([]protocol.ParameterInformation, 0, len(sig.params)) - for _, p := range sig.params { - paramInfo = append(paramInfo, protocol.ParameterInformation{Label: p}) - } - activeParam := activeParameter(callExpr, len(sig.params), sig.variadic, pos) - return &protocol.SignatureInformation{ - Label: sig.name + sig.Format(), - Documentation: stringToSigInfoDocumentation(sig.doc, snapshot.Options()), - Parameters: paramInfo, - }, activeParam, nil -} - -func activeParameter(callExpr *ast.CallExpr, numParams int, variadic bool, pos token.Pos) (activeParam int) { - if len(callExpr.Args) == 0 { - return 0 - } - // First, check if the position is even in the range of the arguments. - start, end := callExpr.Lparen, callExpr.Rparen - if !(start <= pos && pos <= end) { - return 0 - } - for _, expr := range callExpr.Args { - if start == token.NoPos { - start = expr.Pos() - } - end = expr.End() - if start <= pos && pos <= end { - break - } - // Don't advance the active parameter for the last parameter of a variadic function. - if !variadic || activeParam < numParams-1 { - activeParam++ - } - start = expr.Pos() + 1 // to account for commas - } - return activeParam -} - -func stringToSigInfoDocumentation(s string, options *Options) *protocol.Or_SignatureInformation_documentation { - v := s - k := protocol.PlainText - if options.PreferredContentFormat == protocol.Markdown { - v = CommentToMarkdown(s, options) - // whether or not content is newline terminated may not matter for LSP clients, - // but our tests expect trailing newlines to be stripped. 
- v = strings.TrimSuffix(v, "\n") // TODO(pjw): change the golden files - k = protocol.Markdown - } - return &protocol.Or_SignatureInformation_documentation{ - Value: protocol.MarkupContent{ - Kind: k, - Value: v, - }, - } -} diff --git a/gopls/internal/lsp/source/stub.go b/gopls/internal/lsp/source/stub.go deleted file mode 100644 index fd2b357032c..00000000000 --- a/gopls/internal/lsp/source/stub.go +++ /dev/null @@ -1,250 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package source - -import ( - "bytes" - "context" - "fmt" - "go/format" - "go/parser" - "go/token" - "go/types" - "io" - "path" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/analysis/stubmethods" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/internal/diff" - "golang.org/x/tools/internal/tokeninternal" - "golang.org/x/tools/internal/typeparams" -) - -// stubSuggestedFixFunc returns a suggested fix to declare the missing -// methods of the concrete type that is assigned to an interface type -// at the cursor position. 
-func stubSuggestedFixFunc(ctx context.Context, snapshot Snapshot, fh FileHandle, rng protocol.Range) (*token.FileSet, *analysis.SuggestedFix, error) { - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, nil, fmt.Errorf("GetTypedFile: %w", err) - } - start, end, err := pgf.RangePos(rng) - if err != nil { - return nil, nil, err - } - nodes, _ := astutil.PathEnclosingInterval(pgf.File, start, end) - si := stubmethods.GetStubInfo(pkg.FileSet(), pkg.GetTypesInfo(), nodes, start) - if si == nil { - return nil, nil, fmt.Errorf("nil interface request") - } - return stub(ctx, snapshot, si) -} - -// stub returns a suggested fix to declare the missing methods of si.Concrete. -func stub(ctx context.Context, snapshot Snapshot, si *stubmethods.StubInfo) (*token.FileSet, *analysis.SuggestedFix, error) { - // A function-local type cannot be stubbed - // since there's nowhere to put the methods. - conc := si.Concrete.Obj() - if conc.Parent() != conc.Pkg().Scope() { - return nil, nil, fmt.Errorf("local type %q cannot be stubbed", conc.Name()) - } - - // Parse the file declaring the concrete type. - declPGF, _, err := parseFull(ctx, snapshot, si.Fset, conc.Pos()) - if err != nil { - return nil, nil, fmt.Errorf("failed to parse file %q declaring implementation type: %w", declPGF.URI, err) - } - if declPGF.Fixed() { - return nil, nil, fmt.Errorf("file contains parse errors: %s", declPGF.URI) - } - - // Build import environment for the declaring file. - importEnv := make(map[ImportPath]string) // value is local name - for _, imp := range declPGF.File.Imports { - importPath := UnquoteImportPath(imp) - var name string - if imp.Name != nil { - name = imp.Name.Name - if name == "_" { - continue - } else if name == "." { - name = "" // see types.Qualifier - } - } else { - // TODO(adonovan): may omit a vendor/ prefix; consult the Metadata. 
- name = path.Base(string(importPath)) - } - importEnv[importPath] = name // latest alias wins - } - - // Find subset of interface methods that the concrete type lacks. - var missing []*types.Func - ifaceType := si.Interface.Type().Underlying().(*types.Interface) - for i := 0; i < ifaceType.NumMethods(); i++ { - imethod := ifaceType.Method(i) - cmethod, _, _ := types.LookupFieldOrMethod(si.Concrete, si.Pointer, imethod.Pkg(), imethod.Name()) - if cmethod == nil { - missing = append(missing, imethod) - continue - } - - if _, ok := cmethod.(*types.Var); ok { - // len(LookupFieldOrMethod.index) = 1 => conflict, >1 => shadow. - return nil, nil, fmt.Errorf("adding method %s.%s would conflict with (or shadow) existing field", - conc.Name(), imethod.Name()) - } - - if !types.Identical(cmethod.Type(), imethod.Type()) { - return nil, nil, fmt.Errorf("method %s.%s already exists but has the wrong type: got %s, want %s", - conc.Name(), imethod.Name(), cmethod.Type(), imethod.Type()) - } - } - if len(missing) == 0 { - return nil, nil, fmt.Errorf("no missing methods found") - } - - // Create a package name qualifier that uses the - // locally appropriate imported package name. - // It records any needed new imports. - // TODO(adonovan): factor with source.FormatVarType, stubmethods.RelativeToFiles? - // - // Prior to CL 469155 this logic preserved any renaming - // imports from the file that declares the interface - // method--ostensibly the preferred name for imports of - // frequently renamed packages such as protobufs. - // Now we use the package's declared name. If this turns out - // to be a mistake, then use parseHeader(si.iface.Pos()). - // - type newImport struct{ name, importPath string } - var newImports []newImport // for AddNamedImport - qual := func(pkg *types.Package) string { - // TODO(adonovan): don't ignore vendor prefix. - // - // Ignore the current package import. 
- if pkg.Path() == conc.Pkg().Path() { - return "" - } - - importPath := ImportPath(pkg.Path()) - name, ok := importEnv[importPath] - if !ok { - // Insert new import using package's declared name. - // - // TODO(adonovan): resolve conflict between declared - // name and existing file-level (declPGF.File.Imports) - // or package-level (si.Concrete.Pkg.Scope) decls by - // generating a fresh name. - name = pkg.Name() - importEnv[importPath] = name - new := newImport{importPath: string(importPath)} - // For clarity, use a renaming import whenever the - // local name does not match the path's last segment. - if name != path.Base(new.importPath) { - new.name = name - } - newImports = append(newImports, new) - } - return name - } - - // Format interface name (used only in a comment). - iface := si.Interface.Name() - if ipkg := si.Interface.Pkg(); ipkg != nil && ipkg != conc.Pkg() { - iface = ipkg.Name() + "." + iface - } - - // Pointer receiver? - var star string - if si.Pointer { - star = "*" - } - - // Format the new methods. - var newMethods bytes.Buffer - for _, method := range missing { - fmt.Fprintf(&newMethods, `// %s implements %s. -func (%s%s%s) %s%s { - panic("unimplemented") -} -`, - method.Name(), - iface, - star, - si.Concrete.Obj().Name(), - FormatTypeParams(typeparams.ForNamed(si.Concrete)), - method.Name(), - strings.TrimPrefix(types.TypeString(method.Type(), qual), "func")) - } - - // Compute insertion point for new methods: - // after the top-level declaration enclosing the (package-level) type. 
- insertOffset, err := safetoken.Offset(declPGF.Tok, declPGF.File.End()) - if err != nil { - return nil, nil, bug.Errorf("internal error: end position outside file bounds: %v", err) - } - concOffset, err := safetoken.Offset(si.Fset.File(conc.Pos()), conc.Pos()) - if err != nil { - return nil, nil, bug.Errorf("internal error: finding type decl offset: %v", err) - } - for _, decl := range declPGF.File.Decls { - declEndOffset, err := safetoken.Offset(declPGF.Tok, decl.End()) - if err != nil { - return nil, nil, bug.Errorf("internal error: finding decl offset: %v", err) - } - if declEndOffset > concOffset { - insertOffset = declEndOffset - break - } - } - - // Splice the new methods into the file content. - var buf bytes.Buffer - input := declPGF.Mapper.Content // unfixed content of file - buf.Write(input[:insertOffset]) - buf.WriteByte('\n') - io.Copy(&buf, &newMethods) - buf.Write(input[insertOffset:]) - - // Re-parse the file. - fset := token.NewFileSet() - newF, err := parser.ParseFile(fset, declPGF.File.Name.Name, buf.Bytes(), parser.ParseComments) - if err != nil { - return nil, nil, fmt.Errorf("could not reparse file: %w", err) - } - - // Splice the new imports into the syntax tree. - for _, imp := range newImports { - astutil.AddNamedImport(fset, newF, imp.name, imp.importPath) - } - - // Pretty-print. - var output strings.Builder - if err := format.Node(&output, fset, newF); err != nil { - return nil, nil, fmt.Errorf("format.Node: %w", err) - } - - // Report the diff. 
- diffs := snapshot.Options().ComputeEdits(string(input), output.String()) - return tokeninternal.FileSetFor(declPGF.Tok), // edits use declPGF.Tok - &analysis.SuggestedFix{TextEdits: diffToTextEdits(declPGF.Tok, diffs)}, - nil -} - -func diffToTextEdits(tok *token.File, diffs []diff.Edit) []analysis.TextEdit { - edits := make([]analysis.TextEdit, 0, len(diffs)) - for _, edit := range diffs { - edits = append(edits, analysis.TextEdit{ - Pos: tok.Pos(edit.Start), - End: tok.Pos(edit.End), - NewText: []byte(edit.New), - }) - } - return edits -} diff --git a/gopls/internal/lsp/source/symbols.go b/gopls/internal/lsp/source/symbols.go deleted file mode 100644 index a5c015e0aa0..00000000000 --- a/gopls/internal/lsp/source/symbols.go +++ /dev/null @@ -1,227 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package source - -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/event" -) - -func DocumentSymbols(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.DocumentSymbol, error) { - ctx, done := event.Start(ctx, "source.DocumentSymbols") - defer done() - - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, fmt.Errorf("getting file for DocumentSymbols: %w", err) - } - - // Build symbols for file declarations. When encountering a declaration with - // errors (typically because positions are invalid), we skip the declaration - // entirely. VS Code fails to show any symbols if one of the top-level - // symbols is missing position information. 
- var symbols []protocol.DocumentSymbol - for _, decl := range pgf.File.Decls { - switch decl := decl.(type) { - case *ast.FuncDecl: - if decl.Name.Name == "_" { - continue - } - fs, err := funcSymbol(pgf.Mapper, pgf.Tok, decl) - if err == nil { - // If function is a method, prepend the type of the method. - if decl.Recv != nil && len(decl.Recv.List) > 0 { - fs.Name = fmt.Sprintf("(%s).%s", types.ExprString(decl.Recv.List[0].Type), fs.Name) - } - symbols = append(symbols, fs) - } - case *ast.GenDecl: - for _, spec := range decl.Specs { - switch spec := spec.(type) { - case *ast.TypeSpec: - if spec.Name.Name == "_" { - continue - } - ts, err := typeSymbol(pgf.Mapper, pgf.Tok, spec) - if err == nil { - symbols = append(symbols, ts) - } - case *ast.ValueSpec: - for _, name := range spec.Names { - if name.Name == "_" { - continue - } - vs, err := varSymbol(pgf.Mapper, pgf.Tok, spec, name, decl.Tok == token.CONST) - if err == nil { - symbols = append(symbols, vs) - } - } - } - } - } - } - return symbols, nil -} - -func funcSymbol(m *protocol.Mapper, tf *token.File, decl *ast.FuncDecl) (protocol.DocumentSymbol, error) { - s := protocol.DocumentSymbol{ - Name: decl.Name.Name, - Kind: protocol.Function, - } - if decl.Recv != nil { - s.Kind = protocol.Method - } - var err error - s.Range, err = m.NodeRange(tf, decl) - if err != nil { - return protocol.DocumentSymbol{}, err - } - s.SelectionRange, err = m.NodeRange(tf, decl.Name) - if err != nil { - return protocol.DocumentSymbol{}, err - } - s.Detail = types.ExprString(decl.Type) - return s, nil -} - -func typeSymbol(m *protocol.Mapper, tf *token.File, spec *ast.TypeSpec) (protocol.DocumentSymbol, error) { - s := protocol.DocumentSymbol{ - Name: spec.Name.Name, - } - var err error - s.Range, err = m.NodeRange(tf, spec) - if err != nil { - return protocol.DocumentSymbol{}, err - } - s.SelectionRange, err = m.NodeRange(tf, spec.Name) - if err != nil { - return protocol.DocumentSymbol{}, err - } - s.Kind, s.Detail, s.Children 
= typeDetails(m, tf, spec.Type) - return s, nil -} - -func typeDetails(m *protocol.Mapper, tf *token.File, typExpr ast.Expr) (kind protocol.SymbolKind, detail string, children []protocol.DocumentSymbol) { - switch typExpr := typExpr.(type) { - case *ast.StructType: - kind = protocol.Struct - children = fieldListSymbols(m, tf, typExpr.Fields, protocol.Field) - if len(children) > 0 { - detail = "struct{...}" - } else { - detail = "struct{}" - } - - // Find interface methods and embedded types. - case *ast.InterfaceType: - kind = protocol.Interface - children = fieldListSymbols(m, tf, typExpr.Methods, protocol.Method) - if len(children) > 0 { - detail = "interface{...}" - } else { - detail = "interface{}" - } - - case *ast.FuncType: - kind = protocol.Function - detail = types.ExprString(typExpr) - - default: - kind = protocol.Class // catch-all, for cases where we don't know the kind syntactically - detail = types.ExprString(typExpr) - } - return -} - -func fieldListSymbols(m *protocol.Mapper, tf *token.File, fields *ast.FieldList, fieldKind protocol.SymbolKind) []protocol.DocumentSymbol { - if fields == nil { - return nil - } - - var symbols []protocol.DocumentSymbol - for _, field := range fields.List { - detail, children := "", []protocol.DocumentSymbol(nil) - if field.Type != nil { - _, detail, children = typeDetails(m, tf, field.Type) - } - if len(field.Names) == 0 { // embedded interface or struct field - // By default, use the formatted type details as the name of this field. - // This handles potentially invalid syntax, as well as type embeddings in - // interfaces. - child := protocol.DocumentSymbol{ - Name: detail, - Kind: protocol.Field, // consider all embeddings to be fields - Children: children, - } - - // If the field is a valid embedding, promote the type name to field - // name. 
- selection := field.Type - if id := embeddedIdent(field.Type); id != nil { - child.Name = id.Name - child.Detail = detail - selection = id - } - - if rng, err := m.NodeRange(tf, field.Type); err == nil { - child.Range = rng - } - if rng, err := m.NodeRange(tf, selection); err == nil { - child.SelectionRange = rng - } - - symbols = append(symbols, child) - } else { - for _, name := range field.Names { - child := protocol.DocumentSymbol{ - Name: name.Name, - Kind: fieldKind, - Detail: detail, - Children: children, - } - - if rng, err := m.NodeRange(tf, field); err == nil { - child.Range = rng - } - if rng, err := m.NodeRange(tf, name); err == nil { - child.SelectionRange = rng - } - - symbols = append(symbols, child) - } - } - - } - return symbols -} - -func varSymbol(m *protocol.Mapper, tf *token.File, spec *ast.ValueSpec, name *ast.Ident, isConst bool) (protocol.DocumentSymbol, error) { - s := protocol.DocumentSymbol{ - Name: name.Name, - Kind: protocol.Variable, - } - if isConst { - s.Kind = protocol.Constant - } - var err error - s.Range, err = m.NodeRange(tf, spec) - if err != nil { - return protocol.DocumentSymbol{}, err - } - s.SelectionRange, err = m.NodeRange(tf, name) - if err != nil { - return protocol.DocumentSymbol{}, err - } - if spec.Type != nil { // type may be missing from the syntax - _, s.Detail, s.Children = typeDetails(m, tf, spec.Type) - } - return s, nil -} diff --git a/gopls/internal/lsp/source/typerefs/doc.go b/gopls/internal/lsp/source/typerefs/doc.go deleted file mode 100644 index 700da5dde26..00000000000 --- a/gopls/internal/lsp/source/typerefs/doc.go +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package typerefs extracts symbol-level reachability information -// from the syntax of a Go package. 
-// -// # Background -// -// The goal of this analysis is to determine, for each package P, a nearly -// minimal set of packages that could affect the type checking of P. This set -// may contain false positives, but the smaller this set the better we can -// invalidate and prune packages in gopls. -// -// More precisely, for each package P we define the set of "reachable" packages -// from P as the set of packages that may affect the (deep) export data of the -// direct dependencies of P. By this definition, the complement of this set -// cannot affect any information derived from type checking P, such as -// diagnostics, cross references, or method sets. Therefore we need not -// invalidate any results for P when a package in the complement of this set -// changes. -// -// # Computing references -// -// For a given declaration D, references are computed based on identifiers or -// dotted identifiers referenced in the declaration of D, that may affect -// the type of D. However, these references reflect only local knowledge of the -// package and its dependency metadata, and do not depend on any analysis of -// the dependencies themselves. This allows the reference information for -// a package to be cached independent of all others. -// -// Specifically, if a referring identifier I appears in the declaration, we -// record an edge from D to each object possibly referenced by I. We search for -// references within type syntax, but do not actually type-check, so we can't -// reliably determine whether an expression is a type or a term, or whether a -// function is a builtin or generic. For example, the type of x in var x = -// p.F(W) only depends on W if p.F is a builtin or generic function, which we -// cannot know without type-checking package p. So we may over-approximate in -// this way. -// -// - If I is declared in the current package, record a reference to its -// declaration. 
-// - Otherwise, if there are any dot imports in the current -// file and I is exported, record a (possibly dangling) edge to -// the corresponding declaration in each dot-imported package. -// -// If a dotted identifier q.I appears in the declaration, we -// perform a similar operation: -// -// - If q is declared in the current package, we record a reference to that -// object. It may be a var or const that has a field or method I. -// - Otherwise, if q is a valid import name based on imports in the current file -// and the provided metadata for dependency package names, record a -// reference to the object I in that package. -// - Additionally, handle the case where Q is exported, and Q.I may refer to -// a field or method in a dot-imported package. -// -// That is essentially the entire algorithm, though there is some subtlety to -// visiting the set of identifiers or dotted identifiers that may affect the -// declaration type. See the visitDeclOrSpec function for the details of this -// analysis. Notably, we also skip identifiers that refer to type parameters in -// generic declarations. -// -// # Graph optimizations -// -// The references extracted from the syntax are used to construct -// edges between nodes representing declarations. Edges are of two -// kinds: internal references, from one package-level declaration to -// another; and external references, from a symbol in this package to -// a symbol imported from a direct dependency. -// -// Once the symbol reference graph is constructed, we find its -// strongly connected components (SCCs) using Tarjan's algorithm. -// As we coalesce the nodes of each SCC we compute the union of -// external references reached by each package-level declaration. -// The final result is the mapping from each exported package-level -// declaration to the set of external (imported) declarations that it -// reaches. 
-// -// Because it is common for many package members to have the same -// reachability, the result takes the form of a set of equivalence -// classes, each mapping a set of package-level declarations to a set -// of external symbols. We use a hash table to canonicalize sets so that -// repeated occurrences of the same set (which are common) are only -// represented once in memory or in the file system. -// For example, all declarations that ultimately reference only -// {fmt.Println,strings.Join} would be classed as equivalent. -// -// This approach was inspired by the Hash-Value Numbering (HVN) -// optimization described by Hardekopf and Lin. See -// golang.org/x/tools/go/pointer/hvn.go for an implementation. (Like -// pointer analysis, this problem is fundamentally one of graph -// reachability.) The HVN algorithm takes the compression a step -// further by preserving the topology of the SCC DAG, in which edges -// represent "is a superset of" constraints. Redundant edges that -// don't increase the solution can be deleted. We could apply the same -// technique here to further reduce the worst-case size of the result, -// but the current implementation seems adequate. -// -// # API -// -// The main entry point for this analysis is the [Encode] function, -// which implements the analysis described above for one package, and -// encodes the result as a binary message. -// -// The [Decode] function decodes the message into a usable form: a set -// of equivalence classes. The decoder uses a shared [PackageIndex] to -// enable more compact representations of sets of packages -// ([PackageSet]) during the global reacahability computation. -// -// The [BuildPackageGraph] constructor implements a whole-graph analysis similar -// to that which will be implemented by gopls, but for various reasons the -// logic for this analysis will eventually live in the -// [golang.org/x/tools/gopls/internal/lsp/cache] package. 
Nevertheless, -// BuildPackageGraph and its test serve to verify the syntactic analysis, and -// may serve as a proving ground for new optimizations of the whole-graph analysis. -// -// # Export data is insufficient -// -// At first it may seem that the simplest way to implement this analysis would -// be to consider the types.Packages of the dependencies of P, for example -// during export. After all, it makes sense that the type checked packages -// themselves could describe their dependencies. However, this does not work as -// type information does not describe certain syntactic relationships. -// -// For example, the following scenarios cause type information to miss -// syntactic relationships: -// -// Named type forwarding: -// -// package a; type A b.B -// package b; type B int -// -// Aliases: -// -// package a; func A(f b.B) -// package b; type B = func() -// -// Initializers: -// -// package a; var A = b.B() -// package b; func B() string { return "hi" } -// -// Use of the unsafe package: -// -// package a; type A [unsafe.Sizeof(B{})]int -// package b; type B struct { f1, f2, f3 int } -// -// In all of these examples, types do not contain information about the edge -// between the a.A and b.B declarations. -package typerefs diff --git a/gopls/internal/lsp/source/util.go b/gopls/internal/lsp/source/util.go deleted file mode 100644 index 2cbce615962..00000000000 --- a/gopls/internal/lsp/source/util.go +++ /dev/null @@ -1,541 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package source - -import ( - "context" - "go/ast" - "go/printer" - "go/token" - "go/types" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/tokeninternal" - "golang.org/x/tools/internal/typeparams" -) - -// IsGenerated gets and reads the file denoted by uri and reports -// whether it contains a "generated file" comment as described at -// https://golang.org/s/generatedcode. -// -// TODO(adonovan): opt: this function does too much. -// Move snapshot.ReadFile into the caller (most of which have already done it). -func IsGenerated(ctx context.Context, snapshot Snapshot, uri span.URI) bool { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return false - } - pgf, err := snapshot.ParseGo(ctx, fh, ParseHeader) - if err != nil { - return false - } - for _, commentGroup := range pgf.File.Comments { - for _, comment := range commentGroup.List { - if matched := generatedRx.MatchString(comment.Text); matched { - // Check if comment is at the beginning of the line in source. - if safetoken.Position(pgf.Tok, comment.Slash).Column == 1 { - return true - } - } - } - } - return false -} - -// adjustedObjEnd returns the end position of obj, possibly modified for -// package names. -// -// TODO(rfindley): eliminate this function, by inlining it at callsites where -// it makes sense. -func adjustedObjEnd(obj types.Object) token.Pos { - nameLen := len(obj.Name()) - if pkgName, ok := obj.(*types.PkgName); ok { - // An imported Go package has a package-local, unqualified name. - // When the name matches the imported package name, there is no - // identifier in the import spec with the local package name. 
- // - // For example: - // import "go/ast" // name "ast" matches package name - // import a "go/ast" // name "a" does not match package name - // - // When the identifier does not appear in the source, have the range - // of the object be the import path, including quotes. - if pkgName.Imported().Name() == pkgName.Name() { - nameLen = len(pkgName.Imported().Path()) + len(`""`) - } - } - return obj.Pos() + token.Pos(nameLen) -} - -// Matches cgo generated comment as well as the proposed standard: -// -// https://golang.org/s/generatedcode -var generatedRx = regexp.MustCompile(`// .*DO NOT EDIT\.?`) - -// FileKindForLang returns the file kind associated with the given language ID, -// or UnknownKind if the language ID is not recognized. -func FileKindForLang(langID string) FileKind { - switch langID { - case "go": - return Go - case "go.mod": - return Mod - case "go.sum": - return Sum - case "tmpl", "gotmpl": - return Tmpl - case "go.work": - return Work - default: - return UnknownKind - } -} - -// nodeAtPos returns the index and the node whose position is contained inside -// the node list. -func nodeAtPos(nodes []ast.Node, pos token.Pos) (ast.Node, int) { - if nodes == nil { - return nil, -1 - } - for i, node := range nodes { - if node.Pos() <= pos && pos <= node.End() { - return node, i - } - } - return nil, -1 -} - -// FormatNode returns the "pretty-print" output for an ast node. -func FormatNode(fset *token.FileSet, n ast.Node) string { - var buf strings.Builder - if err := printer.Fprint(&buf, fset, n); err != nil { - // TODO(rfindley): we should use bug.Reportf here. - // We encounter this during completion.resolveInvalid. - return "" - } - return buf.String() -} - -// FormatNodeFile is like FormatNode, but requires only the token.File for the -// syntax containing the given ast node. 
-func FormatNodeFile(file *token.File, n ast.Node) string { - fset := tokeninternal.FileSetFor(file) - return FormatNode(fset, n) -} - -// Deref returns a pointer's element type, traversing as many levels as needed. -// Otherwise it returns typ. -// -// It can return a pointer type for cyclic types (see golang/go#45510). -func Deref(typ types.Type) types.Type { - var seen map[types.Type]struct{} - for { - p, ok := typ.Underlying().(*types.Pointer) - if !ok { - return typ - } - if _, ok := seen[p.Elem()]; ok { - return typ - } - - typ = p.Elem() - - if seen == nil { - seen = make(map[types.Type]struct{}) - } - seen[typ] = struct{}{} - } -} - -func SortDiagnostics(d []*Diagnostic) { - sort.Slice(d, func(i int, j int) bool { - return CompareDiagnostic(d[i], d[j]) < 0 - }) -} - -func CompareDiagnostic(a, b *Diagnostic) int { - if r := protocol.CompareRange(a.Range, b.Range); r != 0 { - return r - } - if a.Source < b.Source { - return -1 - } - if a.Source > b.Source { - return +1 - } - if a.Message < b.Message { - return -1 - } - if a.Message > b.Message { - return +1 - } - return 0 -} - -// findFileInDeps finds package metadata containing URI in the transitive -// dependencies of m. When using the Go command, the answer is unique. -// -// TODO(rfindley): refactor to share logic with findPackageInDeps? -func findFileInDeps(s MetadataSource, m *Metadata, uri span.URI) *Metadata { - seen := make(map[PackageID]bool) - var search func(*Metadata) *Metadata - search = func(m *Metadata) *Metadata { - if seen[m.ID] { - return nil - } - seen[m.ID] = true - for _, cgf := range m.CompiledGoFiles { - if cgf == uri { - return m - } - } - for _, dep := range m.DepsByPkgPath { - m := s.Metadata(dep) - if m == nil { - bug.Reportf("nil metadata for %q", dep) - continue - } - if found := search(m); found != nil { - return found - } - } - return nil - } - return search(m) -} - -// UnquoteImportPath returns the unquoted import path of s, -// or "" if the path is not properly quoted. 
-func UnquoteImportPath(s *ast.ImportSpec) ImportPath { - path, err := strconv.Unquote(s.Path.Value) - if err != nil { - return "" - } - return ImportPath(path) -} - -// NodeContains returns true if a node encloses a given position pos. -func NodeContains(n ast.Node, pos token.Pos) bool { - return n != nil && n.Pos() <= pos && pos <= n.End() -} - -// CollectScopes returns all scopes in an ast path, ordered as innermost scope -// first. -func CollectScopes(info *types.Info, path []ast.Node, pos token.Pos) []*types.Scope { - // scopes[i], where i import path mapping. - inverseDeps := make(map[PackageID]PackagePath) - for path, id := range m.DepsByPkgPath { - inverseDeps[id] = path - } - importsByPkgPath := make(map[PackagePath]ImportPath) // best import paths by pkgPath - for impPath, id := range m.DepsByImpPath { - if id == "" { - continue - } - pkgPath := inverseDeps[id] - _, hasPath := importsByPkgPath[pkgPath] - _, hasImp := localNames[impPath] - // In rare cases, there may be multiple import paths with the same package - // path. In such scenarios, prefer an import path that already exists in - // the file. - if !hasPath || hasImp { - importsByPkgPath[pkgPath] = impPath - } - } - - return func(pkgName PackageName, impPath ImportPath, pkgPath PackagePath) string { - // If supplied, translate the package path to an import path in the source - // package. - if pkgPath != "" { - if srcImp := importsByPkgPath[pkgPath]; srcImp != "" { - impPath = srcImp - } - if pkgPath == m.PkgPath { - return "" - } - } - if localName, ok := localNames[impPath]; ok && impPath != "" { - return string(localName) - } - if pkgName != "" { - return string(pkgName) - } - idx := strings.LastIndexByte(string(impPath), '/') - return string(impPath[idx+1:]) - } -} - -// importInfo collects information about the import specified by imp, -// extracting its file-local name, package name, import path, and package path. 
-// -// If metadata is missing for the import, the resulting package name and -// package path may be empty, and the file local name may be guessed based on -// the import path. -// -// Note: previous versions of this helper used a PackageID->PackagePath map -// extracted from m, for extracting package path even in the case where -// metadata for a dep was missing. This should not be necessary, as we should -// always have metadata for IDs contained in DepsByPkgPath. -func importInfo(s MetadataSource, imp *ast.ImportSpec, m *Metadata) (string, PackageName, ImportPath, PackagePath) { - var ( - name string // local name - pkgName PackageName - impPath = UnquoteImportPath(imp) - pkgPath PackagePath - ) - - // If the import has a local name, use it. - if imp.Name != nil { - name = imp.Name.Name - } - - // Try to find metadata for the import. If successful and there is no local - // name, the package name is the local name. - if depID := m.DepsByImpPath[impPath]; depID != "" { - if depm := s.Metadata(depID); depm != nil { - if name == "" { - name = string(depm.Name) - } - pkgName = depm.Name - pkgPath = depm.PkgPath - } - } - - // If the local name is still unknown, guess it based on the import path. - if name == "" { - idx := strings.LastIndexByte(string(impPath), '/') - name = string(impPath[idx+1:]) - } - return name, pkgName, impPath, pkgPath -} - -// isDirective reports whether c is a comment directive. -// -// Copied and adapted from go/src/go/ast/ast.go. -func isDirective(c string) bool { - if len(c) < 3 { - return false - } - if c[1] != '/' { - return false - } - //-style comment (no newline at the end) - c = c[2:] - if len(c) == 0 { - // empty line - return false - } - // "//line " is a line directive. - // (The // has been removed.) - if strings.HasPrefix(c, "line ") { - return true - } - - // "//[a-z0-9]+:[a-z0-9]" - // (The // has been removed.) 
- colon := strings.Index(c, ":") - if colon <= 0 || colon+1 >= len(c) { - return false - } - for i := 0; i <= colon+1; i++ { - if i == colon { - continue - } - b := c[i] - if !('a' <= b && b <= 'z' || '0' <= b && b <= '9') { - return false - } - } - return true -} - -// InDir checks whether path is in the file tree rooted at dir. -// It checks only the lexical form of the file names. -// It does not consider symbolic links. -// -// Copied from go/src/cmd/go/internal/search/search.go. -func InDir(dir, path string) bool { - pv := strings.ToUpper(filepath.VolumeName(path)) - dv := strings.ToUpper(filepath.VolumeName(dir)) - path = path[len(pv):] - dir = dir[len(dv):] - switch { - default: - return false - case pv != dv: - return false - case len(path) == len(dir): - if path == dir { - return true - } - return false - case dir == "": - return path != "" - case len(path) > len(dir): - if dir[len(dir)-1] == filepath.Separator { - if path[:len(dir)] == dir { - return path[len(dir):] != "" - } - return false - } - if path[len(dir)] == filepath.Separator && path[:len(dir)] == dir { - if len(path) == len(dir)+1 { - return true - } - return path[len(dir)+1:] != "" - } - return false - } -} - -// IsValidImport returns whether importPkgPath is importable -// by pkgPath -func IsValidImport(pkgPath, importPkgPath PackagePath) bool { - i := strings.LastIndex(string(importPkgPath), "/internal/") - if i == -1 { - return true - } - // TODO(rfindley): this looks wrong: IsCommandLineArguments is meant to - // operate on package IDs, not package paths. - if IsCommandLineArguments(PackageID(pkgPath)) { - return true - } - // TODO(rfindley): this is wrong. 
mod.testx/p should not be able to - // import mod.test/internal: https://go.dev/play/p/-Ca6P-E4V4q - return strings.HasPrefix(string(pkgPath), string(importPkgPath[:i])) -} - -// IsCommandLineArguments reports whether a given value denotes -// "command-line-arguments" package, which is a package with an unknown ID -// created by the go command. It can have a test variant, which is why callers -// should not check that a value equals "command-line-arguments" directly. -func IsCommandLineArguments(id PackageID) bool { - return strings.Contains(string(id), "command-line-arguments") -} - -// embeddedIdent returns the type name identifier for an embedding x, if x in a -// valid embedding. Otherwise, it returns nil. -// -// Spec: An embedded field must be specified as a type name T or as a pointer -// to a non-interface type name *T -func embeddedIdent(x ast.Expr) *ast.Ident { - if star, ok := x.(*ast.StarExpr); ok { - x = star.X - } - switch ix := x.(type) { // check for instantiated receivers - case *ast.IndexExpr: - x = ix.X - case *typeparams.IndexListExpr: - x = ix.X - } - switch x := x.(type) { - case *ast.Ident: - return x - case *ast.SelectorExpr: - if _, ok := x.X.(*ast.Ident); ok { - return x.Sel - } - } - return nil -} - -// An importFunc is an implementation of the single-method -// types.Importer interface based on a function value. -type ImporterFunc func(path string) (*types.Package, error) - -func (f ImporterFunc) Import(path string) (*types.Package, error) { return f(path) } diff --git a/gopls/internal/lsp/source/view.go b/gopls/internal/lsp/source/view.go deleted file mode 100644 index 46c2eeeb609..00000000000 --- a/gopls/internal/lsp/source/view.go +++ /dev/null @@ -1,1060 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package source - -import ( - "bytes" - "context" - "crypto/sha256" - "encoding/json" - "errors" - "fmt" - "go/ast" - "go/parser" - "go/scanner" - "go/token" - "go/types" - "io" - - "golang.org/x/mod/modfile" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/types/objectpath" - "golang.org/x/tools/gopls/internal/lsp/progress" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/source/methodsets" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/gopls/internal/vulncheck" - "golang.org/x/tools/internal/event/label" - "golang.org/x/tools/internal/event/tag" - "golang.org/x/tools/internal/gocommand" - "golang.org/x/tools/internal/imports" - "golang.org/x/tools/internal/packagesinternal" -) - -// A GlobalSnapshotID uniquely identifies a snapshot within this process and -// increases monotonically with snapshot creation time. -// -// We use a distinct integral type for global IDs to help enforce correct -// usage. -type GlobalSnapshotID uint64 - -// Snapshot represents the current state for the given view. -type Snapshot interface { - // SequenceID is the sequence id of this snapshot within its containing - // view. - // - // Relative to their view sequence ids are monotonically increasing, but this - // does not hold globally: when new views are created their initial snapshot - // has sequence ID 0. For operations that span multiple views, use global - // IDs. - SequenceID() uint64 - - // GlobalID is a globally unique identifier for this snapshot. Global IDs are - // monotonic: subsequent snapshots will have higher global ID, though - // subsequent snapshots in a view may not have adjacent global IDs. - GlobalID() GlobalSnapshotID - - // FileKind returns the type of a file. 
- // - // We can't reliably deduce the kind from the file name alone, - // as some editors can be told to interpret a buffer as - // language different from the file name heuristic, e.g. that - // an .html file actually contains Go "html/template" syntax, - // or even that a .go file contains Python. - FileKind(FileHandle) FileKind - - // Options returns the options associated with this snapshot. - Options() *Options - - // View returns the View associated with this snapshot. - View() View - - // BackgroundContext returns a context used for all background processing - // on behalf of this snapshot. - BackgroundContext() context.Context - - // A Snapshot is a caching implementation of FileSource whose - // ReadFile method returns consistent information about the existence - // and content of each file throughout its lifetime. - FileSource - - // FindFile returns the FileHandle for the given URI, if it is already - // in the given snapshot. - // TODO(adonovan): delete this operation; use ReadFile instead. - FindFile(uri span.URI) FileHandle - - // AwaitInitialized waits until the snapshot's view is initialized. - AwaitInitialized(ctx context.Context) - - // IsOpen returns whether the editor currently has a file open. - IsOpen(uri span.URI) bool - - // IgnoredFile reports if a file would be ignored by a `go list` of the whole - // workspace. - IgnoredFile(uri span.URI) bool - - // Templates returns the .tmpl files - Templates() map[span.URI]FileHandle - - // ParseGo returns the parsed AST for the file. - // If the file is not available, returns nil and an error. - // Position information is added to FileSet(). - ParseGo(ctx context.Context, fh FileHandle, mode parser.Mode) (*ParsedGoFile, error) - - // Analyze runs the specified analyzers on the given packages at this snapshot. - // - // If the provided tracker is non-nil, it may be used to report progress of - // the analysis pass. 
- Analyze(ctx context.Context, pkgIDs map[PackageID]unit, analyzers []*Analyzer, tracker *progress.Tracker) ([]*Diagnostic, error) - - // RunGoCommandPiped runs the given `go` command, writing its output - // to stdout and stderr. Verb, Args, and WorkingDir must be specified. - // - // RunGoCommandPiped runs the command serially using gocommand.RunPiped, - // enforcing that this command executes exclusively to other commands on the - // server. - RunGoCommandPiped(ctx context.Context, mode InvocationFlags, inv *gocommand.Invocation, stdout, stderr io.Writer) error - - // RunGoCommandDirect runs the given `go` command. Verb, Args, and - // WorkingDir must be specified. - RunGoCommandDirect(ctx context.Context, mode InvocationFlags, inv *gocommand.Invocation) (*bytes.Buffer, error) - - // RunGoCommands runs a series of `go` commands that updates the go.mod - // and go.sum file for wd, and returns their updated contents. - RunGoCommands(ctx context.Context, allowNetwork bool, wd string, run func(invoke func(...string) (*bytes.Buffer, error)) error) (bool, []byte, []byte, error) - - // RunProcessEnvFunc runs fn with the process env for this snapshot's view. - // Note: the process env contains cached module and filesystem state. - RunProcessEnvFunc(ctx context.Context, fn func(context.Context, *imports.Options) error) error - - // ModFiles are the go.mod files enclosed in the snapshot's view and known - // to the snapshot. - ModFiles() []span.URI - - // ParseMod is used to parse go.mod files. - ParseMod(ctx context.Context, fh FileHandle) (*ParsedModule, error) - - // ModWhy returns the results of `go mod why` for the module specified by - // the given go.mod file. - ModWhy(ctx context.Context, fh FileHandle) (map[string]string, error) - - // ModTidy returns the results of `go mod tidy` for the module specified by - // the given go.mod file. 
- ModTidy(ctx context.Context, pm *ParsedModule) (*TidiedModule, error) - - // ModVuln returns import vulnerability analysis for the given go.mod URI. - // Concurrent requests are combined into a single command. - ModVuln(ctx context.Context, modURI span.URI) (*vulncheck.Result, error) - - // GoModForFile returns the URI of the go.mod file for the given URI. - GoModForFile(uri span.URI) span.URI - - // WorkFile, if non-empty, is the go.work file for the workspace. - WorkFile() span.URI - - // ParseWork is used to parse go.work files. - ParseWork(ctx context.Context, fh FileHandle) (*ParsedWorkFile, error) - - // BuiltinFile returns information about the special builtin package. - BuiltinFile(ctx context.Context) (*ParsedGoFile, error) - - // IsBuiltin reports whether uri is part of the builtin package. - IsBuiltin(uri span.URI) bool - - // CriticalError returns any critical errors in the workspace. - // - // A nil result may mean success, or context cancellation. - CriticalError(ctx context.Context) *CriticalError - - // Symbols returns all symbols in the snapshot. - // - // If workspaceOnly is set, this only includes symbols from files in a - // workspace package. Otherwise, it returns symbols from all loaded packages. - Symbols(ctx context.Context, workspaceOnly bool) (map[span.URI][]Symbol, error) - - // -- package metadata -- - - // ReverseDependencies returns a new mapping whose entries are - // the ID and Metadata of each package in the workspace that - // directly or transitively depend on the package denoted by id, - // excluding id itself. - ReverseDependencies(ctx context.Context, id PackageID, transitive bool) (map[PackageID]*Metadata, error) - - // WorkspaceMetadata returns a new, unordered slice containing - // metadata for all ordinary and test packages (but not - // intermediate test variants) in the workspace. - // - // The workspace is the set of modules typically defined by a - // go.work file. 
It is not transitively closed: for example, - // the standard library is not usually part of the workspace - // even though every module in the workspace depends on it. - // - // Operations that must inspect all the dependencies of the - // workspace packages should instead use AllMetadata. - WorkspaceMetadata(ctx context.Context) ([]*Metadata, error) - - // AllMetadata returns a new unordered array of metadata for - // all packages known to this snapshot, which includes the - // packages of all workspace modules plus their transitive - // import dependencies. - // - // It may also contain ad-hoc packages for standalone files. - // It includes all test variants. - AllMetadata(ctx context.Context) ([]*Metadata, error) - - // Metadata returns the metadata for the specified package, - // or nil if it was not found. - Metadata(id PackageID) *Metadata - - // MetadataForFile returns a new slice containing metadata for each - // package containing the Go file identified by uri, ordered by the - // number of CompiledGoFiles (i.e. "narrowest" to "widest" package), - // and secondarily by IsIntermediateTestVariant (false < true). - // The result may include tests and intermediate test variants of - // importable packages. - // It returns an error if the context was cancelled. - MetadataForFile(ctx context.Context, uri span.URI) ([]*Metadata, error) - - // OrphanedFileDiagnostics reports diagnostics for files that have no package - // associations or which only have only command-line-arguments packages. - // - // The caller must not mutate the result. - OrphanedFileDiagnostics(ctx context.Context) (map[span.URI]*Diagnostic, error) - - // -- package type-checking -- - - // TypeCheck parses and type-checks the specified packages, - // and returns them in the same order as the ids. - // The resulting packages' types may belong to different importers, - // so types from different packages are incommensurable. 
- // - // In general, clients should never need to type-checked - // syntax for an intermediate test variant (ITV) package. - // Callers should apply RemoveIntermediateTestVariants (or - // equivalent) before this method, or any of the potentially - // type-checking methods below. - TypeCheck(ctx context.Context, ids ...PackageID) ([]Package, error) - - // PackageDiagnostics returns diagnostics for files contained in specified - // packages. - // - // If these diagnostics cannot be loaded from cache, the requested packages - // may be type-checked. - PackageDiagnostics(ctx context.Context, ids ...PackageID) (map[span.URI][]*Diagnostic, error) - - // References returns cross-references indexes for the specified packages. - // - // If these indexes cannot be loaded from cache, the requested packages may - // be type-checked. - References(ctx context.Context, ids ...PackageID) ([]XrefIndex, error) - - // MethodSets returns method-set indexes for the specified packages. - // - // If these indexes cannot be loaded from cache, the requested packages may - // be type-checked. - MethodSets(ctx context.Context, ids ...PackageID) ([]*methodsets.Index, error) -} - -// NarrowestMetadataForFile returns metadata for the narrowest package -// (the one with the fewest files) that encloses the specified file. -// The result may be a test variant, but never an intermediate test variant. -func NarrowestMetadataForFile(ctx context.Context, snapshot Snapshot, uri span.URI) (*Metadata, error) { - metas, err := snapshot.MetadataForFile(ctx, uri) - if err != nil { - return nil, err - } - RemoveIntermediateTestVariants(&metas) - if len(metas) == 0 { - return nil, fmt.Errorf("no package metadata for file %s", uri) - } - return metas[0], nil -} - -type XrefIndex interface { - Lookup(targets map[PackagePath]map[objectpath.Path]struct{}) (locs []protocol.Location) -} - -// SnapshotLabels returns a new slice of labels that should be used for events -// related to a snapshot. 
-func SnapshotLabels(snapshot Snapshot) []label.Label { - return []label.Label{tag.Snapshot.Of(snapshot.SequenceID()), tag.Directory.Of(snapshot.View().Folder())} -} - -// NarrowestPackageForFile is a convenience function that selects the narrowest -// non-ITV package to which this file belongs, type-checks it in the requested -// mode (full or workspace), and returns it, along with the parse tree of that -// file. -// -// The "narrowest" package is the one with the fewest number of files that -// includes the given file. This solves the problem of test variants, as the -// test will have more files than the non-test package. -// -// An intermediate test variant (ITV) package has identical source to a regular -// package but resolves imports differently. gopls should never need to -// type-check them. -// -// Type-checking is expensive. Call snapshot.ParseGo if all you need is a parse -// tree, or snapshot.MetadataForFile if you only need metadata. -func NarrowestPackageForFile(ctx context.Context, snapshot Snapshot, uri span.URI) (Package, *ParsedGoFile, error) { - return selectPackageForFile(ctx, snapshot, uri, func(metas []*Metadata) *Metadata { return metas[0] }) -} - -// WidestPackageForFile is a convenience function that selects the widest -// non-ITV package to which this file belongs, type-checks it in the requested -// mode (full or workspace), and returns it, along with the parse tree of that -// file. -// -// The "widest" package is the one with the most number of files that includes -// the given file. Which is the test variant if one exists. -// -// An intermediate test variant (ITV) package has identical source to a regular -// package but resolves imports differently. gopls should never need to -// type-check them. -// -// Type-checking is expensive. Call snapshot.ParseGo if all you need is a parse -// tree, or snapshot.MetadataForFile if you only need metadata. 
-func WidestPackageForFile(ctx context.Context, snapshot Snapshot, uri span.URI) (Package, *ParsedGoFile, error) { - return selectPackageForFile(ctx, snapshot, uri, func(metas []*Metadata) *Metadata { return metas[len(metas)-1] }) -} - -func selectPackageForFile(ctx context.Context, snapshot Snapshot, uri span.URI, selector func([]*Metadata) *Metadata) (Package, *ParsedGoFile, error) { - metas, err := snapshot.MetadataForFile(ctx, uri) - if err != nil { - return nil, nil, err - } - RemoveIntermediateTestVariants(&metas) - if len(metas) == 0 { - return nil, nil, fmt.Errorf("no package metadata for file %s", uri) - } - md := selector(metas) - pkgs, err := snapshot.TypeCheck(ctx, md.ID) - if err != nil { - return nil, nil, err - } - pkg := pkgs[0] - pgf, err := pkg.File(uri) - if err != nil { - return nil, nil, err // "can't happen" - } - return pkg, pgf, err -} - -// InvocationFlags represents the settings of a particular go command invocation. -// It is a mode, plus a set of flag bits. -type InvocationFlags int - -const ( - // Normal is appropriate for commands that might be run by a user and don't - // deliberately modify go.mod files, e.g. `go test`. - Normal InvocationFlags = iota - // WriteTemporaryModFile is for commands that need information from a - // modified version of the user's go.mod file, e.g. `go mod tidy` used to - // generate diagnostics. - WriteTemporaryModFile - // LoadWorkspace is for packages.Load, and other operations that should - // consider the whole workspace at once. - LoadWorkspace - - // AllowNetwork is a flag bit that indicates the invocation should be - // allowed to access the network. - AllowNetwork InvocationFlags = 1 << 10 -) - -func (m InvocationFlags) Mode() InvocationFlags { - return m & (AllowNetwork - 1) -} - -func (m InvocationFlags) AllowNetwork() bool { - return m&AllowNetwork != 0 -} - -// View represents a single build context for a workspace. 
-// -// A unique build is determined by the workspace folder along with a Go -// environment (GOOS, GOARCH, GOWORK, etc). -// -// Additionally, the View holds a pointer to the current state of that build -// (the Snapshot). -// -// TODO(rfindley): move all other state such as module upgrades into the -// Snapshot. -type View interface { - // ID returns a globally unique identifier for this view. - ID() string - - // Name returns the name this view was constructed with. - Name() string - - // Folder returns the folder with which this view was created. - Folder() span.URI - - // Snapshot returns the current snapshot for the view, and a - // release function that must be called when the Snapshot is - // no longer needed. - // - // If the view is shut down, the resulting error will be non-nil, and the - // release function need not be called. - Snapshot() (Snapshot, func(), error) - - // IsGoPrivatePath reports whether target is a private import path, as identified - // by the GOPRIVATE environment variable. - IsGoPrivatePath(path string) bool - - // ModuleUpgrades returns known module upgrades for the dependencies of - // modfile. - ModuleUpgrades(modfile span.URI) map[string]string - - // RegisterModuleUpgrades registers that upgrades exist for the given modules - // required by modfile. - RegisterModuleUpgrades(modfile span.URI, upgrades map[string]string) - - // ClearModuleUpgrades clears all upgrades for the modules in modfile. - ClearModuleUpgrades(modfile span.URI) - - // Vulnerabilities returns known vulnerabilities for the given modfile. - // TODO(suzmue): replace command.Vuln with a different type, maybe - // https://pkg.go.dev/golang.org/x/vuln/cmd/govulncheck/govulnchecklib#Summary? - Vulnerabilities(modfile ...span.URI) map[span.URI]*vulncheck.Result - - // SetVulnerabilities resets the list of vulnerabilities that exists for the given modules - // required by modfile. 
- SetVulnerabilities(modfile span.URI, vulncheckResult *vulncheck.Result) - - // GoVersion returns the configured Go version for this view. - GoVersion() int - - // GoVersionString returns the go version string configured for this view. - // Unlike [GoVersion], this encodes the minor version and commit hash information. - GoVersionString() string -} - -// A FileSource maps URIs to FileHandles. -type FileSource interface { - // ReadFile returns the FileHandle for a given URI, either by - // reading the content of the file or by obtaining it from a cache. - // - // Invariant: ReadFile must only return an error in the case of context - // cancellation. If ctx.Err() is nil, the resulting error must also be nil. - ReadFile(ctx context.Context, uri span.URI) (FileHandle, error) -} - -// A MetadataSource maps package IDs to metadata. -// -// TODO(rfindley): replace this with a concrete metadata graph, once it is -// exposed from the snapshot. -type MetadataSource interface { - // Metadata returns Metadata for the given package ID, or nil if it does not - // exist. - Metadata(PackageID) *Metadata -} - -// A ParsedGoFile contains the results of parsing a Go file. -type ParsedGoFile struct { - URI span.URI - Mode parser.Mode - File *ast.File - Tok *token.File - // Source code used to build the AST. It may be different from the - // actual content of the file if we have fixed the AST. - Src []byte - - // FixedSrc and Fixed AST report on "fixing" that occurred during parsing of - // this file. - // - // If FixedSrc == true, the source contained in the Src field was modified - // from the original source to improve parsing. - // - // If FixedAST == true, the ast was modified after parsing, and therefore - // positions encoded in the AST may not accurately represent the content of - // the Src field. - // - // TODO(rfindley): there are many places where we haphazardly use the Src or - // positions without checking these fields. Audit these places and guard - // accordingly. 
After doing so, we may find that we don't need to - // differentiate FixedSrc and FixedAST. - FixedSrc bool - FixedAST bool - Mapper *protocol.Mapper // may map fixed Src, not file content - ParseErr scanner.ErrorList -} - -// Fixed reports whether p was "Fixed", meaning that its source or positions -// may not correlate with the original file. -func (p ParsedGoFile) Fixed() bool { - return p.FixedSrc || p.FixedAST -} - -// -- go/token domain convenience helpers -- - -// PositionPos returns the token.Pos of protocol position p within the file. -func (pgf *ParsedGoFile) PositionPos(p protocol.Position) (token.Pos, error) { - offset, err := pgf.Mapper.PositionOffset(p) - if err != nil { - return token.NoPos, err - } - return safetoken.Pos(pgf.Tok, offset) -} - -// PosRange returns a protocol Range for the token.Pos interval in this file. -func (pgf *ParsedGoFile) PosRange(start, end token.Pos) (protocol.Range, error) { - return pgf.Mapper.PosRange(pgf.Tok, start, end) -} - -// PosMappedRange returns a MappedRange for the token.Pos interval in this file. -// A MappedRange can be converted to any other form. -func (pgf *ParsedGoFile) PosMappedRange(start, end token.Pos) (protocol.MappedRange, error) { - return pgf.Mapper.PosMappedRange(pgf.Tok, start, end) -} - -// PosLocation returns a protocol Location for the token.Pos interval in this file. -func (pgf *ParsedGoFile) PosLocation(start, end token.Pos) (protocol.Location, error) { - return pgf.Mapper.PosLocation(pgf.Tok, start, end) -} - -// NodeRange returns a protocol Range for the ast.Node interval in this file. -func (pgf *ParsedGoFile) NodeRange(node ast.Node) (protocol.Range, error) { - return pgf.Mapper.NodeRange(pgf.Tok, node) -} - -// NodeMappedRange returns a MappedRange for the ast.Node interval in this file. -// A MappedRange can be converted to any other form. 
-func (pgf *ParsedGoFile) NodeMappedRange(node ast.Node) (protocol.MappedRange, error) { - return pgf.Mapper.NodeMappedRange(pgf.Tok, node) -} - -// NodeLocation returns a protocol Location for the ast.Node interval in this file. -func (pgf *ParsedGoFile) NodeLocation(node ast.Node) (protocol.Location, error) { - return pgf.Mapper.PosLocation(pgf.Tok, node.Pos(), node.End()) -} - -// RangePos parses a protocol Range back into the go/token domain. -func (pgf *ParsedGoFile) RangePos(r protocol.Range) (token.Pos, token.Pos, error) { - start, end, err := pgf.Mapper.RangeOffsets(r) - if err != nil { - return token.NoPos, token.NoPos, err - } - return pgf.Tok.Pos(start), pgf.Tok.Pos(end), nil -} - -// A ParsedModule contains the results of parsing a go.mod file. -type ParsedModule struct { - URI span.URI - File *modfile.File - Mapper *protocol.Mapper - ParseErrors []*Diagnostic -} - -// A ParsedWorkFile contains the results of parsing a go.work file. -type ParsedWorkFile struct { - URI span.URI - File *modfile.WorkFile - Mapper *protocol.Mapper - ParseErrors []*Diagnostic -} - -// A TidiedModule contains the results of running `go mod tidy` on a module. -type TidiedModule struct { - // Diagnostics representing changes made by `go mod tidy`. - Diagnostics []*Diagnostic - // The bytes of the go.mod file after it was tidied. - TidiedContent []byte -} - -// Metadata represents package metadata retrieved from go/packages. -// The Deps* maps do not contain self-import edges. -// -// An ad-hoc package (without go.mod or GOPATH) has its ID, PkgPath, -// and LoadDir equal to the absolute path of its directory. 
-type Metadata struct { - ID PackageID - PkgPath PackagePath - Name PackageName - - // these three fields are as defined by go/packages.Package - GoFiles []span.URI - CompiledGoFiles []span.URI - IgnoredFiles []span.URI - - ForTest PackagePath // q in a "p [q.test]" package, else "" - TypesSizes types.Sizes - Errors []packages.Error // must be set for packages in import cycles - DepsByImpPath map[ImportPath]PackageID // may contain dups; empty ID => missing - DepsByPkgPath map[PackagePath]PackageID // values are unique and non-empty - Module *packages.Module - DepsErrors []*packagesinternal.PackageError - Diagnostics []*Diagnostic // processed diagnostics from 'go list' - LoadDir string // directory from which go/packages was run - Standalone bool // package synthesized for a standalone file (e.g. ignore-tagged) -} - -func (m *Metadata) String() string { return string(m.ID) } - -// IsIntermediateTestVariant reports whether the given package is an -// intermediate test variant (ITV), e.g. "net/http [net/url.test]". -// -// An ITV has identical syntax to the regular variant, but different -// import metadata (DepsBy{Imp,Pkg}Path). -// -// Such test variants arise when an x_test package (in this case net/url_test) -// imports a package (in this case net/http) that itself imports the -// non-x_test package (in this case net/url). -// -// This is done so that the forward transitive closure of net/url_test has -// only one package for the "net/url" import. -// The ITV exists to hold the test variant import: -// -// net/url_test [net/url.test] -// -// | "net/http" -> net/http [net/url.test] -// | "net/url" -> net/url [net/url.test] -// | ... -// -// net/http [net/url.test] -// -// | "net/url" -> net/url [net/url.test] -// | ... -// -// This restriction propagates throughout the import graph of net/http: for -// every package imported by net/http that imports net/url, there must be an -// intermediate test variant that instead imports "net/url [net/url.test]". 
-// -// As one can see from the example of net/url and net/http, intermediate test -// variants can result in many additional packages that are essentially (but -// not quite) identical. For this reason, we filter these variants wherever -// possible. -// -// # Why we mostly ignore intermediate test variants -// -// In projects with complicated tests, there may be a very large -// number of ITVs--asymptotically more than the number of ordinary -// variants. Since they have identical syntax, it is fine in most -// cases to ignore them since the results of analyzing the ordinary -// variant suffice. However, this is not entirely sound. -// -// Consider this package: -// -// // p/p.go -- in all variants of p -// package p -// type T struct { io.Closer } -// -// // p/p_test.go -- in test variant of p -// package p -// func (T) Close() error { ... } -// -// The ordinary variant "p" defines T with a Close method promoted -// from io.Closer. But its test variant "p [p.test]" defines a type T -// with a Close method from p_test.go. -// -// Now consider a package q that imports p, perhaps indirectly. Within -// it, T.Close will resolve to the first Close method: -// -// // q/q.go -- in all variants of q -// package q -// import "p" -// var _ = new(p.T).Close -// -// Let's assume p also contains this file defining an external test (xtest): -// -// // p/p_x_test.go -- external test of p -// package p_test -// import ( "q"; "testing" ) -// func Test(t *testing.T) { ... } -// -// Note that q imports p, but p's xtest imports q. Now, in "q -// [p.test]", the intermediate test variant of q built for p's -// external test, T.Close resolves not to the io.Closer.Close -// interface method, but to the concrete method of T.Close -// declared in p_test.go. -// -// If we now request all references to the T.Close declaration in -// p_test.go, the result should include the reference from q's ITV. 
-// (It's not just methods that can be affected; fields can too, though -// it requires bizarre code to achieve.) -// -// As a matter of policy, gopls mostly ignores this subtlety, -// because to account for it would require that we type-check every -// intermediate test variant of p, of which there could be many. -// Good code doesn't rely on such trickery. -// -// Most callers of MetadataForFile call RemoveIntermediateTestVariants -// to discard them before requesting type checking, or the products of -// type-checking such as the cross-reference index or method set index. -// -// MetadataForFile doesn't do this filtering itself becaused in some -// cases we need to make a reverse dependency query on the metadata -// graph, and it's important to include the rdeps of ITVs in that -// query. But the filtering of ITVs should be applied after that step, -// before type checking. -// -// In general, we should never type check an ITV. -func (m *Metadata) IsIntermediateTestVariant() bool { - return m.ForTest != "" && m.ForTest != m.PkgPath && m.ForTest+"_test" != m.PkgPath -} - -// RemoveIntermediateTestVariants removes intermediate test variants, modifying the array. -// We use a pointer to a slice make it impossible to forget to use the result. -func RemoveIntermediateTestVariants(pmetas *[]*Metadata) { - metas := *pmetas - res := metas[:0] - for _, m := range metas { - if !m.IsIntermediateTestVariant() { - res = append(res, m) - } - } - *pmetas = res -} - -var ErrViewExists = errors.New("view already exists for session") - -// FileModification represents a modification to a file. -type FileModification struct { - URI span.URI - Action FileAction - - // OnDisk is true if a watched file is changed on disk. - // If true, Version will be -1 and Text will be nil. - OnDisk bool - - // Version will be -1 and Text will be nil when they are not supplied, - // specifically on textDocument/didClose and for on-disk changes. 
- Version int32 - Text []byte - - // LanguageID is only sent from the language client on textDocument/didOpen. - LanguageID string -} - -type FileAction int - -const ( - UnknownFileAction = FileAction(iota) - Open - Change - Close - Save - Create - Delete -) - -func (a FileAction) String() string { - switch a { - case Open: - return "Open" - case Change: - return "Change" - case Close: - return "Close" - case Save: - return "Save" - case Create: - return "Create" - case Delete: - return "Delete" - default: - return "Unknown" - } -} - -var ErrTmpModfileUnsupported = errors.New("-modfile is unsupported for this Go version") -var ErrNoModOnDisk = errors.New("go.mod file is not on disk") - -func IsNonFatalGoModError(err error) bool { - return err == ErrTmpModfileUnsupported || err == ErrNoModOnDisk -} - -// Common parse modes; these should be reused wherever possible to increase -// cache hits. -const ( - // ParseHeader specifies that the main package declaration and imports are needed. - // This is the mode used when attempting to examine the package graph structure. - ParseHeader = parser.AllErrors | parser.ParseComments | parser.ImportsOnly | SkipObjectResolution - - // ParseFull specifies the full AST is needed. - // This is used for files of direct interest where the entire contents must - // be considered. - ParseFull = parser.AllErrors | parser.ParseComments | SkipObjectResolution -) - -// A FileHandle represents the URI, content, hash, and optional -// version of a file tracked by the LSP session. -// -// File content may be provided by the file system (for Saved files) -// or from an overlay, for open files with unsaved edits. -// A FileHandle may record an attempt to read a non-existent file, -// in which case Content returns an error. -type FileHandle interface { - // URI is the URI for this file handle. - // TODO(rfindley): this is not actually well-defined. In some cases, there - // may be more than one URI that resolve to the same FileHandle. 
Which one is - // this? - URI() span.URI - // FileIdentity returns a FileIdentity for the file, even if there was an - // error reading it. - FileIdentity() FileIdentity - // SameContentsOnDisk reports whether the file has the same content on disk: - // it is false for files open on an editor with unsaved edits. - SameContentsOnDisk() bool - // Version returns the file version, as defined by the LSP client. - // For on-disk file handles, Version returns 0. - Version() int32 - // Content returns the contents of a file. - // If the file is not available, returns a nil slice and an error. - Content() ([]byte, error) -} - -// A Hash is a cryptographic digest of the contents of a file. -// (Although at 32B it is larger than a 16B string header, it is smaller -// and has better locality than the string header + 64B of hex digits.) -type Hash [sha256.Size]byte - -// HashOf returns the hash of some data. -func HashOf(data []byte) Hash { - return Hash(sha256.Sum256(data)) -} - -// Hashf returns the hash of a printf-formatted string. -func Hashf(format string, args ...interface{}) Hash { - // Although this looks alloc-heavy, it is faster than using - // Fprintf on sha256.New() because the allocations don't escape. - return HashOf([]byte(fmt.Sprintf(format, args...))) -} - -// String returns the digest as a string of hex digits. -func (h Hash) String() string { - return fmt.Sprintf("%64x", [sha256.Size]byte(h)) -} - -// Less returns true if the given hash is less than the other. -func (h Hash) Less(other Hash) bool { - return bytes.Compare(h[:], other[:]) < 0 -} - -// XORWith updates *h to *h XOR h2. -func (h *Hash) XORWith(h2 Hash) { - // Small enough that we don't need crypto/subtle.XORBytes. - for i := range h { - h[i] ^= h2[i] - } -} - -// FileIdentity uniquely identifies a file at a version from a FileSystem. 
-type FileIdentity struct { - URI span.URI - Hash Hash // digest of file contents -} - -func (id FileIdentity) String() string { - return fmt.Sprintf("%s%s", id.URI, id.Hash) -} - -// FileKind describes the kind of the file in question. -// It can be one of Go,mod, Sum, or Tmpl. -type FileKind int - -const ( - // UnknownKind is a file type we don't know about. - UnknownKind = FileKind(iota) - - // Go is a normal go source file. - Go - // Mod is a go.mod file. - Mod - // Sum is a go.sum file. - Sum - // Tmpl is a template file. - Tmpl - // Work is a go.work file. - Work -) - -func (k FileKind) String() string { - switch k { - case Go: - return "go" - case Mod: - return "go.mod" - case Sum: - return "go.sum" - case Tmpl: - return "tmpl" - case Work: - return "go.work" - default: - return fmt.Sprintf("internal error: unknown file kind %d", k) - } -} - -// Analyzer represents a go/analysis analyzer with some boolean properties -// that let the user know how to use the analyzer. -type Analyzer struct { - Analyzer *analysis.Analyzer - - // Enabled reports whether the analyzer is enabled. This value can be - // configured per-analysis in user settings. For staticcheck analyzers, - // the value of the Staticcheck setting overrides this field. - // - // Most clients should use the IsEnabled method. - Enabled bool - - // Fix is the name of the suggested fix name used to invoke the suggested - // fixes for the analyzer. It is non-empty if we expect this analyzer to - // provide its fix separately from its diagnostics. That is, we should apply - // the analyzer's suggested fixes through a Command, not a TextEdit. - Fix string - - // fixesDiagnostic reports if a diagnostic from the analyzer can be fixed by Fix. - // If nil then all diagnostics from the analyzer are assumed to be fixable. - fixesDiagnostic func(*Diagnostic) bool - - // ActionKind is the kind of code action this analyzer produces. If - // unspecified the type defaults to quickfix. 
- ActionKind []protocol.CodeActionKind - - // Severity is the severity set for diagnostics reported by this - // analyzer. If left unset it defaults to Warning. - Severity protocol.DiagnosticSeverity - - // Tag is extra tags (unnecessary, deprecated, etc) for diagnostics - // reported by this analyzer. - Tag []protocol.DiagnosticTag -} - -func (a *Analyzer) String() string { return a.Analyzer.String() } - -// IsEnabled reports whether this analyzer is enabled by the given options. -func (a Analyzer) IsEnabled(options *Options) bool { - // Staticcheck analyzers can only be enabled when staticcheck is on. - if _, ok := options.StaticcheckAnalyzers[a.Analyzer.Name]; ok { - if !options.Staticcheck { - return false - } - } - if enabled, ok := options.Analyses[a.Analyzer.Name]; ok { - return enabled - } - return a.Enabled -} - -// FixesDiagnostic returns true if Analyzer.Fix can fix the Diagnostic. -func (a Analyzer) FixesDiagnostic(d *Diagnostic) bool { - if a.fixesDiagnostic == nil { - return true - } - return a.fixesDiagnostic(d) -} - -// Declare explicit types for package paths, names, and IDs to ensure that we -// never use an ID where a path belongs, and vice versa. If we confused these, -// it would result in confusing errors because package IDs often look like -// package paths. -type ( - PackageID string // go list's unique identifier for a package (e.g. "vendor/example.com/foo [vendor/example.com/bar.test]") - PackagePath string // name used to prefix linker symbols (e.g. "vendor/example.com/foo") - PackageName string // identifier in 'package' declaration (e.g. "foo") - ImportPath string // path that appears in an import declaration (e.g. "example.com/foo") -) - -// Package represents a Go package that has been parsed and type-checked. -// -// By design, there is no way to reach from a Package to the Package -// representing one of its dependencies. 
-// -// Callers must not assume that two Packages share the same -// token.FileSet or types.Importer and thus have commensurable -// token.Pos values or types.Objects. Instead, use stable naming -// schemes, such as (URI, byte offset) for positions, or (PackagePath, -// objectpath.Path) for exported declarations. -type Package interface { - Metadata() *Metadata - - // Results of parsing: - FileSet() *token.FileSet - CompiledGoFiles() []*ParsedGoFile // (borrowed) - File(uri span.URI) (*ParsedGoFile, error) - GetSyntax() []*ast.File // (borrowed) - GetParseErrors() []scanner.ErrorList - - // Results of type checking: - GetTypes() *types.Package - GetTypeErrors() []types.Error - GetTypesInfo() *types.Info - DependencyTypes(PackagePath) *types.Package // nil for indirect dependency of no consequence - DiagnosticsForFile(ctx context.Context, s Snapshot, uri span.URI) ([]*Diagnostic, error) -} - -type unit = struct{} - -// A CriticalError is a workspace-wide error that generally prevents gopls from -// functioning correctly. In the presence of critical errors, other diagnostics -// in the workspace may not make sense. -type CriticalError struct { - // MainError is the primary error. Must be non-nil. - MainError error - - // Diagnostics contains any supplemental (structured) diagnostics. - Diagnostics []*Diagnostic -} - -// An Diagnostic corresponds to an LSP Diagnostic. -// https://microsoft.github.io/language-server-protocol/specification#diagnostic -type Diagnostic struct { - // TODO(adonovan): should be a protocol.URI, for symmetry. - URI span.URI // of diagnosed file (not diagnostic documentation) - Range protocol.Range - Severity protocol.DiagnosticSeverity - Code string - CodeHref string - - // Source is a human-readable description of the source of the error. - // Diagnostics generated by an analysis.Analyzer set it to Analyzer.Name. 
- Source DiagnosticSource - - Message string - - Tags []protocol.DiagnosticTag - Related []protocol.DiagnosticRelatedInformation - - // Fields below are used internally to generate quick fixes. They aren't - // part of the LSP spec and historically didn't leave the server. - // - // Update(2023-05): version 3.16 of the LSP spec included support for the - // Diagnostic.data field, which holds arbitrary data preserved in the - // diagnostic for codeAction requests. This field allows bundling additional - // information for quick-fixes, and gopls can (and should) use this - // information to avoid re-evaluating diagnostics in code-action handlers. - // - // In order to stage this transition incrementally, the 'BundledFixes' field - // may store a 'bundled' (=json-serialized) form of the associated - // SuggestedFixes. Not all diagnostics have their fixes bundled. - BundledFixes *json.RawMessage - SuggestedFixes []SuggestedFix -} - -func (d *Diagnostic) String() string { - return fmt.Sprintf("%v: %s", d.Range, d.Message) -} - -type DiagnosticSource string - -const ( - UnknownError DiagnosticSource = "" - ListError DiagnosticSource = "go list" - ParseError DiagnosticSource = "syntax" - TypeError DiagnosticSource = "compiler" - ModTidyError DiagnosticSource = "go mod tidy" - OptimizationDetailsError DiagnosticSource = "optimizer details" - UpgradeNotification DiagnosticSource = "upgrade available" - Vulncheck DiagnosticSource = "vulncheck imports" - Govulncheck DiagnosticSource = "govulncheck" - TemplateError DiagnosticSource = "template" - WorkFileError DiagnosticSource = "go.work file" - ConsistencyInfo DiagnosticSource = "consistency" -) - -func AnalyzerErrorKind(name string) DiagnosticSource { - return DiagnosticSource(name) -} diff --git a/gopls/internal/lsp/source/workspace_symbol.go b/gopls/internal/lsp/source/workspace_symbol.go deleted file mode 100644 index c656889fdb6..00000000000 --- a/gopls/internal/lsp/source/workspace_symbol.go +++ /dev/null @@ -1,611 +0,0 
@@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package source - -import ( - "context" - "fmt" - "path" - "path/filepath" - "regexp" - "runtime" - "sort" - "strings" - "unicode" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/fuzzy" -) - -// Symbol holds a precomputed symbol value. Note: we avoid using the -// protocol.SymbolInformation struct here in order to reduce the size of each -// symbol. -type Symbol struct { - Name string - Kind protocol.SymbolKind - Range protocol.Range -} - -// maxSymbols defines the maximum number of symbol results that should ever be -// sent in response to a client. -const maxSymbols = 100 - -// WorkspaceSymbols matches symbols across all views using the given query, -// according to the match semantics parameterized by matcherType and style. -// -// The workspace symbol method is defined in the spec as follows: -// -// The workspace symbol request is sent from the client to the server to -// list project-wide symbols matching the query string. -// -// It is unclear what "project-wide" means here, but given the parameters of -// workspace/symbol do not include any workspace identifier, then it has to be -// assumed that "project-wide" means "across all workspaces". Hence why -// WorkspaceSymbols receives the views []View. -// -// However, it then becomes unclear what it would mean to call WorkspaceSymbols -// with a different configured SymbolMatcher per View. Therefore we assume that -// Session level configuration will define the SymbolMatcher to be used for the -// WorkspaceSymbols method. 
-func WorkspaceSymbols(ctx context.Context, matcher SymbolMatcher, style SymbolStyle, views []View, query string) ([]protocol.SymbolInformation, error) { - ctx, done := event.Start(ctx, "source.WorkspaceSymbols") - defer done() - if query == "" { - return nil, nil - } - - var s symbolizer - switch style { - case DynamicSymbols: - s = dynamicSymbolMatch - case FullyQualifiedSymbols: - s = fullyQualifiedSymbolMatch - case PackageQualifiedSymbols: - s = packageSymbolMatch - default: - panic(fmt.Errorf("unknown symbol style: %v", style)) - } - - return collectSymbols(ctx, views, matcher, s, query) -} - -// A matcherFunc returns the index and score of a symbol match. -// -// See the comment for symbolCollector for more information. -type matcherFunc func(chunks []string) (int, float64) - -// A symbolizer returns the best symbol match for a name with pkg, according to -// some heuristic. The symbol name is passed as the slice nameParts of logical -// name pieces. For example, for myType.field the caller can pass either -// []string{"myType.field"} or []string{"myType.", "field"}. -// -// See the comment for symbolCollector for more information. -// -// The space argument is an empty slice with spare capacity that may be used -// to allocate the result. -type symbolizer func(space []string, name string, pkg *Metadata, m matcherFunc) ([]string, float64) - -func fullyQualifiedSymbolMatch(space []string, name string, pkg *Metadata, matcher matcherFunc) ([]string, float64) { - if _, score := dynamicSymbolMatch(space, name, pkg, matcher); score > 0 { - return append(space, string(pkg.PkgPath), ".", name), score - } - return nil, 0 -} - -func dynamicSymbolMatch(space []string, name string, pkg *Metadata, matcher matcherFunc) ([]string, float64) { - if IsCommandLineArguments(pkg.ID) { - // command-line-arguments packages have a non-sensical package path, so - // just use their package name. 
- return packageSymbolMatch(space, name, pkg, matcher) - } - - var score float64 - - endsInPkgName := strings.HasSuffix(string(pkg.PkgPath), string(pkg.Name)) - - // If the package path does not end in the package name, we need to check the - // package-qualified symbol as an extra pass first. - if !endsInPkgName { - pkgQualified := append(space, string(pkg.Name), ".", name) - idx, score := matcher(pkgQualified) - nameStart := len(pkg.Name) + 1 - if score > 0 { - // If our match is contained entirely within the unqualified portion, - // just return that. - if idx >= nameStart { - return append(space, name), score - } - // Lower the score for matches that include the package name. - return pkgQualified, score * 0.8 - } - } - - // Now try matching the fully qualified symbol. - fullyQualified := append(space, string(pkg.PkgPath), ".", name) - idx, score := matcher(fullyQualified) - - // As above, check if we matched just the unqualified symbol name. - nameStart := len(pkg.PkgPath) + 1 - if idx >= nameStart { - return append(space, name), score - } - - // If our package path ends in the package name, we'll have skipped the - // initial pass above, so check if we matched just the package-qualified - // name. - if endsInPkgName && idx >= 0 { - pkgStart := len(pkg.PkgPath) - len(pkg.Name) - if idx >= pkgStart { - return append(space, string(pkg.Name), ".", name), score - } - } - - // Our match was not contained within the unqualified or package qualified - // symbol. Return the fully qualified symbol but discount the score. 
- return fullyQualified, score * 0.6 -} - -func packageSymbolMatch(space []string, name string, pkg *Metadata, matcher matcherFunc) ([]string, float64) { - qualified := append(space, string(pkg.Name), ".", name) - if _, s := matcher(qualified); s > 0 { - return qualified, s - } - return nil, 0 -} - -func buildMatcher(matcher SymbolMatcher, query string) matcherFunc { - switch matcher { - case SymbolFuzzy: - return parseQuery(query, newFuzzyMatcher) - case SymbolFastFuzzy: - return parseQuery(query, func(query string) matcherFunc { - return fuzzy.NewSymbolMatcher(query).Match - }) - case SymbolCaseSensitive: - return matchExact(query) - case SymbolCaseInsensitive: - q := strings.ToLower(query) - exact := matchExact(q) - wrapper := []string{""} - return func(chunks []string) (int, float64) { - s := strings.Join(chunks, "") - wrapper[0] = strings.ToLower(s) - return exact(wrapper) - } - } - panic(fmt.Errorf("unknown symbol matcher: %v", matcher)) -} - -func newFuzzyMatcher(query string) matcherFunc { - fm := fuzzy.NewMatcher(query) - return func(chunks []string) (int, float64) { - score := float64(fm.ScoreChunks(chunks)) - ranges := fm.MatchedRanges() - if len(ranges) > 0 { - return ranges[0], score - } - return -1, score - } -} - -// parseQuery parses a field-separated symbol query, extracting the special -// characters listed below, and returns a matcherFunc corresponding to the AND -// of all field queries. -// -// Special characters: -// -// ^ match exact prefix -// $ match exact suffix -// ' match exact -// -// In all three of these special queries, matches are 'smart-cased', meaning -// they are case sensitive if the symbol query contains any upper-case -// characters, and case insensitive otherwise. 
-func parseQuery(q string, newMatcher func(string) matcherFunc) matcherFunc { - fields := strings.Fields(q) - if len(fields) == 0 { - return func([]string) (int, float64) { return -1, 0 } - } - var funcs []matcherFunc - for _, field := range fields { - var f matcherFunc - switch { - case strings.HasPrefix(field, "^"): - prefix := field[1:] - f = smartCase(prefix, func(chunks []string) (int, float64) { - s := strings.Join(chunks, "") - if strings.HasPrefix(s, prefix) { - return 0, 1 - } - return -1, 0 - }) - case strings.HasPrefix(field, "'"): - exact := field[1:] - f = smartCase(exact, matchExact(exact)) - case strings.HasSuffix(field, "$"): - suffix := field[0 : len(field)-1] - f = smartCase(suffix, func(chunks []string) (int, float64) { - s := strings.Join(chunks, "") - if strings.HasSuffix(s, suffix) { - return len(s) - len(suffix), 1 - } - return -1, 0 - }) - default: - f = newMatcher(field) - } - funcs = append(funcs, f) - } - if len(funcs) == 1 { - return funcs[0] - } - return comboMatcher(funcs).match -} - -func matchExact(exact string) matcherFunc { - return func(chunks []string) (int, float64) { - s := strings.Join(chunks, "") - if idx := strings.LastIndex(s, exact); idx >= 0 { - return idx, 1 - } - return -1, 0 - } -} - -// smartCase returns a matcherFunc that is case-sensitive if q contains any -// upper-case characters, and case-insensitive otherwise. 
-func smartCase(q string, m matcherFunc) matcherFunc { - insensitive := strings.ToLower(q) == q - wrapper := []string{""} - return func(chunks []string) (int, float64) { - s := strings.Join(chunks, "") - if insensitive { - s = strings.ToLower(s) - } - wrapper[0] = s - return m(wrapper) - } -} - -type comboMatcher []matcherFunc - -func (c comboMatcher) match(chunks []string) (int, float64) { - score := 1.0 - first := 0 - for _, f := range c { - idx, s := f(chunks) - if idx < first { - first = idx - } - score *= s - } - return first, score -} - -// collectSymbols calls snapshot.Symbols to walk the syntax trees of -// all files in the views' current snapshots, and returns a sorted, -// scored list of symbols that best match the parameters. -// -// How it matches symbols is parameterized by two interfaces: -// - A matcherFunc determines how well a string symbol matches a query. It -// returns a non-negative score indicating the quality of the match. A score -// of zero indicates no match. -// - A symbolizer determines how we extract the symbol for an object. This -// enables the 'symbolStyle' configuration option. -func collectSymbols(ctx context.Context, views []View, matcherType SymbolMatcher, symbolizer symbolizer, query string) ([]protocol.SymbolInformation, error) { - // Extract symbols from all files. - var work []symbolFile - var roots []string - seen := make(map[span.URI]bool) - // TODO(adonovan): opt: parallelize this loop? How often is len > 1? - for _, v := range views { - snapshot, release, err := v.Snapshot() - if err != nil { - continue // view is shut down; continue with others - } - defer release() - - // Use the root view URIs for determining (lexically) - // whether a URI is in any open workspace. 
- roots = append(roots, strings.TrimRight(string(v.Folder()), "/")) - - filters := snapshot.Options().DirectoryFilters - filterer := NewFilterer(filters) - folder := filepath.ToSlash(v.Folder().Filename()) - - workspaceOnly := true - if snapshot.Options().SymbolScope == AllSymbolScope { - workspaceOnly = false - } - symbols, err := snapshot.Symbols(ctx, workspaceOnly) - if err != nil { - return nil, err - } - - for uri, syms := range symbols { - norm := filepath.ToSlash(uri.Filename()) - nm := strings.TrimPrefix(norm, folder) - if filterer.Disallow(nm) { - continue - } - // Only scan each file once. - if seen[uri] { - continue - } - meta, err := NarrowestMetadataForFile(ctx, snapshot, uri) - if err != nil { - event.Error(ctx, fmt.Sprintf("missing metadata for %q", uri), err) - continue - } - seen[uri] = true - work = append(work, symbolFile{uri, meta, syms}) - } - } - - // Match symbols in parallel. - // Each worker has its own symbolStore, - // which we merge at the end. - nmatchers := runtime.GOMAXPROCS(-1) // matching is CPU bound - results := make(chan *symbolStore) - for i := 0; i < nmatchers; i++ { - go func(i int) { - matcher := buildMatcher(matcherType, query) - store := new(symbolStore) - // Assign files to workers in round-robin fashion. - for j := i; j < len(work); j += nmatchers { - matchFile(store, symbolizer, matcher, roots, work[j]) - } - results <- store - }(i) - } - - // Gather and merge results as they arrive. - var unified symbolStore - for i := 0; i < nmatchers; i++ { - store := <-results - for _, syms := range store.res { - unified.store(syms) - } - } - return unified.results(), nil -} - -type Filterer struct { - // Whether a filter is excluded depends on the operator (first char of the raw filter). - // Slices filters and excluded then should have the same length. 
- filters []*regexp.Regexp - excluded []bool -} - -// NewFilterer computes regular expression form of all raw filters -func NewFilterer(rawFilters []string) *Filterer { - var f Filterer - for _, filter := range rawFilters { - filter = path.Clean(filepath.ToSlash(filter)) - // TODO(dungtuanle): fix: validate [+-] prefix. - op, prefix := filter[0], filter[1:] - // convertFilterToRegexp adds "/" at the end of prefix to handle cases where a filter is a prefix of another filter. - // For example, it prevents [+foobar, -foo] from excluding "foobar". - f.filters = append(f.filters, convertFilterToRegexp(filepath.ToSlash(prefix))) - f.excluded = append(f.excluded, op == '-') - } - - return &f -} - -// Disallow return true if the path is excluded from the filterer's filters. -func (f *Filterer) Disallow(path string) bool { - // Ensure trailing but not leading slash. - path = strings.TrimPrefix(path, "/") - if !strings.HasSuffix(path, "/") { - path += "/" - } - - // TODO(adonovan): opt: iterate in reverse and break at first match. - excluded := false - for i, filter := range f.filters { - if filter.MatchString(path) { - excluded = f.excluded[i] // last match wins - } - } - return excluded -} - -// convertFilterToRegexp replaces glob-like operator substrings in a string file path to their equivalent regex forms. -// Supporting glob-like operators: -// - **: match zero or more complete path segments -func convertFilterToRegexp(filter string) *regexp.Regexp { - if filter == "" { - return regexp.MustCompile(".*") - } - var ret strings.Builder - ret.WriteString("^") - segs := strings.Split(filter, "/") - for _, seg := range segs { - // Inv: seg != "" since path is clean. 
- if seg == "**" { - ret.WriteString(".*") - } else { - ret.WriteString(regexp.QuoteMeta(seg)) - } - ret.WriteString("/") - } - pattern := ret.String() - - // Remove unnecessary "^.*" prefix, which increased - // BenchmarkWorkspaceSymbols time by ~20% (even though - // filter CPU time increased by only by ~2.5%) when the - // default filter was changed to "**/node_modules". - pattern = strings.TrimPrefix(pattern, "^.*") - - return regexp.MustCompile(pattern) -} - -// symbolFile holds symbol information for a single file. -type symbolFile struct { - uri span.URI - md *Metadata - syms []Symbol -} - -// matchFile scans a symbol file and adds matching symbols to the store. -func matchFile(store *symbolStore, symbolizer symbolizer, matcher matcherFunc, roots []string, i symbolFile) { - space := make([]string, 0, 3) - for _, sym := range i.syms { - symbolParts, score := symbolizer(space, sym.Name, i.md, matcher) - - // Check if the score is too low before applying any downranking. - if store.tooLow(score) { - continue - } - - // Factors to apply to the match score for the purpose of downranking - // results. - // - // These numbers were crudely calibrated based on trial-and-error using a - // small number of sample queries. Adjust as necessary. - // - // All factors are multiplicative, meaning if more than one applies they are - // multiplied together. - const ( - // nonWorkspaceFactor is applied to symbols outside the workspace. - // Developers are less likely to want to jump to code that they - // are not actively working on. - nonWorkspaceFactor = 0.5 - // nonWorkspaceUnexportedFactor is applied to unexported symbols outside - // the workspace. Since one wouldn't usually jump to unexported - // symbols to understand a package API, they are particularly irrelevant. - nonWorkspaceUnexportedFactor = 0.5 - // every field or method nesting level to access the field decreases - // the score by a factor of 1.0 - depth*depthFactor, up to a depth of - // 3. 
- // - // Use a small constant here, as this exists mostly to break ties - // (e.g. given a type Foo and a field x.Foo, prefer Foo). - depthFactor = 0.01 - ) - - startWord := true - exported := true - depth := 0.0 - for _, r := range sym.Name { - if startWord && !unicode.IsUpper(r) { - exported = false - } - if r == '.' { - startWord = true - depth++ - } else { - startWord = false - } - } - - // TODO(rfindley): use metadata to determine if the file is in a workspace - // package, rather than this heuristic. - inWorkspace := false - for _, root := range roots { - if strings.HasPrefix(string(i.uri), root) { - inWorkspace = true - break - } - } - - // Apply downranking based on workspace position. - if !inWorkspace { - score *= nonWorkspaceFactor - if !exported { - score *= nonWorkspaceUnexportedFactor - } - } - - // Apply downranking based on symbol depth. - if depth > 3 { - depth = 3 - } - score *= 1.0 - depth*depthFactor - - if store.tooLow(score) { - continue - } - - si := symbolInformation{ - score: score, - symbol: strings.Join(symbolParts, ""), - kind: sym.Kind, - uri: i.uri, - rng: sym.Range, - container: string(i.md.PkgPath), - } - store.store(si) - } -} - -type symbolStore struct { - res [maxSymbols]symbolInformation -} - -// store inserts si into the sorted results, if si has a high enough score. -func (sc *symbolStore) store(si symbolInformation) { - if sc.tooLow(si.score) { - return - } - insertAt := sort.Search(len(sc.res), func(i int) bool { - // Sort by score, then symbol length, and finally lexically. 
- if sc.res[i].score != si.score { - return sc.res[i].score < si.score - } - if len(sc.res[i].symbol) != len(si.symbol) { - return len(sc.res[i].symbol) > len(si.symbol) - } - return sc.res[i].symbol > si.symbol - }) - if insertAt < len(sc.res)-1 { - copy(sc.res[insertAt+1:], sc.res[insertAt:len(sc.res)-1]) - } - sc.res[insertAt] = si -} - -func (sc *symbolStore) tooLow(score float64) bool { - return score <= sc.res[len(sc.res)-1].score -} - -func (sc *symbolStore) results() []protocol.SymbolInformation { - var res []protocol.SymbolInformation - for _, si := range sc.res { - if si.score <= 0 { - return res - } - res = append(res, si.asProtocolSymbolInformation()) - } - return res -} - -// symbolInformation is a cut-down version of protocol.SymbolInformation that -// allows struct values of this type to be used as map keys. -type symbolInformation struct { - score float64 - symbol string - container string - kind protocol.SymbolKind - uri span.URI - rng protocol.Range -} - -// asProtocolSymbolInformation converts s to a protocol.SymbolInformation value. -// -// TODO: work out how to handle tags if/when they are needed. -func (s symbolInformation) asProtocolSymbolInformation() protocol.SymbolInformation { - return protocol.SymbolInformation{ - Name: s.symbol, - Kind: s.kind, - Location: protocol.Location{ - URI: protocol.URIFromSpanURI(s.uri), - Range: s.rng, - }, - ContainerName: s.container, - } -} diff --git a/gopls/internal/lsp/source/workspace_symbol_test.go b/gopls/internal/lsp/source/workspace_symbol_test.go deleted file mode 100644 index 24fb8b45210..00000000000 --- a/gopls/internal/lsp/source/workspace_symbol_test.go +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package source - -import ( - "testing" -) - -func TestParseQuery(t *testing.T) { - tests := []struct { - query, s string - wantMatch bool - }{ - {"", "anything", false}, - {"any", "anything", true}, - {"any$", "anything", false}, - {"ing$", "anything", true}, - {"ing$", "anythinG", true}, - {"inG$", "anything", false}, - {"^any", "anything", true}, - {"^any", "Anything", true}, - {"^Any", "anything", false}, - {"at", "anything", true}, - // TODO: this appears to be a bug in the fuzzy matching algorithm. 'At' - // should cause a case-sensitive match. - // {"At", "anything", false}, - {"At", "Anything", true}, - {"'yth", "Anything", true}, - {"'yti", "Anything", false}, - {"'any 'thing", "Anything", true}, - {"anythn nythg", "Anything", true}, - {"ntx", "Anything", false}, - {"anythn", "anything", true}, - {"ing", "anything", true}, - {"anythn nythgx", "anything", false}, - } - - for _, test := range tests { - matcher := parseQuery(test.query, newFuzzyMatcher) - if _, score := matcher([]string{test.s}); score > 0 != test.wantMatch { - t.Errorf("parseQuery(%q) match for %q: %.2g, want match: %t", test.query, test.s, score, test.wantMatch) - } - } -} - -func TestFiltererDisallow(t *testing.T) { - tests := []struct { - filters []string - included []string - excluded []string - }{ - { - []string{"+**/c.go"}, - []string{"a/c.go", "a/b/c.go"}, - []string{}, - }, - { - []string{"+a/**/c.go"}, - []string{"a/b/c.go", "a/b/d/c.go", "a/c.go"}, - []string{}, - }, - { - []string{"-a/c.go", "+a/**"}, - []string{"a/c.go"}, - []string{}, - }, - { - []string{"+a/**/c.go", "-**/c.go"}, - []string{}, - []string{"a/b/c.go"}, - }, - { - []string{"+a/**/c.go", "-a/**"}, - []string{}, - []string{"a/b/c.go"}, - }, - { - []string{"+**/c.go", "-a/**/c.go"}, - []string{}, - []string{"a/b/c.go"}, - }, - { - []string{"+foobar", "-foo"}, - []string{"foobar", "foobar/a"}, - []string{"foo", "foo/a"}, - }, - { - []string{"+", "-"}, - []string{}, - []string{"foobar", "foobar/a", "foo", "foo/a"}, - 
}, - { - []string{"-", "+"}, - []string{"foobar", "foobar/a", "foo", "foo/a"}, - []string{}, - }, - { - []string{"-a/**/b/**/c.go"}, - []string{}, - []string{"a/x/y/z/b/f/g/h/c.go"}, - }, - // tests for unsupported glob operators - { - []string{"+**/c.go", "-a/*/c.go"}, - []string{"a/b/c.go"}, - []string{}, - }, - { - []string{"+**/c.go", "-a/?/c.go"}, - []string{"a/b/c.go"}, - []string{}, - }, - { - []string{"-b"}, // should only filter paths prefixed with the "b" directory - []string{"a/b/c.go", "bb"}, - []string{"b/c/d.go", "b"}, - }, - } - - for _, test := range tests { - filterer := NewFilterer(test.filters) - for _, inc := range test.included { - if filterer.Disallow(inc) { - t.Errorf("Filters %v excluded %v, wanted included", test.filters, inc) - } - } - - for _, exc := range test.excluded { - if !filterer.Disallow(exc) { - t.Errorf("Filters %v included %v, wanted excluded", test.filters, exc) - } - } - } -} diff --git a/gopls/internal/lsp/symbols.go b/gopls/internal/lsp/symbols.go deleted file mode 100644 index 18bae059e79..00000000000 --- a/gopls/internal/lsp/symbols.go +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package lsp - -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/template" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" -) - -func (s *Server) documentSymbol(ctx context.Context, params *protocol.DocumentSymbolParams) ([]interface{}, error) { - ctx, done := event.Start(ctx, "lsp.Server.documentSymbol", tag.URI.Of(params.TextDocument.URI)) - defer done() - - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.UnknownKind) - defer release() - if !ok { - return []interface{}{}, err - } - var docSymbols []protocol.DocumentSymbol - switch snapshot.FileKind(fh) { - case source.Tmpl: - docSymbols, err = template.DocumentSymbols(snapshot, fh) - case source.Go: - docSymbols, err = source.DocumentSymbols(ctx, snapshot, fh) - default: - return []interface{}{}, nil - } - if err != nil { - event.Error(ctx, "DocumentSymbols failed", err) - return []interface{}{}, nil - } - // Convert the symbols to an interface array. - // TODO: Remove this once the lsp deprecates SymbolInformation. - symbols := make([]interface{}, len(docSymbols)) - for i, s := range docSymbols { - if snapshot.Options().HierarchicalDocumentSymbolSupport { - symbols[i] = s - continue - } - // If the client does not support hierarchical document symbols, then - // we need to be backwards compatible for now and return SymbolInformation. - symbols[i] = protocol.SymbolInformation{ - Name: s.Name, - Kind: s.Kind, - Deprecated: s.Deprecated, - Location: protocol.Location{ - URI: params.TextDocument.URI, - Range: s.Range, - }, - } - } - return symbols, nil -} diff --git a/gopls/internal/lsp/template/completion.go b/gopls/internal/lsp/template/completion.go deleted file mode 100644 index 292563a88cd..00000000000 --- a/gopls/internal/lsp/template/completion.go +++ /dev/null @@ -1,287 +0,0 @@ -// Copyright 2021 The Go Authors. 
All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package template - -import ( - "bytes" - "context" - "fmt" - "go/scanner" - "go/token" - "strings" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" -) - -// information needed for completion -type completer struct { - p *Parsed - pos protocol.Position - offset int // offset of the start of the Token - ctx protocol.CompletionContext - syms map[string]symbol -} - -func Completion(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, pos protocol.Position, context protocol.CompletionContext) (*protocol.CompletionList, error) { - all := New(snapshot.Templates()) - var start int // the beginning of the Token (completed or not) - syms := make(map[string]symbol) - var p *Parsed - for fn, fc := range all.files { - // collect symbols from all template files - filterSyms(syms, fc.symbols) - if fn.Filename() != fh.URI().Filename() { - continue - } - if start = inTemplate(fc, pos); start == -1 { - return nil, nil - } - p = fc - } - if p == nil { - // this cannot happen unless the search missed a template file - return nil, fmt.Errorf("%s not found", fh.FileIdentity().URI.Filename()) - } - c := completer{ - p: p, - pos: pos, - offset: start + len(Left), - ctx: context, - syms: syms, - } - return c.complete() -} - -func filterSyms(syms map[string]symbol, ns []symbol) { - for _, xsym := range ns { - switch xsym.kind { - case protocol.Method, protocol.Package, protocol.Boolean, protocol.Namespace, - protocol.Function: - syms[xsym.name] = xsym // we don't care which symbol we get - case protocol.Variable: - if xsym.name != "dot" { - syms[xsym.name] = xsym - } - case protocol.Constant: - if xsym.name == "nil" { - syms[xsym.name] = xsym - } - } - } -} - -// return the starting position of the enclosing token, or -1 if none -func inTemplate(fc *Parsed, pos protocol.Position) int { - // pos 
is the pos-th character. if the cursor is at the beginning - // of the file, pos is 0. That is, we've only seen characters before pos - // 1. pos might be in a Token, return tk.Start - // 2. pos might be after an elided but before a Token, return elided - // 3. return -1 for false - offset := fc.FromPosition(pos) - // this could be a binary search, as the tokens are ordered - for _, tk := range fc.tokens { - if tk.Start < offset && offset <= tk.End { - return tk.Start - } - } - for _, x := range fc.elided { - if x > offset { - // fc.elided is sorted - break - } - // If the interval [x,offset] does not contain Left or Right - // then provide completions. (do we need the test for Right?) - if !bytes.Contains(fc.buf[x:offset], []byte(Left)) && !bytes.Contains(fc.buf[x:offset], []byte(Right)) { - return x - } - } - return -1 -} - -var ( - keywords = []string{"if", "with", "else", "block", "range", "template", "end}}", "end"} - globals = []string{"and", "call", "html", "index", "slice", "js", "len", "not", "or", - "urlquery", "printf", "println", "print", "eq", "ne", "le", "lt", "ge", "gt"} -) - -// find the completions. start is the offset of either the Token enclosing pos, or where -// the incomplete token starts. -// The error return is always nil. -func (c *completer) complete() (*protocol.CompletionList, error) { - ans := &protocol.CompletionList{IsIncomplete: true, Items: []protocol.CompletionItem{}} - start := c.p.FromPosition(c.pos) - sofar := c.p.buf[c.offset:start] - if len(sofar) == 0 || sofar[len(sofar)-1] == ' ' || sofar[len(sofar)-1] == '\t' { - return ans, nil - } - // sofar could be parsed by either c.analyzer() or scan(). The latter is precise - // and slower, but fast enough - words := scan(sofar) - // 1. if pattern starts $, show variables - // 2. if pattern starts ., show methods (and . by itself?) - // 3. if len(words) == 1, show firstWords (but if it were a |, show functions and globals) - // 4. ...? (parenthetical expressions, arguments, ...) 
(packages, namespaces, nil?) - if len(words) == 0 { - return nil, nil // if this happens, why were we called? - } - pattern := string(words[len(words)-1]) - if pattern[0] == '$' { - // should we also return a raw "$"? - for _, s := range c.syms { - if s.kind == protocol.Variable && weakMatch(s.name, pattern) > 0 { - ans.Items = append(ans.Items, protocol.CompletionItem{ - Label: s.name, - Kind: protocol.VariableCompletion, - Detail: "Variable", - }) - } - } - return ans, nil - } - if pattern[0] == '.' { - for _, s := range c.syms { - if s.kind == protocol.Method && weakMatch("."+s.name, pattern) > 0 { - ans.Items = append(ans.Items, protocol.CompletionItem{ - Label: s.name, - Kind: protocol.MethodCompletion, - Detail: "Method/member", - }) - } - } - return ans, nil - } - // could we get completion attempts in strings or numbers, and if so, do we care? - // globals - for _, kw := range globals { - if weakMatch(kw, string(pattern)) != 0 { - ans.Items = append(ans.Items, protocol.CompletionItem{ - Label: kw, - Kind: protocol.KeywordCompletion, - Detail: "Function", - }) - } - } - // and functions - for _, s := range c.syms { - if s.kind == protocol.Function && weakMatch(s.name, pattern) != 0 { - ans.Items = append(ans.Items, protocol.CompletionItem{ - Label: s.name, - Kind: protocol.FunctionCompletion, - Detail: "Function", - }) - } - } - // keywords if we're at the beginning - if len(words) <= 1 || len(words[len(words)-2]) == 1 && words[len(words)-2][0] == '|' { - for _, kw := range keywords { - if weakMatch(kw, string(pattern)) != 0 { - ans.Items = append(ans.Items, protocol.CompletionItem{ - Label: kw, - Kind: protocol.KeywordCompletion, - Detail: "keyword", - }) - } - } - } - return ans, nil -} - -// someday think about comments, strings, backslashes, etc -// this would repeat some of the template parsing, but because the user is typing -// there may be no parse tree here. 
-// (go/scanner will report 2 tokens for $a, as $ is not a legal go identifier character) -// (go/scanner is about 2.7 times more expensive) -func (c *completer) analyze(buf []byte) [][]byte { - // we want to split on whitespace and before dots - var working []byte - var ans [][]byte - for _, ch := range buf { - if ch == '.' && len(working) > 0 { - ans = append(ans, working) - working = []byte{'.'} - continue - } - if ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' { - if len(working) > 0 { - ans = append(ans, working) - working = []byte{} - continue - } - } - working = append(working, ch) - } - if len(working) > 0 { - ans = append(ans, working) - } - ch := buf[len(buf)-1] - if ch == ' ' || ch == '\t' { - // avoid completing on whitespace - ans = append(ans, []byte{ch}) - } - return ans -} - -// version of c.analyze that uses go/scanner. -func scan(buf []byte) []string { - fset := token.NewFileSet() - fp := fset.AddFile("", -1, len(buf)) - var sc scanner.Scanner - sc.Init(fp, buf, func(pos token.Position, msg string) {}, scanner.ScanComments) - ans := make([]string, 0, 10) // preallocating gives a measurable savings - for { - _, tok, lit := sc.Scan() // tok is an int - if tok == token.EOF { - break // done - } else if tok == token.SEMICOLON && lit == "\n" { - continue // don't care, but probably can't happen - } else if tok == token.PERIOD { - ans = append(ans, ".") // lit is empty - } else if tok == token.IDENT && len(ans) > 0 && ans[len(ans)-1] == "." { - ans[len(ans)-1] = "." 
+ lit - } else if tok == token.IDENT && len(ans) > 0 && ans[len(ans)-1] == "$" { - ans[len(ans)-1] = "$" + lit - } else if lit != "" { - ans = append(ans, lit) - } - } - return ans -} - -// pattern is what the user has typed -func weakMatch(choice, pattern string) float64 { - lower := strings.ToLower(choice) - // for now, use only lower-case everywhere - pattern = strings.ToLower(pattern) - // The first char has to match - if pattern[0] != lower[0] { - return 0 - } - // If they start with ., then the second char has to match - from := 1 - if pattern[0] == '.' { - if len(pattern) < 2 { - return 1 // pattern just a ., so it matches - } - if pattern[1] != lower[1] { - return 0 - } - from = 2 - } - // check that all the characters of pattern occur as a subsequence of choice - i, j := from, from - for ; i < len(lower) && j < len(pattern); j++ { - if pattern[j] == lower[i] { - i++ - if i >= len(lower) { - return 0 - } - } - } - if j < len(pattern) { - return 0 - } - return 1 -} diff --git a/gopls/internal/lsp/template/completion_test.go b/gopls/internal/lsp/template/completion_test.go deleted file mode 100644 index 0fc478842ee..00000000000 --- a/gopls/internal/lsp/template/completion_test.go +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package template - -import ( - "log" - "sort" - "strings" - "testing" - - "golang.org/x/tools/gopls/internal/lsp/protocol" -) - -func init() { - log.SetFlags(log.Lshortfile) -} - -type tparse struct { - marked string // ^ shows where to ask for completions. (The user just typed the following character.) - wanted []string // expected completions -} - -// Test completions in templates that parse enough (if completion needs symbols) -// Seen characters up to the ^ -func TestParsed(t *testing.T) { - var tests = []tparse{ - {"{{x}}{{12. 
xx^", nil}, // https://github.com/golang/go/issues/50430 - {``, nil}, - {"{{i^f}}", []string{"index", "if"}}, - {"{{if .}}{{e^ {{end}}", []string{"eq", "end}}", "else", "end"}}, - {"{{foo}}{{f^", []string{"foo"}}, - {"{{$^}}", []string{"$"}}, - {"{{$x:=4}}{{$^", []string{"$x"}}, - {"{{$x:=4}}{{$ ^ ", []string{}}, - {"{{len .Modified}}{{.^Mo", []string{"Modified"}}, - {"{{len .Modified}}{{.mf^", []string{"Modified"}}, - {"{{$^ }}", []string{"$"}}, - {"{{$a =3}}{{$^", []string{"$a"}}, - // .two is not good here: fix someday - {`{{.Modified}}{{.^{{if $.one.two}}xxx{{end}}`, []string{"Modified", "one", "two"}}, - {`{{.Modified}}{{.o^{{if $.one.two}}xxx{{end}}`, []string{"one"}}, - {"{{.Modiifed}}{{.one.t^{{if $.one.two}}xxx{{end}}", []string{"two"}}, - {`{{block "foo" .}}{{i^`, []string{"index", "if"}}, - {"{{in^{{Internal}}", []string{"index", "Internal", "if"}}, - // simple number has no completions - {"{{4^e", []string{}}, - // simple string has no completions - {"{{`e^", []string{}}, - {"{{`No i^", []string{}}, // example of why go/scanner is used - {"{{xavier}}{{12. 
x^", []string{"xavier"}}, - } - for _, tx := range tests { - c := testCompleter(t, tx) - var v []string - if c != nil { - ans, _ := c.complete() - for _, a := range ans.Items { - v = append(v, a.Label) - } - } - if len(v) != len(tx.wanted) { - t.Errorf("%q: got %q, wanted %q %d,%d", tx.marked, v, tx.wanted, len(v), len(tx.wanted)) - continue - } - sort.Strings(tx.wanted) - sort.Strings(v) - for i := 0; i < len(v); i++ { - if tx.wanted[i] != v[i] { - t.Errorf("%q at %d: got %v, wanted %v", tx.marked, i, v, tx.wanted) - break - } - } - } -} - -func testCompleter(t *testing.T, tx tparse) *completer { - t.Helper() - // seen chars up to ^ - col := strings.Index(tx.marked, "^") - buf := strings.Replace(tx.marked, "^", "", 1) - p := parseBuffer([]byte(buf)) - pos := protocol.Position{Line: 0, Character: uint32(col)} - if p.ParseErr != nil { - log.Printf("%q: %v", tx.marked, p.ParseErr) - } - offset := inTemplate(p, pos) - if offset == -1 { - return nil - } - syms := make(map[string]symbol) - filterSyms(syms, p.symbols) - c := &completer{ - p: p, - pos: protocol.Position{Line: 0, Character: uint32(col)}, - offset: offset + len(Left), - ctx: protocol.CompletionContext{TriggerKind: protocol.Invoked}, - syms: syms, - } - return c -} diff --git a/gopls/internal/lsp/template/highlight.go b/gopls/internal/lsp/template/highlight.go deleted file mode 100644 index 47069b1cc60..00000000000 --- a/gopls/internal/lsp/template/highlight.go +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package template - -import ( - "context" - "fmt" - "regexp" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" -) - -func Highlight(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, loc protocol.Position) ([]protocol.DocumentHighlight, error) { - buf, err := fh.Content() - if err != nil { - return nil, err - } - p := parseBuffer(buf) - pos := p.FromPosition(loc) - var ans []protocol.DocumentHighlight - if p.ParseErr == nil { - for _, s := range p.symbols { - if s.start <= pos && pos < s.start+s.length { - return markSymbols(p, s) - } - } - } - // these tokens exist whether or not there was a parse error - // (symbols require a successful parse) - for _, tok := range p.tokens { - if tok.Start <= pos && pos < tok.End { - wordAt := findWordAt(p, pos) - if len(wordAt) > 0 { - return markWordInToken(p, wordAt) - } - } - } - // find the 'word' at pos, etc: someday - // until then we get the default action, which doesn't respect word boundaries - return ans, nil -} - -func markSymbols(p *Parsed, sym symbol) ([]protocol.DocumentHighlight, error) { - var ans []protocol.DocumentHighlight - for _, s := range p.symbols { - if s.name == sym.name { - kind := protocol.Read - if s.vardef { - kind = protocol.Write - } - ans = append(ans, protocol.DocumentHighlight{ - Range: p.Range(s.start, s.length), - Kind: kind, - }) - } - } - return ans, nil -} - -// A token is {{...}}, and this marks words in the token that equal the give word -func markWordInToken(p *Parsed, wordAt string) ([]protocol.DocumentHighlight, error) { - var ans []protocol.DocumentHighlight - pat, err := regexp.Compile(fmt.Sprintf(`\b%s\b`, wordAt)) - if err != nil { - return nil, fmt.Errorf("%q: unmatchable word (%v)", wordAt, err) - } - for _, tok := range p.tokens { - got := pat.FindAllIndex(p.buf[tok.Start:tok.End], -1) - for i := 0; i < len(got); i++ { - ans = append(ans, protocol.DocumentHighlight{ - Range: p.Range(got[i][0], 
got[i][1]-got[i][0]), - Kind: protocol.Text, - }) - } - } - return ans, nil -} - -var wordRe = regexp.MustCompile(`[$]?\w+$`) -var moreRe = regexp.MustCompile(`^[$]?\w+`) - -// findWordAt finds the word the cursor is in (meaning in or just before) -func findWordAt(p *Parsed, pos int) string { - if pos >= len(p.buf) { - return "" // can't happen, as we are called with pos < tok.End - } - after := moreRe.Find(p.buf[pos:]) - if len(after) == 0 { - return "" // end of the word - } - got := wordRe.Find(p.buf[:pos+len(after)]) - return string(got) -} diff --git a/gopls/internal/lsp/template/implementations.go b/gopls/internal/lsp/template/implementations.go deleted file mode 100644 index b98acaa313f..00000000000 --- a/gopls/internal/lsp/template/implementations.go +++ /dev/null @@ -1,189 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package template - -import ( - "context" - "fmt" - "regexp" - "strconv" - "time" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" -) - -// line number (1-based) and message -var errRe = regexp.MustCompile(`template.*:(\d+): (.*)`) - -// Diagnose returns parse errors. There is only one. -// The errors are not always helpful. For instance { {end}} -// will likely point to the end of the file. -func Diagnose(f source.FileHandle) []*source.Diagnostic { - // no need for skipTemplate check, as Diagnose is called on the - // snapshot's template files - buf, err := f.Content() - if err != nil { - // Is a Diagnostic with no Range useful? event.Error also? 
- msg := fmt.Sprintf("failed to read %s (%v)", f.URI().Filename(), err) - d := source.Diagnostic{Message: msg, Severity: protocol.SeverityError, URI: f.URI(), - Source: source.TemplateError} - return []*source.Diagnostic{&d} - } - p := parseBuffer(buf) - if p.ParseErr == nil { - return nil - } - unknownError := func(msg string) []*source.Diagnostic { - s := fmt.Sprintf("malformed template error %q: %s", p.ParseErr.Error(), msg) - d := source.Diagnostic{ - Message: s, Severity: protocol.SeverityError, Range: p.Range(p.nls[0], 1), - URI: f.URI(), Source: source.TemplateError} - return []*source.Diagnostic{&d} - } - // errors look like `template: :40: unexpected "}" in operand` - // so the string needs to be parsed - matches := errRe.FindStringSubmatch(p.ParseErr.Error()) - if len(matches) != 3 { - msg := fmt.Sprintf("expected 3 matches, got %d (%v)", len(matches), matches) - return unknownError(msg) - } - lineno, err := strconv.Atoi(matches[1]) - if err != nil { - msg := fmt.Sprintf("couldn't convert %q to int, %v", matches[1], err) - return unknownError(msg) - } - msg := matches[2] - d := source.Diagnostic{Message: msg, Severity: protocol.SeverityError, - Source: source.TemplateError} - start := p.nls[lineno-1] - if lineno < len(p.nls) { - size := p.nls[lineno] - start - d.Range = p.Range(start, size) - } else { - d.Range = p.Range(start, 1) - } - return []*source.Diagnostic{&d} -} - -// Definition finds the definitions of the symbol at loc. It -// does not understand scoping (if any) in templates. This code is -// for definitions, type definitions, and implementations. -// Results only for variables and templates. 
-func Definition(snapshot source.Snapshot, fh source.FileHandle, loc protocol.Position) ([]protocol.Location, error) { - x, _, err := symAtPosition(fh, loc) - if err != nil { - return nil, err - } - sym := x.name - ans := []protocol.Location{} - // PJW: this is probably a pattern to abstract - a := New(snapshot.Templates()) - for k, p := range a.files { - for _, s := range p.symbols { - if !s.vardef || s.name != sym { - continue - } - ans = append(ans, protocol.Location{URI: protocol.DocumentURI(k), Range: p.Range(s.start, s.length)}) - } - } - return ans, nil -} - -func Hover(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, position protocol.Position) (*protocol.Hover, error) { - sym, p, err := symAtPosition(fh, position) - if sym == nil || err != nil { - return nil, err - } - ans := protocol.Hover{Range: p.Range(sym.start, sym.length), Contents: protocol.MarkupContent{Kind: protocol.Markdown}} - switch sym.kind { - case protocol.Function: - ans.Contents.Value = fmt.Sprintf("function: %s", sym.name) - case protocol.Variable: - ans.Contents.Value = fmt.Sprintf("variable: %s", sym.name) - case protocol.Constant: - ans.Contents.Value = fmt.Sprintf("constant %s", sym.name) - case protocol.Method: // field or method - ans.Contents.Value = fmt.Sprintf("%s: field or method", sym.name) - case protocol.Package: // template use, template def (PJW: do we want two?) 
- ans.Contents.Value = fmt.Sprintf("template %s\n(add definition)", sym.name) - case protocol.Namespace: - ans.Contents.Value = fmt.Sprintf("template %s defined", sym.name) - case protocol.Number: - ans.Contents.Value = "number" - case protocol.String: - ans.Contents.Value = "string" - case protocol.Boolean: - ans.Contents.Value = "boolean" - default: - ans.Contents.Value = fmt.Sprintf("oops, sym=%#v", sym) - } - return &ans, nil -} - -func References(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, params *protocol.ReferenceParams) ([]protocol.Location, error) { - sym, _, err := symAtPosition(fh, params.Position) - if sym == nil || err != nil || sym.name == "" { - return nil, err - } - ans := []protocol.Location{} - - a := New(snapshot.Templates()) - for k, p := range a.files { - for _, s := range p.symbols { - if s.name != sym.name { - continue - } - if s.vardef && !params.Context.IncludeDeclaration { - continue - } - ans = append(ans, protocol.Location{URI: protocol.DocumentURI(k), Range: p.Range(s.start, s.length)}) - } - } - // do these need to be sorted? (a.files is a map) - return ans, nil -} - -func SemanticTokens(ctx context.Context, snapshot source.Snapshot, spn span.URI, add func(line, start, len uint32), d func() []uint32) (*protocol.SemanticTokens, error) { - fh, err := snapshot.ReadFile(ctx, spn) - if err != nil { - return nil, err - } - buf, err := fh.Content() - if err != nil { - return nil, err - } - p := parseBuffer(buf) - - for _, t := range p.Tokens() { - if t.Multiline { - la, ca := p.LineCol(t.Start) - lb, cb := p.LineCol(t.End) - add(la, ca, p.RuneCount(la, ca, 0)) - for l := la + 1; l < lb; l++ { - add(l, 0, p.RuneCount(l, 0, 0)) - } - add(lb, 0, p.RuneCount(lb, 0, cb)) - continue - } - sz, err := p.TokenSize(t) - if err != nil { - return nil, err - } - line, col := p.LineCol(t.Start) - add(line, col, uint32(sz)) - } - data := d() - ans := &protocol.SemanticTokens{ - Data: data, - // for small cache, some day. 
for now, the LSP client ignores this - // (that is, when the LSP client starts returning these, we can cache) - ResultID: fmt.Sprintf("%v", time.Now()), - } - return ans, nil -} - -// still need to do rename, etc diff --git a/gopls/internal/lsp/template/parse.go b/gopls/internal/lsp/template/parse.go deleted file mode 100644 index 0816c0dba7d..00000000000 --- a/gopls/internal/lsp/template/parse.go +++ /dev/null @@ -1,508 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package template contains code for dealing with templates -package template - -// template files are small enough that the code reprocesses them each time -// this may be a bad choice for projects with lots of template files. - -// This file contains the parsing code, some debugging printing, and -// implementations for Diagnose, Definition, Hover, References - -import ( - "bytes" - "context" - "fmt" - "io" - "log" - "regexp" - "runtime" - "sort" - "text/template" - "text/template/parse" - "unicode/utf8" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" -) - -var ( - Left = []byte("{{") - Right = []byte("}}") -) - -type Parsed struct { - buf []byte //contents - lines [][]byte // needed?, other than for debugging? 
- elided []int // offsets where Left was replaced by blanks - - // tokens are matched Left-Right pairs, computed before trying to parse - tokens []Token - - // result of parsing - named []*template.Template // the template and embedded templates - ParseErr error - symbols []symbol - stack []parse.Node // used while computing symbols - - // for mapping from offsets in buf to LSP coordinates - // See FromPosition() and LineCol() - nls []int // offset of newlines before each line (nls[0]==-1) - lastnl int // last line seen - check int // used to decide whether to use lastnl or search through nls - nonASCII bool // are there any non-ascii runes in buf? -} - -// Token is a single {{...}}. More precisely, Left...Right -type Token struct { - Start, End int // offset from start of template - Multiline bool -} - -// All contains the Parse of all the template files -type All struct { - files map[span.URI]*Parsed -} - -// New returns the Parses of the snapshot's tmpl files -// (maybe cache these, but then avoiding import cycles needs code rearrangements) -func New(tmpls map[span.URI]source.FileHandle) *All { - all := make(map[span.URI]*Parsed) - for k, v := range tmpls { - buf, err := v.Content() - if err != nil { // PJW: decide what to do with these errors - log.Printf("failed to read %s (%v)", v.URI().Filename(), err) - continue - } - all[k] = parseBuffer(buf) - } - return &All{files: all} -} - -func parseBuffer(buf []byte) *Parsed { - ans := &Parsed{ - buf: buf, - check: -1, - nls: []int{-1}, - } - if len(buf) == 0 { - return ans - } - // how to compute allAscii... 
- for _, b := range buf { - if b >= utf8.RuneSelf { - ans.nonASCII = true - break - } - } - if buf[len(buf)-1] != '\n' { - ans.buf = append(buf, '\n') - } - for i, p := range ans.buf { - if p == '\n' { - ans.nls = append(ans.nls, i) - } - } - ans.setTokens() // ans.buf may be a new []byte - ans.lines = bytes.Split(ans.buf, []byte{'\n'}) - t, err := template.New("").Parse(string(ans.buf)) - if err != nil { - funcs := make(template.FuncMap) - for t == nil && ans.ParseErr == nil { - // in 1.17 it may be possible to avoid getting this error - // template: :2: function "foo" not defined - matches := parseErrR.FindStringSubmatch(err.Error()) - if len(matches) == 2 { - // suppress the error by giving it a function with the right name - funcs[matches[1]] = func() interface{} { return nil } - t, err = template.New("").Funcs(funcs).Parse(string(ans.buf)) - continue - } - ans.ParseErr = err // unfixed error - return ans - } - } - ans.named = t.Templates() - // set the symbols - for _, t := range ans.named { - ans.stack = append(ans.stack, t.Root) - ans.findSymbols() - if t.Name() != "" { - // defining a template. The pos is just after {{define...}} (or {{block...}}?) - at, sz := ans.FindLiteralBefore(int(t.Root.Pos)) - s := symbol{start: at, length: sz, name: t.Name(), kind: protocol.Namespace, vardef: true} - ans.symbols = append(ans.symbols, s) - } - } - - sort.Slice(ans.symbols, func(i, j int) bool { - left, right := ans.symbols[i], ans.symbols[j] - if left.start != right.start { - return left.start < right.start - } - if left.vardef != right.vardef { - return left.vardef - } - return left.kind < right.kind - }) - return ans -} - -// FindLiteralBefore locates the first preceding string literal -// returning its position and length in buf -// or returns -1 if there is none. -// Assume double-quoted string rather than backquoted string for now. 
-func (p *Parsed) FindLiteralBefore(pos int) (int, int) { - left, right := -1, -1 - for i := pos - 1; i >= 0; i-- { - if p.buf[i] != '"' { - continue - } - if right == -1 { - right = i - continue - } - left = i - break - } - if left == -1 { - return -1, 0 - } - return left + 1, right - left - 1 -} - -var ( - parseErrR = regexp.MustCompile(`template:.*function "([^"]+)" not defined`) -) - -func (p *Parsed) setTokens() { - const ( - // InRaw and InString only occur inside an action (SeenLeft) - Start = iota - InRaw - InString - SeenLeft - ) - state := Start - var left, oldState int - for n := 0; n < len(p.buf); n++ { - c := p.buf[n] - switch state { - case InRaw: - if c == '`' { - state = oldState - } - case InString: - if c == '"' && !isEscaped(p.buf[:n]) { - state = oldState - } - case SeenLeft: - if c == '`' { - oldState = state // it's SeenLeft, but a little clearer this way - state = InRaw - continue - } - if c == '"' { - oldState = state - state = InString - continue - } - if bytes.HasPrefix(p.buf[n:], Right) { - right := n + len(Right) - tok := Token{Start: left, - End: right, - Multiline: bytes.Contains(p.buf[left:right], []byte{'\n'}), - } - p.tokens = append(p.tokens, tok) - state = Start - } - // If we see (unquoted) Left then the original left is probably the user - // typing. Suppress the original left - if bytes.HasPrefix(p.buf[n:], Left) { - p.elideAt(left) - left = n - n += len(Left) - 1 // skip the rest - } - case Start: - if bytes.HasPrefix(p.buf[n:], Left) { - left = n - state = SeenLeft - n += len(Left) - 1 // skip the rest (avoids {{{ bug) - } - } - } - // this error occurs after typing {{ at the end of the file - if state != Start { - // Unclosed Left. remove the Left at left - p.elideAt(left) - } -} - -func (p *Parsed) elideAt(left int) { - if p.elided == nil { - // p.buf is the same buffer that v.Read() returns, so copy it. 
- // (otherwise the next time it's parsed, elided information is lost) - b := make([]byte, len(p.buf)) - copy(b, p.buf) - p.buf = b - } - for i := 0; i < len(Left); i++ { - p.buf[left+i] = ' ' - } - p.elided = append(p.elided, left) -} - -// isEscaped reports whether the byte after buf is escaped -func isEscaped(buf []byte) bool { - backSlashes := 0 - for j := len(buf) - 1; j >= 0 && buf[j] == '\\'; j-- { - backSlashes++ - } - return backSlashes%2 == 1 -} - -func (p *Parsed) Tokens() []Token { - return p.tokens -} - -// TODO(adonovan): the next 100 lines could perhaps replaced by use of protocol.Mapper. - -func (p *Parsed) utf16len(buf []byte) int { - cnt := 0 - if !p.nonASCII { - return len(buf) - } - // we need a utf16len(rune), but we don't have it - for _, r := range string(buf) { - cnt++ - if r >= 1<<16 { - cnt++ - } - } - return cnt -} - -func (p *Parsed) TokenSize(t Token) (int, error) { - if t.Multiline { - return -1, fmt.Errorf("TokenSize called with Multiline token %#v", t) - } - ans := p.utf16len(p.buf[t.Start:t.End]) - return ans, nil -} - -// RuneCount counts runes in line l, from col s to e -// (e==0 for end of line. called only for multiline tokens) -func (p *Parsed) RuneCount(l, s, e uint32) uint32 { - start := p.nls[l] + 1 + int(s) - end := p.nls[l] + 1 + int(e) - if e == 0 || end > p.nls[l+1] { - end = p.nls[l+1] - } - return uint32(utf8.RuneCount(p.buf[start:end])) -} - -// LineCol converts from a 0-based byte offset to 0-based line, col. 
col in runes -func (p *Parsed) LineCol(x int) (uint32, uint32) { - if x < p.check { - p.lastnl = 0 - } - p.check = x - for i := p.lastnl; i < len(p.nls); i++ { - if p.nls[i] <= x { - continue - } - p.lastnl = i - var count int - if i > 0 && x == p.nls[i-1] { // \n - count = 0 - } else { - count = p.utf16len(p.buf[p.nls[i-1]+1 : x]) - } - return uint32(i - 1), uint32(count) - } - if x == len(p.buf)-1 { // trailing \n - return uint32(len(p.nls) - 1), 0 - } - // shouldn't happen - for i := 1; i < 4; i++ { - _, f, l, ok := runtime.Caller(i) - if !ok { - break - } - log.Printf("%d: %s:%d", i, f, l) - } - - msg := fmt.Errorf("LineCol off the end, %d of %d, nls=%v, %q", x, len(p.buf), p.nls, p.buf[x:]) - event.Error(context.Background(), "internal error", msg) - return 0, 0 -} - -// Position produces a protocol.Position from an offset in the template -func (p *Parsed) Position(pos int) protocol.Position { - line, col := p.LineCol(pos) - return protocol.Position{Line: line, Character: col} -} - -func (p *Parsed) Range(x, length int) protocol.Range { - line, col := p.LineCol(x) - ans := protocol.Range{ - Start: protocol.Position{Line: line, Character: col}, - End: protocol.Position{Line: line, Character: col + uint32(length)}, - } - return ans -} - -// FromPosition translates a protocol.Position into an offset into the template -func (p *Parsed) FromPosition(x protocol.Position) int { - l, c := int(x.Line), int(x.Character) - if l >= len(p.nls) || p.nls[l]+1 >= len(p.buf) { - // paranoia to avoid panic. return the largest offset - return len(p.buf) - } - line := p.buf[p.nls[l]+1:] - cnt := 0 - for w := range string(line) { - if cnt >= c { - return w + p.nls[l] + 1 - } - cnt++ - } - // do we get here? 
NO - pos := int(x.Character) + p.nls[int(x.Line)] + 1 - event.Error(context.Background(), "internal error", fmt.Errorf("surprise %#v", x)) - return pos -} - -func symAtPosition(fh source.FileHandle, loc protocol.Position) (*symbol, *Parsed, error) { - buf, err := fh.Content() - if err != nil { - return nil, nil, err - } - p := parseBuffer(buf) - pos := p.FromPosition(loc) - syms := p.SymsAtPos(pos) - if len(syms) == 0 { - return nil, p, fmt.Errorf("no symbol found") - } - if len(syms) > 1 { - log.Printf("Hover: %d syms, not 1 %v", len(syms), syms) - } - sym := syms[0] - return &sym, p, nil -} - -func (p *Parsed) SymsAtPos(pos int) []symbol { - ans := []symbol{} - for _, s := range p.symbols { - if s.start <= pos && pos < s.start+s.length { - ans = append(ans, s) - } - } - return ans -} - -type wrNode struct { - p *Parsed - w io.Writer -} - -// WriteNode is for debugging -func (p *Parsed) WriteNode(w io.Writer, n parse.Node) { - wr := wrNode{p: p, w: w} - wr.writeNode(n, "") -} - -func (wr wrNode) writeNode(n parse.Node, indent string) { - if n == nil { - return - } - at := func(pos parse.Pos) string { - line, col := wr.p.LineCol(int(pos)) - return fmt.Sprintf("(%d)%v:%v", pos, line, col) - } - switch x := n.(type) { - case *parse.ActionNode: - fmt.Fprintf(wr.w, "%sActionNode at %s\n", indent, at(x.Pos)) - wr.writeNode(x.Pipe, indent+". ") - case *parse.BoolNode: - fmt.Fprintf(wr.w, "%sBoolNode at %s, %v\n", indent, at(x.Pos), x.True) - case *parse.BranchNode: - fmt.Fprintf(wr.w, "%sBranchNode at %s\n", indent, at(x.Pos)) - wr.writeNode(x.Pipe, indent+"Pipe. ") - wr.writeNode(x.List, indent+"List. ") - wr.writeNode(x.ElseList, indent+"Else. ") - case *parse.ChainNode: - fmt.Fprintf(wr.w, "%sChainNode at %s, %v\n", indent, at(x.Pos), x.Field) - case *parse.CommandNode: - fmt.Fprintf(wr.w, "%sCommandNode at %s, %d children\n", indent, at(x.Pos), len(x.Args)) - for _, a := range x.Args { - wr.writeNode(a, indent+". 
") - } - //case *parse.CommentNode: // 1.16 - case *parse.DotNode: - fmt.Fprintf(wr.w, "%sDotNode at %s\n", indent, at(x.Pos)) - case *parse.FieldNode: - fmt.Fprintf(wr.w, "%sFieldNode at %s, %v\n", indent, at(x.Pos), x.Ident) - case *parse.IdentifierNode: - fmt.Fprintf(wr.w, "%sIdentifierNode at %s, %v\n", indent, at(x.Pos), x.Ident) - case *parse.IfNode: - fmt.Fprintf(wr.w, "%sIfNode at %s\n", indent, at(x.Pos)) - wr.writeNode(&x.BranchNode, indent+". ") - case *parse.ListNode: - if x == nil { - return // nil BranchNode.ElseList - } - fmt.Fprintf(wr.w, "%sListNode at %s, %d children\n", indent, at(x.Pos), len(x.Nodes)) - for _, n := range x.Nodes { - wr.writeNode(n, indent+". ") - } - case *parse.NilNode: - fmt.Fprintf(wr.w, "%sNilNode at %s\n", indent, at(x.Pos)) - case *parse.NumberNode: - fmt.Fprintf(wr.w, "%sNumberNode at %s, %s\n", indent, at(x.Pos), x.Text) - case *parse.PipeNode: - if x == nil { - return // {{template "xxx"}} - } - fmt.Fprintf(wr.w, "%sPipeNode at %s, %d vars, %d cmds, IsAssign:%v\n", - indent, at(x.Pos), len(x.Decl), len(x.Cmds), x.IsAssign) - for _, d := range x.Decl { - wr.writeNode(d, indent+"Decl. ") - } - for _, c := range x.Cmds { - wr.writeNode(c, indent+"Cmd. ") - } - case *parse.RangeNode: - fmt.Fprintf(wr.w, "%sRangeNode at %s\n", indent, at(x.Pos)) - wr.writeNode(&x.BranchNode, indent+". ") - case *parse.StringNode: - fmt.Fprintf(wr.w, "%sStringNode at %s, %s\n", indent, at(x.Pos), x.Quoted) - case *parse.TemplateNode: - fmt.Fprintf(wr.w, "%sTemplateNode at %s, %s\n", indent, at(x.Pos), x.Name) - wr.writeNode(x.Pipe, indent+". ") - case *parse.TextNode: - fmt.Fprintf(wr.w, "%sTextNode at %s, len %d\n", indent, at(x.Pos), len(x.Text)) - case *parse.VariableNode: - fmt.Fprintf(wr.w, "%sVariableNode at %s, %v\n", indent, at(x.Pos), x.Ident) - case *parse.WithNode: - fmt.Fprintf(wr.w, "%sWithNode at %s\n", indent, at(x.Pos)) - wr.writeNode(&x.BranchNode, indent+". 
") - } -} - -var kindNames = []string{"", "File", "Module", "Namespace", "Package", "Class", "Method", "Property", - "Field", "Constructor", "Enum", "Interface", "Function", "Variable", "Constant", "String", - "Number", "Boolean", "Array", "Object", "Key", "Null", "EnumMember", "Struct", "Event", - "Operator", "TypeParameter"} - -func kindStr(k protocol.SymbolKind) string { - n := int(k) - if n < 1 || n >= len(kindNames) { - return fmt.Sprintf("?SymbolKind %d?", n) - } - return kindNames[n] -} diff --git a/gopls/internal/lsp/template/symbols.go b/gopls/internal/lsp/template/symbols.go deleted file mode 100644 index 1d1d9888612..00000000000 --- a/gopls/internal/lsp/template/symbols.go +++ /dev/null @@ -1,230 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package template - -import ( - "bytes" - "context" - "fmt" - "text/template/parse" - "unicode/utf8" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/event" -) - -// in local coordinates, to be translated to protocol.DocumentSymbol -type symbol struct { - start int // for sorting - length int // in runes (unicode code points) - name string - kind protocol.SymbolKind - vardef bool // is this a variable definition? - // do we care about selection range, or children? - // no children yet, and selection range is the same as range -} - -func (s symbol) String() string { - return fmt.Sprintf("{%d,%d,%s,%s,%v}", s.start, s.length, s.name, s.kind, s.vardef) -} - -// for FieldNode or VariableNode (or ChainNode?) -func (p *Parsed) fields(flds []string, x parse.Node) []symbol { - ans := []symbol{} - // guessing that there are no embedded blanks allowed. The doc is unclear - lookfor := "" - switch x.(type) { - case *parse.FieldNode: - for _, f := range flds { - lookfor += "." 
+ f // quadratic, but probably ok - } - case *parse.VariableNode: - lookfor = flds[0] - for i := 1; i < len(flds); i++ { - lookfor += "." + flds[i] - } - case *parse.ChainNode: // PJW, what are these? - for _, f := range flds { - lookfor += "." + f // quadratic, but probably ok - } - default: - // If these happen they will happen even if gopls is restarted - // and the users does the same thing, so it is better not to panic. - // context.Background() is used because we don't have access - // to any other context. [we could, but it would be complicated] - event.Log(context.Background(), fmt.Sprintf("%T unexpected in fields()", x)) - return nil - } - if len(lookfor) == 0 { - event.Log(context.Background(), fmt.Sprintf("no strings in fields() %#v", x)) - return nil - } - startsAt := int(x.Position()) - ix := bytes.Index(p.buf[startsAt:], []byte(lookfor)) // HasPrefix? PJW? - if ix < 0 || ix > len(lookfor) { // lookfor expected to be at start (or so) - // probably golang.go/#43388, so back up - startsAt -= len(flds[0]) + 1 - ix = bytes.Index(p.buf[startsAt:], []byte(lookfor)) // ix might be 1? PJW - if ix < 0 { - return ans - } - } - at := ix + startsAt - for _, f := range flds { - at += 1 // . 
- kind := protocol.Method - if f[0] == '$' { - kind = protocol.Variable - } - sym := symbol{name: f, kind: kind, start: at, length: utf8.RuneCount([]byte(f))} - if kind == protocol.Variable && len(p.stack) > 1 { - if pipe, ok := p.stack[len(p.stack)-2].(*parse.PipeNode); ok { - for _, y := range pipe.Decl { - if x == y { - sym.vardef = true - } - } - } - } - ans = append(ans, sym) - at += len(f) - } - return ans -} - -func (p *Parsed) findSymbols() { - if len(p.stack) == 0 { - return - } - n := p.stack[len(p.stack)-1] - pop := func() { - p.stack = p.stack[:len(p.stack)-1] - } - if n == nil { // allowing nil simplifies the code - pop() - return - } - nxt := func(nd parse.Node) { - p.stack = append(p.stack, nd) - p.findSymbols() - } - switch x := n.(type) { - case *parse.ActionNode: - nxt(x.Pipe) - case *parse.BoolNode: - // need to compute the length from the value - msg := fmt.Sprintf("%v", x.True) - p.symbols = append(p.symbols, symbol{start: int(x.Pos), length: len(msg), kind: protocol.Boolean}) - case *parse.BranchNode: - nxt(x.Pipe) - nxt(x.List) - nxt(x.ElseList) - case *parse.ChainNode: - p.symbols = append(p.symbols, p.fields(x.Field, x)...) - nxt(x.Node) - case *parse.CommandNode: - for _, a := range x.Args { - nxt(a) - } - //case *parse.CommentNode: // go 1.16 - // log.Printf("implement %d", x.Type()) - case *parse.DotNode: - sym := symbol{name: "dot", kind: protocol.Variable, start: int(x.Pos), length: 1} - p.symbols = append(p.symbols, sym) - case *parse.FieldNode: - p.symbols = append(p.symbols, p.fields(x.Ident, x)...) - case *parse.IdentifierNode: - sym := symbol{name: x.Ident, kind: protocol.Function, start: int(x.Pos), - length: utf8.RuneCount([]byte(x.Ident))} - p.symbols = append(p.symbols, sym) - case *parse.IfNode: - nxt(&x.BranchNode) - case *parse.ListNode: - if x != nil { // wretched typed nils. 
Node should have an IfNil - for _, nd := range x.Nodes { - nxt(nd) - } - } - case *parse.NilNode: - sym := symbol{name: "nil", kind: protocol.Constant, start: int(x.Pos), length: 3} - p.symbols = append(p.symbols, sym) - case *parse.NumberNode: - // no name; ascii - p.symbols = append(p.symbols, symbol{start: int(x.Pos), length: len(x.Text), kind: protocol.Number}) - case *parse.PipeNode: - if x == nil { // {{template "foo"}} - return - } - for _, d := range x.Decl { - nxt(d) - } - for _, c := range x.Cmds { - nxt(c) - } - case *parse.RangeNode: - nxt(&x.BranchNode) - case *parse.StringNode: - // no name - sz := utf8.RuneCount([]byte(x.Text)) - p.symbols = append(p.symbols, symbol{start: int(x.Pos), length: sz, kind: protocol.String}) - case *parse.TemplateNode: // invoking a template - // x.Pos points to the quote before the name - p.symbols = append(p.symbols, symbol{name: x.Name, kind: protocol.Package, start: int(x.Pos) + 1, - length: utf8.RuneCount([]byte(x.Name))}) - nxt(x.Pipe) - case *parse.TextNode: - if len(x.Text) == 1 && x.Text[0] == '\n' { - break - } - // nothing to report, but build one for hover - sz := utf8.RuneCount([]byte(x.Text)) - p.symbols = append(p.symbols, symbol{start: int(x.Pos), length: sz, kind: protocol.Constant}) - case *parse.VariableNode: - p.symbols = append(p.symbols, p.fields(x.Ident, x)...) - case *parse.WithNode: - nxt(&x.BranchNode) - - } - pop() -} - -// DocumentSymbols returns a hierarchy of the symbols defined in a template file. -// (The hierarchy is flat. SymbolInformation might be better.) 
-func DocumentSymbols(snapshot source.Snapshot, fh source.FileHandle) ([]protocol.DocumentSymbol, error) { - buf, err := fh.Content() - if err != nil { - return nil, err - } - p := parseBuffer(buf) - if p.ParseErr != nil { - return nil, p.ParseErr - } - var ans []protocol.DocumentSymbol - for _, s := range p.symbols { - if s.kind == protocol.Constant { - continue - } - d := kindStr(s.kind) - if d == "Namespace" { - d = "Template" - } - if s.vardef { - d += "(def)" - } else { - d += "(use)" - } - r := p.Range(s.start, s.length) - y := protocol.DocumentSymbol{ - Name: s.name, - Detail: d, - Kind: s.kind, - Range: r, - SelectionRange: r, // or should this be the entire {{...}}? - } - ans = append(ans, y) - } - return ans, nil -} diff --git a/gopls/internal/lsp/testdata/%percent/perc%ent.go b/gopls/internal/lsp/testdata/%percent/perc%ent.go deleted file mode 100644 index 93b5e5570bb..00000000000 --- a/gopls/internal/lsp/testdata/%percent/perc%ent.go +++ /dev/null @@ -1 +0,0 @@ -package percent diff --git a/gopls/internal/lsp/testdata/addimport/addimport.go.golden b/gopls/internal/lsp/testdata/addimport/addimport.go.golden deleted file mode 100644 index 9605aa6f955..00000000000 --- a/gopls/internal/lsp/testdata/addimport/addimport.go.golden +++ /dev/null @@ -1,7 +0,0 @@ --- addimport -- -package addimport //@addimport("", "bytes") - -import "bytes" - -func main() {} - diff --git a/gopls/internal/lsp/testdata/addimport/addimport.go.in b/gopls/internal/lsp/testdata/addimport/addimport.go.in deleted file mode 100644 index 07b454f5245..00000000000 --- a/gopls/internal/lsp/testdata/addimport/addimport.go.in +++ /dev/null @@ -1,3 +0,0 @@ -package addimport //@addimport("", "bytes") - -func main() {} diff --git a/gopls/internal/lsp/testdata/callhierarchy/callhierarchy.go b/gopls/internal/lsp/testdata/callhierarchy/callhierarchy.go deleted file mode 100644 index 252e8054f40..00000000000 --- a/gopls/internal/lsp/testdata/callhierarchy/callhierarchy.go +++ /dev/null @@ -1,70 +0,0 
@@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package callhierarchy - -import "golang.org/lsptests/callhierarchy/outgoing" - -func a() { //@mark(hierarchyA, "a") - D() -} - -func b() { //@mark(hierarchyB, "b") - D() -} - -// C is an exported function -func C() { //@mark(hierarchyC, "C") - D() - D() -} - -// To test hierarchy across function literals -var x = func() { //@mark(hierarchyLiteral, "func"),mark(hierarchyLiteralOut, "x") - D() -} - -// D is exported to test incoming/outgoing calls across packages -func D() { //@mark(hierarchyD, "D"),incomingcalls(hierarchyD, hierarchyA, hierarchyB, hierarchyC, hierarchyLiteral, incomingA),outgoingcalls(hierarchyD, hierarchyE, hierarchyF, hierarchyG, hierarchyLiteralOut, outgoingB, hierarchyFoo, hierarchyH, hierarchyI, hierarchyJ, hierarchyK) - e() - x() - F() - outgoing.B() - foo := func() {} //@mark(hierarchyFoo, "foo"),incomingcalls(hierarchyFoo, hierarchyD),outgoingcalls(hierarchyFoo) - foo() - - func() { - g() - }() - - var i Interface = impl{} - i.H() - i.I() - - s := Struct{} - s.J() - s.K() -} - -func e() {} //@mark(hierarchyE, "e") - -// F is an exported function -func F() {} //@mark(hierarchyF, "F") - -func g() {} //@mark(hierarchyG, "g") - -type Interface interface { - H() //@mark(hierarchyH, "H") - I() //@mark(hierarchyI, "I") -} - -type impl struct{} - -func (i impl) H() {} -func (i impl) I() {} - -type Struct struct { - J func() //@mark(hierarchyJ, "J") - K func() //@mark(hierarchyK, "K") -} diff --git a/gopls/internal/lsp/testdata/callhierarchy/incoming/incoming.go b/gopls/internal/lsp/testdata/callhierarchy/incoming/incoming.go deleted file mode 100644 index c629aa87929..00000000000 --- a/gopls/internal/lsp/testdata/callhierarchy/incoming/incoming.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package incoming - -import "golang.org/lsptests/callhierarchy" - -// A is exported to test incoming calls across packages -func A() { //@mark(incomingA, "A") - callhierarchy.D() -} diff --git a/gopls/internal/lsp/testdata/callhierarchy/outgoing/outgoing.go b/gopls/internal/lsp/testdata/callhierarchy/outgoing/outgoing.go deleted file mode 100644 index 74362d419c3..00000000000 --- a/gopls/internal/lsp/testdata/callhierarchy/outgoing/outgoing.go +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package outgoing - -// B is exported to test outgoing calls across packages -func B() { //@mark(outgoingB, "B") -} diff --git a/gopls/internal/lsp/testdata/embeddirective/embed.txt b/gopls/internal/lsp/testdata/embeddirective/embed.txt deleted file mode 100644 index 8e27be7d615..00000000000 --- a/gopls/internal/lsp/testdata/embeddirective/embed.txt +++ /dev/null @@ -1 +0,0 @@ -text diff --git a/gopls/internal/lsp/testdata/embeddirective/fix_import.go b/gopls/internal/lsp/testdata/embeddirective/fix_import.go deleted file mode 100644 index 5eaf3d09868..00000000000 --- a/gopls/internal/lsp/testdata/embeddirective/fix_import.go +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package embeddirective - -import ( - "io" - "os" -) - -//go:embed embed.txt //@suggestedfix("//go:embed", "quickfix", "") -var t string - -func unused() { - _ = os.Stdin - _ = io.EOF -} diff --git a/gopls/internal/lsp/testdata/embeddirective/fix_import.go.golden b/gopls/internal/lsp/testdata/embeddirective/fix_import.go.golden deleted file mode 100644 index 15a23f4d0a3..00000000000 --- a/gopls/internal/lsp/testdata/embeddirective/fix_import.go.golden +++ /dev/null @@ -1,21 +0,0 @@ --- suggestedfix_fix_import_12_1 -- -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package embeddirective - -import ( - _ "embed" - "io" - "os" -) - -//go:embed embed.txt //@suggestedfix("//go:embed", "quickfix", "") -var t string - -func unused() { - _ = os.Stdin - _ = io.EOF -} - diff --git a/gopls/internal/lsp/testdata/inlay_hint/composite_literals.go b/gopls/internal/lsp/testdata/inlay_hint/composite_literals.go deleted file mode 100644 index b05c95ec800..00000000000 --- a/gopls/internal/lsp/testdata/inlay_hint/composite_literals.go +++ /dev/null @@ -1,27 +0,0 @@ -package inlayHint //@inlayHint("package") - -import "fmt" - -func fieldNames() { - for _, c := range []struct { - in, want string - }{ - struct{ in, want string }{"Hello, world", "dlrow ,olleH"}, - {"Hello, 世界", "界世 ,olleH"}, - {"", ""}, - } { - fmt.Println(c.in == c.want) - } -} - -func fieldNamesPointers() { - for _, c := range []*struct { - in, want string - }{ - &struct{ in, want string }{"Hello, world", "dlrow ,olleH"}, - {"Hello, 世界", "界世 ,olleH"}, - {"", ""}, - } { - fmt.Println(c.in == c.want) - } -} diff --git a/gopls/internal/lsp/testdata/inlay_hint/composite_literals.go.golden b/gopls/internal/lsp/testdata/inlay_hint/composite_literals.go.golden deleted file mode 100644 index eb2febdb6a3..00000000000 --- a/gopls/internal/lsp/testdata/inlay_hint/composite_literals.go.golden +++ /dev/null @@ -1,29 
+0,0 @@ --- inlayHint -- -package inlayHint //@inlayHint("package") - -import "fmt" - -func fieldNames() { - for _< int>, c< struct{in string; want string}> := range []struct { - in, want string - }{ - struct{ in, want string }{"Hello, world", "dlrow ,olleH"}, - {"Hello, 世界", "界世 ,olleH"}, - {"", ""}, - } { - fmt.Println(c.in == c.want) - } -} - -func fieldNamesPointers() { - for _< int>, c< *struct{in string; want string}> := range []*struct { - in, want string - }{ - &struct{ in, want string }{"Hello, world", "dlrow ,olleH"}, - <&struct{in string; want string}>{"Hello, 世界", "界世 ,olleH"}, - <&struct{in string; want string}>{"", ""}, - } { - fmt.Println(c.in == c.want) - } -} - diff --git a/gopls/internal/lsp/testdata/inlay_hint/constant_values.go b/gopls/internal/lsp/testdata/inlay_hint/constant_values.go deleted file mode 100644 index e3339b0f303..00000000000 --- a/gopls/internal/lsp/testdata/inlay_hint/constant_values.go +++ /dev/null @@ -1,45 +0,0 @@ -package inlayHint //@inlayHint("package") - -const True = true - -type Kind int - -const ( - KindNone Kind = iota - KindPrint - KindPrintf - KindErrorf -) - -const ( - u = iota * 4 - v float64 = iota * 42 - w = iota * 42 -) - -const ( - a, b = 1, 2 - c, d - e, f = 5 * 5, "hello" + "world" - g, h - i, j = true, f -) - -// No hint -const ( - Int = 3 - Float = 3.14 - Bool = true - Rune = '3' - Complex = 2.7i - String = "Hello, world!" -) - -var ( - varInt = 3 - varFloat = 3.14 - varBool = true - varRune = '3' + '4' - varComplex = 2.7i - varString = "Hello, world!" 
-) diff --git a/gopls/internal/lsp/testdata/inlay_hint/constant_values.go.golden b/gopls/internal/lsp/testdata/inlay_hint/constant_values.go.golden deleted file mode 100644 index edc46debc37..00000000000 --- a/gopls/internal/lsp/testdata/inlay_hint/constant_values.go.golden +++ /dev/null @@ -1,47 +0,0 @@ --- inlayHint -- -package inlayHint //@inlayHint("package") - -const True = true - -type Kind int - -const ( - KindNone Kind = iota< = 0> - KindPrint< = 1> - KindPrintf< = 2> - KindErrorf< = 3> -) - -const ( - u = iota * 4< = 0> - v float64 = iota * 42< = 42> - w = iota * 42< = 84> -) - -const ( - a, b = 1, 2 - c, d< = 1, 2> - e, f = 5 * 5, "hello" + "world"< = 25, "helloworld"> - g, h< = 25, "helloworld"> - i, j = true, f< = true, "helloworld"> -) - -// No hint -const ( - Int = 3 - Float = 3.14 - Bool = true - Rune = '3' - Complex = 2.7i - String = "Hello, world!" -) - -var ( - varInt = 3 - varFloat = 3.14 - varBool = true - varRune = '3' + '4' - varComplex = 2.7i - varString = "Hello, world!" 
-) - diff --git a/gopls/internal/lsp/testdata/inlay_hint/parameter_names.go b/gopls/internal/lsp/testdata/inlay_hint/parameter_names.go deleted file mode 100644 index 0d930e5d426..00000000000 --- a/gopls/internal/lsp/testdata/inlay_hint/parameter_names.go +++ /dev/null @@ -1,50 +0,0 @@ -package inlayHint //@inlayHint("package") - -import "fmt" - -func hello(name string) string { - return "Hello " + name -} - -func helloWorld() string { - return hello("World") -} - -type foo struct{} - -func (*foo) bar(baz string, qux int) int { - if baz != "" { - return qux + 1 - } - return qux -} - -func kase(foo int, bar bool, baz ...string) { - fmt.Println(foo, bar, baz) -} - -func kipp(foo string, bar, baz string) { - fmt.Println(foo, bar, baz) -} - -func plex(foo, bar string, baz string) { - fmt.Println(foo, bar, baz) -} - -func tars(foo string, bar, baz string) { - fmt.Println(foo, bar, baz) -} - -func foobar() { - var x foo - x.bar("", 1) - kase(0, true, "c", "d", "e") - kipp("a", "b", "c") - plex("a", "b", "c") - tars("a", "b", "c") - foo, bar, baz := "a", "b", "c" - kipp(foo, bar, baz) - plex("a", bar, baz) - tars(foo+foo, (bar), "c") - -} diff --git a/gopls/internal/lsp/testdata/inlay_hint/parameter_names.go.golden b/gopls/internal/lsp/testdata/inlay_hint/parameter_names.go.golden deleted file mode 100644 index 4e93a4f9268..00000000000 --- a/gopls/internal/lsp/testdata/inlay_hint/parameter_names.go.golden +++ /dev/null @@ -1,52 +0,0 @@ --- inlayHint -- -package inlayHint //@inlayHint("package") - -import "fmt" - -func hello(name string) string { - return "Hello " + name -} - -func helloWorld() string { - return hello("World") -} - -type foo struct{} - -func (*foo) bar(baz string, qux int) int { - if baz != "" { - return qux + 1 - } - return qux -} - -func kase(foo int, bar bool, baz ...string) { - fmt.Println(foo, bar, baz) -} - -func kipp(foo string, bar, baz string) { - fmt.Println(foo, bar, baz) -} - -func plex(foo, bar string, baz string) { - fmt.Println(foo, bar, 
baz) -} - -func tars(foo string, bar, baz string) { - fmt.Println(foo, bar, baz) -} - -func foobar() { - var x foo - x.bar("", 1) - kase(0, true, "c", "d", "e") - kipp("a", "b", "c") - plex("a", "b", "c") - tars("a", "b", "c") - foo< string>, bar< string>, baz< string> := "a", "b", "c" - kipp(foo, bar, baz) - plex("a", bar, baz) - tars(foo+foo, (bar), "c") - -} - diff --git a/gopls/internal/lsp/testdata/inlay_hint/type_params.go b/gopls/internal/lsp/testdata/inlay_hint/type_params.go deleted file mode 100644 index 3a3c7e53734..00000000000 --- a/gopls/internal/lsp/testdata/inlay_hint/type_params.go +++ /dev/null @@ -1,45 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -package inlayHint //@inlayHint("package") - -func main() { - ints := map[string]int64{ - "first": 34, - "second": 12, - } - - floats := map[string]float64{ - "first": 35.98, - "second": 26.99, - } - - SumIntsOrFloats[string, int64](ints) - SumIntsOrFloats[string, float64](floats) - - SumIntsOrFloats(ints) - SumIntsOrFloats(floats) - - SumNumbers(ints) - SumNumbers(floats) -} - -type Number interface { - int64 | float64 -} - -func SumIntsOrFloats[K comparable, V int64 | float64](m map[K]V) V { - var s V - for _, v := range m { - s += v - } - return s -} - -func SumNumbers[K comparable, V Number](m map[K]V) V { - var s V - for _, v := range m { - s += v - } - return s -} diff --git a/gopls/internal/lsp/testdata/inlay_hint/type_params.go.golden b/gopls/internal/lsp/testdata/inlay_hint/type_params.go.golden deleted file mode 100644 index 4819963b7a4..00000000000 --- a/gopls/internal/lsp/testdata/inlay_hint/type_params.go.golden +++ /dev/null @@ -1,47 +0,0 @@ --- inlayHint -- -//go:build go1.18 -// +build go1.18 - -package inlayHint //@inlayHint("package") - -func main() { - ints< map[string]int64> := map[string]int64{ - "first": 34, - "second": 12, - } - - floats< map[string]float64> := map[string]float64{ - "first": 35.98, - "second": 26.99, - } - - SumIntsOrFloats[string, int64](ints) - 
SumIntsOrFloats[string, float64](floats) - - SumIntsOrFloats<[string, int64]>(ints) - SumIntsOrFloats<[string, float64]>(floats) - - SumNumbers<[string, int64]>(ints) - SumNumbers<[string, float64]>(floats) -} - -type Number interface { - int64 | float64 -} - -func SumIntsOrFloats[K comparable, V int64 | float64](m map[K]V) V { - var s V - for _< K>, v< V> := range m { - s += v - } - return s -} - -func SumNumbers[K comparable, V Number](m map[K]V) V { - var s V - for _< K>, v< V> := range m { - s += v - } - return s -} - diff --git a/gopls/internal/lsp/testdata/inlay_hint/variable_types.go b/gopls/internal/lsp/testdata/inlay_hint/variable_types.go deleted file mode 100644 index 219af7059c7..00000000000 --- a/gopls/internal/lsp/testdata/inlay_hint/variable_types.go +++ /dev/null @@ -1,20 +0,0 @@ -package inlayHint //@inlayHint("package") - -func assignTypes() { - i, j := 0, len([]string{})-1 - println(i, j) -} - -func rangeTypes() { - for k, v := range []string{} { - println(k, v) - } -} - -func funcLitType() { - myFunc := func(a string) string { return "" } -} - -func compositeLitType() { - foo := map[string]interface{}{"": ""} -} diff --git a/gopls/internal/lsp/testdata/inlay_hint/variable_types.go.golden b/gopls/internal/lsp/testdata/inlay_hint/variable_types.go.golden deleted file mode 100644 index 6039950d5f3..00000000000 --- a/gopls/internal/lsp/testdata/inlay_hint/variable_types.go.golden +++ /dev/null @@ -1,22 +0,0 @@ --- inlayHint -- -package inlayHint //@inlayHint("package") - -func assignTypes() { - i< int>, j< int> := 0, len([]string{})-1 - println(i, j) -} - -func rangeTypes() { - for k< int>, v< string> := range []string{} { - println(k, v) - } -} - -func funcLitType() { - myFunc< func(a string) string> := func(a string) string { return "" } -} - -func compositeLitType() { - foo< map[string]interface{}> := map[string]interface{}{"": ""} -} - diff --git a/gopls/internal/lsp/testdata/invertifcondition/boolean.go 
b/gopls/internal/lsp/testdata/invertifcondition/boolean.go deleted file mode 100644 index 9a01d983700..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/boolean.go +++ /dev/null @@ -1,14 +0,0 @@ -package invertifcondition - -import ( - "fmt" -) - -func Boolean() { - b := true - if b { //@suggestedfix("if b", "refactor.rewrite", "") - fmt.Println("A") - } else { - fmt.Println("B") - } -} diff --git a/gopls/internal/lsp/testdata/invertifcondition/boolean.go.golden b/gopls/internal/lsp/testdata/invertifcondition/boolean.go.golden deleted file mode 100644 index 9add5996ece..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/boolean.go.golden +++ /dev/null @@ -1,16 +0,0 @@ --- suggestedfix_boolean_9_2 -- -package invertifcondition - -import ( - "fmt" -) - -func Boolean() { - b := true - if !b { - fmt.Println("B") - } else { //@suggestedfix("if b", "refactor.rewrite", "") - fmt.Println("A") - } -} - diff --git a/gopls/internal/lsp/testdata/invertifcondition/boolean_fn.go b/gopls/internal/lsp/testdata/invertifcondition/boolean_fn.go deleted file mode 100644 index 3fadab78b74..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/boolean_fn.go +++ /dev/null @@ -1,14 +0,0 @@ -package invertifcondition - -import ( - "fmt" - "os" -) - -func BooleanFn() { - if os.IsPathSeparator('X') { //@suggestedfix("if os.IsPathSeparator('X')", "refactor.rewrite", "") - fmt.Println("A") - } else { - fmt.Println("B") - } -} diff --git a/gopls/internal/lsp/testdata/invertifcondition/boolean_fn.go.golden b/gopls/internal/lsp/testdata/invertifcondition/boolean_fn.go.golden deleted file mode 100644 index 26e8193302d..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/boolean_fn.go.golden +++ /dev/null @@ -1,16 +0,0 @@ --- suggestedfix_boolean_fn_9_2 -- -package invertifcondition - -import ( - "fmt" - "os" -) - -func BooleanFn() { - if !os.IsPathSeparator('X') { - fmt.Println("B") - } else { //@suggestedfix("if os.IsPathSeparator('X')", 
"refactor.rewrite", "") - fmt.Println("A") - } -} - diff --git a/gopls/internal/lsp/testdata/invertifcondition/dont_remove_parens.go b/gopls/internal/lsp/testdata/invertifcondition/dont_remove_parens.go deleted file mode 100644 index 3793cdd31b4..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/dont_remove_parens.go +++ /dev/null @@ -1,16 +0,0 @@ -package invertifcondition - -import ( - "fmt" -) - -func DontRemoveParens() { - a := false - b := true - if !(a || - b) { //@suggestedfix("b", "refactor.rewrite", "") - fmt.Println("A") - } else { - fmt.Println("B") - } -} diff --git a/gopls/internal/lsp/testdata/invertifcondition/dont_remove_parens.go.golden b/gopls/internal/lsp/testdata/invertifcondition/dont_remove_parens.go.golden deleted file mode 100644 index a47f1ca469e..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/dont_remove_parens.go.golden +++ /dev/null @@ -1,18 +0,0 @@ --- suggestedfix_dont_remove_parens_11_3 -- -package invertifcondition - -import ( - "fmt" -) - -func DontRemoveParens() { - a := false - b := true - if (a || - b) { - fmt.Println("B") - } else { //@suggestedfix("b", "refactor.rewrite", "") - fmt.Println("A") - } -} - diff --git a/gopls/internal/lsp/testdata/invertifcondition/else_if.go b/gopls/internal/lsp/testdata/invertifcondition/else_if.go deleted file mode 100644 index 847225f9f03..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/else_if.go +++ /dev/null @@ -1,22 +0,0 @@ -package invertifcondition - -import ( - "fmt" - "os" -) - -func ElseIf() { - // No inversion expected when there's not else clause - if len(os.Args) > 2 { - fmt.Println("A") - } - - // No inversion expected for else-if, that would become unreadable - if len(os.Args) > 2 { - fmt.Println("A") - } else if os.Args[0] == "X" { //@suggestedfix(re"if os.Args.0. 
== .X.", "refactor.rewrite", "") - fmt.Println("B") - } else { - fmt.Println("C") - } -} diff --git a/gopls/internal/lsp/testdata/invertifcondition/else_if.go.golden b/gopls/internal/lsp/testdata/invertifcondition/else_if.go.golden deleted file mode 100644 index bbfb13dd085..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/else_if.go.golden +++ /dev/null @@ -1,24 +0,0 @@ --- suggestedfix_else_if_17_9 -- -package invertifcondition - -import ( - "fmt" - "os" -) - -func ElseIf() { - // No inversion expected when there's not else clause - if len(os.Args) > 2 { - fmt.Println("A") - } - - // No inversion expected for else-if, that would become unreadable - if len(os.Args) > 2 { - fmt.Println("A") - } else if os.Args[0] != "X" { - fmt.Println("C") - } else { //@suggestedfix(re"if os.Args.0. == .X.", "refactor.rewrite", "") - fmt.Println("B") - } -} - diff --git a/gopls/internal/lsp/testdata/invertifcondition/greater_than.go b/gopls/internal/lsp/testdata/invertifcondition/greater_than.go deleted file mode 100644 index 41b7e357950..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/greater_than.go +++ /dev/null @@ -1,14 +0,0 @@ -package invertifcondition - -import ( - "fmt" - "os" -) - -func GreaterThan() { - if len(os.Args) > 2 { //@suggestedfix("i", "refactor.rewrite", "") - fmt.Println("A") - } else { - fmt.Println("B") - } -} diff --git a/gopls/internal/lsp/testdata/invertifcondition/greater_than.go.golden b/gopls/internal/lsp/testdata/invertifcondition/greater_than.go.golden deleted file mode 100644 index ee87d615da0..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/greater_than.go.golden +++ /dev/null @@ -1,16 +0,0 @@ --- suggestedfix_greater_than_9_2 -- -package invertifcondition - -import ( - "fmt" - "os" -) - -func GreaterThan() { - if len(os.Args) <= 2 { - fmt.Println("B") - } else { //@suggestedfix("i", "refactor.rewrite", "") - fmt.Println("A") - } -} - diff --git 
a/gopls/internal/lsp/testdata/invertifcondition/not_boolean.go b/gopls/internal/lsp/testdata/invertifcondition/not_boolean.go deleted file mode 100644 index def97164f34..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/not_boolean.go +++ /dev/null @@ -1,14 +0,0 @@ -package invertifcondition - -import ( - "fmt" -) - -func NotBoolean() { - b := true - if !b { //@suggestedfix("if !b", "refactor.rewrite", "") - fmt.Println("A") - } else { - fmt.Println("B") - } -} diff --git a/gopls/internal/lsp/testdata/invertifcondition/not_boolean.go.golden b/gopls/internal/lsp/testdata/invertifcondition/not_boolean.go.golden deleted file mode 100644 index 3dfbf0fe334..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/not_boolean.go.golden +++ /dev/null @@ -1,16 +0,0 @@ --- suggestedfix_not_boolean_9_2 -- -package invertifcondition - -import ( - "fmt" -) - -func NotBoolean() { - b := true - if b { - fmt.Println("B") - } else { //@suggestedfix("if !b", "refactor.rewrite", "") - fmt.Println("A") - } -} - diff --git a/gopls/internal/lsp/testdata/invertifcondition/remove_else.go b/gopls/internal/lsp/testdata/invertifcondition/remove_else.go deleted file mode 100644 index a8e39e7f4cb..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/remove_else.go +++ /dev/null @@ -1,16 +0,0 @@ -package invertifcondition - -import ( - "fmt" -) - -func RemoveElse() { - if true { //@suggestedfix("if true", "refactor.rewrite", "") - fmt.Println("A") - } else { - fmt.Println("B") - return - } - - fmt.Println("C") -} diff --git a/gopls/internal/lsp/testdata/invertifcondition/remove_else.go.golden b/gopls/internal/lsp/testdata/invertifcondition/remove_else.go.golden deleted file mode 100644 index 7362d24aec5..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/remove_else.go.golden +++ /dev/null @@ -1,19 +0,0 @@ --- suggestedfix_remove_else_8_2 -- -package invertifcondition - -import ( - "fmt" -) - -func RemoveElse() { - if false { - fmt.Println("B") - 
return - } - - //@suggestedfix("if true", "refactor.rewrite", "") - fmt.Println("A") - - fmt.Println("C") -} - diff --git a/gopls/internal/lsp/testdata/invertifcondition/remove_parens.go b/gopls/internal/lsp/testdata/invertifcondition/remove_parens.go deleted file mode 100644 index 6d3b6ab32c3..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/remove_parens.go +++ /dev/null @@ -1,14 +0,0 @@ -package invertifcondition - -import ( - "fmt" -) - -func RemoveParens() { - b := true - if !(b) { //@suggestedfix("if", "refactor.rewrite", "") - fmt.Println("A") - } else { - fmt.Println("B") - } -} diff --git a/gopls/internal/lsp/testdata/invertifcondition/remove_parens.go.golden b/gopls/internal/lsp/testdata/invertifcondition/remove_parens.go.golden deleted file mode 100644 index 62e6ae03415..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/remove_parens.go.golden +++ /dev/null @@ -1,16 +0,0 @@ --- suggestedfix_remove_parens_9_2 -- -package invertifcondition - -import ( - "fmt" -) - -func RemoveParens() { - b := true - if b { - fmt.Println("B") - } else { //@suggestedfix("if", "refactor.rewrite", "") - fmt.Println("A") - } -} - diff --git a/gopls/internal/lsp/testdata/invertifcondition/semicolon.go b/gopls/internal/lsp/testdata/invertifcondition/semicolon.go deleted file mode 100644 index a23589374fd..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/semicolon.go +++ /dev/null @@ -1,13 +0,0 @@ -package invertifcondition - -import ( - "fmt" -) - -func Semicolon() { - if _, err := fmt.Println("x"); err != nil { //@suggestedfix("if", "refactor.rewrite", "") - fmt.Println("A") - } else { - fmt.Println("B") - } -} diff --git a/gopls/internal/lsp/testdata/invertifcondition/semicolon.go.golden b/gopls/internal/lsp/testdata/invertifcondition/semicolon.go.golden deleted file mode 100644 index 25c8930a936..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/semicolon.go.golden +++ /dev/null @@ -1,15 +0,0 @@ --- 
suggestedfix_semicolon_8_2 -- -package invertifcondition - -import ( - "fmt" -) - -func Semicolon() { - if _, err := fmt.Println("x"); err == nil { - fmt.Println("B") - } else { //@suggestedfix("if", "refactor.rewrite", "") - fmt.Println("A") - } -} - diff --git a/gopls/internal/lsp/testdata/invertifcondition/semicolon_and.go b/gopls/internal/lsp/testdata/invertifcondition/semicolon_and.go deleted file mode 100644 index 21fc4f5c773..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/semicolon_and.go +++ /dev/null @@ -1,13 +0,0 @@ -package invertifcondition - -import ( - "fmt" -) - -func SemicolonAnd() { - if n, err := fmt.Println("x"); err != nil && n > 0 { //@suggestedfix("f", "refactor.rewrite", "") - fmt.Println("A") - } else { - fmt.Println("B") - } -} diff --git a/gopls/internal/lsp/testdata/invertifcondition/semicolon_and.go.golden b/gopls/internal/lsp/testdata/invertifcondition/semicolon_and.go.golden deleted file mode 100644 index 27e6b94a43e..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/semicolon_and.go.golden +++ /dev/null @@ -1,15 +0,0 @@ --- suggestedfix_semicolon_and_8_3 -- -package invertifcondition - -import ( - "fmt" -) - -func SemicolonAnd() { - if n, err := fmt.Println("x"); err == nil || n <= 0 { - fmt.Println("B") - } else { //@suggestedfix("f", "refactor.rewrite", "") - fmt.Println("A") - } -} - diff --git a/gopls/internal/lsp/testdata/invertifcondition/semicolon_or.go b/gopls/internal/lsp/testdata/invertifcondition/semicolon_or.go deleted file mode 100644 index 53ec3b3ee86..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/semicolon_or.go +++ /dev/null @@ -1,13 +0,0 @@ -package invertifcondition - -import ( - "fmt" -) - -func SemicolonOr() { - if n, err := fmt.Println("x"); err != nil || n < 5 { //@suggestedfix(re"if n, err := fmt.Println..x..; err != nil .. 
n < 5", "refactor.rewrite", "") - fmt.Println("A") - } else { - fmt.Println("B") - } -} diff --git a/gopls/internal/lsp/testdata/invertifcondition/semicolon_or.go.golden b/gopls/internal/lsp/testdata/invertifcondition/semicolon_or.go.golden deleted file mode 100644 index 96135262619..00000000000 --- a/gopls/internal/lsp/testdata/invertifcondition/semicolon_or.go.golden +++ /dev/null @@ -1,15 +0,0 @@ --- suggestedfix_semicolon_or_8_2 -- -package invertifcondition - -import ( - "fmt" -) - -func SemicolonOr() { - if n, err := fmt.Println("x"); err == nil && n >= 5 { - fmt.Println("B") - } else { //@suggestedfix(re"if n, err := fmt.Println..x..; err != nil .. n < 5", "refactor.rewrite", "") - fmt.Println("A") - } -} - diff --git a/gopls/internal/lsp/testdata/missingfunction/channels.go b/gopls/internal/lsp/testdata/missingfunction/channels.go deleted file mode 100644 index 303770cd7aa..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/channels.go +++ /dev/null @@ -1,9 +0,0 @@ -package missingfunction - -func channels(s string) { - undefinedChannels(c()) //@suggestedfix("undefinedChannels", "quickfix", "") -} - -func c() (<-chan string, chan string) { - return make(<-chan string), make(chan string) -} diff --git a/gopls/internal/lsp/testdata/missingfunction/channels.go.golden b/gopls/internal/lsp/testdata/missingfunction/channels.go.golden deleted file mode 100644 index 998ce589e1d..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/channels.go.golden +++ /dev/null @@ -1,15 +0,0 @@ --- suggestedfix_channels_4_2 -- -package missingfunction - -func channels(s string) { - undefinedChannels(c()) //@suggestedfix("undefinedChannels", "quickfix", "") -} - -func undefinedChannels(ch1 <-chan string, ch2 chan string) { - panic("unimplemented") -} - -func c() (<-chan string, chan string) { - return make(<-chan string), make(chan string) -} - diff --git a/gopls/internal/lsp/testdata/missingfunction/consecutive_params.go 
b/gopls/internal/lsp/testdata/missingfunction/consecutive_params.go deleted file mode 100644 index f2fb3c04132..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/consecutive_params.go +++ /dev/null @@ -1,6 +0,0 @@ -package missingfunction - -func consecutiveParams() { - var s string - undefinedConsecutiveParams(s, s) //@suggestedfix("undefinedConsecutiveParams", "quickfix", "") -} diff --git a/gopls/internal/lsp/testdata/missingfunction/consecutive_params.go.golden b/gopls/internal/lsp/testdata/missingfunction/consecutive_params.go.golden deleted file mode 100644 index 4b852ce141b..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/consecutive_params.go.golden +++ /dev/null @@ -1,12 +0,0 @@ --- suggestedfix_consecutive_params_5_2 -- -package missingfunction - -func consecutiveParams() { - var s string - undefinedConsecutiveParams(s, s) //@suggestedfix("undefinedConsecutiveParams", "quickfix", "") -} - -func undefinedConsecutiveParams(s1, s2 string) { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/missingfunction/error_param.go b/gopls/internal/lsp/testdata/missingfunction/error_param.go deleted file mode 100644 index d0484f0ff56..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/error_param.go +++ /dev/null @@ -1,6 +0,0 @@ -package missingfunction - -func errorParam() { - var err error - undefinedErrorParam(err) //@suggestedfix("undefinedErrorParam", "quickfix", "") -} diff --git a/gopls/internal/lsp/testdata/missingfunction/error_param.go.golden b/gopls/internal/lsp/testdata/missingfunction/error_param.go.golden deleted file mode 100644 index de78646a5f1..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/error_param.go.golden +++ /dev/null @@ -1,12 +0,0 @@ --- suggestedfix_error_param_5_2 -- -package missingfunction - -func errorParam() { - var err error - undefinedErrorParam(err) //@suggestedfix("undefinedErrorParam", "quickfix", "") -} - -func undefinedErrorParam(err error) { - 
panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/missingfunction/literals.go b/gopls/internal/lsp/testdata/missingfunction/literals.go deleted file mode 100644 index 0099b1a08ad..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/literals.go +++ /dev/null @@ -1,7 +0,0 @@ -package missingfunction - -type T struct{} - -func literals() { - undefinedLiterals("hey compiler", T{}, &T{}) //@suggestedfix("undefinedLiterals", "quickfix", "") -} diff --git a/gopls/internal/lsp/testdata/missingfunction/literals.go.golden b/gopls/internal/lsp/testdata/missingfunction/literals.go.golden deleted file mode 100644 index cb85de4eb11..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/literals.go.golden +++ /dev/null @@ -1,13 +0,0 @@ --- suggestedfix_literals_6_2 -- -package missingfunction - -type T struct{} - -func literals() { - undefinedLiterals("hey compiler", T{}, &T{}) //@suggestedfix("undefinedLiterals", "quickfix", "") -} - -func undefinedLiterals(s string, t1 T, t2 *T) { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/missingfunction/operation.go b/gopls/internal/lsp/testdata/missingfunction/operation.go deleted file mode 100644 index a4913ec10b2..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/operation.go +++ /dev/null @@ -1,7 +0,0 @@ -package missingfunction - -import "time" - -func operation() { - undefinedOperation(10 * time.Second) //@suggestedfix("undefinedOperation", "quickfix", "") -} diff --git a/gopls/internal/lsp/testdata/missingfunction/operation.go.golden b/gopls/internal/lsp/testdata/missingfunction/operation.go.golden deleted file mode 100644 index 6f9e6ffab6d..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/operation.go.golden +++ /dev/null @@ -1,13 +0,0 @@ --- suggestedfix_operation_6_2 -- -package missingfunction - -import "time" - -func operation() { - undefinedOperation(10 * time.Second) //@suggestedfix("undefinedOperation", "quickfix", "") -} - -func 
undefinedOperation(duration time.Duration) { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/missingfunction/selector.go b/gopls/internal/lsp/testdata/missingfunction/selector.go deleted file mode 100644 index 93a04027138..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/selector.go +++ /dev/null @@ -1,6 +0,0 @@ -package missingfunction - -func selector() { - m := map[int]bool{} - undefinedSelector(m[1]) //@suggestedfix("undefinedSelector", "quickfix", "") -} diff --git a/gopls/internal/lsp/testdata/missingfunction/selector.go.golden b/gopls/internal/lsp/testdata/missingfunction/selector.go.golden deleted file mode 100644 index 44e2dde3aa7..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/selector.go.golden +++ /dev/null @@ -1,12 +0,0 @@ --- suggestedfix_selector_5_2 -- -package missingfunction - -func selector() { - m := map[int]bool{} - undefinedSelector(m[1]) //@suggestedfix("undefinedSelector", "quickfix", "") -} - -func undefinedSelector(b bool) { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/missingfunction/slice.go b/gopls/internal/lsp/testdata/missingfunction/slice.go deleted file mode 100644 index 48b1a52b3f3..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/slice.go +++ /dev/null @@ -1,5 +0,0 @@ -package missingfunction - -func slice() { - undefinedSlice([]int{1, 2}) //@suggestedfix("undefinedSlice", "quickfix", "") -} diff --git a/gopls/internal/lsp/testdata/missingfunction/slice.go.golden b/gopls/internal/lsp/testdata/missingfunction/slice.go.golden deleted file mode 100644 index 2a05d9a0f54..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/slice.go.golden +++ /dev/null @@ -1,11 +0,0 @@ --- suggestedfix_slice_4_2 -- -package missingfunction - -func slice() { - undefinedSlice([]int{1, 2}) //@suggestedfix("undefinedSlice", "quickfix", "") -} - -func undefinedSlice(i []int) { - panic("unimplemented") -} - diff --git 
a/gopls/internal/lsp/testdata/missingfunction/tuple.go b/gopls/internal/lsp/testdata/missingfunction/tuple.go deleted file mode 100644 index 4059ced983a..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/tuple.go +++ /dev/null @@ -1,9 +0,0 @@ -package missingfunction - -func tuple() { - undefinedTuple(b()) //@suggestedfix("undefinedTuple", "quickfix", "") -} - -func b() (string, error) { - return "", nil -} diff --git a/gopls/internal/lsp/testdata/missingfunction/tuple.go.golden b/gopls/internal/lsp/testdata/missingfunction/tuple.go.golden deleted file mode 100644 index e1118a3f348..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/tuple.go.golden +++ /dev/null @@ -1,15 +0,0 @@ --- suggestedfix_tuple_4_2 -- -package missingfunction - -func tuple() { - undefinedTuple(b()) //@suggestedfix("undefinedTuple", "quickfix", "") -} - -func undefinedTuple(s string, err error) { - panic("unimplemented") -} - -func b() (string, error) { - return "", nil -} - diff --git a/gopls/internal/lsp/testdata/missingfunction/unique_params.go b/gopls/internal/lsp/testdata/missingfunction/unique_params.go deleted file mode 100644 index 00479bf7554..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/unique_params.go +++ /dev/null @@ -1,7 +0,0 @@ -package missingfunction - -func uniqueArguments() { - var s string - var i int - undefinedUniqueArguments(s, i, s) //@suggestedfix("undefinedUniqueArguments", "quickfix", "") -} diff --git a/gopls/internal/lsp/testdata/missingfunction/unique_params.go.golden b/gopls/internal/lsp/testdata/missingfunction/unique_params.go.golden deleted file mode 100644 index 8d6352cded4..00000000000 --- a/gopls/internal/lsp/testdata/missingfunction/unique_params.go.golden +++ /dev/null @@ -1,13 +0,0 @@ --- suggestedfix_unique_params_6_2 -- -package missingfunction - -func uniqueArguments() { - var s string - var i int - undefinedUniqueArguments(s, i, s) //@suggestedfix("undefinedUniqueArguments", "quickfix", "") -} - -func 
undefinedUniqueArguments(s1 string, i int, s2 string) { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/nested_complit/nested_complit.go.in b/gopls/internal/lsp/testdata/nested_complit/nested_complit.go.in deleted file mode 100644 index 3ad2d213e98..00000000000 --- a/gopls/internal/lsp/testdata/nested_complit/nested_complit.go.in +++ /dev/null @@ -1,15 +0,0 @@ -package nested_complit - -type ncFoo struct {} //@item(structNCFoo, "ncFoo", "struct{...}", "struct") - -type ncBar struct { //@item(structNCBar, "ncBar", "struct{...}", "struct") - baz []ncFoo -} - -func _() { - []ncFoo{} //@item(litNCFoo, "[]ncFoo{}", "", "var") - _ := ncBar{ - // disabled - see issue #54822 - baz: [] // complete(" //", structNCFoo, structNCBar) - } -} diff --git a/gopls/internal/lsp/testdata/noparse/noparse.go.in b/gopls/internal/lsp/testdata/noparse/noparse.go.in deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/gopls/internal/lsp/testdata/rename/a/random.go.golden b/gopls/internal/lsp/testdata/rename/a/random.go.golden deleted file mode 100644 index 7459863ec93..00000000000 --- a/gopls/internal/lsp/testdata/rename/a/random.go.golden +++ /dev/null @@ -1,616 +0,0 @@ --- GetSum-rename -- -package a - -import ( - lg "log" - "fmt" //@rename("fmt", "fmty") - f2 "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - x, y int -} - -func (p *Pos) GetSum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.GetSum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y := x.(type) { //@rename("y", "y0") - case int: - fmt.Printf("%d", y) //@rename("y", "y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y) //@rename("y", "y2"),rename("lg","log") - default: - f2.Printf("%v", y) //@rename("y", "y3"),rename("f2","fmt2") - } -} - --- 
f2name-rename -- -package a - -import ( - lg "log" - "fmt" //@rename("fmt", "fmty") - f2name "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - x, y int -} - -func (p *Pos) Sum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.Sum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y := x.(type) { //@rename("y", "y0") - case int: - fmt.Printf("%d", y) //@rename("y", "y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y) //@rename("y", "y2"),rename("lg","log") - default: - f2name.Printf("%v", y) //@rename("y", "y3"),rename("f2","fmt2") - } -} - --- f2y-rename -- -package a - -import ( - lg "log" - "fmt" //@rename("fmt", "fmty") - f2y "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - x, y int -} - -func (p *Pos) Sum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.Sum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y := x.(type) { //@rename("y", "y0") - case int: - fmt.Printf("%d", y) //@rename("y", "y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y) //@rename("y", "y2"),rename("lg","log") - default: - f2y.Printf("%v", y) //@rename("y", "y3"),rename("f2","fmt2") - } -} - --- fmt2-rename -- -package a - -import ( - lg "log" - "fmt" //@rename("fmt", "fmty") - fmt2 "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - x, y int -} - -func (p *Pos) Sum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.Sum() 
//@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y := x.(type) { //@rename("y", "y0") - case int: - fmt.Printf("%d", y) //@rename("y", "y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y) //@rename("y", "y2"),rename("lg","log") - default: - fmt2.Printf("%v", y) //@rename("y", "y3"),rename("f2","fmt2") - } -} - --- fmty-rename -- -package a - -import ( - lg "log" - fmty "fmt" //@rename("fmt", "fmty") - f2 "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - x, y int -} - -func (p *Pos) Sum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.Sum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y := x.(type) { //@rename("y", "y0") - case int: - fmty.Printf("%d", y) //@rename("y", "y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y) //@rename("y", "y2"),rename("lg","log") - default: - f2.Printf("%v", y) //@rename("y", "y3"),rename("f2","fmt2") - } -} - --- format-rename -- -package a - -import ( - lg "log" - format "fmt" //@rename("fmt", "fmty") - f2 "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - x, y int -} - -func (p *Pos) Sum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.Sum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y := x.(type) { //@rename("y", "y0") - case int: - format.Printf("%d", y) //@rename("y", "y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y) //@rename("y", "y2"),rename("lg","log") - default: - f2.Printf("%v", y) //@rename("y", "y3"),rename("f2","fmt2") - } -} - --- log-rename -- -package a - -import ( - "log" - "fmt" 
//@rename("fmt", "fmty") - f2 "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - x, y int -} - -func (p *Pos) Sum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.Sum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y := x.(type) { //@rename("y", "y0") - case int: - fmt.Printf("%d", y) //@rename("y", "y1"),rename("fmt", "format") - case string: - log.Printf("%s", y) //@rename("y", "y2"),rename("lg","log") - default: - f2.Printf("%v", y) //@rename("y", "y3"),rename("f2","fmt2") - } -} - --- myX-rename -- -package a - -import ( - lg "log" - "fmt" //@rename("fmt", "fmty") - f2 "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - myX, y int -} - -func (p *Pos) Sum() int { - return p.myX + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.Sum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y := x.(type) { //@rename("y", "y0") - case int: - fmt.Printf("%d", y) //@rename("y", "y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y) //@rename("y", "y2"),rename("lg","log") - default: - f2.Printf("%v", y) //@rename("y", "y3"),rename("f2","fmt2") - } -} - --- pos-rename -- -package a - -import ( - lg "log" - "fmt" //@rename("fmt", "fmty") - f2 "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - x, y int -} - -func (p *Pos) Sum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var pos Pos //@rename("p", "pos") - _ = pos.Sum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - 
- switch y := x.(type) { //@rename("y", "y0") - case int: - fmt.Printf("%d", y) //@rename("y", "y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y) //@rename("y", "y2"),rename("lg","log") - default: - f2.Printf("%v", y) //@rename("y", "y3"),rename("f2","fmt2") - } -} - --- y0-rename -- -package a - -import ( - lg "log" - "fmt" //@rename("fmt", "fmty") - f2 "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - x, y int -} - -func (p *Pos) Sum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.Sum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y0 := x.(type) { //@rename("y", "y0") - case int: - fmt.Printf("%d", y0) //@rename("y", "y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y0) //@rename("y", "y2"),rename("lg","log") - default: - f2.Printf("%v", y0) //@rename("y", "y3"),rename("f2","fmt2") - } -} - --- y1-rename -- -package a - -import ( - lg "log" - "fmt" //@rename("fmt", "fmty") - f2 "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - x, y int -} - -func (p *Pos) Sum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.Sum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y1 := x.(type) { //@rename("y", "y0") - case int: - fmt.Printf("%d", y1) //@rename("y", "y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y1) //@rename("y", "y2"),rename("lg","log") - default: - f2.Printf("%v", y1) //@rename("y", "y3"),rename("f2","fmt2") - } -} - --- y2-rename -- -package a - -import ( - lg "log" - "fmt" //@rename("fmt", "fmty") - f2 "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func 
Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - x, y int -} - -func (p *Pos) Sum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.Sum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y2 := x.(type) { //@rename("y", "y0") - case int: - fmt.Printf("%d", y2) //@rename("y", "y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y2) //@rename("y", "y2"),rename("lg","log") - default: - f2.Printf("%v", y2) //@rename("y", "y3"),rename("f2","fmt2") - } -} - --- y3-rename -- -package a - -import ( - lg "log" - "fmt" //@rename("fmt", "fmty") - f2 "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - x, y int -} - -func (p *Pos) Sum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.Sum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y3 := x.(type) { //@rename("y", "y0") - case int: - fmt.Printf("%d", y3) //@rename("y", "y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y3) //@rename("y", "y2"),rename("lg","log") - default: - f2.Printf("%v", y3) //@rename("y", "y3"),rename("f2","fmt2") - } -} - --- z-rename -- -package a - -import ( - lg "log" - "fmt" //@rename("fmt", "fmty") - f2 "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(z int) int { //@rename("y", "z") - return z -} - -type Pos struct { - x, y int -} - -func (p *Pos) Sum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.Sum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y := x.(type) { //@rename("y", "y0") - case int: - fmt.Printf("%d", y) //@rename("y", 
"y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y) //@rename("y", "y2"),rename("lg","log") - default: - f2.Printf("%v", y) //@rename("y", "y3"),rename("f2","fmt2") - } -} - diff --git a/gopls/internal/lsp/testdata/rename/a/random.go.in b/gopls/internal/lsp/testdata/rename/a/random.go.in deleted file mode 100644 index 069db27baac..00000000000 --- a/gopls/internal/lsp/testdata/rename/a/random.go.in +++ /dev/null @@ -1,42 +0,0 @@ -package a - -import ( - lg "log" - "fmt" //@rename("fmt", "fmty") - f2 "fmt" //@rename("f2", "f2name"),rename("fmt","f2y") -) - -func Random() int { - y := 6 + 7 - return y -} - -func Random2(y int) int { //@rename("y", "z") - return y -} - -type Pos struct { - x, y int -} - -func (p *Pos) Sum() int { - return p.x + p.y //@rename("x", "myX") -} - -func _() { - var p Pos //@rename("p", "pos") - _ = p.Sum() //@rename("Sum", "GetSum") -} - -func sw() { - var x interface{} - - switch y := x.(type) { //@rename("y", "y0") - case int: - fmt.Printf("%d", y) //@rename("y", "y1"),rename("fmt", "format") - case string: - lg.Printf("%s", y) //@rename("y", "y2"),rename("lg","log") - default: - f2.Printf("%v", y) //@rename("y", "y3"),rename("f2","fmt2") - } -} diff --git a/gopls/internal/lsp/testdata/rename/b/b.go b/gopls/internal/lsp/testdata/rename/b/b.go deleted file mode 100644 index 8455f035b5f..00000000000 --- a/gopls/internal/lsp/testdata/rename/b/b.go +++ /dev/null @@ -1,20 +0,0 @@ -package b - -var c int //@rename("int", "uint") - -func _() { - a := 1 //@rename("a", "error") - a = 2 - _ = a -} - -var ( - // Hello there. - // Foo does the thing. 
- Foo int //@rename("Foo", "Bob") -) - -/* -Hello description -*/ -func Hello() {} //@rename("Hello", "Goodbye") diff --git a/gopls/internal/lsp/testdata/rename/b/b.go.golden b/gopls/internal/lsp/testdata/rename/b/b.go.golden deleted file mode 100644 index add4049cd98..00000000000 --- a/gopls/internal/lsp/testdata/rename/b/b.go.golden +++ /dev/null @@ -1,78 +0,0 @@ --- Bob-rename -- -package b - -var c int //@rename("int", "uint") - -func _() { - a := 1 //@rename("a", "error") - a = 2 - _ = a -} - -var ( - // Hello there. - // Bob does the thing. - Bob int //@rename("Foo", "Bob") -) - -/* -Hello description -*/ -func Hello() {} //@rename("Hello", "Goodbye") - --- Goodbye-rename -- -b.go: -package b - -var c int //@rename("int", "uint") - -func _() { - a := 1 //@rename("a", "error") - a = 2 - _ = a -} - -var ( - // Hello there. - // Foo does the thing. - Foo int //@rename("Foo", "Bob") -) - -/* -Goodbye description -*/ -func Goodbye() {} //@rename("Hello", "Goodbye") - -c.go: -package c - -import "golang.org/lsptests/rename/b" - -func _() { - b.Goodbye() //@rename("Hello", "Goodbye") -} - --- error-rename -- -package b - -var c int //@rename("int", "uint") - -func _() { - error := 1 //@rename("a", "error") - error = 2 - _ = error -} - -var ( - // Hello there. - // Foo does the thing. 
- Foo int //@rename("Foo", "Bob") -) - -/* -Hello description -*/ -func Hello() {} //@rename("Hello", "Goodbye") - --- uint-rename -- -int is built in and cannot be renamed diff --git a/gopls/internal/lsp/testdata/rename/bad/bad.go.golden b/gopls/internal/lsp/testdata/rename/bad/bad.go.golden deleted file mode 100644 index 1b27e1782f3..00000000000 --- a/gopls/internal/lsp/testdata/rename/bad/bad.go.golden +++ /dev/null @@ -1,2 +0,0 @@ --- rFunc-rename -- -renaming "sFunc" to "rFunc" not possible because "golang.org/lsptests/rename/bad" has errors diff --git a/gopls/internal/lsp/testdata/rename/bad/bad.go.in b/gopls/internal/lsp/testdata/rename/bad/bad.go.in deleted file mode 100644 index 56dbee74e2b..00000000000 --- a/gopls/internal/lsp/testdata/rename/bad/bad.go.in +++ /dev/null @@ -1,8 +0,0 @@ -package bad - -type myStruct struct { -} - -func (s *myStruct) sFunc() bool { //@rename("sFunc", "rFunc") - return s.Bad -} diff --git a/gopls/internal/lsp/testdata/rename/bad/bad_test.go.in b/gopls/internal/lsp/testdata/rename/bad/bad_test.go.in deleted file mode 100644 index e695db14be8..00000000000 --- a/gopls/internal/lsp/testdata/rename/bad/bad_test.go.in +++ /dev/null @@ -1 +0,0 @@ -package bad \ No newline at end of file diff --git a/gopls/internal/lsp/testdata/rename/c/c.go b/gopls/internal/lsp/testdata/rename/c/c.go deleted file mode 100644 index 6332c78f3f9..00000000000 --- a/gopls/internal/lsp/testdata/rename/c/c.go +++ /dev/null @@ -1,7 +0,0 @@ -package c - -import "golang.org/lsptests/rename/b" - -func _() { - b.Hello() //@rename("Hello", "Goodbye") -} diff --git a/gopls/internal/lsp/testdata/rename/c/c.go.golden b/gopls/internal/lsp/testdata/rename/c/c.go.golden deleted file mode 100644 index d56250693a9..00000000000 --- a/gopls/internal/lsp/testdata/rename/c/c.go.golden +++ /dev/null @@ -1,32 +0,0 @@ --- Goodbye-rename -- -b.go: -package b - -var c int //@rename("int", "uint") - -func _() { - a := 1 //@rename("a", "error") - a = 2 - _ = a -} - -var ( - // 
Hello there. - // Foo does the thing. - Foo int //@rename("Foo", "Bob") -) - -/* -Goodbye description -*/ -func Goodbye() {} //@rename("Hello", "Goodbye") - -c.go: -package c - -import "golang.org/lsptests/rename/b" - -func _() { - b.Goodbye() //@rename("Hello", "Goodbye") -} - diff --git a/gopls/internal/lsp/testdata/rename/c/c2.go b/gopls/internal/lsp/testdata/rename/c/c2.go deleted file mode 100644 index 4fc484a1a31..00000000000 --- a/gopls/internal/lsp/testdata/rename/c/c2.go +++ /dev/null @@ -1,4 +0,0 @@ -package c - -//go:embed Static/* -var Static embed.FS //@rename("Static", "static") \ No newline at end of file diff --git a/gopls/internal/lsp/testdata/rename/c/c2.go.golden b/gopls/internal/lsp/testdata/rename/c/c2.go.golden deleted file mode 100644 index e509227a93f..00000000000 --- a/gopls/internal/lsp/testdata/rename/c/c2.go.golden +++ /dev/null @@ -1,5 +0,0 @@ --- static-rename -- -package c - -//go:embed Static/* -var static embed.FS //@rename("Static", "static") diff --git a/gopls/internal/lsp/testdata/rename/crosspkg/another/another.go b/gopls/internal/lsp/testdata/rename/crosspkg/another/another.go deleted file mode 100644 index 9b50af2cb9c..00000000000 --- a/gopls/internal/lsp/testdata/rename/crosspkg/another/another.go +++ /dev/null @@ -1,13 +0,0 @@ -package another - -type ( - I interface{ F() } - C struct{ I } -) - -func (C) g() - -func _() { - var x I = C{} - x.F() //@rename("F", "G") -} diff --git a/gopls/internal/lsp/testdata/rename/crosspkg/another/another.go.golden b/gopls/internal/lsp/testdata/rename/crosspkg/another/another.go.golden deleted file mode 100644 index d3fccdaf132..00000000000 --- a/gopls/internal/lsp/testdata/rename/crosspkg/another/another.go.golden +++ /dev/null @@ -1,15 +0,0 @@ --- G-rename -- -package another - -type ( - I interface{ G() } - C struct{ I } -) - -func (C) g() - -func _() { - var x I = C{} - x.G() //@rename("F", "G") -} - diff --git a/gopls/internal/lsp/testdata/rename/crosspkg/crosspkg.go 
b/gopls/internal/lsp/testdata/rename/crosspkg/crosspkg.go deleted file mode 100644 index 8510bcfe057..00000000000 --- a/gopls/internal/lsp/testdata/rename/crosspkg/crosspkg.go +++ /dev/null @@ -1,7 +0,0 @@ -package crosspkg - -func Foo() { //@rename("Foo", "Dolphin") - -} - -var Bar int //@rename("Bar", "Tomato") diff --git a/gopls/internal/lsp/testdata/rename/crosspkg/crosspkg.go.golden b/gopls/internal/lsp/testdata/rename/crosspkg/crosspkg.go.golden deleted file mode 100644 index 49ff7f841cf..00000000000 --- a/gopls/internal/lsp/testdata/rename/crosspkg/crosspkg.go.golden +++ /dev/null @@ -1,40 +0,0 @@ --- Dolphin-rename -- -crosspkg.go: -package crosspkg - -func Dolphin() { //@rename("Foo", "Dolphin") - -} - -var Bar int //@rename("Bar", "Tomato") - -other.go: -package other - -import "golang.org/lsptests/rename/crosspkg" - -func Other() { - crosspkg.Bar - crosspkg.Dolphin() //@rename("Foo", "Flamingo") -} - --- Tomato-rename -- -crosspkg.go: -package crosspkg - -func Foo() { //@rename("Foo", "Dolphin") - -} - -var Tomato int //@rename("Bar", "Tomato") - -other.go: -package other - -import "golang.org/lsptests/rename/crosspkg" - -func Other() { - crosspkg.Tomato - crosspkg.Foo() //@rename("Foo", "Flamingo") -} - diff --git a/gopls/internal/lsp/testdata/rename/crosspkg/other/other.go b/gopls/internal/lsp/testdata/rename/crosspkg/other/other.go deleted file mode 100644 index 5fd147da62e..00000000000 --- a/gopls/internal/lsp/testdata/rename/crosspkg/other/other.go +++ /dev/null @@ -1,8 +0,0 @@ -package other - -import "golang.org/lsptests/rename/crosspkg" - -func Other() { - crosspkg.Bar - crosspkg.Foo() //@rename("Foo", "Flamingo") -} diff --git a/gopls/internal/lsp/testdata/rename/crosspkg/other/other.go.golden b/gopls/internal/lsp/testdata/rename/crosspkg/other/other.go.golden deleted file mode 100644 index f7b4aaad42f..00000000000 --- a/gopls/internal/lsp/testdata/rename/crosspkg/other/other.go.golden +++ /dev/null @@ -1,20 +0,0 @@ --- Flamingo-rename -- 
-crosspkg.go: -package crosspkg - -func Flamingo() { //@rename("Foo", "Dolphin") - -} - -var Bar int //@rename("Bar", "Tomato") - -other.go: -package other - -import "golang.org/lsptests/rename/crosspkg" - -func Other() { - crosspkg.Bar - crosspkg.Flamingo() //@rename("Foo", "Flamingo") -} - diff --git a/gopls/internal/lsp/testdata/rename/generics/embedded.go b/gopls/internal/lsp/testdata/rename/generics/embedded.go deleted file mode 100644 index b44bab8809b..00000000000 --- a/gopls/internal/lsp/testdata/rename/generics/embedded.go +++ /dev/null @@ -1,10 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -package generics - -type foo[P any] int //@rename("foo","bar") - -var x struct{ foo[int] } - -var _ = x.foo diff --git a/gopls/internal/lsp/testdata/rename/generics/embedded.go.golden b/gopls/internal/lsp/testdata/rename/generics/embedded.go.golden deleted file mode 100644 index faa9afb69f6..00000000000 --- a/gopls/internal/lsp/testdata/rename/generics/embedded.go.golden +++ /dev/null @@ -1,12 +0,0 @@ --- bar-rename -- -//go:build go1.18 -// +build go1.18 - -package generics - -type bar[P any] int //@rename("foo","bar") - -var x struct{ bar[int] } - -var _ = x.bar - diff --git a/gopls/internal/lsp/testdata/rename/generics/generics.go b/gopls/internal/lsp/testdata/rename/generics/generics.go deleted file mode 100644 index 977589c0c59..00000000000 --- a/gopls/internal/lsp/testdata/rename/generics/generics.go +++ /dev/null @@ -1,25 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -package generics - -type G[P any] struct { - F int -} - -func (G[_]) M() {} - -func F[P any](P) { - var p P //@rename("P", "Q") - _ = p -} - -func _() { - var x G[int] //@rename("G", "H") - _ = x.F //@rename("F", "K") - x.M() //@rename("M", "N") - - var y G[string] - _ = y.F - y.M() -} diff --git a/gopls/internal/lsp/testdata/rename/generics/generics.go.golden b/gopls/internal/lsp/testdata/rename/generics/generics.go.golden deleted file mode 100644 index 7d39813e122..00000000000 --- 
a/gopls/internal/lsp/testdata/rename/generics/generics.go.golden +++ /dev/null @@ -1,108 +0,0 @@ --- H-rename -- -//go:build go1.18 -// +build go1.18 - -package generics - -type H[P any] struct { - F int -} - -func (H[_]) M() {} - -func F[P any](P) { - var p P //@rename("P", "Q") - _ = p -} - -func _() { - var x H[int] //@rename("G", "H") - _ = x.F //@rename("F", "K") - x.M() //@rename("M", "N") - - var y H[string] - _ = y.F - y.M() -} - --- K-rename -- -//go:build go1.18 -// +build go1.18 - -package generics - -type G[P any] struct { - K int -} - -func (G[_]) M() {} - -func F[P any](P) { - var p P //@rename("P", "Q") - _ = p -} - -func _() { - var x G[int] //@rename("G", "H") - _ = x.K //@rename("F", "K") - x.M() //@rename("M", "N") - - var y G[string] - _ = y.K - y.M() -} - --- N-rename -- -//go:build go1.18 -// +build go1.18 - -package generics - -type G[P any] struct { - F int -} - -func (G[_]) N() {} - -func F[P any](P) { - var p P //@rename("P", "Q") - _ = p -} - -func _() { - var x G[int] //@rename("G", "H") - _ = x.F //@rename("F", "K") - x.N() //@rename("M", "N") - - var y G[string] - _ = y.F - y.N() -} - --- Q-rename -- -//go:build go1.18 -// +build go1.18 - -package generics - -type G[P any] struct { - F int -} - -func (G[_]) M() {} - -func F[Q any](Q) { - var p Q //@rename("P", "Q") - _ = p -} - -func _() { - var x G[int] //@rename("G", "H") - _ = x.F //@rename("F", "K") - x.M() //@rename("M", "N") - - var y G[string] - _ = y.F - y.M() -} - diff --git a/gopls/internal/lsp/testdata/rename/generics/unions.go b/gopls/internal/lsp/testdata/rename/generics/unions.go deleted file mode 100644 index c737b5c27e2..00000000000 --- a/gopls/internal/lsp/testdata/rename/generics/unions.go +++ /dev/null @@ -1,10 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -package generics - -type T string //@rename("T", "R") - -type C interface { - T | ~int //@rename("T", "S") -} diff --git a/gopls/internal/lsp/testdata/rename/generics/unions.go.golden 
b/gopls/internal/lsp/testdata/rename/generics/unions.go.golden deleted file mode 100644 index 463289629c5..00000000000 --- a/gopls/internal/lsp/testdata/rename/generics/unions.go.golden +++ /dev/null @@ -1,24 +0,0 @@ --- R-rename -- -//go:build go1.18 -// +build go1.18 - -package generics - -type R string //@rename("T", "R") - -type C interface { - R | ~int //@rename("T", "S") -} - --- S-rename -- -//go:build go1.18 -// +build go1.18 - -package generics - -type S string //@rename("T", "R") - -type C interface { - S | ~int //@rename("T", "S") -} - diff --git a/gopls/internal/lsp/testdata/rename/issue39614/issue39614.go.golden b/gopls/internal/lsp/testdata/rename/issue39614/issue39614.go.golden deleted file mode 100644 index d87c58e832e..00000000000 --- a/gopls/internal/lsp/testdata/rename/issue39614/issue39614.go.golden +++ /dev/null @@ -1,10 +0,0 @@ --- bar-rename -- -package issue39614 - -func fn() { - var bar bool //@rename("foo","bar") - make(map[string]bool - if true { - } -} - diff --git a/gopls/internal/lsp/testdata/rename/issue39614/issue39614.go.in b/gopls/internal/lsp/testdata/rename/issue39614/issue39614.go.in deleted file mode 100644 index 8222db2c441..00000000000 --- a/gopls/internal/lsp/testdata/rename/issue39614/issue39614.go.in +++ /dev/null @@ -1,8 +0,0 @@ -package issue39614 - -func fn() { - var foo bool //@rename("foo","bar") - make(map[string]bool - if true { - } -} diff --git a/gopls/internal/lsp/testdata/rename/issue42134/1.go b/gopls/internal/lsp/testdata/rename/issue42134/1.go deleted file mode 100644 index 056f8476a27..00000000000 --- a/gopls/internal/lsp/testdata/rename/issue42134/1.go +++ /dev/null @@ -1,8 +0,0 @@ -package issue42134 - -func _() { - // foo computes things. 
- foo := func() {} - - foo() //@rename("foo", "bar") -} diff --git a/gopls/internal/lsp/testdata/rename/issue42134/1.go.golden b/gopls/internal/lsp/testdata/rename/issue42134/1.go.golden deleted file mode 100644 index 266aeef4b66..00000000000 --- a/gopls/internal/lsp/testdata/rename/issue42134/1.go.golden +++ /dev/null @@ -1,10 +0,0 @@ --- bar-rename -- -package issue42134 - -func _() { - // bar computes things. - bar := func() {} - - bar() //@rename("foo", "bar") -} - diff --git a/gopls/internal/lsp/testdata/rename/issue42134/2.go b/gopls/internal/lsp/testdata/rename/issue42134/2.go deleted file mode 100644 index e9f639575c8..00000000000 --- a/gopls/internal/lsp/testdata/rename/issue42134/2.go +++ /dev/null @@ -1,12 +0,0 @@ -package issue42134 - -import "fmt" - -func _() { - // minNumber is a min number. - // Second line. - minNumber := min(1, 2) - fmt.Println(minNumber) //@rename("minNumber", "res") -} - -func min(a, b int) int { return a } diff --git a/gopls/internal/lsp/testdata/rename/issue42134/2.go.golden b/gopls/internal/lsp/testdata/rename/issue42134/2.go.golden deleted file mode 100644 index 406a3833c45..00000000000 --- a/gopls/internal/lsp/testdata/rename/issue42134/2.go.golden +++ /dev/null @@ -1,14 +0,0 @@ --- res-rename -- -package issue42134 - -import "fmt" - -func _() { - // res is a min number. - // Second line. 
- res := min(1, 2) - fmt.Println(res) //@rename("minNumber", "res") -} - -func min(a, b int) int { return a } - diff --git a/gopls/internal/lsp/testdata/rename/issue42134/3.go b/gopls/internal/lsp/testdata/rename/issue42134/3.go deleted file mode 100644 index 7666f57d34a..00000000000 --- a/gopls/internal/lsp/testdata/rename/issue42134/3.go +++ /dev/null @@ -1,11 +0,0 @@ -package issue42134 - -func _() { - /* - tests contains test cases - */ - tests := []struct { //@rename("tests", "testCases") - in, out string - }{} - _ = tests -} diff --git a/gopls/internal/lsp/testdata/rename/issue42134/3.go.golden b/gopls/internal/lsp/testdata/rename/issue42134/3.go.golden deleted file mode 100644 index cdcae18089c..00000000000 --- a/gopls/internal/lsp/testdata/rename/issue42134/3.go.golden +++ /dev/null @@ -1,13 +0,0 @@ --- testCases-rename -- -package issue42134 - -func _() { - /* - testCases contains test cases - */ - testCases := []struct { //@rename("tests", "testCases") - in, out string - }{} - _ = testCases -} - diff --git a/gopls/internal/lsp/testdata/rename/issue42134/4.go b/gopls/internal/lsp/testdata/rename/issue42134/4.go deleted file mode 100644 index c953520bc94..00000000000 --- a/gopls/internal/lsp/testdata/rename/issue42134/4.go +++ /dev/null @@ -1,8 +0,0 @@ -package issue42134 - -func _() { - // a is equal to 5. Comment must stay the same - - a := 5 - _ = a //@rename("a", "b") -} diff --git a/gopls/internal/lsp/testdata/rename/issue42134/4.go.golden b/gopls/internal/lsp/testdata/rename/issue42134/4.go.golden deleted file mode 100644 index 2086cf74cfa..00000000000 --- a/gopls/internal/lsp/testdata/rename/issue42134/4.go.golden +++ /dev/null @@ -1,10 +0,0 @@ --- b-rename -- -package issue42134 - -func _() { - // a is equal to 5. 
Comment must stay the same - - b := 5 - _ = b //@rename("a", "b") -} - diff --git a/gopls/internal/lsp/testdata/rename/shadow/shadow.go b/gopls/internal/lsp/testdata/rename/shadow/shadow.go deleted file mode 100644 index 38329b4fea2..00000000000 --- a/gopls/internal/lsp/testdata/rename/shadow/shadow.go +++ /dev/null @@ -1,20 +0,0 @@ -package shadow - -func _() { - a := true - b, c, _ := A(), B(), D() //@rename("A", "a"),rename("B", "b"),rename("b", "c"),rename("D", "d") - d := false - _, _, _, _ = a, b, c, d -} - -func A() int { - return 0 -} - -func B() int { - return 0 -} - -func D() int { - return 0 -} diff --git a/gopls/internal/lsp/testdata/rename/shadow/shadow.go.golden b/gopls/internal/lsp/testdata/rename/shadow/shadow.go.golden deleted file mode 100644 index a34b5c0fefe..00000000000 --- a/gopls/internal/lsp/testdata/rename/shadow/shadow.go.golden +++ /dev/null @@ -1,51 +0,0 @@ --- a-rename -- -shadow/shadow.go:10:6: renaming this func "A" to "a" -shadow/shadow.go:5:13: would cause this reference to become shadowed -shadow/shadow.go:4:2: by this intervening var definition --- b-rename -- -package shadow - -func _() { - a := true - b, c, _ := A(), b(), D() //@rename("A", "a"),rename("B", "b"),rename("b", "c"),rename("D", "d") - d := false - _, _, _, _ = a, b, c, d -} - -func A() int { - return 0 -} - -func b() int { - return 0 -} - -func D() int { - return 0 -} - --- c-rename -- -shadow/shadow.go:5:2: renaming this var "b" to "c" -shadow/shadow.go:5:5: conflicts with var in same block --- d-rename -- -package shadow - -func _() { - a := true - b, c, _ := A(), B(), d() //@rename("A", "a"),rename("B", "b"),rename("b", "c"),rename("D", "d") - d := false - _, _, _, _ = a, b, c, d -} - -func A() int { - return 0 -} - -func B() int { - return 0 -} - -func d() int { - return 0 -} - diff --git a/gopls/internal/lsp/testdata/rename/testy/testy.go b/gopls/internal/lsp/testdata/rename/testy/testy.go deleted file mode 100644 index e46dc06cda2..00000000000 --- 
a/gopls/internal/lsp/testdata/rename/testy/testy.go +++ /dev/null @@ -1,7 +0,0 @@ -package testy - -type tt int //@rename("tt", "testyType") - -func a() { - foo := 42 //@rename("foo", "bar") -} diff --git a/gopls/internal/lsp/testdata/rename/testy/testy.go.golden b/gopls/internal/lsp/testdata/rename/testy/testy.go.golden deleted file mode 100644 index 288dfee9682..00000000000 --- a/gopls/internal/lsp/testdata/rename/testy/testy.go.golden +++ /dev/null @@ -1,18 +0,0 @@ --- bar-rename -- -package testy - -type tt int //@rename("tt", "testyType") - -func a() { - bar := 42 //@rename("foo", "bar") -} - --- testyType-rename -- -package testy - -type testyType int //@rename("tt", "testyType") - -func a() { - foo := 42 //@rename("foo", "bar") -} - diff --git a/gopls/internal/lsp/testdata/rename/testy/testy_test.go b/gopls/internal/lsp/testdata/rename/testy/testy_test.go deleted file mode 100644 index 3d86e845558..00000000000 --- a/gopls/internal/lsp/testdata/rename/testy/testy_test.go +++ /dev/null @@ -1,8 +0,0 @@ -package testy - -import "testing" - -func TestSomething(t *testing.T) { - var x int //@rename("x", "testyX") - a() //@rename("a", "b") -} diff --git a/gopls/internal/lsp/testdata/rename/testy/testy_test.go.golden b/gopls/internal/lsp/testdata/rename/testy/testy_test.go.golden deleted file mode 100644 index 480c8e99532..00000000000 --- a/gopls/internal/lsp/testdata/rename/testy/testy_test.go.golden +++ /dev/null @@ -1,30 +0,0 @@ --- b-rename -- -testy.go: -package testy - -type tt int //@rename("tt", "testyType") - -func b() { - foo := 42 //@rename("foo", "bar") -} - -testy_test.go: -package testy - -import "testing" - -func TestSomething(t *testing.T) { - var x int //@rename("x", "testyX") - b() //@rename("a", "b") -} - --- testyX-rename -- -package testy - -import "testing" - -func TestSomething(t *testing.T) { - var testyX int //@rename("x", "testyX") - a() //@rename("a", "b") -} - diff --git a/gopls/internal/lsp/testdata/selectionrange/foo.go 
b/gopls/internal/lsp/testdata/selectionrange/foo.go deleted file mode 100644 index 1bf41340ce6..00000000000 --- a/gopls/internal/lsp/testdata/selectionrange/foo.go +++ /dev/null @@ -1,13 +0,0 @@ -package foo - -import "time" - -func Bar(x, y int, t time.Time) int { - zs := []int{1, 2, 3} //@selectionrange("1") - - for _, z := range zs { - x = x + z + y + zs[1] //@selectionrange("1") - } - - return x + y //@selectionrange("+") -} diff --git a/gopls/internal/lsp/testdata/selectionrange/foo.go.golden b/gopls/internal/lsp/testdata/selectionrange/foo.go.golden deleted file mode 100644 index fe70b30b711..00000000000 --- a/gopls/internal/lsp/testdata/selectionrange/foo.go.golden +++ /dev/null @@ -1,29 +0,0 @@ --- selectionrange_foo_12_11 -- -Ranges 0: - 11:8-11:13 "x + y" - 11:1-11:13 "return x + y" - 4:36-12:1 "{\\n\tzs := []int{...ionrange(\"+\")\\n}" - 4:0-12:1 "func Bar(x, y i...ionrange(\"+\")\\n}" - 0:0-12:1 "package foo\\n\\nim...ionrange(\"+\")\\n}" - --- selectionrange_foo_6_14 -- -Ranges 0: - 5:13-5:14 "1" - 5:7-5:21 "[]int{1, 2, 3}" - 5:1-5:21 "zs := []int{1, 2, 3}" - 4:36-12:1 "{\\n\tzs := []int{...ionrange(\"+\")\\n}" - 4:0-12:1 "func Bar(x, y i...ionrange(\"+\")\\n}" - 0:0-12:1 "package foo\\n\\nim...ionrange(\"+\")\\n}" - --- selectionrange_foo_9_22 -- -Ranges 0: - 8:21-8:22 "1" - 8:18-8:23 "zs[1]" - 8:6-8:23 "x + z + y + zs[1]" - 8:2-8:23 "x = x + z + y + zs[1]" - 7:22-9:2 "{\\n\t\tx = x + z +...onrange(\"1\")\\n\t}" - 7:1-9:2 "for _, z := ran...onrange(\"1\")\\n\t}" - 4:36-12:1 "{\\n\tzs := []int{...ionrange(\"+\")\\n}" - 4:0-12:1 "func Bar(x, y i...ionrange(\"+\")\\n}" - 0:0-12:1 "package foo\\n\\nim...ionrange(\"+\")\\n}" - diff --git a/gopls/internal/lsp/testdata/semantic/README.md b/gopls/internal/lsp/testdata/semantic/README.md deleted file mode 100644 index 00ec19af170..00000000000 --- a/gopls/internal/lsp/testdata/semantic/README.md +++ /dev/null @@ -1,2 +0,0 @@ -The golden files are the output of `gopls semtok `, with `-- semantic --` -inserted as 
the first line (the spaces are mandatory) and an extra newline at the end. diff --git a/gopls/internal/lsp/testdata/semantic/a.go b/gopls/internal/lsp/testdata/semantic/a.go deleted file mode 100644 index 54d6c8a62fa..00000000000 --- a/gopls/internal/lsp/testdata/semantic/a.go +++ /dev/null @@ -1,81 +0,0 @@ -package semantictokens //@ semantic("") - -import ( - _ "encoding/utf8" - utf "encoding/utf8" - "fmt" //@ semantic("fmt") - . "fmt" - "unicode/utf8" -) - -var ( - a = fmt.Print - b []string = []string{"foo"} - c1 chan int - c2 <-chan int - c3 = make([]chan<- int) - b = A{X: 23} - m map[bool][3]*float64 -) - -const ( - xx F = iota - yy = xx + 3 - zz = "" - ww = "not " + zz -) - -type A struct { - X int `foof` -} -type B interface { - A - sad(int) bool -} - -type F int - -func (a *A) f() bool { - var z string - x := "foo" - a(x) - y := "bar" + x - switch z { - case "xx": - default: - } - select { - case z := <-c3[0]: - default: - } - for k, v := range m { - return (!k) && v[0] == nil - } - c2 <- A.X - w := b[4:] - j := len(x) - j-- - q := []interface{}{j, 23i, &y} - g(q...) - return true -} - -func g(vv ...interface{}) { - ff := func() {} - defer ff() - go utf.RuneCount("") - go utf8.RuneCount(vv.(string)) - if true { - } else { - } -Never: - for i := 0; i < 10; { - break Never - } - _, ok := vv[0].(A) - if !ok { - switch x := vv[0].(type) { - } - goto Never - } -} diff --git a/gopls/internal/lsp/testdata/semantic/a.go.golden b/gopls/internal/lsp/testdata/semantic/a.go.golden deleted file mode 100644 index 047a031a784..00000000000 --- a/gopls/internal/lsp/testdata/semantic/a.go.golden +++ /dev/null @@ -1,83 +0,0 @@ --- semantic -- -/*⇒7,keyword,[]*/package /*⇒14,namespace,[]*/semantictokens /*⇒16,comment,[]*///@ semantic("") - -/*⇒6,keyword,[]*/import ( - _ "encoding/utf8" - /*⇒3,namespace,[]*/utf "encoding/utf8" - "fmt"/*⇐3,namespace,[]*/ /*⇒19,comment,[]*///@ semantic("fmt") - . 
"fmt" - "unicode/utf8"/*⇐4,namespace,[]*/ -) - -/*⇒3,keyword,[]*/var ( - /*⇒1,variable,[definition]*/a = /*⇒3,namespace,[]*/fmt./*⇒5,function,[]*/Print - /*⇒1,variable,[definition]*/b []/*⇒6,type,[defaultLibrary]*/string = []/*⇒6,type,[defaultLibrary]*/string{/*⇒5,string,[]*/"foo"} - /*⇒2,variable,[definition]*/c1 /*⇒4,keyword,[]*/chan /*⇒3,type,[defaultLibrary]*/int - /*⇒2,variable,[definition]*/c2 /*⇒2,operator,[]*/<-/*⇒4,keyword,[]*/chan /*⇒3,type,[defaultLibrary]*/int - /*⇒2,variable,[definition]*/c3 = /*⇒4,function,[defaultLibrary]*/make([]/*⇒4,keyword,[]*/chan/*⇒2,operator,[]*/<- /*⇒3,type,[defaultLibrary]*/int) - /*⇒1,variable,[definition]*/b = /*⇒1,type,[]*/A{/*⇒1,variable,[]*/X: /*⇒2,number,[]*/23} - /*⇒1,variable,[definition]*/m /*⇒3,keyword,[]*/map[/*⇒4,type,[defaultLibrary]*/bool][/*⇒1,number,[]*/3]/*⇒1,operator,[]*/*/*⇒7,type,[defaultLibrary]*/float64 -) - -/*⇒5,keyword,[]*/const ( - /*⇒2,variable,[definition readonly]*/xx /*⇒1,type,[]*/F = /*⇒4,variable,[readonly]*/iota - /*⇒2,variable,[definition readonly]*/yy = /*⇒2,variable,[readonly]*/xx /*⇒1,operator,[]*/+ /*⇒1,number,[]*/3 - /*⇒2,variable,[definition readonly]*/zz = /*⇒2,string,[]*/"" - /*⇒2,variable,[definition readonly]*/ww = /*⇒6,string,[]*/"not " /*⇒1,operator,[]*/+ /*⇒2,variable,[readonly]*/zz -) - -/*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/A /*⇒6,keyword,[]*/struct { - /*⇒1,variable,[definition]*/X /*⇒3,type,[defaultLibrary]*/int /*⇒6,string,[]*/`foof` -} -/*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/B /*⇒9,keyword,[]*/interface { - /*⇒1,type,[]*/A - /*⇒3,method,[definition]*/sad(/*⇒3,type,[defaultLibrary]*/int) /*⇒4,type,[defaultLibrary]*/bool -} - -/*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/F /*⇒3,type,[defaultLibrary]*/int - -/*⇒4,keyword,[]*/func (/*⇒1,variable,[]*/a /*⇒1,operator,[]*/*/*⇒1,type,[]*/A) /*⇒1,method,[definition]*/f() /*⇒4,type,[defaultLibrary]*/bool { - /*⇒3,keyword,[]*/var /*⇒1,variable,[definition]*/z /*⇒6,type,[defaultLibrary]*/string - 
/*⇒1,variable,[definition]*/x /*⇒2,operator,[]*/:= /*⇒5,string,[]*/"foo" - /*⇒1,variable,[]*/a(/*⇒1,variable,[]*/x) - /*⇒1,variable,[definition]*/y /*⇒2,operator,[]*/:= /*⇒5,string,[]*/"bar" /*⇒1,operator,[]*/+ /*⇒1,variable,[]*/x - /*⇒6,keyword,[]*/switch /*⇒1,variable,[]*/z { - /*⇒4,keyword,[]*/case /*⇒4,string,[]*/"xx": - /*⇒7,keyword,[]*/default: - } - /*⇒6,keyword,[]*/select { - /*⇒4,keyword,[]*/case /*⇒1,variable,[definition]*/z /*⇒2,operator,[]*/:= /*⇒2,operator,[]*/<-/*⇒2,variable,[]*/c3[/*⇒1,number,[]*/0]: - /*⇒7,keyword,[]*/default: - } - /*⇒3,keyword,[]*/for /*⇒1,variable,[definition]*/k, /*⇒1,variable,[definition]*/v := /*⇒5,keyword,[]*/range /*⇒1,variable,[]*/m { - /*⇒6,keyword,[]*/return (/*⇒1,operator,[]*/!/*⇒1,variable,[]*/k) /*⇒2,operator,[]*/&& /*⇒1,variable,[]*/v[/*⇒1,number,[]*/0] /*⇒2,operator,[]*/== /*⇒3,variable,[readonly defaultLibrary]*/nil - } - /*⇒2,variable,[]*/c2 /*⇒2,operator,[]*/<- /*⇒1,type,[]*/A./*⇒1,variable,[]*/X - /*⇒1,variable,[definition]*/w /*⇒2,operator,[]*/:= /*⇒1,variable,[]*/b[/*⇒1,number,[]*/4:] - /*⇒1,variable,[definition]*/j /*⇒2,operator,[]*/:= /*⇒3,function,[defaultLibrary]*/len(/*⇒1,variable,[]*/x) - /*⇒1,variable,[]*/j/*⇒2,operator,[]*/-- - /*⇒1,variable,[definition]*/q /*⇒2,operator,[]*/:= []/*⇒9,keyword,[]*/interface{}{/*⇒1,variable,[]*/j, /*⇒3,number,[]*/23i, /*⇒1,operator,[]*/&/*⇒1,variable,[]*/y} - /*⇒1,function,[]*/g(/*⇒1,variable,[]*/q/*⇒3,operator,[]*/...) 
- /*⇒6,keyword,[]*/return /*⇒4,variable,[readonly]*/true -} - -/*⇒4,keyword,[]*/func /*⇒1,function,[definition]*/g(/*⇒2,parameter,[definition]*/vv /*⇒3,operator,[]*/.../*⇒9,keyword,[]*/interface{}) { - /*⇒2,variable,[definition]*/ff /*⇒2,operator,[]*/:= /*⇒4,keyword,[]*/func() {} - /*⇒5,keyword,[]*/defer /*⇒2,function,[]*/ff() - /*⇒2,keyword,[]*/go /*⇒3,namespace,[]*/utf./*⇒9,function,[]*/RuneCount(/*⇒2,string,[]*/"") - /*⇒2,keyword,[]*/go /*⇒4,namespace,[]*/utf8./*⇒9,function,[]*/RuneCount(/*⇒2,parameter,[]*/vv.(/*⇒6,type,[]*/string)) - /*⇒2,keyword,[]*/if /*⇒4,variable,[readonly]*/true { - } /*⇒4,keyword,[]*/else { - } -/*⇒5,parameter,[definition]*/Never: - /*⇒3,keyword,[]*/for /*⇒1,variable,[definition]*/i /*⇒2,operator,[]*/:= /*⇒1,number,[]*/0; /*⇒1,variable,[]*/i /*⇒1,operator,[]*/< /*⇒2,number,[]*/10; { - /*⇒5,keyword,[]*/break Never - } - _, /*⇒2,variable,[definition]*/ok /*⇒2,operator,[]*/:= /*⇒2,parameter,[]*/vv[/*⇒1,number,[]*/0].(/*⇒1,type,[]*/A) - /*⇒2,keyword,[]*/if /*⇒1,operator,[]*/!/*⇒2,variable,[]*/ok { - /*⇒6,keyword,[]*/switch /*⇒1,variable,[definition]*/x /*⇒2,operator,[]*/:= /*⇒2,parameter,[]*/vv[/*⇒1,number,[]*/0].(/*⇒4,keyword,[]*/type) { - } - /*⇒4,keyword,[]*/goto Never - } -} - diff --git a/gopls/internal/lsp/testdata/semantic/b.go b/gopls/internal/lsp/testdata/semantic/b.go deleted file mode 100644 index 496b0863d7b..00000000000 --- a/gopls/internal/lsp/testdata/semantic/b.go +++ /dev/null @@ -1,38 +0,0 @@ -package semantictokens //@ semantic("") - -func f(x ...interface{}) { -} - -func weirⰀd() { /*😀*/ // comment - const ( - snil = nil - nil = true - true = false - false = snil - cmd = `foof` - double = iota - iota = copy - four = (len(cmd)/2 < 5) - five = four - ) - f(cmd, nil, double, iota) -} - -/* - -multiline */ /* -multiline -*/ -type AA int -type BB struct { - AA -} -type CC struct { - AA int -} -type D func(aa AA) (BB error) -type E func(AA) BB - -var a chan<- chan int -var b chan<- <-chan int -var c <-chan <-chan int diff --git 
a/gopls/internal/lsp/testdata/semantic/b.go.golden b/gopls/internal/lsp/testdata/semantic/b.go.golden deleted file mode 100644 index 59071374ba7..00000000000 --- a/gopls/internal/lsp/testdata/semantic/b.go.golden +++ /dev/null @@ -1,40 +0,0 @@ --- semantic -- -/*⇒7,keyword,[]*/package /*⇒14,namespace,[]*/semantictokens /*⇒16,comment,[]*///@ semantic("") - -/*⇒4,keyword,[]*/func /*⇒1,function,[definition]*/f(/*⇒1,parameter,[definition]*/x /*⇒3,operator,[]*/.../*⇒9,keyword,[]*/interface{}) { -} - -/*⇒4,keyword,[]*/func /*⇒6,function,[definition]*/weirⰀd() { /*⇒5,comment,[]*//*😀*/ /*⇒10,comment,[]*/// comment - /*⇒5,keyword,[]*/const ( - /*⇒4,variable,[definition readonly]*/snil = /*⇒3,variable,[readonly defaultLibrary]*/nil - /*⇒3,variable,[definition readonly]*/nil = /*⇒4,variable,[readonly]*/true - /*⇒4,variable,[definition readonly]*/true = /*⇒5,variable,[readonly]*/false - /*⇒5,variable,[definition readonly]*/false = /*⇒4,variable,[readonly]*/snil - /*⇒3,variable,[definition readonly]*/cmd = /*⇒6,string,[]*/`foof` - /*⇒6,variable,[definition readonly]*/double = /*⇒4,variable,[readonly]*/iota - /*⇒4,variable,[definition readonly]*/iota = /*⇒4,function,[defaultLibrary]*/copy - /*⇒4,variable,[definition readonly]*/four = (/*⇒3,function,[defaultLibrary]*/len(/*⇒3,variable,[readonly]*/cmd)/*⇒1,operator,[]*// /*⇒1,number,[]*/2 /*⇒1,operator,[]*/< /*⇒1,number,[]*/5) - /*⇒4,variable,[definition readonly]*/five = /*⇒4,variable,[readonly]*/four - ) - /*⇒1,function,[]*/f(/*⇒3,variable,[readonly]*/cmd, /*⇒3,variable,[readonly]*/nil, /*⇒6,variable,[readonly]*/double, /*⇒4,variable,[readonly]*/iota) -} - -/*⇒2,comment,[]*//* -/*⇒0,comment,[]*/ -/*⇒12,comment,[]*/multiline */ /*⇒2,comment,[]*//* -/*⇒9,comment,[]*/multiline -/*⇒2,comment,[]*/*/ -/*⇒4,keyword,[]*/type /*⇒2,type,[definition]*/AA /*⇒3,type,[defaultLibrary]*/int -/*⇒4,keyword,[]*/type /*⇒2,type,[definition]*/BB /*⇒6,keyword,[]*/struct { - /*⇒2,type,[]*/AA -} -/*⇒4,keyword,[]*/type /*⇒2,type,[definition]*/CC 
/*⇒6,keyword,[]*/struct { - /*⇒2,variable,[definition]*/AA /*⇒3,type,[defaultLibrary]*/int -} -/*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/D /*⇒4,keyword,[]*/func(/*⇒2,parameter,[definition]*/aa /*⇒2,type,[]*/AA) (/*⇒2,parameter,[definition]*/BB /*⇒5,type,[]*/error) -/*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/E /*⇒4,keyword,[]*/func(/*⇒2,type,[]*/AA) /*⇒2,type,[]*/BB - -/*⇒3,keyword,[]*/var /*⇒1,variable,[definition]*/a /*⇒4,keyword,[]*/chan/*⇒2,operator,[]*/<- /*⇒4,keyword,[]*/chan /*⇒3,type,[defaultLibrary]*/int -/*⇒3,keyword,[]*/var /*⇒1,variable,[definition]*/b /*⇒4,keyword,[]*/chan/*⇒2,operator,[]*/<- /*⇒2,operator,[]*/<-/*⇒4,keyword,[]*/chan /*⇒3,type,[defaultLibrary]*/int -/*⇒3,keyword,[]*/var /*⇒1,variable,[definition]*/c /*⇒2,operator,[]*/<-/*⇒4,keyword,[]*/chan /*⇒2,operator,[]*/<-/*⇒4,keyword,[]*/chan /*⇒3,type,[defaultLibrary]*/int - diff --git a/gopls/internal/lsp/testdata/semantic/semantic_test.go b/gopls/internal/lsp/testdata/semantic/semantic_test.go deleted file mode 100644 index 63d59f666ca..00000000000 --- a/gopls/internal/lsp/testdata/semantic/semantic_test.go +++ /dev/null @@ -1,13 +0,0 @@ -package semantictokens - -import ( - "os" - "testing" -) - -func TestSemanticTokens(t *testing.T) { - a, _ := os.Getwd() - // climb up to find internal/lsp - // find all the .go files - -} diff --git a/gopls/internal/lsp/testdata/stub/other/other.go b/gopls/internal/lsp/testdata/stub/other/other.go deleted file mode 100644 index ba3c1747ab7..00000000000 --- a/gopls/internal/lsp/testdata/stub/other/other.go +++ /dev/null @@ -1,10 +0,0 @@ -package other - -import ( - "bytes" - renamed_context "context" -) - -type Interface interface { - Get(renamed_context.Context) *bytes.Buffer -} diff --git a/gopls/internal/lsp/testdata/stub/stub_add_selector.go b/gopls/internal/lsp/testdata/stub/stub_add_selector.go deleted file mode 100644 index 326996a0f67..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_add_selector.go +++ /dev/null @@ -1,12 +0,0 @@ -package 
stub - -import "io" - -// This file tests that if an interface -// method references a type from its own package -// then our implementation must add the import/package selector -// in the concrete method if the concrete type is outside of the interface -// package -var _ io.ReaderFrom = &readerFrom{} //@suggestedfix("&readerFrom", "quickfix", "") - -type readerFrom struct{} diff --git a/gopls/internal/lsp/testdata/stub/stub_add_selector.go.golden b/gopls/internal/lsp/testdata/stub/stub_add_selector.go.golden deleted file mode 100644 index 8f6f62cceef..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_add_selector.go.golden +++ /dev/null @@ -1,19 +0,0 @@ --- suggestedfix_stub_add_selector_10_23 -- -package stub - -import "io" - -// This file tests that if an interface -// method references a type from its own package -// then our implementation must add the import/package selector -// in the concrete method if the concrete type is outside of the interface -// package -var _ io.ReaderFrom = &readerFrom{} //@suggestedfix("&readerFrom", "quickfix", "") - -type readerFrom struct{} - -// ReadFrom implements io.ReaderFrom. 
-func (*readerFrom) ReadFrom(r io.Reader) (n int64, err error) { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_assign.go b/gopls/internal/lsp/testdata/stub/stub_assign.go deleted file mode 100644 index cdbbc2ff59d..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_assign.go +++ /dev/null @@ -1,10 +0,0 @@ -package stub - -import "io" - -func main() { - var br io.ByteWriter - br = &byteWriter{} //@suggestedfix("&", "quickfix", "") -} - -type byteWriter struct{} diff --git a/gopls/internal/lsp/testdata/stub/stub_assign.go.golden b/gopls/internal/lsp/testdata/stub/stub_assign.go.golden deleted file mode 100644 index 4815a0d3c39..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_assign.go.golden +++ /dev/null @@ -1,17 +0,0 @@ --- suggestedfix_stub_assign_7_7 -- -package stub - -import "io" - -func main() { - var br io.ByteWriter - br = &byteWriter{} //@suggestedfix("&", "quickfix", "") -} - -type byteWriter struct{} - -// WriteByte implements io.ByteWriter. 
-func (*byteWriter) WriteByte(c byte) error { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_assign_multivars.go b/gopls/internal/lsp/testdata/stub/stub_assign_multivars.go deleted file mode 100644 index 84b94b0441a..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_assign_multivars.go +++ /dev/null @@ -1,11 +0,0 @@ -package stub - -import "io" - -func main() { - var br io.ByteWriter - var i int - i, br = 1, &multiByteWriter{} //@suggestedfix("&", "quickfix", "") -} - -type multiByteWriter struct{} diff --git a/gopls/internal/lsp/testdata/stub/stub_assign_multivars.go.golden b/gopls/internal/lsp/testdata/stub/stub_assign_multivars.go.golden deleted file mode 100644 index ab638634e86..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_assign_multivars.go.golden +++ /dev/null @@ -1,18 +0,0 @@ --- suggestedfix_stub_assign_multivars_8_13 -- -package stub - -import "io" - -func main() { - var br io.ByteWriter - var i int - i, br = 1, &multiByteWriter{} //@suggestedfix("&", "quickfix", "") -} - -type multiByteWriter struct{} - -// WriteByte implements io.ByteWriter. 
-func (*multiByteWriter) WriteByte(c byte) error { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_call_expr.go b/gopls/internal/lsp/testdata/stub/stub_call_expr.go deleted file mode 100644 index 35429041c9d..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_call_expr.go +++ /dev/null @@ -1,13 +0,0 @@ -package stub - -func main() { - check(&callExpr{}) //@suggestedfix("&", "quickfix", "") -} - -func check(err error) { - if err != nil { - panic(err) - } -} - -type callExpr struct{} diff --git a/gopls/internal/lsp/testdata/stub/stub_call_expr.go.golden b/gopls/internal/lsp/testdata/stub/stub_call_expr.go.golden deleted file mode 100644 index ceef769c7ff..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_call_expr.go.golden +++ /dev/null @@ -1,20 +0,0 @@ --- suggestedfix_stub_call_expr_4_8 -- -package stub - -func main() { - check(&callExpr{}) //@suggestedfix("&", "quickfix", "") -} - -func check(err error) { - if err != nil { - panic(err) - } -} - -type callExpr struct{} - -// Error implements error. 
-func (*callExpr) Error() string { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_embedded.go b/gopls/internal/lsp/testdata/stub/stub_embedded.go deleted file mode 100644 index 3773850f514..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_embedded.go +++ /dev/null @@ -1,15 +0,0 @@ -package stub - -import ( - "io" - "sort" -) - -var _ embeddedInterface = (*embeddedConcrete)(nil) //@suggestedfix("(", "quickfix", "") - -type embeddedConcrete struct{} - -type embeddedInterface interface { - sort.Interface - io.Reader -} diff --git a/gopls/internal/lsp/testdata/stub/stub_embedded.go.golden b/gopls/internal/lsp/testdata/stub/stub_embedded.go.golden deleted file mode 100644 index 98449e63977..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_embedded.go.golden +++ /dev/null @@ -1,37 +0,0 @@ --- suggestedfix_stub_embedded_8_27 -- -package stub - -import ( - "io" - "sort" -) - -var _ embeddedInterface = (*embeddedConcrete)(nil) //@suggestedfix("(", "quickfix", "") - -type embeddedConcrete struct{} - -// Len implements embeddedInterface. -func (*embeddedConcrete) Len() int { - panic("unimplemented") -} - -// Less implements embeddedInterface. -func (*embeddedConcrete) Less(i int, j int) bool { - panic("unimplemented") -} - -// Read implements embeddedInterface. -func (*embeddedConcrete) Read(p []byte) (n int, err error) { - panic("unimplemented") -} - -// Swap implements embeddedInterface. 
-func (*embeddedConcrete) Swap(i int, j int) { - panic("unimplemented") -} - -type embeddedInterface interface { - sort.Interface - io.Reader -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_err.go b/gopls/internal/lsp/testdata/stub/stub_err.go deleted file mode 100644 index aa4d8ce0a8e..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_err.go +++ /dev/null @@ -1,7 +0,0 @@ -package stub - -func main() { - var br error = &customErr{} //@suggestedfix("&", "quickfix", "") -} - -type customErr struct{} diff --git a/gopls/internal/lsp/testdata/stub/stub_err.go.golden b/gopls/internal/lsp/testdata/stub/stub_err.go.golden deleted file mode 100644 index c628e98ea2d..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_err.go.golden +++ /dev/null @@ -1,14 +0,0 @@ --- suggestedfix_stub_err_4_17 -- -package stub - -func main() { - var br error = &customErr{} //@suggestedfix("&", "quickfix", "") -} - -type customErr struct{} - -// Error implements error. -func (*customErr) Error() string { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_function_return.go b/gopls/internal/lsp/testdata/stub/stub_function_return.go deleted file mode 100644 index 1a9ad49420a..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_function_return.go +++ /dev/null @@ -1,11 +0,0 @@ -package stub - -import ( - "io" -) - -func newCloser() io.Closer { - return closer{} //@suggestedfix("c", "quickfix", "") -} - -type closer struct{} diff --git a/gopls/internal/lsp/testdata/stub/stub_function_return.go.golden b/gopls/internal/lsp/testdata/stub/stub_function_return.go.golden deleted file mode 100644 index 6798d444c89..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_function_return.go.golden +++ /dev/null @@ -1,18 +0,0 @@ --- suggestedfix_stub_function_return_8_9 -- -package stub - -import ( - "io" -) - -func newCloser() io.Closer { - return closer{} //@suggestedfix("c", "quickfix", "") -} - -type closer struct{} - -// Close implements io.Closer. 
-func (closer) Close() error { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_generic_receiver.go b/gopls/internal/lsp/testdata/stub/stub_generic_receiver.go deleted file mode 100644 index c16adc83e76..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_generic_receiver.go +++ /dev/null @@ -1,15 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -package stub - -import "io" - -// This file tests that that the stub method generator accounts for concrete -// types that have type parameters defined. -var _ io.ReaderFrom = &genReader[string, int]{} //@suggestedfix("&genReader", "quickfix", "Implement io.ReaderFrom") - -type genReader[T, Y any] struct { - T T - Y Y -} diff --git a/gopls/internal/lsp/testdata/stub/stub_generic_receiver.go.golden b/gopls/internal/lsp/testdata/stub/stub_generic_receiver.go.golden deleted file mode 100644 index 3f08fc2edab..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_generic_receiver.go.golden +++ /dev/null @@ -1,22 +0,0 @@ --- suggestedfix_stub_generic_receiver_10_23 -- -//go:build go1.18 -// +build go1.18 - -package stub - -import "io" - -// This file tests that that the stub method generator accounts for concrete -// types that have type parameters defined. -var _ io.ReaderFrom = &genReader[string, int]{} //@suggestedfix("&genReader", "quickfix", "Implement io.ReaderFrom") - -type genReader[T, Y any] struct { - T T - Y Y -} - -// ReadFrom implements io.ReaderFrom. -func (*genReader[T, Y]) ReadFrom(r io.Reader) (n int64, err error) { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_ignored_imports.go b/gopls/internal/lsp/testdata/stub/stub_ignored_imports.go deleted file mode 100644 index 9d50fe4dacc..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_ignored_imports.go +++ /dev/null @@ -1,18 +0,0 @@ -package stub - -import ( - "compress/zlib" - . 
"io" - _ "io" -) - -// This file tests that dot-imports and underscore imports -// are properly ignored and that a new import is added to -// reference method types - -var ( - _ Reader - _ zlib.Resetter = (*ignoredResetter)(nil) //@suggestedfix("(", "quickfix", "") -) - -type ignoredResetter struct{} diff --git a/gopls/internal/lsp/testdata/stub/stub_ignored_imports.go.golden b/gopls/internal/lsp/testdata/stub/stub_ignored_imports.go.golden deleted file mode 100644 index 2cf9545b8f2..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_ignored_imports.go.golden +++ /dev/null @@ -1,25 +0,0 @@ --- suggestedfix_stub_ignored_imports_15_20 -- -package stub - -import ( - "compress/zlib" - . "io" - _ "io" -) - -// This file tests that dot-imports and underscore imports -// are properly ignored and that a new import is added to -// reference method types - -var ( - _ Reader - _ zlib.Resetter = (*ignoredResetter)(nil) //@suggestedfix("(", "quickfix", "") -) - -type ignoredResetter struct{} - -// Reset implements zlib.Resetter. -func (*ignoredResetter) Reset(r Reader, dict []byte) error { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_issue2606.go b/gopls/internal/lsp/testdata/stub/stub_issue2606.go deleted file mode 100644 index c028ebb7307..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_issue2606.go +++ /dev/null @@ -1,7 +0,0 @@ -package stub - -type I interface{ error } - -type C int - -var _ I = C(0) //@suggestedfix("C", "quickfix", "") diff --git a/gopls/internal/lsp/testdata/stub/stub_issue2606.go.golden b/gopls/internal/lsp/testdata/stub/stub_issue2606.go.golden deleted file mode 100644 index 0ef06768abb..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_issue2606.go.golden +++ /dev/null @@ -1,14 +0,0 @@ --- suggestedfix_stub_issue2606_7_11 -- -package stub - -type I interface{ error } - -type C int - -// Error implements I. 
-func (C) Error() string { - panic("unimplemented") -} - -var _ I = C(0) //@suggestedfix("C", "quickfix", "") - diff --git a/gopls/internal/lsp/testdata/stub/stub_multi_var.go b/gopls/internal/lsp/testdata/stub/stub_multi_var.go deleted file mode 100644 index a258f7202da..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_multi_var.go +++ /dev/null @@ -1,11 +0,0 @@ -package stub - -import "io" - -// This test ensures that a variable declaration that -// has multiple values on the same line can still be -// analyzed correctly to target the interface implementation -// diagnostic. -var one, two, three io.Reader = nil, &multiVar{}, nil //@suggestedfix("&", "quickfix", "") - -type multiVar struct{} diff --git a/gopls/internal/lsp/testdata/stub/stub_multi_var.go.golden b/gopls/internal/lsp/testdata/stub/stub_multi_var.go.golden deleted file mode 100644 index 1fac524c616..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_multi_var.go.golden +++ /dev/null @@ -1,18 +0,0 @@ --- suggestedfix_stub_multi_var_9_38 -- -package stub - -import "io" - -// This test ensures that a variable declaration that -// has multiple values on the same line can still be -// analyzed correctly to target the interface implementation -// diagnostic. -var one, two, three io.Reader = nil, &multiVar{}, nil //@suggestedfix("&", "quickfix", "") - -type multiVar struct{} - -// Read implements io.Reader. 
-func (*multiVar) Read(p []byte) (n int, err error) { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_pointer.go b/gopls/internal/lsp/testdata/stub/stub_pointer.go deleted file mode 100644 index fab2cc26787..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_pointer.go +++ /dev/null @@ -1,9 +0,0 @@ -package stub - -import "io" - -func getReaderFrom() io.ReaderFrom { - return &pointerImpl{} //@suggestedfix("&", "quickfix", "") -} - -type pointerImpl struct{} diff --git a/gopls/internal/lsp/testdata/stub/stub_pointer.go.golden b/gopls/internal/lsp/testdata/stub/stub_pointer.go.golden deleted file mode 100644 index 6d2d602adb3..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_pointer.go.golden +++ /dev/null @@ -1,16 +0,0 @@ --- suggestedfix_stub_pointer_6_9 -- -package stub - -import "io" - -func getReaderFrom() io.ReaderFrom { - return &pointerImpl{} //@suggestedfix("&", "quickfix", "") -} - -type pointerImpl struct{} - -// ReadFrom implements io.ReaderFrom. 
-func (*pointerImpl) ReadFrom(r io.Reader) (n int64, err error) { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_renamed_import.go b/gopls/internal/lsp/testdata/stub/stub_renamed_import.go deleted file mode 100644 index 04653244ccf..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_renamed_import.go +++ /dev/null @@ -1,11 +0,0 @@ -package stub - -import ( - "compress/zlib" - myio "io" -) - -var _ zlib.Resetter = &myIO{} //@suggestedfix("&", "quickfix", "") -var _ myio.Reader - -type myIO struct{} diff --git a/gopls/internal/lsp/testdata/stub/stub_renamed_import.go.golden b/gopls/internal/lsp/testdata/stub/stub_renamed_import.go.golden deleted file mode 100644 index b214f33cea5..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_renamed_import.go.golden +++ /dev/null @@ -1,18 +0,0 @@ --- suggestedfix_stub_renamed_import_8_23 -- -package stub - -import ( - "compress/zlib" - myio "io" -) - -var _ zlib.Resetter = &myIO{} //@suggestedfix("&", "quickfix", "") -var _ myio.Reader - -type myIO struct{} - -// Reset implements zlib.Resetter. -func (*myIO) Reset(r myio.Reader, dict []byte) error { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_renamed_import_iface.go b/gopls/internal/lsp/testdata/stub/stub_renamed_import_iface.go deleted file mode 100644 index 91804c2c430..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_renamed_import_iface.go +++ /dev/null @@ -1,13 +0,0 @@ -package stub - -import ( - "golang.org/lsptests/stub/other" -) - -// This file tests that if an interface -// method references an import from its own package -// that the concrete type does not yet import, and that import happens -// to be renamed, then we prefer the renaming of the interface. 
-var _ other.Interface = &otherInterfaceImpl{} //@suggestedfix("&otherInterfaceImpl", "quickfix", "") - -type otherInterfaceImpl struct{} diff --git a/gopls/internal/lsp/testdata/stub/stub_renamed_import_iface.go.golden b/gopls/internal/lsp/testdata/stub/stub_renamed_import_iface.go.golden deleted file mode 100644 index 18d7d450b29..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_renamed_import_iface.go.golden +++ /dev/null @@ -1,22 +0,0 @@ --- suggestedfix_stub_renamed_import_iface_11_25 -- -package stub - -import ( - "bytes" - "context" - "golang.org/lsptests/stub/other" -) - -// This file tests that if an interface -// method references an import from its own package -// that the concrete type does not yet import, and that import happens -// to be renamed, then we prefer the renaming of the interface. -var _ other.Interface = &otherInterfaceImpl{} //@suggestedfix("&otherInterfaceImpl", "quickfix", "") - -type otherInterfaceImpl struct{} - -// Get implements other.Interface. -func (*otherInterfaceImpl) Get(context.Context) *bytes.Buffer { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_stdlib.go b/gopls/internal/lsp/testdata/stub/stub_stdlib.go deleted file mode 100644 index 4e13cf9a68a..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_stdlib.go +++ /dev/null @@ -1,9 +0,0 @@ -package stub - -import ( - "io" -) - -var _ io.Writer = writer{} //@suggestedfix("w", "quickfix", "") - -type writer struct{} diff --git a/gopls/internal/lsp/testdata/stub/stub_stdlib.go.golden b/gopls/internal/lsp/testdata/stub/stub_stdlib.go.golden deleted file mode 100644 index b750695fed3..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_stdlib.go.golden +++ /dev/null @@ -1,16 +0,0 @@ --- suggestedfix_stub_stdlib_7_19 -- -package stub - -import ( - "io" -) - -var _ io.Writer = writer{} //@suggestedfix("w", "quickfix", "") - -type writer struct{} - -// Write implements io.Writer. 
-func (writer) Write(p []byte) (n int, err error) { - panic("unimplemented") -} - diff --git a/gopls/internal/lsp/testdata/stub/stub_typedecl_group.go b/gopls/internal/lsp/testdata/stub/stub_typedecl_group.go deleted file mode 100644 index 67817fa4770..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_typedecl_group.go +++ /dev/null @@ -1,27 +0,0 @@ -package stub - -// Regression test for Issue #56825: file corrupted by insertion of -// methods after TypeSpec in a parenthesized TypeDecl. - -import "io" - -func newReadCloser() io.ReadCloser { - return rdcloser{} //@suggestedfix("rd", "quickfix", "") -} - -type ( - A int - rdcloser struct{} - B int -) - -func _() { - // Local types can't be stubbed as there's nowhere to put the methods. - // The suggestedfix assertion can't express this yet. TODO(adonovan): support it. - type local struct{} - var _ io.ReadCloser = local{} // want error: `local type "local" cannot be stubbed` -} - -type ( - C int -) diff --git a/gopls/internal/lsp/testdata/stub/stub_typedecl_group.go.golden b/gopls/internal/lsp/testdata/stub/stub_typedecl_group.go.golden deleted file mode 100644 index 1cd11239120..00000000000 --- a/gopls/internal/lsp/testdata/stub/stub_typedecl_group.go.golden +++ /dev/null @@ -1,39 +0,0 @@ --- suggestedfix_stub_typedecl_group_9_9 -- -package stub - -// Regression test for Issue #56825: file corrupted by insertion of -// methods after TypeSpec in a parenthesized TypeDecl. - -import "io" - -func newReadCloser() io.ReadCloser { - return rdcloser{} //@suggestedfix("rd", "quickfix", "") -} - -type ( - A int - rdcloser struct{} - B int -) - -// Close implements io.ReadCloser. -func (rdcloser) Close() error { - panic("unimplemented") -} - -// Read implements io.ReadCloser. -func (rdcloser) Read(p []byte) (n int, err error) { - panic("unimplemented") -} - -func _() { - // Local types can't be stubbed as there's nowhere to put the methods. - // The suggestedfix assertion can't express this yet. 
TODO(adonovan): support it. - type local struct{} - var _ io.ReadCloser = local{} // want error: `local type "local" cannot be stubbed` -} - -type ( - C int -) - diff --git a/gopls/internal/lsp/testdata/summary.txt.golden b/gopls/internal/lsp/testdata/summary.txt.golden deleted file mode 100644 index b48a44d4dff..00000000000 --- a/gopls/internal/lsp/testdata/summary.txt.golden +++ /dev/null @@ -1,8 +0,0 @@ --- summary -- -CallHierarchyCount = 2 -SemanticTokenCount = 3 -SuggestedFixCount = 39 -InlayHintsCount = 5 -RenamesCount = 45 -SelectionRangesCount = 3 - diff --git a/gopls/internal/lsp/testdata/typeerrors/noresultvalues.go b/gopls/internal/lsp/testdata/typeerrors/noresultvalues.go deleted file mode 100644 index 729e7bbccd4..00000000000 --- a/gopls/internal/lsp/testdata/typeerrors/noresultvalues.go +++ /dev/null @@ -1,5 +0,0 @@ -package typeerrors - -func x() { return nil } //@suggestedfix("nil", "quickfix", "") - -func y() { return nil, "hello" } //@suggestedfix("nil", "quickfix", "") diff --git a/gopls/internal/lsp/testdata/typeerrors/noresultvalues.go.golden b/gopls/internal/lsp/testdata/typeerrors/noresultvalues.go.golden deleted file mode 100644 index 48409a0b7dd..00000000000 --- a/gopls/internal/lsp/testdata/typeerrors/noresultvalues.go.golden +++ /dev/null @@ -1,14 +0,0 @@ --- suggestedfix_noresultvalues_3_19 -- -package typeerrors - -func x() { return } //@suggestedfix("nil", "quickfix", "") - -func y() { return nil, "hello" } //@suggestedfix("nil", "quickfix", "") - --- suggestedfix_noresultvalues_5_19 -- -package typeerrors - -func x() { return nil } //@suggestedfix("nil", "quickfix", "") - -func y() { return } //@suggestedfix("nil", "quickfix", "") - diff --git a/gopls/internal/lsp/tests/README.md b/gopls/internal/lsp/tests/README.md deleted file mode 100644 index 07df28815c1..00000000000 --- a/gopls/internal/lsp/tests/README.md +++ /dev/null @@ -1,66 +0,0 @@ -# Testing - -LSP has "marker tests" defined in `internal/lsp/testdata`, as well as 
-traditional tests. - -## Marker tests - -Marker tests have a standard input file, like -`internal/lsp/testdata/foo/bar.go`, and some may have a corresponding golden -file, like `internal/lsp/testdata/foo/bar.go.golden`. The former is the "input" -and the latter is the expected output. - -Each input file contains annotations like -`//@suggestedfix("}", "refactor.rewrite", "Fill anonymous struct")`. These annotations are interpreted by -test runners to perform certain actions. The expected output after those actions -is encoded in the golden file. - -When tests are run, each annotation results in a new subtest, which is encoded -in the golden file with a heading like, - -```bash --- suggestedfix_bar_11_21 -- -// expected contents go here --- suggestedfix_bar_13_20 -- -// expected contents go here -``` - -The format of these headings vary: they are defined by the -[`Golden`](https://pkg.go.dev/golang.org/x/tools/gopls/internal/lsp/tests#Data.Golden) -function for each annotation. In the case above, the format is: annotation -name, file name, annotation line location, annotation character location. - -So, if `internal/lsp/testdata/foo/bar.go` has three `suggestedfix` annotations, -the golden file should have three headers with `suggestedfix_bar_xx_yy` -headings. - -To see a list of all available annotations, see the exported "expectations" in -[tests.go](https://github.com/golang/tools/blob/299f270db45902e93469b1152fafed034bb3f033/internal/lsp/tests/tests.go#L418-L447). - -To run marker tests, - -```bash -cd /path/to/tools - -# The marker tests are located in "internal/lsp", "internal/lsp/cmd, and -# "internal/lsp/source". -go test ./internal/lsp/... -``` - -There are quite a lot of marker tests, so to run one individually, pass the test -path and heading into a -run argument: - -```bash -cd /path/to/tools -go test ./internal/lsp/... 
-v -run TestLSP/Modules/SuggestedFix/bar_11_21 -``` - -## Resetting marker tests - -Sometimes, a change is made to lsp that requires a change to multiple golden -files. When this happens, you can run, - -```bash -cd /path/to/tools -./internal/lsp/reset_golden.sh -``` diff --git a/gopls/internal/lsp/tests/markdown_go118.go b/gopls/internal/lsp/tests/markdown_go118.go deleted file mode 100644 index 3701018bd19..00000000000 --- a/gopls/internal/lsp/tests/markdown_go118.go +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.19 -// +build !go1.19 - -package tests - -import ( - "regexp" - "strings" - - "golang.org/x/tools/gopls/internal/lsp/tests/compare" -) - -// DiffMarkdown compares two markdown strings produced by parsing go doc -// comments. -// -// For go1.19 and later, markdown conversion is done using go/doc/comment. -// Compared to the newer version, the older version has extra escapes, and -// treats code blocks slightly differently. -func DiffMarkdown(want, got string) string { - want = normalizeMarkdown(want) - got = normalizeMarkdown(got) - return compare.Text(want, got) -} - -// normalizeMarkdown normalizes whitespace and escaping of the input string, to -// eliminate differences between the Go 1.18 and Go 1.19 generated markdown for -// doc comments. Note that it does not normalize to either the 1.18 or 1.19 -// formatting: it simplifies both so that they may be compared. -// -// This function may need to be adjusted as we encounter more differences in -// the generated text. -// -// TODO(rfindley): this function doesn't correctly handle the case of -// multi-line docstrings. -func normalizeMarkdown(input string) string { - input = strings.TrimSpace(input) - - // For simplicity, eliminate blank lines. 
- input = regexp.MustCompile("\n+").ReplaceAllString(input, "\n") - - // Replace common escaped characters with their unescaped version. - // - // This list may not be exhaustive: it was just sufficient to make tests - // pass. - input = strings.NewReplacer( - `\\`, ``, - `\@`, `@`, - `\(`, `(`, - `\)`, `)`, - `\{`, `{`, - `\}`, `}`, - `\"`, `"`, - `\.`, `.`, - `\-`, `-`, - `\'`, `'`, - `\+`, `+`, - `\~`, `~`, - `\=`, `=`, - `\:`, `:`, - `\?`, `?`, - `\n\n\n`, `\n\n`, // Note that these are *escaped* newlines. - ).Replace(input) - - return input -} diff --git a/gopls/internal/lsp/tests/markdown_go119.go b/gopls/internal/lsp/tests/markdown_go119.go deleted file mode 100644 index a7fcf1a42ef..00000000000 --- a/gopls/internal/lsp/tests/markdown_go119.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.19 -// +build go1.19 - -package tests - -import ( - "golang.org/x/tools/gopls/internal/lsp/tests/compare" -) - -// DiffMarkdown compares two markdown strings produced by parsing go doc -// comments. -// -// For go1.19 and later, markdown conversion is done using go/doc/comment. -// Compared to the newer version, the older version has extra escapes, and -// treats code blocks slightly differently. -func DiffMarkdown(want, got string) string { - return compare.Text(want, got) -} diff --git a/gopls/internal/lsp/tests/tests.go b/gopls/internal/lsp/tests/tests.go deleted file mode 100644 index d310a2331cc..00000000000 --- a/gopls/internal/lsp/tests/tests.go +++ /dev/null @@ -1,645 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package tests exports functionality to be used across a variety of gopls tests. 
-package tests - -import ( - "bytes" - "context" - "flag" - "fmt" - "go/ast" - "go/token" - "io" - "os" - "path/filepath" - "sort" - "strings" - "sync" - "testing" - "time" - - "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/packages/packagestest" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/typeparams" - "golang.org/x/tools/txtar" -) - -const ( - overlayFileSuffix = ".overlay" - goldenFileSuffix = ".golden" - inFileSuffix = ".in" - summaryFile = "summary.txt" - - // The module path containing the testdata packages. - // - // Warning: the length of this module path matters, as we have bumped up - // against command-line limitations on windows (golang/go#54800). - testModule = "golang.org/lsptests" -) - -var UpdateGolden = flag.Bool("golden", false, "Update golden files") - -// These type names apparently avoid the need to repeat the -// type in the field name and the make() expression. -type CallHierarchy = map[span.Span]*CallHierarchyResult -type SemanticTokens = []span.Span -type SuggestedFixes = map[span.Span][]SuggestedFix -type Renames = map[span.Span]string -type InlayHints = []span.Span -type AddImport = map[span.URI]string -type SelectionRanges = []span.Span - -type Data struct { - Config packages.Config - Exported *packagestest.Exported - CallHierarchy CallHierarchy - SemanticTokens SemanticTokens - SuggestedFixes SuggestedFixes - Renames Renames - InlayHints InlayHints - AddImport AddImport - SelectionRanges SelectionRanges - - fragments map[string]string - dir string - golden map[string]*Golden - mode string - - ModfileFlagAvailable bool - - mappersMu sync.Mutex - mappers map[span.URI]*protocol.Mapper -} - -// The Tests interface abstracts the LSP-based implementation of the marker -// test operators appearing in files beneath ../testdata/. 
-// -// TODO(adonovan): reduce duplication; see https://github.com/golang/go/issues/54845. -// There is only one implementation (*runner in ../lsp_test.go), so -// we can abolish the interface now. -type Tests interface { - CallHierarchy(*testing.T, span.Span, *CallHierarchyResult) - SemanticTokens(*testing.T, span.Span) - SuggestedFix(*testing.T, span.Span, []SuggestedFix, int) - InlayHints(*testing.T, span.Span) - Rename(*testing.T, span.Span, string) - AddImport(*testing.T, span.URI, string) - SelectionRanges(*testing.T, span.Span) -} - -type Completion struct { - CompletionItems []token.Pos -} - -type CompletionSnippet struct { - CompletionItem token.Pos - PlainSnippet string - PlaceholderSnippet string -} - -type CallHierarchyResult struct { - IncomingCalls, OutgoingCalls []protocol.CallHierarchyItem -} - -type Link struct { - Src span.Span - Target string - NotePosition token.Position -} - -type SuggestedFix struct { - ActionKind, Title string -} - -type Golden struct { - Filename string - Archive *txtar.Archive - Modified bool -} - -func Context(t testing.TB) context.Context { - return context.Background() -} - -func DefaultOptions(o *source.Options) { - o.SupportedCodeActions = map[source.FileKind]map[protocol.CodeActionKind]bool{ - source.Go: { - protocol.SourceOrganizeImports: true, - protocol.QuickFix: true, - protocol.RefactorRewrite: true, - protocol.RefactorInline: true, - protocol.RefactorExtract: true, - protocol.SourceFixAll: true, - }, - source.Mod: { - protocol.SourceOrganizeImports: true, - }, - source.Sum: {}, - source.Work: {}, - source.Tmpl: {}, - } - o.InsertTextFormat = protocol.SnippetTextFormat - o.CompletionBudget = time.Minute - o.HierarchicalDocumentSymbolSupport = true - o.SemanticTokens = true - o.InternalOptions.NewDiff = "new" - - // Enable all inlay hints. 
- if o.Hints == nil { - o.Hints = make(map[string]bool) - } - for name := range source.AllInlayHints { - o.Hints[name] = true - } -} - -func RunTests(t *testing.T, dataDir string, includeMultiModule bool, f func(*testing.T, *Data)) { - t.Helper() - modes := []string{"Modules", "GOPATH"} - if includeMultiModule { - modes = append(modes, "MultiModule") - } - for _, mode := range modes { - t.Run(mode, func(t *testing.T) { - datum := load(t, mode, dataDir) - t.Helper() - f(t, datum) - }) - } -} - -func load(t testing.TB, mode string, dir string) *Data { - datum := &Data{ - CallHierarchy: make(CallHierarchy), - Renames: make(Renames), - SuggestedFixes: make(SuggestedFixes), - AddImport: make(AddImport), - - dir: dir, - fragments: map[string]string{}, - golden: map[string]*Golden{}, - mode: mode, - mappers: map[span.URI]*protocol.Mapper{}, - } - - if !*UpdateGolden { - summary := filepath.Join(filepath.FromSlash(dir), summaryFile+goldenFileSuffix) - if _, err := os.Stat(summary); os.IsNotExist(err) { - t.Fatalf("could not find golden file summary.txt in %#v", dir) - } - archive, err := txtar.ParseFile(summary) - if err != nil { - t.Fatalf("could not read golden file %v/%v: %v", dir, summary, err) - } - datum.golden[summaryFile] = &Golden{ - Filename: summary, - Archive: archive, - } - } - - files := packagestest.MustCopyFileTree(dir) - // Prune test cases that exercise generics. 
- if !typeparams.Enabled { - for name := range files { - if strings.Contains(name, "_generics") { - delete(files, name) - } - } - } - overlays := map[string][]byte{} - for fragment, operation := range files { - if trimmed := strings.TrimSuffix(fragment, goldenFileSuffix); trimmed != fragment { - delete(files, fragment) - goldFile := filepath.Join(dir, fragment) - archive, err := txtar.ParseFile(goldFile) - if err != nil { - t.Fatalf("could not read golden file %v: %v", fragment, err) - } - datum.golden[trimmed] = &Golden{ - Filename: goldFile, - Archive: archive, - } - } else if trimmed := strings.TrimSuffix(fragment, inFileSuffix); trimmed != fragment { - delete(files, fragment) - files[trimmed] = operation - } else if index := strings.Index(fragment, overlayFileSuffix); index >= 0 { - delete(files, fragment) - partial := fragment[:index] + fragment[index+len(overlayFileSuffix):] - contents, err := os.ReadFile(filepath.Join(dir, fragment)) - if err != nil { - t.Fatal(err) - } - overlays[partial] = contents - } - } - - modules := []packagestest.Module{ - { - Name: testModule, - Files: files, - Overlay: overlays, - }, - } - switch mode { - case "Modules": - datum.Exported = packagestest.Export(t, packagestest.Modules, modules) - case "GOPATH": - datum.Exported = packagestest.Export(t, packagestest.GOPATH, modules) - case "MultiModule": - files := map[string]interface{}{} - for k, v := range modules[0].Files { - files[filepath.Join("testmodule", k)] = v - } - modules[0].Files = files - - overlays := map[string][]byte{} - for k, v := range modules[0].Overlay { - overlays[filepath.Join("testmodule", k)] = v - } - modules[0].Overlay = overlays - - golden := map[string]*Golden{} - for k, v := range datum.golden { - if k == summaryFile { - golden[k] = v - } else { - golden[filepath.Join("testmodule", k)] = v - } - } - datum.golden = golden - - datum.Exported = packagestest.Export(t, packagestest.Modules, modules) - default: - panic("unknown mode " + mode) - } - - for _, m 
:= range modules { - for fragment := range m.Files { - filename := datum.Exported.File(m.Name, fragment) - datum.fragments[filename] = fragment - } - } - - // Turn off go/packages debug logging. - datum.Exported.Config.Logf = nil - datum.Config.Logf = nil - - // Merge the exported.Config with the view.Config. - datum.Config = *datum.Exported.Config - datum.Config.Fset = token.NewFileSet() - datum.Config.Context = Context(nil) - datum.Config.ParseFile = func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) { - panic("ParseFile should not be called") - } - - // Do a first pass to collect special markers for completion and workspace symbols. - if err := datum.Exported.Expect(map[string]interface{}{ - "item": func(name string, r packagestest.Range, _ []string) { - datum.Exported.Mark(name, r) - }, - "symbol": func(name string, r packagestest.Range, _ []string) { - datum.Exported.Mark(name, r) - }, - }); err != nil { - t.Fatal(err) - } - - // Collect any data that needs to be used by subsequent tests. - if err := datum.Exported.Expect(map[string]interface{}{ - "semantic": datum.collectSemanticTokens, - "inlayHint": datum.collectInlayHints, - "rename": datum.collectRenames, - "suggestedfix": datum.collectSuggestedFixes, - "incomingcalls": datum.collectIncomingCalls, - "outgoingcalls": datum.collectOutgoingCalls, - "addimport": datum.collectAddImports, - "selectionrange": datum.collectSelectionRanges, - }); err != nil { - t.Fatal(err) - } - - if mode == "MultiModule" { - if err := moveFile(filepath.Join(datum.Config.Dir, "go.mod"), filepath.Join(datum.Config.Dir, "testmodule/go.mod")); err != nil { - t.Fatal(err) - } - } - - return datum -} - -// moveFile moves the file at oldpath to newpath, by renaming if possible -// or copying otherwise. 
-func moveFile(oldpath, newpath string) (err error) { - renameErr := os.Rename(oldpath, newpath) - if renameErr == nil { - return nil - } - - src, err := os.Open(oldpath) - if err != nil { - return err - } - defer func() { - src.Close() - if err == nil { - err = os.Remove(oldpath) - } - }() - - perm := os.ModePerm - fi, err := src.Stat() - if err == nil { - perm = fi.Mode().Perm() - } - - dst, err := os.OpenFile(newpath, os.O_WRONLY|os.O_CREATE|os.O_EXCL, perm) - if err != nil { - return err - } - - _, err = io.Copy(dst, src) - if closeErr := dst.Close(); err == nil { - err = closeErr - } - return err -} - -func Run(t *testing.T, tests Tests, data *Data) { - t.Helper() - checkData(t, data) - - t.Run("CallHierarchy", func(t *testing.T) { - t.Helper() - for spn, callHierarchyResult := range data.CallHierarchy { - t.Run(SpanName(spn), func(t *testing.T) { - t.Helper() - tests.CallHierarchy(t, spn, callHierarchyResult) - }) - } - }) - - t.Run("SemanticTokens", func(t *testing.T) { - t.Helper() - for _, spn := range data.SemanticTokens { - t.Run(uriName(spn.URI()), func(t *testing.T) { - t.Helper() - tests.SemanticTokens(t, spn) - }) - } - }) - - t.Run("SuggestedFix", func(t *testing.T) { - t.Helper() - for spn, actionKinds := range data.SuggestedFixes { - // Check if we should skip this spn if the -modfile flag is not available. 
- if shouldSkip(data, spn.URI()) { - continue - } - t.Run(SpanName(spn), func(t *testing.T) { - t.Helper() - tests.SuggestedFix(t, spn, actionKinds, 1) - }) - } - }) - - t.Run("InlayHints", func(t *testing.T) { - t.Helper() - for _, src := range data.InlayHints { - t.Run(SpanName(src), func(t *testing.T) { - t.Helper() - tests.InlayHints(t, src) - }) - } - }) - - t.Run("Renames", func(t *testing.T) { - t.Helper() - for spn, newText := range data.Renames { - t.Run(uriName(spn.URI())+"_"+newText, func(t *testing.T) { - t.Helper() - tests.Rename(t, spn, newText) - }) - } - }) - - t.Run("AddImport", func(t *testing.T) { - t.Helper() - for uri, exp := range data.AddImport { - t.Run(uriName(uri), func(t *testing.T) { - tests.AddImport(t, uri, exp) - }) - } - }) - - t.Run("SelectionRanges", func(t *testing.T) { - t.Helper() - for _, span := range data.SelectionRanges { - t.Run(SpanName(span), func(t *testing.T) { - tests.SelectionRanges(t, span) - }) - } - }) - - if *UpdateGolden { - for _, golden := range data.golden { - if !golden.Modified { - continue - } - sort.Slice(golden.Archive.Files, func(i, j int) bool { - return golden.Archive.Files[i].Name < golden.Archive.Files[j].Name - }) - if err := os.WriteFile(golden.Filename, txtar.Format(golden.Archive), 0666); err != nil { - t.Fatal(err) - } - } - } -} - -func checkData(t *testing.T, data *Data) { - buf := &bytes.Buffer{} - - fmt.Fprintf(buf, "CallHierarchyCount = %v\n", len(data.CallHierarchy)) - fmt.Fprintf(buf, "SemanticTokenCount = %v\n", len(data.SemanticTokens)) - fmt.Fprintf(buf, "SuggestedFixCount = %v\n", len(data.SuggestedFixes)) - fmt.Fprintf(buf, "InlayHintsCount = %v\n", len(data.InlayHints)) - fmt.Fprintf(buf, "RenamesCount = %v\n", len(data.Renames)) - fmt.Fprintf(buf, "SelectionRangesCount = %v\n", len(data.SelectionRanges)) - - want := string(data.Golden(t, "summary", summaryFile, func() ([]byte, error) { - return buf.Bytes(), nil - })) - got := buf.String() - if want != got { - // These counters 
change when assertions are added or removed. - // They act as an independent safety net to ensure that the - // tests didn't spuriously pass because they did no work. - t.Errorf("test summary does not match:\n%s\n(Run with -golden to update golden file; also, there may be one per Go version.)", compare.Text(want, got)) - } -} - -func (data *Data) Mapper(uri span.URI) (*protocol.Mapper, error) { - data.mappersMu.Lock() - defer data.mappersMu.Unlock() - - if _, ok := data.mappers[uri]; !ok { - content, err := data.Exported.FileContents(uri.Filename()) - if err != nil { - return nil, err - } - data.mappers[uri] = protocol.NewMapper(uri, content) - } - return data.mappers[uri], nil -} - -func (data *Data) Golden(t *testing.T, tag, target string, update func() ([]byte, error)) []byte { - t.Helper() - fragment, found := data.fragments[target] - if !found { - if filepath.IsAbs(target) { - t.Fatalf("invalid golden file fragment %v", target) - } - fragment = target - } - golden := data.golden[fragment] - if golden == nil { - if !*UpdateGolden { - t.Fatalf("could not find golden file %v: %v", fragment, tag) - } - golden = &Golden{ - Filename: filepath.Join(data.dir, fragment+goldenFileSuffix), - Archive: &txtar.Archive{}, - Modified: true, - } - data.golden[fragment] = golden - } - var file *txtar.File - for i := range golden.Archive.Files { - f := &golden.Archive.Files[i] - if f.Name == tag { - file = f - break - } - } - if *UpdateGolden { - if file == nil { - golden.Archive.Files = append(golden.Archive.Files, txtar.File{ - Name: tag, - }) - file = &golden.Archive.Files[len(golden.Archive.Files)-1] - } - contents, err := update() - if err != nil { - t.Fatalf("could not update golden file %v: %v", fragment, err) - } - file.Data = append(contents, '\n') // add trailing \n for txtar - golden.Modified = true - - } - if file == nil { - t.Fatalf("could not find golden contents %v: %v", fragment, tag) - } - if len(file.Data) == 0 { - return file.Data - } - return 
file.Data[:len(file.Data)-1] // drop the trailing \n -} - -func (data *Data) collectAddImports(spn span.Span, imp string) { - data.AddImport[spn.URI()] = imp -} - -func (data *Data) collectSemanticTokens(spn span.Span) { - data.SemanticTokens = append(data.SemanticTokens, spn) -} - -func (data *Data) collectSuggestedFixes(spn span.Span, actionKind, fix string) { - data.SuggestedFixes[spn] = append(data.SuggestedFixes[spn], SuggestedFix{actionKind, fix}) -} - -func (data *Data) collectSelectionRanges(spn span.Span) { - data.SelectionRanges = append(data.SelectionRanges, spn) -} - -func (data *Data) collectIncomingCalls(src span.Span, calls []span.Span) { - for _, call := range calls { - rng := data.mustRange(call) - // we're only comparing protocol.range - if data.CallHierarchy[src] != nil { - data.CallHierarchy[src].IncomingCalls = append(data.CallHierarchy[src].IncomingCalls, - protocol.CallHierarchyItem{ - URI: protocol.DocumentURI(call.URI()), - Range: rng, - }) - } else { - data.CallHierarchy[src] = &CallHierarchyResult{ - IncomingCalls: []protocol.CallHierarchyItem{ - {URI: protocol.DocumentURI(call.URI()), Range: rng}, - }, - } - } - } -} - -func (data *Data) collectOutgoingCalls(src span.Span, calls []span.Span) { - if data.CallHierarchy[src] == nil { - data.CallHierarchy[src] = &CallHierarchyResult{} - } - for _, call := range calls { - // we're only comparing protocol.range - data.CallHierarchy[src].OutgoingCalls = append(data.CallHierarchy[src].OutgoingCalls, - protocol.CallHierarchyItem{ - URI: protocol.DocumentURI(call.URI()), - Range: data.mustRange(call), - }) - } -} - -func (data *Data) collectInlayHints(src span.Span) { - data.InlayHints = append(data.InlayHints, src) -} - -func (data *Data) collectRenames(src span.Span, newText string) { - data.Renames[src] = newText -} - -// mustRange converts spn into a protocol.Range, panicking on any error. 
-func (data *Data) mustRange(spn span.Span) protocol.Range { - m, err := data.Mapper(spn.URI()) - rng, err := m.SpanRange(spn) - if err != nil { - panic(fmt.Sprintf("converting span %s to range: %v", spn, err)) - } - return rng -} - -func uriName(uri span.URI) string { - return filepath.Base(strings.TrimSuffix(uri.Filename(), ".go")) -} - -// TODO(golang/go#54845): improve the formatting here to match standard -// line:column position formatting. -func SpanName(spn span.Span) string { - return fmt.Sprintf("%v_%v_%v", uriName(spn.URI()), spn.Start().Line(), spn.Start().Column()) -} - -func shouldSkip(data *Data, uri span.URI) bool { - if data.ModfileFlagAvailable { - return false - } - // If the -modfile flag is not available, then we do not want to run - // any tests on the go.mod file. - if strings.HasSuffix(uri.Filename(), ".mod") { - return true - } - // If the -modfile flag is not available, then we do not want to test any - // uri that contains "go mod tidy". - m, err := data.Mapper(uri) - return err == nil && strings.Contains(string(m.Content), ", \"go mod tidy\",") -} diff --git a/gopls/internal/lsp/tests/util.go b/gopls/internal/lsp/tests/util.go deleted file mode 100644 index ea0920d2e62..00000000000 --- a/gopls/internal/lsp/tests/util.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package tests - -import ( - "fmt" - - "golang.org/x/tools/gopls/internal/lsp/protocol" -) - -// DiffCallHierarchyItems returns the diff between expected and actual call locations for incoming/outgoing call hierarchies -func DiffCallHierarchyItems(gotCalls []protocol.CallHierarchyItem, expectedCalls []protocol.CallHierarchyItem) string { - expected := make(map[protocol.Location]bool) - for _, call := range expectedCalls { - expected[protocol.Location{URI: call.URI, Range: call.Range}] = true - } - - got := make(map[protocol.Location]bool) - for _, call := range gotCalls { - got[protocol.Location{URI: call.URI, Range: call.Range}] = true - } - if len(got) != len(expected) { - return fmt.Sprintf("expected %d calls but got %d", len(expected), len(got)) - } - for spn := range got { - if !expected[spn] { - return fmt.Sprintf("incorrect calls, expected locations %v but got locations %v", expected, got) - } - } - return "" -} diff --git a/gopls/internal/lsp/text_synchronization.go b/gopls/internal/lsp/text_synchronization.go deleted file mode 100644 index bccba846110..00000000000 --- a/gopls/internal/lsp/text_synchronization.go +++ /dev/null @@ -1,367 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "bytes" - "context" - "errors" - "fmt" - "path/filepath" - "sync" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/tag" - "golang.org/x/tools/internal/jsonrpc2" -) - -// ModificationSource identifies the origin of a change. -type ModificationSource int - -const ( - // FromDidOpen is from a didOpen notification. - FromDidOpen = ModificationSource(iota) - - // FromDidChange is from a didChange notification. 
- FromDidChange - - // FromDidChangeWatchedFiles is from didChangeWatchedFiles notification. - FromDidChangeWatchedFiles - - // FromDidSave is from a didSave notification. - FromDidSave - - // FromDidClose is from a didClose notification. - FromDidClose - - // FromDidChangeConfiguration is from a didChangeConfiguration notification. - FromDidChangeConfiguration - - // FromRegenerateCgo refers to file modifications caused by regenerating - // the cgo sources for the workspace. - FromRegenerateCgo - - // FromInitialWorkspaceLoad refers to the loading of all packages in the - // workspace when the view is first created. - FromInitialWorkspaceLoad -) - -func (m ModificationSource) String() string { - switch m { - case FromDidOpen: - return "opened files" - case FromDidChange: - return "changed files" - case FromDidChangeWatchedFiles: - return "files changed on disk" - case FromDidSave: - return "saved files" - case FromDidClose: - return "close files" - case FromRegenerateCgo: - return "regenerate cgo" - case FromInitialWorkspaceLoad: - return "initial workspace load" - default: - return "unknown file modification" - } -} - -func (s *Server) didOpen(ctx context.Context, params *protocol.DidOpenTextDocumentParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didOpen", tag.URI.Of(params.TextDocument.URI)) - defer done() - - uri := params.TextDocument.URI.SpanURI() - if !uri.IsFile() { - return nil - } - // There may not be any matching view in the current session. If that's - // the case, try creating a new view based on the opened file path. - // - // TODO(rstambler): This seems like it would continuously add new - // views, but it won't because ViewOf only returns an error when there - // are no views in the session. I don't know if that logic should go - // here, or if we can continue to rely on that implementation detail. - // - // TODO(golang/go#57979): this will be generalized to a different view calculation. 
- if _, err := s.session.ViewOf(uri); err != nil { - dir := filepath.Dir(uri.Filename()) - if err := s.addFolders(ctx, []protocol.WorkspaceFolder{{ - URI: string(protocol.URIFromPath(dir)), - Name: filepath.Base(dir), - }}); err != nil { - return err - } - } - return s.didModifyFiles(ctx, []source.FileModification{{ - URI: uri, - Action: source.Open, - Version: params.TextDocument.Version, - Text: []byte(params.TextDocument.Text), - LanguageID: params.TextDocument.LanguageID, - }}, FromDidOpen) -} - -func (s *Server) didChange(ctx context.Context, params *protocol.DidChangeTextDocumentParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didChange", tag.URI.Of(params.TextDocument.URI)) - defer done() - - uri := params.TextDocument.URI.SpanURI() - if !uri.IsFile() { - return nil - } - - text, err := s.changedText(ctx, uri, params.ContentChanges) - if err != nil { - return err - } - c := source.FileModification{ - URI: uri, - Action: source.Change, - Version: params.TextDocument.Version, - Text: text, - } - if err := s.didModifyFiles(ctx, []source.FileModification{c}, FromDidChange); err != nil { - return err - } - return s.warnAboutModifyingGeneratedFiles(ctx, uri) -} - -// warnAboutModifyingGeneratedFiles shows a warning if a user tries to edit a -// generated file for the first time. -func (s *Server) warnAboutModifyingGeneratedFiles(ctx context.Context, uri span.URI) error { - s.changedFilesMu.Lock() - _, ok := s.changedFiles[uri] - if !ok { - s.changedFiles[uri] = struct{}{} - } - s.changedFilesMu.Unlock() - - // This file has already been edited before. - if ok { - return nil - } - - // Ideally, we should be able to specify that a generated file should - // be opened as read-only. Tell the user that they should not be - // editing a generated file. 
- view, err := s.session.ViewOf(uri) - if err != nil { - return err - } - snapshot, release, err := view.Snapshot() - if err != nil { - return err - } - isGenerated := source.IsGenerated(ctx, snapshot, uri) - release() - - if !isGenerated { - return nil - } - return s.client.ShowMessage(ctx, &protocol.ShowMessageParams{ - Message: fmt.Sprintf("Do not edit this file! %s is a generated file.", uri.Filename()), - Type: protocol.Warning, - }) -} - -func (s *Server) didChangeWatchedFiles(ctx context.Context, params *protocol.DidChangeWatchedFilesParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didChangeWatchedFiles") - defer done() - - var modifications []source.FileModification - for _, change := range params.Changes { - uri := change.URI.SpanURI() - if !uri.IsFile() { - continue - } - action := changeTypeToFileAction(change.Type) - modifications = append(modifications, source.FileModification{ - URI: uri, - Action: action, - OnDisk: true, - }) - } - return s.didModifyFiles(ctx, modifications, FromDidChangeWatchedFiles) -} - -func (s *Server) didSave(ctx context.Context, params *protocol.DidSaveTextDocumentParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didSave", tag.URI.Of(params.TextDocument.URI)) - defer done() - - uri := params.TextDocument.URI.SpanURI() - if !uri.IsFile() { - return nil - } - c := source.FileModification{ - URI: uri, - Action: source.Save, - } - if params.Text != nil { - c.Text = []byte(*params.Text) - } - return s.didModifyFiles(ctx, []source.FileModification{c}, FromDidSave) -} - -func (s *Server) didClose(ctx context.Context, params *protocol.DidCloseTextDocumentParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didClose", tag.URI.Of(params.TextDocument.URI)) - defer done() - - uri := params.TextDocument.URI.SpanURI() - if !uri.IsFile() { - return nil - } - return s.didModifyFiles(ctx, []source.FileModification{ - { - URI: uri, - Action: source.Close, - Version: -1, - Text: nil, - }, - }, FromDidClose) -} - -func 
(s *Server) didModifyFiles(ctx context.Context, modifications []source.FileModification, cause ModificationSource) error { - // wg guards two conditions: - // 1. didModifyFiles is complete - // 2. the goroutine diagnosing changes on behalf of didModifyFiles is - // complete, if it was started - // - // Both conditions must be satisfied for the purpose of testing: we don't - // want to observe the completion of change processing until we have received - // all diagnostics as well as all server->client notifications done on behalf - // of this function. - var wg sync.WaitGroup - wg.Add(1) - defer wg.Done() - - if s.Options().VerboseWorkDoneProgress { - work := s.progress.Start(ctx, DiagnosticWorkTitle(cause), "Calculating file diagnostics...", nil, nil) - go func() { - wg.Wait() - work.End(ctx, "Done.") - }() - } - - onDisk := cause == FromDidChangeWatchedFiles - - s.stateMu.Lock() - if s.state >= serverShutDown { - // This state check does not prevent races below, and exists only to - // produce a better error message. The actual race to the cache should be - // guarded by Session.viewMu. - s.stateMu.Unlock() - return errors.New("server is shut down") - } - s.stateMu.Unlock() - - // If the set of changes included directories, expand those directories - // to their files. - modifications = s.session.ExpandModificationsToDirectories(ctx, modifications) - - // Build a lookup map for file modifications, so that we can later join - // with the snapshot file associations. - modMap := make(map[span.URI]source.FileModification) - for _, mod := range modifications { - modMap[mod.URI] = mod - } - - snapshots, release, err := s.session.DidModifyFiles(ctx, modifications) - if err != nil { - return err - } - - // golang/go#50267: diagnostics should be re-sent after an open or close. For - // some clients, it may be helpful to re-send after each change. 
- for snapshot, uris := range snapshots { - for _, uri := range uris { - mod := modMap[uri] - if snapshot.Options().ChattyDiagnostics || mod.Action == source.Open || mod.Action == source.Close { - s.mustPublishDiagnostics(uri) - } - } - } - - wg.Add(1) - go func() { - s.diagnoseSnapshots(snapshots, onDisk, cause) - release() - wg.Done() - }() - - // After any file modifications, we need to update our watched files, - // in case something changed. Compute the new set of directories to watch, - // and if it differs from the current set, send updated registrations. - return s.updateWatchedDirectories(ctx) -} - -// DiagnosticWorkTitle returns the title of the diagnostic work resulting from a -// file change originating from the given cause. -func DiagnosticWorkTitle(cause ModificationSource) string { - return fmt.Sprintf("diagnosing %v", cause) -} - -func (s *Server) changedText(ctx context.Context, uri span.URI, changes []protocol.TextDocumentContentChangeEvent) ([]byte, error) { - if len(changes) == 0 { - return nil, fmt.Errorf("%w: no content changes provided", jsonrpc2.ErrInternal) - } - - // Check if the client sent the full content of the file. - // We accept a full content change even if the server expected incremental changes. - if len(changes) == 1 && changes[0].Range == nil && changes[0].RangeLength == 0 { - return []byte(changes[0].Text), nil - } - return s.applyIncrementalChanges(ctx, uri, changes) -} - -func (s *Server) applyIncrementalChanges(ctx context.Context, uri span.URI, changes []protocol.TextDocumentContentChangeEvent) ([]byte, error) { - fh, err := s.session.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - content, err := fh.Content() - if err != nil { - return nil, fmt.Errorf("%w: file not found (%v)", jsonrpc2.ErrInternal, err) - } - for _, change := range changes { - // TODO(adonovan): refactor to use diff.Apply, which is robust w.r.t. - // out-of-order or overlapping changes---and much more efficient. 
- - // Make sure to update mapper along with the content. - m := protocol.NewMapper(uri, content) - if change.Range == nil { - return nil, fmt.Errorf("%w: unexpected nil range for change", jsonrpc2.ErrInternal) - } - spn, err := m.RangeSpan(*change.Range) - if err != nil { - return nil, err - } - start, end := spn.Start().Offset(), spn.End().Offset() - if end < start { - return nil, fmt.Errorf("%w: invalid range for content change", jsonrpc2.ErrInternal) - } - var buf bytes.Buffer - buf.Write(content[:start]) - buf.WriteString(change.Text) - buf.Write(content[end:]) - content = buf.Bytes() - } - return content, nil -} - -func changeTypeToFileAction(ct protocol.FileChangeType) source.FileAction { - switch ct { - case protocol.Changed: - return source.Change - case protocol.Created: - return source.Create - case protocol.Deleted: - return source.Delete - } - return source.UnknownFileAction -} diff --git a/gopls/internal/lsp/work/completion.go b/gopls/internal/lsp/work/completion.go deleted file mode 100644 index 36a3ad60acc..00000000000 --- a/gopls/internal/lsp/work/completion.go +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package work - -import ( - "context" - "errors" - "fmt" - "os" - "path/filepath" - "sort" - "strings" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/event" -) - -func Completion(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, position protocol.Position) (*protocol.CompletionList, error) { - ctx, done := event.Start(ctx, "work.Completion") - defer done() - - // Get the position of the cursor. 
- pw, err := snapshot.ParseWork(ctx, fh) - if err != nil { - return nil, fmt.Errorf("getting go.work file handle: %w", err) - } - cursor, err := pw.Mapper.PositionOffset(position) - if err != nil { - return nil, fmt.Errorf("computing cursor offset: %w", err) - } - - // Find the use statement the user is in. - use, pathStart, _ := usePath(pw, cursor) - if use == nil { - return &protocol.CompletionList{}, nil - } - completingFrom := use.Path[:cursor-pathStart] - - // We're going to find the completions of the user input - // (completingFrom) by doing a walk on the innermost directory - // of the given path, and comparing the found paths to make sure - // that they match the component of the path after the - // innermost directory. - // - // We'll maintain two paths when doing this: pathPrefixSlash - // is essentially the path the user typed in, and pathPrefixAbs - // is the path made absolute from the go.work directory. - - pathPrefixSlash := completingFrom - pathPrefixAbs := filepath.FromSlash(pathPrefixSlash) - if !filepath.IsAbs(pathPrefixAbs) { - pathPrefixAbs = filepath.Join(filepath.Dir(pw.URI.Filename()), pathPrefixAbs) - } - - // pathPrefixDir is the directory that will be walked to find matches. - // If pathPrefixSlash is not explicitly a directory boundary (is either equivalent to "." or - // ends in a separator) we need to examine its parent directory to find sibling files that - // match. - depthBound := 5 - pathPrefixDir, pathPrefixBase := pathPrefixAbs, "" - pathPrefixSlashDir := pathPrefixSlash - if filepath.Clean(pathPrefixSlash) != "." && !strings.HasSuffix(pathPrefixSlash, "/") { - depthBound++ - pathPrefixDir, pathPrefixBase = filepath.Split(pathPrefixAbs) - pathPrefixSlashDir = dirNonClean(pathPrefixSlash) - } - - var completions []string - // Stop traversing deeper once we've hit 10k files to try to stay generally under 100ms. 
- const numSeenBound = 10000 - var numSeen int - stopWalking := errors.New("hit numSeenBound") - err = filepath.Walk(pathPrefixDir, func(wpath string, info os.FileInfo, err error) error { - if numSeen > numSeenBound { - // Stop traversing if we hit bound. - return stopWalking - } - numSeen++ - - // rel is the path relative to pathPrefixDir. - // Make sure that it has pathPrefixBase as a prefix - // otherwise it won't match the beginning of the - // base component of the path the user typed in. - rel := strings.TrimPrefix(wpath[len(pathPrefixDir):], string(filepath.Separator)) - if info.IsDir() && wpath != pathPrefixDir && !strings.HasPrefix(rel, pathPrefixBase) { - return filepath.SkipDir - } - - // Check for a match (a module directory). - if filepath.Base(rel) == "go.mod" { - relDir := strings.TrimSuffix(dirNonClean(rel), string(os.PathSeparator)) - completionPath := join(pathPrefixSlashDir, filepath.ToSlash(relDir)) - - if !strings.HasPrefix(completionPath, completingFrom) { - return nil - } - if strings.HasSuffix(completionPath, "/") { - // Don't suggest paths that end in "/". This happens - // when the input is a path that ends in "/" and - // the completion is empty. - return nil - } - completion := completionPath[len(completingFrom):] - if completingFrom == "" && !strings.HasPrefix(completion, "./") { - // Bias towards "./" prefixes. 
- completion = join(".", completion) - } - - completions = append(completions, completion) - } - - if depth := strings.Count(rel, string(filepath.Separator)); depth >= depthBound { - return filepath.SkipDir - } - return nil - }) - if err != nil && !errors.Is(err, stopWalking) { - return nil, fmt.Errorf("walking to find completions: %w", err) - } - - sort.Strings(completions) - - items := []protocol.CompletionItem{} // must be a slice - for _, c := range completions { - items = append(items, protocol.CompletionItem{ - Label: c, - InsertText: c, - }) - } - return &protocol.CompletionList{Items: items}, nil -} - -// dirNonClean is filepath.Dir, without the Clean at the end. -func dirNonClean(path string) string { - vol := filepath.VolumeName(path) - i := len(path) - 1 - for i >= len(vol) && !os.IsPathSeparator(path[i]) { - i-- - } - return path[len(vol) : i+1] -} - -func join(a, b string) string { - if a == "" { - return b - } - if b == "" { - return a - } - return strings.TrimSuffix(a, "/") + "/" + b -} diff --git a/gopls/internal/lsp/work/diagnostics.go b/gopls/internal/lsp/work/diagnostics.go deleted file mode 100644 index 2977dc32b78..00000000000 --- a/gopls/internal/lsp/work/diagnostics.go +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package work - -import ( - "context" - "fmt" - "os" - "path/filepath" - - "golang.org/x/mod/modfile" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" -) - -func Diagnostics(ctx context.Context, snapshot source.Snapshot) (map[span.URI][]*source.Diagnostic, error) { - ctx, done := event.Start(ctx, "work.Diagnostics", source.SnapshotLabels(snapshot)...) 
- defer done() - - reports := map[span.URI][]*source.Diagnostic{} - uri := snapshot.WorkFile() - if uri == "" { - return nil, nil - } - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - reports[fh.URI()] = []*source.Diagnostic{} - diagnostics, err := DiagnosticsForWork(ctx, snapshot, fh) - if err != nil { - return nil, err - } - for _, d := range diagnostics { - fh, err := snapshot.ReadFile(ctx, d.URI) - if err != nil { - return nil, err - } - reports[fh.URI()] = append(reports[fh.URI()], d) - } - - return reports, nil -} - -func DiagnosticsForWork(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) ([]*source.Diagnostic, error) { - pw, err := snapshot.ParseWork(ctx, fh) - if err != nil { - if pw == nil || len(pw.ParseErrors) == 0 { - return nil, err - } - return pw.ParseErrors, nil - } - - // Add diagnostic if a directory does not contain a module. - var diagnostics []*source.Diagnostic - for _, use := range pw.File.Use { - rng, err := pw.Mapper.OffsetRange(use.Syntax.Start.Byte, use.Syntax.End.Byte) - if err != nil { - return nil, err - } - - modfh, err := snapshot.ReadFile(ctx, modFileURI(pw, use)) - if err != nil { - return nil, err - } - if _, err := modfh.Content(); err != nil && os.IsNotExist(err) { - diagnostics = append(diagnostics, &source.Diagnostic{ - URI: fh.URI(), - Range: rng, - Severity: protocol.SeverityError, - Source: source.WorkFileError, - Message: fmt.Sprintf("directory %v does not contain a module", use.Path), - }) - } - } - return diagnostics, nil -} - -func modFileURI(pw *source.ParsedWorkFile, use *modfile.Use) span.URI { - workdir := filepath.Dir(pw.URI.Filename()) - - modroot := filepath.FromSlash(use.Path) - if !filepath.IsAbs(modroot) { - modroot = filepath.Join(workdir, modroot) - } - - return span.URIFromPath(filepath.Join(modroot, "go.mod")) -} diff --git a/gopls/internal/lsp/work/format.go b/gopls/internal/lsp/work/format.go deleted file mode 100644 index 70cbe59d174..00000000000 --- 
a/gopls/internal/lsp/work/format.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package work - -import ( - "context" - - "golang.org/x/mod/modfile" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/event" -) - -func Format(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) ([]protocol.TextEdit, error) { - ctx, done := event.Start(ctx, "work.Format") - defer done() - - pw, err := snapshot.ParseWork(ctx, fh) - if err != nil { - return nil, err - } - formatted := modfile.Format(pw.File.Syntax) - // Calculate the edits to be made due to the change. - diffs := snapshot.Options().ComputeEdits(string(pw.Mapper.Content), string(formatted)) - return source.ToProtocolEdits(pw.Mapper, diffs) -} diff --git a/gopls/internal/lsp/work/hover.go b/gopls/internal/lsp/work/hover.go deleted file mode 100644 index d777acdf3b4..00000000000 --- a/gopls/internal/lsp/work/hover.go +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package work - -import ( - "bytes" - "context" - "fmt" - - "golang.org/x/mod/modfile" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/internal/event" -) - -func Hover(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, position protocol.Position) (*protocol.Hover, error) { - // We only provide hover information for the view's go.work file. - if fh.URI() != snapshot.WorkFile() { - return nil, nil - } - - ctx, done := event.Start(ctx, "work.Hover") - defer done() - - // Get the position of the cursor. 
- pw, err := snapshot.ParseWork(ctx, fh) - if err != nil { - return nil, fmt.Errorf("getting go.work file handle: %w", err) - } - offset, err := pw.Mapper.PositionOffset(position) - if err != nil { - return nil, fmt.Errorf("computing cursor offset: %w", err) - } - - // Confirm that the cursor is inside a use statement, and then find - // the position of the use statement's directory path. - use, pathStart, pathEnd := usePath(pw, offset) - - // The cursor position is not on a use statement. - if use == nil { - return nil, nil - } - - // Get the mod file denoted by the use. - modfh, err := snapshot.ReadFile(ctx, modFileURI(pw, use)) - if err != nil { - return nil, fmt.Errorf("getting modfile handle: %w", err) - } - pm, err := snapshot.ParseMod(ctx, modfh) - if err != nil { - return nil, fmt.Errorf("getting modfile handle: %w", err) - } - if pm.File.Module == nil { - return nil, fmt.Errorf("modfile has no module declaration") - } - mod := pm.File.Module.Mod - - // Get the range to highlight for the hover. - rng, err := pw.Mapper.OffsetRange(pathStart, pathEnd) - if err != nil { - return nil, err - } - options := snapshot.Options() - return &protocol.Hover{ - Contents: protocol.MarkupContent{ - Kind: options.PreferredContentFormat, - Value: mod.Path, - }, - Range: rng, - }, nil -} - -func usePath(pw *source.ParsedWorkFile, offset int) (use *modfile.Use, pathStart, pathEnd int) { - for _, u := range pw.File.Use { - path := []byte(u.Path) - s, e := u.Syntax.Start.Byte, u.Syntax.End.Byte - i := bytes.Index(pw.Mapper.Content[s:e], path) - if i == -1 { - // This should not happen. - continue - } - // Shift the start position to the location of the - // module directory within the use statement. 
- pathStart, pathEnd = s+i, s+i+len(path) - if pathStart <= offset && offset <= pathEnd { - return u, pathStart, pathEnd - } - } - return nil, 0, 0 -} diff --git a/gopls/internal/lsp/workspace.go b/gopls/internal/lsp/workspace.go deleted file mode 100644 index 0a7dd4c73a7..00000000000 --- a/gopls/internal/lsp/workspace.go +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - "fmt" - "sync" - - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/event" -) - -func (s *Server) didChangeWorkspaceFolders(ctx context.Context, params *protocol.DidChangeWorkspaceFoldersParams) error { - event := params.Event - for _, folder := range event.Removed { - view := s.session.ViewByName(folder.Name) - if view != nil { - s.session.RemoveView(view) - } else { - return fmt.Errorf("view %s for %v not found", folder.Name, folder.URI) - } - } - return s.addFolders(ctx, event.Added) -} - -// addView returns a Snapshot and a release function that must be -// called when it is no longer needed. 
-func (s *Server) addView(ctx context.Context, name string, uri span.URI) (source.Snapshot, func(), error) { - s.stateMu.Lock() - state := s.state - s.stateMu.Unlock() - if state < serverInitialized { - return nil, nil, fmt.Errorf("addView called before server initialized") - } - options, err := s.fetchFolderOptions(ctx, uri) - if err != nil { - return nil, nil, err - } - folder := &cache.Folder{ - Dir: uri, - Name: name, - Options: options, - } - _, snapshot, release, err := s.session.NewView(ctx, folder) - return snapshot, release, err -} - -func (s *Server) didChangeConfiguration(ctx context.Context, _ *protocol.DidChangeConfigurationParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didChangeConfiguration") - defer done() - - // Apply any changes to the session-level settings. - options, err := s.fetchFolderOptions(ctx, "") - if err != nil { - return err - } - s.SetOptions(options) - - // Collect options for all workspace folders. - seen := make(map[span.URI]bool) - for _, view := range s.session.Views() { - if seen[view.Folder()] { - continue - } - seen[view.Folder()] = true - options, err := s.fetchFolderOptions(ctx, view.Folder()) - if err != nil { - return err - } - s.session.SetFolderOptions(ctx, view.Folder(), options) - } - - var wg sync.WaitGroup - for _, view := range s.session.Views() { - view := view - wg.Add(1) - go func() { - defer wg.Done() - snapshot, release, err := view.Snapshot() - if err != nil { - return // view is shut down; no need to diagnose - } - defer release() - s.diagnoseSnapshot(snapshot, nil, false, 0) - }() - } - - if s.Options().VerboseWorkDoneProgress { - work := s.progress.Start(ctx, DiagnosticWorkTitle(FromDidChangeConfiguration), "Calculating diagnostics...", nil, nil) - go func() { - wg.Wait() - work.End(ctx, "Done.") - }() - } - - // An options change may have affected the detected Go version. 
- s.checkViewGoVersions() - - return nil -} diff --git a/gopls/internal/lsp/workspace_symbol.go b/gopls/internal/lsp/workspace_symbol.go deleted file mode 100644 index eb690b047e5..00000000000 --- a/gopls/internal/lsp/workspace_symbol.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lsp - -import ( - "context" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/telemetry" - "golang.org/x/tools/internal/event" -) - -func (s *Server) symbol(ctx context.Context, params *protocol.WorkspaceSymbolParams) (_ []protocol.SymbolInformation, rerr error) { - recordLatency := telemetry.StartLatencyTimer("symbol") - defer func() { - recordLatency(ctx, rerr) - }() - - ctx, done := event.Start(ctx, "lsp.Server.symbol") - defer done() - - views := s.session.Views() - matcher := s.Options().SymbolMatcher - style := s.Options().SymbolStyle - // TODO(rfindley): it looks wrong that we need to pass views here. 
- // - // Evidence: - // - this is the only place we convert views to []source.View - // - workspace symbols is the only place where we call source.View.Snapshot - var sourceViews []source.View - for _, v := range views { - sourceViews = append(sourceViews, v) - } - return source.WorkspaceSymbols(ctx, matcher, style, sourceViews, params.Query) -} diff --git a/gopls/internal/lsp/lsprpc/autostart_default.go b/gopls/internal/lsprpc/autostart_default.go similarity index 96% rename from gopls/internal/lsp/lsprpc/autostart_default.go rename to gopls/internal/lsprpc/autostart_default.go index 20b974728d9..a170b56203c 100644 --- a/gopls/internal/lsp/lsprpc/autostart_default.go +++ b/gopls/internal/lsprpc/autostart_default.go @@ -6,8 +6,7 @@ package lsprpc import ( "fmt" - - exec "golang.org/x/sys/execabs" + "os/exec" ) var ( diff --git a/gopls/internal/lsp/lsprpc/autostart_posix.go b/gopls/internal/lsprpc/autostart_posix.go similarity index 98% rename from gopls/internal/lsp/lsprpc/autostart_posix.go rename to gopls/internal/lsprpc/autostart_posix.go index 90cc72ddf10..6aeac3ec70d 100644 --- a/gopls/internal/lsp/lsprpc/autostart_posix.go +++ b/gopls/internal/lsprpc/autostart_posix.go @@ -13,12 +13,11 @@ import ( "fmt" "log" "os" + "os/exec" "os/user" "path/filepath" "strconv" "syscall" - - exec "golang.org/x/sys/execabs" ) func init() { diff --git a/gopls/internal/lsprpc/binder.go b/gopls/internal/lsprpc/binder.go new file mode 100644 index 00000000000..708e0ad6afe --- /dev/null +++ b/gopls/internal/lsprpc/binder.go @@ -0,0 +1,5 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package lsprpc diff --git a/gopls/internal/lsprpc/binder_test.go b/gopls/internal/lsprpc/binder_test.go new file mode 100644 index 00000000000..042056e7777 --- /dev/null +++ b/gopls/internal/lsprpc/binder_test.go @@ -0,0 +1,199 @@ +// Copyright 2021 The Go Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package lsprpc_test + +import ( + "context" + "regexp" + "strings" + "testing" + "time" + + "golang.org/x/tools/gopls/internal/protocol" + jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" + + . "golang.org/x/tools/gopls/internal/lsprpc" +) + +// ServerBinder binds incoming connections to a new server. +type ServerBinder struct { + newServer ServerFunc +} + +func NewServerBinder(newServer ServerFunc) *ServerBinder { + return &ServerBinder{newServer: newServer} +} + +// streamServer used to have this method, but it was never used. +// TODO(adonovan): figure out whether we need any of this machinery +// and, if not, delete it. In the meantime, it's better that it sit +// in the test package with all the other mothballed machinery +// than in the production code where it would couple streamServer +// and ServerBinder. +/* +func (s *streamServer) Binder() *ServerBinder { + newServer := func(ctx context.Context, client protocol.ClientCloser) protocol.Server { + session := cache.NewSession(ctx, s.cache) + svr := s.serverForTest + if svr == nil { + options := settings.DefaultOptions(s.optionsOverrides) + svr = server.New(session, client, options) + if instance := debug.GetInstance(ctx); instance != nil { + instance.AddService(svr, session) + } + } + return svr + } + return NewServerBinder(newServer) +} +*/ + +func (b *ServerBinder) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { + client := protocol.ClientDispatcherV2(conn) + server := b.newServer(ctx, client) + serverHandler := protocol.ServerHandlerV2(server) + // Wrap the server handler to inject the client into each request context, so + // that log events are reflected back to the client. 
+ wrapped := jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { + ctx = protocol.WithClient(ctx, client) + return serverHandler.Handle(ctx, req) + }) + preempter := &Canceler{ + Conn: conn, + } + return jsonrpc2_v2.ConnectionOptions{ + Handler: wrapped, + Preempter: preempter, + } +} + +type TestEnv struct { + Conns []*jsonrpc2_v2.Connection + Servers []*jsonrpc2_v2.Server +} + +func (e *TestEnv) Shutdown(t *testing.T) { + for _, s := range e.Servers { + s.Shutdown() + } + for _, c := range e.Conns { + if err := c.Close(); err != nil { + t.Error(err) + } + } + for _, s := range e.Servers { + if err := s.Wait(); err != nil { + t.Error(err) + } + } +} + +func (e *TestEnv) serve(ctx context.Context, t *testing.T, server jsonrpc2_v2.Binder) (jsonrpc2_v2.Listener, *jsonrpc2_v2.Server) { + l, err := jsonrpc2_v2.NetPipeListener(ctx) + if err != nil { + t.Fatal(err) + } + s := jsonrpc2_v2.NewServer(ctx, l, server) + e.Servers = append(e.Servers, s) + return l, s +} + +func (e *TestEnv) dial(ctx context.Context, t *testing.T, dialer jsonrpc2_v2.Dialer, client jsonrpc2_v2.Binder, forwarded bool) *jsonrpc2_v2.Connection { + if forwarded { + l, _ := e.serve(ctx, t, NewForwardBinder(dialer)) + dialer = l.Dialer() + } + conn, err := jsonrpc2_v2.Dial(ctx, dialer, client) + if err != nil { + t.Fatal(err) + } + e.Conns = append(e.Conns, conn) + return conn +} + +func staticClientBinder(client protocol.Client) jsonrpc2_v2.Binder { + f := func(context.Context, protocol.Server) protocol.Client { return client } + return NewClientBinder(f) +} + +func staticServerBinder(server protocol.Server) jsonrpc2_v2.Binder { + f := func(ctx context.Context, client protocol.ClientCloser) protocol.Server { + return server + } + return NewServerBinder(f) +} + +func TestClientLoggingV2(t *testing.T) { + ctx := context.Background() + + for name, forwarded := range map[string]bool{ + "forwarded": true, + "standalone": false, + } { + t.Run(name, func(t 
*testing.T) { + client := FakeClient{Logs: make(chan string, 10)} + env := new(TestEnv) + defer env.Shutdown(t) + l, _ := env.serve(ctx, t, staticServerBinder(PingServer{})) + conn := env.dial(ctx, t, l.Dialer(), staticClientBinder(client), forwarded) + + if err := protocol.ServerDispatcherV2(conn).DidOpen(ctx, &protocol.DidOpenTextDocumentParams{}); err != nil { + t.Errorf("DidOpen: %v", err) + } + select { + case got := <-client.Logs: + want := "ping" + matched, err := regexp.MatchString(want, got) + if err != nil { + t.Fatal(err) + } + if !matched { + t.Errorf("got log %q, want a log containing %q", got, want) + } + case <-time.After(1 * time.Second): + t.Error("timeout waiting for client log") + } + }) + } +} + +func TestRequestCancellationV2(t *testing.T) { + ctx := context.Background() + + for name, forwarded := range map[string]bool{ + "forwarded": true, + "standalone": false, + } { + t.Run(name, func(t *testing.T) { + server := WaitableServer{ + Started: make(chan struct{}), + Completed: make(chan error), + } + env := new(TestEnv) + defer env.Shutdown(t) + l, _ := env.serve(ctx, t, staticServerBinder(server)) + client := FakeClient{Logs: make(chan string, 10)} + conn := env.dial(ctx, t, l.Dialer(), staticClientBinder(client), forwarded) + + sd := protocol.ServerDispatcherV2(conn) + ctx, cancel := context.WithCancel(ctx) + + result := make(chan error) + go func() { + _, err := sd.Hover(ctx, &protocol.HoverParams{}) + result <- err + }() + // Wait for the Hover request to start. 
+ <-server.Started + cancel() + if err := <-result; err == nil { + t.Error("nil error for cancelled Hover(), want non-nil") + } + if err := <-server.Completed; err == nil || !strings.Contains(err.Error(), "cancelled hover") { + t.Errorf("Hover(): unexpected server-side error %v", err) + } + }) + } +} diff --git a/gopls/internal/lsprpc/commandinterceptor_test.go b/gopls/internal/lsprpc/commandinterceptor_test.go new file mode 100644 index 00000000000..7c83ef993f0 --- /dev/null +++ b/gopls/internal/lsprpc/commandinterceptor_test.go @@ -0,0 +1,61 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package lsprpc_test + +import ( + "context" + "encoding/json" + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" + + . "golang.org/x/tools/gopls/internal/lsprpc" +) + +func CommandInterceptor(command string, run func(*protocol.ExecuteCommandParams) (interface{}, error)) Middleware { + return BindHandler(func(delegate jsonrpc2_v2.Handler) jsonrpc2_v2.Handler { + return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { + if req.Method == "workspace/executeCommand" { + var params protocol.ExecuteCommandParams + if err := json.Unmarshal(req.Params, ¶ms); err == nil { + if params.Command == command { + return run(¶ms) + } + } + } + + return delegate.Handle(ctx, req) + }) + }) +} + +func TestCommandInterceptor(t *testing.T) { + const command = "foo" + caught := false + intercept := func(_ *protocol.ExecuteCommandParams) (interface{}, error) { + caught = true + return map[string]interface{}{}, nil + } + + ctx := context.Background() + env := new(TestEnv) + defer env.Shutdown(t) + mw := CommandInterceptor(command, intercept) + l, _ := env.serve(ctx, t, mw(noopBinder)) + conn := env.dial(ctx, t, l.Dialer(), noopBinder, false) + + params := 
&protocol.ExecuteCommandParams{ + Command: command, + } + var res interface{} + err := conn.Call(ctx, "workspace/executeCommand", params).Await(ctx, &res) + if err != nil { + t.Fatal(err) + } + if !caught { + t.Errorf("workspace/executeCommand was not intercepted") + } +} diff --git a/gopls/internal/lsp/lsprpc/dialer.go b/gopls/internal/lsprpc/dialer.go similarity index 86% rename from gopls/internal/lsp/lsprpc/dialer.go rename to gopls/internal/lsprpc/dialer.go index 37e0c568031..a5f038df9f1 100644 --- a/gopls/internal/lsp/lsprpc/dialer.go +++ b/gopls/internal/lsprpc/dialer.go @@ -10,18 +10,18 @@ import ( "io" "net" "os" + "os/exec" "time" - exec "golang.org/x/sys/execabs" "golang.org/x/tools/internal/event" ) -// AutoNetwork is the pseudo network type used to signal that gopls should use +// autoNetwork is the pseudo network type used to signal that gopls should use // automatic discovery to resolve a remote address. -const AutoNetwork = "auto" +const autoNetwork = "auto" -// An AutoDialer is a jsonrpc2 dialer that understands the 'auto' network. -type AutoDialer struct { +// An autoDialer is a jsonrpc2 dialer that understands the 'auto' network. +type autoDialer struct { network, addr string // the 'real' network and address isAuto bool // whether the server is on the 'auto' network @@ -29,12 +29,12 @@ type AutoDialer struct { argFunc func(network, addr string) []string } -func NewAutoDialer(rawAddr string, argFunc func(network, addr string) []string) (*AutoDialer, error) { - d := AutoDialer{ +func newAutoDialer(rawAddr string, argFunc func(network, addr string) []string) (*autoDialer, error) { + d := autoDialer{ argFunc: argFunc, } d.network, d.addr = ParseAddr(rawAddr) - if d.network == AutoNetwork { + if d.network == autoNetwork { d.isAuto = true bin, err := os.Executable() if err != nil { @@ -47,14 +47,14 @@ func NewAutoDialer(rawAddr string, argFunc func(network, addr string) []string) } // Dial implements the jsonrpc2.Dialer interface. 
-func (d *AutoDialer) Dial(ctx context.Context) (io.ReadWriteCloser, error) { +func (d *autoDialer) Dial(ctx context.Context) (io.ReadWriteCloser, error) { conn, err := d.dialNet(ctx) return conn, err } // TODO(rFindley): remove this once we no longer need to integrate with v1 of // the jsonrpc2 package. -func (d *AutoDialer) dialNet(ctx context.Context) (net.Conn, error) { +func (d *autoDialer) dialNet(ctx context.Context) (net.Conn, error) { // Attempt to verify that we own the remote. This is imperfect, but if we can // determine that the remote is owned by a different user, we should fail. ok, err := verifyRemoteOwnership(d.network, d.addr) diff --git a/gopls/internal/lsprpc/export_test.go b/gopls/internal/lsprpc/export_test.go new file mode 100644 index 00000000000..509129870dc --- /dev/null +++ b/gopls/internal/lsprpc/export_test.go @@ -0,0 +1,142 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package lsprpc + +// This file defines things (and opens backdoors) needed only by tests. + +import ( + "context" + "encoding/json" + "fmt" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" + jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" + "golang.org/x/tools/internal/xcontext" +) + +const HandshakeMethod = handshakeMethod + +// A ServerFunc is used to construct an LSP server for a given client. 
+type ServerFunc func(context.Context, protocol.ClientCloser) protocol.Server + +type Canceler struct { + Conn *jsonrpc2_v2.Connection +} + +func (c *Canceler) Preempt(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { + if req.Method != "$/cancelRequest" { + return nil, jsonrpc2_v2.ErrNotHandled + } + var params protocol.CancelParams + if err := json.Unmarshal(req.Params, ¶ms); err != nil { + return nil, fmt.Errorf("%w: %v", jsonrpc2_v2.ErrParse, err) + } + var id jsonrpc2_v2.ID + switch raw := params.ID.(type) { + case float64: + id = jsonrpc2_v2.Int64ID(int64(raw)) + case string: + id = jsonrpc2_v2.StringID(raw) + default: + return nil, fmt.Errorf("%w: invalid ID type %T", jsonrpc2_v2.ErrParse, params.ID) + } + c.Conn.Cancel(id) + return nil, nil +} + +type ForwardBinder struct { + dialer jsonrpc2_v2.Dialer + onBind func(*jsonrpc2_v2.Connection) +} + +func NewForwardBinder(dialer jsonrpc2_v2.Dialer) *ForwardBinder { + return &ForwardBinder{ + dialer: dialer, + } +} + +func (b *ForwardBinder) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) (opts jsonrpc2_v2.ConnectionOptions) { + client := protocol.ClientDispatcherV2(conn) + clientBinder := NewClientBinder(func(context.Context, protocol.Server) protocol.Client { return client }) + + serverConn, err := jsonrpc2_v2.Dial(context.Background(), b.dialer, clientBinder) + if err != nil { + return jsonrpc2_v2.ConnectionOptions{ + Handler: jsonrpc2_v2.HandlerFunc(func(context.Context, *jsonrpc2_v2.Request) (interface{}, error) { + return nil, fmt.Errorf("%w: %v", jsonrpc2_v2.ErrInternal, err) + }), + } + } + + if b.onBind != nil { + b.onBind(serverConn) + } + server := protocol.ServerDispatcherV2(serverConn) + preempter := &Canceler{ + Conn: conn, + } + detached := xcontext.Detach(ctx) + go func() { + conn.Wait() + if err := serverConn.Close(); err != nil { + event.Log(detached, fmt.Sprintf("closing remote connection: %v", err)) + } + }() + return jsonrpc2_v2.ConnectionOptions{ + Handler: 
protocol.ServerHandlerV2(server), + Preempter: preempter, + } +} + +func NewClientBinder(newClient ClientFunc) *clientBinder { + return &clientBinder{newClient} +} + +// A ClientFunc is used to construct an LSP client for a given server. +type ClientFunc func(context.Context, protocol.Server) protocol.Client + +// clientBinder binds an LSP client to an incoming connection. +type clientBinder struct { + newClient ClientFunc +} + +func (b *clientBinder) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { + server := protocol.ServerDispatcherV2(conn) + client := b.newClient(ctx, server) + return jsonrpc2_v2.ConnectionOptions{ + Handler: protocol.ClientHandlerV2(client), + } +} + +// HandlerMiddleware is a middleware that only modifies the jsonrpc2 handler. +type HandlerMiddleware func(jsonrpc2_v2.Handler) jsonrpc2_v2.Handler + +// BindHandler transforms a HandlerMiddleware into a Middleware. +func BindHandler(hmw HandlerMiddleware) Middleware { + return Middleware(func(binder jsonrpc2_v2.Binder) jsonrpc2_v2.Binder { + return BinderFunc(func(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { + opts := binder.Bind(ctx, conn) + opts.Handler = hmw(opts.Handler) + return opts + }) + }) +} + +// The BinderFunc type adapts a bind function to implement the jsonrpc2.Binder +// interface. +type BinderFunc func(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions + +func (f BinderFunc) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { + return f(ctx, conn) +} + +// Middleware defines a transformation of jsonrpc2 Binders, that may be +// composed to build jsonrpc2 servers. 
+type Middleware func(jsonrpc2_v2.Binder) jsonrpc2_v2.Binder + +var GetGoEnv = getGoEnv + +type StreamServer = streamServer diff --git a/gopls/internal/lsprpc/goenv.go b/gopls/internal/lsprpc/goenv.go new file mode 100644 index 00000000000..52ec08ff7eb --- /dev/null +++ b/gopls/internal/lsprpc/goenv.go @@ -0,0 +1,34 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package lsprpc + +import ( + "context" + "encoding/json" + "fmt" + + "golang.org/x/tools/internal/gocommand" +) + +func getGoEnv(ctx context.Context, env map[string]interface{}) (map[string]string, error) { + var runEnv []string + for k, v := range env { + runEnv = append(runEnv, fmt.Sprintf("%s=%s", k, v)) + } + runner := gocommand.Runner{} + output, err := runner.Run(ctx, gocommand.Invocation{ + Verb: "env", + Args: []string{"-json"}, + Env: runEnv, + }) + if err != nil { + return nil, err + } + envmap := make(map[string]string) + if err := json.Unmarshal(output.Bytes(), &envmap); err != nil { + return nil, err + } + return envmap, nil +} diff --git a/gopls/internal/lsprpc/goenv_test.go b/gopls/internal/lsprpc/goenv_test.go new file mode 100644 index 00000000000..6c41540fafb --- /dev/null +++ b/gopls/internal/lsprpc/goenv_test.go @@ -0,0 +1,133 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package lsprpc_test + +import ( + "context" + "encoding/json" + "fmt" + "os" + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" + jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" + "golang.org/x/tools/internal/testenv" + + . 
"golang.org/x/tools/gopls/internal/lsprpc" +) + +func GoEnvMiddleware() (Middleware, error) { + return BindHandler(func(delegate jsonrpc2_v2.Handler) jsonrpc2_v2.Handler { + return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { + if req.Method == "initialize" { + if err := addGoEnvToInitializeRequestV2(ctx, req); err != nil { + event.Error(ctx, "adding go env to initialize", err) + } + } + return delegate.Handle(ctx, req) + }) + }), nil +} + +// This function is almost identical to addGoEnvToInitializeRequest in lsprpc.go. +// Make changes in parallel. +func addGoEnvToInitializeRequestV2(ctx context.Context, req *jsonrpc2_v2.Request) error { + var params protocol.ParamInitialize + if err := json.Unmarshal(req.Params, ¶ms); err != nil { + return err + } + var opts map[string]interface{} + switch v := params.InitializationOptions.(type) { + case nil: + opts = make(map[string]interface{}) + case map[string]interface{}: + opts = v + default: + return fmt.Errorf("unexpected type for InitializationOptions: %T", v) + } + envOpt, ok := opts["env"] + if !ok { + envOpt = make(map[string]interface{}) + } + env, ok := envOpt.(map[string]interface{}) + if !ok { + return fmt.Errorf("env option is %T, expected a map", envOpt) + } + goenv, err := GetGoEnv(ctx, env) + if err != nil { + return err + } + // We don't want to propagate GOWORK unless explicitly set since that could mess with + // path inference during cmd/go invocations, see golang/go#51825. 
+ _, goworkSet := os.LookupEnv("GOWORK") + for govar, value := range goenv { + if govar == "GOWORK" && !goworkSet { + continue + } + env[govar] = value + } + opts["env"] = env + params.InitializationOptions = opts + raw, err := json.Marshal(params) + if err != nil { + return fmt.Errorf("marshaling updated options: %v", err) + } + req.Params = json.RawMessage(raw) + return nil +} + +type initServer struct { + protocol.Server + + params *protocol.ParamInitialize +} + +func (s *initServer) Initialize(ctx context.Context, params *protocol.ParamInitialize) (*protocol.InitializeResult, error) { + s.params = params + return &protocol.InitializeResult{}, nil +} + +func TestGoEnvMiddleware(t *testing.T) { + testenv.NeedsTool(t, "go") + + ctx := context.Background() + + server := &initServer{} + env := new(TestEnv) + defer env.Shutdown(t) + l, _ := env.serve(ctx, t, staticServerBinder(server)) + mw, err := GoEnvMiddleware() + if err != nil { + t.Fatal(err) + } + binder := mw(NewForwardBinder(l.Dialer())) + l, _ = env.serve(ctx, t, binder) + conn := env.dial(ctx, t, l.Dialer(), noopBinder, true) + dispatch := protocol.ServerDispatcherV2(conn) + initParams := &protocol.ParamInitialize{} + initParams.InitializationOptions = map[string]interface{}{ + "env": map[string]interface{}{ + "GONOPROXY": "example.com", + }, + } + if _, err := dispatch.Initialize(ctx, initParams); err != nil { + t.Fatal(err) + } + + if server.params == nil { + t.Fatalf("initialize params are unset") + } + envOpts := server.params.InitializationOptions.(map[string]interface{})["env"].(map[string]interface{}) + + // Check for an arbitrary Go variable. It should be set. + if _, ok := envOpts["GOPRIVATE"]; !ok { + t.Errorf("Go environment variable GOPRIVATE unset in initialization options") + } + // Check that the variable present in our user config was not overwritten. 
+ if got, want := envOpts["GONOPROXY"], "example.com"; got != want { + t.Errorf("GONOPROXY=%q, want %q", got, want) + } +} diff --git a/gopls/internal/lsp/lsprpc/lsprpc.go b/gopls/internal/lsprpc/lsprpc.go similarity index 85% rename from gopls/internal/lsp/lsprpc/lsprpc.go rename to gopls/internal/lsprpc/lsprpc.go index 7dc709291fb..0497612106d 100644 --- a/gopls/internal/lsp/lsprpc/lsprpc.go +++ b/gopls/internal/lsprpc/lsprpc.go @@ -19,12 +19,12 @@ import ( "sync/atomic" "time" - "golang.org/x/tools/gopls/internal/lsp" - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/debug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/debug" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/server" + "golang.org/x/tools/gopls/internal/settings" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/event/tag" "golang.org/x/tools/internal/jsonrpc2" @@ -33,15 +33,15 @@ import ( // Unique identifiers for client/server. var serverIndex int64 -// The StreamServer type is a jsonrpc2.StreamServer that handles incoming +// The streamServer type is a jsonrpc2.streamServer that handles incoming // streams as a new LSP session, using a shared cache. -type StreamServer struct { +type streamServer struct { cache *cache.Cache // daemon controls whether or not to log new connections. daemon bool // optionsOverrides is passed to newly created sessions. - optionsOverrides func(*source.Options) + optionsOverrides func(*settings.Options) // serverForTest may be set to a test fake for testing. serverForTest protocol.Server @@ -50,44 +50,28 @@ type StreamServer struct { // NewStreamServer creates a StreamServer using the shared cache. 
If // withTelemetry is true, each session is instrumented with telemetry that // records RPC statistics. -func NewStreamServer(cache *cache.Cache, daemon bool, optionsFunc func(*source.Options)) *StreamServer { - return &StreamServer{cache: cache, daemon: daemon, optionsOverrides: optionsFunc} -} - -func (s *StreamServer) Binder() *ServerBinder { - newServer := func(ctx context.Context, client protocol.ClientCloser) protocol.Server { - session := cache.NewSession(ctx, s.cache) - server := s.serverForTest - if server == nil { - options := source.DefaultOptions(s.optionsOverrides) - server = lsp.NewServer(session, client, options) - if instance := debug.GetInstance(ctx); instance != nil { - instance.AddService(server, session) - } - } - return server - } - return NewServerBinder(newServer) +func NewStreamServer(cache *cache.Cache, daemon bool, optionsFunc func(*settings.Options)) jsonrpc2.StreamServer { + return &streamServer{cache: cache, daemon: daemon, optionsOverrides: optionsFunc} } // ServeStream implements the jsonrpc2.StreamServer interface, by handling // incoming streams using a new lsp server. -func (s *StreamServer) ServeStream(ctx context.Context, conn jsonrpc2.Conn) error { +func (s *streamServer) ServeStream(ctx context.Context, conn jsonrpc2.Conn) error { client := protocol.ClientDispatcher(conn) session := cache.NewSession(ctx, s.cache) - server := s.serverForTest - if server == nil { - options := source.DefaultOptions(s.optionsOverrides) - server = lsp.NewServer(session, client, options) + svr := s.serverForTest + if svr == nil { + options := settings.DefaultOptions(s.optionsOverrides) + svr = server.New(session, client, options) if instance := debug.GetInstance(ctx); instance != nil { - instance.AddService(server, session) + instance.AddService(svr, session) } } // Clients may or may not send a shutdown message. Make sure the server is // shut down. // TODO(rFindley): this shutdown should perhaps be on a disconnected context. 
defer func() { - if err := server.Shutdown(ctx); err != nil { + if err := svr.Shutdown(ctx); err != nil { event.Error(ctx, "error shutting down", err) } }() @@ -100,7 +84,7 @@ func (s *StreamServer) ServeStream(ctx context.Context, conn jsonrpc2.Conn) erro conn.Go(ctx, protocol.Handlers( handshaker(session, executable, s.daemon, - protocol.ServerHandler(server, + protocol.ServerHandler(svr, jsonrpc2.MethodNotFound)))) if s.daemon { log.Printf("Session %s: connected", session.ID()) @@ -110,14 +94,14 @@ func (s *StreamServer) ServeStream(ctx context.Context, conn jsonrpc2.Conn) erro return conn.Err() } -// A Forwarder is a jsonrpc2.StreamServer that handles an LSP stream by +// A forwarder is a jsonrpc2.StreamServer that handles an LSP stream by // forwarding it to a remote. This is used when the gopls process started by // the editor is in the `-remote` mode, which means it finds and connects to a // separate gopls daemon. In these cases, we still want the forwarder gopls to // be instrumented with telemetry, and want to be able to in some cases hijack // the jsonrpc2 connection with the daemon. -type Forwarder struct { - dialer *AutoDialer +type forwarder struct { + dialer *autoDialer mu sync.Mutex // Hold on to the server connection so that we can redo the handshake if any @@ -126,28 +110,29 @@ type Forwarder struct { serverID string } -// NewForwarder creates a new Forwarder, ready to forward connections to the +// NewForwarder creates a new forwarder (a [jsonrpc2.StreamServer]), +// ready to forward connections to the // remote server specified by rawAddr. If provided and rawAddr indicates an // 'automatic' address (starting with 'auto;'), argFunc may be used to start a // remote server for the auto-discovered address. 
-func NewForwarder(rawAddr string, argFunc func(network, address string) []string) (*Forwarder, error) { - dialer, err := NewAutoDialer(rawAddr, argFunc) +func NewForwarder(rawAddr string, argFunc func(network, address string) []string) (jsonrpc2.StreamServer, error) { + dialer, err := newAutoDialer(rawAddr, argFunc) if err != nil { return nil, err } - fwd := &Forwarder{ + fwd := &forwarder{ dialer: dialer, } return fwd, nil } -// QueryServerState queries the server state of the current server. -func QueryServerState(ctx context.Context, addr string) (*ServerState, error) { +// QueryServerState returns a JSON-encodable struct describing the state of the named server. +func QueryServerState(ctx context.Context, addr string) (any, error) { serverConn, err := dialRemote(ctx, addr) if err != nil { return nil, err } - var state ServerState + var state serverState if err := protocol.Call(ctx, serverConn, sessionsMethod, nil, &state); err != nil { return nil, fmt.Errorf("querying server state: %w", err) } @@ -159,7 +144,7 @@ func QueryServerState(ctx context.Context, addr string) (*ServerState, error) { // or auto://...). func dialRemote(ctx context.Context, addr string) (jsonrpc2.Conn, error) { network, address := ParseAddr(addr) - if network == AutoNetwork { + if network == autoNetwork { gp, err := os.Executable() if err != nil { return nil, fmt.Errorf("getting gopls path: %w", err) @@ -175,7 +160,10 @@ func dialRemote(ctx context.Context, addr string) (jsonrpc2.Conn, error) { return serverConn, nil } -func ExecuteCommand(ctx context.Context, addr string, id string, request, result interface{}) error { +// ExecuteCommand connects to the named server, sends it a +// workspace/executeCommand request (with command 'id' and arguments +// JSON encoded in 'request'), and populates the result variable. 
+func ExecuteCommand(ctx context.Context, addr string, id string, request, result any) error { serverConn, err := dialRemote(ctx, addr) if err != nil { return err @@ -193,7 +181,7 @@ func ExecuteCommand(ctx context.Context, addr string, id string, request, result // ServeStream dials the forwarder remote and binds the remote to serve the LSP // on the incoming stream. -func (f *Forwarder) ServeStream(ctx context.Context, clientConn jsonrpc2.Conn) error { +func (f *forwarder) ServeStream(ctx context.Context, clientConn jsonrpc2.Conn) error { client := protocol.ClientDispatcher(clientConn) netConn, err := f.dialer.dialNet(ctx) @@ -243,7 +231,7 @@ func (f *Forwarder) ServeStream(ctx context.Context, clientConn jsonrpc2.Conn) e } // TODO(rfindley): remove this handshaking in favor of middleware. -func (f *Forwarder) handshake(ctx context.Context) { +func (f *forwarder) handshake(ctx context.Context) { // This call to os.Executable is redundant, and will be eliminated by the // transition to the V2 API. goplsPath, err := os.Executable() @@ -280,7 +268,7 @@ func (f *Forwarder) handshake(ctx context.Context) { } func ConnectToRemote(ctx context.Context, addr string) (net.Conn, error) { - dialer, err := NewAutoDialer(addr, nil) + dialer, err := newAutoDialer(addr, nil) if err != nil { return nil, err } @@ -289,7 +277,7 @@ func ConnectToRemote(ctx context.Context, addr string) (net.Conn, error) { // handler intercepts messages to the daemon to enrich them with local // information. -func (f *Forwarder) handler(handler jsonrpc2.Handler) jsonrpc2.Handler { +func (f *forwarder) handler(handler jsonrpc2.Handler) jsonrpc2.Handler { return func(ctx context.Context, reply jsonrpc2.Replier, r jsonrpc2.Request) error { // Intercept certain messages to add special handling. 
switch r.Method() { @@ -374,7 +362,7 @@ func addGoEnvToInitializeRequest(ctx context.Context, r jsonrpc2.Request) (jsonr return jsonrpc2.NewCall(call.ID(), "initialize", params) } -func (f *Forwarder) replyWithDebugAddress(outerCtx context.Context, r jsonrpc2.Replier, args command.DebuggingArgs) jsonrpc2.Replier { +func (f *forwarder) replyWithDebugAddress(outerCtx context.Context, r jsonrpc2.Replier, args command.DebuggingArgs) jsonrpc2.Replier { di := debug.GetInstance(outerCtx) if di == nil { event.Log(outerCtx, "no debug instance to start") @@ -440,24 +428,24 @@ type handshakeResponse struct { GoplsPath string `json:"goplsPath"` } -// ClientSession identifies a current client LSP session on the server. Note +// clientSession identifies a current client LSP session on the server. Note // that it looks similar to handshakeResposne, but in fact 'Logfile' and // 'DebugAddr' now refer to the client. -type ClientSession struct { +type clientSession struct { SessionID string `json:"sessionID"` Logfile string `json:"logfile"` DebugAddr string `json:"debugAddr"` } -// ServerState holds information about the gopls daemon process, including its +// serverState holds information about the gopls daemon process, including its // debug information and debug information of all of its current connected // clients. 
-type ServerState struct { +type serverState struct { Logfile string `json:"logfile"` DebugAddr string `json:"debugAddr"` GoplsPath string `json:"goplsPath"` CurrentClientID string `json:"currentClientID"` - Clients []ClientSession `json:"clients"` + Clients []clientSession `json:"clients"` } const ( @@ -501,7 +489,7 @@ func handshaker(session *cache.Session, goplsPath string, logHandshakes bool, ha return reply(ctx, resp, nil) case sessionsMethod: - resp := ServerState{ + resp := serverState{ GoplsPath: goplsPath, CurrentClientID: session.ID(), } @@ -509,7 +497,7 @@ func handshaker(session *cache.Session, goplsPath string, logHandshakes bool, ha resp.Logfile = di.Logfile resp.DebugAddr = di.ListenedDebugAddress() for _, c := range di.State.Clients() { - resp.Clients = append(resp.Clients, ClientSession{ + resp.Clients = append(resp.Clients, clientSession{ SessionID: c.Session.ID(), Logfile: c.Logfile, DebugAddr: c.DebugAddress, @@ -535,8 +523,8 @@ func sendError(ctx context.Context, reply jsonrpc2.Replier, err error) { func ParseAddr(listen string) (network string, address string) { // Allow passing just -remote=auto, as a shorthand for using automatic remote // resolution. 
- if listen == AutoNetwork { - return AutoNetwork, "" + if listen == autoNetwork { + return autoNetwork, "" } if parts := strings.SplitN(listen, ";", 2); len(parts) == 2 { return parts[0], parts[1] diff --git a/gopls/internal/lsp/lsprpc/lsprpc_test.go b/gopls/internal/lsprpc/lsprpc_test.go similarity index 95% rename from gopls/internal/lsp/lsprpc/lsprpc_test.go rename to gopls/internal/lsprpc/lsprpc_test.go index 0afe131e89f..1d643bf2095 100644 --- a/gopls/internal/lsp/lsprpc/lsprpc_test.go +++ b/gopls/internal/lsprpc/lsprpc_test.go @@ -13,10 +13,10 @@ import ( "testing" "time" - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/debug" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/debug" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/integration/fake" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/jsonrpc2/servertest" @@ -58,8 +58,8 @@ func TestClientLogging(t *testing.T) { server := PingServer{} client := FakeClient{Logs: make(chan string, 10)} - ctx = debug.WithInstance(ctx, "", "") - ss := NewStreamServer(cache.New(nil), false, nil) + ctx = debug.WithInstance(ctx, "") + ss := NewStreamServer(cache.New(nil), false, nil).(*StreamServer) ss.serverForTest = server ts := servertest.NewPipeServer(ss, nil) defer checkClose(t, ts.Close) @@ -121,8 +121,8 @@ func checkClose(t *testing.T, closer func() error) { func setupForwarding(ctx context.Context, t *testing.T, s protocol.Server) (direct, forwarded servertest.Connector, cleanup func()) { t.Helper() - serveCtx := debug.WithInstance(ctx, "", "") - ss := NewStreamServer(cache.New(nil), false, nil) + serveCtx := debug.WithInstance(ctx, "") + ss := NewStreamServer(cache.New(nil), false, nil).(*StreamServer) ss.serverForTest = s tsDirect := 
servertest.NewTCPServer(serveCtx, ss, nil) @@ -214,8 +214,8 @@ func TestDebugInfoLifecycle(t *testing.T) { baseCtx, cancel := context.WithCancel(context.Background()) defer cancel() - clientCtx := debug.WithInstance(baseCtx, "", "") - serverCtx := debug.WithInstance(baseCtx, "", "") + clientCtx := debug.WithInstance(baseCtx, "") + serverCtx := debug.WithInstance(baseCtx, "") cache := cache.New(nil) ss := NewStreamServer(cache, false, nil) diff --git a/gopls/internal/lsprpc/middleware_test.go b/gopls/internal/lsprpc/middleware_test.go new file mode 100644 index 00000000000..526c7343b78 --- /dev/null +++ b/gopls/internal/lsprpc/middleware_test.go @@ -0,0 +1,223 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package lsprpc_test + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "sync" + "testing" + "time" + + . "golang.org/x/tools/gopls/internal/lsprpc" + "golang.org/x/tools/internal/event" + jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" +) + +var noopBinder = BinderFunc(func(context.Context, *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { + return jsonrpc2_v2.ConnectionOptions{} +}) + +func TestHandshakeMiddleware(t *testing.T) { + sh := &Handshaker{ + metadata: metadata{ + "answer": 42, + }, + } + ctx := context.Background() + env := new(TestEnv) + defer env.Shutdown(t) + l, _ := env.serve(ctx, t, sh.Middleware(noopBinder)) + conn := env.dial(ctx, t, l.Dialer(), noopBinder, false) + ch := &Handshaker{ + metadata: metadata{ + "question": 6 * 9, + }, + } + + check := func(connected bool) error { + clients := sh.Peers() + servers := ch.Peers() + want := 0 + if connected { + want = 1 + } + if got := len(clients); got != want { + return fmt.Errorf("got %d clients on the server, want %d", got, want) + } + if got := len(servers); got != want { + return fmt.Errorf("got %d servers on the client, want %d", got, want) + } + if 
!connected { + return nil + } + client := clients[0] + server := servers[0] + if _, ok := client.Metadata["question"]; !ok { + return errors.New("no client metadata") + } + if _, ok := server.Metadata["answer"]; !ok { + return errors.New("no server metadata") + } + if client.LocalID != server.RemoteID { + return fmt.Errorf("client.LocalID == %d, server.PeerID == %d", client.LocalID, server.RemoteID) + } + if client.RemoteID != server.LocalID { + return fmt.Errorf("client.PeerID == %d, server.LocalID == %d", client.RemoteID, server.LocalID) + } + return nil + } + + if err := check(false); err != nil { + t.Fatalf("before handshake: %v", err) + } + ch.ClientHandshake(ctx, conn) + if err := check(true); err != nil { + t.Fatalf("after handshake: %v", err) + } + conn.Close() + // Wait for up to ~2s for connections to get cleaned up. + delay := 25 * time.Millisecond + for retries := 3; retries >= 0; retries-- { + time.Sleep(delay) + err := check(false) + if err == nil { + return + } + if retries == 0 { + t.Fatalf("after closing connection: %v", err) + } + delay *= 4 + } +} + +// Handshaker handles both server and client handshaking over jsonrpc2 v2. +// To instrument server-side handshaking, use Handshaker.Middleware. +// To instrument client-side handshaking, call +// Handshaker.ClientHandshake for any new client-side connections. +type Handshaker struct { + // metadata will be shared with peers via handshaking. + metadata metadata + + mu sync.Mutex + prevID int64 + peers map[int64]PeerInfo +} + +// metadata holds arbitrary data transferred between jsonrpc2 peers. +type metadata map[string]any + +// PeerInfo holds information about a peering between jsonrpc2 servers. +type PeerInfo struct { + // RemoteID is the identity of the current server on its peer. + RemoteID int64 + + // LocalID is the identity of the peer on the server. + LocalID int64 + + // IsClient reports whether the peer is a client. If false, the peer is a + // server. 
+ IsClient bool + + // Metadata holds arbitrary information provided by the peer. + Metadata metadata +} + +// Peers returns the peer info this handshaker knows about by way of either the +// server-side handshake middleware, or client-side handshakes. +func (h *Handshaker) Peers() []PeerInfo { + h.mu.Lock() + defer h.mu.Unlock() + + var c []PeerInfo + for _, v := range h.peers { + c = append(c, v) + } + return c +} + +// Middleware is a jsonrpc2 middleware function to augment connection binding +// to handle the handshake method, and record disconnections. +func (h *Handshaker) Middleware(inner jsonrpc2_v2.Binder) jsonrpc2_v2.Binder { + return BinderFunc(func(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { + opts := inner.Bind(ctx, conn) + + localID := h.nextID() + info := &PeerInfo{ + RemoteID: localID, + Metadata: h.metadata, + } + + // Wrap the delegated handler to accept the handshake. + delegate := opts.Handler + opts.Handler = jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { + if req.Method == HandshakeMethod { + var peerInfo PeerInfo + if err := json.Unmarshal(req.Params, &peerInfo); err != nil { + return nil, fmt.Errorf("%w: unmarshaling client info: %v", jsonrpc2_v2.ErrInvalidParams, err) + } + peerInfo.LocalID = localID + peerInfo.IsClient = true + h.recordPeer(peerInfo) + return info, nil + } + return delegate.Handle(ctx, req) + }) + + // Record the dropped client. + go h.cleanupAtDisconnect(conn, localID) + + return opts + }) +} + +// ClientHandshake performs a client-side handshake with the server at the +// other end of conn, recording the server's peer info and watching for conn's +// disconnection. 
+func (h *Handshaker) ClientHandshake(ctx context.Context, conn *jsonrpc2_v2.Connection) { + localID := h.nextID() + info := &PeerInfo{ + RemoteID: localID, + Metadata: h.metadata, + } + + call := conn.Call(ctx, HandshakeMethod, info) + var serverInfo PeerInfo + if err := call.Await(ctx, &serverInfo); err != nil { + event.Error(ctx, "performing handshake", err) + return + } + serverInfo.LocalID = localID + h.recordPeer(serverInfo) + + go h.cleanupAtDisconnect(conn, localID) +} + +func (h *Handshaker) nextID() int64 { + h.mu.Lock() + defer h.mu.Unlock() + + h.prevID++ + return h.prevID +} + +func (h *Handshaker) cleanupAtDisconnect(conn *jsonrpc2_v2.Connection, peerID int64) { + conn.Wait() + + h.mu.Lock() + defer h.mu.Unlock() + delete(h.peers, peerID) +} + +func (h *Handshaker) recordPeer(info PeerInfo) { + h.mu.Lock() + defer h.mu.Unlock() + if h.peers == nil { + h.peers = make(map[int64]PeerInfo) + } + h.peers[info.LocalID] = info +} diff --git a/gopls/internal/mod/code_lens.go b/gopls/internal/mod/code_lens.go new file mode 100644 index 00000000000..85d8182e8fe --- /dev/null +++ b/gopls/internal/mod/code_lens.go @@ -0,0 +1,193 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mod + +import ( + "context" + "fmt" + "os" + "path/filepath" + + "golang.org/x/mod/modfile" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" +) + +// LensFuncs returns the supported lensFuncs for go.mod files. 
+func LensFuncs() map[command.Command]golang.LensFunc { + return map[command.Command]golang.LensFunc{ + command.UpgradeDependency: upgradeLenses, + command.Tidy: tidyLens, + command.Vendor: vendorLens, + command.RunGovulncheck: vulncheckLenses, + } +} + +func upgradeLenses(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { + pm, err := snapshot.ParseMod(ctx, fh) + if err != nil || pm.File == nil { + return nil, err + } + uri := fh.URI() + reset, err := command.NewResetGoModDiagnosticsCommand("Reset go.mod diagnostics", command.ResetGoModDiagnosticsArgs{URIArg: command.URIArg{URI: uri}}) + if err != nil { + return nil, err + } + // Put the `Reset go.mod diagnostics` codelens on the module statement. + modrng, err := moduleStmtRange(fh, pm) + if err != nil { + return nil, err + } + lenses := []protocol.CodeLens{{Range: modrng, Command: &reset}} + if len(pm.File.Require) == 0 { + // Nothing to upgrade. + return lenses, nil + } + var requires []string + for _, req := range pm.File.Require { + requires = append(requires, req.Mod.Path) + } + checkUpgrade, err := command.NewCheckUpgradesCommand("Check for upgrades", command.CheckUpgradesArgs{ + URI: uri, + Modules: requires, + }) + if err != nil { + return nil, err + } + upgradeTransitive, err := command.NewUpgradeDependencyCommand("Upgrade transitive dependencies", command.DependencyArgs{ + URI: uri, + AddRequire: false, + GoCmdArgs: []string{"-d", "-u", "-t", "./..."}, + }) + if err != nil { + return nil, err + } + upgradeDirect, err := command.NewUpgradeDependencyCommand("Upgrade direct dependencies", command.DependencyArgs{ + URI: uri, + AddRequire: false, + GoCmdArgs: append([]string{"-d"}, requires...), + }) + if err != nil { + return nil, err + } + + // Put the upgrade code lenses above the first require block or statement. 
+ rng, err := firstRequireRange(fh, pm) + if err != nil { + return nil, err + } + + return append(lenses, []protocol.CodeLens{ + {Range: rng, Command: &checkUpgrade}, + {Range: rng, Command: &upgradeTransitive}, + {Range: rng, Command: &upgradeDirect}, + }...), nil +} + +func tidyLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { + pm, err := snapshot.ParseMod(ctx, fh) + if err != nil || pm.File == nil { + return nil, err + } + uri := fh.URI() + cmd, err := command.NewTidyCommand("Run go mod tidy", command.URIArgs{URIs: []protocol.DocumentURI{uri}}) + if err != nil { + return nil, err + } + rng, err := moduleStmtRange(fh, pm) + if err != nil { + return nil, err + } + return []protocol.CodeLens{{ + Range: rng, + Command: &cmd, + }}, nil +} + +func vendorLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { + pm, err := snapshot.ParseMod(ctx, fh) + if err != nil || pm.File == nil { + return nil, err + } + if len(pm.File.Require) == 0 { + // Nothing to vendor. + return nil, nil + } + rng, err := moduleStmtRange(fh, pm) + if err != nil { + return nil, err + } + title := "Create vendor directory" + uri := fh.URI() + cmd, err := command.NewVendorCommand(title, command.URIArg{URI: uri}) + if err != nil { + return nil, err + } + // Change the message depending on whether or not the module already has a + // vendor directory. 
+ vendorDir := filepath.Join(filepath.Dir(fh.URI().Path()), "vendor") + if info, _ := os.Stat(vendorDir); info != nil && info.IsDir() { + title = "Sync vendor directory" + } + return []protocol.CodeLens{{Range: rng, Command: &cmd}}, nil +} + +func moduleStmtRange(fh file.Handle, pm *cache.ParsedModule) (protocol.Range, error) { + if pm.File == nil || pm.File.Module == nil || pm.File.Module.Syntax == nil { + return protocol.Range{}, fmt.Errorf("no module statement in %s", fh.URI()) + } + syntax := pm.File.Module.Syntax + return pm.Mapper.OffsetRange(syntax.Start.Byte, syntax.End.Byte) +} + +// firstRequireRange returns the range for the first "require" in the given +// go.mod file. This is either a require block or an individual require line. +func firstRequireRange(fh file.Handle, pm *cache.ParsedModule) (protocol.Range, error) { + if len(pm.File.Require) == 0 { + return protocol.Range{}, fmt.Errorf("no requires in the file %s", fh.URI()) + } + var start, end modfile.Position + for _, stmt := range pm.File.Syntax.Stmt { + if b, ok := stmt.(*modfile.LineBlock); ok && len(b.Token) == 1 && b.Token[0] == "require" { + start, end = b.Span() + break + } + } + + firstRequire := pm.File.Require[0].Syntax + if start.Byte == 0 || firstRequire.Start.Byte < start.Byte { + start, end = firstRequire.Start, firstRequire.End + } + return pm.Mapper.OffsetRange(start.Byte, end.Byte) +} + +func vulncheckLenses(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { + pm, err := snapshot.ParseMod(ctx, fh) + if err != nil || pm.File == nil { + return nil, err + } + // Place the codelenses near the module statement. + // A module may not have the require block, + // but vulnerabilities can exist in standard libraries. 
+ uri := fh.URI() + rng, err := moduleStmtRange(fh, pm) + if err != nil { + return nil, err + } + + vulncheck, err := command.NewRunGovulncheckCommand("Run govulncheck", command.VulncheckArgs{ + URI: uri, + Pattern: "./...", + }) + if err != nil { + return nil, err + } + return []protocol.CodeLens{ + {Range: rng, Command: &vulncheck}, + }, nil +} diff --git a/gopls/internal/mod/diagnostics.go b/gopls/internal/mod/diagnostics.go new file mode 100644 index 00000000000..655beedeb27 --- /dev/null +++ b/gopls/internal/mod/diagnostics.go @@ -0,0 +1,560 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package mod provides core features related to go.mod file +// handling for use by Go editors and tools. +package mod + +import ( + "context" + "fmt" + "runtime" + "sort" + "strings" + "sync" + + "golang.org/x/mod/modfile" + "golang.org/x/mod/semver" + "golang.org/x/sync/errgroup" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/vulncheck/govulncheck" + "golang.org/x/tools/internal/event" +) + +// ParseDiagnostics returns diagnostics from parsing the go.mod files in the workspace. +func ParseDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { + ctx, done := event.Start(ctx, "mod.Diagnostics", snapshot.Labels()...) + defer done() + + return collectDiagnostics(ctx, snapshot, ModParseDiagnostics) +} + +// Diagnostics returns diagnostics from running go mod tidy. +func TidyDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { + ctx, done := event.Start(ctx, "mod.Diagnostics", snapshot.Labels()...) 
+ defer done() + + return collectDiagnostics(ctx, snapshot, ModTidyDiagnostics) +} + +// UpgradeDiagnostics returns upgrade diagnostics for the modules in the +// workspace with known upgrades. +func UpgradeDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { + ctx, done := event.Start(ctx, "mod.UpgradeDiagnostics", snapshot.Labels()...) + defer done() + + return collectDiagnostics(ctx, snapshot, ModUpgradeDiagnostics) +} + +// VulnerabilityDiagnostics returns vulnerability diagnostics for the active modules in the +// workspace with known vulnerabilities. +func VulnerabilityDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { + ctx, done := event.Start(ctx, "mod.VulnerabilityDiagnostics", snapshot.Labels()...) + defer done() + + return collectDiagnostics(ctx, snapshot, ModVulnerabilityDiagnostics) +} + +func collectDiagnostics(ctx context.Context, snapshot *cache.Snapshot, diagFn func(context.Context, *cache.Snapshot, file.Handle) ([]*cache.Diagnostic, error)) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { + g, ctx := errgroup.WithContext(ctx) + cpulimit := runtime.GOMAXPROCS(0) + g.SetLimit(cpulimit) + + var mu sync.Mutex + reports := make(map[protocol.DocumentURI][]*cache.Diagnostic) + + for _, uri := range snapshot.View().ModFiles() { + uri := uri + g.Go(func() error { + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return err + } + diagnostics, err := diagFn(ctx, snapshot, fh) + if err != nil { + return err + } + for _, d := range diagnostics { + mu.Lock() + reports[d.URI] = append(reports[fh.URI()], d) + mu.Unlock() + } + return nil + }) + } + + if err := g.Wait(); err != nil { + return nil, err + } + return reports, nil +} + +// ModParseDiagnostics reports diagnostics from parsing the mod file. 
+func ModParseDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (diagnostics []*cache.Diagnostic, err error) { + pm, err := snapshot.ParseMod(ctx, fh) + if err != nil { + if pm == nil || len(pm.ParseErrors) == 0 { + return nil, err + } + return pm.ParseErrors, nil + } + return nil, nil +} + +// ModTidyDiagnostics reports diagnostics from running go mod tidy. +func ModTidyDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]*cache.Diagnostic, error) { + pm, err := snapshot.ParseMod(ctx, fh) // memoized + if err != nil { + return nil, nil // errors reported by ModDiagnostics above + } + + tidied, err := snapshot.ModTidy(ctx, pm) + if err != nil { + if err != cache.ErrNoModOnDisk { + // TODO(rfindley): the check for ErrNoModOnDisk was historically determined + // to be benign, but may date back to the time when the Go command did not + // have overlay support. + // + // See if we can pass the overlay to the Go command, and eliminate this guard.. + event.Error(ctx, fmt.Sprintf("tidy: diagnosing %s", pm.URI), err) + } + return nil, nil + } + return tidied.Diagnostics, nil +} + +// ModUpgradeDiagnostics adds upgrade quick fixes for individual modules if the upgrades +// are recorded in the view. +func ModUpgradeDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (upgradeDiagnostics []*cache.Diagnostic, err error) { + pm, err := snapshot.ParseMod(ctx, fh) + if err != nil { + // Don't return an error if there are parse error diagnostics to be shown, but also do not + // continue since we won't be able to show the upgrade diagnostics. 
+ if pm != nil && len(pm.ParseErrors) != 0 { + return nil, nil + } + return nil, err + } + + upgrades := snapshot.ModuleUpgrades(fh.URI()) + for _, req := range pm.File.Require { + ver, ok := upgrades[req.Mod.Path] + if !ok || req.Mod.Version == ver { + continue + } + rng, err := pm.Mapper.OffsetRange(req.Syntax.Start.Byte, req.Syntax.End.Byte) + if err != nil { + return nil, err + } + // Upgrade to the exact version we offer the user, not the most recent. + title := fmt.Sprintf("%s%v", upgradeCodeActionPrefix, ver) + cmd, err := command.NewUpgradeDependencyCommand(title, command.DependencyArgs{ + URI: fh.URI(), + AddRequire: false, + GoCmdArgs: []string{req.Mod.Path + "@" + ver}, + }) + if err != nil { + return nil, err + } + upgradeDiagnostics = append(upgradeDiagnostics, &cache.Diagnostic{ + URI: fh.URI(), + Range: rng, + Severity: protocol.SeverityInformation, + Source: cache.UpgradeNotification, + Message: fmt.Sprintf("%v can be upgraded", req.Mod.Path), + SuggestedFixes: []cache.SuggestedFix{cache.SuggestedFixFromCommand(cmd, protocol.QuickFix)}, + }) + } + + return upgradeDiagnostics, nil +} + +const upgradeCodeActionPrefix = "Upgrade to " + +// ModVulnerabilityDiagnostics adds diagnostics for vulnerabilities in individual modules +// if the vulnerability is recorded in the view. +func ModVulnerabilityDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (vulnDiagnostics []*cache.Diagnostic, err error) { + pm, err := snapshot.ParseMod(ctx, fh) + if err != nil { + // Don't return an error if there are parse error diagnostics to be shown, but also do not + // continue since we won't be able to show the vulnerability diagnostics. 
+ if pm != nil && len(pm.ParseErrors) != 0 { + return nil, nil + } + return nil, err + } + + diagSource := cache.Govulncheck + vs := snapshot.Vulnerabilities(fh.URI())[fh.URI()] + if vs == nil && snapshot.Options().Vulncheck == settings.ModeVulncheckImports { + vs, err = snapshot.ModVuln(ctx, fh.URI()) + if err != nil { + return nil, err + } + diagSource = cache.Vulncheck + } + if vs == nil || len(vs.Findings) == 0 { + return nil, nil + } + + suggestRunOrResetGovulncheck, err := suggestGovulncheckAction(diagSource == cache.Govulncheck, fh.URI()) + if err != nil { + // must not happen + return nil, err // TODO: bug report + } + vulnsByModule := make(map[string][]*govulncheck.Finding) + + for _, finding := range vs.Findings { + if vuln, typ := foundVuln(finding); typ == vulnCalled || typ == vulnImported { + vulnsByModule[vuln.Module] = append(vulnsByModule[vuln.Module], finding) + } + } + for _, req := range pm.File.Require { + mod := req.Mod.Path + findings := vulnsByModule[mod] + if len(findings) == 0 { + continue + } + // note: req.Syntax is the line corresponding to 'require', which means + // req.Syntax.Start can point to the beginning of the "require" keyword + // for a single line require (e.g. "require golang.org/x/mod v0.0.0"). + start := req.Syntax.Start.Byte + if len(req.Syntax.Token) == 3 { + start += len("require ") + } + rng, err := pm.Mapper.OffsetRange(start, req.Syntax.End.Byte) + if err != nil { + return nil, err + } + // Map affecting vulns to 'warning' level diagnostics, + // others to 'info' level diagnostics. + // Fixes will include only the upgrades for warning level diagnostics. + var warningFixes, infoFixes []cache.SuggestedFix + var warningSet, infoSet = map[string]bool{}, map[string]bool{} + for _, finding := range findings { + // It is possible that the source code was changed since the last + // govulncheck run and information in the `vulns` info is stale. 
+ // For example, imagine that a user is in the middle of updating + // problematic modules detected by the govulncheck run by applying + // quick fixes. Stale diagnostics can be confusing and prevent the + // user from quickly locating the next module to fix. + // Ideally we should rerun the analysis with the updated module + // dependencies or any other code changes, but we are not yet + // in the position of automatically triggering the analysis + // (govulncheck can take a while). We also don't know exactly what + // part of source code was changed since `vulns` was computed. + // As a heuristic, we assume that a user upgrades the affecting + // module to the version with the fix or the latest one, and if the + // version in the require statement is equal to or higher than the + // fixed version, skip generating a diagnostic about the vulnerability. + // Eventually, the user has to rerun govulncheck. + if finding.FixedVersion != "" && semver.IsValid(req.Mod.Version) && semver.Compare(finding.FixedVersion, req.Mod.Version) <= 0 { + continue + } + switch _, typ := foundVuln(finding); typ { + case vulnImported: + infoSet[finding.OSV] = true + case vulnCalled: + warningSet[finding.OSV] = true + } + // Upgrade to the exact version we offer the user, not the most recent. + if fixedVersion := finding.FixedVersion; semver.IsValid(fixedVersion) && semver.Compare(req.Mod.Version, fixedVersion) < 0 { + cmd, err := getUpgradeCodeAction(fh, req, fixedVersion) + if err != nil { + return nil, err // TODO: bug report + } + sf := cache.SuggestedFixFromCommand(cmd, protocol.QuickFix) + switch _, typ := foundVuln(finding); typ { + case vulnImported: + infoFixes = append(infoFixes, sf) + case vulnCalled: + warningFixes = append(warningFixes, sf) + } + } + } + + if len(warningSet) == 0 && len(infoSet) == 0 { + continue + } + // Remove affecting osvs from the non-affecting osv list if any. 
+ if len(warningSet) > 0 { + for k := range infoSet { + if warningSet[k] { + delete(infoSet, k) + } + } + } + // Add an upgrade for module@latest. + // TODO(suzmue): verify if latest is the same as fixedVersion. + latest, err := getUpgradeCodeAction(fh, req, "latest") + if err != nil { + return nil, err // TODO: bug report + } + sf := cache.SuggestedFixFromCommand(latest, protocol.QuickFix) + if len(warningFixes) > 0 { + warningFixes = append(warningFixes, sf) + } + if len(infoFixes) > 0 { + infoFixes = append(infoFixes, sf) + } + if len(warningSet) > 0 { + warning := sortedKeys(warningSet) + warningFixes = append(warningFixes, suggestRunOrResetGovulncheck) + vulnDiagnostics = append(vulnDiagnostics, &cache.Diagnostic{ + URI: fh.URI(), + Range: rng, + Severity: protocol.SeverityWarning, + Source: diagSource, + Message: getVulnMessage(req.Mod.Path, warning, true, diagSource == cache.Govulncheck), + SuggestedFixes: warningFixes, + }) + } + if len(infoSet) > 0 { + info := sortedKeys(infoSet) + infoFixes = append(infoFixes, suggestRunOrResetGovulncheck) + vulnDiagnostics = append(vulnDiagnostics, &cache.Diagnostic{ + URI: fh.URI(), + Range: rng, + Severity: protocol.SeverityInformation, + Source: diagSource, + Message: getVulnMessage(req.Mod.Path, info, false, diagSource == cache.Govulncheck), + SuggestedFixes: infoFixes, + }) + } + } + + // TODO(hyangah): place this diagnostic on the `go` directive or `toolchain` directive + // after https://go.dev/issue/57001. + const diagnoseStdLib = false + + // If diagnosing the stdlib, add standard library vulnerability diagnostics + // on the module declaration. + // + // Only proceed if we have a valid module declaration on which to position + // the diagnostics. + if diagnoseStdLib && pm.File.Module != nil && pm.File.Module.Syntax != nil { + // Add standard library vulnerabilities. 
+ stdlibVulns := vulnsByModule["stdlib"] + if len(stdlibVulns) == 0 { + return vulnDiagnostics, nil + } + + // Put the standard library diagnostic on the module declaration. + rng, err := pm.Mapper.OffsetRange(pm.File.Module.Syntax.Start.Byte, pm.File.Module.Syntax.End.Byte) + if err != nil { + return vulnDiagnostics, nil // TODO: bug report + } + + var warningSet, infoSet = map[string]bool{}, map[string]bool{} + for _, finding := range stdlibVulns { + switch _, typ := foundVuln(finding); typ { + case vulnImported: + infoSet[finding.OSV] = true + case vulnCalled: + warningSet[finding.OSV] = true + } + } + if len(warningSet) > 0 { + warning := sortedKeys(warningSet) + fixes := []cache.SuggestedFix{suggestRunOrResetGovulncheck} + vulnDiagnostics = append(vulnDiagnostics, &cache.Diagnostic{ + URI: fh.URI(), + Range: rng, + Severity: protocol.SeverityWarning, + Source: diagSource, + Message: getVulnMessage("go", warning, true, diagSource == cache.Govulncheck), + SuggestedFixes: fixes, + }) + + // remove affecting osvs from the non-affecting osv list if any. + for k := range infoSet { + if warningSet[k] { + delete(infoSet, k) + } + } + } + if len(infoSet) > 0 { + info := sortedKeys(infoSet) + fixes := []cache.SuggestedFix{suggestRunOrResetGovulncheck} + vulnDiagnostics = append(vulnDiagnostics, &cache.Diagnostic{ + URI: fh.URI(), + Range: rng, + Severity: protocol.SeverityInformation, + Source: diagSource, + Message: getVulnMessage("go", info, false, diagSource == cache.Govulncheck), + SuggestedFixes: fixes, + }) + } + } + + return vulnDiagnostics, nil +} + +type vulnFindingType int + +const ( + vulnUnknown vulnFindingType = iota + vulnCalled + vulnImported + vulnRequired +) + +// foundVuln returns the frame info describing discovered vulnerable symbol/package/module +// and how this vulnerability affects the analyzed package or module. 
+func foundVuln(finding *govulncheck.Finding) (*govulncheck.Frame, vulnFindingType) { + // finding.Trace is sorted from the imported vulnerable symbol to + // the entry point in the callstack. + // If Function is set, then Package must be set. Module will always be set. + // If Function is set it was found in the call graph, otherwise if Package is set + // it was found in the import graph, otherwise it was found in the require graph. + // See the documentation of govulncheck.Finding. + if len(finding.Trace) == 0 { // this shouldn't happen, but just in case... + return nil, vulnUnknown + } + vuln := finding.Trace[0] + if vuln.Package == "" { + return vuln, vulnRequired + } + if vuln.Function == "" { + return vuln, vulnImported + } + return vuln, vulnCalled +} + +func sortedKeys(m map[string]bool) []string { + ret := make([]string, 0, len(m)) + for k := range m { + ret = append(ret, k) + } + sort.Strings(ret) + return ret +} + +// suggestGovulncheckAction returns a code action that suggests either run govulncheck +// for more accurate investigation (if the present vulncheck diagnostics are based on +// analysis less accurate than govulncheck) or reset the existing govulncheck result +// (if the present vulncheck diagnostics are already based on govulncheck run). 
+func suggestGovulncheckAction(fromGovulncheck bool, uri protocol.DocumentURI) (cache.SuggestedFix, error) { + if fromGovulncheck { + resetVulncheck, err := command.NewResetGoModDiagnosticsCommand("Reset govulncheck result", command.ResetGoModDiagnosticsArgs{ + URIArg: command.URIArg{URI: uri}, + DiagnosticSource: string(cache.Govulncheck), + }) + if err != nil { + return cache.SuggestedFix{}, err + } + return cache.SuggestedFixFromCommand(resetVulncheck, protocol.QuickFix), nil + } + vulncheck, err := command.NewRunGovulncheckCommand("Run govulncheck to verify", command.VulncheckArgs{ + URI: uri, + Pattern: "./...", + }) + if err != nil { + return cache.SuggestedFix{}, err + } + return cache.SuggestedFixFromCommand(vulncheck, protocol.QuickFix), nil +} + +func getVulnMessage(mod string, vulns []string, used, fromGovulncheck bool) string { + var b strings.Builder + if used { + switch len(vulns) { + case 1: + fmt.Fprintf(&b, "%v has a vulnerability used in the code: %v.", mod, vulns[0]) + default: + fmt.Fprintf(&b, "%v has vulnerabilities used in the code: %v.", mod, strings.Join(vulns, ", ")) + } + } else { + if fromGovulncheck { + switch len(vulns) { + case 1: + fmt.Fprintf(&b, "%v has a vulnerability %v that is not used in the code.", mod, vulns[0]) + default: + fmt.Fprintf(&b, "%v has known vulnerabilities %v that are not used in the code.", mod, strings.Join(vulns, ", ")) + } + } else { + switch len(vulns) { + case 1: + fmt.Fprintf(&b, "%v has a vulnerability %v.", mod, vulns[0]) + default: + fmt.Fprintf(&b, "%v has known vulnerabilities %v.", mod, strings.Join(vulns, ", ")) + } + } + } + return b.String() +} + +// href returns the url for the vulnerability information. +// Eventually we should retrieve the url embedded in the osv.Entry. +// While vuln.go.dev is under development, this always returns +// the page in pkg.go.dev. 
+func href(vulnID string) string { + return fmt.Sprintf("https://pkg.go.dev/vuln/%s", vulnID) +} + +func getUpgradeCodeAction(fh file.Handle, req *modfile.Require, version string) (protocol.Command, error) { + cmd, err := command.NewUpgradeDependencyCommand(upgradeTitle(version), command.DependencyArgs{ + URI: fh.URI(), + AddRequire: false, + GoCmdArgs: []string{req.Mod.Path + "@" + version}, + }) + if err != nil { + return protocol.Command{}, err + } + return cmd, nil +} + +func upgradeTitle(fixedVersion string) string { + title := fmt.Sprintf("%s%v", upgradeCodeActionPrefix, fixedVersion) + return title +} + +// SelectUpgradeCodeActions takes a list of code actions for a required module +// and returns a more selective list of upgrade code actions, +// where the code actions have been deduped. Code actions unrelated to upgrade +// are deduplicated by the name. +func SelectUpgradeCodeActions(actions []protocol.CodeAction) []protocol.CodeAction { + if len(actions) <= 1 { + return actions // return early if no sorting necessary + } + var versionedUpgrade, latestUpgrade, resetAction protocol.CodeAction + var chosenVersionedUpgrade string + var selected []protocol.CodeAction + + seenTitles := make(map[string]bool) + + for _, action := range actions { + if strings.HasPrefix(action.Title, upgradeCodeActionPrefix) { + if v := getUpgradeVersion(action); v == "latest" && latestUpgrade.Title == "" { + latestUpgrade = action + } else if versionedUpgrade.Title == "" || semver.Compare(v, chosenVersionedUpgrade) > 0 { + chosenVersionedUpgrade = v + versionedUpgrade = action + } + } else if strings.HasPrefix(action.Title, "Reset govulncheck") { + resetAction = action + } else if !seenTitles[action.Command.Title] { + seenTitles[action.Command.Title] = true + selected = append(selected, action) + } + } + if versionedUpgrade.Title != "" { + selected = append(selected, versionedUpgrade) + } + if latestUpgrade.Title != "" { + selected = append(selected, latestUpgrade) + } + 
if resetAction.Title != "" { + selected = append(selected, resetAction) + } + return selected +} + +func getUpgradeVersion(p protocol.CodeAction) string { + return strings.TrimPrefix(p.Title, upgradeCodeActionPrefix) +} diff --git a/gopls/internal/mod/format.go b/gopls/internal/mod/format.go new file mode 100644 index 00000000000..14408393969 --- /dev/null +++ b/gopls/internal/mod/format.go @@ -0,0 +1,32 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mod + +import ( + "context" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/event" +) + +func Format(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.TextEdit, error) { + ctx, done := event.Start(ctx, "mod.Format") + defer done() + + pm, err := snapshot.ParseMod(ctx, fh) + if err != nil { + return nil, err + } + formatted, err := pm.File.Format() + if err != nil { + return nil, err + } + // Calculate the edits to be made due to the change. + diffs := diff.Bytes(pm.Mapper.Content, formatted) + return protocol.EditsFromDiffEdits(pm.Mapper, diffs) +} diff --git a/gopls/internal/mod/hover.go b/gopls/internal/mod/hover.go new file mode 100644 index 00000000000..458c5ce67d5 --- /dev/null +++ b/gopls/internal/mod/hover.go @@ -0,0 +1,380 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package mod + +import ( + "bytes" + "context" + "fmt" + "sort" + "strings" + + "golang.org/x/mod/modfile" + "golang.org/x/mod/semver" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/vulncheck" + "golang.org/x/tools/gopls/internal/vulncheck/govulncheck" + "golang.org/x/tools/gopls/internal/vulncheck/osv" + "golang.org/x/tools/internal/event" +) + +func Hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.Hover, error) { + var found bool + for _, uri := range snapshot.View().ModFiles() { + if fh.URI() == uri { + found = true + break + } + } + + // We only provide hover information for the view's go.mod files. + if !found { + return nil, nil + } + + ctx, done := event.Start(ctx, "mod.Hover") + defer done() + + // Get the position of the cursor. + pm, err := snapshot.ParseMod(ctx, fh) + if err != nil { + return nil, fmt.Errorf("getting modfile handle: %w", err) + } + offset, err := pm.Mapper.PositionOffset(position) + if err != nil { + return nil, fmt.Errorf("computing cursor position: %w", err) + } + + // If the cursor position is on a module statement + if hover, ok := hoverOnModuleStatement(ctx, pm, offset, snapshot, fh); ok { + return hover, nil + } + return hoverOnRequireStatement(ctx, pm, offset, snapshot, fh) +} + +func hoverOnRequireStatement(ctx context.Context, pm *cache.ParsedModule, offset int, snapshot *cache.Snapshot, fh file.Handle) (*protocol.Hover, error) { + // Confirm that the cursor is at the position of a require statement. 
+ var req *modfile.Require + var startOffset, endOffset int + for _, r := range pm.File.Require { + dep := []byte(r.Mod.Path) + s, e := r.Syntax.Start.Byte, r.Syntax.End.Byte + i := bytes.Index(pm.Mapper.Content[s:e], dep) + if i == -1 { + continue + } + // Shift the start position to the location of the + // dependency within the require statement. + startOffset, endOffset = s+i, e + if startOffset <= offset && offset <= endOffset { + req = r + break + } + } + // TODO(hyangah): find position for info about vulnerabilities in Go + + // The cursor position is not on a require statement. + if req == nil { + return nil, nil + } + + // Get the vulnerability info. + fromGovulncheck := true + vs := snapshot.Vulnerabilities(fh.URI())[fh.URI()] + if vs == nil && snapshot.Options().Vulncheck == settings.ModeVulncheckImports { + var err error + vs, err = snapshot.ModVuln(ctx, fh.URI()) + if err != nil { + return nil, err + } + fromGovulncheck = false + } + affecting, nonaffecting, osvs := lookupVulns(vs, req.Mod.Path, req.Mod.Version) + + // Get the `go mod why` results for the given file. + why, err := snapshot.ModWhy(ctx, fh) + if err != nil { + return nil, err + } + explanation, ok := why[req.Mod.Path] + if !ok { + return nil, nil + } + + // Get the range to highlight for the hover. + // TODO(hyangah): adjust the hover range to include the version number + // to match the diagnostics' range. 
+ rng, err := pm.Mapper.OffsetRange(startOffset, endOffset) + if err != nil { + return nil, err + } + options := snapshot.Options() + isPrivate := snapshot.IsGoPrivatePath(req.Mod.Path) + header := formatHeader(req.Mod.Path, options) + explanation = formatExplanation(explanation, req, options, isPrivate) + vulns := formatVulnerabilities(affecting, nonaffecting, osvs, options, fromGovulncheck) + + return &protocol.Hover{ + Contents: protocol.MarkupContent{ + Kind: options.PreferredContentFormat, + Value: header + vulns + explanation, + }, + Range: rng, + }, nil +} + +func hoverOnModuleStatement(ctx context.Context, pm *cache.ParsedModule, offset int, snapshot *cache.Snapshot, fh file.Handle) (*protocol.Hover, bool) { + module := pm.File.Module + if module == nil { + return nil, false // no module stmt + } + if offset < module.Syntax.Start.Byte || offset > module.Syntax.End.Byte { + return nil, false // cursor not in module stmt + } + + rng, err := pm.Mapper.OffsetRange(module.Syntax.Start.Byte, module.Syntax.End.Byte) + if err != nil { + return nil, false + } + fromGovulncheck := true + vs := snapshot.Vulnerabilities(fh.URI())[fh.URI()] + + if vs == nil && snapshot.Options().Vulncheck == settings.ModeVulncheckImports { + vs, err = snapshot.ModVuln(ctx, fh.URI()) + if err != nil { + return nil, false + } + fromGovulncheck = false + } + modpath := "stdlib" + goVersion := snapshot.View().GoVersionString() + affecting, nonaffecting, osvs := lookupVulns(vs, modpath, goVersion) + options := snapshot.Options() + vulns := formatVulnerabilities(affecting, nonaffecting, osvs, options, fromGovulncheck) + + return &protocol.Hover{ + Contents: protocol.MarkupContent{ + Kind: options.PreferredContentFormat, + Value: vulns, + }, + Range: rng, + }, true +} + +func formatHeader(modpath string, options *settings.Options) string { + var b strings.Builder + // Write the heading as an H3. 
+ b.WriteString("#### " + modpath) + if options.PreferredContentFormat == protocol.Markdown { + b.WriteString("\n\n") + } else { + b.WriteRune('\n') + } + return b.String() +} + +func lookupVulns(vulns *vulncheck.Result, modpath, version string) (affecting, nonaffecting []*govulncheck.Finding, osvs map[string]*osv.Entry) { + if vulns == nil || len(vulns.Entries) == 0 { + return nil, nil, nil + } + for _, finding := range vulns.Findings { + vuln, typ := foundVuln(finding) + if vuln.Module != modpath { + continue + } + // It is possible that the source code was changed since the last + // govulncheck run and information in the `vulns` info is stale. + // For example, imagine that a user is in the middle of updating + // problematic modules detected by the govulncheck run by applying + // quick fixes. Stale diagnostics can be confusing and prevent the + // user from quickly locating the next module to fix. + // Ideally we should rerun the analysis with the updated module + // dependencies or any other code changes, but we are not yet + // in the position of automatically triggering the analysis + // (govulncheck can take a while). We also don't know exactly what + // part of source code was changed since `vulns` was computed. + // As a heuristic, we assume that a user upgrades the affecting + // module to the version with the fix or the latest one, and if the + // version in the require statement is equal to or higher than the + // fixed version, skip the vulnerability information in the hover. + // Eventually, the user has to rerun govulncheck. + if finding.FixedVersion != "" && semver.IsValid(version) && semver.Compare(finding.FixedVersion, version) <= 0 { + continue + } + switch typ { + case vulnCalled: + affecting = append(affecting, finding) + case vulnImported: + nonaffecting = append(nonaffecting, finding) + } + } + + // Remove affecting elements from nonaffecting. 
+ // An OSV entry can appear in both lists if an OSV entry covers + // multiple packages imported but not all vulnerable symbols are used. + // The current wording of hover message doesn't clearly + // present this case well IMO, so let's skip reporting nonaffecting. + if len(affecting) > 0 && len(nonaffecting) > 0 { + affectingSet := map[string]bool{} + for _, f := range affecting { + affectingSet[f.OSV] = true + } + n := 0 + for _, v := range nonaffecting { + if !affectingSet[v.OSV] { + nonaffecting[n] = v + n++ + } + } + nonaffecting = nonaffecting[:n] + } + sort.Slice(nonaffecting, func(i, j int) bool { return nonaffecting[i].OSV < nonaffecting[j].OSV }) + sort.Slice(affecting, func(i, j int) bool { return affecting[i].OSV < affecting[j].OSV }) + return affecting, nonaffecting, vulns.Entries +} + +func fixedVersion(fixed string) string { + if fixed == "" { + return "No fix is available." + } + return "Fixed in " + fixed + "." +} + +func formatVulnerabilities(affecting, nonaffecting []*govulncheck.Finding, osvs map[string]*osv.Entry, options *settings.Options, fromGovulncheck bool) string { + if len(osvs) == 0 || (len(affecting) == 0 && len(nonaffecting) == 0) { + return "" + } + byOSV := func(findings []*govulncheck.Finding) map[string][]*govulncheck.Finding { + m := make(map[string][]*govulncheck.Finding) + for _, f := range findings { + m[f.OSV] = append(m[f.OSV], f) + } + return m + } + affectingByOSV := byOSV(affecting) + nonaffectingByOSV := byOSV(nonaffecting) + + // TODO(hyangah): can we use go templates to generate hover messages? + // Then, we can use a different template for markdown case. 
+ useMarkdown := options.PreferredContentFormat == protocol.Markdown + + var b strings.Builder + + if len(affectingByOSV) > 0 { + // TODO(hyangah): make the message more eyecatching (icon/codicon/color) + if len(affectingByOSV) == 1 { + fmt.Fprintf(&b, "\n**WARNING:** Found %d reachable vulnerability.\n", len(affectingByOSV)) + } else { + fmt.Fprintf(&b, "\n**WARNING:** Found %d reachable vulnerabilities.\n", len(affectingByOSV)) + } + } + for id, findings := range affectingByOSV { + fix := fixedVersion(findings[0].FixedVersion) + pkgs := vulnerablePkgsInfo(findings, useMarkdown) + osvEntry := osvs[id] + + if useMarkdown { + fmt.Fprintf(&b, "- [**%v**](%v) %v%v\n%v\n", id, href(id), osvEntry.Summary, pkgs, fix) + } else { + fmt.Fprintf(&b, " - [%v] %v (%v) %v%v\n", id, osvEntry.Summary, href(id), pkgs, fix) + } + } + if len(nonaffecting) > 0 { + if fromGovulncheck { + fmt.Fprintf(&b, "\n**Note:** The project imports packages with known vulnerabilities, but does not call the vulnerable code.\n") + } else { + fmt.Fprintf(&b, "\n**Note:** The project imports packages with known vulnerabilities. 
Use `govulncheck` to check if the project uses vulnerable symbols.\n") + } + } + for k, findings := range nonaffectingByOSV { + fix := fixedVersion(findings[0].FixedVersion) + pkgs := vulnerablePkgsInfo(findings, useMarkdown) + osvEntry := osvs[k] + + if useMarkdown { + fmt.Fprintf(&b, "- [%v](%v) %v%v\n%v\n", k, href(k), osvEntry.Summary, pkgs, fix) + } else { + fmt.Fprintf(&b, " - [%v] %v (%v) %v\n%v\n", k, osvEntry.Summary, href(k), pkgs, fix) + } + } + b.WriteString("\n") + return b.String() +} + +func vulnerablePkgsInfo(findings []*govulncheck.Finding, useMarkdown bool) string { + var b strings.Builder + seen := map[string]bool{} + for _, f := range findings { + p := f.Trace[0].Package + if !seen[p] { + seen[p] = true + if useMarkdown { + b.WriteString("\n * `") + } else { + b.WriteString("\n ") + } + b.WriteString(p) + if useMarkdown { + b.WriteString("`") + } + } + } + return b.String() +} + +func formatExplanation(text string, req *modfile.Require, options *settings.Options, isPrivate bool) string { + text = strings.TrimSuffix(text, "\n") + splt := strings.Split(text, "\n") + length := len(splt) + + var b strings.Builder + + // If the explanation is 2 lines, then it is of the form: + // # golang.org/x/text/encoding + // (main module does not need package golang.org/x/text/encoding) + if length == 2 { + b.WriteString(splt[1]) + return b.String() + } + + imp := splt[length-1] // import path + reference := imp + // See golang/go#36998: don't link to modules matching GOPRIVATE. 
+ if !isPrivate && options.PreferredContentFormat == protocol.Markdown { + target := imp + if strings.ToLower(options.LinkTarget) == "pkg.go.dev" { + target = strings.Replace(target, req.Mod.Path, req.Mod.String(), 1) + } + reference = fmt.Sprintf("[%s](%s)", imp, cache.BuildLink(options.LinkTarget, target, "")) + } + b.WriteString("This module is necessary because " + reference + " is imported in") + + // If the explanation is 3 lines, then it is of the form: + // # golang.org/x/tools + // modtest + // golang.org/x/tools/go/packages + if length == 3 { + msg := fmt.Sprintf(" `%s`.", splt[1]) + b.WriteString(msg) + return b.String() + } + + // If the explanation is more than 3 lines, then it is of the form: + // # golang.org/x/text/language + // rsc.io/quote + // rsc.io/sampler + // golang.org/x/text/language + b.WriteString(":\n```text") + dash := "" + for _, imp := range splt[1 : length-1] { + dash += "-" + b.WriteString("\n" + dash + " " + imp) + } + b.WriteString("\n```") + return b.String() +} diff --git a/gopls/internal/lsp/mod/inlayhint.go b/gopls/internal/mod/inlayhint.go similarity index 77% rename from gopls/internal/lsp/mod/inlayhint.go rename to gopls/internal/mod/inlayhint.go index 4f08dd29bbc..73286be4be6 100644 --- a/gopls/internal/lsp/mod/inlayhint.go +++ b/gopls/internal/mod/inlayhint.go @@ -8,20 +8,24 @@ import ( "fmt" "golang.org/x/mod/modfile" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" ) -func InlayHint(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, rng protocol.Range) ([]protocol.InlayHint, error) { +func InlayHint(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, _ protocol.Range) ([]protocol.InlayHint, error) { // Inlay hints are enabled if the client supports them. 
pm, err := snapshot.ParseMod(ctx, fh) if err != nil { return nil, err } - // Compare the version of the module used in the snapshot's metadata with the - // version requested by the module, in both cases, taking replaces into account. - // Produce an InlayHint when the version is the module is not the one used. + // Compare the version of the module used in the snapshot's + // metadata (i.e. the solution to the MVS constraints computed + // by go list) with the version requested by the module, in + // both cases, taking replaces into account. Produce an + // InlayHint when the version of the module is not the one + // used. replaces := make(map[string]*modfile.Replace) for _, x := range pm.File.Replace { @@ -79,7 +83,7 @@ func genHint(mline *modfile.Line, oldVersion, newVersion string, m *protocol.Map part := protocol.InlayHintLabelPart{ Value: newVersion, Tooltip: &protocol.OrPTooltipPLabel{ - Value: fmt.Sprintf("used metadata's version %s rather than go.mod's version %s", newVersion, oldVersion), + Value: fmt.Sprintf("The build selects version %s rather than go.mod's version %s.", newVersion, oldVersion), }, } rng, err := m.OffsetRange(x, mline.End.Byte) diff --git a/gopls/internal/lsp/progress/progress.go b/gopls/internal/progress/progress.go similarity index 83% rename from gopls/internal/lsp/progress/progress.go rename to gopls/internal/progress/progress.go index 6ccf086df13..0bb17b35669 100644 --- a/gopls/internal/lsp/progress/progress.go +++ b/gopls/internal/progress/progress.go @@ -2,22 +2,36 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +// The progress package defines utilities for reporting the progress +// of long-running operations using features of the LSP client +// interface such as Progress and ShowMessage. 
package progress import ( "context" "fmt" + "io" "math/rand" "strconv" "strings" "sync" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/event/tag" "golang.org/x/tools/internal/xcontext" ) +// NewTracker returns a new Tracker that reports progress to the +// specified client. +func NewTracker(client protocol.Client) *Tracker { + return &Tracker{ + client: client, + inProgress: make(map[protocol.ProgressToken]*WorkDone), + } +} + +// A Tracker reports the progress of a long-running operation to an LSP client. type Tracker struct { client protocol.Client supportsWorkDoneProgress bool @@ -26,27 +40,20 @@ type Tracker struct { inProgress map[protocol.ProgressToken]*WorkDone } -func NewTracker(client protocol.Client) *Tracker { - return &Tracker{ - client: client, - inProgress: make(map[protocol.ProgressToken]*WorkDone), - } -} - -// SetSupportsWorkDoneProgress sets whether the client supports work done +// SetSupportsWorkDoneProgress sets whether the client supports "work done" // progress reporting. It must be set before using the tracker. // // TODO(rfindley): fix this broken initialization pattern. // Also: do we actually need the fall-back progress behavior using ShowMessage? // Surely ShowMessage notifications are too noisy to be worthwhile. -func (tracker *Tracker) SetSupportsWorkDoneProgress(b bool) { - tracker.supportsWorkDoneProgress = b +func (t *Tracker) SetSupportsWorkDoneProgress(b bool) { + t.supportsWorkDoneProgress = b } // SupportsWorkDoneProgress reports whether the tracker supports work done // progress reporting. -func (tracker *Tracker) SupportsWorkDoneProgress() bool { - return tracker.supportsWorkDoneProgress +func (t *Tracker) SupportsWorkDoneProgress() bool { + return t.supportsWorkDoneProgress } // Start notifies the client of work being done on the server. 
It uses either @@ -247,36 +254,38 @@ func (wd *WorkDone) End(ctx context.Context, message string) { } } -// EventWriter writes every incoming []byte to -// event.Print with the operation=generate tag -// to distinguish its logs from others. -type EventWriter struct { - ctx context.Context - operation string +// NewEventWriter returns an [io.Writer] that calls the context's +// event printer for each data payload, wrapping it with the +// operation=generate tag to distinguish its logs from others. +func NewEventWriter(ctx context.Context, operation string) io.Writer { + return &eventWriter{ctx: ctx, operation: operation} } -func NewEventWriter(ctx context.Context, operation string) *EventWriter { - return &EventWriter{ctx: ctx, operation: operation} +type eventWriter struct { + ctx context.Context + operation string } -func (ew *EventWriter) Write(p []byte) (n int, err error) { +func (ew *eventWriter) Write(p []byte) (n int, err error) { event.Log(ew.ctx, string(p), tag.Operation.Of(ew.operation)) return len(p), nil } -// WorkDoneWriter wraps a workDone handle to provide a Writer interface, +// NewWorkDoneWriter wraps a WorkDone handle to provide a Writer interface, // so that workDone reporting can more easily be hooked into commands. -type WorkDoneWriter struct { +func NewWorkDoneWriter(ctx context.Context, wd *WorkDone) io.Writer { + return &workDoneWriter{ctx: ctx, wd: wd} +} + +// workDoneWriter wraps a workDone handle to provide a Writer interface, +// so that workDone reporting can more easily be hooked into commands. +type workDoneWriter struct { // In order to implement the io.Writer interface, we must close over ctx. 
ctx context.Context wd *WorkDone } -func NewWorkDoneWriter(ctx context.Context, wd *WorkDone) *WorkDoneWriter { - return &WorkDoneWriter{ctx: ctx, wd: wd} -} - -func (wdw *WorkDoneWriter) Write(p []byte) (n int, err error) { +func (wdw *workDoneWriter) Write(p []byte) (n int, err error) { wdw.wd.Report(wdw.ctx, string(p), 0) // Don't fail just because of a failure to report progress. return len(p), nil diff --git a/gopls/internal/lsp/progress/progress_test.go b/gopls/internal/progress/progress_test.go similarity index 95% rename from gopls/internal/lsp/progress/progress_test.go rename to gopls/internal/progress/progress_test.go index ef87eba121a..642103ae025 100644 --- a/gopls/internal/lsp/progress/progress_test.go +++ b/gopls/internal/progress/progress_test.go @@ -10,7 +10,7 @@ import ( "sync" "testing" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" ) type fakeClient struct { @@ -63,7 +63,7 @@ func (c *fakeClient) ShowMessage(context.Context, *protocol.ShowMessageParams) e return nil } -func setup(token protocol.ProgressToken) (context.Context, *Tracker, *fakeClient) { +func setup() (context.Context, *Tracker, *fakeClient) { c := &fakeClient{} tracker := NewTracker(c) tracker.SetSupportsWorkDoneProgress(true) @@ -109,7 +109,7 @@ func TestProgressTracker_Reporting(t *testing.T) { } { test := test t.Run(test.name, func(t *testing.T) { - ctx, tracker, client := setup(test.token) + ctx, tracker, client := setup() ctx, cancel := context.WithCancel(ctx) defer cancel() tracker.supportsWorkDoneProgress = test.supported @@ -147,7 +147,7 @@ func TestProgressTracker_Reporting(t *testing.T) { func TestProgressTracker_Cancellation(t *testing.T) { for _, token := range []protocol.ProgressToken{nil, 1, "a"} { - ctx, tracker, _ := setup(token) + ctx, tracker, _ := setup() var canceled bool cancel := func() { canceled = true } work := tracker.Start(ctx, "work", "message", token, cancel) diff --git 
a/gopls/internal/lsp/protocol/codeactionkind.go b/gopls/internal/protocol/codeactionkind.go similarity index 89% rename from gopls/internal/lsp/protocol/codeactionkind.go rename to gopls/internal/protocol/codeactionkind.go index 9a95800fb86..29bc6d44bdb 100644 --- a/gopls/internal/lsp/protocol/codeactionkind.go +++ b/gopls/internal/protocol/codeactionkind.go @@ -8,4 +8,6 @@ package protocol const ( GoTest CodeActionKind = "goTest" // TODO: Add GoGenerate, RegenerateCgo etc. + + GoDoc CodeActionKind = "source.doc" ) diff --git a/gopls/internal/lsp/command/command_gen.go b/gopls/internal/protocol/command/command_gen.go similarity index 90% rename from gopls/internal/lsp/command/command_gen.go rename to gopls/internal/protocol/command/command_gen.go index e54030ceea9..9009a771086 100644 --- a/gopls/internal/lsp/command/command_gen.go +++ b/gopls/internal/protocol/command/command_gen.go @@ -7,7 +7,7 @@ //go:build !generate // +build !generate -// Code generated by generate.go. DO NOT EDIT. +// Code generated by gen.go. DO NOT EDIT. package command @@ -15,9 +15,12 @@ import ( "context" "fmt" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" ) +// Symbolic names for gopls commands, excluding "gopls." prefix. +// These commands may be requested by ExecuteCommand, CodeLens, +// CodeAction, and other LSP requests. 
const ( AddDependency Command = "add_dependency" AddImport Command = "add_import" @@ -25,6 +28,8 @@ const ( ApplyFix Command = "apply_fix" ChangeSignature Command = "change_signature" CheckUpgrades Command = "check_upgrades" + DiagnoseFiles Command = "diagnose_files" + Doc Command = "doc" EditGoDirective Command = "edit_go_directive" FetchVulncheckResult Command = "fetch_vulncheck_result" GCDetails Command = "gc_details" @@ -49,6 +54,7 @@ const ( UpdateGoSum Command = "update_go_sum" UpgradeDependency Command = "upgrade_dependency" Vendor Command = "vendor" + Views Command = "views" WorkspaceStats Command = "workspace_stats" ) @@ -59,6 +65,8 @@ var Commands = []Command{ ApplyFix, ChangeSignature, CheckUpgrades, + DiagnoseFiles, + Doc, EditGoDirective, FetchVulncheckResult, GCDetails, @@ -83,6 +91,7 @@ var Commands = []Command{ UpdateGoSum, UpgradeDependency, Vendor, + Views, WorkspaceStats, } @@ -111,19 +120,31 @@ func Dispatch(ctx context.Context, params *protocol.ExecuteCommandParams, s Inte if err := UnmarshalArgs(params.Arguments, &a0); err != nil { return nil, err } - return nil, s.ApplyFix(ctx, a0) + return s.ApplyFix(ctx, a0) case "gopls.change_signature": var a0 ChangeSignatureArgs if err := UnmarshalArgs(params.Arguments, &a0); err != nil { return nil, err } - return nil, s.ChangeSignature(ctx, a0) + return s.ChangeSignature(ctx, a0) case "gopls.check_upgrades": var a0 CheckUpgradesArgs if err := UnmarshalArgs(params.Arguments, &a0); err != nil { return nil, err } return nil, s.CheckUpgrades(ctx, a0) + case "gopls.diagnose_files": + var a0 DiagnoseFilesArgs + if err := UnmarshalArgs(params.Arguments, &a0); err != nil { + return nil, err + } + return nil, s.DiagnoseFiles(ctx, a0) + case "gopls.doc": + var a0 protocol.Location + if err := UnmarshalArgs(params.Arguments, &a0); err != nil { + return nil, err + } + return nil, s.Doc(ctx, a0) case "gopls.edit_go_directive": var a0 EditGoDirectiveArgs if err := UnmarshalArgs(params.Arguments, &a0); err != nil { 
@@ -262,6 +283,8 @@ func Dispatch(ctx context.Context, params *protocol.ExecuteCommandParams, s Inte return nil, err } return nil, s.Vendor(ctx, a0) + case "gopls.views": + return s.Views(ctx) case "gopls.workspace_stats": return s.WorkspaceStats(ctx) } @@ -340,6 +363,30 @@ func NewCheckUpgradesCommand(title string, a0 CheckUpgradesArgs) (protocol.Comma }, nil } +func NewDiagnoseFilesCommand(title string, a0 DiagnoseFilesArgs) (protocol.Command, error) { + args, err := MarshalArgs(a0) + if err != nil { + return protocol.Command{}, err + } + return protocol.Command{ + Title: title, + Command: "gopls.diagnose_files", + Arguments: args, + }, nil +} + +func NewDocCommand(title string, a0 protocol.Location) (protocol.Command, error) { + args, err := MarshalArgs(a0) + if err != nil { + return protocol.Command{}, err + } + return protocol.Command{ + Title: title, + Command: "gopls.doc", + Arguments: args, + }, nil +} + func NewEditGoDirectiveCommand(title string, a0 EditGoDirectiveArgs) (protocol.Command, error) { args, err := MarshalArgs(a0) if err != nil { @@ -628,6 +675,18 @@ func NewVendorCommand(title string, a0 URIArg) (protocol.Command, error) { }, nil } +func NewViewsCommand(title string) (protocol.Command, error) { + args, err := MarshalArgs() + if err != nil { + return protocol.Command{}, err + } + return protocol.Command{ + Title: title, + Command: "gopls.views", + Arguments: args, + }, nil +} + func NewWorkspaceStatsCommand(title string) (protocol.Command, error) { args, err := MarshalArgs() if err != nil { diff --git a/gopls/internal/lsp/command/commandmeta/meta.go b/gopls/internal/protocol/command/commandmeta/meta.go similarity index 95% rename from gopls/internal/lsp/command/commandmeta/meta.go rename to gopls/internal/protocol/command/commandmeta/meta.go index bf85c4faa9b..f34d5467ad9 100644 --- a/gopls/internal/lsp/command/commandmeta/meta.go +++ b/gopls/internal/protocol/command/commandmeta/meta.go @@ -17,7 +17,8 @@ import ( 
"golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/packages" - "golang.org/x/tools/gopls/internal/lsp/command" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/internal/aliases" ) type Command struct { @@ -52,7 +53,7 @@ func Load() (*packages.Package, []*Command, error) { Mode: packages.NeedTypes | packages.NeedTypesInfo | packages.NeedSyntax | packages.NeedImports | packages.NeedDeps, BuildFlags: []string{"-tags=generate"}, }, - "golang.org/x/tools/gopls/internal/lsp/command", + "golang.org/x/tools/gopls/internal/protocol/command", ) if err != nil { return nil, nil, fmt.Errorf("packages.Load: %v", err) @@ -126,7 +127,7 @@ func (l *fieldLoader) loadMethod(pkg *packages.Package, m *types.Func) (*Command if i == 0 { // Lazy check that the first argument is a context. We could relax this, // but then the generated code gets more complicated. - if named, ok := fld.Type.(*types.Named); !ok || named.Obj().Name() != "Context" || named.Obj().Pkg().Path() != "context" { + if named, ok := aliases.Unalias(fld.Type).(*types.Named); !ok || named.Obj().Name() != "Context" || named.Obj().Pkg().Path() != "context" { return nil, fmt.Errorf("first method parameter must be context.Context") } // Skip the context argument, as it is implied. diff --git a/gopls/internal/lsp/command/gen/gen.go b/gopls/internal/protocol/command/gen/gen.go similarity index 85% rename from gopls/internal/lsp/command/gen/gen.go rename to gopls/internal/protocol/command/gen/gen.go index 9f0453c62cc..1ecfce712cd 100644 --- a/gopls/internal/lsp/command/gen/gen.go +++ b/gopls/internal/protocol/command/gen/gen.go @@ -12,7 +12,7 @@ import ( "go/types" "text/template" - "golang.org/x/tools/gopls/internal/lsp/command/commandmeta" + "golang.org/x/tools/gopls/internal/protocol/command/commandmeta" "golang.org/x/tools/internal/imports" ) @@ -25,7 +25,7 @@ const src = `// Copyright 2021 The Go Authors. All rights reserved. 
//go:build !generate // +build !generate -// Code generated by generate.go. DO NOT EDIT. +// Code generated by gen.go. DO NOT EDIT. package command @@ -35,6 +35,9 @@ import ( {{end}} ) +// Symbolic names for gopls commands, excluding "gopls." prefix. +// These commands may be requested by ExecuteCommand, CodeLens, +// CodeAction, and other LSP requests. const ( {{- range .Commands}} {{.MethodName}} Command = "{{.Name}}" @@ -109,10 +112,10 @@ func Generate() ([]byte, error) { Imports: map[string]bool{ "context": true, "fmt": true, - "golang.org/x/tools/gopls/internal/lsp/protocol": true, + "golang.org/x/tools/gopls/internal/protocol": true, }, } - const thispkg = "golang.org/x/tools/gopls/internal/lsp/command" + const thispkg = "golang.org/x/tools/gopls/internal/protocol/command" for _, c := range d.Commands { for _, arg := range c.Args { pth := pkgPath(arg.Type) @@ -146,8 +149,11 @@ func Generate() ([]byte, error) { } func pkgPath(t types.Type) string { - if n, ok := t.(*types.Named); ok { - if pkg := n.Obj().Pkg(); pkg != nil { + type hasTypeName interface { // *Named or *Alias (or *TypeParam) + Obj() *types.TypeName + } + if t, ok := t.(hasTypeName); ok { + if pkg := t.Obj().Pkg(); pkg != nil { return pkg.Path() } } diff --git a/gopls/internal/protocol/command/generate.go b/gopls/internal/protocol/command/generate.go new file mode 100644 index 00000000000..f63b2e6e5ba --- /dev/null +++ b/gopls/internal/protocol/command/generate.go @@ -0,0 +1,25 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build ignore +// +build ignore + +package main + +import ( + "log" + "os" + + "golang.org/x/tools/gopls/internal/protocol/command/gen" +) + +func main() { + content, err := gen.Generate() + if err != nil { + log.Fatal(err) + } + if err := os.WriteFile("command_gen.go", content, 0644); err != nil { + log.Fatal(err) + } +} diff --git a/gopls/internal/lsp/command/interface.go b/gopls/internal/protocol/command/interface.go similarity index 84% rename from gopls/internal/lsp/command/interface.go rename to gopls/internal/protocol/command/interface.go index 066f16f790f..8acaf52fc23 100644 --- a/gopls/internal/lsp/command/interface.go +++ b/gopls/internal/protocol/command/interface.go @@ -17,7 +17,7 @@ package command import ( "context" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/vulncheck" ) @@ -32,11 +32,19 @@ import ( // 3. The first line of the doc string is special. Everything after the colon // is considered the command 'Title'. // TODO(rFindley): reconsider this -- Title may be unnecessary. +// +// The doc comment on each method is eventually published at +// https://github.com/golang/tools/blob/master/gopls/doc/commands.md, +// so please be consistent in using this form: +// +// Command: Capitalized verb phrase with no period +// +// Longer description here... type Interface interface { // ApplyFix: Apply a fix // // Applies a fix to a region of source code. - ApplyFix(context.Context, ApplyFixArgs) error + ApplyFix(context.Context, ApplyFixArgs) (*protocol.WorkspaceEdit, error) // Test: Run test(s) (legacy) // @@ -55,6 +63,12 @@ type Interface interface { // Runs `go generate` for a given directory. Generate(context.Context, GenerateArgs) error + // Doc: View package documentation. + // + // Opens the Go package documentation page for the current + // package in a browser. 
+ Doc(context.Context, protocol.Location) error + // RegenerateCgo: Regenerate cgo // // Regenerates cgo definitions. @@ -105,7 +119,7 @@ type Interface interface { // Reset diagnostics in the go.mod file of a module. ResetGoModDiagnostics(context.Context, ResetGoModDiagnosticsArgs) error - // GoGetPackage: go get a package + // GoGetPackage: 'go get' a package // // Runs `go get` to fetch a package. GoGetPackage(context.Context, GoGetPackageArgs) error @@ -146,7 +160,7 @@ type Interface interface { // address. StartDebugging(context.Context, DebuggingArgs) (DebuggingResult, error) - // StartProfile: start capturing a profile of gopls' execution. + // StartProfile: Start capturing a profile of gopls' execution // // Start a new pprof profile. Before using the resulting file, profiling must // be stopped with a corresponding call to StopProfile. @@ -155,13 +169,13 @@ type Interface interface { // runner. StartProfile(context.Context, StartProfileArgs) (StartProfileResult, error) - // StopProfile: stop an ongoing profile. + // StopProfile: Stop an ongoing profile // // This command is intended for internal use only, by the gopls benchmark // runner. StopProfile(context.Context, StopProfileArgs) (StopProfileResult, error) - // RunGovulncheck: Run vulncheck. + // RunGovulncheck: Run vulncheck // // Run vulnerability check (`govulncheck`). RunGovulncheck(context.Context, VulncheckArgs) (RunVulncheckResult, error) @@ -171,7 +185,7 @@ type Interface interface { // Fetch the result of latest vulnerability check (`govulncheck`). FetchVulncheckResult(context.Context, URIArg) (map[protocol.DocumentURI]*vulncheck.Result, error) - // MemStats: fetch memory statistics + // MemStats: Fetch memory statistics // // Call runtime.GC multiple times and return memory statistics as reported by // runtime.MemStats. @@ -179,7 +193,7 @@ type Interface interface { // This command is used for benchmarking, and may change in the future. 
MemStats(context.Context) (MemStatsResult, error) - // WorkspaceStats: fetch workspace statistics + // WorkspaceStats: Fetch workspace statistics // // Query statistics about workspace builds, modules, packages, and files. // @@ -187,26 +201,38 @@ type Interface interface { // command. WorkspaceStats(context.Context) (WorkspaceStatsResult, error) - // RunGoWorkCommand: run `go work [args...]`, and apply the resulting go.work - // edits to the current go.work file. + // RunGoWorkCommand: Run `go work [args...]`, and apply the resulting go.work + // edits to the current go.work file RunGoWorkCommand(context.Context, RunGoWorkArgs) error - // AddTelemetryCounters: update the given telemetry counters. + // AddTelemetryCounters: Update the given telemetry counters // // Gopls will prepend "fwd/" to all the counters updated using this command // to avoid conflicts with other counters gopls collects. AddTelemetryCounters(context.Context, AddTelemetryCountersArgs) error - // MaybePromptForTelemetry: checks for the right conditions, and then prompts - // the user to ask if they want to enable Go telemetry uploading. If the user - // responds 'Yes', the telemetry mode is set to "on". + // MaybePromptForTelemetry: Prompt user to enable telemetry + // + // Checks for the right conditions, and then prompts the user + // to ask if they want to enable Go telemetry uploading. If + // the user responds 'Yes', the telemetry mode is set to "on". MaybePromptForTelemetry(context.Context) error - // ChangeSignature: performs a "change signature" refactoring. + // ChangeSignature: Perform a "change signature" refactoring // // This command is experimental, currently only supporting parameter removal. // Its signature will certainly change in the future (pun intended). 
- ChangeSignature(context.Context, ChangeSignatureArgs) error + ChangeSignature(context.Context, ChangeSignatureArgs) (*protocol.WorkspaceEdit, error) + + // DiagnoseFiles: Cause server to publish diagnostics for the specified files. + // + // This command is needed by the 'gopls {check,fix}' CLI subcommands. + DiagnoseFiles(context.Context, DiagnoseFilesArgs) error + + // Views: List current Views on the server. + // + // This command is intended for use by gopls tests only. + Views(context.Context) ([]View, error) } type RunTestsArgs struct { @@ -231,12 +257,22 @@ type GenerateArgs struct { // TODO(rFindley): document the rest of these once the docgen is fleshed out. type ApplyFixArgs struct { - // The fix to apply. + // The name of the fix to apply. + // + // For fixes suggested by analyzers, this is a string constant + // advertised by the analyzer that matches the Category of + // the analysis.Diagnostic with a SuggestedFix containing no edits. + // + // For fixes suggested by code actions, this is a string agreed + // upon by the code action and golang.ApplyFix. Fix string + // The file URI for the document to fix. URI protocol.DocumentURI // The document range to scan for fixes. Range protocol.Range + // Whether to resolve and return the edits. + ResolveEdits bool } type URIArg struct { @@ -410,12 +446,6 @@ type RunVulncheckResult struct { Token protocol.ProgressToken } -type VulncheckResult struct { - Vuln []Vuln - - // TODO: Text string format output? -} - // CallStack models a trace of function calls starting // with a client function or method and ending with a // call to a vulnerable symbol. @@ -432,49 +462,6 @@ type StackEntry struct { Pos protocol.Position // Start position. (0-based. Column is always 0) } -// Vuln models an osv.Entry and representative call stacks. -// TODO: deprecate -type Vuln struct { - // ID is the vulnerability ID (osv.Entry.ID). 
- // https://ossf.github.io/osv-schema/#id-modified-fields - ID string - // Details is the description of the vulnerability (osv.Entry.Details). - // https://ossf.github.io/osv-schema/#summary-details-fields - Details string `json:",omitempty"` - // Aliases are alternative IDs of the vulnerability. - // https://ossf.github.io/osv-schema/#aliases-field - Aliases []string `json:",omitempty"` - - // Symbol is the name of the detected vulnerable function or method. - // Can be empty if the vulnerability exists in required modules, but no vulnerable symbols are used. - Symbol string `json:",omitempty"` - // PkgPath is the package path of the detected Symbol. - // Can be empty if the vulnerability exists in required modules, but no vulnerable packages are used. - PkgPath string `json:",omitempty"` - // ModPath is the module path corresponding to PkgPath. - // TODO: how do we specify standard library's vulnerability? - ModPath string `json:",omitempty"` - - // URL is the URL for more info about the information. - // Either the database specific URL or the one of the URLs - // included in osv.Entry.References. - URL string `json:",omitempty"` - - // Current is the current module version. - CurrentVersion string `json:",omitempty"` - - // Fixed is the minimum module version that contains the fix. - FixedVersion string `json:",omitempty"` - - // Example call stacks. - CallStacks []CallStack `json:",omitempty"` - - // Short description of each call stack in CallStacks. - CallStackSummaries []string `json:",omitempty"` - - // TODO: import graph & module graph. -} - // MemStatsResult holds selected fields from runtime.MemStats. type MemStatsResult struct { HeapAlloc uint64 @@ -529,4 +516,19 @@ type AddTelemetryCountersArgs struct { // ChangeSignatureArgs specifies a "change signature" refactoring to perform. type ChangeSignatureArgs struct { RemoveParameter protocol.Location + // Whether to resolve and return the edits. 
+ ResolveEdits bool +} + +// DiagnoseFilesArgs specifies a set of files for which diagnostics are wanted. +type DiagnoseFilesArgs struct { + Files []protocol.DocumentURI +} + +// A View holds summary information about a cache.View. +type View struct { + Type string // view type (via cache.ViewType.String) + Root protocol.DocumentURI // root dir of the view (e.g. containing go.mod or go.work) + Folder protocol.DocumentURI // workspace folder associated with the view + EnvOverlay []string // environment variable overrides } diff --git a/gopls/internal/lsp/command/interface_test.go b/gopls/internal/protocol/command/interface_test.go similarity index 92% rename from gopls/internal/lsp/command/interface_test.go rename to gopls/internal/protocol/command/interface_test.go index f81a2aa22fd..4ddc5fa2e67 100644 --- a/gopls/internal/lsp/command/interface_test.go +++ b/gopls/internal/protocol/command/interface_test.go @@ -9,7 +9,7 @@ import ( "testing" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/command/gen" + "golang.org/x/tools/gopls/internal/protocol/command/gen" "golang.org/x/tools/internal/testenv" ) diff --git a/gopls/internal/lsp/command/util.go b/gopls/internal/protocol/command/util.go similarity index 100% rename from gopls/internal/lsp/command/util.go rename to gopls/internal/protocol/command/util.go diff --git a/gopls/internal/protocol/context.go b/gopls/internal/protocol/context.go new file mode 100644 index 00000000000..5f3151cda97 --- /dev/null +++ b/gopls/internal/protocol/context.go @@ -0,0 +1,65 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package protocol + +import ( + "bytes" + "context" + "sync" + + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/core" + "golang.org/x/tools/internal/event/export" + "golang.org/x/tools/internal/event/label" + "golang.org/x/tools/internal/xcontext" +) + +type contextKey int + +const ( + clientKey = contextKey(iota) +) + +func WithClient(ctx context.Context, client Client) context.Context { + return context.WithValue(ctx, clientKey, client) +} + +func LogEvent(ctx context.Context, ev core.Event, lm label.Map, mt MessageType) context.Context { + client, ok := ctx.Value(clientKey).(Client) + if !ok { + return ctx + } + buf := &bytes.Buffer{} + p := export.Printer{} + p.WriteEvent(buf, ev, lm) + msg := &LogMessageParams{Type: mt, Message: buf.String()} + // Handle messages generated via event.Error, which won't have a level Label. + if event.IsError(ev) { + msg.Type = Error + } + + // The background goroutine lives forever once started, + // and ensures log messages are sent in order (#61216). + startLogSenderOnce.Do(func() { + go func() { + for f := range logQueue { + f() + } + }() + }) + + // Add the log item to a queue, rather than sending a + // window/logMessage request to the client synchronously, + // which would slow down this thread. + ctx2 := xcontext.Detach(ctx) + logQueue <- func() { client.LogMessage(ctx2, msg) } + + return ctx +} + +var ( + startLogSenderOnce sync.Once + logQueue = make(chan func(), 100) // big enough for a large transient burst +) diff --git a/gopls/internal/lsp/protocol/doc.go b/gopls/internal/protocol/doc.go similarity index 100% rename from gopls/internal/lsp/protocol/doc.go rename to gopls/internal/protocol/doc.go diff --git a/gopls/internal/protocol/edits.go b/gopls/internal/protocol/edits.go new file mode 100644 index 00000000000..53fd4cf94e3 --- /dev/null +++ b/gopls/internal/protocol/edits.go @@ -0,0 +1,128 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package protocol + +import ( + "fmt" + + "golang.org/x/tools/internal/diff" +) + +// EditsFromDiffEdits converts diff.Edits to a non-nil slice of LSP TextEdits. +// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textEditArray +func EditsFromDiffEdits(m *Mapper, edits []diff.Edit) ([]TextEdit, error) { + // LSP doesn't require TextEditArray to be sorted: + // this is the receiver's concern. But govim, and perhaps + // other clients have historically relied on the order. + edits = append([]diff.Edit(nil), edits...) + diff.SortEdits(edits) + + result := make([]TextEdit, len(edits)) + for i, edit := range edits { + rng, err := m.OffsetRange(edit.Start, edit.End) + if err != nil { + return nil, err + } + result[i] = TextEdit{ + Range: rng, + NewText: edit.New, + } + } + return result, nil +} + +// EditsToDiffEdits converts LSP TextEdits to diff.Edits. +// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textEditArray +func EditsToDiffEdits(m *Mapper, edits []TextEdit) ([]diff.Edit, error) { + if edits == nil { + return nil, nil + } + result := make([]diff.Edit, len(edits)) + for i, edit := range edits { + start, end, err := m.RangeOffsets(edit.Range) + if err != nil { + return nil, err + } + result[i] = diff.Edit{ + Start: start, + End: end, + New: edit.NewText, + } + } + return result, nil +} + +// ApplyEdits applies the patch (edits) to m.Content and returns the result. +// It also returns the edits converted to diff-package form. 
+func ApplyEdits(m *Mapper, edits []TextEdit) ([]byte, []diff.Edit, error) { + diffEdits, err := EditsToDiffEdits(m, edits) + if err != nil { + return nil, nil, err + } + out, err := diff.ApplyBytes(m.Content, diffEdits) + return out, diffEdits, err +} + +// AsTextEdits converts a slice possibly containing AnnotatedTextEdits +// to a slice of TextEdits. +func AsTextEdits(edits []Or_TextDocumentEdit_edits_Elem) []TextEdit { + var result []TextEdit + for _, e := range edits { + var te TextEdit + if x, ok := e.Value.(AnnotatedTextEdit); ok { + te = x.TextEdit + } else if x, ok := e.Value.(TextEdit); ok { + te = x + } else { + panic(fmt.Sprintf("unexpected type %T, expected AnnotatedTextEdit or TextEdit", e.Value)) + } + result = append(result, te) + } + return result +} + +// AsAnnotatedTextEdits converts a slice of TextEdits +// to a slice of Or_TextDocumentEdit_edits_Elem. +// (returning a typed nil is required in server: in code_action.go and command.go)) +func AsAnnotatedTextEdits(edits []TextEdit) []Or_TextDocumentEdit_edits_Elem { + if edits == nil { + return []Or_TextDocumentEdit_edits_Elem{} + } + var result []Or_TextDocumentEdit_edits_Elem + for _, e := range edits { + result = append(result, Or_TextDocumentEdit_edits_Elem{ + Value: TextEdit{ + Range: e.Range, + NewText: e.NewText, + }, + }) + } + return result +} + +// TextEditsToDocumentChanges converts a set of edits within the +// specified (versioned) file to a singleton list of DocumentChanges +// (as required for a WorkspaceEdit). +func TextEditsToDocumentChanges(uri DocumentURI, version int32, edits []TextEdit) []DocumentChanges { + return []DocumentChanges{{ + TextDocumentEdit: &TextDocumentEdit{ + TextDocument: OptionalVersionedTextDocumentIdentifier{ + Version: version, + TextDocumentIdentifier: TextDocumentIdentifier{URI: uri}, + }, + Edits: AsAnnotatedTextEdits(edits), + }, + }} +} + +// TextDocumentEditsToDocumentChanges wraps each TextDocumentEdit in a DocumentChange. 
+func TextDocumentEditsToDocumentChanges(edits []TextDocumentEdit) []DocumentChanges { + changes := []DocumentChanges{} // non-nil + for _, edit := range edits { + edit := edit + changes = append(changes, DocumentChanges{TextDocumentEdit: &edit}) + } + return changes +} diff --git a/gopls/internal/lsp/protocol/enums.go b/gopls/internal/protocol/enums.go similarity index 86% rename from gopls/internal/lsp/protocol/enums.go rename to gopls/internal/protocol/enums.go index 87c14d8d553..e3f8b515542 100644 --- a/gopls/internal/lsp/protocol/enums.go +++ b/gopls/internal/protocol/enums.go @@ -117,7 +117,7 @@ func init() { namesTextDocumentSaveReason[int(FocusOut)] = "FocusOut" } -func formatEnum(f fmt.State, c rune, i int, names []string, unknown string) { +func formatEnum(f fmt.State, i int, names []string, unknown string) { s := "" if i >= 0 && i < len(names) { s = names[i] @@ -130,45 +130,45 @@ func formatEnum(f fmt.State, c rune, i int, names []string, unknown string) { } func (e TextDocumentSyncKind) Format(f fmt.State, c rune) { - formatEnum(f, c, int(e), namesTextDocumentSyncKind[:], "TextDocumentSyncKind") + formatEnum(f, int(e), namesTextDocumentSyncKind[:], "TextDocumentSyncKind") } func (e MessageType) Format(f fmt.State, c rune) { - formatEnum(f, c, int(e), namesMessageType[:], "MessageType") + formatEnum(f, int(e), namesMessageType[:], "MessageType") } func (e FileChangeType) Format(f fmt.State, c rune) { - formatEnum(f, c, int(e), namesFileChangeType[:], "FileChangeType") + formatEnum(f, int(e), namesFileChangeType[:], "FileChangeType") } func (e CompletionTriggerKind) Format(f fmt.State, c rune) { - formatEnum(f, c, int(e), namesCompletionTriggerKind[:], "CompletionTriggerKind") + formatEnum(f, int(e), namesCompletionTriggerKind[:], "CompletionTriggerKind") } func (e DiagnosticSeverity) Format(f fmt.State, c rune) { - formatEnum(f, c, int(e), namesDiagnosticSeverity[:], "DiagnosticSeverity") + formatEnum(f, int(e), namesDiagnosticSeverity[:], 
"DiagnosticSeverity") } func (e DiagnosticTag) Format(f fmt.State, c rune) { - formatEnum(f, c, int(e), namesDiagnosticTag[:], "DiagnosticTag") + formatEnum(f, int(e), namesDiagnosticTag[:], "DiagnosticTag") } func (e CompletionItemKind) Format(f fmt.State, c rune) { - formatEnum(f, c, int(e), namesCompletionItemKind[:], "CompletionItemKind") + formatEnum(f, int(e), namesCompletionItemKind[:], "CompletionItemKind") } func (e InsertTextFormat) Format(f fmt.State, c rune) { - formatEnum(f, c, int(e), namesInsertTextFormat[:], "InsertTextFormat") + formatEnum(f, int(e), namesInsertTextFormat[:], "InsertTextFormat") } func (e DocumentHighlightKind) Format(f fmt.State, c rune) { - formatEnum(f, c, int(e), namesDocumentHighlightKind[:], "DocumentHighlightKind") + formatEnum(f, int(e), namesDocumentHighlightKind[:], "DocumentHighlightKind") } func (e SymbolKind) Format(f fmt.State, c rune) { - formatEnum(f, c, int(e), namesSymbolKind[:], "SymbolKind") + formatEnum(f, int(e), namesSymbolKind[:], "SymbolKind") } func (e TextDocumentSaveReason) Format(f fmt.State, c rune) { - formatEnum(f, c, int(e), namesTextDocumentSaveReason[:], "TextDocumentSaveReason") + formatEnum(f, int(e), namesTextDocumentSaveReason[:], "TextDocumentSaveReason") } diff --git a/gopls/internal/lsp/protocol/generate/README.md b/gopls/internal/protocol/generate/README.md similarity index 100% rename from gopls/internal/lsp/protocol/generate/README.md rename to gopls/internal/protocol/generate/README.md diff --git a/gopls/internal/protocol/generate/generate.go b/gopls/internal/protocol/generate/generate.go new file mode 100644 index 00000000000..7418918f51f --- /dev/null +++ b/gopls/internal/protocol/generate/generate.go @@ -0,0 +1,118 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package main + +import ( + "bytes" + "fmt" + "log" + "strings" +) + +// a newType is a type that needs a name and a definition +// These are the various types that the json specification doesn't name +type newType struct { + name string + properties Properties // for struct/literal types + items []*Type // for other types ("and", "tuple") + line int + kind string // Or, And, Tuple, Lit, Map + typ *Type +} + +func generateDoc(out *bytes.Buffer, doc string) { + if doc == "" { + return + } + + if !strings.Contains(doc, "\n") { + fmt.Fprintf(out, "// %s\n", doc) + return + } + var list bool + for _, line := range strings.Split(doc, "\n") { + // Lists in metaModel.json start with a dash. + // To make a go doc list they have to be preceded + // by a blank line, and indented. + // (see type TextDccumentFilter in protocol.go) + if len(line) > 0 && line[0] == '-' { + if !list { + list = true + fmt.Fprintf(out, "//\n") + } + fmt.Fprintf(out, "// %s\n", line) + } else { + if len(line) == 0 { + list = false + } + fmt.Fprintf(out, "// %s\n", line) + } + } +} + +// decide if a property is optional, and if it needs a * +// return ",omitempty" if it is optional, and "*" if it needs a pointer +func propStar(name string, t NameType, gotype string) (string, string) { + var opt, star string + if t.Optional { + star = "*" + opt = ",omitempty" + } + if strings.HasPrefix(gotype, "[]") || strings.HasPrefix(gotype, "map[") { + star = "" // passed by reference, so no need for * + } else { + switch gotype { + case "bool", "uint32", "int32", "string", "interface{}": + star = "" // gopls compatibility if t.Optional + } + } + ostar, oopt := star, opt + if newStar, ok := goplsStar[prop{name, t.Name}]; ok { + switch newStar { + case nothing: + star, opt = "", "" + case wantStar: + star, opt = "*", "" + case wantOpt: + star, opt = "", ",omitempty" + case wantOptStar: + star, opt = "*", ",omitempty" + } + if star == ostar && opt == oopt { // no change + log.Printf("goplsStar[ {%q, %q} ](%d) 
useless %s/%s %s/%s", name, t.Name, t.Line, ostar, star, oopt, opt) + } + usedGoplsStar[prop{name, t.Name}] = true + } + + return opt, star +} + +func goName(s string) string { + // Go naming conventions + if strings.HasSuffix(s, "Id") { + s = s[:len(s)-len("Id")] + "ID" + } else if strings.HasSuffix(s, "Uri") { + s = s[:len(s)-3] + "URI" + } else if s == "uri" { + s = "URI" + } else if s == "id" { + s = "ID" + } + + // renames for temporary GOPLS compatibility + if news := goplsType[s]; news != "" { + usedGoplsType[s] = true + s = news + } + // Names beginning _ are not exported + if strings.HasPrefix(s, "_") { + s = strings.Replace(s, "_", "X", 1) + } + if s != "string" { // base types are unchanged (textDocuemnt/diagnostic) + // Title is deprecated, but a) s is only one word, b) replacement is too heavy-weight + s = strings.Title(s) + } + return s +} diff --git a/gopls/internal/protocol/generate/main.go b/gopls/internal/protocol/generate/main.go new file mode 100644 index 00000000000..dc3a6c8fbd4 --- /dev/null +++ b/gopls/internal/protocol/generate/main.go @@ -0,0 +1,377 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The generate command generates Go declarations from VSCode's +// description of the Language Server Protocol. +// +// To run it, type 'go generate' in the parent (protocol) directory. +package main + +// see https://github.com/golang/go/issues/61217 for discussion of an issue + +import ( + "bytes" + "encoding/json" + "flag" + "fmt" + "go/format" + "log" + "os" + "os/exec" + "path/filepath" + "strings" +) + +const vscodeRepo = "https://github.com/microsoft/vscode-languageserver-node" + +// lspGitRef names a branch or tag in vscodeRepo. +// It implicitly determines the protocol version of the LSP used by gopls. +// For example, tag release/protocol/3.17.3 of the repo defines protocol version 3.17.0. 
+// (Point releases are reflected in the git tag version even when they are cosmetic +// and don't change the protocol.) +var lspGitRef = "release/protocol/3.17.6-next.2" + +var ( + repodir = flag.String("d", "", "directory containing clone of "+vscodeRepo) + outputdir = flag.String("o", ".", "output directory") + // PJW: not for real code + cmpdir = flag.String("c", "", "directory of earlier code") + doboth = flag.String("b", "", "generate and compare") + lineNumbers = flag.Bool("l", false, "add line numbers to generated output") +) + +func main() { + log.SetFlags(log.Lshortfile) // log file name and line number, not time + flag.Parse() + + processinline() +} + +func processinline() { + // A local repository may be specified during debugging. + // The default behavior is to download the canonical version. + if *repodir == "" { + tmpdir, err := os.MkdirTemp("", "") + if err != nil { + log.Fatal(err) + } + defer os.RemoveAll(tmpdir) // ignore error + + // Clone the repository. + cmd := exec.Command("git", "clone", "--quiet", "--depth=1", "-c", "advice.detachedHead=false", vscodeRepo, "--branch="+lspGitRef, "--single-branch", tmpdir) + cmd.Stdout = os.Stderr + cmd.Stderr = os.Stderr + if err := cmd.Run(); err != nil { + log.Fatal(err) + } + + *repodir = tmpdir + } else { + lspGitRef = fmt.Sprintf("(not git, local dir %s)", *repodir) + } + + model := parse(filepath.Join(*repodir, "protocol/metaModel.json")) + + findTypeNames(model) + generateOutput(model) + + fileHdr = fileHeader(model) + + // write the files + writeclient() + writeserver() + writeprotocol() + writejsons() + + checkTables() +} + +// common file header for output files +var fileHdr string + +func writeclient() { + out := new(bytes.Buffer) + fmt.Fprintln(out, fileHdr) + out.WriteString( + `import ( + "context" + + "golang.org/x/tools/internal/jsonrpc2" +) +`) + out.WriteString("type Client interface {\n") + for _, k := range cdecls.keys() { + out.WriteString(cdecls[k]) + } + out.WriteString("}\n\n") + 
out.WriteString(`func clientDispatch(ctx context.Context, client Client, reply jsonrpc2.Replier, r jsonrpc2.Request) (bool, error) { + defer recoverHandlerPanic(r.Method()) + switch r.Method() { +`) + for _, k := range ccases.keys() { + out.WriteString(ccases[k]) + } + out.WriteString(("\tdefault:\n\t\treturn false, nil\n\t}\n}\n\n")) + for _, k := range cfuncs.keys() { + out.WriteString(cfuncs[k]) + } + + x, err := format.Source(out.Bytes()) + if err != nil { + os.WriteFile("/tmp/a.go", out.Bytes(), 0644) + log.Fatalf("tsclient.go: %v", err) + } + + if err := os.WriteFile(filepath.Join(*outputdir, "tsclient.go"), x, 0644); err != nil { + log.Fatalf("%v writing tsclient.go", err) + } +} + +func writeserver() { + out := new(bytes.Buffer) + fmt.Fprintln(out, fileHdr) + out.WriteString( + `import ( + "context" + + "golang.org/x/tools/internal/jsonrpc2" +) +`) + out.WriteString("type Server interface {\n") + for _, k := range sdecls.keys() { + out.WriteString(sdecls[k]) + } + out.WriteString(` +} + +func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, r jsonrpc2.Request) (bool, error) { + defer recoverHandlerPanic(r.Method()) + switch r.Method() { +`) + for _, k := range scases.keys() { + out.WriteString(scases[k]) + } + out.WriteString(("\tdefault:\n\t\treturn false, nil\n\t}\n}\n\n")) + for _, k := range sfuncs.keys() { + out.WriteString(sfuncs[k]) + } + x, err := format.Source(out.Bytes()) + if err != nil { + os.WriteFile("/tmp/a.go", out.Bytes(), 0644) + log.Fatalf("tsserver.go: %v", err) + } + + if err := os.WriteFile(filepath.Join(*outputdir, "tsserver.go"), x, 0644); err != nil { + log.Fatalf("%v writing tsserver.go", err) + } +} + +func writeprotocol() { + out := new(bytes.Buffer) + fmt.Fprintln(out, fileHdr) + out.WriteString("import \"encoding/json\"\n\n") + + // The followiing are unneeded, but make the new code a superset of the old + hack := func(newer, existing string) { + if _, ok := types[existing]; !ok { + 
log.Fatalf("types[%q] not found", existing) + } + types[newer] = strings.Replace(types[existing], existing, newer, 1) + } + hack("ConfigurationParams", "ParamConfiguration") + hack("InitializeParams", "ParamInitialize") + hack("PreviousResultId", "PreviousResultID") + hack("WorkspaceFoldersServerCapabilities", "WorkspaceFolders5Gn") + hack("_InitializeParams", "XInitializeParams") + + for _, k := range types.keys() { + if k == "WatchKind" { + types[k] = "type WatchKind = uint32" // strict gopls compatibility needs the '=' + } + out.WriteString(types[k]) + } + + out.WriteString("\nconst (\n") + for _, k := range consts.keys() { + out.WriteString(consts[k]) + } + out.WriteString(")\n\n") + x, err := format.Source(out.Bytes()) + if err != nil { + os.WriteFile("/tmp/a.go", out.Bytes(), 0644) + log.Fatalf("tsprotocol.go: %v", err) + } + if err := os.WriteFile(filepath.Join(*outputdir, "tsprotocol.go"), x, 0644); err != nil { + log.Fatalf("%v writing tsprotocol.go", err) + } +} + +func writejsons() { + out := new(bytes.Buffer) + fmt.Fprintln(out, fileHdr) + out.WriteString("import \"encoding/json\"\n\n") + out.WriteString("import \"fmt\"\n") + + out.WriteString(` +// UnmarshalError indicates that a JSON value did not conform to +// one of the expected cases of an LSP union type. 
+type UnmarshalError struct { + msg string +} + +func (e UnmarshalError) Error() string { + return e.msg +} +`) + + for _, k := range jsons.keys() { + out.WriteString(jsons[k]) + } + x, err := format.Source(out.Bytes()) + if err != nil { + os.WriteFile("/tmp/a.go", out.Bytes(), 0644) + log.Fatalf("tsjson.go: %v", err) + } + if err := os.WriteFile(filepath.Join(*outputdir, "tsjson.go"), x, 0644); err != nil { + log.Fatalf("%v writing tsjson.go", err) + } +} + +// create the common file header for the output files +func fileHeader(model Model) string { + fname := filepath.Join(*repodir, ".git", "HEAD") + buf, err := os.ReadFile(fname) + if err != nil { + log.Fatal(err) + } + buf = bytes.TrimSpace(buf) + var githash string + if len(buf) == 40 { + githash = string(buf[:40]) + } else if bytes.HasPrefix(buf, []byte("ref: ")) { + fname = filepath.Join(*repodir, ".git", string(buf[5:])) + buf, err = os.ReadFile(fname) + if err != nil { + log.Fatal(err) + } + githash = string(buf[:40]) + } else { + log.Fatalf("githash cannot be recovered from %s", fname) + } + + format := `// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated for LSP. DO NOT EDIT. + +package protocol + +// Code generated from %[1]s at ref %[2]s (hash %[3]s). +// %[4]s/blob/%[2]s/%[1]s +// LSP metaData.version = %[5]s. + +` + return fmt.Sprintf(format, + "protocol/metaModel.json", // 1 + lspGitRef, // 2 + githash, // 3 + vscodeRepo, // 4 + model.Version.Version) // 5 +} + +func parse(fname string) Model { + buf, err := os.ReadFile(fname) + if err != nil { + log.Fatal(err) + } + buf = addLineNumbers(buf) + var model Model + if err := json.Unmarshal(buf, &model); err != nil { + log.Fatal(err) + } + return model +} + +// Type.Value has to be treated specially for literals and maps +func (t *Type) UnmarshalJSON(data []byte) error { + // First unmarshal only the unambiguous fields. 
+ var x struct { + Kind string `json:"kind"` + Items []*Type `json:"items"` + Element *Type `json:"element"` + Name string `json:"name"` + Key *Type `json:"key"` + Value any `json:"value"` + Line int `json:"line"` + } + if err := json.Unmarshal(data, &x); err != nil { + return err + } + *t = Type{ + Kind: x.Kind, + Items: x.Items, + Element: x.Element, + Name: x.Name, + Value: x.Value, + Line: x.Line, + } + + // Then unmarshal the 'value' field based on the kind. + // This depends on Unmarshal ignoring fields it doesn't know about. + switch x.Kind { + case "map": + var x struct { + Key *Type `json:"key"` + Value *Type `json:"value"` + } + if err := json.Unmarshal(data, &x); err != nil { + return fmt.Errorf("Type.kind=map: %v", err) + } + t.Key = x.Key + t.Value = x.Value + + case "literal": + var z struct { + Value ParseLiteral `json:"value"` + } + + if err := json.Unmarshal(data, &z); err != nil { + return fmt.Errorf("Type.kind=literal: %v", err) + } + t.Value = z.Value + + case "base", "reference", "array", "and", "or", "tuple", + "stringLiteral": + // no-op. never seen integerLiteral or booleanLiteral. 
+ + default: + return fmt.Errorf("cannot decode Type.kind %q: %s", x.Kind, data) + } + return nil +} + +// which table entries were not used +func checkTables() { + for k := range disambiguate { + if !usedDisambiguate[k] { + log.Printf("disambiguate[%v] unused", k) + } + } + for k := range renameProp { + if !usedRenameProp[k] { + log.Printf("renameProp {%q, %q} unused", k[0], k[1]) + } + } + for k := range goplsStar { + if !usedGoplsStar[k] { + log.Printf("goplsStar {%q, %q} unused", k[0], k[1]) + } + } + for k := range goplsType { + if !usedGoplsType[k] { + log.Printf("unused goplsType[%q]->%s", k, goplsType[k]) + } + } +} diff --git a/gopls/internal/protocol/generate/main_test.go b/gopls/internal/protocol/generate/main_test.go new file mode 100644 index 00000000000..73c22048a80 --- /dev/null +++ b/gopls/internal/protocol/generate/main_test.go @@ -0,0 +1,116 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package main + +import ( + "encoding/json" + "fmt" + "log" + "os" + "testing" +) + +// These tests require the result of +//"git clone https://github.com/microsoft/vscode-languageserver-node" in the HOME directory + +// this is not a test, but a way to get code coverage, +// (in vscode, just run the test with "go.coverOnSingleTest": true) +func TestAll(t *testing.T) { + t.Skip("needs vscode-languageserver-node repository") + *lineNumbers = true + log.SetFlags(log.Lshortfile) + main() +} + +// check that the parsed file includes all the information +// from the json file. This test will fail if the spec +// introduces new fields. (one can test this test by +// commenting out the version field in Model.) 
+func TestParseContents(t *testing.T) { + t.Skip("needs vscode-languageserver-node repository") + log.SetFlags(log.Lshortfile) + + // compute our parse of the specification + dir := os.Getenv("HOME") + "/vscode-languageserver-node" + fname := dir + "/protocol/metaModel.json" + v := parse(fname) + out, err := json.Marshal(v) + if err != nil { + t.Fatal(err) + } + var our interface{} + if err := json.Unmarshal(out, &our); err != nil { + t.Fatal(err) + } + + // process the json file + buf, err := os.ReadFile(fname) + if err != nil { + t.Fatalf("could not read metaModel.json: %v", err) + } + var raw interface{} + if err := json.Unmarshal(buf, &raw); err != nil { + t.Fatal(err) + } + + // convert to strings showing the fields + them := flatten(raw) + us := flatten(our) + + // everything in them should be in us + lesser := make(sortedMap[bool]) + for _, s := range them { + lesser[s] = true + } + greater := make(sortedMap[bool]) // set of fields we have + for _, s := range us { + greater[s] = true + } + for _, k := range lesser.keys() { // set if fields they have + if !greater[k] { + t.Errorf("missing %s", k) + } + } +} + +// flatten(nil) = "nil" +// flatten(v string) = fmt.Sprintf("%q", v) +// flatten(v float64)= fmt.Sprintf("%g", v) +// flatten(v bool) = fmt.Sprintf("%v", v) +// flatten(v []any) = []string{"[0]"flatten(v[0]), "[1]"flatten(v[1]), ...} +// flatten(v map[string]any) = {"key1": flatten(v["key1"]), "key2": flatten(v["key2"]), ...} +func flatten(x any) []string { + switch v := x.(type) { + case nil: + return []string{"nil"} + case string: + return []string{fmt.Sprintf("%q", v)} + case float64: + return []string{fmt.Sprintf("%g", v)} + case bool: + return []string{fmt.Sprintf("%v", v)} + case []any: + var ans []string + for i, x := range v { + idx := fmt.Sprintf("[%.3d]", i) + for _, s := range flatten(x) { + ans = append(ans, idx+s) + } + } + return ans + case map[string]any: + var ans []string + for k, x := range v { + idx := fmt.Sprintf("%q:", k) + for _, s 
:= range flatten(x) { + ans = append(ans, idx+s) + } + } + return ans + default: + log.Fatalf("unexpected type %T", x) + return nil + } +} diff --git a/gopls/internal/lsp/protocol/generate/output.go b/gopls/internal/protocol/generate/output.go similarity index 96% rename from gopls/internal/lsp/protocol/generate/output.go rename to gopls/internal/protocol/generate/output.go index df7cc9c8257..47608626b82 100644 --- a/gopls/internal/lsp/protocol/generate/output.go +++ b/gopls/internal/protocol/generate/output.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.19 -// +build go1.19 - package main import ( @@ -101,7 +98,7 @@ func genCase(method string, param, result *Type, dir string) { nm = "ParamConfiguration" // gopls compatibility } fmt.Fprintf(out, "\t\tvar params %s\n", nm) - fmt.Fprintf(out, "\t\tif err := json.Unmarshal(r.Params(), ¶ms); err != nil {\n") + fmt.Fprintf(out, "\t\tif err := UnmarshalJSON(r.Params(), ¶ms); err != nil {\n") fmt.Fprintf(out, "\t\t\treturn true, sendParseError(ctx, reply, err)\n\t\t}\n") p = ", ¶ms" } @@ -115,6 +112,7 @@ func genCase(method string, param, result *Type, dir string) { fmt.Fprintf(out, "\t\terr := %%s.%s(ctx%s)\n", fname, p) out.WriteString("\t\treturn true, reply(ctx, nil, err)\n") } + out.WriteString("\n") msg := out.String() switch dir { case "clientToServer": @@ -238,13 +236,12 @@ func genStructs(model Model) { out.WriteString("}\n") types[nm] = out.String() } - // base types - types["DocumentURI"] = "type DocumentURI string\n" - types["URI"] = "type URI = string\n" + // base types + // (For URI and DocumentURI, see ../uri.go.) 
types["LSPAny"] = "type LSPAny = interface{}\n" // A special case, the only previously existing Or type - types["DocumentDiagnosticReport"] = "type DocumentDiagnosticReport = Or_DocumentDiagnosticReport // (alias) line 13909\n" + types["DocumentDiagnosticReport"] = "type DocumentDiagnosticReport = Or_DocumentDiagnosticReport // (alias) \n" } @@ -275,7 +272,7 @@ func genAliases(model Model) { continue // renamed the type, e.g., "DocumentDiagnosticReport", an or-type to "string" } tp := goplsName(ta.Type) - fmt.Fprintf(out, "type %s = %s // (alias) line %d\n", nm, tp, ta.Line) + fmt.Fprintf(out, "type %s = %s // (alias)\n", nm, tp) types[nm] = out.String() } } @@ -370,7 +367,6 @@ func genMarshal() { } sort.Strings(names) var buf bytes.Buffer - fmt.Fprintf(&buf, "// from line %d\n", nt.line) fmt.Fprintf(&buf, "func (t %s) MarshalJSON() ([]byte, error) {\n", nm) buf.WriteString("\tswitch x := t.Value.(type){\n") for _, nmx := range names { diff --git a/gopls/internal/protocol/generate/tables.go b/gopls/internal/protocol/generate/tables.go new file mode 100644 index 00000000000..632242cae3a --- /dev/null +++ b/gopls/internal/protocol/generate/tables.go @@ -0,0 +1,275 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package main + +import "log" + +// prop combines the name of a property with the name of the structure it is in. +type prop [2]string + +const ( + nothing = iota + wantStar + wantOpt + wantOptStar +) + +// goplsStar records the optionality of each field in the protocol. +// The comments are vague hints as to why removing the line is not trivial. 
+// A.B.C.D means that one of B or C would change to a pointer +// so a test or initialization would be needed +var goplsStar = map[prop]int{ + {"AnnotatedTextEdit", "annotationId"}: wantOptStar, + {"ClientCapabilities", "textDocument"}: wantOpt, // A.B.C.D at fake/editor.go:255 + {"ClientCapabilities", "window"}: wantOpt, // test failures + {"ClientCapabilities", "workspace"}: wantOpt, // test failures + {"CodeAction", "kind"}: wantOpt, // A.B.C.D + + {"CodeActionClientCapabilities", "codeActionLiteralSupport"}: wantOpt, // test failures + + {"CompletionClientCapabilities", "completionItem"}: wantOpt, // A.B.C.D + {"CompletionClientCapabilities", "insertTextMode"}: wantOpt, // A.B.C.D + {"CompletionItem", "kind"}: wantOpt, // need temporary variables + {"CompletionParams", "context"}: wantOpt, // needs nil checks + + {"Diagnostic", "severity"}: wantOpt, // nil checks or more careful thought + {"DidSaveTextDocumentParams", "text"}: wantOptStar, // capabilities_test.go:112 logic + {"DocumentHighlight", "kind"}: wantOpt, // need temporary variables + {"Hover", "range"}: wantOpt, // complex expressions + {"InlayHint", "kind"}: wantOpt, // temporary variables + + {"TextDocumentClientCapabilities", "codeAction"}: wantOpt, // A.B.C.D + {"TextDocumentClientCapabilities", "completion"}: wantOpt, // A.B.C.D + {"TextDocumentClientCapabilities", "documentSymbol"}: wantOpt, // A.B.C.D + {"TextDocumentClientCapabilities", "publishDiagnostics"}: wantOpt, //A.B.C.D + {"TextDocumentClientCapabilities", "semanticTokens"}: wantOpt, // A.B.C.D + {"TextDocumentContentChangePartial", "range"}: wantOptStar, // == nil test + {"TextDocumentSyncOptions", "change"}: wantOpt, // &constant + {"WorkDoneProgressParams", "workDoneToken"}: wantOpt, // test failures + {"WorkspaceClientCapabilities", "didChangeConfiguration"}: wantOpt, // A.B.C.D + {"WorkspaceClientCapabilities", "didChangeWatchedFiles"}: wantOpt, // A.B.C.D +} + +// keep track of which entries in goplsStar are used +var 
usedGoplsStar = make(map[prop]bool) + +// For gopls compatibility, use a different, typically more restrictive, type for some fields. +var renameProp = map[prop]string{ + {"CancelParams", "id"}: "interface{}", + {"Command", "arguments"}: "[]json.RawMessage", + {"CompletionItem", "textEdit"}: "TextEdit", + {"CodeAction", "data"}: "json.RawMessage", // delay unmarshalling commands + {"Diagnostic", "code"}: "interface{}", + {"Diagnostic", "data"}: "json.RawMessage", // delay unmarshalling quickfixes + + {"DocumentDiagnosticReportPartialResult", "relatedDocuments"}: "map[DocumentURI]interface{}", + + {"ExecuteCommandParams", "arguments"}: "[]json.RawMessage", + {"FoldingRange", "kind"}: "string", + {"Hover", "contents"}: "MarkupContent", + {"InlayHint", "label"}: "[]InlayHintLabelPart", + + {"RelatedFullDocumentDiagnosticReport", "relatedDocuments"}: "map[DocumentURI]interface{}", + {"RelatedUnchangedDocumentDiagnosticReport", "relatedDocuments"}: "map[DocumentURI]interface{}", + + // PJW: this one is tricky. 
+ {"ServerCapabilities", "codeActionProvider"}: "interface{}", + + {"ServerCapabilities", "inlayHintProvider"}: "interface{}", + // slightly tricky + {"ServerCapabilities", "renameProvider"}: "interface{}", + // slightly tricky + {"ServerCapabilities", "semanticTokensProvider"}: "interface{}", + // slightly tricky + {"ServerCapabilities", "textDocumentSync"}: "interface{}", + {"TextDocumentSyncOptions", "save"}: "SaveOptions", + {"WorkspaceEdit", "documentChanges"}: "[]DocumentChanges", +} + +// which entries of renameProp were used +var usedRenameProp = make(map[prop]bool) + +type adjust struct { + prefix, suffix string +} + +// disambiguate specifies prefixes or suffixes to add to all values of +// some enum types to avoid name conflicts +var disambiguate = map[string]adjust{ + "CodeActionTriggerKind": {"CodeAction", ""}, + "CompletionItemKind": {"", "Completion"}, + "CompletionItemTag": {"Compl", ""}, + "DiagnosticSeverity": {"Severity", ""}, + "DocumentDiagnosticReportKind": {"Diagnostic", ""}, + "FileOperationPatternKind": {"", "Pattern"}, + "InlineCompletionTriggerKind": {"Inline", ""}, + "InsertTextFormat": {"", "TextFormat"}, + "LanguageKind": {"Lang", ""}, + "SemanticTokenModifiers": {"Mod", ""}, + "SemanticTokenTypes": {"", "Type"}, + "SignatureHelpTriggerKind": {"Sig", ""}, + "SymbolTag": {"", "Symbol"}, + "WatchKind": {"Watch", ""}, +} + +// which entries of disambiguate got used +var usedDisambiguate = make(map[string]bool) + +// for gopls compatibility, replace generated type names with existing ones +var goplsType = map[string]string{ + "And_RegOpt_textDocument_colorPresentation": "WorkDoneProgressOptionsAndTextDocumentRegistrationOptions", + "ConfigurationParams": "ParamConfiguration", + "DocumentDiagnosticParams": "string", + "DocumentDiagnosticReport": "string", + "DocumentUri": "DocumentURI", + "InitializeParams": "ParamInitialize", + "LSPAny": "interface{}", + + "Lit_SemanticTokensOptions_range_Item1": "PRangeESemanticTokensOptions", + + 
"Or_Declaration": "[]Location", + "Or_DidChangeConfigurationRegistrationOptions_section": "OrPSection_workspace_didChangeConfiguration", + "Or_InlayHintLabelPart_tooltip": "OrPTooltipPLabel", + "Or_InlayHint_tooltip": "OrPTooltip_textDocument_inlayHint", + "Or_LSPAny": "interface{}", + + "Or_ParameterInformation_documentation": "string", + "Or_ParameterInformation_label": "string", + "Or_PrepareRenameResult": "PrepareRenamePlaceholder", + "Or_ProgressToken": "interface{}", + "Or_Result_textDocument_completion": "CompletionList", + "Or_Result_textDocument_declaration": "Or_textDocument_declaration", + "Or_Result_textDocument_definition": "[]Location", + "Or_Result_textDocument_documentSymbol": "[]interface{}", + "Or_Result_textDocument_implementation": "[]Location", + "Or_Result_textDocument_semanticTokens_full_delta": "interface{}", + "Or_Result_textDocument_typeDefinition": "[]Location", + "Or_Result_workspace_symbol": "[]SymbolInformation", + "Or_TextDocumentContentChangeEvent": "TextDocumentContentChangePartial", + "Or_RelativePattern_baseUri": "DocumentURI", + + "Or_WorkspaceFoldersServerCapabilities_changeNotifications": "string", + "Or_WorkspaceSymbol_location": "OrPLocation_workspace_symbol", + + "Tuple_ParameterInformation_label_Item1": "UIntCommaUInt", + "WorkspaceFoldersServerCapabilities": "WorkspaceFolders5Gn", + "[]LSPAny": "[]interface{}", + + "[]Or_Result_textDocument_codeAction_Item0_Elem": "[]CodeAction", + "[]PreviousResultId": "[]PreviousResultID", + "[]uinteger": "[]uint32", + "boolean": "bool", + "decimal": "float64", + "integer": "int32", + "map[DocumentUri][]TextEdit": "map[DocumentURI][]TextEdit", + "uinteger": "uint32", +} + +var usedGoplsType = make(map[string]bool) + +// methodNames is a map from the method to the name of the function that handles it +var methodNames = map[string]string{ + "$/cancelRequest": "CancelRequest", + "$/logTrace": "LogTrace", + "$/progress": "Progress", + "$/setTrace": "SetTrace", + 
"callHierarchy/incomingCalls": "IncomingCalls", + "callHierarchy/outgoingCalls": "OutgoingCalls", + "client/registerCapability": "RegisterCapability", + "client/unregisterCapability": "UnregisterCapability", + "codeAction/resolve": "ResolveCodeAction", + "codeLens/resolve": "ResolveCodeLens", + "completionItem/resolve": "ResolveCompletionItem", + "documentLink/resolve": "ResolveDocumentLink", + "exit": "Exit", + "initialize": "Initialize", + "initialized": "Initialized", + "inlayHint/resolve": "Resolve", + "notebookDocument/didChange": "DidChangeNotebookDocument", + "notebookDocument/didClose": "DidCloseNotebookDocument", + "notebookDocument/didOpen": "DidOpenNotebookDocument", + "notebookDocument/didSave": "DidSaveNotebookDocument", + "shutdown": "Shutdown", + "telemetry/event": "Event", + "textDocument/codeAction": "CodeAction", + "textDocument/codeLens": "CodeLens", + "textDocument/colorPresentation": "ColorPresentation", + "textDocument/completion": "Completion", + "textDocument/declaration": "Declaration", + "textDocument/definition": "Definition", + "textDocument/diagnostic": "Diagnostic", + "textDocument/didChange": "DidChange", + "textDocument/didClose": "DidClose", + "textDocument/didOpen": "DidOpen", + "textDocument/didSave": "DidSave", + "textDocument/documentColor": "DocumentColor", + "textDocument/documentHighlight": "DocumentHighlight", + "textDocument/documentLink": "DocumentLink", + "textDocument/documentSymbol": "DocumentSymbol", + "textDocument/foldingRange": "FoldingRange", + "textDocument/formatting": "Formatting", + "textDocument/hover": "Hover", + "textDocument/implementation": "Implementation", + "textDocument/inlayHint": "InlayHint", + "textDocument/inlineCompletion": "InlineCompletion", + "textDocument/inlineValue": "InlineValue", + "textDocument/linkedEditingRange": "LinkedEditingRange", + "textDocument/moniker": "Moniker", + "textDocument/onTypeFormatting": "OnTypeFormatting", + "textDocument/prepareCallHierarchy": "PrepareCallHierarchy", 
+ "textDocument/prepareRename": "PrepareRename", + "textDocument/prepareTypeHierarchy": "PrepareTypeHierarchy", + "textDocument/publishDiagnostics": "PublishDiagnostics", + "textDocument/rangeFormatting": "RangeFormatting", + "textDocument/rangesFormatting": "RangesFormatting", + "textDocument/references": "References", + "textDocument/rename": "Rename", + "textDocument/selectionRange": "SelectionRange", + "textDocument/semanticTokens/full": "SemanticTokensFull", + "textDocument/semanticTokens/full/delta": "SemanticTokensFullDelta", + "textDocument/semanticTokens/range": "SemanticTokensRange", + "textDocument/signatureHelp": "SignatureHelp", + "textDocument/typeDefinition": "TypeDefinition", + "textDocument/willSave": "WillSave", + "textDocument/willSaveWaitUntil": "WillSaveWaitUntil", + "typeHierarchy/subtypes": "Subtypes", + "typeHierarchy/supertypes": "Supertypes", + "window/logMessage": "LogMessage", + "window/showDocument": "ShowDocument", + "window/showMessage": "ShowMessage", + "window/showMessageRequest": "ShowMessageRequest", + "window/workDoneProgress/cancel": "WorkDoneProgressCancel", + "window/workDoneProgress/create": "WorkDoneProgressCreate", + "workspace/applyEdit": "ApplyEdit", + "workspace/codeLens/refresh": "CodeLensRefresh", + "workspace/configuration": "Configuration", + "workspace/diagnostic": "DiagnosticWorkspace", + "workspace/diagnostic/refresh": "DiagnosticRefresh", + "workspace/didChangeConfiguration": "DidChangeConfiguration", + "workspace/didChangeWatchedFiles": "DidChangeWatchedFiles", + "workspace/didChangeWorkspaceFolders": "DidChangeWorkspaceFolders", + "workspace/didCreateFiles": "DidCreateFiles", + "workspace/didDeleteFiles": "DidDeleteFiles", + "workspace/didRenameFiles": "DidRenameFiles", + "workspace/executeCommand": "ExecuteCommand", + "workspace/foldingRange/refresh": "FoldingRangeRefresh", + "workspace/inlayHint/refresh": "InlayHintRefresh", + "workspace/inlineValue/refresh": "InlineValueRefresh", + 
"workspace/semanticTokens/refresh": "SemanticTokensRefresh", + "workspace/symbol": "Symbol", + "workspace/willCreateFiles": "WillCreateFiles", + "workspace/willDeleteFiles": "WillDeleteFiles", + "workspace/willRenameFiles": "WillRenameFiles", + "workspace/workspaceFolders": "WorkspaceFolders", + "workspaceSymbol/resolve": "ResolveWorkspaceSymbol", +} + +func methodName(method string) string { + ans := methodNames[method] + if ans == "" { + log.Fatalf("unknown method %q", method) + } + return ans +} diff --git a/gopls/internal/lsp/protocol/generate/typenames.go b/gopls/internal/protocol/generate/typenames.go similarity index 99% rename from gopls/internal/lsp/protocol/generate/typenames.go rename to gopls/internal/protocol/generate/typenames.go index 8bacdd2a1cf..83f25a010a0 100644 --- a/gopls/internal/lsp/protocol/generate/typenames.go +++ b/gopls/internal/protocol/generate/typenames.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.19 -// +build go1.19 - package main import ( diff --git a/gopls/internal/protocol/generate/types.go b/gopls/internal/protocol/generate/types.go new file mode 100644 index 00000000000..0537748eb5b --- /dev/null +++ b/gopls/internal/protocol/generate/types.go @@ -0,0 +1,167 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package main + +import ( + "fmt" + "sort" +) + +// Model contains the parsed version of the spec +type Model struct { + Version Metadata `json:"metaData"` + Requests []*Request `json:"requests"` + Notifications []*Notification `json:"notifications"` + Structures []*Structure `json:"structures"` + Enumerations []*Enumeration `json:"enumerations"` + TypeAliases []*TypeAlias `json:"typeAliases"` + Line int `json:"line"` +} + +// Metadata is information about the version of the spec +type Metadata struct { + Version string `json:"version"` + Line int `json:"line"` +} + +// A Request is the parsed version of an LSP request +type Request struct { + Documentation string `json:"documentation"` + ErrorData *Type `json:"errorData"` + Direction string `json:"messageDirection"` + Method string `json:"method"` + Params *Type `json:"params"` + PartialResult *Type `json:"partialResult"` + Proposed bool `json:"proposed"` + RegistrationMethod string `json:"registrationMethod"` + RegistrationOptions *Type `json:"registrationOptions"` + Result *Type `json:"result"` + Since string `json:"since"` + Line int `json:"line"` +} + +// A Notificatin is the parsed version of an LSP notification +type Notification struct { + Documentation string `json:"documentation"` + Direction string `json:"messageDirection"` + Method string `json:"method"` + Params *Type `json:"params"` + Proposed bool `json:"proposed"` + RegistrationMethod string `json:"registrationMethod"` + RegistrationOptions *Type `json:"registrationOptions"` + Since string `json:"since"` + Line int `json:"line"` +} + +// A Structure is the parsed version of an LSP structure from the spec +type Structure struct { + Documentation string `json:"documentation"` + Extends []*Type `json:"extends"` + Mixins []*Type `json:"mixins"` + Name string `json:"name"` + Properties []NameType `json:"properties"` + Proposed bool `json:"proposed"` + Since string `json:"since"` + Line int `json:"line"` +} + +// An enumeration is the parsed version of 
an LSP enumeration from the spec +type Enumeration struct { + Documentation string `json:"documentation"` + Name string `json:"name"` + Proposed bool `json:"proposed"` + Since string `json:"since"` + SupportsCustomValues bool `json:"supportsCustomValues"` + Type *Type `json:"type"` + Values []NameValue `json:"values"` + Line int `json:"line"` +} + +// A TypeAlias is the parsed version of an LSP type alias from the spec +type TypeAlias struct { + Documentation string `json:"documentation"` + Deprecated string `json:"deprecated"` + Name string `json:"name"` + Proposed bool `json:"proposed"` + Since string `json:"since"` + Type *Type `json:"type"` + Line int `json:"line"` +} + +// A NameValue describes an enumeration constant +type NameValue struct { + Documentation string `json:"documentation"` + Name string `json:"name"` + Proposed bool `json:"proposed"` + Since string `json:"since"` + Value any `json:"value"` // number or string + Line int `json:"line"` +} + +// A Type is the parsed version of an LSP type from the spec, +// or a Type the code constructs +type Type struct { + Kind string `json:"kind"` // -- which kind goes with which field -- + Items []*Type `json:"items"` // "and", "or", "tuple" + Element *Type `json:"element"` // "array" + Name string `json:"name"` // "base", "reference" + Key *Type `json:"key"` // "map" + Value any `json:"value"` // "map", "stringLiteral", "literal" + Line int `json:"line"` // JSON source line +} + +// ParsedLiteral is Type.Value when Type.Kind is "literal" +type ParseLiteral struct { + Properties `json:"properties"` +} + +// A NameType represents the name and type of a structure element +type NameType struct { + Name string `json:"name"` + Type *Type `json:"type"` + Optional bool `json:"optional"` + Documentation string `json:"documentation"` + Deprecated string `json:"deprecated"` + Since string `json:"since"` + Proposed bool `json:"proposed"` + Line int `json:"line"` +} + +// Properties are the collection of structure fields 
+type Properties []NameType + +// addLineNumbers adds a "line" field to each object in the JSON. +func addLineNumbers(buf []byte) []byte { + var ans []byte + // In the specification .json file, the delimiter '{' is + // always followed by a newline. There are other {s embedded in strings. + // json.Token does not return \n, or :, or , so using it would + // require parsing the json to reconstruct the missing information. + for linecnt, i := 1, 0; i < len(buf); i++ { + ans = append(ans, buf[i]) + switch buf[i] { + case '{': + if buf[i+1] == '\n' { + ans = append(ans, fmt.Sprintf(`"line": %d, `, linecnt)...) + // warning: this would fail if the spec file had + // `"value": {\n}`, but it does not, as comma is a separator. + } + case '\n': + linecnt++ + } + } + return ans +} + +type sortedMap[T any] map[string]T + +func (s sortedMap[T]) keys() []string { + var keys []string + for k := range s { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} diff --git a/gopls/test/json_test.go b/gopls/internal/protocol/json_test.go similarity index 92% rename from gopls/test/json_test.go rename to gopls/internal/protocol/json_test.go index 7a91a953ae5..9aac110fa3b 100644 --- a/gopls/test/json_test.go +++ b/gopls/internal/protocol/json_test.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-package gopls_test +package protocol_test import ( "encoding/json" @@ -12,7 +12,7 @@ import ( "testing" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" ) // verify that type errors in Initialize lsp messages don't cause @@ -30,7 +30,7 @@ import ( // a recent Initialize message taken from a log (at some point // some field incompatibly changed from bool to int32) -const input = `{"processId":46408,"clientInfo":{"name":"Visual Studio Code - Insiders","version":"1.76.0-insider"},"locale":"en-us","rootPath":"/Users/pjw/hakim","rootUri":"file:///Users/pjw/hakim","capabilities":{"workspace":{"applyEdit":true,"workspaceEdit":{"documentChanges":true,"resourceOperations":["create","rename","delete"],"failureHandling":"textOnlyTransactional","normalizesLineEndings":true,"changeAnnotationSupport":{"groupsOnLabel":true}},"configuration":true,"didChangeWatchedFiles":{"dynamicRegistration":true,"relativePatternSupport":true},"symbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"tagSupport":{"valueSet":[1]},"resolveSupport":{"properties":["location.range"]}},"codeLens":{"refreshSupport":true},"executeCommand":{"dynamicRegistration":true},"didChangeConfiguration":{"dynamicRegistration":true},"workspaceFolders":true,"semanticTokens":{"refreshSupport":true},"fileOperations":{"dynamicRegistration":true,"didCreate":true,"didRename":true,"didDelete":true,"willCreate":true,"willRename":true,"willDelete":true},"inlineValue":{"refreshSupport":true},"inlayHint":{"refreshSupport":true},"diagnostics":{"refreshSupport":true}},"textDocument":{"publishDiagnostics":{"relatedInformation":true,"versionSupport":false,"tagSupport":{"valueSet":[1,2]},"codeDescriptionSupport":true,"dataSupport":true},"synchronization":{"dynamicRegistration":true,"willSave":true,"willSaveWaitUntil":true,"didSave":true},"completion":{"dynamicRegistration":true,
"contextSupport":true,"completionItem":{"snippetSupport":true,"commitCharactersSupport":true,"documentationFormat":["markdown","plaintext"],"deprecatedSupport":true,"preselectSupport":true,"tagSupport":{"valueSet":[1]},"insertReplaceSupport":true,"resolveSupport":{"properties":["documentation","detail","additionalTextEdits"]},"insertTextModeSupport":{"valueSet":[1,2]},"labelDetailsSupport":true},"insertTextMode":2,"completionItemKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]},"completionList":{"itemDefaults":["commitCharacters","editRange","insertTextFormat","insertTextMode"]}},"hover":{"dynamicRegistration":true,"contentFormat":["markdown","plaintext"]},"signatureHelp":{"dynamicRegistration":true,"signatureInformation":{"documentationFormat":["markdown","plaintext"],"parameterInformation":{"labelOffsetSupport":true},"activeParameterSupport":true},"contextSupport":true},"definition":{"dynamicRegistration":true,"linkSupport":true},"references":{"dynamicRegistration":true},"documentHighlight":{"dynamicRegistration":true},"documentSymbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"hierarchicalDocumentSymbolSupport":true,"tagSupport":{"valueSet":[1]},"labelSupport":true},"codeAction":{"dynamicRegistration":true,"isPreferredSupport":true,"disabledSupport":true,"dataSupport":true,"resolveSupport":{"properties":["edit"]},"codeActionLiteralSupport":{"codeActionKind":{"valueSet":["","quickfix","refactor","refactor.extract","refactor.inline","refactor.rewrite","source","source.organizeImports"]}},"honorsChangeAnnotations":false},"codeLens":{"dynamicRegistration":true},"formatting":{"dynamicRegistration":true},"rangeFormatting":{"dynamicRegistration":true},"onTypeFormatting":{"dynamicRegistration":true},"rename":{"dynamicRegistration":true,"prepareSupport":true,"prepareSupportDefaultBehavior":1,"honorsChangeAnnotations":true},"documentLink":{"dynamicRegistr
ation":true,"tooltipSupport":true},"typeDefinition":{"dynamicRegistration":true,"linkSupport":true},"implementation":{"dynamicRegistration":true,"linkSupport":true},"colorProvider":{"dynamicRegistration":true},"foldingRange":{"dynamicRegistration":true,"rangeLimit":5000,"lineFoldingOnly":true,"foldingRangeKind":{"valueSet":["comment","imports","region"]},"foldingRange":{"collapsedText":false}},"declaration":{"dynamicRegistration":true,"linkSupport":true},"selectionRange":{"dynamicRegistration":true},"callHierarchy":{"dynamicRegistration":true},"semanticTokens":{"dynamicRegistration":true,"tokenTypes":["namespace","type","class","enum","interface","struct","typeParameter","parameter","variable","property","enumMember","event","function","method","macro","keyword","modifier","comment","string","number","regexp","operator","decorator"],"tokenModifiers":["declaration","definition","readonly","static","deprecated","abstract","async","modification","documentation","defaultLibrary"],"formats":["relative"],"requests":{"range":true,"full":{"delta":true}},"multilineTokenSupport":false,"overlappingTokenSupport":false,"serverCancelSupport":true,"augmentsSyntaxTokens":true},"linkedEditingRange":{"dynamicRegistration":true},"typeHierarchy":{"dynamicRegistration":true},"inlineValue":{"dynamicRegistration":true},"inlayHint":{"dynamicRegistration":true,"resolveSupport":{"properties":["tooltip","textEdits","label.tooltip","label.location","label.command"]}},"diagnostic":{"dynamicRegistration":true,"relatedDocumentSupport":false}},"window":{"showMessage":{"messageActionItem":{"additionalPropertiesSupport":true}},"showDocument":{"support":true},"workDoneProgress":true},"general":{"staleRequestSupport":{"cancel":true,"retryOnContentModified":["textDocument/semanticTokens/full","textDocument/semanticTokens/range","textDocument/semanticTokens/full/delta"]},"regularExpressions":{"engine":"ECMAScript","version":"ES2020"},"markdown":{"parser":"marked","version":"1.1.0"},"positionEncodings":[
"utf-16"]},"notebookDocument":{"synchronization":{"dynamicRegistration":true,"executionSummarySupport":true}}},"initializationOptions":{"usePlaceholders":true,"completionDocumentation":true,"verboseOutput":false,"build.directoryFilters":["-foof","-internal/lsp/protocol/typescript"],"codelenses":{"reference":true,"gc_details":true},"analyses":{"fillstruct":true,"staticcheck":true,"unusedparams":false,"composites":false},"semanticTokens":true,"noSemanticString":true,"noSemanticNumber":true,"templateExtensions":["tmpl","gotmpl"],"ui.completion.matcher":"Fuzzy","ui.inlayhint.hints":{"assignVariableTypes":false,"compositeLiteralFields":false,"compositeLiteralTypes":false,"constantValues":false,"functionTypeParameters":false,"parameterNames":false,"rangeVariableTypes":false},"ui.vulncheck":"Off","allExperiments":true},"trace":"off","workspaceFolders":[{"uri":"file:///Users/pjw/hakim","name":"hakim"}]}` +const input = `{"processId":46408,"clientInfo":{"name":"Visual Studio Code - Insiders","version":"1.76.0-insider"},"locale":"en-us","rootPath":"/Users/pjw/hakim","rootUri":"file:///Users/pjw/hakim","capabilities":{"workspace":{"applyEdit":true,"workspaceEdit":{"documentChanges":true,"resourceOperations":["create","rename","delete"],"failureHandling":"textOnlyTransactional","normalizesLineEndings":true,"changeAnnotationSupport":{"groupsOnLabel":true}},"configuration":true,"didChangeWatchedFiles":{"dynamicRegistration":true,"relativePatternSupport":true},"symbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"tagSupport":{"valueSet":[1]},"resolveSupport":{"properties":["location.range"]}},"codeLens":{"refreshSupport":true},"executeCommand":{"dynamicRegistration":true},"didChangeConfiguration":{"dynamicRegistration":true},"workspaceFolders":true,"semanticTokens":{"refreshSupport":true},"fileOperations":{"dynamicRegistration":true,"didCreate":true,"didRename":true,"didDelete":true,"willCreate":true,
"willRename":true,"willDelete":true},"inlineValue":{"refreshSupport":true},"inlayHint":{"refreshSupport":true},"diagnostics":{"refreshSupport":true}},"textDocument":{"publishDiagnostics":{"relatedInformation":true,"versionSupport":false,"tagSupport":{"valueSet":[1,2]},"codeDescriptionSupport":true,"dataSupport":true},"synchronization":{"dynamicRegistration":true,"willSave":true,"willSaveWaitUntil":true,"didSave":true},"completion":{"dynamicRegistration":true,"contextSupport":true,"completionItem":{"snippetSupport":true,"commitCharactersSupport":true,"documentationFormat":["markdown","plaintext"],"deprecatedSupport":true,"preselectSupport":true,"tagSupport":{"valueSet":[1]},"insertReplaceSupport":true,"resolveSupport":{"properties":["documentation","detail","additionalTextEdits"]},"insertTextModeSupport":{"valueSet":[1,2]},"labelDetailsSupport":true},"insertTextMode":2,"completionItemKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]},"completionList":{"itemDefaults":["commitCharacters","editRange","insertTextFormat","insertTextMode"]}},"hover":{"dynamicRegistration":true,"contentFormat":["markdown","plaintext"]},"signatureHelp":{"dynamicRegistration":true,"signatureInformation":{"documentationFormat":["markdown","plaintext"],"parameterInformation":{"labelOffsetSupport":true},"activeParameterSupport":true},"contextSupport":true},"definition":{"dynamicRegistration":true,"linkSupport":true},"references":{"dynamicRegistration":true},"documentHighlight":{"dynamicRegistration":true},"documentSymbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"hierarchicalDocumentSymbolSupport":true,"tagSupport":{"valueSet":[1]},"labelSupport":true},"codeAction":{"dynamicRegistration":true,"isPreferredSupport":true,"disabledSupport":true,"dataSupport":true,"resolveSupport":{"properties":["edit"]},"codeActionLiteralSupport":{"codeActionKind":{"valueSet":["","quickfix","refact
or","refactor.extract","refactor.inline","refactor.rewrite","source","source.organizeImports"]}},"honorsChangeAnnotations":false},"codeLens":{"dynamicRegistration":true},"formatting":{"dynamicRegistration":true},"rangeFormatting":{"dynamicRegistration":true},"onTypeFormatting":{"dynamicRegistration":true},"rename":{"dynamicRegistration":true,"prepareSupport":true,"prepareSupportDefaultBehavior":1,"honorsChangeAnnotations":true},"documentLink":{"dynamicRegistration":true,"tooltipSupport":true},"typeDefinition":{"dynamicRegistration":true,"linkSupport":true},"implementation":{"dynamicRegistration":true,"linkSupport":true},"colorProvider":{"dynamicRegistration":true},"foldingRange":{"dynamicRegistration":true,"rangeLimit":5000,"lineFoldingOnly":true,"foldingRangeKind":{"valueSet":["comment","imports","region"]},"foldingRange":{"collapsedText":false}},"declaration":{"dynamicRegistration":true,"linkSupport":true},"selectionRange":{"dynamicRegistration":true},"callHierarchy":{"dynamicRegistration":true},"semanticTokens":{"dynamicRegistration":true,"tokenTypes":["namespace","type","class","enum","interface","struct","typeParameter","parameter","variable","property","enumMember","event","function","method","macro","keyword","modifier","comment","string","number","regexp","operator","decorator"],"tokenModifiers":["declaration","definition","readonly","static","deprecated","abstract","async","modification","documentation","defaultLibrary"],"formats":["relative"],"requests":{"range":true,"full":{"delta":true}},"multilineTokenSupport":false,"overlappingTokenSupport":false,"serverCancelSupport":true,"augmentsSyntaxTokens":true},"linkedEditingRange":{"dynamicRegistration":true},"typeHierarchy":{"dynamicRegistration":true},"inlineValue":{"dynamicRegistration":true},"inlayHint":{"dynamicRegistration":true,"resolveSupport":{"properties":["tooltip","textEdits","label.tooltip","label.location","label.command"]}},"diagnostic":{"dynamicRegistration":true,"relatedDocumentSupport":false}}
,"window":{"showMessage":{"messageActionItem":{"additionalPropertiesSupport":true}},"showDocument":{"support":true},"workDoneProgress":true},"general":{"staleRequestSupport":{"cancel":true,"retryOnContentModified":["textDocument/semanticTokens/full","textDocument/semanticTokens/range","textDocument/semanticTokens/full/delta"]},"regularExpressions":{"engine":"ECMAScript","version":"ES2020"},"markdown":{"parser":"marked","version":"1.1.0"},"positionEncodings":["utf-16"]},"notebookDocument":{"synchronization":{"dynamicRegistration":true,"executionSummarySupport":true}}},"initializationOptions":{"usePlaceholders":true,"completionDocumentation":true,"verboseOutput":false,"build.directoryFilters":["-foof","-internal/protocol/typescript"],"codelenses":{"reference":true,"gc_details":true},"analyses":{"fillstruct":true,"staticcheck":true,"unusedparams":false,"composites":false},"semanticTokens":true,"noSemanticString":true,"noSemanticNumber":true,"templateExtensions":["tmpl","gotmpl"],"ui.completion.matcher":"Fuzzy","ui.inlayhint.hints":{"assignVariableTypes":false,"compositeLiteralFields":false,"compositeLiteralTypes":false,"constantValues":false,"functionTypeParameters":false,"parameterNames":false,"rangeVariableTypes":false},"ui.vulncheck":"Off","allExperiments":true},"trace":"off","workspaceFolders":[{"uri":"file:///Users/pjw/hakim","name":"hakim"}]}` type DiffReporter struct { path cmp.Path diff --git a/gopls/internal/lsp/protocol/log.go b/gopls/internal/protocol/log.go similarity index 100% rename from gopls/internal/lsp/protocol/log.go rename to gopls/internal/protocol/log.go diff --git a/gopls/internal/protocol/mapper.go b/gopls/internal/protocol/mapper.go new file mode 100644 index 00000000000..d1bd957a9e5 --- /dev/null +++ b/gopls/internal/protocol/mapper.go @@ -0,0 +1,434 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package protocol + +// This file defines Mapper, which wraps a file content buffer +// ([]byte) and provides efficient conversion between every kind of +// position representation. +// +// gopls uses four main representations of position: +// +// 1. byte offsets, e.g. (start, end int), starting from zero. +// +// 2. go/token notation. Use these types when interacting directly +// with the go/* syntax packages: +// +// token.Pos +// token.FileSet +// token.File +// +// Because File.Offset and File.Pos panic on invalid inputs, +// we do not call them directly and instead use the safetoken package +// for these conversions. This is enforced by a static check. +// +// Beware also that the methods of token.File have two bugs for which +// safetoken contains workarounds: +// - #57490, whereby the parser may create ast.Nodes during error +// recovery whose computed positions are out of bounds (EOF+1). +// - #41029, whereby the wrong line number is returned for the EOF position. +// +// 3. the cmd package. +// +// cmd.point = (line, col8, offset). +// cmd.Span = (uri URI, start, end cmd.point) +// +// Line and column are 1-based. +// Columns are measured in bytes (UTF-8 codes). +// All fields are optional. +// +// These types are useful as intermediate conversions of validated +// ranges (though MappedRange is superior as it is self contained +// and universally convertible). Since their fields are optional +// they are also useful for parsing user-provided positions (e.g. in +// the CLI) before we have access to file contents. +// +// 4. protocol, the LSP RPC message format. +// +// protocol.Position = (Line, Character uint32) +// protocol.Range = (start, end Position) +// protocol.Location = (URI, protocol.Range) +// +// Line and Character are 0-based. +// Characters (columns) are measured in UTF-16 codes. +// +// protocol.Mapper holds the (URI, Content) of a file, enabling +// efficient mapping between byte offsets, cmd ranges, and +// protocol ranges. 
+// +// protocol.MappedRange holds a protocol.Mapper and valid (start, +// end int) byte offsets, enabling infallible, efficient conversion +// to any other format. + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "sort" + "strings" + "sync" + "unicode/utf8" + + "golang.org/x/tools/gopls/internal/util/safetoken" +) + +// A Mapper wraps the content of a file and provides mapping +// between byte offsets and notations of position such as: +// +// - (line, col8) pairs, where col8 is a 1-based UTF-8 column number +// (bytes), as used by the go/token and cmd packages. +// +// - (line, col16) pairs, where col16 is a 1-based UTF-16 column +// number, as used by the LSP protocol. +// +// All conversion methods are named "FromTo", where From and To are the two types. +// For example, the PointPosition method converts from a Point to a Position. +// +// Mapper does not intrinsically depend on go/token-based +// representations. Use safetoken to map between token.Pos <=> byte +// offsets, or the convenience methods such as PosPosition, +// NodePosition, or NodeRange. +// +// See overview comments at top of this file. +type Mapper struct { + URI DocumentURI + Content []byte + + // Line-number information is requested only for a tiny + // fraction of Mappers, so we compute it lazily. + // Call initLines() before accessing fields below. + linesOnce sync.Once + lineStart []int // byte offset of start of ith line (0-based); last=EOF iff \n-terminated + nonASCII bool + + // TODO(adonovan): adding an extra lineStart entry for EOF + // might simplify every method that accesses it. Try it out. +} + +// NewMapper creates a new mapper for the given URI and content. +func NewMapper(uri DocumentURI, content []byte) *Mapper { + return &Mapper{URI: uri, Content: content} +} + +// initLines populates the lineStart table. 
+func (m *Mapper) initLines() { + m.linesOnce.Do(func() { + nlines := bytes.Count(m.Content, []byte("\n")) + m.lineStart = make([]int, 1, nlines+1) // initially []int{0} + for offset, b := range m.Content { + if b == '\n' { + m.lineStart = append(m.lineStart, offset+1) + } + if b >= utf8.RuneSelf { + m.nonASCII = true + } + } + }) +} + +// LineCol8Position converts a valid line and UTF-8 column number, +// both 1-based, to a protocol (UTF-16) position. +func (m *Mapper) LineCol8Position(line, col8 int) (Position, error) { + m.initLines() + line0 := line - 1 // 0-based + if !(0 <= line0 && line0 < len(m.lineStart)) { + return Position{}, fmt.Errorf("line number %d out of range (max %d)", line, len(m.lineStart)) + } + + // content[start:end] is the preceding partial line. + start := m.lineStart[line0] + end := start + col8 - 1 + + // Validate column. + if end > len(m.Content) { + return Position{}, fmt.Errorf("column is beyond end of file") + } else if line0+1 < len(m.lineStart) && end >= m.lineStart[line0+1] { + return Position{}, fmt.Errorf("column is beyond end of line") + } + + char := UTF16Len(m.Content[start:end]) + return Position{Line: uint32(line0), Character: uint32(char)}, nil +} + +// -- conversions from byte offsets -- + +// OffsetLocation converts a byte-offset interval to a protocol (UTF-16) location. +func (m *Mapper) OffsetLocation(start, end int) (Location, error) { + rng, err := m.OffsetRange(start, end) + if err != nil { + return Location{}, err + } + return m.RangeLocation(rng), nil +} + +// OffsetRange converts a byte-offset interval to a protocol (UTF-16) range. 
+func (m *Mapper) OffsetRange(start, end int) (Range, error) { + if start > end { + return Range{}, fmt.Errorf("start offset (%d) > end (%d)", start, end) + } + startPosition, err := m.OffsetPosition(start) + if err != nil { + return Range{}, fmt.Errorf("start: %v", err) + } + endPosition, err := m.OffsetPosition(end) + if err != nil { + return Range{}, fmt.Errorf("end: %v", err) + } + return Range{Start: startPosition, End: endPosition}, nil +} + +// OffsetPosition converts a byte offset to a protocol (UTF-16) position. +func (m *Mapper) OffsetPosition(offset int) (Position, error) { + if !(0 <= offset && offset <= len(m.Content)) { + return Position{}, fmt.Errorf("invalid offset %d (want 0-%d)", offset, len(m.Content)) + } + // No error may be returned after this point, + // even if the offset does not fall at a rune boundary. + // (See panic in MappedRange.Range reachable.) + + line, col16 := m.lineCol16(offset) + return Position{Line: uint32(line), Character: uint32(col16)}, nil +} + +// lineCol16 converts a valid byte offset to line and UTF-16 column numbers, both 0-based. +func (m *Mapper) lineCol16(offset int) (int, int) { + line, start, cr := m.line(offset) + var col16 int + if m.nonASCII { + col16 = UTF16Len(m.Content[start:offset]) + } else { + col16 = offset - start + } + if cr { + col16-- // retreat from \r at line end + } + return line, col16 +} + +// OffsetLineCol8 converts a valid byte offset to line and UTF-8 column numbers, both 1-based. +func (m *Mapper) OffsetLineCol8(offset int) (int, int) { + line, start, cr := m.line(offset) + col8 := offset - start + if cr { + col8-- // retreat from \r at line end + } + return line + 1, col8 + 1 +} + +// line returns: +// - the 0-based index of the line that encloses the (valid) byte offset; +// - the start offset of that line; and +// - whether the offset denotes a carriage return (\r) at line end. 
+func (m *Mapper) line(offset int) (int, int, bool) { + m.initLines() + // In effect, binary search returns a 1-based result. + line := sort.Search(len(m.lineStart), func(i int) bool { + return offset < m.lineStart[i] + }) + + // Adjustment for line-endings: \r|\n is the same as |\r\n. + var eol int + if line == len(m.lineStart) { + eol = len(m.Content) // EOF + } else { + eol = m.lineStart[line] - 1 + } + cr := offset == eol && offset > 0 && m.Content[offset-1] == '\r' + + line-- // 0-based + + return line, m.lineStart[line], cr +} + +// OffsetMappedRange returns a MappedRange for the given byte offsets. +// A MappedRange can be converted to any other form. +func (m *Mapper) OffsetMappedRange(start, end int) (MappedRange, error) { + if !(0 <= start && start <= end && end <= len(m.Content)) { + return MappedRange{}, fmt.Errorf("invalid offsets (%d, %d) (file %s has size %d)", start, end, m.URI, len(m.Content)) + } + return MappedRange{m, start, end}, nil +} + +// -- conversions from protocol (UTF-16) domain -- + +// RangeOffsets converts a protocol (UTF-16) range to start/end byte offsets. +func (m *Mapper) RangeOffsets(r Range) (int, int, error) { + start, err := m.PositionOffset(r.Start) + if err != nil { + return 0, 0, err + } + end, err := m.PositionOffset(r.End) + if err != nil { + return 0, 0, err + } + return start, end, nil +} + +// PositionOffset converts a protocol (UTF-16) position to a byte offset. +func (m *Mapper) PositionOffset(p Position) (int, error) { + m.initLines() + + // Validate line number. 
+ if p.Line > uint32(len(m.lineStart)) { + return 0, fmt.Errorf("line number %d out of range 0-%d", p.Line, len(m.lineStart)) + } else if p.Line == uint32(len(m.lineStart)) { + if p.Character == 0 { + return len(m.Content), nil // EOF + } + return 0, fmt.Errorf("column is beyond end of file") + } + + offset := m.lineStart[p.Line] + content := m.Content[offset:] // rest of file from start of enclosing line + + // Advance bytes up to the required number of UTF-16 codes. + col8 := 0 + for col16 := 0; col16 < int(p.Character); col16++ { + r, sz := utf8.DecodeRune(content) + if sz == 0 { + return 0, fmt.Errorf("column is beyond end of file") + } + if r == '\n' { + return 0, fmt.Errorf("column is beyond end of line") + } + if sz == 1 && r == utf8.RuneError { + return 0, fmt.Errorf("buffer contains invalid UTF-8 text") + } + content = content[sz:] + + if r >= 0x10000 { + col16++ // rune was encoded by a pair of surrogate UTF-16 codes + + if col16 == int(p.Character) { + break // requested position is in the middle of a rune + } + } + col8 += sz + } + return offset + col8, nil +} + +// -- go/token domain convenience methods -- + +// PosPosition converts a token pos to a protocol (UTF-16) position. +func (m *Mapper) PosPosition(tf *token.File, pos token.Pos) (Position, error) { + offset, err := safetoken.Offset(tf, pos) + if err != nil { + return Position{}, err + } + return m.OffsetPosition(offset) +} + +// PosLocation converts a token range to a protocol (UTF-16) location. +func (m *Mapper) PosLocation(tf *token.File, start, end token.Pos) (Location, error) { + startOffset, endOffset, err := safetoken.Offsets(tf, start, end) + if err != nil { + return Location{}, err + } + rng, err := m.OffsetRange(startOffset, endOffset) + if err != nil { + return Location{}, err + } + return m.RangeLocation(rng), nil +} + +// PosRange converts a token range to a protocol (UTF-16) range. 
+func (m *Mapper) PosRange(tf *token.File, start, end token.Pos) (Range, error) { + startOffset, endOffset, err := safetoken.Offsets(tf, start, end) + if err != nil { + return Range{}, err + } + return m.OffsetRange(startOffset, endOffset) +} + +// NodeRange converts a syntax node range to a protocol (UTF-16) range. +func (m *Mapper) NodeRange(tf *token.File, node ast.Node) (Range, error) { + return m.PosRange(tf, node.Pos(), node.End()) +} + +// RangeLocation pairs a protocol Range with its URI, in a Location. +func (m *Mapper) RangeLocation(rng Range) Location { + return Location{URI: m.URI, Range: rng} +} + +// PosMappedRange returns a MappedRange for the given token.Pos range. +func (m *Mapper) PosMappedRange(tf *token.File, start, end token.Pos) (MappedRange, error) { + startOffset, endOffset, err := safetoken.Offsets(tf, start, end) + if err != nil { + return MappedRange{}, err + } + return m.OffsetMappedRange(startOffset, endOffset) +} + +// NodeMappedRange returns a MappedRange for the given node range. +func (m *Mapper) NodeMappedRange(tf *token.File, node ast.Node) (MappedRange, error) { + return m.PosMappedRange(tf, node.Pos(), node.End()) +} + +// -- MappedRange -- + +// A MappedRange represents a valid byte-offset range of a file. +// Through its Mapper it can be converted into other forms such +// as protocol.Range or UTF-8. +// +// Construct one by calling Mapper.OffsetMappedRange with start/end offsets. +// From the go/token domain, call safetoken.Offsets first, +// or use a helper such as parsego.File.MappedPosRange. +// +// Two MappedRanges produced by the same Mapper are equal if and only if they +// denote the same range. Two MappedRanges produced by different Mappers +// are unequal even when they represent the same range of the same file. +type MappedRange struct { + Mapper *Mapper + start, end int // valid byte offsets: 0 <= start <= end <= len(Mapper.Content) +} + +// Offsets returns the (start, end) byte offsets of this range. 
+func (mr MappedRange) Offsets() (start, end int) { return mr.start, mr.end } + +// -- convenience functions -- + +// URI returns the URI of the range's file. +func (mr MappedRange) URI() DocumentURI { + return mr.Mapper.URI +} + +// Range returns the range in protocol (UTF-16) form. +func (mr MappedRange) Range() Range { + rng, err := mr.Mapper.OffsetRange(mr.start, mr.end) + if err != nil { + panic(err) // can't happen + } + return rng +} + +// Location returns the range in protocol location (UTF-16) form. +func (mr MappedRange) Location() Location { + return mr.Mapper.RangeLocation(mr.Range()) +} + +// String formats the range in UTF-8 notation. +func (mr MappedRange) String() string { + var s strings.Builder + startLine, startCol8 := mr.Mapper.OffsetLineCol8(mr.start) + fmt.Fprintf(&s, "%d:%d", startLine, startCol8) + if mr.end != mr.start { + endLine, endCol8 := mr.Mapper.OffsetLineCol8(mr.end) + if endLine == startLine { + fmt.Fprintf(&s, "-%d", endCol8) + } else { + fmt.Fprintf(&s, "-%d:%d", endLine, endCol8) + } + } + return s.String() +} + +// LocationTextDocumentPositionParams converts its argument to its result. +func LocationTextDocumentPositionParams(loc Location) TextDocumentPositionParams { + return TextDocumentPositionParams{ + TextDocument: TextDocumentIdentifier{URI: loc.URI}, + Position: loc.Range.Start, + } +} diff --git a/gopls/internal/lsp/protocol/mapper_test.go b/gopls/internal/protocol/mapper_test.go similarity index 88% rename from gopls/internal/lsp/protocol/mapper_test.go rename to gopls/internal/protocol/mapper_test.go index 15fc54b405a..8ba611a99f9 100644 --- a/gopls/internal/lsp/protocol/mapper_test.go +++ b/gopls/internal/protocol/mapper_test.go @@ -9,13 +9,12 @@ import ( "strings" "testing" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/protocol" ) -// This file tests Mapper's logic for converting between -// span.Point and UTF-16 columns. 
(The strange form attests to an -// earlier abstraction.) +// This file tests Mapper's logic for converting between offsets, +// UTF-8 columns, and UTF-16 columns. (The strange form attests to +// earlier abstractions.) // 𐐀 is U+10400 = [F0 90 90 80] in UTF-8, [D801 DC00] in UTF-16. var funnyString = []byte("𐐀23\n𐐀45") @@ -233,9 +232,16 @@ func TestToUTF16(t *testing.T) { if e.issue != nil && !*e.issue { t.Skip("expected to fail") } - p := span.NewPoint(e.line, e.col, e.offset) m := protocol.NewMapper("", e.input) - pos, err := m.PointPosition(p) + var pos protocol.Position + var err error + if e.line > 0 { + pos, err = m.LineCol8Position(e.line, e.col) + } else if e.offset >= 0 { + pos, err = m.OffsetPosition(e.offset) + } else { + err = fmt.Errorf("point has neither offset nor line/column") + } if err != nil { if err.Error() != e.err { t.Fatalf("expected error %v; got %v", e.err, err) @@ -249,12 +255,12 @@ func TestToUTF16(t *testing.T) { if got != e.resUTF16col { t.Fatalf("expected result %v; got %v", e.resUTF16col, got) } - pre, post := getPrePost(e.input, p.Offset()) - if string(pre) != e.pre { - t.Fatalf("expected #%d pre %q; got %q", p.Offset(), e.pre, pre) + pre, post := getPrePost(e.input, e.offset) + if pre != e.pre { + t.Fatalf("expected #%d pre %q; got %q", e.offset, e.pre, pre) } - if string(post) != e.post { - t.Fatalf("expected #%d, post %q; got %q", p.Offset(), e.post, post) + if post != e.post { + t.Fatalf("expected #%d, post %q; got %q", e.offset, e.post, post) } }) } @@ -263,8 +269,8 @@ func TestToUTF16(t *testing.T) { func TestFromUTF16(t *testing.T) { for _, e := range fromUTF16Tests { t.Run(e.scenario, func(t *testing.T) { - m := protocol.NewMapper("", []byte(e.input)) - p, err := m.PositionPoint(protocol.Position{ + m := protocol.NewMapper("", e.input) + offset, err := m.PositionOffset(protocol.Position{ Line: uint32(e.line - 1), Character: uint32(e.utf16col - 1), }) @@ -277,18 +283,22 @@ func TestFromUTF16(t *testing.T) { if e.err != "" { 
t.Fatalf("unexpected success; wanted %v", e.err) } - if p.Column() != e.resCol { - t.Fatalf("expected resulting col %v; got %v", e.resCol, p.Column()) + if offset != e.resOffset { + t.Fatalf("expected offset %v; got %v", e.resOffset, offset) + } + line, col8 := m.OffsetLineCol8(offset) + if line != e.line { + t.Fatalf("expected resulting line %v; got %v", e.line, line) } - if p.Offset() != e.resOffset { - t.Fatalf("expected resulting offset %v; got %v", e.resOffset, p.Offset()) + if col8 != e.resCol { + t.Fatalf("expected resulting col %v; got %v", e.resCol, col8) } - pre, post := getPrePost(e.input, p.Offset()) - if string(pre) != e.pre { - t.Fatalf("expected #%d pre %q; got %q", p.Offset(), e.pre, pre) + pre, post := getPrePost(e.input, offset) + if pre != e.pre { + t.Fatalf("expected #%d pre %q; got %q", offset, e.pre, pre) } - if string(post) != e.post { - t.Fatalf("expected #%d post %q; got %q", p.Offset(), e.post, post) + if post != e.post { + t.Fatalf("expected #%d post %q; got %q", offset, e.post, post) } }) } @@ -426,14 +436,14 @@ func TestBytesOffset(t *testing.T) { for i, test := range tests { fname := fmt.Sprintf("test %d", i) - uri := span.URIFromPath(fname) + uri := protocol.URIFromPath(fname) mapper := protocol.NewMapper(uri, []byte(test.text)) - got, err := mapper.PositionPoint(test.pos) + got, err := mapper.PositionOffset(test.pos) if err != nil && test.want != -1 { t.Errorf("%d: unexpected error: %v", i, err) } - if err == nil && got.Offset() != test.want { - t.Errorf("want %d for %q(Line:%d,Character:%d), but got %d", test.want, test.text, int(test.pos.Line), int(test.pos.Character), got.Offset()) + if err == nil && got != test.want { + t.Errorf("want %d for %q(Line:%d,Character:%d), but got %d", test.want, test.text, int(test.pos.Line), int(test.pos.Character), got) } } } diff --git a/gopls/internal/lsp/protocol/protocol.go b/gopls/internal/protocol/protocol.go similarity index 87% rename from gopls/internal/lsp/protocol/protocol.go rename to 
gopls/internal/protocol/protocol.go index 7ca8f2bc66a..7cc5589aa0b 100644 --- a/gopls/internal/lsp/protocol/protocol.go +++ b/gopls/internal/protocol/protocol.go @@ -5,11 +5,14 @@ package protocol import ( + "bytes" "context" "encoding/json" "fmt" "io" + "golang.org/x/telemetry/crashmonitor" + "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/jsonrpc2" jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" @@ -158,16 +161,7 @@ func ServerHandler(server Server, handler jsonrpc2.Handler) jsonrpc2.Handler { if handled || err != nil { return err } - //TODO: This code is wrong, it ignores handler and assumes non standard - // request handles everything - // non standard request should just be a layered handler. - var params interface{} - if err := json.Unmarshal(req.Params(), ¶ms); err != nil { - return sendParseError(ctx, reply, err) - } - resp, err := server.NonstandardRequest(ctx, req.Method(), params) - return reply(ctx, resp, err) - + return handler(ctx, reply, req) } } @@ -249,7 +243,7 @@ func CancelHandler(handler jsonrpc2.Handler) jsonrpc2.Handler { return handler(ctx, replyWithDetachedContext, req) } var params CancelParams - if err := json.Unmarshal(req.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(req.Params(), ¶ms); err != nil { return sendParseError(ctx, reply, err) } if n, ok := params.ID.(float64); ok { @@ -279,6 +273,41 @@ func cancelCall(ctx context.Context, sender connSender, id jsonrpc2.ID) { sender.Notify(ctx, "$/cancelRequest", &CancelParams{ID: &id}) } +// UnmarshalJSON unmarshals msg into the variable pointed to by +// params. In JSONRPC, optional messages may be +// "null", in which case it is a no-op. 
+func UnmarshalJSON(msg json.RawMessage, v any) error { + if len(msg) == 0 || bytes.Equal(msg, []byte("null")) { + return nil + } + return json.Unmarshal(msg, v) +} + func sendParseError(ctx context.Context, reply jsonrpc2.Replier, err error) error { return reply(ctx, nil, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err)) } + +// NonNilSlice returns x, or an empty slice if x was nil. +// +// (Many slice fields of protocol structs must be non-nil +// to avoid being encoded as JSON "null".) +func NonNilSlice[T comparable](x []T) []T { + if x == nil { + return []T{} + } + return x +} + +func recoverHandlerPanic(method string) { + // Report panics in the handler goroutine, + // unless we have enabled the monitor, + // which reports all crashes. + if !crashmonitor.Supported() { + defer func() { + if x := recover(); x != nil { + bug.Reportf("panic in %s request", method) + panic(x) + } + }() + } +} diff --git a/gopls/internal/protocol/semantic.go b/gopls/internal/protocol/semantic.go new file mode 100644 index 00000000000..03407899b57 --- /dev/null +++ b/gopls/internal/protocol/semantic.go @@ -0,0 +1,56 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package protocol + +// The file defines helpers for semantics tokens. + +import "fmt" + +// SemanticTypes to use in case there is no client, as in the command line, or tests. +func SemanticTypes() []string { + return semanticTypes[:] +} + +// SemanticModifiers to use in case there is no client. 
+func SemanticModifiers() []string { + return semanticModifiers[:] +} + +// SemType returns a string equivalent of the type, for gopls semtok +func SemType(n int) string { + tokTypes := SemanticTypes() + tokMods := SemanticModifiers() + if n >= 0 && n < len(tokTypes) { + return tokTypes[n] + } + // not found for some reason + return fmt.Sprintf("?%d[%d,%d]?", n, len(tokTypes), len(tokMods)) +} + +// SemMods returns the []string equivalent of the mods, for gopls semtok. +func SemMods(n int) []string { + tokMods := SemanticModifiers() + mods := []string{} + for i := 0; i < len(tokMods); i++ { + if (n & (1 << uint(i))) != 0 { + mods = append(mods, tokMods[i]) + } + } + return mods +} + +// From https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_semanticTokens +var ( + semanticTypes = [...]string{ + "namespace", "type", "class", "enum", "interface", + "struct", "typeParameter", "parameter", "variable", "property", "enumMember", + "event", "function", "method", "macro", "keyword", "modifier", "comment", + "string", "number", "regexp", "operator", + } + semanticModifiers = [...]string{ + "declaration", "definition", "readonly", "static", + "deprecated", "abstract", "async", "modification", "documentation", "defaultLibrary", + } +) diff --git a/gopls/internal/protocol/semtok/semtok.go b/gopls/internal/protocol/semtok/semtok.go new file mode 100644 index 00000000000..850e234a1b0 --- /dev/null +++ b/gopls/internal/protocol/semtok/semtok.go @@ -0,0 +1,108 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The semtok package provides an encoder for LSP's semantic tokens. +package semtok + +import "sort" + +// A Token provides the extent and semantics of a token. 
+type Token struct { + Line, Start uint32 + Len uint32 + Type TokenType + Modifiers []string +} + +type TokenType string + +const ( + // These are the tokens defined by LSP 3.17, but a client is + // free to send its own set; any tokens that the server emits + // that are not in this set are simply not encoded in the bitfield. + TokNamespace TokenType = "namespace" + TokType TokenType = "type" + TokInterface TokenType = "interface" + TokTypeParam TokenType = "typeParameter" + TokParameter TokenType = "parameter" + TokVariable TokenType = "variable" + TokMethod TokenType = "method" + TokFunction TokenType = "function" + TokKeyword TokenType = "keyword" + TokComment TokenType = "comment" + TokString TokenType = "string" + TokNumber TokenType = "number" + TokOperator TokenType = "operator" + TokMacro TokenType = "macro" // for templates + + // not part of LSP 3.17 (even though JS has labels) + // https://github.com/microsoft/vscode-languageserver-node/issues/1422 + TokLabel TokenType = "label" +) + +// Encode returns the LSP encoding of a sequence of tokens. +// The noStrings, noNumbers options cause strings, numbers to be skipped. +// The lists of types and modifiers determines the bitfield encoding. 
+func Encode( + tokens []Token, + noStrings, noNumbers bool, + types, modifiers []string) []uint32 { + + // binary operators, at least, will be out of order + sort.Slice(tokens, func(i, j int) bool { + if tokens[i].Line != tokens[j].Line { + return tokens[i].Line < tokens[j].Line + } + return tokens[i].Start < tokens[j].Start + }) + + typeMap := make(map[TokenType]int) + for i, t := range types { + typeMap[TokenType(t)] = i + } + + modMap := make(map[string]int) + for i, m := range modifiers { + modMap[m] = 1 << uint(i) // go 1.12 compatibility + } + + // each semantic token needs five values + // (see Integer Encoding for Tokens in the LSP spec) + x := make([]uint32, 5*len(tokens)) + var j int + var last Token + for i := 0; i < len(tokens); i++ { + item := tokens[i] + typ, ok := typeMap[item.Type] + if !ok { + continue // client doesn't want typeStr + } + if item.Type == TokString && noStrings { + continue + } + if item.Type == TokNumber && noNumbers { + continue + } + if j == 0 { + x[0] = tokens[0].Line + } else { + x[j] = item.Line - last.Line + } + x[j+1] = item.Start + if j > 0 && x[j] == 0 { + x[j+1] = item.Start - last.Start + } + x[j+2] = item.Len + x[j+3] = uint32(typ) + mask := 0 + for _, s := range item.Modifiers { + // modMap[s] is 0 if the client doesn't want this modifier + mask |= modMap[s] + } + x[j+4] = uint32(mask) + j += 5 + last = item + } + return x[:j] +} diff --git a/gopls/internal/protocol/span.go b/gopls/internal/protocol/span.go new file mode 100644 index 00000000000..47d04df9d0e --- /dev/null +++ b/gopls/internal/protocol/span.go @@ -0,0 +1,100 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package protocol + +import ( + "fmt" + "unicode/utf8" +) + +// CompareLocation defines a three-valued comparison over locations, +// lexicographically ordered by (URI, Range). 
+func CompareLocation(x, y Location) int { + if x.URI != y.URI { + if x.URI < y.URI { + return -1 + } else { + return +1 + } + } + return CompareRange(x.Range, y.Range) +} + +// CompareRange returns -1 if a is before b, 0 if a == b, and 1 if a is after b. +// +// A range a is defined to be 'before' b if a.Start is before b.Start, or +// a.Start == b.Start and a.End is before b.End. +func CompareRange(a, b Range) int { + if r := ComparePosition(a.Start, b.Start); r != 0 { + return r + } + return ComparePosition(a.End, b.End) +} + +// ComparePosition returns -1 if a is before b, 0 if a == b, and 1 if a is after b. +func ComparePosition(a, b Position) int { + if a.Line != b.Line { + if a.Line < b.Line { + return -1 + } else { + return +1 + } + } + if a.Character != b.Character { + if a.Character < b.Character { + return -1 + } else { + return +1 + } + } + return 0 +} + +func Intersect(a, b Range) bool { + if a.Start.Line > b.End.Line || a.End.Line < b.Start.Line { + return false + } + return !((a.Start.Line == b.End.Line) && a.Start.Character > b.End.Character || + (a.End.Line == b.Start.Line) && a.End.Character < b.Start.Character) +} + +// Format implements fmt.Formatter. +// +// Note: Formatter is implemented instead of Stringer (presumably) for +// performance reasons, though it is not clear that it matters in practice. +func (r Range) Format(f fmt.State, _ rune) { + fmt.Fprintf(f, "%v-%v", r.Start, r.End) +} + +// Format implements fmt.Formatter. +// +// See Range.Format for discussion of why the Formatter interface is +// implemented rather than Stringer. +func (p Position) Format(f fmt.State, _ rune) { + fmt.Fprintf(f, "%v:%v", p.Line, p.Character) +} + +// -- implementation helpers -- + +// UTF16Len returns the number of codes in the UTF-16 transcoding of s. +func UTF16Len(s []byte) int { + var n int + for len(s) > 0 { + n++ + + // Fast path for ASCII. 
+ if s[0] < 0x80 { + s = s[1:] + continue + } + + r, size := utf8.DecodeRune(s) + if r >= 0x10000 { + n++ // surrogate pair + } + s = s[size:] + } + return n +} diff --git a/gopls/internal/lsp/protocol/tsclient.go b/gopls/internal/protocol/tsclient.go similarity index 88% rename from gopls/internal/lsp/protocol/tsclient.go rename to gopls/internal/protocol/tsclient.go index 85fd60c0133..6305d766ed3 100644 --- a/gopls/internal/lsp/protocol/tsclient.go +++ b/gopls/internal/protocol/tsclient.go @@ -6,13 +6,12 @@ package protocol -// Code generated from protocol/metaModel.json at ref release/protocol/3.17.4-next.2 (hash 184c8a7f010d335582f24337fe182baa6f2fccdd). -// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.4-next.2/protocol/metaModel.json +// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.2 (hash 654dc9be6673c61476c28fda604406279c3258d7). +// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.2/protocol/metaModel.json // LSP metaData.version = 3.17.0. 
import ( "context" - "encoding/json" "golang.org/x/tools/internal/jsonrpc2" ) @@ -33,6 +32,7 @@ type Client interface { CodeLensRefresh(context.Context) error // workspace/codeLens/refresh Configuration(context.Context, *ParamConfiguration) ([]LSPAny, error) // workspace/configuration DiagnosticRefresh(context.Context) error // workspace/diagnostic/refresh + FoldingRangeRefresh(context.Context) error // workspace/foldingRange/refresh InlayHintRefresh(context.Context) error // workspace/inlayHint/refresh InlineValueRefresh(context.Context) error // workspace/inlineValue/refresh SemanticTokensRefresh(context.Context) error // workspace/semanticTokens/refresh @@ -40,59 +40,67 @@ type Client interface { } func clientDispatch(ctx context.Context, client Client, reply jsonrpc2.Replier, r jsonrpc2.Request) (bool, error) { + defer recoverHandlerPanic(r.Method()) switch r.Method() { case "$/logTrace": var params LogTraceParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := client.LogTrace(ctx, ¶ms) return true, reply(ctx, nil, err) + case "$/progress": var params ProgressParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := client.Progress(ctx, ¶ms) return true, reply(ctx, nil, err) + case "client/registerCapability": var params RegistrationParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := client.RegisterCapability(ctx, ¶ms) return true, reply(ctx, nil, err) + case "client/unregisterCapability": var params UnregistrationParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := 
client.UnregisterCapability(ctx, ¶ms) return true, reply(ctx, nil, err) + case "telemetry/event": var params interface{} - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := client.Event(ctx, ¶ms) return true, reply(ctx, nil, err) + case "textDocument/publishDiagnostics": var params PublishDiagnosticsParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := client.PublishDiagnostics(ctx, ¶ms) return true, reply(ctx, nil, err) + case "window/logMessage": var params LogMessageParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := client.LogMessage(ctx, ¶ms) return true, reply(ctx, nil, err) + case "window/showDocument": var params ShowDocumentParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := client.ShowDocument(ctx, ¶ms) @@ -100,16 +108,18 @@ func clientDispatch(ctx context.Context, client Client, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "window/showMessage": var params ShowMessageParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := client.ShowMessage(ctx, ¶ms) return true, reply(ctx, nil, err) + case "window/showMessageRequest": var params ShowMessageRequestParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := client.ShowMessageRequest(ctx, ¶ms) @@ -117,16 +127,18 @@ func 
clientDispatch(ctx context.Context, client Client, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "window/workDoneProgress/create": var params WorkDoneProgressCreateParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := client.WorkDoneProgressCreate(ctx, ¶ms) return true, reply(ctx, nil, err) + case "workspace/applyEdit": var params ApplyWorkspaceEditParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := client.ApplyEdit(ctx, ¶ms) @@ -134,12 +146,14 @@ func clientDispatch(ctx context.Context, client Client, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "workspace/codeLens/refresh": err := client.CodeLensRefresh(ctx) return true, reply(ctx, nil, err) + case "workspace/configuration": var params ParamConfiguration - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := client.Configuration(ctx, ¶ms) @@ -147,24 +161,34 @@ func clientDispatch(ctx context.Context, client Client, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "workspace/diagnostic/refresh": err := client.DiagnosticRefresh(ctx) return true, reply(ctx, nil, err) + + case "workspace/foldingRange/refresh": + err := client.FoldingRangeRefresh(ctx) + return true, reply(ctx, nil, err) + case "workspace/inlayHint/refresh": err := client.InlayHintRefresh(ctx) return true, reply(ctx, nil, err) + case "workspace/inlineValue/refresh": err := client.InlineValueRefresh(ctx) return true, reply(ctx, nil, err) + case "workspace/semanticTokens/refresh": err := client.SemanticTokensRefresh(ctx) 
return true, reply(ctx, nil, err) + case "workspace/workspaceFolders": resp, err := client.WorkspaceFolders(ctx) if err != nil { return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + default: return false, nil } @@ -231,6 +255,9 @@ func (s *clientDispatcher) Configuration(ctx context.Context, params *ParamConfi func (s *clientDispatcher) DiagnosticRefresh(ctx context.Context) error { return s.sender.Call(ctx, "workspace/diagnostic/refresh", nil, nil) } +func (s *clientDispatcher) FoldingRangeRefresh(ctx context.Context) error { + return s.sender.Call(ctx, "workspace/foldingRange/refresh", nil, nil) +} func (s *clientDispatcher) InlayHintRefresh(ctx context.Context) error { return s.sender.Call(ctx, "workspace/inlayHint/refresh", nil, nil) } diff --git a/gopls/internal/lsp/protocol/tsdocument_changes.go b/gopls/internal/protocol/tsdocument_changes.go similarity index 100% rename from gopls/internal/lsp/protocol/tsdocument_changes.go rename to gopls/internal/protocol/tsdocument_changes.go diff --git a/gopls/internal/lsp/protocol/tsjson.go b/gopls/internal/protocol/tsjson.go similarity index 89% rename from gopls/internal/lsp/protocol/tsjson.go rename to gopls/internal/protocol/tsjson.go index 98010d8682e..7f77ffa999f 100644 --- a/gopls/internal/lsp/protocol/tsjson.go +++ b/gopls/internal/protocol/tsjson.go @@ -6,8 +6,8 @@ package protocol -// Code generated from protocol/metaModel.json at ref release/protocol/3.17.4-next.2 (hash 184c8a7f010d335582f24337fe182baa6f2fccdd). -// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.4-next.2/protocol/metaModel.json +// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.2 (hash 654dc9be6673c61476c28fda604406279c3258d7). +// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.2/protocol/metaModel.json // LSP metaData.version = 3.17.0. 
import "encoding/json" @@ -23,57 +23,54 @@ type UnmarshalError struct { func (e UnmarshalError) Error() string { return e.msg } - -// from line 4964 -func (t OrFEditRangePItemDefaults) MarshalJSON() ([]byte, error) { +func (t OrPLocation_workspace_symbol) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case FEditRangePItemDefaults: + case Location: return json.Marshal(x) - case Range: + case LocationUriOnly: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [FEditRangePItemDefaults Range]", t) + return nil, fmt.Errorf("type %T not one of [Location LocationUriOnly]", t) } -func (t *OrFEditRangePItemDefaults) UnmarshalJSON(x []byte) error { +func (t *OrPLocation_workspace_symbol) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 FEditRangePItemDefaults + var h0 Location if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil } - var h1 Range + var h1 LocationUriOnly if err := json.Unmarshal(x, &h1); err == nil { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [FEditRangePItemDefaults Range]"} + return &UnmarshalError{"unmarshal failed to match one of [Location LocationUriOnly]"} } -// from line 10165 -func (t OrFNotebookPNotebookSelector) MarshalJSON() ([]byte, error) { +func (t OrPSection_workspace_didChangeConfiguration) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case NotebookDocumentFilter: + case []string: return json.Marshal(x) case string: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t) + return nil, fmt.Errorf("type %T not one of [[]string string]", t) } -func (t *OrFNotebookPNotebookSelector) UnmarshalJSON(x []byte) error { +func (t *OrPSection_workspace_didChangeConfiguration) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 
NotebookDocumentFilter + var h0 []string if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil @@ -83,59 +80,57 @@ func (t *OrFNotebookPNotebookSelector) UnmarshalJSON(x []byte) error { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"} + return &UnmarshalError{"unmarshal failed to match one of [[]string string]"} } -// from line 5715 -func (t OrPLocation_workspace_symbol) MarshalJSON() ([]byte, error) { +func (t OrPTooltipPLabel) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case Location: + case MarkupContent: return json.Marshal(x) - case PLocationMsg_workspace_symbol: + case string: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [Location PLocationMsg_workspace_symbol]", t) + return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t) } -func (t *OrPLocation_workspace_symbol) UnmarshalJSON(x []byte) error { +func (t *OrPTooltipPLabel) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 Location + var h0 MarkupContent if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil } - var h1 PLocationMsg_workspace_symbol + var h1 string if err := json.Unmarshal(x, &h1); err == nil { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [Location PLocationMsg_workspace_symbol]"} + return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"} } -// from line 4358 -func (t OrPSection_workspace_didChangeConfiguration) MarshalJSON() ([]byte, error) { +func (t OrPTooltip_textDocument_inlayHint) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case []string: + case MarkupContent: return json.Marshal(x) case string: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [[]string string]", t) + return nil, fmt.Errorf("type %T not one of 
[MarkupContent string]", t) } -func (t *OrPSection_workspace_didChangeConfiguration) UnmarshalJSON(x []byte) error { +func (t *OrPTooltip_textDocument_inlayHint) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 []string + var h0 MarkupContent if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil @@ -145,28 +140,27 @@ func (t *OrPSection_workspace_didChangeConfiguration) UnmarshalJSON(x []byte) er t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [[]string string]"} + return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"} } -// from line 7311 -func (t OrPTooltipPLabel) MarshalJSON() ([]byte, error) { +func (t Or_CancelParams_id) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case MarkupContent: + case int32: return json.Marshal(x) case string: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t) + return nil, fmt.Errorf("type %T not one of [int32 string]", t) } -func (t *OrPTooltipPLabel) UnmarshalJSON(x []byte) error { +func (t *Or_CancelParams_id) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 MarkupContent + var h0 int32 if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil @@ -176,72 +170,99 @@ func (t *OrPTooltipPLabel) UnmarshalJSON(x []byte) error { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"} + return &UnmarshalError{"unmarshal failed to match one of [int32 string]"} } -// from line 3772 -func (t OrPTooltip_textDocument_inlayHint) MarshalJSON() ([]byte, error) { +func (t Or_ClientSemanticTokensRequestOptions_full) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case MarkupContent: + case ClientSemanticTokensRequestFullDelta: return json.Marshal(x) - case string: + case bool: return json.Marshal(x) 
case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t) + return nil, fmt.Errorf("type %T not one of [ClientSemanticTokensRequestFullDelta bool]", t) } -func (t *OrPTooltip_textDocument_inlayHint) UnmarshalJSON(x []byte) error { +func (t *Or_ClientSemanticTokensRequestOptions_full) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 MarkupContent + var h0 ClientSemanticTokensRequestFullDelta if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil } - var h1 string + var h1 bool if err := json.Unmarshal(x, &h1); err == nil { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"} + return &UnmarshalError{"unmarshal failed to match one of [ClientSemanticTokensRequestFullDelta bool]"} } -// from line 6420 -func (t Or_CancelParams_id) MarshalJSON() ([]byte, error) { +func (t Or_ClientSemanticTokensRequestOptions_range) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case int32: + case Lit_ClientSemanticTokensRequestOptions_range_Item1: return json.Marshal(x) - case string: + case bool: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [int32 string]", t) + return nil, fmt.Errorf("type %T not one of [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool]", t) } -func (t *Or_CancelParams_id) UnmarshalJSON(x []byte) error { +func (t *Or_ClientSemanticTokensRequestOptions_range) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 int32 + var h0 Lit_ClientSemanticTokensRequestOptions_range_Item1 if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil } - var h1 string + var h1 bool if err := json.Unmarshal(x, &h1); err == nil { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [int32 string]"} + return &UnmarshalError{"unmarshal failed to match one 
of [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool]"} +} + +func (t Or_CompletionItemDefaults_editRange) MarshalJSON() ([]byte, error) { + switch x := t.Value.(type) { + case EditRangeWithInsertReplace: + return json.Marshal(x) + case Range: + return json.Marshal(x) + case nil: + return []byte("null"), nil + } + return nil, fmt.Errorf("type %T not one of [EditRangeWithInsertReplace Range]", t) +} + +func (t *Or_CompletionItemDefaults_editRange) UnmarshalJSON(x []byte) error { + if string(x) == "null" { + t.Value = nil + return nil + } + var h0 EditRangeWithInsertReplace + if err := json.Unmarshal(x, &h0); err == nil { + t.Value = h0 + return nil + } + var h1 Range + if err := json.Unmarshal(x, &h1); err == nil { + t.Value = h1 + return nil + } + return &UnmarshalError{"unmarshal failed to match one of [EditRangeWithInsertReplace Range]"} } -// from line 4777 func (t Or_CompletionItem_documentation) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case MarkupContent: @@ -272,7 +293,6 @@ func (t *Or_CompletionItem_documentation) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"} } -// from line 4860 func (t Or_CompletionItem_textEdit) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case InsertReplaceEdit: @@ -303,7 +323,6 @@ func (t *Or_CompletionItem_textEdit) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [InsertReplaceEdit TextEdit]"} } -// from line 14168 func (t Or_Definition) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case Location: @@ -334,7 +353,6 @@ func (t *Or_Definition) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [Location []Location]"} } -// from line 8865 func (t Or_Diagnostic_code) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case int32: @@ -365,7 +383,6 @@ func (t *Or_Diagnostic_code) UnmarshalJSON(x []byte) error { return 
&UnmarshalError{"unmarshal failed to match one of [int32 string]"} } -// from line 14300 func (t Or_DocumentDiagnosticReport) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case RelatedFullDocumentDiagnosticReport: @@ -396,7 +413,6 @@ func (t *Or_DocumentDiagnosticReport) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport]"} } -// from line 3895 func (t Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case FullDocumentDiagnosticReport: @@ -427,7 +443,6 @@ func (t *Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value) Unmars return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"} } -// from line 14510 func (t Or_DocumentFilter) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case NotebookCellTextDocumentFilter: @@ -458,7 +473,36 @@ func (t *Or_DocumentFilter) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [NotebookCellTextDocumentFilter TextDocumentFilter]"} } -// from line 5086 +func (t Or_GlobPattern) MarshalJSON() ([]byte, error) { + switch x := t.Value.(type) { + case Pattern: + return json.Marshal(x) + case RelativePattern: + return json.Marshal(x) + case nil: + return []byte("null"), nil + } + return nil, fmt.Errorf("type %T not one of [Pattern RelativePattern]", t) +} + +func (t *Or_GlobPattern) UnmarshalJSON(x []byte) error { + if string(x) == "null" { + t.Value = nil + return nil + } + var h0 Pattern + if err := json.Unmarshal(x, &h0); err == nil { + t.Value = h0 + return nil + } + var h1 RelativePattern + if err := json.Unmarshal(x, &h1); err == nil { + t.Value = h1 + return nil + } + return &UnmarshalError{"unmarshal failed to match one of [Pattern RelativePattern]"} +} + func (t Or_Hover_contents) MarshalJSON() ([]byte, 
error) { switch x := t.Value.(type) { case MarkedString: @@ -496,7 +540,6 @@ func (t *Or_Hover_contents) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [MarkedString MarkupContent []MarkedString]"} } -// from line 3731 func (t Or_InlayHint_label) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case []InlayHintLabelPart: @@ -527,7 +570,6 @@ func (t *Or_InlayHint_label) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [[]InlayHintLabelPart string]"} } -// from line 4163 func (t Or_InlineCompletionItem_insertText) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case StringValue: @@ -558,7 +600,6 @@ func (t *Or_InlineCompletionItem_insertText) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [StringValue string]"} } -// from line 14278 func (t Or_InlineValue) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case InlineValueEvaluatableExpression: @@ -596,17 +637,16 @@ func (t *Or_InlineValue) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup]"} } -// from line 14475 func (t Or_MarkedString) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case Msg_MarkedString: + case MarkedStringWithLanguage: return json.Marshal(x) case string: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [Msg_MarkedString string]", t) + return nil, fmt.Errorf("type %T not one of [MarkedStringWithLanguage string]", t) } func (t *Or_MarkedString) UnmarshalJSON(x []byte) error { @@ -614,7 +654,7 @@ func (t *Or_MarkedString) UnmarshalJSON(x []byte) error { t.Value = nil return nil } - var h0 Msg_MarkedString + var h0 MarkedStringWithLanguage if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil @@ -624,10 +664,9 @@ func (t *Or_MarkedString) 
UnmarshalJSON(x []byte) error { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [Msg_MarkedString string]"} + return &UnmarshalError{"unmarshal failed to match one of [MarkedStringWithLanguage string]"} } -// from line 10472 func (t Or_NotebookCellTextDocumentFilter_notebook) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case NotebookDocumentFilter: @@ -658,265 +697,263 @@ func (t *Or_NotebookCellTextDocumentFilter_notebook) UnmarshalJSON(x []byte) err return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"} } -// from line 10211 -func (t Or_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1_notebook) MarshalJSON() ([]byte, error) { +func (t Or_NotebookDocumentFilter) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case NotebookDocumentFilter: + case NotebookDocumentFilterNotebookType: return json.Marshal(x) - case string: + case NotebookDocumentFilterPattern: + return json.Marshal(x) + case NotebookDocumentFilterScheme: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t) + return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme]", t) } -func (t *Or_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1_notebook) UnmarshalJSON(x []byte) error { +func (t *Or_NotebookDocumentFilter) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 NotebookDocumentFilter + var h0 NotebookDocumentFilterNotebookType if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil } - var h1 string + var h1 NotebookDocumentFilterPattern if err := json.Unmarshal(x, &h1); err == nil { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"} + var h2 NotebookDocumentFilterScheme + if err := 
json.Unmarshal(x, &h2); err == nil { + t.Value = h2 + return nil + } + return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme]"} } -// from line 7404 -func (t Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value) MarshalJSON() ([]byte, error) { +func (t Or_NotebookDocumentFilterWithCells_notebook) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case FullDocumentDiagnosticReport: + case NotebookDocumentFilter: return json.Marshal(x) - case UnchangedDocumentDiagnosticReport: + case string: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t) + return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t) } -func (t *Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value) UnmarshalJSON(x []byte) error { +func (t *Or_NotebookDocumentFilterWithCells_notebook) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 FullDocumentDiagnosticReport + var h0 NotebookDocumentFilter if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil } - var h1 UnchangedDocumentDiagnosticReport + var h1 string if err := json.Unmarshal(x, &h1); err == nil { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"} + return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"} } -// from line 7443 -func (t Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value) MarshalJSON() ([]byte, error) { +func (t Or_NotebookDocumentFilterWithNotebook_notebook) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case FullDocumentDiagnosticReport: + case NotebookDocumentFilter: return json.Marshal(x) - case UnchangedDocumentDiagnosticReport: + 
case string: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t) + return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t) } -func (t *Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value) UnmarshalJSON(x []byte) error { +func (t *Or_NotebookDocumentFilterWithNotebook_notebook) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 FullDocumentDiagnosticReport + var h0 NotebookDocumentFilter if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil } - var h1 UnchangedDocumentDiagnosticReport + var h1 string if err := json.Unmarshal(x, &h1); err == nil { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"} + return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"} } -// from line 11106 -func (t Or_RelativePattern_baseUri) MarshalJSON() ([]byte, error) { +func (t Or_NotebookDocumentSyncOptions_notebookSelector_Elem) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case URI: + case NotebookDocumentFilterWithCells: return json.Marshal(x) - case WorkspaceFolder: + case NotebookDocumentFilterWithNotebook: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [URI WorkspaceFolder]", t) + return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook]", t) } -func (t *Or_RelativePattern_baseUri) UnmarshalJSON(x []byte) error { +func (t *Or_NotebookDocumentSyncOptions_notebookSelector_Elem) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 URI + var h0 NotebookDocumentFilterWithCells if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil } - var h1 
WorkspaceFolder + var h1 NotebookDocumentFilterWithNotebook if err := json.Unmarshal(x, &h1); err == nil { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [URI WorkspaceFolder]"} + return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook]"} } -// from line 1413 -func (t Or_Result_textDocument_codeAction_Item0_Elem) MarshalJSON() ([]byte, error) { +func (t Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case CodeAction: + case FullDocumentDiagnosticReport: return json.Marshal(x) - case Command: + case UnchangedDocumentDiagnosticReport: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [CodeAction Command]", t) + return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t) } -func (t *Or_Result_textDocument_codeAction_Item0_Elem) UnmarshalJSON(x []byte) error { +func (t *Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 CodeAction + var h0 FullDocumentDiagnosticReport if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil } - var h1 Command + var h1 UnchangedDocumentDiagnosticReport if err := json.Unmarshal(x, &h1); err == nil { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [CodeAction Command]"} + return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"} } -// from line 980 -func (t Or_Result_textDocument_inlineCompletion) MarshalJSON() ([]byte, error) { +func (t Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case InlineCompletionList: + case FullDocumentDiagnosticReport: 
return json.Marshal(x) - case []InlineCompletionItem: + case UnchangedDocumentDiagnosticReport: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [InlineCompletionList []InlineCompletionItem]", t) + return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t) } -func (t *Or_Result_textDocument_inlineCompletion) UnmarshalJSON(x []byte) error { +func (t *Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 InlineCompletionList + var h0 FullDocumentDiagnosticReport if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil } - var h1 []InlineCompletionItem + var h1 UnchangedDocumentDiagnosticReport if err := json.Unmarshal(x, &h1); err == nil { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [InlineCompletionList []InlineCompletionItem]"} + return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"} } -// from line 12573 -func (t Or_SemanticTokensClientCapabilities_requests_full) MarshalJSON() ([]byte, error) { +func (t Or_Result_textDocument_codeAction_Item0_Elem) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case FFullPRequests: + case CodeAction: return json.Marshal(x) - case bool: + case Command: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [FFullPRequests bool]", t) + return nil, fmt.Errorf("type %T not one of [CodeAction Command]", t) } -func (t *Or_SemanticTokensClientCapabilities_requests_full) UnmarshalJSON(x []byte) error { +func (t *Or_Result_textDocument_codeAction_Item0_Elem) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 FFullPRequests + var h0 CodeAction if err := json.Unmarshal(x, &h0); err == nil { 
t.Value = h0 return nil } - var h1 bool + var h1 Command if err := json.Unmarshal(x, &h1); err == nil { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [FFullPRequests bool]"} + return &UnmarshalError{"unmarshal failed to match one of [CodeAction Command]"} } -// from line 12553 -func (t Or_SemanticTokensClientCapabilities_requests_range) MarshalJSON() ([]byte, error) { +func (t Or_Result_textDocument_inlineCompletion) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case FRangePRequests: + case InlineCompletionList: return json.Marshal(x) - case bool: + case []InlineCompletionItem: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [FRangePRequests bool]", t) + return nil, fmt.Errorf("type %T not one of [InlineCompletionList []InlineCompletionItem]", t) } -func (t *Or_SemanticTokensClientCapabilities_requests_range) UnmarshalJSON(x []byte) error { +func (t *Or_Result_textDocument_inlineCompletion) UnmarshalJSON(x []byte) error { if string(x) == "null" { t.Value = nil return nil } - var h0 FRangePRequests + var h0 InlineCompletionList if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil } - var h1 bool + var h1 []InlineCompletionItem if err := json.Unmarshal(x, &h1); err == nil { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [FRangePRequests bool]"} + return &UnmarshalError{"unmarshal failed to match one of [InlineCompletionList []InlineCompletionItem]"} } -// from line 6815 func (t Or_SemanticTokensOptions_full) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { - case PFullESemanticTokensOptions: + case SemanticTokensFullDelta: return json.Marshal(x) case bool: return json.Marshal(x) case nil: return []byte("null"), nil } - return nil, fmt.Errorf("type %T not one of [PFullESemanticTokensOptions bool]", t) + return nil, fmt.Errorf("type %T not one of [SemanticTokensFullDelta bool]", t) } func 
(t *Or_SemanticTokensOptions_full) UnmarshalJSON(x []byte) error { @@ -924,7 +961,7 @@ func (t *Or_SemanticTokensOptions_full) UnmarshalJSON(x []byte) error { t.Value = nil return nil } - var h0 PFullESemanticTokensOptions + var h0 SemanticTokensFullDelta if err := json.Unmarshal(x, &h0); err == nil { t.Value = h0 return nil @@ -934,10 +971,9 @@ func (t *Or_SemanticTokensOptions_full) UnmarshalJSON(x []byte) error { t.Value = h1 return nil } - return &UnmarshalError{"unmarshal failed to match one of [PFullESemanticTokensOptions bool]"} + return &UnmarshalError{"unmarshal failed to match one of [SemanticTokensFullDelta bool]"} } -// from line 6795 func (t Or_SemanticTokensOptions_range) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case PRangeESemanticTokensOptions: @@ -968,7 +1004,6 @@ func (t *Or_SemanticTokensOptions_range) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [PRangeESemanticTokensOptions bool]"} } -// from line 8525 func (t Or_ServerCapabilities_callHierarchyProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case CallHierarchyOptions: @@ -1006,7 +1041,6 @@ func (t *Or_ServerCapabilities_callHierarchyProvider) UnmarshalJSON(x []byte) er return &UnmarshalError{"unmarshal failed to match one of [CallHierarchyOptions CallHierarchyRegistrationOptions bool]"} } -// from line 8333 func (t Or_ServerCapabilities_codeActionProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case CodeActionOptions: @@ -1037,7 +1071,6 @@ func (t *Or_ServerCapabilities_codeActionProvider) UnmarshalJSON(x []byte) error return &UnmarshalError{"unmarshal failed to match one of [CodeActionOptions bool]"} } -// from line 8369 func (t Or_ServerCapabilities_colorProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case DocumentColorOptions: @@ -1075,7 +1108,6 @@ func (t *Or_ServerCapabilities_colorProvider) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal 
failed to match one of [DocumentColorOptions DocumentColorRegistrationOptions bool]"} } -// from line 8195 func (t Or_ServerCapabilities_declarationProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case DeclarationOptions: @@ -1113,7 +1145,6 @@ func (t *Or_ServerCapabilities_declarationProvider) UnmarshalJSON(x []byte) erro return &UnmarshalError{"unmarshal failed to match one of [DeclarationOptions DeclarationRegistrationOptions bool]"} } -// from line 8217 func (t Or_ServerCapabilities_definitionProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case DefinitionOptions: @@ -1144,7 +1175,6 @@ func (t *Or_ServerCapabilities_definitionProvider) UnmarshalJSON(x []byte) error return &UnmarshalError{"unmarshal failed to match one of [DefinitionOptions bool]"} } -// from line 8682 func (t Or_ServerCapabilities_diagnosticProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case DiagnosticOptions: @@ -1175,7 +1205,6 @@ func (t *Or_ServerCapabilities_diagnosticProvider) UnmarshalJSON(x []byte) error return &UnmarshalError{"unmarshal failed to match one of [DiagnosticOptions DiagnosticRegistrationOptions]"} } -// from line 8409 func (t Or_ServerCapabilities_documentFormattingProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case DocumentFormattingOptions: @@ -1206,7 +1235,6 @@ func (t *Or_ServerCapabilities_documentFormattingProvider) UnmarshalJSON(x []byt return &UnmarshalError{"unmarshal failed to match one of [DocumentFormattingOptions bool]"} } -// from line 8297 func (t Or_ServerCapabilities_documentHighlightProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case DocumentHighlightOptions: @@ -1237,7 +1265,6 @@ func (t *Or_ServerCapabilities_documentHighlightProvider) UnmarshalJSON(x []byte return &UnmarshalError{"unmarshal failed to match one of [DocumentHighlightOptions bool]"} } -// from line 8427 func (t Or_ServerCapabilities_documentRangeFormattingProvider) 
MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case DocumentRangeFormattingOptions: @@ -1268,7 +1295,6 @@ func (t *Or_ServerCapabilities_documentRangeFormattingProvider) UnmarshalJSON(x return &UnmarshalError{"unmarshal failed to match one of [DocumentRangeFormattingOptions bool]"} } -// from line 8315 func (t Or_ServerCapabilities_documentSymbolProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case DocumentSymbolOptions: @@ -1299,7 +1325,6 @@ func (t *Or_ServerCapabilities_documentSymbolProvider) UnmarshalJSON(x []byte) e return &UnmarshalError{"unmarshal failed to match one of [DocumentSymbolOptions bool]"} } -// from line 8472 func (t Or_ServerCapabilities_foldingRangeProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case FoldingRangeOptions: @@ -1337,7 +1362,6 @@ func (t *Or_ServerCapabilities_foldingRangeProvider) UnmarshalJSON(x []byte) err return &UnmarshalError{"unmarshal failed to match one of [FoldingRangeOptions FoldingRangeRegistrationOptions bool]"} } -// from line 8168 func (t Or_ServerCapabilities_hoverProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case HoverOptions: @@ -1368,7 +1392,6 @@ func (t *Or_ServerCapabilities_hoverProvider) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [HoverOptions bool]"} } -// from line 8257 func (t Or_ServerCapabilities_implementationProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case ImplementationOptions: @@ -1406,7 +1429,6 @@ func (t *Or_ServerCapabilities_implementationProvider) UnmarshalJSON(x []byte) e return &UnmarshalError{"unmarshal failed to match one of [ImplementationOptions ImplementationRegistrationOptions bool]"} } -// from line 8659 func (t Or_ServerCapabilities_inlayHintProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case InlayHintOptions: @@ -1444,7 +1466,6 @@ func (t *Or_ServerCapabilities_inlayHintProvider) UnmarshalJSON(x []byte) 
error return &UnmarshalError{"unmarshal failed to match one of [InlayHintOptions InlayHintRegistrationOptions bool]"} } -// from line 8701 func (t Or_ServerCapabilities_inlineCompletionProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case InlineCompletionOptions: @@ -1475,7 +1496,6 @@ func (t *Or_ServerCapabilities_inlineCompletionProvider) UnmarshalJSON(x []byte) return &UnmarshalError{"unmarshal failed to match one of [InlineCompletionOptions bool]"} } -// from line 8636 func (t Or_ServerCapabilities_inlineValueProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case InlineValueOptions: @@ -1513,7 +1533,6 @@ func (t *Or_ServerCapabilities_inlineValueProvider) UnmarshalJSON(x []byte) erro return &UnmarshalError{"unmarshal failed to match one of [InlineValueOptions InlineValueRegistrationOptions bool]"} } -// from line 8548 func (t Or_ServerCapabilities_linkedEditingRangeProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case LinkedEditingRangeOptions: @@ -1551,7 +1570,6 @@ func (t *Or_ServerCapabilities_linkedEditingRangeProvider) UnmarshalJSON(x []byt return &UnmarshalError{"unmarshal failed to match one of [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool]"} } -// from line 8590 func (t Or_ServerCapabilities_monikerProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case MonikerOptions: @@ -1589,7 +1607,6 @@ func (t *Or_ServerCapabilities_monikerProvider) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [MonikerOptions MonikerRegistrationOptions bool]"} } -// from line 8140 func (t Or_ServerCapabilities_notebookDocumentSync) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case NotebookDocumentSyncOptions: @@ -1620,7 +1637,6 @@ func (t *Or_ServerCapabilities_notebookDocumentSync) UnmarshalJSON(x []byte) err return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentSyncOptions 
NotebookDocumentSyncRegistrationOptions]"} } -// from line 8279 func (t Or_ServerCapabilities_referencesProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case ReferenceOptions: @@ -1651,7 +1667,6 @@ func (t *Or_ServerCapabilities_referencesProvider) UnmarshalJSON(x []byte) error return &UnmarshalError{"unmarshal failed to match one of [ReferenceOptions bool]"} } -// from line 8454 func (t Or_ServerCapabilities_renameProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case RenameOptions: @@ -1682,7 +1697,6 @@ func (t *Or_ServerCapabilities_renameProvider) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [RenameOptions bool]"} } -// from line 8494 func (t Or_ServerCapabilities_selectionRangeProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case SelectionRangeOptions: @@ -1720,7 +1734,6 @@ func (t *Or_ServerCapabilities_selectionRangeProvider) UnmarshalJSON(x []byte) e return &UnmarshalError{"unmarshal failed to match one of [SelectionRangeOptions SelectionRangeRegistrationOptions bool]"} } -// from line 8571 func (t Or_ServerCapabilities_semanticTokensProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case SemanticTokensOptions: @@ -1751,7 +1764,6 @@ func (t *Or_ServerCapabilities_semanticTokensProvider) UnmarshalJSON(x []byte) e return &UnmarshalError{"unmarshal failed to match one of [SemanticTokensOptions SemanticTokensRegistrationOptions]"} } -// from line 8122 func (t Or_ServerCapabilities_textDocumentSync) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case TextDocumentSyncKind: @@ -1782,7 +1794,6 @@ func (t *Or_ServerCapabilities_textDocumentSync) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [TextDocumentSyncKind TextDocumentSyncOptions]"} } -// from line 8235 func (t Or_ServerCapabilities_typeDefinitionProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case 
TypeDefinitionOptions: @@ -1820,7 +1831,6 @@ func (t *Or_ServerCapabilities_typeDefinitionProvider) UnmarshalJSON(x []byte) e return &UnmarshalError{"unmarshal failed to match one of [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool]"} } -// from line 8613 func (t Or_ServerCapabilities_typeHierarchyProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case TypeHierarchyOptions: @@ -1858,7 +1868,6 @@ func (t *Or_ServerCapabilities_typeHierarchyProvider) UnmarshalJSON(x []byte) er return &UnmarshalError{"unmarshal failed to match one of [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool]"} } -// from line 8391 func (t Or_ServerCapabilities_workspaceSymbolProvider) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case WorkspaceSymbolOptions: @@ -1889,7 +1898,6 @@ func (t *Or_ServerCapabilities_workspaceSymbolProvider) UnmarshalJSON(x []byte) return &UnmarshalError{"unmarshal failed to match one of [WorkspaceSymbolOptions bool]"} } -// from line 9159 func (t Or_SignatureInformation_documentation) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case MarkupContent: @@ -1920,7 +1928,6 @@ func (t *Or_SignatureInformation_documentation) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"} } -// from line 6928 func (t Or_TextDocumentEdit_edits_Elem) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case AnnotatedTextEdit: @@ -1951,7 +1958,43 @@ func (t *Or_TextDocumentEdit_edits_Elem) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [AnnotatedTextEdit TextEdit]"} } -// from line 10131 +func (t Or_TextDocumentFilter) MarshalJSON() ([]byte, error) { + switch x := t.Value.(type) { + case TextDocumentFilterLanguage: + return json.Marshal(x) + case TextDocumentFilterPattern: + return json.Marshal(x) + case TextDocumentFilterScheme: + return json.Marshal(x) + case nil: + return []byte("null"), nil + } + 
return nil, fmt.Errorf("type %T not one of [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme]", t) +} + +func (t *Or_TextDocumentFilter) UnmarshalJSON(x []byte) error { + if string(x) == "null" { + t.Value = nil + return nil + } + var h0 TextDocumentFilterLanguage + if err := json.Unmarshal(x, &h0); err == nil { + t.Value = h0 + return nil + } + var h1 TextDocumentFilterPattern + if err := json.Unmarshal(x, &h1); err == nil { + t.Value = h1 + return nil + } + var h2 TextDocumentFilterScheme + if err := json.Unmarshal(x, &h2); err == nil { + t.Value = h2 + return nil + } + return &UnmarshalError{"unmarshal failed to match one of [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme]"} +} + func (t Or_TextDocumentSyncOptions_save) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case SaveOptions: @@ -1982,7 +2025,6 @@ func (t *Or_TextDocumentSyncOptions_save) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [SaveOptions bool]"} } -// from line 14401 func (t Or_WorkspaceDocumentDiagnosticReport) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case WorkspaceFullDocumentDiagnosticReport: @@ -2013,7 +2055,6 @@ func (t *Or_WorkspaceDocumentDiagnosticReport) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport]"} } -// from line 3292 func (t Or_WorkspaceEdit_documentChanges_Elem) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case CreateFile: @@ -2058,7 +2099,6 @@ func (t *Or_WorkspaceEdit_documentChanges_Elem) UnmarshalJSON(x []byte) error { return &UnmarshalError{"unmarshal failed to match one of [CreateFile DeleteFile RenameFile TextDocumentEdit]"} } -// from line 248 func (t Or_textDocument_declaration) MarshalJSON() ([]byte, error) { switch x := t.Value.(type) { case Declaration: diff --git 
a/gopls/internal/lsp/protocol/tsprotocol.go b/gopls/internal/protocol/tsprotocol.go similarity index 90% rename from gopls/internal/lsp/protocol/tsprotocol.go rename to gopls/internal/protocol/tsprotocol.go index f571be379a8..6fcfee23d0e 100644 --- a/gopls/internal/lsp/protocol/tsprotocol.go +++ b/gopls/internal/protocol/tsprotocol.go @@ -6,8 +6,8 @@ package protocol -// Code generated from protocol/metaModel.json at ref release/protocol/3.17.4-next.2 (hash 184c8a7f010d335582f24337fe182baa6f2fccdd). -// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.4-next.2/protocol/metaModel.json +// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.2 (hash 654dc9be6673c61476c28fda604406279c3258d7). +// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.2/protocol/metaModel.json // LSP metaData.version = 3.17.0. import "encoding/json" @@ -17,7 +17,7 @@ import "encoding/json" // @since 3.16.0. type AnnotatedTextEdit struct { // The actual identifier of the change annotation - AnnotationID ChangeAnnotationIdentifier `json:"annotationId"` + AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"` TextEdit } @@ -182,7 +182,16 @@ type ChangeAnnotation struct { } // An identifier to refer to a change annotation stored with a workspace edit. -type ChangeAnnotationIdentifier = string // (alias) line 14391 +type ChangeAnnotationIdentifier = string // (alias) +// @since 3.18.0 +// @proposed +type ChangeAnnotationsSupportOptions struct { + // Whether the client groups edits with equal labels into tree nodes, + // for instance all edits labelled with "Changes in Strings" would + // be a tree node. + GroupsOnLabel bool `json:"groupsOnLabel,omitempty"` +} + // Defines the capabilities provided by the client. type ClientCapabilities struct { // Workspace specific client capabilities. 
@@ -203,6 +212,243 @@ type ClientCapabilities struct { Experimental interface{} `json:"experimental,omitempty"` } +// @since 3.18.0 +// @proposed +type ClientCodeActionKindOptions struct { + // The code action kind values the client supports. When this + // property exists the client also guarantees that it will + // handle values outside its set gracefully and falls back + // to a default value when unknown. + ValueSet []CodeActionKind `json:"valueSet"` +} + +// @since 3.18.0 +// @proposed +type ClientCodeActionLiteralOptions struct { + // The code action kind is support with the following value + // set. + CodeActionKind ClientCodeActionKindOptions `json:"codeActionKind"` +} + +// @since 3.18.0 +// @proposed +type ClientCodeActionResolveOptions struct { + // The properties that a client can resolve lazily. + Properties []string `json:"properties"` +} + +// @since 3.18.0 +// @proposed +type ClientCompletionItemInsertTextModeOptions struct { + ValueSet []InsertTextMode `json:"valueSet"` +} + +// @since 3.18.0 +// @proposed +type ClientCompletionItemOptions struct { + // Client supports snippets as insert text. + // + // A snippet can define tab stops and placeholders with `$1`, `$2` + // and `${3:foo}`. `$0` defines the final tab stop, it defaults to + // the end of the snippet. Placeholders with equal identifiers are linked, + // that is typing in one will update others too. + SnippetSupport bool `json:"snippetSupport,omitempty"` + // Client supports commit characters on a completion item. + CommitCharactersSupport bool `json:"commitCharactersSupport,omitempty"` + // Client supports the following content formats for the documentation + // property. The order describes the preferred format of the client. + DocumentationFormat []MarkupKind `json:"documentationFormat,omitempty"` + // Client supports the deprecated property on a completion item. 
+ DeprecatedSupport bool `json:"deprecatedSupport,omitempty"` + // Client supports the preselect property on a completion item. + PreselectSupport bool `json:"preselectSupport,omitempty"` + // Client supports the tag property on a completion item. Clients supporting + // tags have to handle unknown tags gracefully. Clients especially need to + // preserve unknown tags when sending a completion item back to the server in + // a resolve call. + // + // @since 3.15.0 + TagSupport *CompletionItemTagOptions `json:"tagSupport,omitempty"` + // Client support insert replace edit to control different behavior if a + // completion item is inserted in the text or should replace text. + // + // @since 3.16.0 + InsertReplaceSupport bool `json:"insertReplaceSupport,omitempty"` + // Indicates which properties a client can resolve lazily on a completion + // item. Before version 3.16.0 only the predefined properties `documentation` + // and `details` could be resolved lazily. + // + // @since 3.16.0 + ResolveSupport *ClientCompletionItemResolveOptions `json:"resolveSupport,omitempty"` + // The client supports the `insertTextMode` property on + // a completion item to override the whitespace handling mode + // as defined by the client (see `insertTextMode`). + // + // @since 3.16.0 + InsertTextModeSupport *ClientCompletionItemInsertTextModeOptions `json:"insertTextModeSupport,omitempty"` + // The client has support for completion item label + // details (see also `CompletionItemLabelDetails`). + // + // @since 3.17.0 + LabelDetailsSupport bool `json:"labelDetailsSupport,omitempty"` +} + +// @since 3.18.0 +// @proposed +type ClientCompletionItemOptionsKind struct { + // The completion item kind values the client supports. When this + // property exists the client also guarantees that it will + // handle values outside its set gracefully and falls back + // to a default value when unknown. 
+ // + // If this property is not present the client only supports + // the completion items kinds from `Text` to `Reference` as defined in + // the initial version of the protocol. + ValueSet []CompletionItemKind `json:"valueSet,omitempty"` +} + +// @since 3.18.0 +// @proposed +type ClientCompletionItemResolveOptions struct { + // The properties that a client can resolve lazily. + Properties []string `json:"properties"` +} + +// @since 3.18.0 +// @proposed +type ClientDiagnosticsTagOptions struct { + // The tags supported by the client. + ValueSet []DiagnosticTag `json:"valueSet"` +} + +// @since 3.18.0 +// @proposed +type ClientFoldingRangeKindOptions struct { + // The folding range kind values the client supports. When this + // property exists the client also guarantees that it will + // handle values outside its set gracefully and falls back + // to a default value when unknown. + ValueSet []FoldingRangeKind `json:"valueSet,omitempty"` +} + +// @since 3.18.0 +// @proposed +type ClientFoldingRangeOptions struct { + // If set, the client signals that it supports setting collapsedText on + // folding ranges to display custom labels instead of the default text. + // + // @since 3.17.0 + CollapsedText bool `json:"collapsedText,omitempty"` +} + +// Information about the client +// +// @since 3.15.0 +// @since 3.18.0 ClientInfo type name added. +// @proposed +type ClientInfo struct { + // The name of the client as defined by the client. + Name string `json:"name"` + // The client's version as defined by the client. + Version string `json:"version,omitempty"` +} + +// @since 3.18.0 +// @proposed +type ClientInlayHintResolveOptions struct { + // The properties that a client can resolve lazily. + Properties []string `json:"properties"` +} + +// @since 3.18.0 +// @proposed +type ClientSemanticTokensRequestFullDelta struct { + // The client will send the `textDocument/semanticTokens/full/delta` request if + // the server provides a corresponding handler. 
+ Delta bool `json:"delta,omitempty"` +} + +// @since 3.18.0 +// @proposed +type ClientSemanticTokensRequestOptions struct { + // The client will send the `textDocument/semanticTokens/range` request if + // the server provides a corresponding handler. + Range *Or_ClientSemanticTokensRequestOptions_range `json:"range,omitempty"` + // The client will send the `textDocument/semanticTokens/full` request if + // the server provides a corresponding handler. + Full *Or_ClientSemanticTokensRequestOptions_full `json:"full,omitempty"` +} + +// @since 3.18.0 +// @proposed +type ClientShowMessageActionItemOptions struct { + // Whether the client supports additional attributes which + // are preserved and send back to the server in the + // request's response. + AdditionalPropertiesSupport bool `json:"additionalPropertiesSupport,omitempty"` +} + +// @since 3.18.0 +// @proposed +type ClientSignatureInformationOptions struct { + // Client supports the following content formats for the documentation + // property. The order describes the preferred format of the client. + DocumentationFormat []MarkupKind `json:"documentationFormat,omitempty"` + // Client capabilities specific to parameter information. + ParameterInformation *ClientSignatureParameterInformationOptions `json:"parameterInformation,omitempty"` + // The client supports the `activeParameter` property on `SignatureInformation` + // literal. + // + // @since 3.16.0 + ActiveParameterSupport bool `json:"activeParameterSupport,omitempty"` + // The client supports the `activeParameter` property on + // `SignatureHelp`/`SignatureInformation` being set to `null` to + // indicate that no parameter should be active. + // + // @since 3.18.0 + // @proposed + NoActiveParameterSupport bool `json:"noActiveParameterSupport,omitempty"` +} + +// @since 3.18.0 +// @proposed +type ClientSignatureParameterInformationOptions struct { + // The client supports processing label offsets instead of a + // simple label string. 
+ // + // @since 3.14.0 + LabelOffsetSupport bool `json:"labelOffsetSupport,omitempty"` +} + +// @since 3.18.0 +// @proposed +type ClientSymbolKindOptions struct { + // The symbol kind values the client supports. When this + // property exists the client also guarantees that it will + // handle values outside its set gracefully and falls back + // to a default value when unknown. + // + // If this property is not present the client only supports + // the symbol kinds from `File` to `Array` as defined in + // the initial version of the protocol. + ValueSet []SymbolKind `json:"valueSet,omitempty"` +} + +// @since 3.18.0 +// @proposed +type ClientSymbolResolveOptions struct { + // The properties that a client can resolve lazily. Usually + // `location.range` + Properties []string `json:"properties"` +} + +// @since 3.18.0 +// @proposed +type ClientSymbolTagOptions struct { + // The tags supported by the client. + ValueSet []SymbolTag `json:"valueSet"` +} + // A code action represents a change that can be performed in code, e.g. to fix a problem or // to refactor code. // @@ -239,7 +485,7 @@ type CodeAction struct { // error message with `reason` in the editor. // // @since 3.16.0 - Disabled *PDisabledMsg_textDocument_codeAction `json:"disabled,omitempty"` + Disabled *CodeActionDisabled `json:"disabled,omitempty"` // The workspace edit this code action performs. Edit *WorkspaceEdit `json:"edit,omitempty"` // A command this code action executes. If a code action @@ -250,7 +496,7 @@ type CodeAction struct { // a `textDocument/codeAction` and a `codeAction/resolve` request. // // @since 3.16.0 - Data interface{} `json:"data,omitempty"` + Data *json.RawMessage `json:"data,omitempty"` } // The Client Capabilities of a {@link CodeActionRequest}. @@ -262,7 +508,7 @@ type CodeActionClientCapabilities struct { // set the request can only return `Command` literals. 
// // @since 3.8.0 - CodeActionLiteralSupport PCodeActionLiteralSupportPCodeAction `json:"codeActionLiteralSupport,omitempty"` + CodeActionLiteralSupport ClientCodeActionLiteralOptions `json:"codeActionLiteralSupport,omitempty"` // Whether code action supports the `isPreferred` property. // // @since 3.15.0 @@ -281,7 +527,7 @@ type CodeActionClientCapabilities struct { // properties via a separate `codeAction/resolve` request. // // @since 3.16.0 - ResolveSupport *PResolveSupportPCodeAction `json:"resolveSupport,omitempty"` + ResolveSupport *ClientCodeActionResolveOptions `json:"resolveSupport,omitempty"` // Whether the client honors the change annotations in // text edits and resource operations returned via the // `CodeAction#edit` property by for example presenting @@ -290,6 +536,12 @@ type CodeActionClientCapabilities struct { // // @since 3.16.0 HonorsChangeAnnotations bool `json:"honorsChangeAnnotations,omitempty"` + // Whether the client supports documentation for a class of + // code actions. + // + // @since 3.18.0 + // @proposed + DocumentationSupport bool `json:"documentationSupport,omitempty"` } // Contains additional diagnostic information about the context in which @@ -312,9 +564,37 @@ type CodeActionContext struct { TriggerKind *CodeActionTriggerKind `json:"triggerKind,omitempty"` } +// Captures why the code action is currently disabled. +// +// @since 3.18.0 +// @proposed +type CodeActionDisabled struct { + // Human readable description of why the code action is currently disabled. + // + // This is displayed in the code actions UI. + Reason string `json:"reason"` +} + // A set of predefined code action kinds type CodeActionKind string +// Documentation for a class of code actions. +// +// @since 3.18.0 +// @proposed +type CodeActionKindDocumentation struct { + // The kind of the code action being documented. 
+	//
+	// If the kind is generic, such as `CodeActionKind.Refactor`, the documentation will be shown whenever any
+	// refactorings are returned. If the kind is more specific, such as `CodeActionKind.RefactorExtract`, the
+	// documentation will only be shown when extract refactoring code actions are returned.
+	Kind CodeActionKind `json:"kind"`
+	// Command that is used to display the documentation to the user.
+	//
+	// The title of this documentation code action is taken from {@linkcode Command.title}
+	Command Command `json:"command"`
+}
+
 // Provider options for a {@link CodeActionRequest}.
 type CodeActionOptions struct {
 	// CodeActionKinds that this server may return.
@@ -322,6 +602,24 @@ type CodeActionOptions struct {
 	// The list of kinds may be generic, such as `CodeActionKind.Refactor`, or the server
 	// may list out every specific kind they provide.
 	CodeActionKinds []CodeActionKind `json:"codeActionKinds,omitempty"`
+	// Static documentation for a class of code actions.
+	//
+	// Documentation from the provider should be shown in the code actions menu if either:
+	//
+	//
+	// - Code actions of `kind` are requested by the editor. In this case, the editor will show the documentation that
+	//   most closely matches the requested code action kind. For example, if a provider has documentation for
+	//   both `Refactor` and `RefactorExtract`, when the user requests code actions for `RefactorExtract`,
+	//   the editor will use the documentation for `RefactorExtract` instead of the documentation for `Refactor`.
+	//
+	//
+	// - Any code actions of `kind` are returned by the provider.
+	//
+	// At most one documentation entry should be shown per provider.
+	//
+	// @since 3.18.0
+	// @proposed
+	Documentation []CodeActionKindDocumentation `json:"documentation,omitempty"`
 	// The server provides support to resolve additional
 	// information for a code action.
 	//
@@ -372,8 +670,7 @@ type CodeLens struct {
 	// The command this code lens represents.
Command *Command `json:"command,omitempty"` // A data entry field that is preserved on a code lens item between - // a {@link CodeLensRequest} and a [CodeLensResolveRequest] - // (#CodeLensResolveRequest) + // a {@link CodeLensRequest} and a {@link CodeLensResolveRequest} Data interface{} `json:"data,omitempty"` } @@ -468,6 +765,11 @@ type ColorPresentationParams struct { type Command struct { // Title of the command, like `save`. Title string `json:"title"` + // An optional tooltip. + // + // @since 3.18.0 + // @proposed + Tooltip string `json:"tooltip,omitempty"` // The identifier of the actual command handler. Command string `json:"command"` // Arguments that the command handler should be @@ -481,8 +783,8 @@ type CompletionClientCapabilities struct { DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // The client supports the following `CompletionItem` specific // capabilities. - CompletionItem PCompletionItemPCompletion `json:"completionItem,omitempty"` - CompletionItemKind *PCompletionItemKindPCompletion `json:"completionItemKind,omitempty"` + CompletionItem ClientCompletionItemOptions `json:"completionItem,omitempty"` + CompletionItemKind *ClientCompletionItemOptionsKind `json:"completionItemKind,omitempty"` // Defines how the client handles whitespace and indentation // when accepting a completion item that uses multi line // text in either `insertText` or `textEdit`. @@ -496,7 +798,7 @@ type CompletionClientCapabilities struct { // capabilities. // // @since 3.17.0 - CompletionList *PCompletionListPCompletion `json:"completionList,omitempty"` + CompletionList *CompletionListCapabilities `json:"completionList,omitempty"` } // Contains additional information about the context in which a completion request is triggered. @@ -630,16 +932,52 @@ type CompletionItem struct { Data interface{} `json:"data,omitempty"` } -// The kind of a completion entry. -type CompletionItemKind uint32 - -// Additional details for a completion item label. 
+// In many cases the items of an actual completion result share the same +// value for properties like `commitCharacters` or the range of a text +// edit. A completion list can therefore define item defaults which will +// be used if a completion item itself doesn't specify the value. +// +// If a completion list specifies a default value and a completion item +// also specifies a corresponding value the one from the item is used. +// +// Servers are only allowed to return default values if the client +// signals support for this via the `completionList.itemDefaults` +// capability. // // @since 3.17.0 -type CompletionItemLabelDetails struct { - // An optional string which is rendered less prominently directly after {@link CompletionItem.label label}, - // without any spacing. Should be used for function signatures and type annotations. - Detail string `json:"detail,omitempty"` +type CompletionItemDefaults struct { + // A default commit character set. + // + // @since 3.17.0 + CommitCharacters []string `json:"commitCharacters,omitempty"` + // A default edit range. + // + // @since 3.17.0 + EditRange *Or_CompletionItemDefaults_editRange `json:"editRange,omitempty"` + // A default insert text format. + // + // @since 3.17.0 + InsertTextFormat *InsertTextFormat `json:"insertTextFormat,omitempty"` + // A default insert text mode. + // + // @since 3.17.0 + InsertTextMode *InsertTextMode `json:"insertTextMode,omitempty"` + // A default data value. + // + // @since 3.17.0 + Data interface{} `json:"data,omitempty"` +} + +// The kind of a completion entry. +type CompletionItemKind uint32 + +// Additional details for a completion item label. +// +// @since 3.17.0 +type CompletionItemLabelDetails struct { + // An optional string which is rendered less prominently directly after {@link CompletionItem.label label}, + // without any spacing. Should be used for function signatures and type annotations. 
+ Detail string `json:"detail,omitempty"` // An optional string which is rendered less prominently after {@link CompletionItem.detail}. Should be used // for fully qualified names and file paths. Description string `json:"description,omitempty"` @@ -651,6 +989,13 @@ type CompletionItemLabelDetails struct { // @since 3.15.0 type CompletionItemTag uint32 +// @since 3.18.0 +// @proposed +type CompletionItemTagOptions struct { + // The tags supported by the client. + ValueSet []CompletionItemTag `json:"valueSet"` +} + // Represents a collection of {@link CompletionItem completion items} to be presented // in the editor. type CompletionList struct { @@ -672,11 +1017,27 @@ type CompletionList struct { // capability. // // @since 3.17.0 - ItemDefaults *PItemDefaultsMsg_textDocument_completion `json:"itemDefaults,omitempty"` + ItemDefaults *CompletionItemDefaults `json:"itemDefaults,omitempty"` // The completion items. Items []CompletionItem `json:"items"` } +// The client supports the following `CompletionList` specific +// capabilities. +// +// @since 3.17.0 +type CompletionListCapabilities struct { + // The client supports the following itemDefaults on + // a completion list. + // + // The value lists the supported property names of the + // `CompletionList.itemDefaults` object. If omitted + // no properties are supported. + // + // @since 3.17.0 + ItemDefaults []string `json:"itemDefaults,omitempty"` +} + // Completion options. type CompletionOptions struct { // Most tools trigger completion request automatically without explicitly requesting @@ -704,7 +1065,7 @@ type CompletionOptions struct { // capabilities. 
// // @since 3.17.0 - CompletionItem *PCompletionItemPCompletionProvider `json:"completionItem,omitempty"` + CompletionItem *ServerCompletionItemOptions `json:"completionItem,omitempty"` WorkDoneProgressOptions } @@ -728,7 +1089,7 @@ type CompletionRegistrationOptions struct { type CompletionTriggerKind uint32 type ConfigurationItem struct { // The scope to get the configuration section for. - ScopeURI string `json:"scopeUri,omitempty"` + ScopeURI *URI `json:"scopeUri,omitempty"` // The configuration section asked for. Section string `json:"section,omitempty"` } @@ -767,7 +1128,7 @@ type CreateFilesParams struct { } // The declaration of a symbol representation as one or many {@link Location locations}. -type Declaration = []Location // (alias) line 14248 +type Declaration = []Location // (alias) // @since 3.14.0 type DeclarationClientCapabilities struct { // Whether declaration supports dynamic registration. If this is set to `true` @@ -785,7 +1146,7 @@ type DeclarationClientCapabilities struct { // // Servers should prefer returning `DeclarationLink` over `Declaration` if supported // by the client. -type DeclarationLink = LocationLink // (alias) line 14268 +type DeclarationLink = LocationLink // (alias) type DeclarationOptions struct { WorkDoneProgressOptions } @@ -806,7 +1167,7 @@ type DeclarationRegistrationOptions struct { // // Servers should prefer returning `DefinitionLink` over `Definition` if supported // by the client. -type Definition = Or_Definition // (alias) line 14166 +type Definition = Or_Definition // (alias) // Client Capabilities for a {@link DefinitionRequest}. type DefinitionClientCapabilities struct { // Whether definition supports dynamic registration. 
@@ -821,7 +1182,7 @@ type DefinitionClientCapabilities struct { // // Provides additional metadata over normal {@link Location location} definitions, including the range of // the defining symbol -type DefinitionLink = LocationLink // (alias) line 14186 +type DefinitionLink = LocationLink // (alias) // Server Capabilities for a {@link DefinitionRequest}. type DefinitionOptions struct { WorkDoneProgressOptions @@ -1155,7 +1516,7 @@ type DocumentDiagnosticParams struct { WorkDoneProgressParams PartialResultParams } -type DocumentDiagnosticReport = Or_DocumentDiagnosticReport // (alias) line 13909 +type DocumentDiagnosticReport = Or_DocumentDiagnosticReport // (alias) // The document diagnostic report kinds. // // @since 3.17.0 @@ -1172,7 +1533,7 @@ type DocumentDiagnosticReportPartialResult struct { // a notebook cell document. // // @since 3.17.0 - proposed support for NotebookCellTextDocumentFilter. -type DocumentFilter = Or_DocumentFilter // (alias) line 14508 +type DocumentFilter = Or_DocumentFilter // (alias) // Client capabilities of a {@link DocumentFormattingRequest}. type DocumentFormattingClientCapabilities struct { // Whether formatting supports dynamic registration. @@ -1381,7 +1742,7 @@ type DocumentRangesFormattingParams struct { // @sample `let sel:DocumentSelector = [{ language: 'typescript' }, { language: 'json', pattern: '**∕tsconfig.json' }]`; // // The use of a string as a document filter is deprecated @since 3.16.0. -type DocumentSelector = []DocumentFilter // (alias) line 14363 +type DocumentSelector = []DocumentFilter // (alias) // Represents programming constructs like variables, classes, interfaces etc. // that appear in a document. 
Document symbols can be hierarchical and they // have two ranges: one that encloses its definition and one that points to @@ -1419,7 +1780,7 @@ type DocumentSymbolClientCapabilities struct { DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // Specific capabilities for the `SymbolKind` in the // `textDocument/documentSymbol` request. - SymbolKind *PSymbolKindPDocumentSymbol `json:"symbolKind,omitempty"` + SymbolKind *ClientSymbolKindOptions `json:"symbolKind,omitempty"` // The client supports hierarchical document symbols. HierarchicalDocumentSymbolSupport bool `json:"hierarchicalDocumentSymbolSupport,omitempty"` // The client supports tags on `SymbolInformation`. Tags are supported on @@ -1427,7 +1788,7 @@ type DocumentSymbolClientCapabilities struct { // Clients supporting tags have to handle unknown tags gracefully. // // @since 3.16.0 - TagSupport *PTagSupportPDocumentSymbol `json:"tagSupport,omitempty"` + TagSupport *ClientSymbolTagOptions `json:"tagSupport,omitempty"` // The client supports an additional label presented in the UI when // registering a document symbol provider. // @@ -1458,7 +1819,15 @@ type DocumentSymbolRegistrationOptions struct { TextDocumentRegistrationOptions DocumentSymbolOptions } -type DocumentURI string + +// Edit range variant that includes ranges for insert and replace operations. +// +// @since 3.18.0 +// @proposed +type EditRangeWithInsertReplace struct { + Insert Range `json:"insert"` + Replace Range `json:"replace"` +} // Predefined error codes. type ErrorCodes int32 @@ -1498,68 +1867,6 @@ type ExecutionSummary struct { // not if known by the client. Success bool `json:"success,omitempty"` } - -// created for Literal (Lit_CodeActionClientCapabilities_codeActionLiteralSupport_codeActionKind) -type FCodeActionKindPCodeActionLiteralSupport struct { - // The code action kind values the client supports. 
When this - // property exists the client also guarantees that it will - // handle values outside its set gracefully and falls back - // to a default value when unknown. - ValueSet []CodeActionKind `json:"valueSet"` -} - -// created for Literal (Lit_CompletionList_itemDefaults_editRange_Item1) -type FEditRangePItemDefaults struct { - Insert Range `json:"insert"` - Replace Range `json:"replace"` -} - -// created for Literal (Lit_SemanticTokensClientCapabilities_requests_full_Item1) -type FFullPRequests struct { - // The client will send the `textDocument/semanticTokens/full/delta` request if - // the server provides a corresponding handler. - Delta bool `json:"delta"` -} - -// created for Literal (Lit_CompletionClientCapabilities_completionItem_insertTextModeSupport) -type FInsertTextModeSupportPCompletionItem struct { - ValueSet []InsertTextMode `json:"valueSet"` -} - -// created for Literal (Lit_SignatureHelpClientCapabilities_signatureInformation_parameterInformation) -type FParameterInformationPSignatureInformation struct { - // The client supports processing label offsets instead of a - // simple label string. - // - // @since 3.14.0 - LabelOffsetSupport bool `json:"labelOffsetSupport,omitempty"` -} - -// created for Literal (Lit_SemanticTokensClientCapabilities_requests_range_Item1) -type FRangePRequests struct { -} - -// created for Literal (Lit_CompletionClientCapabilities_completionItem_resolveSupport) -type FResolveSupportPCompletionItem struct { - // The properties that a client can resolve lazily. - Properties []string `json:"properties"` -} - -// created for Literal (Lit_NotebookDocumentChangeEvent_cells_structure) -type FStructurePCells struct { - // The change to the cell array. - Array NotebookCellArrayChange `json:"array"` - // Additional opened cell text documents. - DidOpen []TextDocumentItem `json:"didOpen,omitempty"` - // Additional closed cell text documents. 
- DidClose []TextDocumentIdentifier `json:"didClose,omitempty"` -} - -// created for Literal (Lit_CompletionClientCapabilities_completionItem_tagSupport) -type FTagSupportPCompletionItem struct { - // The tags supported by the client. - ValueSet []CompletionItemTag `json:"valueSet"` -} type FailureHandlingKind string // The file event type @@ -1718,7 +2025,7 @@ type FoldingRange struct { EndLine uint32 `json:"endLine"` // The zero-based character offset before the folded range ends. If not defined, defaults to the length of the end line. EndCharacter uint32 `json:"endCharacter,omitempty"` - // Describes the kind of the folding range such as `comment' or 'region'. The kind + // Describes the kind of the folding range such as 'comment' or 'region'. The kind // is used to categorize folding ranges and used by commands like 'Fold all comments'. // See {@link FoldingRangeKind} for an enumeration of standardized kinds. Kind string `json:"kind,omitempty"` @@ -1746,11 +2053,11 @@ type FoldingRangeClientCapabilities struct { // Specific options for the folding range kind. // // @since 3.17.0 - FoldingRangeKind *PFoldingRangeKindPFoldingRange `json:"foldingRangeKind,omitempty"` + FoldingRangeKind *ClientFoldingRangeKindOptions `json:"foldingRangeKind,omitempty"` // Specific options for the folding range. // // @since 3.17.0 - FoldingRange *PFoldingRangePFoldingRange `json:"foldingRange,omitempty"` + FoldingRange *ClientFoldingRangeOptions `json:"foldingRange,omitempty"` } // A set of predefined range kinds. @@ -1772,6 +2079,24 @@ type FoldingRangeRegistrationOptions struct { StaticRegistrationOptions } +// Client workspace capabilities specific to folding ranges +// +// @since 3.18.0 +// @proposed +type FoldingRangeWorkspaceClientCapabilities struct { + // Whether the client implementation supports a refresh request sent from the + // server to the client. + // + // Note that this event is global and will force the client to refresh all + // folding ranges currently shown. 
It should be used with absolute care and is + // useful for situation where a server for example detects a project wide + // change that requires such a calculation. + // + // @since 3.18.0 + // @proposed + RefreshSupport bool `json:"refreshSupport,omitempty"` +} + // Value-object describing what options formatting should use. type FormattingOptions struct { // Size of a tab in spaces. @@ -1816,7 +2141,7 @@ type GeneralClientCapabilities struct { // anymore since the information is outdated). // // @since 3.17.0 - StaleRequestSupport *PStaleRequestSupportPGeneral `json:"staleRequestSupport,omitempty"` + StaleRequestSupport *StaleRequestSupportOptions `json:"staleRequestSupport,omitempty"` // Client capabilities specific to regular expressions. // // @since 3.16.0 @@ -1849,7 +2174,7 @@ type GeneralClientCapabilities struct { // The glob pattern. Either a string pattern or a relative pattern. // // @since 3.17.0 -type GlobPattern = string // (alias) line 14542 +type GlobPattern = Or_GlobPattern // (alias) // The result of a hover request. type Hover struct { // The hover's content @@ -1929,7 +2254,7 @@ type InitializeResult struct { // Information about the server. // // @since 3.15.0 - ServerInfo *PServerInfoMsg_initialize `json:"serverInfo,omitempty"` + ServerInfo *ServerInfo `json:"serverInfo,omitempty"` } type InitializedParams struct { } @@ -1939,6 +2264,9 @@ type InitializedParams struct { // @since 3.17.0 type InlayHint struct { // The position of this hint. + // + // If multiple hints have the same position, they will be shown in the order + // they appear in the response. Position Position `json:"position"` // The label of this hint. A human readable string or an array of // InlayHintLabelPart label parts. @@ -1981,7 +2309,7 @@ type InlayHintClientCapabilities struct { DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // Indicates which properties a client can resolve lazily on an inlay // hint. 
- ResolveSupport *PResolveSupportPInlayHint `json:"resolveSupport,omitempty"` + ResolveSupport *ClientInlayHintResolveOptions `json:"resolveSupport,omitempty"` } // Inlay hint kinds. @@ -2152,7 +2480,7 @@ type InlineCompletionTriggerKind uint32 // The InlineValue types combines all inline value types into one type. // // @since 3.17.0 -type InlineValue = Or_InlineValue // (alias) line 14276 +type InlineValue = Or_InlineValue // (alias) // Client capabilities specific to inline values. // // @since 3.17.0 @@ -2277,12 +2605,17 @@ type LSPAny = interface{} // LSP arrays. // @since 3.17.0 -type LSPArray = []interface{} // (alias) line 14194 +type LSPArray = []interface{} // (alias) type LSPErrorCodes int32 // LSP object definition. // @since 3.17.0 -type LSPObject = map[string]LSPAny // (alias) line 14526 +type LSPObject = map[string]LSPAny // (alias) +// Predefined Language kinds +// @since 3.18.0 +// @proposed +type LanguageKind string + // Client capabilities for the linked editing range request. // // @since 3.16.0 @@ -2318,71 +2651,8 @@ type LinkedEditingRanges struct { WordPattern string `json:"wordPattern,omitempty"` } -// created for Literal (Lit_NotebookDocumentChangeEvent_cells_textContent_Elem) -type Lit_NotebookDocumentChangeEvent_cells_textContent_Elem struct { - Document VersionedTextDocumentIdentifier `json:"document"` - Changes []TextDocumentContentChangeEvent `json:"changes"` -} - -// created for Literal (Lit_NotebookDocumentFilter_Item1) -type Lit_NotebookDocumentFilter_Item1 struct { - // The type of the enclosing notebook. - NotebookType string `json:"notebookType,omitempty"` - // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. - Scheme string `json:"scheme"` - // A glob pattern. - Pattern string `json:"pattern,omitempty"` -} - -// created for Literal (Lit_NotebookDocumentFilter_Item2) -type Lit_NotebookDocumentFilter_Item2 struct { - // The type of the enclosing notebook. 
- NotebookType string `json:"notebookType,omitempty"` - // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. - Scheme string `json:"scheme,omitempty"` - // A glob pattern. - Pattern string `json:"pattern"` -} - -// created for Literal (Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item0_cells_Elem) -type Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item0_cells_Elem struct { - Language string `json:"language"` -} - -// created for Literal (Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1) -type Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1 struct { - // The notebook to be synced If a string - // value is provided it matches against the - // notebook type. '*' matches every notebook. - Notebook *Or_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1_notebook `json:"notebook,omitempty"` - // The cells of the matching notebook to be synced. - Cells []Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1_cells_Elem `json:"cells"` -} - -// created for Literal (Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1_cells_Elem) -type Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1_cells_Elem struct { - Language string `json:"language"` -} - -// created for Literal (Lit_PrepareRenameResult_Item2) -type Lit_PrepareRenameResult_Item2 struct { - DefaultBehavior bool `json:"defaultBehavior"` -} - -// created for Literal (Lit_TextDocumentContentChangeEvent_Item1) -type Lit_TextDocumentContentChangeEvent_Item1 struct { - // The new text of the whole document. - Text string `json:"text"` -} - -// created for Literal (Lit_TextDocumentFilter_Item2) -type Lit_TextDocumentFilter_Item2 struct { - // A language id, like `typescript`. - Language string `json:"language,omitempty"` - // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. - Scheme string `json:"scheme,omitempty"` - // A glob pattern, like `*.{ts,js}`. 
- Pattern string `json:"pattern"` +// created for Literal (Lit_ClientSemanticTokensRequestOptions_range_Item1) +type Lit_ClientSemanticTokensRequestOptions_range_Item1 struct { } // Represents a location inside a resource, such as a line @@ -2411,6 +2681,14 @@ type LocationLink struct { TargetSelectionRange Range `json:"targetSelectionRange"` } +// Location with only uri and does not include range. +// +// @since 3.18.0 +// @proposed +type LocationUriOnly struct { + URI DocumentURI `json:"uri"` +} + // The log message parameters. type LogMessageParams struct { // The message type. See {@link MessageType} @@ -2450,7 +2728,15 @@ type MarkdownClientCapabilities struct { // // Note that markdown strings will be sanitized - that means html will be escaped. // @deprecated use MarkupContent instead. -type MarkedString = Or_MarkedString // (alias) line 14473 +type MarkedString = Or_MarkedString // (alias) +// @since 3.18.0 +// @proposed +// @deprecated use MarkupContent instead. +type MarkedStringWithLanguage struct { + Language string `json:"language"` + Value string `json:"value"` +} + // A `MarkupContent` literal represents a string value which content is interpreted base on its // kind flag. Currently the protocol supports `plaintext` and `markdown` as markup kinds. // @@ -2538,58 +2824,6 @@ type MonikerRegistrationOptions struct { MonikerOptions } -// created for Literal (Lit_MarkedString_Item1) -type Msg_MarkedString struct { - Language string `json:"language"` - Value string `json:"value"` -} - -// created for Literal (Lit_NotebookDocumentFilter_Item0) -type Msg_NotebookDocumentFilter struct { - // The type of the enclosing notebook. - NotebookType string `json:"notebookType"` - // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. - Scheme string `json:"scheme,omitempty"` - // A glob pattern. 
- Pattern string `json:"pattern,omitempty"` -} - -// created for Literal (Lit_PrepareRenameResult_Item1) -type Msg_PrepareRename2Gn struct { - Range Range `json:"range"` - Placeholder string `json:"placeholder"` -} - -// created for Literal (Lit_TextDocumentContentChangeEvent_Item0) -type Msg_TextDocumentContentChangeEvent struct { - // The range of the document that changed. - Range *Range `json:"range"` - // The optional length of the range that got replaced. - // - // @deprecated use range instead. - RangeLength uint32 `json:"rangeLength,omitempty"` - // The new text for the provided range. - Text string `json:"text"` -} - -// created for Literal (Lit_TextDocumentFilter_Item1) -type Msg_TextDocumentFilter struct { - // A language id, like `typescript`. - Language string `json:"language,omitempty"` - // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. - Scheme string `json:"scheme"` - // A glob pattern, like `*.{ts,js}`. - Pattern string `json:"pattern,omitempty"` -} - -// created for Literal (Lit__InitializeParams_clientInfo) -type Msg_XInitializeParams_clientInfo struct { - // The name of the client as defined by the client. - Name string `json:"name"` - // The client's version as defined by the client. - Version string `json:"version,omitempty"` -} - // A notebook cell. // // A cell's document URI must be unique across ALL notebook @@ -2630,6 +2864,12 @@ type NotebookCellArrayChange struct { // @since 3.17.0 type NotebookCellKind uint32 +// @since 3.18.0 +// @proposed +type NotebookCellLanguage struct { + Language string `json:"language"` +} + // A notebook cell text document filter denotes a cell text // document by different properties. // @@ -2667,6 +2907,43 @@ type NotebookDocument struct { Cells []NotebookCell `json:"cells"` } +// Structural changes to cells in a notebook document. +// +// @since 3.18.0 +// @proposed +type NotebookDocumentCellChangeStructure struct { + // The change to the cell array. 
+ Array NotebookCellArrayChange `json:"array"` + // Additional opened cell text documents. + DidOpen []TextDocumentItem `json:"didOpen,omitempty"` + // Additional closed cell text documents. + DidClose []TextDocumentIdentifier `json:"didClose,omitempty"` +} + +// Cell changes to a notebook document. +// +// @since 3.18.0 +// @proposed +type NotebookDocumentCellChanges struct { + // Changes to the cell structure to add or + // remove cells. + Structure *NotebookDocumentCellChangeStructure `json:"structure,omitempty"` + // Changes to notebook cells properties like its + // kind, execution summary or metadata. + Data []NotebookCell `json:"data,omitempty"` + // Changes to the text content of notebook cells. + TextContent []NotebookDocumentCellContentChanges `json:"textContent,omitempty"` +} + +// Content changes to a cell in a notebook document. +// +// @since 3.18.0 +// @proposed +type NotebookDocumentCellContentChanges struct { + Document VersionedTextDocumentIdentifier `json:"document"` + Changes []TextDocumentContentChangeEvent `json:"changes"` +} + // A change event for a notebook document. // // @since 3.17.0 @@ -2676,7 +2953,7 @@ type NotebookDocumentChangeEvent struct { // Note: should always be an object literal (e.g. LSPObject) Metadata *LSPObject `json:"metadata,omitempty"` // Changes to cells - Cells *PCellsPChange `json:"cells,omitempty"` + Cells *NotebookDocumentCellChanges `json:"cells,omitempty"` } // Capabilities specific to the notebook document support. @@ -2694,16 +2971,77 @@ type NotebookDocumentClientCapabilities struct { // against the notebook's URI (same as with documents) // // @since 3.17.0 -type NotebookDocumentFilter = Msg_NotebookDocumentFilter // (alias) line 14669 -// A literal to identify a notebook document in the client. +type NotebookDocumentFilter = Or_NotebookDocumentFilter // (alias) +// A notebook document filter where `notebookType` is required field. 
// -// @since 3.17.0 -type NotebookDocumentIdentifier struct { - // The notebook document's uri. - URI URI `json:"uri"` -} - -// Notebook specific client capabilities. +// @since 3.18.0 +// @proposed +type NotebookDocumentFilterNotebookType struct { + // The type of the enclosing notebook. + NotebookType string `json:"notebookType"` + // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. + Scheme string `json:"scheme,omitempty"` + // A glob pattern. + Pattern string `json:"pattern,omitempty"` +} + +// A notebook document filter where `pattern` is required field. +// +// @since 3.18.0 +// @proposed +type NotebookDocumentFilterPattern struct { + // The type of the enclosing notebook. + NotebookType string `json:"notebookType,omitempty"` + // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. + Scheme string `json:"scheme,omitempty"` + // A glob pattern. + Pattern string `json:"pattern"` +} + +// A notebook document filter where `scheme` is required field. +// +// @since 3.18.0 +// @proposed +type NotebookDocumentFilterScheme struct { + // The type of the enclosing notebook. + NotebookType string `json:"notebookType,omitempty"` + // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. + Scheme string `json:"scheme"` + // A glob pattern. + Pattern string `json:"pattern,omitempty"` +} + +// @since 3.18.0 +// @proposed +type NotebookDocumentFilterWithCells struct { + // The notebook to be synced If a string + // value is provided it matches against the + // notebook type. '*' matches every notebook. + Notebook *Or_NotebookDocumentFilterWithCells_notebook `json:"notebook,omitempty"` + // The cells of the matching notebook to be synced. + Cells []NotebookCellLanguage `json:"cells"` +} + +// @since 3.18.0 +// @proposed +type NotebookDocumentFilterWithNotebook struct { + // The notebook to be synced If a string + // value is provided it matches against the + // notebook type. '*' matches every notebook. 
+ Notebook Or_NotebookDocumentFilterWithNotebook_notebook `json:"notebook"` + // The cells of the matching notebook to be synced. + Cells []NotebookCellLanguage `json:"cells,omitempty"` +} + +// A literal to identify a notebook document in the client. +// +// @since 3.17.0 +type NotebookDocumentIdentifier struct { + // The notebook document's uri. + URI URI `json:"uri"` +} + +// Notebook specific client capabilities. // // @since 3.17.0 type NotebookDocumentSyncClientCapabilities struct { @@ -2731,7 +3069,7 @@ type NotebookDocumentSyncClientCapabilities struct { // @since 3.17.0 type NotebookDocumentSyncOptions struct { // The notebooks to be synced - NotebookSelector []PNotebookSelectorPNotebookDocumentSync `json:"notebookSelector"` + NotebookSelector []Or_NotebookDocumentSyncOptions_notebookSelector_Elem `json:"notebookSelector"` // Whether save notification should be forwarded to // the server. Will only be honored if mode === `notebook`. Save bool `json:"save,omitempty"` @@ -2756,17 +3094,7 @@ type OptionalVersionedTextDocumentIdentifier struct { TextDocumentIdentifier } -// created for Or [FEditRangePItemDefaults Range] -type OrFEditRangePItemDefaults struct { - Value interface{} `json:"value"` -} - -// created for Or [NotebookDocumentFilter string] -type OrFNotebookPNotebookSelector struct { - Value interface{} `json:"value"` -} - -// created for Or [Location PLocationMsg_workspace_symbol] +// created for Or [Location LocationUriOnly] type OrPLocation_workspace_symbol struct { Value interface{} `json:"value"` } @@ -2791,6 +3119,21 @@ type Or_CancelParams_id struct { Value interface{} `json:"value"` } +// created for Or [ClientSemanticTokensRequestFullDelta bool] +type Or_ClientSemanticTokensRequestOptions_full struct { + Value interface{} `json:"value"` +} + +// created for Or [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool] +type Or_ClientSemanticTokensRequestOptions_range struct { + Value interface{} `json:"value"` +} + +// created for Or 
[EditRangeWithInsertReplace Range] +type Or_CompletionItemDefaults_editRange struct { + Value interface{} `json:"value"` +} + // created for Or [MarkupContent string] type Or_CompletionItem_documentation struct { Value interface{} `json:"value"` @@ -2826,6 +3169,11 @@ type Or_DocumentFilter struct { Value interface{} `json:"value"` } +// created for Or [Pattern RelativePattern] +type Or_GlobPattern struct { + Value interface{} `json:"value"` +} + // created for Or [MarkedString MarkupContent []MarkedString] type Or_Hover_contents struct { Value interface{} `json:"value"` @@ -2846,7 +3194,7 @@ type Or_InlineValue struct { Value interface{} `json:"value"` } -// created for Or [Msg_MarkedString string] +// created for Or [MarkedStringWithLanguage string] type Or_MarkedString struct { Value interface{} `json:"value"` } @@ -2856,47 +3204,47 @@ type Or_NotebookCellTextDocumentFilter_notebook struct { Value interface{} `json:"value"` } -// created for Or [NotebookDocumentFilter string] -type Or_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1_notebook struct { +// created for Or [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme] +type Or_NotebookDocumentFilter struct { Value interface{} `json:"value"` } -// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport] -type Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value struct { +// created for Or [NotebookDocumentFilter string] +type Or_NotebookDocumentFilterWithCells_notebook struct { Value interface{} `json:"value"` } -// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport] -type Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value struct { +// created for Or [NotebookDocumentFilter string] +type Or_NotebookDocumentFilterWithNotebook_notebook struct { Value interface{} `json:"value"` } -// created for Or [URI WorkspaceFolder] -type Or_RelativePattern_baseUri struct { +// created for Or 
[NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook] +type Or_NotebookDocumentSyncOptions_notebookSelector_Elem struct { Value interface{} `json:"value"` } -// created for Or [CodeAction Command] -type Or_Result_textDocument_codeAction_Item0_Elem struct { +// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport] +type Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value struct { Value interface{} `json:"value"` } -// created for Or [InlineCompletionList []InlineCompletionItem] -type Or_Result_textDocument_inlineCompletion struct { +// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport] +type Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value struct { Value interface{} `json:"value"` } -// created for Or [FFullPRequests bool] -type Or_SemanticTokensClientCapabilities_requests_full struct { +// created for Or [CodeAction Command] +type Or_Result_textDocument_codeAction_Item0_Elem struct { Value interface{} `json:"value"` } -// created for Or [FRangePRequests bool] -type Or_SemanticTokensClientCapabilities_requests_range struct { +// created for Or [InlineCompletionList []InlineCompletionItem] +type Or_Result_textDocument_inlineCompletion struct { Value interface{} `json:"value"` } -// created for Or [PFullESemanticTokensOptions bool] +// created for Or [SemanticTokensFullDelta bool] type Or_SemanticTokensOptions_full struct { Value interface{} `json:"value"` } @@ -3003,372 +3351,81 @@ type Or_ServerCapabilities_notebookDocumentSync struct { // created for Or [ReferenceOptions bool] type Or_ServerCapabilities_referencesProvider struct { - Value interface{} `json:"value"` -} - -// created for Or [RenameOptions bool] -type Or_ServerCapabilities_renameProvider struct { - Value interface{} `json:"value"` -} - -// created for Or [SelectionRangeOptions SelectionRangeRegistrationOptions bool] -type Or_ServerCapabilities_selectionRangeProvider struct { - Value interface{} 
`json:"value"` -} - -// created for Or [SemanticTokensOptions SemanticTokensRegistrationOptions] -type Or_ServerCapabilities_semanticTokensProvider struct { - Value interface{} `json:"value"` -} - -// created for Or [TextDocumentSyncKind TextDocumentSyncOptions] -type Or_ServerCapabilities_textDocumentSync struct { - Value interface{} `json:"value"` -} - -// created for Or [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool] -type Or_ServerCapabilities_typeDefinitionProvider struct { - Value interface{} `json:"value"` -} - -// created for Or [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool] -type Or_ServerCapabilities_typeHierarchyProvider struct { - Value interface{} `json:"value"` -} - -// created for Or [WorkspaceSymbolOptions bool] -type Or_ServerCapabilities_workspaceSymbolProvider struct { - Value interface{} `json:"value"` -} - -// created for Or [MarkupContent string] -type Or_SignatureInformation_documentation struct { - Value interface{} `json:"value"` -} - -// created for Or [AnnotatedTextEdit TextEdit] -type Or_TextDocumentEdit_edits_Elem struct { - Value interface{} `json:"value"` -} - -// created for Or [SaveOptions bool] -type Or_TextDocumentSyncOptions_save struct { - Value interface{} `json:"value"` -} - -// created for Or [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport] -type Or_WorkspaceDocumentDiagnosticReport struct { - Value interface{} `json:"value"` -} - -// created for Or [CreateFile DeleteFile RenameFile TextDocumentEdit] -type Or_WorkspaceEdit_documentChanges_Elem struct { - Value interface{} `json:"value"` -} - -// created for Or [Declaration []DeclarationLink] -type Or_textDocument_declaration struct { - Value interface{} `json:"value"` -} - -// created for Literal (Lit_NotebookDocumentChangeEvent_cells) -type PCellsPChange struct { - // Changes to the cell structure to add or - // remove cells. 
- Structure *FStructurePCells `json:"structure,omitempty"` - // Changes to notebook cells properties like its - // kind, execution summary or metadata. - Data []NotebookCell `json:"data,omitempty"` - // Changes to the text content of notebook cells. - TextContent []Lit_NotebookDocumentChangeEvent_cells_textContent_Elem `json:"textContent,omitempty"` -} - -// created for Literal (Lit_WorkspaceEditClientCapabilities_changeAnnotationSupport) -type PChangeAnnotationSupportPWorkspaceEdit struct { - // Whether the client groups edits with equal labels into tree nodes, - // for instance all edits labelled with "Changes in Strings" would - // be a tree node. - GroupsOnLabel bool `json:"groupsOnLabel,omitempty"` -} - -// created for Literal (Lit_CodeActionClientCapabilities_codeActionLiteralSupport) -type PCodeActionLiteralSupportPCodeAction struct { - // The code action kind is support with the following value - // set. - CodeActionKind FCodeActionKindPCodeActionLiteralSupport `json:"codeActionKind"` -} - -// created for Literal (Lit_CompletionClientCapabilities_completionItemKind) -type PCompletionItemKindPCompletion struct { - // The completion item kind values the client supports. When this - // property exists the client also guarantees that it will - // handle values outside its set gracefully and falls back - // to a default value when unknown. - // - // If this property is not present the client only supports - // the completion items kinds from `Text` to `Reference` as defined in - // the initial version of the protocol. - ValueSet []CompletionItemKind `json:"valueSet,omitempty"` -} - -// created for Literal (Lit_CompletionClientCapabilities_completionItem) -type PCompletionItemPCompletion struct { - // Client supports snippets as insert text. - // - // A snippet can define tab stops and placeholders with `$1`, `$2` - // and `${3:foo}`. `$0` defines the final tab stop, it defaults to - // the end of the snippet. 
Placeholders with equal identifiers are linked, - // that is typing in one will update others too. - SnippetSupport bool `json:"snippetSupport,omitempty"` - // Client supports commit characters on a completion item. - CommitCharactersSupport bool `json:"commitCharactersSupport,omitempty"` - // Client supports the following content formats for the documentation - // property. The order describes the preferred format of the client. - DocumentationFormat []MarkupKind `json:"documentationFormat,omitempty"` - // Client supports the deprecated property on a completion item. - DeprecatedSupport bool `json:"deprecatedSupport,omitempty"` - // Client supports the preselect property on a completion item. - PreselectSupport bool `json:"preselectSupport,omitempty"` - // Client supports the tag property on a completion item. Clients supporting - // tags have to handle unknown tags gracefully. Clients especially need to - // preserve unknown tags when sending a completion item back to the server in - // a resolve call. - // - // @since 3.15.0 - TagSupport FTagSupportPCompletionItem `json:"tagSupport"` - // Client support insert replace edit to control different behavior if a - // completion item is inserted in the text or should replace text. - // - // @since 3.16.0 - InsertReplaceSupport bool `json:"insertReplaceSupport,omitempty"` - // Indicates which properties a client can resolve lazily on a completion - // item. Before version 3.16.0 only the predefined properties `documentation` - // and `details` could be resolved lazily. - // - // @since 3.16.0 - ResolveSupport *FResolveSupportPCompletionItem `json:"resolveSupport,omitempty"` - // The client supports the `insertTextMode` property on - // a completion item to override the whitespace handling mode - // as defined by the client (see `insertTextMode`). 
- // - // @since 3.16.0 - InsertTextModeSupport *FInsertTextModeSupportPCompletionItem `json:"insertTextModeSupport,omitempty"` - // The client has support for completion item label - // details (see also `CompletionItemLabelDetails`). - // - // @since 3.17.0 - LabelDetailsSupport bool `json:"labelDetailsSupport,omitempty"` -} - -// created for Literal (Lit_CompletionOptions_completionItem) -type PCompletionItemPCompletionProvider struct { - // The server has support for completion item label - // details (see also `CompletionItemLabelDetails`) when - // receiving a completion item in a resolve call. - // - // @since 3.17.0 - LabelDetailsSupport bool `json:"labelDetailsSupport,omitempty"` -} - -// created for Literal (Lit_CompletionClientCapabilities_completionList) -type PCompletionListPCompletion struct { - // The client supports the following itemDefaults on - // a completion list. - // - // The value lists the supported property names of the - // `CompletionList.itemDefaults` object. If omitted - // no properties are supported. - // - // @since 3.17.0 - ItemDefaults []string `json:"itemDefaults,omitempty"` -} - -// created for Literal (Lit_CodeAction_disabled) -type PDisabledMsg_textDocument_codeAction struct { - // Human readable description of why the code action is currently disabled. - // - // This is displayed in the code actions UI. - Reason string `json:"reason"` -} - -// created for Literal (Lit_FoldingRangeClientCapabilities_foldingRangeKind) -type PFoldingRangeKindPFoldingRange struct { - // The folding range kind values the client supports. When this - // property exists the client also guarantees that it will - // handle values outside its set gracefully and falls back - // to a default value when unknown. 
- ValueSet []FoldingRangeKind `json:"valueSet,omitempty"` -} - -// created for Literal (Lit_FoldingRangeClientCapabilities_foldingRange) -type PFoldingRangePFoldingRange struct { - // If set, the client signals that it supports setting collapsedText on - // folding ranges to display custom labels instead of the default text. - // - // @since 3.17.0 - CollapsedText bool `json:"collapsedText,omitempty"` -} - -// created for Literal (Lit_SemanticTokensOptions_full_Item1) -type PFullESemanticTokensOptions struct { - // The server supports deltas for full documents. - Delta bool `json:"delta"` -} - -// created for Literal (Lit_CompletionList_itemDefaults) -type PItemDefaultsMsg_textDocument_completion struct { - // A default commit character set. - // - // @since 3.17.0 - CommitCharacters []string `json:"commitCharacters,omitempty"` - // A default edit range. - // - // @since 3.17.0 - EditRange *OrFEditRangePItemDefaults `json:"editRange,omitempty"` - // A default insert text format. - // - // @since 3.17.0 - InsertTextFormat *InsertTextFormat `json:"insertTextFormat,omitempty"` - // A default insert text mode. - // - // @since 3.17.0 - InsertTextMode *InsertTextMode `json:"insertTextMode,omitempty"` - // A default data value. - // - // @since 3.17.0 - Data interface{} `json:"data,omitempty"` -} - -// created for Literal (Lit_WorkspaceSymbol_location_Item1) -type PLocationMsg_workspace_symbol struct { - URI DocumentURI `json:"uri"` + Value interface{} `json:"value"` } -// created for Literal (Lit_ShowMessageRequestClientCapabilities_messageActionItem) -type PMessageActionItemPShowMessage struct { - // Whether the client supports additional attributes which - // are preserved and send back to the server in the - // request's response. 
- AdditionalPropertiesSupport bool `json:"additionalPropertiesSupport,omitempty"` +// created for Or [RenameOptions bool] +type Or_ServerCapabilities_renameProvider struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item0) -type PNotebookSelectorPNotebookDocumentSync struct { - // The notebook to be synced If a string - // value is provided it matches against the - // notebook type. '*' matches every notebook. - Notebook OrFNotebookPNotebookSelector `json:"notebook"` - // The cells of the matching notebook to be synced. - Cells []Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item0_cells_Elem `json:"cells,omitempty"` +// created for Or [SelectionRangeOptions SelectionRangeRegistrationOptions bool] +type Or_ServerCapabilities_selectionRangeProvider struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_SemanticTokensOptions_range_Item1) -type PRangeESemanticTokensOptions struct { +// created for Or [SemanticTokensOptions SemanticTokensRegistrationOptions] +type Or_ServerCapabilities_semanticTokensProvider struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_SemanticTokensClientCapabilities_requests) -type PRequestsPSemanticTokens struct { - // The client will send the `textDocument/semanticTokens/range` request if - // the server provides a corresponding handler. - Range Or_SemanticTokensClientCapabilities_requests_range `json:"range"` - // The client will send the `textDocument/semanticTokens/full` request if - // the server provides a corresponding handler. 
- Full Or_SemanticTokensClientCapabilities_requests_full `json:"full"` +// created for Or [TextDocumentSyncKind TextDocumentSyncOptions] +type Or_ServerCapabilities_textDocumentSync struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_CodeActionClientCapabilities_resolveSupport) -type PResolveSupportPCodeAction struct { - // The properties that a client can resolve lazily. - Properties []string `json:"properties"` +// created for Or [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool] +type Or_ServerCapabilities_typeDefinitionProvider struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_InlayHintClientCapabilities_resolveSupport) -type PResolveSupportPInlayHint struct { - // The properties that a client can resolve lazily. - Properties []string `json:"properties"` +// created for Or [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool] +type Or_ServerCapabilities_typeHierarchyProvider struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_WorkspaceSymbolClientCapabilities_resolveSupport) -type PResolveSupportPSymbol struct { - // The properties that a client can resolve lazily. Usually - // `location.range` - Properties []string `json:"properties"` +// created for Or [WorkspaceSymbolOptions bool] +type Or_ServerCapabilities_workspaceSymbolProvider struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_InitializeResult_serverInfo) -type PServerInfoMsg_initialize struct { - // The name of the server as defined by the server. - Name string `json:"name"` - // The server's version as defined by the server. 
- Version string `json:"version,omitempty"` +// created for Or [MarkupContent string] +type Or_SignatureInformation_documentation struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_SignatureHelpClientCapabilities_signatureInformation) -type PSignatureInformationPSignatureHelp struct { - // Client supports the following content formats for the documentation - // property. The order describes the preferred format of the client. - DocumentationFormat []MarkupKind `json:"documentationFormat,omitempty"` - // Client capabilities specific to parameter information. - ParameterInformation *FParameterInformationPSignatureInformation `json:"parameterInformation,omitempty"` - // The client supports the `activeParameter` property on `SignatureInformation` - // literal. - // - // @since 3.16.0 - ActiveParameterSupport bool `json:"activeParameterSupport,omitempty"` +// created for Or [AnnotatedTextEdit TextEdit] +type Or_TextDocumentEdit_edits_Elem struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_GeneralClientCapabilities_staleRequestSupport) -type PStaleRequestSupportPGeneral struct { - // The client will actively cancel the request. - Cancel bool `json:"cancel"` - // The list of requests for which the client - // will retry the request if it receives a - // response with error code `ContentModified` - RetryOnContentModified []string `json:"retryOnContentModified"` +// created for Or [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme] +type Or_TextDocumentFilter struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_DocumentSymbolClientCapabilities_symbolKind) -type PSymbolKindPDocumentSymbol struct { - // The symbol kind values the client supports. When this - // property exists the client also guarantees that it will - // handle values outside its set gracefully and falls back - // to a default value when unknown. 
- // - // If this property is not present the client only supports - // the symbol kinds from `File` to `Array` as defined in - // the initial version of the protocol. - ValueSet []SymbolKind `json:"valueSet,omitempty"` +// created for Or [SaveOptions bool] +type Or_TextDocumentSyncOptions_save struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_WorkspaceSymbolClientCapabilities_symbolKind) -type PSymbolKindPSymbol struct { - // The symbol kind values the client supports. When this - // property exists the client also guarantees that it will - // handle values outside its set gracefully and falls back - // to a default value when unknown. - // - // If this property is not present the client only supports - // the symbol kinds from `File` to `Array` as defined in - // the initial version of the protocol. - ValueSet []SymbolKind `json:"valueSet,omitempty"` +// created for Or [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport] +type Or_WorkspaceDocumentDiagnosticReport struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_DocumentSymbolClientCapabilities_tagSupport) -type PTagSupportPDocumentSymbol struct { - // The tags supported by the client. - ValueSet []SymbolTag `json:"valueSet"` +// created for Or [CreateFile DeleteFile RenameFile TextDocumentEdit] +type Or_WorkspaceEdit_documentChanges_Elem struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_PublishDiagnosticsClientCapabilities_tagSupport) -type PTagSupportPPublishDiagnostics struct { - // The tags supported by the client. - ValueSet []DiagnosticTag `json:"valueSet"` +// created for Or [Declaration []DeclarationLink] +type Or_textDocument_declaration struct { + Value interface{} `json:"value"` } -// created for Literal (Lit_WorkspaceSymbolClientCapabilities_tagSupport) -type PTagSupportPSymbol struct { - // The tags supported by the client. 
- ValueSet []SymbolTag `json:"valueSet"` +// created for Literal (Lit_SemanticTokensOptions_range_Item1) +type PRangeESemanticTokensOptions struct { } // The parameters of a configuration request. @@ -3389,6 +3446,10 @@ type ParameterInformation struct { // signature label. (see SignatureInformation.label). The offsets are based on a UTF-16 // string representation as `Position` and `Range` does. // + // To avoid ambiguities a server should use the [start, end] offset value instead of using + // a substring. Whether a client support this is controlled via `labelOffsetSupport` client + // capability. + // // *Note*: a label of type string should be a substring of its containing signature label. // Its intended use case is to highlight the parameter label part in the `SignatureInformation.label`. Label string `json:"label"` @@ -3412,7 +3473,7 @@ type PartialResultParams struct { // - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`) // // @since 3.17.0 -type Pattern = string // (alias) line 14778 +type Pattern = string // (alias) // Position in a text document expressed as zero-based line and character // offset. Prior to 3.17 the offsets were always based on a UTF-16 string // representation. So a string of the form `a𐐀b` the character offset of the @@ -3420,14 +3481,14 @@ type Pattern = string // (alias) line 14778 // offset of b is 3 since `𐐀` is represented using two code units in UTF-16. // Since 3.17 clients and servers can agree on a different string encoding // representation (e.g. UTF-8). The client announces it's supported encoding -// via the client capability [`general.positionEncodings`](#clientCapabilities). +// via the client capability [`general.positionEncodings`](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#clientCapabilities). 
// The value is an array of position encodings the client supports, with // decreasing preference (e.g. the encoding at index `0` is the most preferred // one). To stay backwards compatible the only mandatory encoding is UTF-16 // represented via the string `utf-16`. The server can pick one of the // encodings offered by the client and signals that encoding back to the // client via the initialize result's property -// [`capabilities.positionEncoding`](#serverCapabilities). If the string value +// [`capabilities.positionEncoding`](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#serverCapabilities). If the string value // `utf-16` is missing from the client's capability `general.positionEncodings` // servers can safely assume that the client supports UTF-16. If the server // omits the position encoding in its initialize result the encoding defaults @@ -3460,12 +3521,24 @@ type Position struct { // // @since 3.17.0 type PositionEncodingKind string -type PrepareRename2Gn = Msg_PrepareRename2Gn // (alias) line 13927 + +// @since 3.18.0 +// @proposed +type PrepareRenameDefaultBehavior struct { + DefaultBehavior bool `json:"defaultBehavior"` +} type PrepareRenameParams struct { TextDocumentPositionParams WorkDoneProgressParams } -type PrepareRenameResult = Msg_PrepareRename2Gn // (alias) line 13927 + +// @since 3.18.0 +// @proposed +type PrepareRenamePlaceholder struct { + Range Range `json:"range"` + Placeholder string `json:"placeholder"` +} +type PrepareRenameResult = PrepareRenamePlaceholder // (alias) type PrepareSupportDefaultBehavior uint32 // A previous result id in a workspace pull request. @@ -3495,7 +3568,7 @@ type ProgressParams struct { // The progress data. Value interface{} `json:"value"` } -type ProgressToken = interface{} // (alias) line 14375 +type ProgressToken = interface{} // (alias) // The publish diagnostic client capabilities. 
type PublishDiagnosticsClientCapabilities struct { // Whether the clients accepts diagnostics with related information. @@ -3504,7 +3577,7 @@ type PublishDiagnosticsClientCapabilities struct { // Clients supporting tags have to handle unknown tags gracefully. // // @since 3.15.0 - TagSupport *PTagSupportPPublishDiagnostics `json:"tagSupport,omitempty"` + TagSupport *ClientDiagnosticsTagOptions `json:"tagSupport,omitempty"` // Whether the client interprets the version property of the // `textDocument/publishDiagnostics` notification's parameter. // @@ -3599,13 +3672,13 @@ type Registration struct { type RegistrationParams struct { Registrations []Registration `json:"registrations"` } - +type RegularExpressionEngineKind = string // (alias) // Client capabilities specific to regular expressions. // // @since 3.16.0 type RegularExpressionsClientCapabilities struct { // The engine's name. - Engine string `json:"engine"` + Engine RegularExpressionEngineKind `json:"engine"` // The engine's version. Version string `json:"version,omitempty"` } @@ -3648,7 +3721,7 @@ type RelatedUnchangedDocumentDiagnosticReport struct { type RelativePattern struct { // A workspace folder or a base URI to which this pattern will be matched // against relatively. - BaseURI Or_RelativePattern_baseUri `json:"baseUri"` + BaseURI DocumentURI `json:"baseUri"` // The actual glob pattern; Pattern Pattern `json:"pattern"` } @@ -3836,7 +3909,7 @@ type SemanticTokensClientCapabilities struct { // `request.range` are both set to true but the server only provides a // range provider the client might not render a minimap correctly or might // even decide to not show any semantic tokens at all. - Requests PRequestsPSemanticTokens `json:"requests"` + Requests ClientSemanticTokensRequestOptions `json:"requests"` // The token types that the client supports. TokenTypes []string `json:"tokenTypes"` // The token modifiers that the client supports. 
@@ -3900,6 +3973,15 @@ type SemanticTokensEdit struct { Data []uint32 `json:"data,omitempty"` } +// Semantic tokens options to support deltas for full documents +// +// @since 3.18.0 +// @proposed +type SemanticTokensFullDelta struct { + // The server supports deltas for full documents. + Delta bool `json:"delta,omitempty"` +} + // @since 3.16.0 type SemanticTokensLegend struct { // The token types a server uses. @@ -4069,12 +4151,35 @@ type ServerCapabilities struct { // @proposed InlineCompletionProvider *Or_ServerCapabilities_inlineCompletionProvider `json:"inlineCompletionProvider,omitempty"` // Workspace specific server capabilities. - Workspace *Workspace6Gn `json:"workspace,omitempty"` + Workspace *WorkspaceOptions `json:"workspace,omitempty"` // Experimental server capabilities. Experimental interface{} `json:"experimental,omitempty"` } + +// @since 3.18.0 +// @proposed +type ServerCompletionItemOptions struct { + // The server has support for completion item label + // details (see also `CompletionItemLabelDetails`) when + // receiving a completion item in a resolve call. + // + // @since 3.17.0 + LabelDetailsSupport bool `json:"labelDetailsSupport,omitempty"` +} + +// Information about the server +// +// @since 3.15.0 +// @since 3.18.0 ServerInfo type name added. +// @proposed +type ServerInfo struct { + // The name of the server as defined by the server. + Name string `json:"name"` + // The server's version as defined by the server. + Version string `json:"version,omitempty"` +} type SetTraceParams struct { - Value TraceValues `json:"value"` + Value TraceValue `json:"value"` } // Client capabilities for the showDocument request. @@ -4127,7 +4232,7 @@ type ShowMessageParams struct { // Show message request client capabilities type ShowMessageRequestClientCapabilities struct { // Capabilities specific to the `MessageActionItem` type. 
- MessageActionItem *PMessageActionItemPShowMessage `json:"messageActionItem,omitempty"` + MessageActionItem *ClientShowMessageActionItemOptions `json:"messageActionItem,omitempty"` } type ShowMessageRequestParams struct { // The message type. See {@link MessageType} @@ -4154,13 +4259,22 @@ type SignatureHelp struct { // In future version of the protocol this property might become // mandatory to better express this. ActiveSignature uint32 `json:"activeSignature,omitempty"` - // The active parameter of the active signature. If omitted or the value - // lies outside the range of `signatures[activeSignature].parameters` - // defaults to 0 if the active signature has parameters. If - // the active signature has no parameters it is ignored. + // The active parameter of the active signature. + // + // If `null`, no parameter of the signature is active (for example a named + // argument that does not match any declared parameters). This is only valid + // if the client specifies the client capability + // `textDocument.signatureHelp.noActiveParameterSupport === true` + // + // If omitted or the value lies outside the range of + // `signatures[activeSignature].parameters` defaults to 0 if the active + // signature has parameters. + // + // If the active signature has no parameters it is ignored. + // // In future version of the protocol this property might become - // mandatory to better express the active parameter if the - // active signature does have any. + // mandatory (but still nullable) to better express the active parameter if + // the active signature does have any. ActiveParameter uint32 `json:"activeParameter,omitempty"` } @@ -4170,7 +4284,7 @@ type SignatureHelpClientCapabilities struct { DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // The client supports the following `SignatureInformation` // specific properties. 
- SignatureInformation *PSignatureInformationPSignatureHelp `json:"signatureInformation,omitempty"` + SignatureInformation *ClientSignatureInformationOptions `json:"signatureInformation,omitempty"` // The client supports to send additional context information for a // `textDocument/signatureHelp` request. A client that opts into // contextSupport will also support the `retriggerCharacters` on @@ -4252,12 +4366,29 @@ type SignatureInformation struct { Parameters []ParameterInformation `json:"parameters,omitempty"` // The index of the active parameter. // - // If provided, this is used in place of `SignatureHelp.activeParameter`. + // If `null`, no parameter of the signature is active (for example a named + // argument that does not match any declared parameters). This is only valid + // if the client specifies the client capability + // `textDocument.signatureHelp.noActiveParameterSupport === true` + // + // If provided (or `null`), this is used in place of + // `SignatureHelp.activeParameter`. // // @since 3.16.0 ActiveParameter uint32 `json:"activeParameter,omitempty"` } +// @since 3.18.0 +// @proposed +type StaleRequestSupportOptions struct { + // The client will actively cancel the request. + Cancel bool `json:"cancel"` + // The list of requests for which the client + // will retry the request if it receives a + // response with error code `ContentModified` + RetryOnContentModified []string `json:"retryOnContentModified"` +} + // Static registration options to be returned in the initialize // request. type StaticRegistrationOptions struct { @@ -4431,7 +4562,27 @@ type TextDocumentClientCapabilities struct { // An event describing a change to a text document. If only a text is provided // it is considered to be the full content of the document. 
-type TextDocumentContentChangeEvent = Msg_TextDocumentContentChangeEvent // (alias) line 14417 +type TextDocumentContentChangeEvent = TextDocumentContentChangePartial // (alias) +// @since 3.18.0 +// @proposed +type TextDocumentContentChangePartial struct { + // The range of the document that changed. + Range *Range `json:"range,omitempty"` + // The optional length of the range that got replaced. + // + // @deprecated use range instead. + RangeLength uint32 `json:"rangeLength,omitempty"` + // The new text for the provided range. + Text string `json:"text"` +} + +// @since 3.18.0 +// @proposed +type TextDocumentContentChangeWholeDocument struct { + // The new text of the whole document. + Text string `json:"text"` +} + // Describes textual changes on a text document. A TextDocumentEdit describes all changes // on a document version Si and after they are applied move the document to version Si+1. // So the creator of a TextDocumentEdit doesn't need to sort the array of edits or do any @@ -4443,7 +4594,7 @@ type TextDocumentEdit struct { // // @since 3.16.0 - support for AnnotatedTextEdit. This is guarded using a // client capability. - Edits []TextEdit `json:"edits"` + Edits []Or_TextDocumentEdit_edits_Elem `json:"edits"` } // A document filter denotes a document by different properties like @@ -4463,7 +4614,46 @@ type TextDocumentEdit struct { // @sample A language filter that applies to all package.json paths: `{ language: 'json', pattern: '**package.json' }` // // @since 3.17.0 -type TextDocumentFilter = Msg_TextDocumentFilter // (alias) line 14560 +type TextDocumentFilter = Or_TextDocumentFilter // (alias) +// A document filter where `language` is required field. +// +// @since 3.18.0 +// @proposed +type TextDocumentFilterLanguage struct { + // A language id, like `typescript`. + Language string `json:"language"` + // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. + Scheme string `json:"scheme,omitempty"` + // A glob pattern, like **​/*.{ts,js}. 
See TextDocumentFilter for examples. + Pattern string `json:"pattern,omitempty"` +} + +// A document filter where `pattern` is required field. +// +// @since 3.18.0 +// @proposed +type TextDocumentFilterPattern struct { + // A language id, like `typescript`. + Language string `json:"language,omitempty"` + // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. + Scheme string `json:"scheme,omitempty"` + // A glob pattern, like **​/*.{ts,js}. See TextDocumentFilter for examples. + Pattern string `json:"pattern"` +} + +// A document filter where `scheme` is required field. +// +// @since 3.18.0 +// @proposed +type TextDocumentFilterScheme struct { + // A language id, like `typescript`. + Language string `json:"language,omitempty"` + // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. + Scheme string `json:"scheme"` + // A glob pattern, like **​/*.{ts,js}. See TextDocumentFilter for examples. + Pattern string `json:"pattern,omitempty"` +} + // A literal to identify a text document in the client. type TextDocumentIdentifier struct { // The text document's uri. @@ -4476,7 +4666,7 @@ type TextDocumentItem struct { // The text document's uri. URI DocumentURI `json:"uri"` // The text document's language identifier. - LanguageID string `json:"languageId"` + LanguageID LanguageKind `json:"languageId"` // The version number of this document (it will increase after each // change, including undo/redo). Version int32 `json:"version"` @@ -4552,7 +4742,7 @@ type TextEdit struct { NewText string `json:"newText"` } type TokenFormat string -type TraceValues string +type TraceValue string // Since 3.6.0 type TypeDefinitionClientCapabilities struct { @@ -4660,7 +4850,6 @@ type UIntCommaUInt struct { Fld0 uint32 `json:"fld0"` Fld1 uint32 `json:"fld1"` } -type URI = string // A diagnostic report indicating that the last returned // report is still accurate. 
@@ -4710,7 +4899,7 @@ type VersionedTextDocumentIdentifier struct { Version int32 `json:"version"` TextDocumentIdentifier } -type WatchKind = uint32 // line 13505// The parameters sent in a will save text document notification. +type WatchKind = uint32 // The parameters sent in a will save text document notification. type WillSaveTextDocumentParams struct { // The document that will be saved. TextDocument TextDocumentIdentifier `json:"textDocument"` @@ -4811,18 +5000,6 @@ type WorkDoneProgressReport struct { Percentage uint32 `json:"percentage,omitempty"` } -// created for Literal (Lit_ServerCapabilities_workspace) -type Workspace6Gn struct { - // The server supports workspace folder. - // - // @since 3.6.0 - WorkspaceFolders *WorkspaceFolders5Gn `json:"workspaceFolders,omitempty"` - // The server is interested in notifications/requests for operations on files. - // - // @since 3.16.0 - FileOperations *FileOperationOptions `json:"fileOperations,omitempty"` -} - // Workspace specific client capabilities. type WorkspaceClientCapabilities struct { // The client supports applying batch edits @@ -4876,6 +5053,11 @@ type WorkspaceClientCapabilities struct { // // @since 3.17.0. Diagnostics *DiagnosticWorkspaceClientCapabilities `json:"diagnostics,omitempty"` + // Capabilities specific to the folding range requests scoped to the workspace. + // + // @since 3.18.0 + // @proposed + FoldingRange *FoldingRangeWorkspaceClientCapabilities `json:"foldingRange,omitempty"` } // Parameters of the workspace diagnostic request. @@ -4908,7 +5090,7 @@ type WorkspaceDiagnosticReportPartialResult struct { // A workspace diagnostic document report. // // @since 3.17.0 -type WorkspaceDocumentDiagnosticReport = Or_WorkspaceDocumentDiagnosticReport // (alias) line 14399 +type WorkspaceDocumentDiagnosticReport = Or_WorkspaceDocumentDiagnosticReport // (alias) // A workspace edit represents changes to many resources managed in the workspace. 
The edit // should either provide `changes` or `documentChanges`. If documentChanges are present // they are preferred over `changes` if the client can handle versioned document edits. @@ -4968,7 +5150,7 @@ type WorkspaceEditClientCapabilities struct { // create file, rename file and delete file changes. // // @since 3.16.0 - ChangeAnnotationSupport *PChangeAnnotationSupportPWorkspaceEdit `json:"changeAnnotationSupport,omitempty"` + ChangeAnnotationSupport *ChangeAnnotationsSupportOptions `json:"changeAnnotationSupport,omitempty"` } // A workspace folder inside a client. @@ -5034,6 +5216,21 @@ type WorkspaceFullDocumentDiagnosticReport struct { FullDocumentDiagnosticReport } +// Defines workspace specific capabilities of the server. +// +// @since 3.18.0 +// @proposed +type WorkspaceOptions struct { + // The server supports workspace folder. + // + // @since 3.6.0 + WorkspaceFolders *WorkspaceFolders5Gn `json:"workspaceFolders,omitempty"` + // The server is interested in notifications/requests for operations on files. + // + // @since 3.16.0 + FileOperations *FileOperationOptions `json:"fileOperations,omitempty"` +} + // A special workspace symbol that supports locations without a range. // // See also SymbolInformation. @@ -5057,18 +5254,18 @@ type WorkspaceSymbolClientCapabilities struct { // Symbol request supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // Specific capabilities for the `SymbolKind` in the `workspace/symbol` request. - SymbolKind *PSymbolKindPSymbol `json:"symbolKind,omitempty"` + SymbolKind *ClientSymbolKindOptions `json:"symbolKind,omitempty"` // The client supports tags on `SymbolInformation`. // Clients supporting tags have to handle unknown tags gracefully. // // @since 3.16.0 - TagSupport *PTagSupportPSymbol `json:"tagSupport,omitempty"` + TagSupport *ClientSymbolTagOptions `json:"tagSupport,omitempty"` // The client support partial workspace symbols. 
The client will send the // request `workspaceSymbol/resolve` to the server to resolve additional // properties. // // @since 3.17.0 - ResolveSupport *PResolveSupportPSymbol `json:"resolveSupport,omitempty"` + ResolveSupport *ClientSymbolResolveOptions `json:"resolveSupport,omitempty"` } // Server capabilities for a {@link WorkspaceSymbolRequest}. @@ -5118,7 +5315,7 @@ type XInitializeParams struct { // Information about the client // // @since 3.15.0 - ClientInfo *Msg_XInitializeParams_clientInfo `json:"clientInfo,omitempty"` + ClientInfo *ClientInfo `json:"clientInfo,omitempty"` // The locale the client is currently showing the user interface // in. This must not necessarily be the locale of the operating // system. @@ -5144,7 +5341,7 @@ type XInitializeParams struct { // User provided initialization options. InitializationOptions interface{} `json:"initializationOptions,omitempty"` // The initial trace setting. If omitted trace is disabled ('off'). - Trace *TraceValues `json:"trace,omitempty"` + Trace *TraceValue `json:"trace,omitempty"` WorkDoneProgressParams } @@ -5159,7 +5356,7 @@ type _InitializeParams struct { // Information about the client // // @since 3.15.0 - ClientInfo *Msg_XInitializeParams_clientInfo `json:"clientInfo,omitempty"` + ClientInfo *ClientInfo `json:"clientInfo,omitempty"` // The locale the client is currently showing the user interface // in. This must not necessarily be the locale of the operating // system. @@ -5185,7 +5382,7 @@ type _InitializeParams struct { // User provided initialization options. InitializationOptions interface{} `json:"initializationOptions,omitempty"` // The initial trace setting. If omitted trace is disabled ('off'). - Trace *TraceValues `json:"trace,omitempty"` + Trace *TraceValue `json:"trace,omitempty"` WorkDoneProgressParams } @@ -5218,6 +5415,19 @@ const ( // - Inline constant // - ... 
RefactorInline CodeActionKind = "refactor.inline" + // Base kind for refactoring move actions: `refactor.move` + // + // Example move actions: + // + // + // - Move a function to a new file + // - Move a property between classes + // - Move method to base class + // - ... + // + // @since 3.18.0 + // @proposed + RefactorMove CodeActionKind = "refactor.move" // Base kind for refactoring rewrite actions: 'refactor.rewrite' // // Example rewrite actions: @@ -5243,6 +5453,11 @@ const ( // // @since 3.15.0 SourceFixAll CodeActionKind = "source.fixAll" + // Base kind for all code actions applying to the entire notebook's scope. CodeActionKinds using + // this should always begin with `notebook.` + // + // @since 3.18.0 + Notebook CodeActionKind = "notebook" // The reason why code actions were requested. // // @since 3.17.0 @@ -5388,9 +5603,9 @@ const ( // @since 3.18.0 // @proposed // Completion was triggered explicitly by a user gesture. - InlineInvoked InlineCompletionTriggerKind = 0 + InlineInvoked InlineCompletionTriggerKind = 1 // Completion was triggered automatically while editing. - InlineAutomatic InlineCompletionTriggerKind = 1 + InlineAutomatic InlineCompletionTriggerKind = 2 // Defines whether the insert text in a completion item should be interpreted as // plain text or a snippet. // The primary text to be inserted is treated as a plain string. @@ -5447,6 +5662,75 @@ const ( // The client has canceled a request and a server as detected // the cancel. 
RequestCancelled LSPErrorCodes = -32800 + // Predefined Language kinds + // @since 3.18.0 + // @proposed + LangABAP LanguageKind = "abap" + LangWindowsBat LanguageKind = "bat" + LangBibTeX LanguageKind = "bibtex" + LangClojure LanguageKind = "clojure" + LangCoffeescript LanguageKind = "coffeescript" + LangC LanguageKind = "c" + LangCPP LanguageKind = "cpp" + LangCSharp LanguageKind = "csharp" + LangCSS LanguageKind = "css" + // @since 3.18.0 + // @proposed + LangD LanguageKind = "d" + // @since 3.18.0 + // @proposed + LangDelphi LanguageKind = "pascal" + LangDiff LanguageKind = "diff" + LangDart LanguageKind = "dart" + LangDockerfile LanguageKind = "dockerfile" + LangElixir LanguageKind = "elixir" + LangErlang LanguageKind = "erlang" + LangFSharp LanguageKind = "fsharp" + LangGitCommit LanguageKind = "git-commit" + LangGitRebase LanguageKind = "rebase" + LangGo LanguageKind = "go" + LangGroovy LanguageKind = "groovy" + LangHandlebars LanguageKind = "handlebars" + LangHTML LanguageKind = "html" + LangIni LanguageKind = "ini" + LangJava LanguageKind = "java" + LangJavaScript LanguageKind = "javascript" + LangJavaScriptReact LanguageKind = "javascriptreact" + LangJSON LanguageKind = "json" + LangLaTeX LanguageKind = "latex" + LangLess LanguageKind = "less" + LangLua LanguageKind = "lua" + LangMakefile LanguageKind = "makefile" + LangMarkdown LanguageKind = "markdown" + LangObjectiveC LanguageKind = "objective-c" + LangObjectiveCPP LanguageKind = "objective-cpp" + // @since 3.18.0 + // @proposed + LangPascal LanguageKind = "pascal" + LangPerl LanguageKind = "perl" + LangPerl6 LanguageKind = "perl6" + LangPHP LanguageKind = "php" + LangPowershell LanguageKind = "powershell" + LangPug LanguageKind = "jade" + LangPython LanguageKind = "python" + LangR LanguageKind = "r" + LangRazor LanguageKind = "razor" + LangRuby LanguageKind = "ruby" + LangRust LanguageKind = "rust" + LangSCSS LanguageKind = "scss" + LangSASS LanguageKind = "sass" + LangScala LanguageKind = "scala" + 
LangShaderLab LanguageKind = "shaderlab" + LangShellScript LanguageKind = "shellscript" + LangSQL LanguageKind = "sql" + LangSwift LanguageKind = "swift" + LangTypeScript LanguageKind = "typescript" + LangTypeScriptReact LanguageKind = "typescriptreact" + LangTeX LanguageKind = "tex" + LangVisualBasic LanguageKind = "vb" + LangXML LanguageKind = "xml" + LangXSL LanguageKind = "xsl" + LangYAML LanguageKind = "yaml" // Describes the content type that a client supports in various // result literals like `Hover`, `ParameterInfo` or `CompletionItem`. // @@ -5465,6 +5749,11 @@ const ( Info MessageType = 3 // A log message. Log MessageType = 4 + // A debug message. + // + // @since 3.18.0 + // @proposed + Debug MessageType = 5 // The moniker kind. // // @since 3.16.0 @@ -5615,11 +5904,11 @@ const ( Incremental TextDocumentSyncKind = 2 Relative TokenFormat = "relative" // Turn tracing off. - Off TraceValues = "off" + Off TraceValue = "off" // Trace messages only. - Messages TraceValues = "messages" + Messages TraceValue = "messages" // Verbose message tracing. - Verbose TraceValues = "verbose" + Verbose TraceValue = "verbose" // Moniker uniqueness level to define scope of the moniker. // // @since 3.16.0 diff --git a/gopls/internal/lsp/protocol/tsserver.go b/gopls/internal/protocol/tsserver.go similarity index 90% rename from gopls/internal/lsp/protocol/tsserver.go rename to gopls/internal/protocol/tsserver.go index 327cbddce30..5ebd19b3d86 100644 --- a/gopls/internal/lsp/protocol/tsserver.go +++ b/gopls/internal/protocol/tsserver.go @@ -6,13 +6,12 @@ package protocol -// Code generated from protocol/metaModel.json at ref release/protocol/3.17.4-next.2 (hash 184c8a7f010d335582f24337fe182baa6f2fccdd). -// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.4-next.2/protocol/metaModel.json +// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.2 (hash 654dc9be6673c61476c28fda604406279c3258d7). 
+// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.2/protocol/metaModel.json // LSP metaData.version = 3.17.0. import ( "context" - "encoding/json" "golang.org/x/tools/internal/jsonrpc2" ) @@ -61,7 +60,7 @@ type Server interface { Moniker(context.Context, *MonikerParams) ([]Moniker, error) // textDocument/moniker OnTypeFormatting(context.Context, *DocumentOnTypeFormattingParams) ([]TextEdit, error) // textDocument/onTypeFormatting PrepareCallHierarchy(context.Context, *CallHierarchyPrepareParams) ([]CallHierarchyItem, error) // textDocument/prepareCallHierarchy - PrepareRename(context.Context, *PrepareRenameParams) (*PrepareRename2Gn, error) // textDocument/prepareRename + PrepareRename(context.Context, *PrepareRenameParams) (*PrepareRenameResult, error) // textDocument/prepareRename PrepareTypeHierarchy(context.Context, *TypeHierarchyPrepareParams) ([]TypeHierarchyItem, error) // textDocument/prepareTypeHierarchy RangeFormatting(context.Context, *DocumentRangeFormattingParams) ([]TextEdit, error) // textDocument/rangeFormatting RangesFormatting(context.Context, *DocumentRangesFormattingParams) ([]TextEdit, error) // textDocument/rangesFormatting @@ -91,28 +90,31 @@ type Server interface { WillDeleteFiles(context.Context, *DeleteFilesParams) (*WorkspaceEdit, error) // workspace/willDeleteFiles WillRenameFiles(context.Context, *RenameFilesParams) (*WorkspaceEdit, error) // workspace/willRenameFiles ResolveWorkspaceSymbol(context.Context, *WorkspaceSymbol) (*WorkspaceSymbol, error) // workspaceSymbol/resolve - NonstandardRequest(ctx context.Context, method string, params interface{}) (interface{}, error) + } func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, r jsonrpc2.Request) (bool, error) { + defer recoverHandlerPanic(r.Method()) switch r.Method() { case "$/progress": var params ProgressParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); 
err != nil { return true, sendParseError(ctx, reply, err) } err := server.Progress(ctx, ¶ms) return true, reply(ctx, nil, err) + case "$/setTrace": var params SetTraceParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.SetTrace(ctx, ¶ms) return true, reply(ctx, nil, err) + case "callHierarchy/incomingCalls": var params CallHierarchyIncomingCallsParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.IncomingCalls(ctx, ¶ms) @@ -120,9 +122,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "callHierarchy/outgoingCalls": var params CallHierarchyOutgoingCallsParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.OutgoingCalls(ctx, ¶ms) @@ -130,9 +133,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "codeAction/resolve": var params CodeAction - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.ResolveCodeAction(ctx, ¶ms) @@ -140,9 +144,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "codeLens/resolve": var params CodeLens - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := 
server.ResolveCodeLens(ctx, ¶ms) @@ -150,9 +155,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "completionItem/resolve": var params CompletionItem - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.ResolveCompletionItem(ctx, ¶ms) @@ -160,9 +166,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "documentLink/resolve": var params DocumentLink - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.ResolveDocumentLink(ctx, ¶ms) @@ -170,12 +177,14 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "exit": err := server.Exit(ctx) return true, reply(ctx, nil, err) + case "initialize": var params ParamInitialize - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Initialize(ctx, ¶ms) @@ -183,16 +192,18 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "initialized": var params InitializedParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.Initialized(ctx, ¶ms) return true, reply(ctx, nil, err) + case "inlayHint/resolve": var params InlayHint - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := 
UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Resolve(ctx, ¶ms) @@ -200,40 +211,46 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "notebookDocument/didChange": var params DidChangeNotebookDocumentParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidChangeNotebookDocument(ctx, ¶ms) return true, reply(ctx, nil, err) + case "notebookDocument/didClose": var params DidCloseNotebookDocumentParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidCloseNotebookDocument(ctx, ¶ms) return true, reply(ctx, nil, err) + case "notebookDocument/didOpen": var params DidOpenNotebookDocumentParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidOpenNotebookDocument(ctx, ¶ms) return true, reply(ctx, nil, err) + case "notebookDocument/didSave": var params DidSaveNotebookDocumentParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidSaveNotebookDocument(ctx, ¶ms) return true, reply(ctx, nil, err) + case "shutdown": err := server.Shutdown(ctx) return true, reply(ctx, nil, err) + case "textDocument/codeAction": var params CodeActionParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.CodeAction(ctx, ¶ms) @@ -241,9 +258,10 @@ func serverDispatch(ctx context.Context, 
server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/codeLens": var params CodeLensParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.CodeLens(ctx, ¶ms) @@ -251,9 +269,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/colorPresentation": var params ColorPresentationParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.ColorPresentation(ctx, ¶ms) @@ -261,9 +280,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/completion": var params CompletionParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Completion(ctx, ¶ms) @@ -271,9 +291,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/declaration": var params DeclarationParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Declaration(ctx, ¶ms) @@ -281,9 +302,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/definition": var params DefinitionParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + 
if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Definition(ctx, ¶ms) @@ -291,9 +313,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/diagnostic": var params string - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Diagnostic(ctx, ¶ms) @@ -301,37 +324,42 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/didChange": var params DidChangeTextDocumentParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidChange(ctx, ¶ms) return true, reply(ctx, nil, err) + case "textDocument/didClose": var params DidCloseTextDocumentParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidClose(ctx, ¶ms) return true, reply(ctx, nil, err) + case "textDocument/didOpen": var params DidOpenTextDocumentParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidOpen(ctx, ¶ms) return true, reply(ctx, nil, err) + case "textDocument/didSave": var params DidSaveTextDocumentParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidSave(ctx, ¶ms) return true, reply(ctx, nil, err) + case "textDocument/documentColor": var params 
DocumentColorParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.DocumentColor(ctx, ¶ms) @@ -339,9 +367,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/documentHighlight": var params DocumentHighlightParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.DocumentHighlight(ctx, ¶ms) @@ -349,9 +378,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/documentLink": var params DocumentLinkParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.DocumentLink(ctx, ¶ms) @@ -359,9 +389,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/documentSymbol": var params DocumentSymbolParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.DocumentSymbol(ctx, ¶ms) @@ -369,9 +400,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/foldingRange": var params FoldingRangeParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := 
server.FoldingRange(ctx, ¶ms) @@ -379,9 +411,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/formatting": var params DocumentFormattingParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Formatting(ctx, ¶ms) @@ -389,9 +422,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/hover": var params HoverParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Hover(ctx, ¶ms) @@ -399,9 +433,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/implementation": var params ImplementationParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Implementation(ctx, ¶ms) @@ -409,9 +444,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/inlayHint": var params InlayHintParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.InlayHint(ctx, ¶ms) @@ -419,9 +455,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/inlineCompletion": var 
params InlineCompletionParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.InlineCompletion(ctx, ¶ms) @@ -429,9 +466,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/inlineValue": var params InlineValueParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.InlineValue(ctx, ¶ms) @@ -439,9 +477,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/linkedEditingRange": var params LinkedEditingRangeParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.LinkedEditingRange(ctx, ¶ms) @@ -449,9 +488,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/moniker": var params MonikerParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Moniker(ctx, ¶ms) @@ -459,9 +499,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/onTypeFormatting": var params DocumentOnTypeFormattingParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := 
server.OnTypeFormatting(ctx, ¶ms) @@ -469,9 +510,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/prepareCallHierarchy": var params CallHierarchyPrepareParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.PrepareCallHierarchy(ctx, ¶ms) @@ -479,9 +521,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/prepareRename": var params PrepareRenameParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.PrepareRename(ctx, ¶ms) @@ -489,9 +532,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/prepareTypeHierarchy": var params TypeHierarchyPrepareParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.PrepareTypeHierarchy(ctx, ¶ms) @@ -499,9 +543,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/rangeFormatting": var params DocumentRangeFormattingParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.RangeFormatting(ctx, ¶ms) @@ -509,9 +554,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, 
nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/rangesFormatting": var params DocumentRangesFormattingParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.RangesFormatting(ctx, ¶ms) @@ -519,9 +565,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/references": var params ReferenceParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.References(ctx, ¶ms) @@ -529,9 +576,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/rename": var params RenameParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Rename(ctx, ¶ms) @@ -539,9 +587,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/selectionRange": var params SelectionRangeParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.SelectionRange(ctx, ¶ms) @@ -549,9 +598,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/semanticTokens/full": var params SemanticTokensParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); 
err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.SemanticTokensFull(ctx, ¶ms) @@ -559,9 +609,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/semanticTokens/full/delta": var params SemanticTokensDeltaParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.SemanticTokensFullDelta(ctx, ¶ms) @@ -569,9 +620,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/semanticTokens/range": var params SemanticTokensRangeParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.SemanticTokensRange(ctx, ¶ms) @@ -579,9 +631,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/signatureHelp": var params SignatureHelpParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.SignatureHelp(ctx, ¶ms) @@ -589,9 +642,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/typeDefinition": var params TypeDefinitionParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.TypeDefinition(ctx, ¶ms) @@ -599,16 +653,18 @@ func serverDispatch(ctx 
context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "textDocument/willSave": var params WillSaveTextDocumentParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.WillSave(ctx, ¶ms) return true, reply(ctx, nil, err) + case "textDocument/willSaveWaitUntil": var params WillSaveTextDocumentParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.WillSaveWaitUntil(ctx, ¶ms) @@ -616,9 +672,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "typeHierarchy/subtypes": var params TypeHierarchySubtypesParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Subtypes(ctx, ¶ms) @@ -626,9 +683,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "typeHierarchy/supertypes": var params TypeHierarchySupertypesParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Supertypes(ctx, ¶ms) @@ -636,16 +694,18 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "window/workDoneProgress/cancel": var params WorkDoneProgressCancelParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, 
sendParseError(ctx, reply, err) } err := server.WorkDoneProgressCancel(ctx, ¶ms) return true, reply(ctx, nil, err) + case "workspace/diagnostic": var params WorkspaceDiagnosticParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.DiagnosticWorkspace(ctx, ¶ms) @@ -653,51 +713,58 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "workspace/didChangeConfiguration": var params DidChangeConfigurationParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidChangeConfiguration(ctx, ¶ms) return true, reply(ctx, nil, err) + case "workspace/didChangeWatchedFiles": var params DidChangeWatchedFilesParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidChangeWatchedFiles(ctx, ¶ms) return true, reply(ctx, nil, err) + case "workspace/didChangeWorkspaceFolders": var params DidChangeWorkspaceFoldersParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidChangeWorkspaceFolders(ctx, ¶ms) return true, reply(ctx, nil, err) + case "workspace/didCreateFiles": var params CreateFilesParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidCreateFiles(ctx, ¶ms) return true, reply(ctx, nil, err) + case "workspace/didDeleteFiles": var params DeleteFilesParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := 
UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidDeleteFiles(ctx, ¶ms) return true, reply(ctx, nil, err) + case "workspace/didRenameFiles": var params RenameFilesParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } err := server.DidRenameFiles(ctx, ¶ms) return true, reply(ctx, nil, err) + case "workspace/executeCommand": var params ExecuteCommandParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.ExecuteCommand(ctx, ¶ms) @@ -705,9 +772,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "workspace/symbol": var params WorkspaceSymbolParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.Symbol(ctx, ¶ms) @@ -715,9 +783,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "workspace/willCreateFiles": var params CreateFilesParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.WillCreateFiles(ctx, ¶ms) @@ -725,9 +794,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "workspace/willDeleteFiles": var params DeleteFilesParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, 
reply, err) } resp, err := server.WillDeleteFiles(ctx, ¶ms) @@ -735,9 +805,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "workspace/willRenameFiles": var params RenameFilesParams - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.WillRenameFiles(ctx, ¶ms) @@ -745,9 +816,10 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + case "workspaceSymbol/resolve": var params WorkspaceSymbol - if err := json.Unmarshal(r.Params(), ¶ms); err != nil { + if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } resp, err := server.ResolveWorkspaceSymbol(ctx, ¶ms) @@ -755,6 +827,7 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) } return true, reply(ctx, resp, nil) + default: return false, nil } @@ -1009,8 +1082,8 @@ func (s *serverDispatcher) PrepareCallHierarchy(ctx context.Context, params *Cal } return result, nil } -func (s *serverDispatcher) PrepareRename(ctx context.Context, params *PrepareRenameParams) (*PrepareRename2Gn, error) { - var result *PrepareRename2Gn +func (s *serverDispatcher) PrepareRename(ctx context.Context, params *PrepareRenameParams) (*PrepareRenameResult, error) { + var result *PrepareRenameResult if err := s.sender.Call(ctx, "textDocument/prepareRename", params, &result); err != nil { return nil, err } @@ -1187,10 +1260,3 @@ func (s *serverDispatcher) ResolveWorkspaceSymbol(ctx context.Context, params *W } return result, nil } -func (s *serverDispatcher) NonstandardRequest(ctx context.Context, method string, params interface{}) (interface{}, error) { - var result interface{} - if err := 
s.sender.Call(ctx, method, params, &result); err != nil { - return nil, err - } - return result, nil -} diff --git a/gopls/internal/protocol/uri.go b/gopls/internal/protocol/uri.go new file mode 100644 index 00000000000..86775b065f5 --- /dev/null +++ b/gopls/internal/protocol/uri.go @@ -0,0 +1,220 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package protocol + +// This file declares URI, DocumentURI, and its methods. +// +// For the LSP definition of these types, see +// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#uri + +import ( + "fmt" + "net/url" + "path/filepath" + "strings" + "unicode" + + "golang.org/x/tools/gopls/internal/util/pathutil" +) + +// A DocumentURI is the URI of a client editor document. +// +// According to the LSP specification: +// +// Care should be taken to handle encoding in URIs. For +// example, some clients (such as VS Code) may encode colons +// in drive letters while others do not. The URIs below are +// both valid, but clients and servers should be consistent +// with the form they use themselves to ensure the other party +// doesn’t interpret them as distinct URIs. Clients and +// servers should not assume that each other are encoding the +// same way (for example a client encoding colons in drive +// letters cannot assume server responses will have encoded +// colons). The same applies to casing of drive letters - one +// party should not assume the other party will return paths +// with drive letters cased the same as it. +// +// file:///c:/project/readme.md +// file:///C%3A/project/readme.md +// +// This is done during JSON unmarshalling; +// see [DocumentURI.UnmarshalText] for details. +type DocumentURI string + +// A URI is an arbitrary URL (e.g. https), not necessarily a file. +type URI = string + +// UnmarshalText implements decoding of DocumentURI values. 
+// +// In particular, it implements a systematic correction of various odd +// features of the definition of DocumentURI in the LSP spec that +// appear to be workarounds for bugs in VS Code. For example, it may +// URI-encode the URI itself, so that colon becomes %3A, and it may +// send file://foo.go URIs that have two slashes (not three) and no +// hostname. +// +// We use UnmarshalText, not UnmarshalJSON, because it is called even +// for non-addressable values such as keys and values of map[K]V, +// where there is no pointer of type *K or *V on which to call +// UnmarshalJSON. (See Go issue #28189 for more detail.) +// +// Non-empty DocumentURIs are valid "file"-scheme URIs. +// The empty DocumentURI is valid. +func (uri *DocumentURI) UnmarshalText(data []byte) (err error) { + *uri, err = ParseDocumentURI(string(data)) + return +} + +// Path returns the file path for the given URI. +// +// DocumentURI("").Path() returns the empty string. +// +// Path panics if called on a URI that is not a valid filename. +func (uri DocumentURI) Path() string { + filename, err := filename(uri) + if err != nil { + // e.g. ParseRequestURI failed. + // + // This can only affect DocumentURIs created by + // direct string manipulation; all DocumentURIs + // received from the client pass through + // ParseRequestURI, which ensures validity. + panic(err) + } + return filepath.FromSlash(filename) +} + +// Dir returns the URI for the directory containing the receiver. +func (uri DocumentURI) Dir() DocumentURI { + // This function could be more efficiently implemented by avoiding any call + // to Path(), but at least consolidates URI manipulation. + return URIFromPath(filepath.Dir(uri.Path())) +} + +// Encloses reports whether uri's path, considered as a sequence of segments, +// is a prefix of file's path. 
+func (uri DocumentURI) Encloses(file DocumentURI) bool { + return pathutil.InDir(uri.Path(), file.Path()) +} + +func filename(uri DocumentURI) (string, error) { + if uri == "" { + return "", nil + } + + // This conservative check for the common case + // of a simple non-empty absolute POSIX filename + // avoids the allocation of a net.URL. + if strings.HasPrefix(string(uri), "file:///") { + rest := string(uri)[len("file://"):] // leave one slash + for i := 0; i < len(rest); i++ { + b := rest[i] + // Reject these cases: + if b < ' ' || b == 0x7f || // control character + b == '%' || b == '+' || // URI escape + b == ':' || // Windows drive letter + b == '@' || b == '&' || b == '?' { // authority or query + goto slow + } + } + return rest, nil + } +slow: + + u, err := url.ParseRequestURI(string(uri)) + if err != nil { + return "", err + } + if u.Scheme != fileScheme { + return "", fmt.Errorf("only file URIs are supported, got %q from %q", u.Scheme, uri) + } + // If the URI is a Windows URI, we trim the leading "/" and uppercase + // the drive letter, which will never be case sensitive. + if isWindowsDriveURIPath(u.Path) { + u.Path = strings.ToUpper(string(u.Path[1])) + u.Path[2:] + } + + return u.Path, nil +} + +// ParseDocumentURI interprets a string as a DocumentURI, applying VS +// Code workarounds; see [DocumentURI.UnmarshalText] for details. +func ParseDocumentURI(s string) (DocumentURI, error) { + if s == "" { + return "", nil + } + + if !strings.HasPrefix(s, "file://") { + return "", fmt.Errorf("DocumentURI scheme is not 'file': %s", s) + } + + // VS Code sends URLs with only two slashes, + // which are invalid. golang/go#39789. + if !strings.HasPrefix(s, "file:///") { + s = "file:///" + s[len("file://"):] + } + + // Even though the input is a URI, it may not be in canonical form. VS Code + // in particular over-escapes :, @, etc. Unescape and re-encode to canonicalize. 
+ path, err := url.PathUnescape(s[len("file://"):]) + if err != nil { + return "", err + } + + // File URIs from Windows may have lowercase drive letters. + // Since drive letters are guaranteed to be case insensitive, + // we change them to uppercase to remain consistent. + // For example, file:///c:/x/y/z becomes file:///C:/x/y/z. + if isWindowsDriveURIPath(path) { + path = path[:1] + strings.ToUpper(string(path[1])) + path[2:] + } + u := url.URL{Scheme: fileScheme, Path: path} + return DocumentURI(u.String()), nil +} + +// URIFromPath returns DocumentURI for the supplied file path. +// Given "", it returns "". +func URIFromPath(path string) DocumentURI { + if path == "" { + return "" + } + if !isWindowsDrivePath(path) { + if abs, err := filepath.Abs(path); err == nil { + path = abs + } + } + // Check the file path again, in case it became absolute. + if isWindowsDrivePath(path) { + path = "/" + strings.ToUpper(string(path[0])) + path[1:] + } + path = filepath.ToSlash(path) + u := url.URL{ + Scheme: fileScheme, + Path: path, + } + return DocumentURI(u.String()) +} + +const fileScheme = "file" + +// isWindowsDrivePath returns true if the file path is of the form used by +// Windows. We check if the path begins with a drive letter, followed by a ":". +// For example: C:/x/y/z. +func isWindowsDrivePath(path string) bool { + if len(path) < 3 { + return false + } + return unicode.IsLetter(rune(path[0])) && path[1] == ':' +} + +// isWindowsDriveURIPath returns true if the file URI is of the format used by +// Windows URIs. The url.Parse package does not specially handle Windows paths +// (see golang/go#6027), so we check if the URI path has a drive prefix (e.g. "/C:"). 
+func isWindowsDriveURIPath(uri string) bool { + if len(uri) < 4 { + return false + } + return uri[0] == '/' && unicode.IsLetter(rune(uri[1])) && uri[2] == ':' +} diff --git a/gopls/internal/protocol/uri_test.go b/gopls/internal/protocol/uri_test.go new file mode 100644 index 00000000000..cad71ddc13c --- /dev/null +++ b/gopls/internal/protocol/uri_test.go @@ -0,0 +1,134 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !windows +// +build !windows + +package protocol_test + +import ( + "testing" + + "golang.org/x/tools/gopls/internal/protocol" +) + +// TestURIFromPath tests the conversion between URIs and filenames. The test cases +// include Windows-style URIs and filepaths, but we avoid having OS-specific +// tests by using only forward slashes, assuming that the standard library +// functions filepath.ToSlash and filepath.FromSlash do not need testing. +func TestURIFromPath(t *testing.T) { + for _, test := range []struct { + path, wantFile string + wantURI protocol.DocumentURI + }{ + { + path: ``, + wantFile: ``, + wantURI: protocol.DocumentURI(""), + }, + { + path: `C:/Windows/System32`, + wantFile: `C:/Windows/System32`, + wantURI: protocol.DocumentURI("file:///C:/Windows/System32"), + }, + { + path: `C:/Go/src/bob.go`, + wantFile: `C:/Go/src/bob.go`, + wantURI: protocol.DocumentURI("file:///C:/Go/src/bob.go"), + }, + { + path: `c:/Go/src/bob.go`, + wantFile: `C:/Go/src/bob.go`, + wantURI: protocol.DocumentURI("file:///C:/Go/src/bob.go"), + }, + { + path: `/path/to/dir`, + wantFile: `/path/to/dir`, + wantURI: protocol.DocumentURI("file:///path/to/dir"), + }, + { + path: `/a/b/c/src/bob.go`, + wantFile: `/a/b/c/src/bob.go`, + wantURI: protocol.DocumentURI("file:///a/b/c/src/bob.go"), + }, + { + path: `c:/Go/src/bob george/george/george.go`, + wantFile: `C:/Go/src/bob george/george/george.go`, + wantURI: 
protocol.DocumentURI("file:///C:/Go/src/bob%20george/george/george.go"), + }, + } { + got := protocol.URIFromPath(test.path) + if got != test.wantURI { + t.Errorf("URIFromPath(%q): got %q, expected %q", test.path, got, test.wantURI) + } + gotFilename := got.Path() + if gotFilename != test.wantFile { + t.Errorf("Filename(%q): got %q, expected %q", got, gotFilename, test.wantFile) + } + } +} + +func TestParseDocumentURI(t *testing.T) { + for _, test := range []struct { + input string + want string // string(DocumentURI) on success or error.Error() on failure + wantPath string // expected DocumentURI.Path on success + }{ + { + input: `file:///c:/Go/src/bob%20george/george/george.go`, + want: "file:///C:/Go/src/bob%20george/george/george.go", + wantPath: `C:/Go/src/bob george/george/george.go`, + }, + { + input: `file:///C%3A/Go/src/bob%20george/george/george.go`, + want: "file:///C:/Go/src/bob%20george/george/george.go", + wantPath: `C:/Go/src/bob george/george/george.go`, + }, + { + input: `file:///path/to/%25p%25ercent%25/per%25cent.go`, + want: `file:///path/to/%25p%25ercent%25/per%25cent.go`, + wantPath: `/path/to/%p%ercent%/per%cent.go`, + }, + { + input: `file:///C%3A/`, + want: `file:///C:/`, + wantPath: `C:/`, + }, + { + input: `file:///`, + want: `file:///`, + wantPath: `/`, + }, + { + input: `file://wsl%24/Ubuntu/home/wdcui/repo/VMEnclaves/cvm-runtime`, + want: `file:///wsl$/Ubuntu/home/wdcui/repo/VMEnclaves/cvm-runtime`, + wantPath: `/wsl$/Ubuntu/home/wdcui/repo/VMEnclaves/cvm-runtime`, + }, + { + input: "", + want: "", + wantPath: "", + }, + // Errors: + { + input: "https://go.dev/", + want: "DocumentURI scheme is not 'file': https://go.dev/", + }, + } { + uri, err := protocol.ParseDocumentURI(test.input) + var got string + if err != nil { + got = err.Error() + } else { + got = string(uri) + } + if got != test.want { + t.Errorf("ParseDocumentURI(%q): got %q, want %q", test.input, got, test.want) + } + if err == nil && uri.Path() != test.wantPath { 
+ t.Errorf("DocumentURI(%s).Path = %q, want %q", uri, + uri.Path(), test.wantPath) + } + } +} diff --git a/gopls/internal/protocol/uri_windows_test.go b/gopls/internal/protocol/uri_windows_test.go new file mode 100644 index 00000000000..08471167a22 --- /dev/null +++ b/gopls/internal/protocol/uri_windows_test.go @@ -0,0 +1,139 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build windows +// +build windows + +package protocol_test + +import ( + "path/filepath" + "testing" + + "golang.org/x/tools/gopls/internal/protocol" +) + +// TestURIFromPath tests the conversion between URIs and filenames. The test cases +// include Windows-style URIs and filepaths, but we avoid having OS-specific +// tests by using only forward slashes, assuming that the standard library +// functions filepath.ToSlash and filepath.FromSlash do not need testing. +func TestURIFromPath(t *testing.T) { + rootPath, err := filepath.Abs("/") + if err != nil { + t.Fatal(err) + } + if len(rootPath) < 2 || rootPath[1] != ':' { + t.Fatalf("malformed root path %q", rootPath) + } + driveLetter := string(rootPath[0]) + + for _, test := range []struct { + path, wantFile string + wantURI protocol.DocumentURI + }{ + { + path: ``, + wantFile: ``, + wantURI: protocol.DocumentURI(""), + }, + { + path: `C:\Windows\System32`, + wantFile: `C:\Windows\System32`, + wantURI: protocol.DocumentURI("file:///C:/Windows/System32"), + }, + { + path: `C:\Go\src\bob.go`, + wantFile: `C:\Go\src\bob.go`, + wantURI: protocol.DocumentURI("file:///C:/Go/src/bob.go"), + }, + { + path: `c:\Go\src\bob.go`, + wantFile: `C:\Go\src\bob.go`, + wantURI: protocol.DocumentURI("file:///C:/Go/src/bob.go"), + }, + { + path: `\path\to\dir`, + wantFile: driveLetter + `:\path\to\dir`, + wantURI: protocol.DocumentURI("file:///" + driveLetter + ":/path/to/dir"), + }, + { + path: `\a\b\c\src\bob.go`, + wantFile: driveLetter + 
`:\a\b\c\src\bob.go`, + wantURI: protocol.DocumentURI("file:///" + driveLetter + ":/a/b/c/src/bob.go"), + }, + { + path: `c:\Go\src\bob george\george\george.go`, + wantFile: `C:\Go\src\bob george\george\george.go`, + wantURI: protocol.DocumentURI("file:///C:/Go/src/bob%20george/george/george.go"), + }, + } { + got := protocol.URIFromPath(test.path) + if got != test.wantURI { + t.Errorf("URIFromPath(%q): got %q, expected %q", test.path, got, test.wantURI) + } + gotFilename := got.Path() + if gotFilename != test.wantFile { + t.Errorf("Filename(%q): got %q, expected %q", got, gotFilename, test.wantFile) + } + } +} + +func TestParseDocumentURI(t *testing.T) { + for _, test := range []struct { + input string + want string // string(DocumentURI) on success or error.Error() on failure + wantPath string // expected DocumentURI.Path on success + }{ + { + input: `file:///c:/Go/src/bob%20george/george/george.go`, + want: "file:///C:/Go/src/bob%20george/george/george.go", + wantPath: `C:\Go\src\bob george\george\george.go`, + }, + { + input: `file:///C%3A/Go/src/bob%20george/george/george.go`, + want: "file:///C:/Go/src/bob%20george/george/george.go", + wantPath: `C:\Go\src\bob george\george\george.go`, + }, + { + input: `file:///c:/path/to/%25p%25ercent%25/per%25cent.go`, + want: `file:///C:/path/to/%25p%25ercent%25/per%25cent.go`, + wantPath: `C:\path\to\%p%ercent%\per%cent.go`, + }, + { + input: `file:///C%3A/`, + want: `file:///C:/`, + wantPath: `C:\`, + }, + { + input: `file:///`, + want: `file:///`, + wantPath: `\`, + }, + { + input: "", + want: "", + wantPath: "", + }, + // Errors: + { + input: "https://go.dev/", + want: "DocumentURI scheme is not 'file': https://go.dev/", + }, + } { + uri, err := protocol.ParseDocumentURI(test.input) + var got string + if err != nil { + got = err.Error() + } else { + got = string(uri) + } + if got != test.want { + t.Errorf("ParseDocumentURI(%q): got %q, want %q", test.input, got, test.want) + } + if err == nil && uri.Path() != 
test.wantPath { + t.Errorf("DocumentURI(%s).Path = %q, want %q", uri, + uri.Path(), test.wantPath) + } + } +} diff --git a/gopls/internal/regtest/bench/completion_test.go b/gopls/internal/regtest/bench/completion_test.go deleted file mode 100644 index 02e640423b9..00000000000 --- a/gopls/internal/regtest/bench/completion_test.go +++ /dev/null @@ -1,290 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package bench - -import ( - "flag" - "fmt" - "sync/atomic" - "testing" - - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" -) - -// TODO(rfindley): update these completion tests to run on multiple repos. - -type completionBenchOptions struct { - file, locationRegexp string - - // Hooks to run edits before initial completion - setup func(*Env) // run before the benchmark starts - beforeCompletion func(*Env) // run before each completion -} - -func benchmarkCompletion(options completionBenchOptions, b *testing.B) { - repo := getRepo(b, "tools") - _ = repo.sharedEnv(b) // ensure cache is warm - env := repo.newEnv(b, fake.EditorConfig{}, "completion", false) - defer env.Close() - - // Run edits required for this completion. - if options.setup != nil { - options.setup(env) - } - - // Run a completion to make sure the system is warm. - loc := env.RegexpSearch(options.file, options.locationRegexp) - completions := env.Completion(loc) - - if testing.Verbose() { - fmt.Println("Results:") - for i := 0; i < len(completions.Items); i++ { - fmt.Printf("\t%d. 
%v\n", i, completions.Items[i]) - } - } - - b.Run("tools", func(b *testing.B) { - if stopAndRecord := startProfileIfSupported(b, env, qualifiedName("tools", "completion")); stopAndRecord != nil { - defer stopAndRecord() - } - - for i := 0; i < b.N; i++ { - if options.beforeCompletion != nil { - options.beforeCompletion(env) - } - env.Completion(loc) - } - }) -} - -// endRangeInBuffer returns the position for last character in the buffer for -// the given file. -func endRangeInBuffer(env *Env, name string) protocol.Range { - buffer := env.BufferText(name) - m := protocol.NewMapper("", []byte(buffer)) - rng, err := m.OffsetRange(len(buffer), len(buffer)) - if err != nil { - env.T.Fatal(err) - } - return rng -} - -// Benchmark struct completion in tools codebase. -func BenchmarkStructCompletion(b *testing.B) { - file := "internal/lsp/cache/session.go" - - setup := func(env *Env) { - env.OpenFile(file) - env.EditBuffer(file, protocol.TextEdit{ - Range: endRangeInBuffer(env, file), - NewText: "\nvar testVariable map[string]bool = Session{}.\n", - }) - } - - benchmarkCompletion(completionBenchOptions{ - file: file, - locationRegexp: `var testVariable map\[string\]bool = Session{}(\.)`, - setup: setup, - }, b) -} - -// Benchmark import completion in tools codebase. -func BenchmarkImportCompletion(b *testing.B) { - const file = "internal/lsp/source/completion/completion.go" - benchmarkCompletion(completionBenchOptions{ - file: file, - locationRegexp: `go\/()`, - setup: func(env *Env) { env.OpenFile(file) }, - }, b) -} - -// Benchmark slice completion in tools codebase. 
-func BenchmarkSliceCompletion(b *testing.B) { - file := "internal/lsp/cache/session.go" - - setup := func(env *Env) { - env.OpenFile(file) - env.EditBuffer(file, protocol.TextEdit{ - Range: endRangeInBuffer(env, file), - NewText: "\nvar testVariable []byte = \n", - }) - } - - benchmarkCompletion(completionBenchOptions{ - file: file, - locationRegexp: `var testVariable \[\]byte (=)`, - setup: setup, - }, b) -} - -// Benchmark deep completion in function call in tools codebase. -func BenchmarkFuncDeepCompletion(b *testing.B) { - file := "internal/lsp/source/completion/completion.go" - fileContent := ` -func (c *completer) _() { - c.inference.kindMatches(c.) -} -` - setup := func(env *Env) { - env.OpenFile(file) - originalBuffer := env.BufferText(file) - env.EditBuffer(file, protocol.TextEdit{ - Range: endRangeInBuffer(env, file), - // TODO(rfindley): this is a bug: it should just be fileContent. - NewText: originalBuffer + fileContent, - }) - } - - benchmarkCompletion(completionBenchOptions{ - file: file, - locationRegexp: `func \(c \*completer\) _\(\) {\n\tc\.inference\.kindMatches\((c)`, - setup: setup, - }, b) -} - -type completionFollowingEditTest struct { - repo string - name string - file string // repo-relative file to create - content string // file content - locationRegexp string // regexp for completion -} - -var completionFollowingEditTests = []completionFollowingEditTest{ - { - "tools", - "selector", - "internal/lsp/source/completion/completion2.go", - ` -package completion - -func (c *completer) _() { - c.inference.kindMatches(c.) -} -`, - `func \(c \*completer\) _\(\) {\n\tc\.inference\.kindMatches\((c)`, - }, - { - "kubernetes", - "selector", - "pkg/kubelet/kubelet2.go", - ` -package kubelet - -func (kl *Kubelet) _() { - kl. 
-} -`, - `kl\.()`, - }, - { - "kubernetes", - "identifier", - "pkg/kubelet/kubelet2.go", - ` -package kubelet - -func (kl *Kubelet) _() { - k // here -} -`, - `k() // here`, - }, - { - "oracle", - "selector", - "dataintegration/pivot2.go", - ` -package dataintegration - -func (p *Pivot) _() { - p. -} -`, - `p\.()`, - }, -} - -// Benchmark completion following an arbitrary edit. -// -// Edits force type-checked packages to be invalidated, so we want to measure -// how long it takes before completion results are available. -func BenchmarkCompletionFollowingEdit(b *testing.B) { - for _, test := range completionFollowingEditTests { - b.Run(fmt.Sprintf("%s_%s", test.repo, test.name), func(b *testing.B) { - for _, completeUnimported := range []bool{true, false} { - b.Run(fmt.Sprintf("completeUnimported=%v", completeUnimported), func(b *testing.B) { - for _, budget := range []string{"0s", "100ms"} { - b.Run(fmt.Sprintf("budget=%s", budget), func(b *testing.B) { - runCompletionFollowingEdit(b, test, completeUnimported, budget) - }) - } - }) - } - }) - } -} - -var gomodcache = flag.String("gomodcache", "", "optional GOMODCACHE for unimported completion benchmarks") - -func runCompletionFollowingEdit(b *testing.B, test completionFollowingEditTest, completeUnimported bool, budget string) { - repo := getRepo(b, test.repo) - sharedEnv := repo.sharedEnv(b) // ensure cache is warm - envvars := map[string]string{ - "GOPATH": sharedEnv.Sandbox.GOPATH(), // use the warm cache - } - - if *gomodcache != "" { - envvars["GOMODCACHE"] = *gomodcache - } - - env := repo.newEnv(b, fake.EditorConfig{ - Env: envvars, - Settings: map[string]interface{}{ - "completeUnimported": completeUnimported, - "completionBudget": budget, - }, - }, "completionFollowingEdit", false) - defer env.Close() - - env.CreateBuffer(test.file, "// __REGTEST_PLACEHOLDER_0__\n"+test.content) - editPlaceholder := func() { - edits := atomic.AddInt64(&editID, 1) - env.EditBuffer(test.file, protocol.TextEdit{ - Range: 
protocol.Range{ - Start: protocol.Position{Line: 0, Character: 0}, - End: protocol.Position{Line: 1, Character: 0}, - }, - // Increment the placeholder text, to ensure cache misses. - NewText: fmt.Sprintf("// __REGTEST_PLACEHOLDER_%d__\n", edits), - }) - } - env.AfterChange() - - // Run a completion to make sure the system is warm. - loc := env.RegexpSearch(test.file, test.locationRegexp) - completions := env.Completion(loc) - - if testing.Verbose() { - fmt.Println("Results:") - for i, item := range completions.Items { - fmt.Printf("\t%d. %v\n", i, item) - } - } - - b.ResetTimer() - - if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "completionFollowingEdit")); stopAndRecord != nil { - defer stopAndRecord() - } - - for i := 0; i < b.N; i++ { - editPlaceholder() - loc := env.RegexpSearch(test.file, test.locationRegexp) - env.Completion(loc) - } -} diff --git a/gopls/internal/regtest/bench/iwl_test.go b/gopls/internal/regtest/bench/iwl_test.go deleted file mode 100644 index 6206f00a4d5..00000000000 --- a/gopls/internal/regtest/bench/iwl_test.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package bench - -import ( - "testing" - - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" -) - -// BenchmarkInitialWorkspaceLoad benchmarks the initial workspace load time for -// a new editing session. 
-func BenchmarkInitialWorkspaceLoad(b *testing.B) { - tests := []struct { - repo string - file string - }{ - {"google-cloud-go", "httpreplay/httpreplay.go"}, - {"istio", "pkg/fuzz/util.go"}, - {"kubernetes", "pkg/controller/lookup_cache.go"}, - {"kuma", "api/generic/insights.go"}, - {"oracle", "dataintegration/data_type.go"}, - {"pkgsite", "internal/frontend/server.go"}, - {"starlark", "starlark/eval.go"}, - {"tools", "internal/lsp/cache/snapshot.go"}, - {"hashiform", "internal/provider/provider.go"}, - } - - for _, test := range tests { - b.Run(test.repo, func(b *testing.B) { - repo := getRepo(b, test.repo) - // get the (initialized) shared env to ensure the cache is warm. - // Reuse its GOPATH so that we get cache hits for things in the module - // cache. - sharedEnv := repo.sharedEnv(b) - b.ResetTimer() - - for i := 0; i < b.N; i++ { - doIWL(b, sharedEnv.Sandbox.GOPATH(), repo, test.file) - } - }) - } -} - -func doIWL(b *testing.B, gopath string, repo *repo, file string) { - // Exclude the time to set up the env from the benchmark time, as this may - // involve installing gopls and/or checking out the repo dir. - b.StopTimer() - config := fake.EditorConfig{Env: map[string]string{"GOPATH": gopath}} - env := repo.newEnv(b, config, "iwl", true) - defer env.Close() - b.StartTimer() - - // Note: in the future, we may need to open a file in order to cause gopls to - // start loading the workspace. 
- - env.Await(InitialWorkspaceLoad) - - if env.Editor.HasCommand(command.MemStats.ID()) { - b.StopTimer() - params := &protocol.ExecuteCommandParams{ - Command: command.MemStats.ID(), - } - var memstats command.MemStatsResult - env.ExecuteCommand(params, &memstats) - b.ReportMetric(float64(memstats.HeapAlloc), "alloc_bytes") - b.ReportMetric(float64(memstats.HeapInUse), "in_use_bytes") - b.ReportMetric(float64(memstats.TotalAlloc), "total_alloc_bytes") - b.StartTimer() - } -} diff --git a/gopls/internal/regtest/codelens/codelens_test.go b/gopls/internal/regtest/codelens/codelens_test.go deleted file mode 100644 index 107db1a2c29..00000000000 --- a/gopls/internal/regtest/codelens/codelens_test.go +++ /dev/null @@ -1,352 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package codelens - -import ( - "fmt" - "testing" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/hooks" - "golang.org/x/tools/gopls/internal/lsp" - . 
"golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" - - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/testenv" -) - -func TestMain(m *testing.M) { - bug.PanicOnBugs = true - Main(m, hooks.Options) -} - -func TestDisablingCodeLens(t *testing.T) { - const workspace = ` --- go.mod -- -module codelens.test - -go 1.12 --- lib.go -- -package lib - -type Number int - -const ( - Zero Number = iota - One - Two -) - -//` + `go:generate stringer -type=Number -` - tests := []struct { - label string - enabled map[string]bool - wantCodeLens bool - }{ - { - label: "default", - wantCodeLens: true, - }, - { - label: "generate disabled", - enabled: map[string]bool{string(command.Generate): false}, - wantCodeLens: false, - }, - } - for _, test := range tests { - t.Run(test.label, func(t *testing.T) { - WithOptions( - Settings{"codelenses": test.enabled}, - ).Run(t, workspace, func(t *testing.T, env *Env) { - env.OpenFile("lib.go") - lens := env.CodeLens("lib.go") - if gotCodeLens := len(lens) > 0; gotCodeLens != test.wantCodeLens { - t.Errorf("got codeLens: %t, want %t", gotCodeLens, test.wantCodeLens) - } - }) - }) - } -} - -// This test confirms the full functionality of the code lenses for updating -// dependencies in a go.mod file. It checks for the code lens that suggests -// an update and then executes the command associated with that code lens. A -// regression test for golang/go#39446. It also checks that these code lenses -// only affect the diagnostics and contents of the containing go.mod file. 
-func TestUpgradeCodelens(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // uses go.work - - const proxyWithLatest = ` --- golang.org/x/hello@v1.3.3/go.mod -- -module golang.org/x/hello - -go 1.12 --- golang.org/x/hello@v1.3.3/hi/hi.go -- -package hi - -var Goodbye error --- golang.org/x/hello@v1.2.3/go.mod -- -module golang.org/x/hello - -go 1.12 --- golang.org/x/hello@v1.2.3/hi/hi.go -- -package hi - -var Goodbye error -` - - const shouldUpdateDep = ` --- go.work -- -go 1.18 - -use ( - ./a - ./b -) --- a/go.mod -- -module mod.com/a - -go 1.14 - -require golang.org/x/hello v1.2.3 --- a/go.sum -- -golang.org/x/hello v1.2.3 h1:7Wesfkx/uBd+eFgPrq0irYj/1XfmbvLV8jZ/W7C2Dwg= -golang.org/x/hello v1.2.3/go.mod h1:OgtlzsxVMUUdsdQCIDYgaauCTH47B8T8vofouNJfzgY= --- a/main.go -- -package main - -import "golang.org/x/hello/hi" - -func main() { - _ = hi.Goodbye -} --- b/go.mod -- -module mod.com/b - -go 1.14 - -require golang.org/x/hello v1.2.3 --- b/go.sum -- -golang.org/x/hello v1.2.3 h1:7Wesfkx/uBd+eFgPrq0irYj/1XfmbvLV8jZ/W7C2Dwg= -golang.org/x/hello v1.2.3/go.mod h1:OgtlzsxVMUUdsdQCIDYgaauCTH47B8T8vofouNJfzgY= --- b/main.go -- -package main - -import ( - "golang.org/x/hello/hi" -) - -func main() { - _ = hi.Goodbye -} -` - - const wantGoModA = `module mod.com/a - -go 1.14 - -require golang.org/x/hello v1.3.3 -` - // Applying the diagnostics or running the codelenses for a/go.mod - // should not change the contents of b/go.mod - const wantGoModB = `module mod.com/b - -go 1.14 - -require golang.org/x/hello v1.2.3 -` - - for _, commandTitle := range []string{ - "Upgrade transitive dependencies", - "Upgrade direct dependencies", - } { - t.Run(commandTitle, func(t *testing.T) { - WithOptions( - ProxyFiles(proxyWithLatest), - ).Run(t, shouldUpdateDep, func(t *testing.T, env *Env) { - env.OpenFile("a/go.mod") - env.OpenFile("b/go.mod") - var lens protocol.CodeLens - var found bool - for _, l := range env.CodeLens("a/go.mod") { - if l.Command.Title == commandTitle { - lens = l - 
found = true - } - } - if !found { - t.Fatalf("found no command with the title %s", commandTitle) - } - if _, err := env.Editor.ExecuteCommand(env.Ctx, &protocol.ExecuteCommandParams{ - Command: lens.Command.Command, - Arguments: lens.Command.Arguments, - }); err != nil { - t.Fatal(err) - } - env.AfterChange() - if got := env.BufferText("a/go.mod"); got != wantGoModA { - t.Fatalf("a/go.mod upgrade failed:\n%s", compare.Text(wantGoModA, got)) - } - if got := env.BufferText("b/go.mod"); got != wantGoModB { - t.Fatalf("b/go.mod changed unexpectedly:\n%s", compare.Text(wantGoModB, got)) - } - }) - }) - } - for _, vendoring := range []bool{false, true} { - t.Run(fmt.Sprintf("Upgrade individual dependency vendoring=%v", vendoring), func(t *testing.T) { - WithOptions( - ProxyFiles(proxyWithLatest), - ).Run(t, shouldUpdateDep, func(t *testing.T, env *Env) { - if vendoring { - env.RunGoCommandInDirWithEnv("a", []string{"GOWORK=off"}, "mod", "vendor") - } - env.AfterChange() - env.OpenFile("a/go.mod") - env.OpenFile("b/go.mod") - - // Await the diagnostics resulting from opening the modfiles, because - // otherwise they may cause races when running asynchronously to the - // explicit re-diagnosing below. - // - // TODO(golang/go#58750): there is still a race here, inherent to - // accessing state on the View; we should create a new snapshot when - // the view diagnostics change. - env.AfterChange() - - env.ExecuteCodeLensCommand("a/go.mod", command.CheckUpgrades, nil) - d := &protocol.PublishDiagnosticsParams{} - env.OnceMet( - Diagnostics(env.AtRegexp("a/go.mod", `require`), WithMessage("can be upgraded")), - ReadDiagnostics("a/go.mod", d), - // We do not want there to be a diagnostic for b/go.mod, - // but there may be some subtlety in timing here, where this - // should always succeed, but may not actually test the correct - // behavior. - NoDiagnostics(env.AtRegexp("b/go.mod", `require`)), - ) - // Check for upgrades in b/go.mod and then clear them. 
- env.ExecuteCodeLensCommand("b/go.mod", command.CheckUpgrades, nil) - env.Await(Diagnostics(env.AtRegexp("b/go.mod", `require`), WithMessage("can be upgraded"))) - env.ExecuteCodeLensCommand("b/go.mod", command.ResetGoModDiagnostics, nil) - env.Await(NoDiagnostics(ForFile("b/go.mod"))) - - // Apply the diagnostics to a/go.mod. - env.ApplyQuickFixes("a/go.mod", d.Diagnostics) - env.AfterChange() - if got := env.BufferText("a/go.mod"); got != wantGoModA { - t.Fatalf("a/go.mod upgrade failed:\n%s", compare.Text(wantGoModA, got)) - } - if got := env.BufferText("b/go.mod"); got != wantGoModB { - t.Fatalf("b/go.mod changed unexpectedly:\n%s", compare.Text(wantGoModB, got)) - } - }) - }) - } -} - -func TestUnusedDependenciesCodelens(t *testing.T) { - const proxy = ` --- golang.org/x/hello@v1.0.0/go.mod -- -module golang.org/x/hello - -go 1.14 --- golang.org/x/hello@v1.0.0/hi/hi.go -- -package hi - -var Goodbye error --- golang.org/x/unused@v1.0.0/go.mod -- -module golang.org/x/unused - -go 1.14 --- golang.org/x/unused@v1.0.0/nouse/nouse.go -- -package nouse - -var NotUsed error -` - - const shouldRemoveDep = ` --- go.mod -- -module mod.com - -go 1.14 - -require golang.org/x/hello v1.0.0 -require golang.org/x/unused v1.0.0 --- go.sum -- -golang.org/x/hello v1.0.0 h1:qbzE1/qT0/zojAMd/JcPsO2Vb9K4Bkeyq0vB2JGMmsw= -golang.org/x/hello v1.0.0/go.mod h1:WW7ER2MRNXWA6c8/4bDIek4Hc/+DofTrMaQQitGXcco= -golang.org/x/unused v1.0.0 h1:LecSbCn5P3vTcxubungSt1Pn4D/WocCaiWOPDC0y0rw= -golang.org/x/unused v1.0.0/go.mod h1:ihoW8SgWzugwwj0N2SfLfPZCxTB1QOVfhMfB5PWTQ8U= --- main.go -- -package main - -import "golang.org/x/hello/hi" - -func main() { - _ = hi.Goodbye -} -` - WithOptions(ProxyFiles(proxy)).Run(t, shouldRemoveDep, func(t *testing.T, env *Env) { - env.OpenFile("go.mod") - env.ExecuteCodeLensCommand("go.mod", command.Tidy, nil) - env.Await(env.DoneWithChangeWatchedFiles()) - got := env.BufferText("go.mod") - const wantGoMod = `module mod.com - -go 1.14 - -require golang.org/x/hello 
v1.0.0 -` - if got != wantGoMod { - t.Fatalf("go.mod tidy failed:\n%s", compare.Text(wantGoMod, got)) - } - }) -} - -func TestRegenerateCgo(t *testing.T) { - testenv.NeedsTool(t, "cgo") - const workspace = ` --- go.mod -- -module example.com - -go 1.12 --- cgo.go -- -package x - -/* -int fortythree() { return 42; } -*/ -import "C" - -func Foo() { - print(C.fortytwo()) -} -` - Run(t, workspace, func(t *testing.T, env *Env) { - // Open the file. We have a nonexistant symbol that will break cgo processing. - env.OpenFile("cgo.go") - env.AfterChange( - Diagnostics(env.AtRegexp("cgo.go", ``), WithMessage("go list failed to return CompiledGoFiles")), - ) - - // Fix the C function name. We haven't regenerated cgo, so nothing should be fixed. - env.RegexpReplace("cgo.go", `int fortythree`, "int fortytwo") - env.SaveBuffer("cgo.go") - env.AfterChange( - Diagnostics(env.AtRegexp("cgo.go", ``), WithMessage("go list failed to return CompiledGoFiles")), - ) - - // Regenerate cgo, fixing the diagnostic. - env.ExecuteCodeLensCommand("cgo.go", command.RegenerateCgo, nil) - env.OnceMet( - CompletedWork(lsp.DiagnosticWorkTitle(lsp.FromRegenerateCgo), 1, true), - NoDiagnostics(ForFile("cgo.go")), - ) - }) -} diff --git a/gopls/internal/regtest/completion/completion_test.go b/gopls/internal/regtest/completion/completion_test.go deleted file mode 100644 index 81300eb07e0..00000000000 --- a/gopls/internal/regtest/completion/completion_test.go +++ /dev/null @@ -1,1005 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "fmt" - "sort" - "strings" - "testing" - "time" - - "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/hooks" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . 
"golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/internal/testenv" -) - -func TestMain(m *testing.M) { - bug.PanicOnBugs = true - Main(m, hooks.Options) -} - -const proxy = ` --- example.com@v1.2.3/go.mod -- -module example.com - -go 1.12 --- example.com@v1.2.3/blah/blah.go -- -package blah - -const Name = "Blah" --- random.org@v1.2.3/go.mod -- -module random.org - -go 1.12 --- random.org@v1.2.3/blah/blah.go -- -package hello - -const Name = "Hello" -` - -func TestPackageCompletion(t *testing.T) { - const files = ` --- go.mod -- -module mod.com - -go 1.12 --- fruits/apple.go -- -package apple - -fun apple() int { - return 0 -} - --- fruits/testfile.go -- -// this is a comment - -/* - this is a multiline comment -*/ - -import "fmt" - -func test() {} - --- fruits/testfile2.go -- -package - --- fruits/testfile3.go -- -pac --- 123f_r.u~its-123/testfile.go -- -package - --- .invalid-dir@-name/testfile.go -- -package -` - var ( - testfile4 = "" - testfile5 = "/*a comment*/ " - testfile6 = "/*a comment*/\n" - ) - for _, tc := range []struct { - name string - filename string - content *string - triggerRegexp string - want []string - editRegexp string - }{ - { - name: "package completion at valid position", - filename: "fruits/testfile.go", - triggerRegexp: "\n()", - want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, - editRegexp: "\n()", - }, - { - name: "package completion in a comment", - filename: "fruits/testfile.go", - triggerRegexp: "th(i)s", - want: nil, - }, - { - name: "package completion in a multiline comment", - filename: "fruits/testfile.go", - triggerRegexp: `\/\*\n()`, - want: nil, - }, - { - name: "package completion at invalid position", - filename: "fruits/testfile.go", - triggerRegexp: "import \"fmt\"\n()", - want: nil, - }, - { - name: "package completion after keyword 'package'", - filename: "fruits/testfile2.go", - triggerRegexp: "package()", - want: []string{"package 
apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, - editRegexp: "package\n", - }, - { - name: "package completion with 'pac' prefix", - filename: "fruits/testfile3.go", - triggerRegexp: "pac()", - want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, - editRegexp: "pac", - }, - { - name: "package completion for empty file", - filename: "fruits/testfile4.go", - triggerRegexp: "^$", - content: &testfile4, - want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, - editRegexp: "^$", - }, - { - name: "package completion without terminal newline", - filename: "fruits/testfile5.go", - triggerRegexp: `\*\/ ()`, - content: &testfile5, - want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, - editRegexp: `\*\/ ()`, - }, - { - name: "package completion on terminal newline", - filename: "fruits/testfile6.go", - triggerRegexp: `\*\/\n()`, - content: &testfile6, - want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, - editRegexp: `\*\/\n()`, - }, - // Issue golang/go#44680 - { - name: "package completion for dir name with punctuation", - filename: "123f_r.u~its-123/testfile.go", - triggerRegexp: "package()", - want: []string{"package fruits123", "package fruits123_test", "package main"}, - editRegexp: "package\n", - }, - { - name: "package completion for invalid dir name", - filename: ".invalid-dir@-name/testfile.go", - triggerRegexp: "package()", - want: []string{"package main"}, - editRegexp: "package\n", - }, - } { - t.Run(tc.name, func(t *testing.T) { - Run(t, files, func(t *testing.T, env *Env) { - if tc.content != nil { - env.WriteWorkspaceFile(tc.filename, *tc.content) - env.Await(env.DoneWithChangeWatchedFiles()) - } - env.OpenFile(tc.filename) - completions := env.Completion(env.RegexpSearch(tc.filename, 
tc.triggerRegexp)) - - // Check that the completion item suggestions are in the range - // of the file. {Start,End}.Line are zero-based. - lineCount := len(strings.Split(env.BufferText(tc.filename), "\n")) - for _, item := range completions.Items { - if start := int(item.TextEdit.Range.Start.Line); start > lineCount { - t.Fatalf("unexpected text edit range start line number: got %d, want <= %d", start, lineCount) - } - if end := int(item.TextEdit.Range.End.Line); end > lineCount { - t.Fatalf("unexpected text edit range end line number: got %d, want <= %d", end, lineCount) - } - } - - if tc.want != nil { - expectedLoc := env.RegexpSearch(tc.filename, tc.editRegexp) - for _, item := range completions.Items { - gotRng := item.TextEdit.Range - if expectedLoc.Range != gotRng { - t.Errorf("unexpected completion range for completion item %s: got %v, want %v", - item.Label, gotRng, expectedLoc.Range) - } - } - } - - diff := compareCompletionLabels(tc.want, completions.Items) - if diff != "" { - t.Error(diff) - } - }) - }) - } -} - -func TestPackageNameCompletion(t *testing.T) { - const files = ` --- go.mod -- -module mod.com - -go 1.12 --- math/add.go -- -package ma -` - - want := []string{"ma", "ma_test", "main", "math", "math_test"} - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("math/add.go") - completions := env.Completion(env.RegexpSearch("math/add.go", "package ma()")) - - diff := compareCompletionLabels(want, completions.Items) - if diff != "" { - t.Fatal(diff) - } - }) -} - -// TODO(rfindley): audit/clean up call sites for this helper, to ensure -// consistent test errors. 
-func compareCompletionLabels(want []string, gotItems []protocol.CompletionItem) string { - var got []string - for _, item := range gotItems { - got = append(got, item.Label) - if item.Label != item.InsertText && item.TextEdit == nil { - // Label should be the same as InsertText, if InsertText is to be used - return fmt.Sprintf("label not the same as InsertText %#v", item) - } - } - - if len(got) == 0 && len(want) == 0 { - return "" // treat nil and the empty slice as equivalent - } - - if diff := cmp.Diff(want, got); diff != "" { - return fmt.Sprintf("completion item mismatch (-want +got):\n%s", diff) - } - return "" -} - -func TestUnimportedCompletion(t *testing.T) { - const mod = ` --- go.mod -- -module mod.com - -go 1.14 - -require example.com v1.2.3 --- go.sum -- -example.com v1.2.3 h1:ihBTGWGjTU3V4ZJ9OmHITkU9WQ4lGdQkMjgyLFk0FaY= -example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= --- main.go -- -package main - -func main() { - _ = blah -} --- main2.go -- -package main - -import "example.com/blah" - -func _() { - _ = blah.Hello -} -` - WithOptions( - ProxyFiles(proxy), - ).Run(t, mod, func(t *testing.T, env *Env) { - // Make sure the dependency is in the module cache and accessible for - // unimported completions, and then remove it before proceeding. - env.RemoveWorkspaceFile("main2.go") - env.RunGoCommand("mod", "tidy") - env.Await(env.DoneWithChangeWatchedFiles()) - - // Trigger unimported completions for the example.com/blah package. - env.OpenFile("main.go") - env.Await(env.DoneWithOpen()) - loc := env.RegexpSearch("main.go", "ah") - completions := env.Completion(loc) - if len(completions.Items) == 0 { - t.Fatalf("no completion items") - } - env.AcceptCompletion(loc, completions.Items[0]) // adds blah import to main.go - env.Await(env.DoneWithChange()) - - // Trigger completions once again for the blah.<> selector. 
- env.RegexpReplace("main.go", "_ = blah", "_ = blah.") - env.Await(env.DoneWithChange()) - loc = env.RegexpSearch("main.go", "\n}") - completions = env.Completion(loc) - if len(completions.Items) != 1 { - t.Fatalf("expected 1 completion item, got %v", len(completions.Items)) - } - item := completions.Items[0] - if item.Label != "Name" { - t.Fatalf("expected completion item blah.Name, got %v", item.Label) - } - env.AcceptCompletion(loc, item) - - // Await the diagnostics to add example.com/blah to the go.mod file. - env.AfterChange( - Diagnostics(env.AtRegexp("main.go", `"example.com/blah"`)), - ) - }) -} - -// Test that completions still work with an undownloaded module, golang/go#43333. -func TestUndownloadedModule(t *testing.T) { - // mod.com depends on example.com, but only in a file that's hidden by a - // build tag, so the IWL won't download example.com. That will cause errors - // in the go list -m call performed by the imports package. - const files = ` --- go.mod -- -module mod.com - -go 1.14 - -require example.com v1.2.3 --- go.sum -- -example.com v1.2.3 h1:ihBTGWGjTU3V4ZJ9OmHITkU9WQ4lGdQkMjgyLFk0FaY= -example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= --- useblah.go -- -// +build hidden - -package pkg -import "example.com/blah" -var _ = blah.Name --- mainmod/mainmod.go -- -package mainmod - -const Name = "mainmod" -` - WithOptions(ProxyFiles(proxy)).Run(t, files, func(t *testing.T, env *Env) { - env.CreateBuffer("import.go", "package pkg\nvar _ = mainmod.Name\n") - env.SaveBuffer("import.go") - content := env.ReadWorkspaceFile("import.go") - if !strings.Contains(content, `import "mod.com/mainmod`) { - t.Errorf("expected import of mod.com/mainmod in %q", content) - } - }) -} - -// Test that we can doctor the source code enough so the file is -// parseable and completion works as expected. 
-func TestSourceFixup(t *testing.T) { - const files = ` --- go.mod -- -module mod.com - -go 1.12 --- foo.go -- -package foo - -func _() { - var s S - if s. -} - -type S struct { - i int -} -` - - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("foo.go") - completions := env.Completion(env.RegexpSearch("foo.go", `if s\.()`)) - diff := compareCompletionLabels([]string{"i"}, completions.Items) - if diff != "" { - t.Fatal(diff) - } - }) -} - -func TestCompletion_Issue45510(t *testing.T) { - const files = ` --- go.mod -- -module mod.com - -go 1.12 --- main.go -- -package main - -func _() { - type a *a - var aaaa1, aaaa2 a - var _ a = aaaa - - type b a - var bbbb1, bbbb2 b - var _ b = bbbb -} - -type ( - c *d - d *e - e **c -) - -func _() { - var ( - xxxxc c - xxxxd d - xxxxe e - ) - - var _ c = xxxx - var _ d = xxxx - var _ e = xxxx -} -` - - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("main.go") - - tests := []struct { - re string - want []string - }{ - {`var _ a = aaaa()`, []string{"aaaa1", "aaaa2"}}, - {`var _ b = bbbb()`, []string{"bbbb1", "bbbb2"}}, - {`var _ c = xxxx()`, []string{"xxxxc", "xxxxd", "xxxxe"}}, - {`var _ d = xxxx()`, []string{"xxxxc", "xxxxd", "xxxxe"}}, - {`var _ e = xxxx()`, []string{"xxxxc", "xxxxd", "xxxxe"}}, - } - for _, tt := range tests { - completions := env.Completion(env.RegexpSearch("main.go", tt.re)) - diff := compareCompletionLabels(tt.want, completions.Items) - if diff != "" { - t.Errorf("%s: %s", tt.re, diff) - } - } - }) -} - -func TestCompletionDeprecation(t *testing.T) { - const files = ` --- go.mod -- -module test.com - -go 1.16 --- prog.go -- -package waste -// Deprecated, use newFoof -func fooFunc() bool { - return false -} - -// Deprecated -const badPi = 3.14 - -func doit() { - if fooF - panic() - x := badP -} -` - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("prog.go") - loc := env.RegexpSearch("prog.go", "if fooF") - loc.Range.Start.Character += uint32(protocol.UTF16Len([]byte("if 
fooF"))) - completions := env.Completion(loc) - diff := compareCompletionLabels([]string{"fooFunc"}, completions.Items) - if diff != "" { - t.Error(diff) - } - if completions.Items[0].Tags == nil { - t.Errorf("expected Tags to show deprecation %#v", completions.Items[0].Tags) - } - loc = env.RegexpSearch("prog.go", "= badP") - loc.Range.Start.Character += uint32(protocol.UTF16Len([]byte("= badP"))) - completions = env.Completion(loc) - diff = compareCompletionLabels([]string{"badPi"}, completions.Items) - if diff != "" { - t.Error(diff) - } - if completions.Items[0].Tags == nil { - t.Errorf("expected Tags to show deprecation %#v", completions.Items[0].Tags) - } - }) -} - -func TestUnimportedCompletion_VSCodeIssue1489(t *testing.T) { - const src = ` --- go.mod -- -module mod.com - -go 1.14 - --- main.go -- -package main - -import "fmt" - -func main() { - fmt.Println("a") - math.Sqr -} -` - WithOptions( - WindowsLineEndings(), - Settings{"ui.completion.usePlaceholders": true}, - ).Run(t, src, func(t *testing.T, env *Env) { - // Trigger unimported completions for the mod.com package. - env.OpenFile("main.go") - env.Await(env.DoneWithOpen()) - loc := env.RegexpSearch("main.go", "Sqr()") - completions := env.Completion(loc) - if len(completions.Items) == 0 { - t.Fatalf("no completion items") - } - env.AcceptCompletion(loc, completions.Items[0]) - env.Await(env.DoneWithChange()) - got := env.BufferText("main.go") - want := "package main\r\n\r\nimport (\r\n\t\"fmt\"\r\n\t\"math\"\r\n)\r\n\r\nfunc main() {\r\n\tfmt.Println(\"a\")\r\n\tmath.Sqrt(${1:x float64})\r\n}\r\n" - if diff := cmp.Diff(want, got); diff != "" { - t.Errorf("unimported completion (-want +got):\n%s", diff) - } - }) -} - -func TestUnimportedCompletionHasPlaceholders60269(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // uses type params - - // We can't express this as a marker test because it doesn't support AcceptCompletion. 
- const src = ` --- go.mod -- -module example.com -go 1.12 - --- a/a.go -- -package a - -var _ = b.F - --- b/b.go -- -package b - -func F0(a, b int, c float64) {} -func F1(int, chan *string) {} -func F2[K, V any](map[K]V, chan V) {} // missing type parameters was issue #60959 -func F3[K comparable, V any](map[K]V, chan V) {} -` - WithOptions( - WindowsLineEndings(), - Settings{"ui.completion.usePlaceholders": true}, - ).Run(t, src, func(t *testing.T, env *Env) { - env.OpenFile("a/a.go") - env.Await(env.DoneWithOpen()) - - // The table lists the expected completions of b.F as they appear in Items. - const common = "package a\r\n\r\nimport \"example.com/b\"\r\n\r\nvar _ = " - for i, want := range []string{ - common + "b.F0(${1:a int}, ${2:b int}, ${3:c float64})\r\n", - common + "b.F1(${1:_ int}, ${2:_ chan *string})\r\n", - common + "b.F2[${1:K any}, ${2:V any}](${3:_ map[K]V}, ${4:_ chan V})\r\n", - common + "b.F3[${1:K comparable}, ${2:V any}](${3:_ map[K]V}, ${4:_ chan V})\r\n", - } { - loc := env.RegexpSearch("a/a.go", "b.F()") - completions := env.Completion(loc) - if len(completions.Items) == 0 { - t.Fatalf("no completion items") - } - saved := env.BufferText("a/a.go") - env.AcceptCompletion(loc, completions.Items[i]) - env.Await(env.DoneWithChange()) - got := env.BufferText("a/a.go") - if diff := cmp.Diff(want, got); diff != "" { - t.Errorf("%d: unimported completion (-want +got):\n%s", i, diff) - } - env.SetBufferContent("a/a.go", saved) // restore - } - }) -} - -func TestPackageMemberCompletionAfterSyntaxError(t *testing.T) { - // This test documents the current broken behavior due to golang/go#58833. 
- const src = ` --- go.mod -- -module mod.com - -go 1.14 - --- main.go -- -package main - -import "math" - -func main() { - math.Sqrt(,0) - math.Ldex -} -` - Run(t, src, func(t *testing.T, env *Env) { - env.OpenFile("main.go") - env.Await(env.DoneWithOpen()) - loc := env.RegexpSearch("main.go", "Ldex()") - completions := env.Completion(loc) - if len(completions.Items) == 0 { - t.Fatalf("no completion items") - } - env.AcceptCompletion(loc, completions.Items[0]) - env.Await(env.DoneWithChange()) - got := env.BufferText("main.go") - // The completion of math.Ldex after the syntax error on the - // previous line is not "math.Ldexp" but "math.Ldexmath.Abs". - // (In VSCode, "Abs" wrongly appears in the completion menu.) - // This is a consequence of poor error recovery in the parser - // causing "math.Ldex" to become a BadExpr. - want := "package main\n\nimport \"math\"\n\nfunc main() {\n\tmath.Sqrt(,0)\n\tmath.Ldexmath.Abs(${1:})\n}\n" - if diff := cmp.Diff(want, got); diff != "" { - t.Errorf("unimported completion (-want +got):\n%s", diff) - } - }) -} - -func TestCompleteAllFields(t *testing.T) { - // This test verifies that completion results always include all struct fields. - // See golang/go#53992. - - const src = ` --- go.mod -- -module mod.com - -go 1.18 - --- p/p.go -- -package p - -import ( - "fmt" - - . "net/http" - . "runtime" - . "go/types" - . "go/parser" - . "go/ast" -) - -type S struct { - a, b, c, d, e, f, g, h, i, j, k, l, m int - n, o, p, q, r, s, t, u, v, w, x, y, z int -} - -func _() { - var s S - fmt.Println(s.) -} -` - - WithOptions(Settings{ - "completionBudget": "1ns", // must be non-zero as 0 => infinity - }).Run(t, src, func(t *testing.T, env *Env) { - wantFields := make(map[string]bool) - for c := 'a'; c <= 'z'; c++ { - wantFields[string(c)] = true - } - - env.OpenFile("p/p.go") - // Make an arbitrary edit to ensure we're not hitting the cache. 
- env.EditBuffer("p/p.go", fake.NewEdit(0, 0, 0, 0, fmt.Sprintf("// current time: %v\n", time.Now()))) - loc := env.RegexpSearch("p/p.go", `s\.()`) - completions := env.Completion(loc) - gotFields := make(map[string]bool) - for _, item := range completions.Items { - if item.Kind == protocol.FieldCompletion { - gotFields[item.Label] = true - } - } - - if diff := cmp.Diff(wantFields, gotFields); diff != "" { - t.Errorf("Completion(...) returned mismatching fields (-want +got):\n%s", diff) - } - }) -} - -func TestDefinition(t *testing.T) { - testenv.NeedsGo1Point(t, 17) // in go1.16, The FieldList in func x is not empty - files := ` --- go.mod -- -module mod.com - -go 1.18 --- a_test.go -- -package foo -` - tests := []struct { - line string // the sole line in the buffer after the package statement - pat string // the pattern to search for - want []string // expected completions - }{ - {"func T", "T", []string{"TestXxx(t *testing.T)", "TestMain(m *testing.M)"}}, - {"func T()", "T", []string{"TestMain", "Test"}}, - {"func TestM", "TestM", []string{"TestMain(m *testing.M)", "TestM(t *testing.T)"}}, - {"func TestM()", "TestM", []string{"TestMain"}}, - {"func TestMi", "TestMi", []string{"TestMi(t *testing.T)"}}, - {"func TestMi()", "TestMi", nil}, - {"func TestG", "TestG", []string{"TestG(t *testing.T)"}}, - {"func TestG(", "TestG", nil}, - {"func Ben", "B", []string{"BenchmarkXxx(b *testing.B)"}}, - {"func Ben(", "Ben", []string{"Benchmark"}}, - {"func BenchmarkFoo", "BenchmarkFoo", []string{"BenchmarkFoo(b *testing.B)"}}, - {"func BenchmarkFoo(", "BenchmarkFoo", nil}, - {"func Fuz", "F", []string{"FuzzXxx(f *testing.F)"}}, - {"func Fuz(", "Fuz", []string{"Fuzz"}}, - {"func Testx", "Testx", nil}, - {"func TestMe(t *testing.T)", "TestMe", nil}, - {"func Te(t *testing.T)", "Te", []string{"TestMain", "Test"}}, - } - fname := "a_test.go" - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile(fname) - env.Await(env.DoneWithOpen()) - for _, test := range tests { - 
env.SetBufferContent(fname, "package foo\n"+test.line) - loc := env.RegexpSearch(fname, test.pat) - loc.Range.Start.Character += uint32(protocol.UTF16Len([]byte(test.pat))) - completions := env.Completion(loc) - if diff := compareCompletionLabels(test.want, completions.Items); diff != "" { - t.Error(diff) - } - } - }) -} - -// Test that completing a definition replaces source text when applied, golang/go#56852. -// Note: With go <= 1.16 the completions does not add parameters and fails these tests. -func TestDefinitionReplaceRange(t *testing.T) { - testenv.NeedsGo1Point(t, 17) - - const mod = ` --- go.mod -- -module mod.com - -go 1.17 -` - - tests := []struct { - name string - before, after string - }{ - { - name: "func TestMa", - before: ` -package foo_test - -func TestMa -`, - after: ` -package foo_test - -func TestMain(m *testing.M) -`, - }, - { - name: "func TestSome", - before: ` -package foo_test - -func TestSome -`, - after: ` -package foo_test - -func TestSome(t *testing.T) -`, - }, - { - name: "func Bench", - before: ` -package foo_test - -func Bench -`, - // Note: Snippet with escaped }. 
- after: ` -package foo_test - -func Benchmark${1:Xxx}(b *testing.B) { - $0 -\} -`, - }, - } - - Run(t, mod, func(t *testing.T, env *Env) { - env.CreateBuffer("foo_test.go", "") - - for _, tst := range tests { - tst.before = strings.Trim(tst.before, "\n") - tst.after = strings.Trim(tst.after, "\n") - env.SetBufferContent("foo_test.go", tst.before) - - loc := env.RegexpSearch("foo_test.go", tst.name) - loc.Range.Start.Character = uint32(protocol.UTF16Len([]byte(tst.name))) - completions := env.Completion(loc) - if len(completions.Items) == 0 { - t.Fatalf("no completion items") - } - - env.AcceptCompletion(loc, completions.Items[0]) - env.Await(env.DoneWithChange()) - if buf := env.BufferText("foo_test.go"); buf != tst.after { - t.Errorf("%s:incorrect completion: got %q, want %q", tst.name, buf, tst.after) - } - } - }) -} - -func TestGoWorkCompletion(t *testing.T) { - const files = ` --- go.work -- -go 1.18 - -use ./a -use ./a/ba -use ./a/b/ -use ./dir/foo -use ./dir/foobar/ --- a/go.mod -- --- go.mod -- --- a/bar/go.mod -- --- a/b/c/d/e/f/go.mod -- --- dir/bar -- --- dir/foobar/go.mod -- -` - - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("go.work") - - tests := []struct { - re string - want []string - }{ - {`use ()\.`, []string{".", "./a", "./a/bar", "./dir/foobar"}}, - {`use \.()`, []string{"", "/a", "/a/bar", "/dir/foobar"}}, - {`use \./()`, []string{"a", "a/bar", "dir/foobar"}}, - {`use ./a()`, []string{"", "/b/c/d/e/f", "/bar"}}, - {`use ./a/b()`, []string{"/c/d/e/f", "ar"}}, - {`use ./a/b/()`, []string{`c/d/e/f`}}, - {`use ./a/ba()`, []string{"r"}}, - {`use ./dir/foo()`, []string{"bar"}}, - {`use ./dir/foobar/()`, []string{}}, - } - for _, tt := range tests { - completions := env.Completion(env.RegexpSearch("go.work", tt.re)) - diff := compareCompletionLabels(tt.want, completions.Items) - if diff != "" { - t.Errorf("%s: %s", tt.re, diff) - } - } - }) -} - -func TestBuiltinCompletion(t *testing.T) { - const files = ` --- go.mod -- -module mod.com 
- -go 1.18 --- a.go -- -package a - -func _() { - // here -} -` - - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("a.go") - result := env.Completion(env.RegexpSearch("a.go", `// here`)) - builtins := []string{ - "any", "append", "bool", "byte", "cap", "close", - "comparable", "complex", "complex128", "complex64", "copy", "delete", - "error", "false", "float32", "float64", "imag", "int", "int16", "int32", - "int64", "int8", "len", "make", "new", "panic", "print", "println", "real", - "recover", "rune", "string", "true", "uint", "uint16", "uint32", "uint64", - "uint8", "uintptr", "nil", - } - if testenv.Go1Point() >= 21 { - builtins = append(builtins, "clear", "max", "min") - } - sort.Strings(builtins) - var got []string - - for _, item := range result.Items { - // TODO(rfindley): for flexibility, ignore zero while it is being - // implemented. Remove this if/when zero lands. - if item.Label != "zero" { - got = append(got, item.Label) - } - } - sort.Strings(got) - - if diff := cmp.Diff(builtins, got); diff != "" { - t.Errorf("Completion: unexpected mismatch (-want +got):\n%s", diff) - } - }) -} - -func TestOverlayCompletion(t *testing.T) { - const files = ` --- go.mod -- -module foo.test - -go 1.18 - --- foo/foo.go -- -package foo - -type Foo struct{} -` - - Run(t, files, func(t *testing.T, env *Env) { - env.CreateBuffer("nodisk/nodisk.go", ` -package nodisk - -import ( - "foo.test/foo" -) - -func _() { - foo.Foo() -} -`) - list := env.Completion(env.RegexpSearch("nodisk/nodisk.go", "foo.(Foo)")) - want := []string{"Foo"} - var got []string - for _, item := range list.Items { - got = append(got, item.Label) - } - if diff := cmp.Diff(want, got); diff != "" { - t.Errorf("Completion: unexpected mismatch (-want +got):\n%s", diff) - } - }) -} - -// Fix for golang/go#60062: unimported completion included "golang.org/toolchain" results. 
-func TestToolchainCompletions(t *testing.T) { - const files = ` --- go.mod -- -module foo.test/foo - -go 1.21 - --- foo.go -- -package foo - -func _() { - os.Open -} - -func _() { - strings -} -` - - const proxy = ` --- golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64/go.mod -- -module golang.org/toolchain --- golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64/src/os/os.go -- -package os - -func Open() {} --- golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64/src/strings/strings.go -- -package strings - -func Join() {} -` - - WithOptions( - ProxyFiles(proxy), - ).Run(t, files, func(t *testing.T, env *Env) { - env.RunGoCommand("mod", "download", "golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64") - env.OpenFile("foo.go") - - for _, pattern := range []string{"os.Open()", "string()"} { - loc := env.RegexpSearch("foo.go", pattern) - res := env.Completion(loc) - for _, item := range res.Items { - if strings.Contains(item.Detail, "golang.org/toolchain") { - t.Errorf("Completion(...) returned toolchain item %#v", item) - } - } - } - }) -} diff --git a/gopls/internal/regtest/completion/postfix_snippet_test.go b/gopls/internal/regtest/completion/postfix_snippet_test.go deleted file mode 100644 index bfaa8f664f4..00000000000 --- a/gopls/internal/regtest/completion/postfix_snippet_test.go +++ /dev/null @@ -1,590 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "strings" - "testing" - - . 
"golang.org/x/tools/gopls/internal/lsp/regtest" -) - -func TestPostfixSnippetCompletion(t *testing.T) { - const mod = ` --- go.mod -- -module mod.com - -go 1.12 -` - - cases := []struct { - name string - before, after string - }{ - { - name: "sort", - before: ` -package foo - -func _() { - var foo []int - foo.sort -} -`, - after: ` -package foo - -import "sort" - -func _() { - var foo []int - sort.Slice(foo, func(i, j int) bool { - $0 -}) -} -`, - }, - { - name: "sort_renamed_sort_package", - before: ` -package foo - -import blahsort "sort" - -var j int - -func _() { - var foo []int - foo.sort -} -`, - after: ` -package foo - -import blahsort "sort" - -var j int - -func _() { - var foo []int - blahsort.Slice(foo, func(i, j2 int) bool { - $0 -}) -} -`, - }, - { - name: "last", - before: ` -package foo - -func _() { - var s struct { i []int } - s.i.last -} -`, - after: ` -package foo - -func _() { - var s struct { i []int } - s.i[len(s.i)-1] -} -`, - }, - { - name: "reverse", - before: ` -package foo - -func _() { - var foo []int - foo.reverse -} -`, - after: ` -package foo - -func _() { - var foo []int - for i, j := 0, len(foo)-1; i < j; i, j = i+1, j-1 { - foo[i], foo[j] = foo[j], foo[i] -} - -} -`, - }, - { - name: "slice_range", - before: ` -package foo - -func _() { - type myThing struct{} - var foo []myThing - foo.range -} -`, - after: ` -package foo - -func _() { - type myThing struct{} - var foo []myThing - for i, mt := range foo { - $0 -} -} -`, - }, - { - name: "append_stmt", - before: ` -package foo - -func _() { - var foo []int - foo.append -} -`, - after: ` -package foo - -func _() { - var foo []int - foo = append(foo, $0) -} -`, - }, - { - name: "append_expr", - before: ` -package foo - -func _() { - var foo []int - var _ []int = foo.append -} -`, - after: ` -package foo - -func _() { - var foo []int - var _ []int = append(foo, $0) -} -`, - }, - { - name: "slice_copy", - before: ` -package foo - -func _() { - var foo []int - foo.copy -} -`, - after: ` 
-package foo - -func _() { - var foo []int - fooCopy := make([]int, len(foo)) -copy(fooCopy, foo) - -} -`, - }, - { - name: "map_range", - before: ` -package foo - -func _() { - var foo map[string]int - foo.range -} -`, - after: ` -package foo - -func _() { - var foo map[string]int - for k, v := range foo { - $0 -} -} -`, - }, - { - name: "map_clear", - before: ` -package foo - -func _() { - var foo map[string]int - foo.clear -} -`, - after: ` -package foo - -func _() { - var foo map[string]int - for k := range foo { - delete(foo, k) -} - -} -`, - }, - { - name: "map_keys", - before: ` -package foo - -func _() { - var foo map[string]int - foo.keys -} -`, - after: ` -package foo - -func _() { - var foo map[string]int - keys := make([]string, 0, len(foo)) -for k := range foo { - keys = append(keys, k) -} - -} -`, - }, - { - name: "channel_range", - before: ` -package foo - -func _() { - foo := make(chan int) - foo.range -} -`, - after: ` -package foo - -func _() { - foo := make(chan int) - for e := range foo { - $0 -} -} -`, - }, - { - name: "var", - before: ` -package foo - -func foo() (int, error) { return 0, nil } - -func _() { - foo().var -} -`, - after: ` -package foo - -func foo() (int, error) { return 0, nil } - -func _() { - i, err := foo() -} -`, - }, - { - name: "var_single_value", - before: ` -package foo - -func foo() error { return nil } - -func _() { - foo().var -} -`, - after: ` -package foo - -func foo() error { return nil } - -func _() { - err := foo() -} -`, - }, - { - name: "var_same_type", - before: ` -package foo - -func foo() (int, int) { return 0, 0 } - -func _() { - foo().var -} -`, - after: ` -package foo - -func foo() (int, int) { return 0, 0 } - -func _() { - i, i2 := foo() -} -`, - }, - { - name: "print_scalar", - before: ` -package foo - -func _() { - var foo int - foo.print -} -`, - after: ` -package foo - -import "fmt" - -func _() { - var foo int - fmt.Printf("foo: %v\n", foo) -} -`, - }, - { - name: "print_multi", - before: ` -package 
foo - -func foo() (int, error) { return 0, nil } - -func _() { - foo().print -} -`, - after: ` -package foo - -import "fmt" - -func foo() (int, error) { return 0, nil } - -func _() { - fmt.Println(foo()) -} -`, - }, - { - name: "string split", - before: ` -package foo - -func foo() []string { - x := "test" - return x.split -}`, - after: ` -package foo - -import "strings" - -func foo() []string { - x := "test" - return strings.Split(x, "$0") -}`, - }, - { - name: "string slice join", - before: ` -package foo - -func foo() string { - x := []string{"a", "test"} - return x.join -}`, - after: ` -package foo - -import "strings" - -func foo() string { - x := []string{"a", "test"} - return strings.Join(x, "$0") -}`, - }, - { - name: "if not nil interface", - before: ` -package foo - -func _() { - var foo error - foo.ifnotnil -} -`, - after: ` -package foo - -func _() { - var foo error - if foo != nil { - $0 -} -} -`, - }, - { - name: "if not nil pointer", - before: ` -package foo - -func _() { - var foo *int - foo.ifnotnil -} -`, - after: ` -package foo - -func _() { - var foo *int - if foo != nil { - $0 -} -} -`, - }, - { - name: "if not nil slice", - before: ` -package foo - -func _() { - var foo []int - foo.ifnotnil -} -`, - after: ` -package foo - -func _() { - var foo []int - if foo != nil { - $0 -} -} -`, - }, - { - name: "if not nil map", - before: ` -package foo - -func _() { - var foo map[string]any - foo.ifnotnil -} -`, - after: ` -package foo - -func _() { - var foo map[string]any - if foo != nil { - $0 -} -} -`, - }, - { - name: "if not nil channel", - before: ` -package foo - -func _() { - var foo chan int - foo.ifnotnil -} -`, - after: ` -package foo - -func _() { - var foo chan int - if foo != nil { - $0 -} -} -`, - }, - { - name: "if not nil function", - before: ` -package foo - -func _() { - var foo func() - foo.ifnotnil -} -`, - after: ` -package foo - -func _() { - var foo func() - if foo != nil { - $0 -} -} -`, - }, - } - - r := WithOptions( - Settings{ 
- "experimentalPostfixCompletions": true, - }, - ) - r.Run(t, mod, func(t *testing.T, env *Env) { - env.CreateBuffer("foo.go", "") - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - c.before = strings.Trim(c.before, "\n") - c.after = strings.Trim(c.after, "\n") - - env.SetBufferContent("foo.go", c.before) - - loc := env.RegexpSearch("foo.go", "\n}") - completions := env.Completion(loc) - if len(completions.Items) != 1 { - t.Fatalf("expected one completion, got %v", completions.Items) - } - - env.AcceptCompletion(loc, completions.Items[0]) - - if buf := env.BufferText("foo.go"); buf != c.after { - t.Errorf("\nGOT:\n%s\nEXPECTED:\n%s", buf, c.after) - } - }) - } - }) -} diff --git a/gopls/internal/regtest/debug/debug_test.go b/gopls/internal/regtest/debug/debug_test.go deleted file mode 100644 index 261abf956fe..00000000000 --- a/gopls/internal/regtest/debug/debug_test.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package debug - -import ( - "context" - "encoding/json" - "io" - "net/http" - "strings" - "testing" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/hooks" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" -) - -func TestMain(m *testing.M) { - Main(m, hooks.Options) -} - -func TestBugNotification(t *testing.T) { - // Verify that a properly configured session gets notified of a bug on the - // server. - WithOptions( - Modes(Default), // must be in-process to receive the bug report below - Settings{"showBugReports": true}, - ).Run(t, "", func(t *testing.T, env *Env) { - const desc = "got a bug" - bug.Report(desc) - env.Await(ShownMessage(desc)) - }) -} - -// TestStartDebugging executes a gopls.start_debugging command to -// start the internal web server. 
-func TestStartDebugging(t *testing.T) { - WithOptions( - Modes(Default|Experimental), // doesn't work in Forwarded mode - ).Run(t, "", func(t *testing.T, env *Env) { - // Start a debugging server. - res, err := startDebugging(env.Ctx, env.Editor.Server, &command.DebuggingArgs{ - Addr: "", // any free port - }) - if err != nil { - t.Fatalf("startDebugging: %v", err) - } - - // Assert that the server requested that the - // client show the debug page in a browser. - debugURL := res.URLs[0] - env.Await(ShownDocument(debugURL)) - - // Send a request to the debug server and ensure it responds. - resp, err := http.Get(debugURL) - if err != nil { - t.Fatal(err) - } - defer resp.Body.Close() - data, err := io.ReadAll(resp.Body) - if err != nil { - t.Fatalf("reading HTTP response body: %v", err) - } - const want = "GoPls" - if !strings.Contains(string(data), want) { - t.Errorf("GET %s response does not contain %q: <<%s>>", debugURL, want, data) - } - }) -} - -// startDebugging starts a debugging server. -// TODO(adonovan): move into command package? -func startDebugging(ctx context.Context, server protocol.Server, args *command.DebuggingArgs) (*command.DebuggingResult, error) { - rawArgs, err := command.MarshalArgs(args) - if err != nil { - return nil, err - } - res0, err := server.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ - Command: command.StartDebugging.ID(), - Arguments: rawArgs, - }) - if err != nil { - return nil, err - } - // res0 is the result of a schemaless (map[string]any) JSON decoding. - // Re-encode and decode into the correct Go struct type. - // TODO(adonovan): fix (*serverDispatcher).ExecuteCommand. 
- data, err := json.Marshal(res0) - if err != nil { - return nil, err - } - var res *command.DebuggingResult - if err := json.Unmarshal(data, &res); err != nil { - return nil, err - } - return res, nil -} diff --git a/gopls/internal/regtest/diagnostics/undeclared_test.go b/gopls/internal/regtest/diagnostics/undeclared_test.go deleted file mode 100644 index ac5f598cc48..00000000000 --- a/gopls/internal/regtest/diagnostics/undeclared_test.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package diagnostics - -import ( - "testing" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" -) - -func TestUndeclaredDiagnostics(t *testing.T) { - src := ` --- go.mod -- -module mod.com - -go 1.12 --- a/a.go -- -package a - -func _() int { - return x -} --- b/b.go -- -package b - -func _() int { - var y int - y = y - return y -} -` - Run(t, src, func(t *testing.T, env *Env) { - isUnnecessary := func(diag protocol.Diagnostic) bool { - for _, tag := range diag.Tags { - if tag == protocol.Unnecessary { - return true - } - } - return false - } - - // 'x' is undeclared, but still necessary. - env.OpenFile("a/a.go") - var adiags protocol.PublishDiagnosticsParams - env.AfterChange( - Diagnostics(env.AtRegexp("a/a.go", "x")), - ReadDiagnostics("a/a.go", &adiags), - ) - if got := len(adiags.Diagnostics); got != 1 { - t.Errorf("len(Diagnostics) = %d, want 1", got) - } - if diag := adiags.Diagnostics[0]; isUnnecessary(diag) { - t.Errorf("%v tagged unnecessary, want necessary", diag) - } - - // 'y = y' is pointless, and should be detected as unnecessary. 
- env.OpenFile("b/b.go") - var bdiags protocol.PublishDiagnosticsParams - env.AfterChange( - Diagnostics(env.AtRegexp("b/b.go", "y = y")), - ReadDiagnostics("b/b.go", &bdiags), - ) - if got := len(bdiags.Diagnostics); got != 1 { - t.Errorf("len(Diagnostics) = %d, want 1", got) - } - if diag := bdiags.Diagnostics[0]; !isUnnecessary(diag) { - t.Errorf("%v tagged necessary, want unnecessary", diag) - } - }) -} diff --git a/gopls/internal/regtest/marker/marker_test.go b/gopls/internal/regtest/marker/marker_test.go deleted file mode 100644 index 557c2228d79..00000000000 --- a/gopls/internal/regtest/marker/marker_test.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package marker - -import ( - "os" - "testing" - - "golang.org/x/tools/gopls/internal/bug" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/internal/testenv" -) - -func TestMain(m *testing.M) { - bug.PanicOnBugs = true - testenv.ExitIfSmallMachine() - os.Exit(m.Run()) -} - -// Note: we use a separate package for the marker tests so that we can easily -// compare their performance to the existing marker tests in ./internal/lsp. - -// TestMarkers runs the marker tests from the testdata directory. -// -// See RunMarkerTests for details on how marker tests work. -func TestMarkers(t *testing.T) { - RunMarkerTests(t, "testdata") -} diff --git a/gopls/internal/regtest/marker/testdata/codeaction/infertypeargs.txt b/gopls/internal/regtest/marker/testdata/codeaction/infertypeargs.txt deleted file mode 100644 index 6f7b5fbe8c0..00000000000 --- a/gopls/internal/regtest/marker/testdata/codeaction/infertypeargs.txt +++ /dev/null @@ -1,38 +0,0 @@ -This test verifies the infertypeargs refactoring. 
- --- flags -- --min_go=go1.18 - --- go.mod -- -module mod.test/infertypeargs - -go 1.18 - --- p.go -- -package infertypeargs - -func app[S interface{ ~[]E }, E interface{}](s S, e E) S { - return append(s, e) -} - -func _() { - _ = app[[]int] - _ = app[[]int, int] - _ = app[[]int]([]int{}, 0) //@codeaction("app", ")", "refactor.rewrite", infer) - _ = app([]int{}, 0) -} - --- @infer/p.go -- -package infertypeargs - -func app[S interface{ ~[]E }, E interface{}](s S, e E) S { - return append(s, e) -} - -func _() { - _ = app[[]int] - _ = app[[]int, int] - _ = app([]int{}, 0) //@codeaction("app", ")", "refactor.rewrite", infer) - _ = app([]int{}, 0) -} - diff --git a/gopls/internal/regtest/marker/testdata/codeaction/inline.txt b/gopls/internal/regtest/marker/testdata/codeaction/inline.txt deleted file mode 100644 index 813a69ce09c..00000000000 --- a/gopls/internal/regtest/marker/testdata/codeaction/inline.txt +++ /dev/null @@ -1,23 +0,0 @@ -This is a minimal test of the refactor.inline code action. - --- go.mod -- -module testdata/codeaction -go 1.18 - --- a/a.go -- -package a - -func _() { - println(add(1, 2)) //@codeaction("add", ")", "refactor.inline", inline) -} - -func add(x, y int) int { return x + y } - --- @inline/a/a.go -- -package a - -func _() { - println(1 + 2) //@codeaction("add", ")", "refactor.inline", inline) -} - -func add(x, y int) int { return x + y } diff --git a/gopls/internal/regtest/marker/testdata/completion/bad.txt b/gopls/internal/regtest/marker/testdata/completion/bad.txt deleted file mode 100644 index 4da021ae322..00000000000 --- a/gopls/internal/regtest/marker/testdata/completion/bad.txt +++ /dev/null @@ -1,68 +0,0 @@ -This test exercises completion in the presence of type errors. - -Note: this test was ported from the old marker tests, which did not enable -unimported completion. Enabling it causes matches in e.g. crypto/rand. 
- --- settings.json -- -{ - "completeUnimported": false -} - --- go.mod -- -module bad.test - -go 1.18 - --- bad/bad0.go -- -package bad - -func stuff() { //@item(stuff, "stuff", "func()", "func") - x := "heeeeyyyy" - random2(x) //@diag("x", re"cannot use x \\(variable of type string\\) as int value in argument to random2") - random2(1) //@complete("dom", random, random2, random3) - y := 3 //@diag("y", re"y declared (and|but) not used") -} - -type bob struct { //@item(bob, "bob", "struct{...}", "struct") - x int -} - -func _() { - var q int - _ = &bob{ - f: q, //@diag("f: q", re"unknown field f in struct literal") - } -} - --- bad/bad1.go -- -package bad - -// See #36637 -type stateFunc func() stateFunc //@item(stateFunc, "stateFunc", "func() stateFunc", "type") - -var a unknown //@item(global_a, "a", "unknown", "var"),diag("unknown", re"(undeclared name|undefined): unknown") - -func random() int { //@item(random, "random", "func() int", "func") - //@complete("", global_a, bob, random, random2, random3, stateFunc, stuff) - return 0 -} - -func random2(y int) int { //@item(random2, "random2", "func(y int) int", "func"),item(bad_y_param, "y", "int", "var") - x := 6 //@item(x, "x", "int", "var"),diag("x", re"x declared (and|but) not used") - var q blah //@item(q, "q", "blah", "var"),diag("q", re"q declared (and|but) not used"),diag("blah", re"(undeclared name|undefined): blah") - var t **blob //@item(t, "t", "**blob", "var"),diag("t", re"t declared (and|but) not used"),diag("blob", re"(undeclared name|undefined): blob") - //@complete("", q, t, x, bad_y_param, global_a, bob, random, random2, random3, stateFunc, stuff) - - return y -} - -func random3(y ...int) { //@item(random3, "random3", "func(y ...int)", "func"),item(y_variadic_param, "y", "[]int", "var") - //@complete("", y_variadic_param, global_a, bob, random, random2, random3, stateFunc, stuff) - - var ch chan (favType1) //@item(ch, "ch", "chan (favType1)", "var"),diag("ch", re"ch declared (and|but) not 
used"),diag("favType1", re"(undeclared name|undefined): favType1") - var m map[keyType]int //@item(m, "m", "map[keyType]int", "var"),diag("m", re"m declared (and|but) not used"),diag("keyType", re"(undeclared name|undefined): keyType") - var arr []favType2 //@item(arr, "arr", "[]favType2", "var"),diag("arr", re"arr declared (and|but) not used"),diag("favType2", re"(undeclared name|undefined): favType2") - var fn1 func() badResult //@item(fn1, "fn1", "func() badResult", "var"),diag("fn1", re"fn1 declared (and|but) not used"),diag("badResult", re"(undeclared name|undefined): badResult") - var fn2 func(badParam) //@item(fn2, "fn2", "func(badParam)", "var"),diag("fn2", re"fn2 declared (and|but) not used"),diag("badParam", re"(undeclared name|undefined): badParam") - //@complete("", arr, ch, fn1, fn2, m, y_variadic_param, global_a, bob, random, random2, random3, stateFunc, stuff) -} diff --git a/gopls/internal/regtest/marker/testdata/completion/nested_complit.txt b/gopls/internal/regtest/marker/testdata/completion/nested_complit.txt deleted file mode 100644 index f3a148dedf8..00000000000 --- a/gopls/internal/regtest/marker/testdata/completion/nested_complit.txt +++ /dev/null @@ -1,23 +0,0 @@ -This test checks completion of nested composite literals; - -TODO(rfindley): investigate an un-skip the disabled test below. 
- --- flags -- --ignore_extra_diags - --- nested_complit.go -- -package nested_complit - -type ncFoo struct {} //@item(structNCFoo, "ncFoo", "struct{...}", "struct") - -type ncBar struct { //@item(structNCBar, "ncBar", "struct{...}", "struct") - baz []ncFoo -} - -func _() { - []ncFoo{} //@item(litNCFoo, "[]ncFoo{}", "", "var") - _ := ncBar{ - // disabled - see issue #54822 - baz: [] // complete(" //", structNCFoo, structNCBar) - } -} diff --git a/gopls/internal/regtest/marker/testdata/completion/postfix.txt b/gopls/internal/regtest/marker/testdata/completion/postfix.txt deleted file mode 100644 index a4485d7efd6..00000000000 --- a/gopls/internal/regtest/marker/testdata/completion/postfix.txt +++ /dev/null @@ -1,51 +0,0 @@ -These tests check that postfix completions do and do not show up in certain -cases. Tests for the postfix completion contents are implemented as ad-hoc -regtests. - --- flags -- --ignore_extra_diags - --- go.mod -- -module golang.org/lsptests/snippets - -go 1.18 - --- postfix.go -- -package snippets - -func _() { - var foo []int - foo.append //@rank(" //", postfixAppend) - - []int{}.append //@complete(" //") - - []int{}.last //@complete(" //") - - /* copy! */ //@item(postfixCopy, "copy!", "duplicate slice", "snippet") - - foo.copy //@rank(" //", postfixCopy) - - var s struct{ i []int } - s.i.copy //@rank(" //", postfixCopy) - - var _ []int = s.i.copy //@complete(" //") - - var blah func() []int - blah().append //@complete(" //") -} - -func _() { - /* append! */ //@item(postfixAppend, "append!", "append and re-assign slice", "snippet") - /* last! */ //@item(postfixLast, "last!", "s[len(s)-1]", "snippet") - /* print! */ //@item(postfixPrint, "print!", "print to stdout", "snippet") - /* range! */ //@item(postfixRange, "range!", "range over slice", "snippet") - /* reverse! */ //@item(postfixReverse, "reverse!", "reverse slice", "snippet") - /* sort! */ //@item(postfixSort, "sort!", "sort.Slice()", "snippet") - /* var! 
*/ //@item(postfixVar, "var!", "assign to variable", "snippet") - /* ifnotnil! */ //@item(postfixIfNotNil, "ifnotnil!", "if expr != nil", "snippet") - - var foo []int - foo. //@complete(" //", postfixAppend, postfixCopy, postfixIfNotNil, postfixLast, postfixPrint, postfixRange, postfixReverse, postfixSort, postfixVar) - - foo = nil -} diff --git a/gopls/internal/regtest/marker/testdata/completion/testy.txt b/gopls/internal/regtest/marker/testdata/completion/testy.txt deleted file mode 100644 index 983fc09160b..00000000000 --- a/gopls/internal/regtest/marker/testdata/completion/testy.txt +++ /dev/null @@ -1,61 +0,0 @@ - --- flags -- --ignore_extra_diags - --- go.mod -- -module testy.test - -go 1.18 - --- types/types.go -- -package types - - --- signature/signature.go -- -package signature - -type Alias = int - --- snippets/snippets.go -- -package snippets - -import ( - "testy.test/signature" - t "testy.test/types" -) - -func X(_ map[signature.Alias]t.CoolAlias) (map[signature.Alias]t.CoolAlias) { - return nil -} - --- testy/testy.go -- -package testy - -func a() { //@item(funcA, "a", "func()", "func") - //@complete("", funcA) -} - - --- testy/testy_test.go -- -package testy - -import ( - "testing" - - sig "testy.test/signature" - "testy.test/snippets" -) - -func TestSomething(t *testing.T) { //@item(TestSomething, "TestSomething(t *testing.T)", "", "func") - var x int //@loc(testyX, "x"), diag("x", re"x declared (and|but) not used") - a() //@loc(testyA, "a") -} - -func _() { - _ = snippets.X(nil) //@signature("nil", "X(_ map[sig.Alias]types.CoolAlias) map[sig.Alias]types.CoolAlias", 0) - var _ sig.Alias -} - -func issue63578(err error) { - err.Error() //@signature(")", "Error()", 0) -} diff --git a/gopls/internal/regtest/marker/testdata/definition/embed.txt b/gopls/internal/regtest/marker/testdata/definition/embed.txt deleted file mode 100644 index e28c7fed6c2..00000000000 --- a/gopls/internal/regtest/marker/testdata/definition/embed.txt +++ /dev/null @@ -1,254 +0,0 
@@ -This test checks definition and hover operations over embedded fields and methods. - --- go.mod -- -module mod.com - -go 1.18 - --- a/a.go -- -package a - -type A string //@loc(AString, "A") - -func (_ A) Hi() {} //@loc(AHi, "Hi") - -type S struct { - Field int //@loc(SField, "Field") - R // embed a struct - H // embed an interface -} - -type R struct { - Field2 int //@loc(RField2, "Field2") -} - -func (_ R) Hey() {} //@loc(RHey, "Hey") - -type H interface { //@loc(H, "H") - Goodbye() //@loc(HGoodbye, "Goodbye") -} - -type I interface { //@loc(I, "I") - B() //@loc(IB, "B") - J -} - -type J interface { //@loc(J, "J") - Hello() //@loc(JHello, "Hello") -} - --- b/b.go -- -package b - -import "mod.com/a" //@loc(AImport, re"\".*\"") - -type embed struct { - F int //@loc(F, "F") -} - -func (embed) M() //@loc(M, "M") - -type Embed struct { - embed - *a.A - a.I - a.S -} - -func _() { - e := Embed{} - e.Hi() //@def("Hi", AHi),hover("Hi", "Hi", AHi) - e.B() //@def("B", IB),hover("B", "B", IB) - _ = e.Field //@def("Field", SField),hover("Field", "Field", SField) - _ = e.Field2 //@def("Field2", RField2),hover("Field2", "Field2", RField2) - e.Hello() //@def("Hello", JHello),hover("Hello", "Hello",JHello) - e.Hey() //@def("Hey", RHey),hover("Hey", "Hey", RHey) - e.Goodbye() //@def("Goodbye", HGoodbye),hover("Goodbye", "Goodbye", HGoodbye) - e.M() //@def("M", M),hover("M", "M", M) - _ = e.F //@def("F", F),hover("F", "F", F) -} - -type aAlias = a.A //@loc(aAlias, "aAlias") - -type S1 struct { //@loc(S1, "S1") - F1 int //@loc(S1F1, "F1") - S2 //@loc(S1S2, "S2"),def("S2", S2),hover("S2", "S2", S2) - a.A //@def("A", AString),hover("A", "A", aA) - aAlias //@def("a", aAlias),hover("a", "aAlias", aAlias) -} - -type S2 struct { //@loc(S2, "S2") - F1 string //@loc(S2F1, "F1") - F2 int //@loc(S2F2, "F2") - *a.A //@def("A", AString),def("a",AImport) -} - -type S3 struct { - F1 struct { - a.A //@def("A", AString) - } -} - -func Bar() { - var x S1 //@def("S1", S1),hover("S1", "S1", S1) - 
_ = x.S2 //@def("S2", S1S2),hover("S2", "S2", S1S2) - _ = x.F1 //@def("F1", S1F1),hover("F1", "F1", S1F1) - _ = x.F2 //@def("F2", S2F2),hover("F2", "F2", S2F2) - _ = x.S2.F1 //@def("F1", S2F1),hover("F1", "F1", S2F1) -} - --- b/c.go -- -package b - -var _ = S1{ //@def("S1", S1),hover("S1", "S1", S1) - F1: 99, //@def("F1", S1F1),hover("F1", "F1", S1F1) -} - --- @AHi/hover.md -- -```go -func (a.A).Hi() -``` - -[`(a.A).Hi` on pkg.go.dev](https://pkg.go.dev/mod.com/a#A.Hi) --- @F/hover.md -- -```go -field F int -``` - -@loc(F, "F") - - -[`(b.Embed).F` on pkg.go.dev](https://pkg.go.dev/mod.com/b#Embed.F) --- @HGoodbye/hover.md -- -```go -func (a.H).Goodbye() -``` - -@loc(HGoodbye, "Goodbye") - - -[`(a.H).Goodbye` on pkg.go.dev](https://pkg.go.dev/mod.com/a#H.Goodbye) --- @IB/hover.md -- -```go -func (a.I).B() -``` - -@loc(IB, "B") - - -[`(a.I).B` on pkg.go.dev](https://pkg.go.dev/mod.com/a#I.B) --- @JHello/hover.md -- -```go -func (a.J).Hello() -``` - -@loc(JHello, "Hello") - - -[`(a.J).Hello` on pkg.go.dev](https://pkg.go.dev/mod.com/a#J.Hello) --- @M/hover.md -- -```go -func (embed).M() -``` - -[`(b.Embed).M` on pkg.go.dev](https://pkg.go.dev/mod.com/b#Embed.M) --- @RField2/hover.md -- -```go -field Field2 int -``` - -@loc(RField2, "Field2") - - -[`(a.R).Field2` on pkg.go.dev](https://pkg.go.dev/mod.com/a#R.Field2) --- @RHey/hover.md -- -```go -func (a.R).Hey() -``` - -[`(a.R).Hey` on pkg.go.dev](https://pkg.go.dev/mod.com/a#R.Hey) --- @S1/hover.md -- -```go -type S1 struct { - F1 int //@loc(S1F1, "F1") - S2 //@loc(S1S2, "S2"),def("S2", S2),hover("S2", "S2", S2) - a.A //@def("A", AString),hover("A", "A", aA) - aAlias //@def("a", aAlias),hover("a", "aAlias", aAlias) -} -``` - -[`b.S1` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S1) --- @S1F1/hover.md -- -```go -field F1 int -``` - -@loc(S1F1, "F1") - - -[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S1.F1) --- @S1S2/hover.md -- -```go -field S2 S2 -``` - -@loc(S1S2, "S2"),def("S2", S2),hover("S2", "S2", S2) - 
- -[`(b.S1).S2` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S1.S2) --- @S2/hover.md -- -```go -type S2 struct { - F1 string //@loc(S2F1, "F1") - F2 int //@loc(S2F2, "F2") - *a.A //@def("A", AString),def("a",AImport) -} -``` - -[`b.S2` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2) --- @S2F1/hover.md -- -```go -field F1 string -``` - -@loc(S2F1, "F1") - - -[`(b.S2).F1` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2.F1) --- @S2F2/hover.md -- -```go -field F2 int -``` - -@loc(S2F2, "F2") - - -[`(b.S2).F2` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2.F2) --- @SField/hover.md -- -```go -field Field int -``` - -@loc(SField, "Field") - - -[`(a.S).Field` on pkg.go.dev](https://pkg.go.dev/mod.com/a#S.Field) --- @aA/hover.md -- -```go -type A string - -func (a.A).Hi() -``` - -@loc(AString, "A") - - -[`a.A` on pkg.go.dev](https://pkg.go.dev/mod.com/a#A) --- @aAlias/hover.md -- -```go -type aAlias = a.A - -func (a.A).Hi() -``` - -@loc(aAlias, "aAlias") diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/addgowork.txt b/gopls/internal/regtest/marker/testdata/diagnostics/addgowork.txt deleted file mode 100644 index 41518e81813..00000000000 --- a/gopls/internal/regtest/marker/testdata/diagnostics/addgowork.txt +++ /dev/null @@ -1,50 +0,0 @@ -This test demonstrates diagnostics for adding a go.work file. - -Quick-fixes change files on disk, so are tested by regtests. - -TODO(rfindley): improve the "cannot find package" import errors. - --- skip -- -Skipping due to go.dev/issue/60584#issuecomment-1622238115. -There appears to be a real race in the critical error logic causing this test -to flake with high frequency. 
- --- flags -- --min_go=go1.18 - --- a/go.mod -- -module mod.com/a - -go 1.18 - --- a/main.go -- -package main //@diag("main", re"add a go.work file") - -import "mod.com/a/lib" //@diag("\"mod.com", re"cannot find package") - -func main() { - _ = lib.C -} - --- a/lib/lib.go -- -package lib //@diag("lib", re"add a go.work file") - -const C = "b" --- b/go.mod -- -module mod.com/b - -go 1.18 - --- b/main.go -- -package main //@diag("main", re"add a go.work file") - -import "mod.com/b/lib" //@diag("\"mod.com", re"cannot find package") - -func main() { - _ = lib.C -} - --- b/lib/lib.go -- -package lib //@diag("lib", re"add a go.work file") - -const C = "b" diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/excludedfile.txt b/gopls/internal/regtest/marker/testdata/diagnostics/excludedfile.txt deleted file mode 100644 index 5944cbecb4e..00000000000 --- a/gopls/internal/regtest/marker/testdata/diagnostics/excludedfile.txt +++ /dev/null @@ -1,38 +0,0 @@ -This test demonstrates diagnostics for various forms of file exclusion. - -Skip on plan9, an arbitrary GOOS, so that we can exercise GOOS exclusions -resulting from file suffixes. 
- --- flags -- --min_go=go1.18 --skip_goos=plan9 - --- go.work -- -go 1.21 - -use ( - ./a -) --- a/go.mod -- -module mod.com/a - -go 1.18 - --- a/a.go -- -package a - --- a/a_plan9.go -- -package a //@diag(re"package (a)", re"excluded due to its GOOS/GOARCH") - --- a/a_ignored.go -- -//go:build skip -package a //@diag(re"package (a)", re"excluded due to its build tags") - --- b/go.mod -- -module mod.com/b - -go 1.18 - --- b/b.go -- -package b //@diag(re"package (b)", re"add this module to your go.work") - diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/generated.txt b/gopls/internal/regtest/marker/testdata/diagnostics/generated.txt deleted file mode 100644 index bae69b1cd3a..00000000000 --- a/gopls/internal/regtest/marker/testdata/diagnostics/generated.txt +++ /dev/null @@ -1,21 +0,0 @@ -Test of "undeclared" diagnostic in generated code. - --- go.mod -- -module example.com -go 1.12 - --- generated.go -- -package generated - -// Code generated by generator.go. DO NOT EDIT. - -func _() { - var y int //@diag("y", re"y declared (and|but) not used") -} - --- generator.go -- -package generated - -func _() { - var x int //@diag("x", re"x declared (and|but) not used") -} diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/issue60544.txt b/gopls/internal/regtest/marker/testdata/diagnostics/issue60544.txt deleted file mode 100644 index b644d453164..00000000000 --- a/gopls/internal/regtest/marker/testdata/diagnostics/issue60544.txt +++ /dev/null @@ -1,13 +0,0 @@ -This test exercises a crash due to treatment of "comparable" in methodset -calculation (golang/go#60544). - --min_go is 1.19 as the error message changed at this Go version. 
--- flags -- --min_go=go1.19 - --- main.go -- -package main - -type X struct{} - -func (X) test(x comparable) {} //@diag("comparable", re"outside a type constraint") diff --git a/gopls/internal/regtest/marker/testdata/fixedbugs/issue59318.txt b/gopls/internal/regtest/marker/testdata/fixedbugs/issue59318.txt deleted file mode 100644 index 65385f703e5..00000000000 --- a/gopls/internal/regtest/marker/testdata/fixedbugs/issue59318.txt +++ /dev/null @@ -1,22 +0,0 @@ -This test verifies that we can load multiple orphaned files as -command-line-arguments packages. - -Previously, we would load only one because go/packages returns at most one -command-line-arguments package per query. - --- a/main.go -- -package main - -func main() { - var a int //@diag(re"var (a)", re"not used") -} --- b/main.go -- -package main - -func main() { - var b int //@diag(re"var (b)", re"not used") -} --- c/go.mod -- -module c.com // The existence of this module avoids a workspace error. - -go 1.18 diff --git a/gopls/internal/regtest/marker/testdata/hover/generics.txt b/gopls/internal/regtest/marker/testdata/hover/generics.txt deleted file mode 100644 index d512f7feb0e..00000000000 --- a/gopls/internal/regtest/marker/testdata/hover/generics.txt +++ /dev/null @@ -1,76 +0,0 @@ -This file contains tests for hovering over generic Go code. - --- flags -- --min_go=go1.18 - --- go.mod -- -// A go.mod is require for correct pkgsite links. -// TODO(rfindley): don't link to ad-hoc or command-line-arguments packages! 
-module mod.com - -go 1.18 - --- generics.go -- -package generics - -type value[T any] struct { //hover("lue", "value", value),hover("T", "T", valueT) - val T //@hover("T", "T", valuevalT) - Q int //@hover("Q", "Q", valueQ) -} - -type Value[T any] struct { //@hover("T", "T", ValueT) - val T //@hover("T", "T", ValuevalT) - Q int //@hover("Q", "Q", ValueQ) -} - -// disabled - see issue #54822 -func F[P interface{ ~int | string }]() { // hover("P","P",Ptparam) - // disabled - see issue #54822 - var _ P // hover("P","P",Pvar) -} - --- inferred.go -- -package generics - -func app[S interface{ ~[]E }, E interface{}](s S, e E) S { - return append(s, e) -} - -func _() { - _ = app[[]int] //@hover("app", "app", appint) - _ = app[[]int, int] //@hover("app", "app", appint) - _ = app[[]int]([]int{}, 0) //@hover("app", "app", appint) - _ = app([]int{}, 0) //@hover("app", "app", appint) -} - --- @ValueQ/hover.md -- -```go -field Q int -``` - -@hover("Q", "Q", ValueQ) - - -[`(generics.Value).Q` on pkg.go.dev](https://pkg.go.dev/mod.com#Value.Q) --- @ValueT/hover.md -- -```go -type parameter T any -``` --- @ValuevalT/hover.md -- -```go -type parameter T any -``` --- @appint/hover.md -- -```go -func app(s []int, e int) []int // func[S interface{~[]E}, E interface{}](s S, e E) S -``` --- @valueQ/hover.md -- -```go -field Q int -``` - -@hover("Q", "Q", valueQ) --- @valuevalT/hover.md -- -```go -type parameter T any -``` diff --git a/gopls/internal/regtest/marker/testdata/hover/linkable.txt b/gopls/internal/regtest/marker/testdata/hover/linkable.txt deleted file mode 100644 index 981716d84c4..00000000000 --- a/gopls/internal/regtest/marker/testdata/hover/linkable.txt +++ /dev/null @@ -1,120 +0,0 @@ -This test checks that we correctly determine pkgsite links for various -identifiers. - -We should only produce links that work, meaning the object is reachable via the -package's public API. 
--- go.mod -- -module mod.com - -go 1.18 --- p.go -- -package p - -type E struct { - Embed int -} - -// T is in the package scope, and so should be linkable. -type T struct{ //@hover("T", "T", T) - // Only exported fields should be linkable - - f int //@hover("f", "f", f) - F int //@hover("F", "F", F) - - E - - // TODO(rfindley): is the link here correct? It ignores N. - N struct { - // Nested fields should also be linkable. - Nested int //@hover("Nested", "Nested", Nested) - } -} -// M is an exported method, and so should be linkable. -func (T) M() {} - -// m is not exported, and so should not be linkable. -func (T) m() {} - -func _() { - var t T - - // Embedded fields should be linkable. - _ = t.Embed //@hover("Embed", "Embed", Embed) - - // Local variables should not be linkable, even if they are capitalized. - var X int //@hover("X", "X", X) - _ = X - - // Local types should not be linkable, even if they are capitalized. - type Local struct { //@hover("Local", "Local", Local) - E - } - - // But the embedded field should still be linkable. - var l Local - _ = l.Embed //@hover("Embed", "Embed", Embed) -} --- @Embed/hover.md -- -```go -field Embed int -``` - -[`(p.E).Embed` on pkg.go.dev](https://pkg.go.dev/mod.com#E.Embed) --- @F/hover.md -- -```go -field F int -``` - -@hover("F", "F", F) - - -[`(p.T).F` on pkg.go.dev](https://pkg.go.dev/mod.com#T.F) --- @Local/hover.md -- -```go -type Local struct { - E -} -``` - -Local types should not be linkable, even if they are capitalized. --- @Nested/hover.md -- -```go -field Nested int -``` - -Nested fields should also be linkable. --- @T/hover.md -- -```go -type T struct { - f int //@hover("f", "f", f) - F int //@hover("F", "F", F) - - E - - // TODO(rfindley): is the link here correct? It ignores N. - N struct { - // Nested fields should also be linkable. - Nested int //@hover("Nested", "Nested", Nested) - } -} - -func (T).M() -func (T).m() -``` - -T is in the package scope, and so should be linkable. 
- - -[`p.T` on pkg.go.dev](https://pkg.go.dev/mod.com#T) --- @X/hover.md -- -```go -var X int -``` - -Local variables should not be linkable, even if they are capitalized. --- @f/hover.md -- -```go -field f int -``` - -@hover("f", "f", f) diff --git a/gopls/internal/regtest/marker/testdata/implementation/generics.txt b/gopls/internal/regtest/marker/testdata/implementation/generics.txt deleted file mode 100644 index 2a9fcb842f1..00000000000 --- a/gopls/internal/regtest/marker/testdata/implementation/generics.txt +++ /dev/null @@ -1,34 +0,0 @@ -Test of 'implementation' query on generic types. - --- flags -- --min_go=go1.18 - --- go.mod -- -module example.com -go 1.18 - --- implementation/implementation.go -- -package implementation - -type GenIface[T any] interface { //@loc(GenIface, "GenIface"),implementation("GenIface", GC) - F(int, string, T) //@loc(GenIfaceF, "F"),implementation("F", GCF) -} - -type GenConc[U any] int //@loc(GenConc, "GenConc"),implementation("GenConc", GI) - -func (GenConc[V]) F(int, string, V) {} //@loc(GenConcF, "F"),implementation("F", GIF) - -type GenConcString struct{ GenConc[string] } //@loc(GenConcString, "GenConcString"),implementation(GenConcString, GIString) - --- other/other.go -- -package other - -type GI[T any] interface { //@loc(GI, "GI"),implementation("GI", GenConc) - F(int, string, T) //@loc(GIF, "F"),implementation("F", GenConcF) -} - -type GIString GI[string] //@loc(GIString, "GIString"),implementation("GIString", GenConcString) - -type GC[U any] int //@loc(GC, "GC"),implementation("GC", GenIface) - -func (GC[V]) F(int, string, V) {} //@loc(GCF, "F"),implementation("F", GenIfaceF) diff --git a/gopls/internal/regtest/marker/testdata/rename/basic.txt b/gopls/internal/regtest/marker/testdata/rename/basic.txt deleted file mode 100644 index 8a1d42d23ec..00000000000 --- a/gopls/internal/regtest/marker/testdata/rename/basic.txt +++ /dev/null @@ -1,38 +0,0 @@ -This test performs basic coverage of 'rename' within a single package. 
- --- basic.go -- -package p - -func f(x int) { println(x) } //@rename("x", "y", xToy) - --- @xToy/basic.go -- -package p - -func f(y int) { println(y) } //@rename("x", "y", xToy) - --- alias.go -- -package p - -// from golang/go#61625 -type LongNameHere struct{} -type A = LongNameHere //@rename("A", "B", AToB) -func Foo() A - --- errors.go -- -package p - -func _(x []int) { //@renameerr("_", "blank", `can't rename "_"`) - x = append(x, 1) //@renameerr("append", "blank", "built in and cannot be renamed") - x = nil //@renameerr("nil", "blank", "built in and cannot be renamed") - x = nil //@renameerr("x", "x", "old and new names are the same: x") - _ = 1 //@renameerr("1", "x", "no identifier found") -} - --- @AToB/alias.go -- -package p - -// from golang/go#61625 -type LongNameHere struct{} -type B = LongNameHere //@rename("A", "B", AToB) -func Foo() B - diff --git a/gopls/internal/regtest/marker/testdata/rename/embed.txt b/gopls/internal/regtest/marker/testdata/rename/embed.txt deleted file mode 100644 index c0b0301fac6..00000000000 --- a/gopls/internal/regtest/marker/testdata/rename/embed.txt +++ /dev/null @@ -1,36 +0,0 @@ -This test exercises renaming of types used as embedded fields. 
- --- go.mod -- -module example.com -go 1.12 - --- a/a.go -- -package a - -type A int //@rename("A", "A2", type) - --- b/b.go -- -package b - -import "example.com/a" - -type B struct { a.A } //@renameerr("A", "A3", errAnonField) - -var _ = new(B).A //@renameerr("A", "A4", errAnonField) - --- @errAnonField -- -can't rename embedded fields: rename the type directly or name the field --- @type/a/a.go -- -package a - -type A2 int //@rename("A", "A2", type) - --- @type/b/b.go -- -package b - -import "example.com/a" - -type B struct { a.A2 } //@renameerr("A", "A3", errAnonField) - -var _ = new(B).A2 //@renameerr("A", "A4", errAnonField) - diff --git a/gopls/internal/regtest/marker/testdata/rename/generics.txt b/gopls/internal/regtest/marker/testdata/rename/generics.txt deleted file mode 100644 index db64bb4fbf9..00000000000 --- a/gopls/internal/regtest/marker/testdata/rename/generics.txt +++ /dev/null @@ -1,240 +0,0 @@ -This test exercises various renaming features on generic code. - -Fixed bugs: - -- golang/go#61614: renaming a method of a type in a package that uses type - parameter composite lits used to panic, because previous iterations of the - satisfy analysis did not account for this language feature. - -- golang/go#61635: renaming type parameters did not work when they were - capitalized and the package was imported by another package. 
- --- flags -- --min_go=go1.18 - --- go.mod -- -module example.com -go 1.20 - --- a.go -- -package a - -type I int - -func (I) m() {} //@rename("m", "M", mToM) - -func _[P ~[]int]() { - _ = P{} -} - --- @mToM/a.go -- -package a - -type I int - -func (I) M() {} //@rename("m", "M", mToM) - -func _[P ~[]int]() { - _ = P{} -} - --- g.go -- -package a - -type S[P any] struct { //@rename("P", "Q", PToQ) - P P - F func(P) P -} - -func F[R any](r R) { - var _ R //@rename("R", "S", RToS) -} - --- @PToQ/g.go -- -package a - -type S[Q any] struct { //@rename("P", "Q", PToQ) - P Q - F func(Q) Q -} - -func F[R any](r R) { - var _ R //@rename("R", "S", RToS) -} - --- @RToS/g.go -- -package a - -type S[P any] struct { //@rename("P", "Q", PToQ) - P P - F func(P) P -} - -func F[S any](r S) { - var _ S //@rename("R", "S", RToS) -} - --- issue61635/p.go -- -package issue61635 - -type builder[S ~[]F, F ~string] struct { //@rename("S", "T", SToT) - name string - elements S - elemData map[F][]ElemData[F] - // other fields... -} - -type ElemData[F ~string] struct { - Name F - // other fields... -} - -type BuilderImpl[S ~[]F, F ~string] struct{ builder[S, F] } - --- importer/i.go -- -package importer - -import "example.com/issue61635" // importing is necessary to repro golang/go#61635 - -var _ issue61635.ElemData[string] - --- @SToT/issue61635/p.go -- -package issue61635 - -type builder[T ~[]F, F ~string] struct { //@rename("S", "T", SToT) - name string - elements T - elemData map[F][]ElemData[F] - // other fields... -} - -type ElemData[F ~string] struct { - Name F - // other fields... 
-} - -type BuilderImpl[S ~[]F, F ~string] struct{ builder[S, F] } - --- instances/type.go -- -package instances - -type R[P any] struct { //@rename("R", "u", Rtou) - Next *R[P] //@rename("R", "s", RTos) -} - -func (rv R[P]) Do(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) - var x R[P] - return rv.Do(x) //@rename("Do", "Do2", DoToDo2) -} - -func _() { - var x R[int] //@rename("R", "r", RTor) - x = x.Do(x) -} - --- @RTos/instances/type.go -- -package instances - -type s[P any] struct { //@rename("R", "u", Rtou) - Next *s[P] //@rename("R", "s", RTos) -} - -func (rv s[P]) Do(s[P]) s[P] { //@rename("Do", "Do1", DoToDo1) - var x s[P] - return rv.Do(x) //@rename("Do", "Do2", DoToDo2) -} - -func _() { - var x s[int] //@rename("R", "r", RTor) - x = x.Do(x) -} - --- @Rtou/instances/type.go -- -package instances - -type u[P any] struct { //@rename("R", "u", Rtou) - Next *u[P] //@rename("R", "s", RTos) -} - -func (rv u[P]) Do(u[P]) u[P] { //@rename("Do", "Do1", DoToDo1) - var x u[P] - return rv.Do(x) //@rename("Do", "Do2", DoToDo2) -} - -func _() { - var x u[int] //@rename("R", "r", RTor) - x = x.Do(x) -} - --- @DoToDo1/instances/type.go -- -package instances - -type R[P any] struct { //@rename("R", "u", Rtou) - Next *R[P] //@rename("R", "s", RTos) -} - -func (rv R[P]) Do1(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) - var x R[P] - return rv.Do1(x) //@rename("Do", "Do2", DoToDo2) -} - -func _() { - var x R[int] //@rename("R", "r", RTor) - x = x.Do1(x) -} - --- @DoToDo2/instances/type.go -- -package instances - -type R[P any] struct { //@rename("R", "u", Rtou) - Next *R[P] //@rename("R", "s", RTos) -} - -func (rv R[P]) Do2(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) - var x R[P] - return rv.Do2(x) //@rename("Do", "Do2", DoToDo2) -} - -func _() { - var x R[int] //@rename("R", "r", RTor) - x = x.Do2(x) -} - --- instances/func.go -- -package instances - -func Foo[P any](p P) { //@rename("Foo", "Bar", FooToBar) - Foo(p) //@rename("Foo", "Baz", FooToBaz) -} - --- 
@FooToBar/instances/func.go -- -package instances - -func Bar[P any](p P) { //@rename("Foo", "Bar", FooToBar) - Bar(p) //@rename("Foo", "Baz", FooToBaz) -} - --- @FooToBaz/instances/func.go -- -package instances - -func Baz[P any](p P) { //@rename("Foo", "Bar", FooToBar) - Baz(p) //@rename("Foo", "Baz", FooToBaz) -} - --- @RTor/instances/type.go -- -package instances - -type r[P any] struct { //@rename("R", "u", Rtou) - Next *r[P] //@rename("R", "s", RTos) -} - -func (rv r[P]) Do(r[P]) r[P] { //@rename("Do", "Do1", DoToDo1) - var x r[P] - return rv.Do(x) //@rename("Do", "Do2", DoToDo2) -} - -func _() { - var x r[int] //@rename("R", "r", RTor) - x = x.Do(x) -} - diff --git a/gopls/internal/regtest/marker/testdata/rename/issue43616.txt b/gopls/internal/regtest/marker/testdata/rename/issue43616.txt deleted file mode 100644 index 3ff2ee37e27..00000000000 --- a/gopls/internal/regtest/marker/testdata/rename/issue43616.txt +++ /dev/null @@ -1,19 +0,0 @@ -This test verifies the fix for golang/go#43616: renaming mishandles embedded -fields. - --- p.go -- -package issue43616 - -type foo int //@rename("foo", "bar", fooToBar),preparerename("oo","foo","foo") - -var x struct{ foo } //@renameerr("foo", "baz", "rename the type directly") - -var _ = x.foo //@renameerr("foo", "quux", "rename the type directly") --- @fooToBar/p.go -- -package issue43616 - -type bar int //@rename("foo", "bar", fooToBar),preparerename("oo","foo","foo") - -var x struct{ bar } //@renameerr("foo", "baz", "rename the type directly") - -var _ = x.bar //@renameerr("foo", "quux", "rename the type directly") diff --git a/gopls/internal/regtest/marker/testdata/rename/issue61294.txt b/gopls/internal/regtest/marker/testdata/rename/issue61294.txt deleted file mode 100644 index 3ce1dbc7670..00000000000 --- a/gopls/internal/regtest/marker/testdata/rename/issue61294.txt +++ /dev/null @@ -1,29 +0,0 @@ - -This test renames a parameter var whose name is the same as a -package-level var, which revealed a bug in isLocal. 
- -This is a regression test for issue #61294. - --- go.mod -- -module example.com -go 1.18 - --- a/a.go -- -package a - -func One() - -func Two(One int) //@rename("One", "Three", OneToThree) - --- b/b.go -- -package b - -import _ "example.com/a" - --- @OneToThree/a/a.go -- -package a - -func One() - -func Two(Three int) //@rename("One", "Three", OneToThree) - diff --git a/gopls/internal/regtest/marker/testdata/rename/issue61640.txt b/gopls/internal/regtest/marker/testdata/rename/issue61640.txt deleted file mode 100644 index 70a6123ab32..00000000000 --- a/gopls/internal/regtest/marker/testdata/rename/issue61640.txt +++ /dev/null @@ -1,47 +0,0 @@ -This test verifies that gopls can rename instantiated fields. - --- flags -- --min_go=go1.18 - --- a.go -- -package a - -// This file is adapted from the example in the issue. - -type builder[S ~[]int] struct { - elements S //@rename("elements", "elements2", OneToTwo) -} - -type BuilderImpl[S ~[]int] struct{ builder[S] } - -func NewBuilderImpl[S ~[]int](name string) *BuilderImpl[S] { - impl := &BuilderImpl[S]{ - builder[S]{ - elements: S{}, - }, - } - - _ = impl.elements - return impl -} --- @OneToTwo/a.go -- -package a - -// This file is adapted from the example in the issue. - -type builder[S ~[]int] struct { - elements2 S //@rename("elements", "elements2", OneToTwo) -} - -type BuilderImpl[S ~[]int] struct{ builder[S] } - -func NewBuilderImpl[S ~[]int](name string) *BuilderImpl[S] { - impl := &BuilderImpl[S]{ - builder[S]{ - elements2: S{}, - }, - } - - _ = impl.elements2 - return impl -} diff --git a/gopls/internal/regtest/marker/testdata/rename/issue61813.txt b/gopls/internal/regtest/marker/testdata/rename/issue61813.txt deleted file mode 100644 index 52813f869a4..00000000000 --- a/gopls/internal/regtest/marker/testdata/rename/issue61813.txt +++ /dev/null @@ -1,18 +0,0 @@ -This test exercises the panic reported in golang/go#61813. 
- --- p.go -- -package p - -type P struct{} - -func (P) M() {} //@rename("M", "N", MToN) - -var x = []*P{{}} --- @MToN/p.go -- -package p - -type P struct{} - -func (P) N() {} //@rename("M", "N", MToN) - -var x = []*P{{}} diff --git a/gopls/internal/regtest/marker/testdata/rename/methods.txt b/gopls/internal/regtest/marker/testdata/rename/methods.txt deleted file mode 100644 index 05a5cd8697b..00000000000 --- a/gopls/internal/regtest/marker/testdata/rename/methods.txt +++ /dev/null @@ -1,67 +0,0 @@ -This test exercises renaming of interface methods. - -The golden is currently wrong due to https://github.com/golang/go/issues/58506: -the reference to B.F in package b should be renamed too. - --- go.mod -- -module example.com -go 1.12 - --- a/a.go -- -package a - -type A int - -func (A) F() {} //@renameerr("F", "G", errAfToG) - --- b/b.go -- -package b - -import "example.com/a" -import "example.com/c" - -type B interface { F() } //@rename("F", "G", BfToG) - -var _ B = a.A(0) -var _ B = c.C(0) - --- c/c.go -- -package c - -type C int - -func (C) F() {} //@renameerr("F", "G", errCfToG) - --- d/d.go -- -package d - -import "example.com/b" - -var _ = b.B.F - --- @errAfToG -- -a/a.go:5:10: renaming this method "F" to "G" -b/b.go:6:6: would make example.com/a.A no longer assignable to interface B -b/b.go:6:20: (rename example.com/b.B.F if you intend to change both types) --- @BfToG/b/b.go -- -package b - -import "example.com/a" -import "example.com/c" - -type B interface { G() } //@rename("F", "G", BfToG) - -var _ B = a.A(0) -var _ B = c.C(0) - --- @BfToG/d/d.go -- -package d - -import "example.com/b" - -var _ = b.B.G - --- @errCfToG -- -c/c.go:5:10: renaming this method "F" to "G" -b/b.go:6:6: would make example.com/c.C no longer assignable to interface B -b/b.go:6:20: (rename example.com/b.B.F if you intend to change both types) diff --git a/gopls/internal/regtest/marker/testdata/rename/typeswitch.txt b/gopls/internal/regtest/marker/testdata/rename/typeswitch.txt deleted 
file mode 100644 index 252c8db7af6..00000000000 --- a/gopls/internal/regtest/marker/testdata/rename/typeswitch.txt +++ /dev/null @@ -1,26 +0,0 @@ -This test covers the special case of renaming a type switch var. - --- p.go -- -package p - -func _(x interface{}) { - switch y := x.(type) { //@rename("y", "z", yToZ) - case string: - print(y) //@rename("y", "z", yToZ) - default: - print(y) //@rename("y", "z", yToZ) - } -} - --- @yToZ/p.go -- -package p - -func _(x interface{}) { - switch z := x.(type) { //@rename("y", "z", yToZ) - case string: - print(z) //@rename("y", "z", yToZ) - default: - print(z) //@rename("y", "z", yToZ) - } -} - diff --git a/gopls/internal/regtest/marker/testdata/stubmethods/basic.txt b/gopls/internal/regtest/marker/testdata/stubmethods/basic.txt deleted file mode 100644 index 9a651288306..00000000000 --- a/gopls/internal/regtest/marker/testdata/stubmethods/basic.txt +++ /dev/null @@ -1,23 +0,0 @@ -This test exercises basic 'stub methods' functionality. - --- go.mod -- -module example.com -go 1.12 - --- a/a.go -- -package a - -type C int - -var _ error = C(0) //@suggestedfix(re"C.0.", re"missing method Error", stub) --- @stub/a/a.go -- ---- before -+++ after -@@ -3 +3,6 @@ --type C int -+type C int -+ -+// Error implements error. -+func (C) Error() string { -+ panic("unimplemented") -+} diff --git a/gopls/internal/regtest/misc/fix_test.go b/gopls/internal/regtest/misc/fix_test.go deleted file mode 100644 index 67e37c9a2cb..00000000000 --- a/gopls/internal/regtest/misc/fix_test.go +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package misc - -import ( - "testing" - - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" - - "golang.org/x/tools/gopls/internal/lsp/protocol" -) - -// A basic test for fillstruct, now that it uses a command. 
-func TestFillStruct(t *testing.T) { - const basic = ` --- go.mod -- -module mod.com - -go 1.14 --- main.go -- -package main - -type Info struct { - WordCounts map[string]int - Words []string -} - -func Foo() { - _ = Info{} -} -` - Run(t, basic, func(t *testing.T, env *Env) { - env.OpenFile("main.go") - if err := env.Editor.RefactorRewrite(env.Ctx, env.RegexpSearch("main.go", "Info{}")); err != nil { - t.Fatal(err) - } - want := `package main - -type Info struct { - WordCounts map[string]int - Words []string -} - -func Foo() { - _ = Info{ - WordCounts: map[string]int{}, - Words: []string{}, - } -} -` - if got := env.BufferText("main.go"); got != want { - t.Fatalf("TestFillStruct failed:\n%s", compare.Text(want, got)) - } - }) -} - -func TestFillReturns(t *testing.T) { - const files = ` --- go.mod -- -module mod.com - -go 1.12 --- main.go -- -package main - -func Foo() error { - return -} -` - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("main.go") - var d protocol.PublishDiagnosticsParams - env.AfterChange( - // The error message here changed in 1.18; "return values" covers both forms. 
- Diagnostics(env.AtRegexp("main.go", `return`), WithMessage("return values")), - ReadDiagnostics("main.go", &d), - ) - codeActions := env.CodeAction("main.go", d.Diagnostics) - if len(codeActions) != 2 { - t.Fatalf("expected 2 code actions, got %v", len(codeActions)) - } - var foundQuickFix, foundFixAll bool - for _, a := range codeActions { - if a.Kind == protocol.QuickFix { - foundQuickFix = true - } - if a.Kind == protocol.SourceFixAll { - foundFixAll = true - } - } - if !foundQuickFix { - t.Fatalf("expected quickfix code action, got none") - } - if !foundFixAll { - t.Fatalf("expected fixall code action, got none") - } - env.ApplyQuickFixes("main.go", d.Diagnostics) - env.AfterChange(NoDiagnostics(ForFile("main.go"))) - }) -} - -func TestUnusedParameter_Issue63755(t *testing.T) { - // This test verifies the fix for #63755, where codeActions panicked on parameters - // of functions with no function body. - - // We should not detect parameters as unused for external functions. - - const files = ` --- go.mod -- -module unused.mod - -go 1.18 - --- external.go -- -package external - -func External(z int) //@codeaction("refactor.rewrite", "z", "z", recursive) - -func _() { - External(1) -} - ` - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("external.go") - actions, err := env.Editor.CodeAction(env.Ctx, env.RegexpSearch("external.go", "z"), nil) - if err != nil { - t.Fatal(err) - } - if len(actions) > 0 { - t.Errorf("CodeAction(): got %d code actions, want 0", len(actions)) - } - }) -} diff --git a/gopls/internal/regtest/misc/generate_test.go b/gopls/internal/regtest/misc/generate_test.go deleted file mode 100644 index 0cfcab59d24..00000000000 --- a/gopls/internal/regtest/misc/generate_test.go +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// TODO(rfindley): figure out why go generate fails on android builders. - -//go:build !android -// +build !android - -package misc - -import ( - "testing" - - . "golang.org/x/tools/gopls/internal/lsp/regtest" -) - -func TestGenerateProgress(t *testing.T) { - const generatedWorkspace = ` --- go.mod -- -module fake.test - -go 1.14 --- generate.go -- -// +build ignore - -package main - -import ( - "os" -) - -func main() { - os.WriteFile("generated.go", []byte("package " + os.Args[1] + "\n\nconst Answer = 21"), 0644) -} - --- lib1/lib.go -- -package lib1 - -//` + `go:generate go run ../generate.go lib1 - --- lib2/lib.go -- -package lib2 - -//` + `go:generate go run ../generate.go lib2 - --- main.go -- -package main - -import ( - "fake.test/lib1" - "fake.test/lib2" -) - -func main() { - println(lib1.Answer + lib2.Answer) -} -` - - Run(t, generatedWorkspace, func(t *testing.T, env *Env) { - env.OnceMet( - InitialWorkspaceLoad, - Diagnostics(env.AtRegexp("main.go", "lib1.(Answer)")), - ) - env.RunGenerate("./lib1") - env.RunGenerate("./lib2") - env.AfterChange( - NoDiagnostics(ForFile("main.go")), - ) - }) -} diff --git a/gopls/internal/regtest/misc/misc_test.go b/gopls/internal/regtest/misc/misc_test.go deleted file mode 100644 index 5138b76279f..00000000000 --- a/gopls/internal/regtest/misc/misc_test.go +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package misc - -import ( - "testing" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/hooks" - "golang.org/x/tools/gopls/internal/lsp/regtest" -) - -func TestMain(m *testing.M) { - bug.PanicOnBugs = true - regtest.Main(m, hooks.Options) -} diff --git a/gopls/internal/regtest/misc/prompt_test.go b/gopls/internal/regtest/misc/prompt_test.go deleted file mode 100644 index 7a262ad934e..00000000000 --- a/gopls/internal/regtest/misc/prompt_test.go +++ /dev/null @@ -1,231 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package misc - -import ( - "fmt" - "os" - "path/filepath" - "regexp" - "testing" - - "golang.org/x/tools/gopls/internal/lsp" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" -) - -// Test that gopls prompts for telemetry only when it is supposed to. 
-func TestTelemetryPrompt_Conditions(t *testing.T) { - const src = ` --- go.mod -- -module mod.com - -go 1.12 --- main.go -- -package main - -func main() { -} -` - - for _, enabled := range []bool{true, false} { - t.Run(fmt.Sprintf("telemetryPrompt=%v", enabled), func(t *testing.T) { - for _, initialMode := range []string{"", "off", "on"} { - t.Run(fmt.Sprintf("initial_mode=%s", initialMode), func(t *testing.T) { - modeFile := filepath.Join(t.TempDir(), "mode") - if initialMode != "" { - if err := os.WriteFile(modeFile, []byte(initialMode), 0666); err != nil { - t.Fatal(err) - } - } - WithOptions( - Modes(Default), // no need to run this in all modes - EnvVars{ - lsp.GoplsConfigDirEnvvar: t.TempDir(), - lsp.FakeTelemetryModefileEnvvar: modeFile, - }, - Settings{ - "telemetryPrompt": enabled, - }, - ).Run(t, src, func(t *testing.T, env *Env) { - wantPrompt := enabled && (initialMode == "" || initialMode == "off") - expectation := ShownMessageRequest(".*Would you like to enable Go telemetry?") - if !wantPrompt { - expectation = Not(expectation) - } - env.OnceMet( - CompletedWork(lsp.TelemetryPromptWorkTitle, 1, true), - expectation, - ) - }) - }) - } - }) - } -} - -// Test that responding to the telemetry prompt results in the expected state. 
-func TestTelemetryPrompt_Response(t *testing.T) { - const src = ` --- go.mod -- -module mod.com - -go 1.12 --- main.go -- -package main - -func main() { -} -` - - tests := []struct { - response string // response to choose for the telemetry dialog - wantMode string // resulting telemetry mode - wantMsg string // substring contained in the follow-up popup (if empty, no popup is expected) - }{ - {lsp.TelemetryYes, "on", "uploading is now enabled"}, - {lsp.TelemetryNo, "", ""}, - {"", "", ""}, - } - for _, test := range tests { - t.Run(fmt.Sprintf("response=%s", test.response), func(t *testing.T) { - modeFile := filepath.Join(t.TempDir(), "mode") - msgRE := regexp.MustCompile(".*Would you like to enable Go telemetry?") - respond := func(m *protocol.ShowMessageRequestParams) (*protocol.MessageActionItem, error) { - if msgRE.MatchString(m.Message) { - for _, item := range m.Actions { - if item.Title == test.response { - return &item, nil - } - } - if test.response != "" { - t.Errorf("action item %q not found", test.response) - } - } - return nil, nil - } - WithOptions( - Modes(Default), // no need to run this in all modes - EnvVars{ - lsp.GoplsConfigDirEnvvar: t.TempDir(), - lsp.FakeTelemetryModefileEnvvar: modeFile, - }, - Settings{ - "telemetryPrompt": true, - }, - MessageResponder(respond), - ).Run(t, src, func(t *testing.T, env *Env) { - var postConditions []Expectation - if test.wantMsg != "" { - postConditions = append(postConditions, ShownMessage(test.wantMsg)) - } - env.OnceMet( - CompletedWork(lsp.TelemetryPromptWorkTitle, 1, true), - postConditions..., - ) - gotMode := "" - if contents, err := os.ReadFile(modeFile); err == nil { - gotMode = string(contents) - } else if !os.IsNotExist(err) { - t.Fatal(err) - } - if gotMode != test.wantMode { - t.Errorf("after prompt, mode=%s, want %s", gotMode, test.wantMode) - } - }) - }) - } -} - -// Test that we stop asking about telemetry after the user ignores the question -// 5 times. 
-func TestTelemetryPrompt_GivingUp(t *testing.T) { - const src = ` --- go.mod -- -module mod.com - -go 1.12 --- main.go -- -package main - -func main() { -} -` - - // For this test, we want to share state across gopls sessions. - modeFile := filepath.Join(t.TempDir(), "mode") - configDir := t.TempDir() - - const maxPrompts = 5 // internal prompt limit defined by gopls - - for i := 0; i < maxPrompts+1; i++ { - WithOptions( - Modes(Default), // no need to run this in all modes - EnvVars{ - lsp.GoplsConfigDirEnvvar: configDir, - lsp.FakeTelemetryModefileEnvvar: modeFile, - }, - Settings{ - "telemetryPrompt": true, - }, - ).Run(t, src, func(t *testing.T, env *Env) { - wantPrompt := i < maxPrompts - expectation := ShownMessageRequest(".*Would you like to enable Go telemetry?") - if !wantPrompt { - expectation = Not(expectation) - } - env.OnceMet( - CompletedWork(lsp.TelemetryPromptWorkTitle, 1, true), - expectation, - ) - }) - } -} - -// Test that gopls prompts for telemetry only when it is supposed to. -func TestTelemetryPrompt_Conditions2(t *testing.T) { - const src = ` --- go.mod -- -module mod.com - -go 1.12 --- main.go -- -package main - -func main() { -} -` - modeFile := filepath.Join(t.TempDir(), "mode") - WithOptions( - Modes(Default), // no need to run this in all modes - EnvVars{ - lsp.GoplsConfigDirEnvvar: t.TempDir(), - lsp.FakeTelemetryModefileEnvvar: modeFile, - }, - Settings{ - // off because we are testing - // if we can trigger the prompt with command. 
- "telemetryPrompt": false, - }, - ).Run(t, src, func(t *testing.T, env *Env) { - cmd, err := command.NewMaybePromptForTelemetryCommand("prompt") - if err != nil { - t.Fatal(err) - } - var result error - env.ExecuteCommand(&protocol.ExecuteCommandParams{ - Command: cmd.Command, - }, &result) - if result != nil { - t.Fatal(err) - } - expectation := ShownMessageRequest(".*Would you like to enable Go telemetry?") - env.OnceMet( - CompletedWork(lsp.TelemetryPromptWorkTitle, 2, true), - expectation, - ) - }) -} diff --git a/gopls/internal/regtest/misc/semantictokens_test.go b/gopls/internal/regtest/misc/semantictokens_test.go deleted file mode 100644 index 5f243a158af..00000000000 --- a/gopls/internal/regtest/misc/semantictokens_test.go +++ /dev/null @@ -1,204 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package misc - -import ( - "testing" - - "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/internal/typeparams" -) - -func TestBadURICrash_VSCodeIssue1498(t *testing.T) { - const src = ` --- go.mod -- -module example.com - -go 1.12 - --- main.go -- -package main - -func main() {} - -` - WithOptions( - Modes(Default), - Settings{"allExperiments": true}, - ).Run(t, src, func(t *testing.T, env *Env) { - params := &protocol.SemanticTokensParams{} - const badURI = "http://foo/" - params.TextDocument.URI = badURI - // This call panicked in the past: golang/vscode-go#1498. - if _, err := env.Editor.Server.SemanticTokensFull(env.Ctx, params); err != nil { - // Requests to an invalid URI scheme shouldn't result in an error, we - // simply don't support this so return empty result. This could be - // changed, but for now assert on the current behavior. 
- t.Errorf("SemanticTokensFull(%q): %v", badURI, err) - } - }) -} - -// fix bug involving type parameters and regular parameters -// (golang/vscode-go#2527) -func TestSemantic_2527(t *testing.T) { - // these are the expected types of identifiers in text order - want := []fake.SemanticToken{ - {Token: "package", TokenType: "keyword"}, - {Token: "foo", TokenType: "namespace"}, - {Token: "// Deprecated (for testing)", TokenType: "comment"}, - {Token: "func", TokenType: "keyword"}, - {Token: "Add", TokenType: "function", Mod: "definition deprecated"}, - {Token: "T", TokenType: "typeParameter", Mod: "definition"}, - {Token: "int", TokenType: "type", Mod: "defaultLibrary"}, - {Token: "target", TokenType: "parameter", Mod: "definition"}, - {Token: "T", TokenType: "typeParameter"}, - {Token: "l", TokenType: "parameter", Mod: "definition"}, - {Token: "T", TokenType: "typeParameter"}, - {Token: "T", TokenType: "typeParameter"}, - {Token: "return", TokenType: "keyword"}, - {Token: "append", TokenType: "function", Mod: "defaultLibrary"}, - {Token: "l", TokenType: "parameter"}, - {Token: "target", TokenType: "parameter"}, - {Token: "for", TokenType: "keyword"}, - {Token: "range", TokenType: "keyword"}, - {Token: "l", TokenType: "parameter"}, - {Token: "// test coverage", TokenType: "comment"}, - {Token: "return", TokenType: "keyword"}, - {Token: "nil", TokenType: "variable", Mod: "readonly defaultLibrary"}, - } - src := ` --- go.mod -- -module example.com - -go 1.19 --- main.go -- -package foo -// Deprecated (for testing) -func Add[T int](target T, l []T) []T { - return append(l, target) - for range l {} // test coverage - return nil -} -` - WithOptions( - Modes(Default), - Settings{"semanticTokens": true}, - ).Run(t, src, func(t *testing.T, env *Env) { - env.OpenFile("main.go") - env.AfterChange( - Diagnostics(env.AtRegexp("main.go", "for range")), - ) - seen, err := env.Editor.SemanticTokens(env.Ctx, "main.go") - if err != nil { - t.Fatal(err) - } - if x := cmp.Diff(want, 
seen); x != "" { - t.Errorf("Semantic tokens do not match (-want +got):\n%s", x) - } - }) - -} - -// fix inconsistency in TypeParameters -// https://github.com/golang/go/issues/57619 -func TestSemantic_57619(t *testing.T) { - if !typeparams.Enabled { - t.Skip("type parameters are needed for this test") - } - src := ` --- go.mod -- -module example.com - -go 1.19 --- main.go -- -package foo -type Smap[K int, V any] struct { - Store map[K]V -} -func (s *Smap[K, V]) Get(k K) (V, bool) { - v, ok := s.Store[k] - return v, ok -} -func New[K int, V any]() Smap[K, V] { - return Smap[K, V]{Store: make(map[K]V)} -} -` - WithOptions( - Modes(Default), - Settings{"semanticTokens": true}, - ).Run(t, src, func(t *testing.T, env *Env) { - env.OpenFile("main.go") - seen, err := env.Editor.SemanticTokens(env.Ctx, "main.go") - if err != nil { - t.Fatal(err) - } - for i, s := range seen { - if (s.Token == "K" || s.Token == "V") && s.TokenType != "typeParameter" { - t.Errorf("%d: expected K and V to be type parameters, but got %v", i, s) - } - } - }) -} - -func TestSemanticGoDirectives(t *testing.T) { - src := ` --- go.mod -- -module example.com - -go 1.19 --- main.go -- -package foo - -//go:linkname now time.Now -func now() - -//go:noinline -func foo() {} - -// Mentioning go:noinline should not tokenize. 
- -//go:notadirective -func bar() {} -` - want := []fake.SemanticToken{ - {Token: "package", TokenType: "keyword"}, - {Token: "foo", TokenType: "namespace"}, - - {Token: "//", TokenType: "comment"}, - {Token: "go:linkname", TokenType: "namespace"}, - {Token: "now time.Now", TokenType: "comment"}, - {Token: "func", TokenType: "keyword"}, - {Token: "now", TokenType: "function", Mod: "definition"}, - - {Token: "//", TokenType: "comment"}, - {Token: "go:noinline", TokenType: "namespace"}, - {Token: "func", TokenType: "keyword"}, - {Token: "foo", TokenType: "function", Mod: "definition"}, - - {Token: "// Mentioning go:noinline should not tokenize.", TokenType: "comment"}, - - {Token: "//go:notadirective", TokenType: "comment"}, - {Token: "func", TokenType: "keyword"}, - {Token: "bar", TokenType: "function", Mod: "definition"}, - } - - WithOptions( - Modes(Default), - Settings{"semanticTokens": true}, - ).Run(t, src, func(t *testing.T, env *Env) { - env.OpenFile("main.go") - seen, err := env.Editor.SemanticTokens(env.Ctx, "main.go") - if err != nil { - t.Fatal(err) - } - if x := cmp.Diff(want, seen); x != "" { - t.Errorf("Semantic tokens do not match (-want +got):\n%s", x) - } - }) -} diff --git a/gopls/internal/regtest/misc/settings_test.go b/gopls/internal/regtest/misc/settings_test.go deleted file mode 100644 index dd4042989a8..00000000000 --- a/gopls/internal/regtest/misc/settings_test.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package misc - -import ( - "testing" - - . 
"golang.org/x/tools/gopls/internal/lsp/regtest" -) - -func TestEmptyDirectoryFilters_Issue51843(t *testing.T) { - const src = ` --- go.mod -- -module mod.com - -go 1.12 --- main.go -- -package main - -func main() { -} -` - - WithOptions( - Settings{"directoryFilters": []string{""}}, - ).Run(t, src, func(t *testing.T, env *Env) { - // No need to do anything. Issue golang/go#51843 is triggered by the empty - // directory filter above. - }) -} diff --git a/gopls/internal/regtest/misc/vendor_test.go b/gopls/internal/regtest/misc/vendor_test.go deleted file mode 100644 index efed16b4be3..00000000000 --- a/gopls/internal/regtest/misc/vendor_test.go +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package misc - -import ( - "testing" - - . "golang.org/x/tools/gopls/internal/lsp/regtest" - - "golang.org/x/tools/gopls/internal/lsp/protocol" -) - -const basicProxy = ` --- golang.org/x/hello@v1.2.3/go.mod -- -module golang.org/x/hello - -go 1.14 --- golang.org/x/hello@v1.2.3/hi/hi.go -- -package hi - -var Goodbye error -` - -func TestInconsistentVendoring(t *testing.T) { - const pkgThatUsesVendoring = ` --- go.mod -- -module mod.com - -go 1.14 - -require golang.org/x/hello v1.2.3 --- go.sum -- -golang.org/x/hello v1.2.3 h1:EcMp5gSkIhaTkPXp8/3+VH+IFqTpk3ZbpOhqk0Ncmho= -golang.org/x/hello v1.2.3/go.mod h1:WW7ER2MRNXWA6c8/4bDIek4Hc/+DofTrMaQQitGXcco= --- vendor/modules.txt -- --- a/a1.go -- -package a - -import "golang.org/x/hello/hi" - -func _() { - _ = hi.Goodbye - var q int // hardcode a diagnostic -} -` - WithOptions( - Modes(Default), - ProxyFiles(basicProxy), - ).Run(t, pkgThatUsesVendoring, func(t *testing.T, env *Env) { - env.OpenFile("a/a1.go") - d := &protocol.PublishDiagnosticsParams{} - env.OnceMet( - InitialWorkspaceLoad, - Diagnostics(env.AtRegexp("go.mod", "module mod.com"), WithMessage("Inconsistent vendoring")), - 
ReadDiagnostics("go.mod", d), - ) - env.ApplyQuickFixes("go.mod", d.Diagnostics) - - env.AfterChange( - Diagnostics(env.AtRegexp("a/a1.go", `q int`), WithMessage("not used")), - ) - }) -} - -func TestWindowsVendoring_Issue56291(t *testing.T) { - const src = ` --- go.mod -- -module mod.com - -go 1.14 - -require golang.org/x/hello v1.2.3 --- go.sum -- -golang.org/x/hello v1.2.3 h1:EcMp5gSkIhaTkPXp8/3+VH+IFqTpk3ZbpOhqk0Ncmho= -golang.org/x/hello v1.2.3/go.mod h1:WW7ER2MRNXWA6c8/4bDIek4Hc/+DofTrMaQQitGXcco= --- main.go -- -package main - -import "golang.org/x/hello/hi" - -func main() { - _ = hi.Goodbye -} -` - WithOptions( - Modes(Default), - ProxyFiles(basicProxy), - ).Run(t, src, func(t *testing.T, env *Env) { - env.OpenFile("main.go") - env.AfterChange(NoDiagnostics()) - env.RunGoCommand("mod", "tidy") - env.RunGoCommand("mod", "vendor") - env.AfterChange(NoDiagnostics()) - env.RegexpReplace("main.go", `import "golang.org/x/hello/hi"`, "") - env.AfterChange( - Diagnostics(env.AtRegexp("main.go", "hi.Goodbye")), - ) - env.SaveBuffer("main.go") - env.AfterChange(NoDiagnostics()) - }) -} diff --git a/gopls/internal/regtest/misc/workspace_symbol_test.go b/gopls/internal/regtest/misc/workspace_symbol_test.go deleted file mode 100644 index 7b2866e98a9..00000000000 --- a/gopls/internal/regtest/misc/workspace_symbol_test.go +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package misc - -import ( - "testing" - - "github.com/google/go-cmp/cmp" - . 
"golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/source" -) - -func TestWorkspaceSymbolMissingMetadata(t *testing.T) { - const files = ` --- go.mod -- -module mod.com - -go 1.17 --- a.go -- -package p - -const K1 = "a.go" --- exclude.go -- - -//go:build exclude -// +build exclude - -package exclude - -const K2 = "exclude.go" -` - - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("a.go") - checkSymbols(env, "K", "K1") - - // Opening up an ignored file will result in an overlay with missing - // metadata, but this shouldn't break workspace symbols requests. - env.OpenFile("exclude.go") - checkSymbols(env, "K", "K1") - }) -} - -func TestWorkspaceSymbolSorting(t *testing.T) { - const files = ` --- go.mod -- -module mod.com - -go 1.17 --- a/a.go -- -package a - -const ( - Foo = iota - FooBar - Fooey - Fooex - Fooest -) -` - - var symbolMatcher = string(source.SymbolFastFuzzy) - WithOptions( - Settings{"symbolMatcher": symbolMatcher}, - ).Run(t, files, func(t *testing.T, env *Env) { - checkSymbols(env, "Foo", - "Foo", // prefer exact segment matches first - "FooBar", // ...followed by exact word matches - "Fooex", // shorter than Fooest, FooBar, lexically before Fooey - "Fooey", // shorter than Fooest, Foobar - "Fooest", - ) - }) -} - -func TestWorkspaceSymbolSpecialPatterns(t *testing.T) { - const files = ` --- go.mod -- -module mod.com - -go 1.17 --- a/a.go -- -package a - -const ( - AxxBxxCxx - ABC -) -` - - var symbolMatcher = string(source.SymbolFastFuzzy) - WithOptions( - Settings{"symbolMatcher": symbolMatcher}, - ).Run(t, files, func(t *testing.T, env *Env) { - checkSymbols(env, "ABC", "ABC", "AxxBxxCxx") - checkSymbols(env, "'ABC", "ABC") - checkSymbols(env, "^mod.com", "mod.com/a.ABC", "mod.com/a.AxxBxxCxx") - checkSymbols(env, "^mod.com Axx", "mod.com/a.AxxBxxCxx") - checkSymbols(env, "C$", "ABC") - }) -} - -func checkSymbols(env *Env, query string, want ...string) { - env.T.Helper() - var got []string - for 
_, info := range env.Symbol(query) { - got = append(got, info.Name) - } - if diff := cmp.Diff(got, want); diff != "" { - env.T.Errorf("unexpected Symbol(%q) result (+want -got):\n%s", query, diff) - } -} diff --git a/gopls/internal/regtest/template/template_test.go b/gopls/internal/regtest/template/template_test.go deleted file mode 100644 index cd190cd3813..00000000000 --- a/gopls/internal/regtest/template/template_test.go +++ /dev/null @@ -1,231 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package template - -import ( - "strings" - "testing" - - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/hooks" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" -) - -func TestMain(m *testing.M) { - bug.PanicOnBugs = true - Main(m, hooks.Options) -} - -func TestMultilineTokens(t *testing.T) { - // 51731: panic: runtime error: slice bounds out of range [38:3] - const files = ` --- go.mod -- -module mod.com - -go 1.17 --- hi.tmpl -- -{{if (foÜx .X.Y)}}😀{{$A := - "hi" - }}{{.Z $A}}{{else}} -{{$A.X 12}} -{{foo (.X.Y) 23 ($A.Z)}} -{{end}} -` - WithOptions( - Settings{ - "templateExtensions": []string{"tmpl"}, - "semanticTokens": true, - }, - ).Run(t, files, func(t *testing.T, env *Env) { - var p protocol.SemanticTokensParams - p.TextDocument.URI = env.Sandbox.Workdir.URI("hi.tmpl") - toks, err := env.Editor.Server.SemanticTokensFull(env.Ctx, &p) - if err != nil { - t.Errorf("semantic token failed: %v", err) - } - if toks == nil || len(toks.Data) == 0 { - t.Errorf("got no semantic tokens") - } - }) -} - -func TestTemplatesFromExtensions(t *testing.T) { - const files = ` --- go.mod -- -module mod.com - -go 1.12 --- hello.tmpl -- -{{range .Planets}} -Hello {{}} <-- missing body -{{end}} -` - WithOptions( - Settings{ - "templateExtensions": []string{"tmpl"}, - "semanticTokens": 
true, - }, - ).Run(t, files, func(t *testing.T, env *Env) { - // TODO: can we move this diagnostic onto {{}}? - var diags protocol.PublishDiagnosticsParams - env.OnceMet( - InitialWorkspaceLoad, - Diagnostics(env.AtRegexp("hello.tmpl", "()Hello {{}}")), - ReadDiagnostics("hello.tmpl", &diags), - ) - d := diags.Diagnostics // issue 50786: check for Source - if len(d) != 1 { - t.Errorf("expected 1 diagnostic, got %d", len(d)) - return - } - if d[0].Source != "template" { - t.Errorf("expected Source 'template', got %q", d[0].Source) - } - // issue 50801 (even broken templates could return some semantic tokens) - var p protocol.SemanticTokensParams - p.TextDocument.URI = env.Sandbox.Workdir.URI("hello.tmpl") - toks, err := env.Editor.Server.SemanticTokensFull(env.Ctx, &p) - if err != nil { - t.Errorf("semantic token failed: %v", err) - } - if toks == nil || len(toks.Data) == 0 { - t.Errorf("got no semantic tokens") - } - - env.WriteWorkspaceFile("hello.tmpl", "{{range .Planets}}\nHello {{.}}\n{{end}}") - env.AfterChange(NoDiagnostics(ForFile("hello.tmpl"))) - }) -} - -func TestTemplatesObserveDirectoryFilters(t *testing.T) { - const files = ` --- go.mod -- -module mod.com - -go 1.12 --- a/a.tmpl -- -A {{}} <-- missing body --- b/b.tmpl -- -B {{}} <-- missing body -` - - WithOptions( - Settings{ - "directoryFilters": []string{"-b"}, - "templateExtensions": []string{"tmpl"}, - }, - ).Run(t, files, func(t *testing.T, env *Env) { - env.OnceMet( - InitialWorkspaceLoad, - Diagnostics(env.AtRegexp("a/a.tmpl", "()A")), - NoDiagnostics(ForFile("b/b.tmpl")), - ) - }) -} - -func TestTemplatesFromLangID(t *testing.T) { - const files = ` --- go.mod -- -module mod.com - -go 1.12 -` - - Run(t, files, func(t *testing.T, env *Env) { - env.CreateBuffer("hello.tmpl", "") - env.AfterChange( - NoDiagnostics(ForFile("hello.tmpl")), // Don't get spurious errors for empty templates. 
- ) - env.SetBufferContent("hello.tmpl", "{{range .Planets}}\nHello {{}}\n{{end}}") - env.Await(Diagnostics(env.AtRegexp("hello.tmpl", "()Hello {{}}"))) - env.RegexpReplace("hello.tmpl", "{{}}", "{{.}}") - env.Await(NoDiagnostics(ForFile("hello.tmpl"))) - }) -} - -func TestClosingTemplatesMakesDiagnosticsDisappear(t *testing.T) { - const files = ` --- go.mod -- -module mod.com - -go 1.12 --- hello.tmpl -- -{{range .Planets}} -Hello {{}} <-- missing body -{{end}} -` - - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("hello.tmpl") - env.AfterChange( - Diagnostics(env.AtRegexp("hello.tmpl", "()Hello {{}}")), - ) - // Since we don't have templateExtensions configured, closing hello.tmpl - // should make its diagnostics disappear. - env.CloseBuffer("hello.tmpl") - env.AfterChange( - NoDiagnostics(ForFile("hello.tmpl")), - ) - }) -} - -func TestMultipleSuffixes(t *testing.T) { - const files = ` --- go.mod -- -module mod.com - -go 1.12 --- b.gotmpl -- -{{define "A"}}goo{{end}} --- a.tmpl -- -{{template "A"}} -` - - WithOptions( - Settings{ - "templateExtensions": []string{"tmpl", "gotmpl"}, - }, - ).Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("a.tmpl") - x := env.RegexpSearch("a.tmpl", `A`) - loc := env.GoToDefinition(x) - refs := env.References(loc) - if len(refs) != 2 { - t.Fatalf("got %v reference(s), want 2", len(refs)) - } - // make sure we got one from b.gotmpl - want := env.Sandbox.Workdir.URI("b.gotmpl") - if refs[0].URI != want && refs[1].URI != want { - t.Errorf("failed to find reference to %s", shorten(want)) - for i, r := range refs { - t.Logf("%d: URI:%s %v", i, shorten(r.URI), r.Range) - } - } - - content, nloc := env.Hover(loc) - if loc != nloc { - t.Errorf("loc? 
got %v, wanted %v", nloc, loc) - } - if content.Value != "template A defined" { - t.Errorf("got %s, wanted 'template A defined", content.Value) - } - }) -} - -// shorten long URIs -func shorten(fn protocol.DocumentURI) string { - if len(fn) <= 20 { - return string(fn) - } - pieces := strings.Split(string(fn), "/") - if len(pieces) < 2 { - return string(fn) - } - j := len(pieces) - return pieces[j-2] + "/" + pieces[j-1] -} - -// Hover needs tests diff --git a/gopls/internal/regtest/workspace/directoryfilters_test.go b/gopls/internal/regtest/workspace/directoryfilters_test.go deleted file mode 100644 index 6e2a15557fd..00000000000 --- a/gopls/internal/regtest/workspace/directoryfilters_test.go +++ /dev/null @@ -1,259 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package workspace - -import ( - "sort" - "strings" - "testing" - - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/internal/testenv" -) - -// This file contains regression tests for the directoryFilters setting. -// -// TODO: -// - consolidate some of these tests into a single test -// - add more tests for changing directory filters - -func TestDirectoryFilters(t *testing.T) { - WithOptions( - ProxyFiles(workspaceProxy), - WorkspaceFolders("pkg"), - Settings{ - "directoryFilters": []string{"-inner"}, - }, - ).Run(t, workspaceModule, func(t *testing.T, env *Env) { - syms := env.Symbol("Hi") - sort.Slice(syms, func(i, j int) bool { return syms[i].ContainerName < syms[j].ContainerName }) - for _, s := range syms { - if strings.Contains(s.ContainerName, "inner") { - t.Errorf("WorkspaceSymbol: found symbol %q with container %q, want \"inner\" excluded", s.Name, s.ContainerName) - } - } - }) -} - -func TestDirectoryFiltersLoads(t *testing.T) { - // exclude, and its error, should be excluded from the workspace. 
- const files = ` --- go.mod -- -module example.com - -go 1.12 --- exclude/exclude.go -- -package exclude - -const _ = Nonexistant -` - - WithOptions( - Settings{"directoryFilters": []string{"-exclude"}}, - ).Run(t, files, func(t *testing.T, env *Env) { - env.OnceMet( - InitialWorkspaceLoad, - NoDiagnostics(ForFile("exclude/x.go")), - ) - }) -} - -func TestDirectoryFiltersTransitiveDep(t *testing.T) { - // Even though exclude is excluded from the workspace, it should - // still be importable as a non-workspace package. - const files = ` --- go.mod -- -module example.com - -go 1.12 --- include/include.go -- -package include -import "example.com/exclude" - -const _ = exclude.X --- exclude/exclude.go -- -package exclude - -const _ = Nonexistant // should be ignored, since this is a non-workspace package -const X = 1 -` - - WithOptions( - Settings{"directoryFilters": []string{"-exclude"}}, - ).Run(t, files, func(t *testing.T, env *Env) { - env.OnceMet( - InitialWorkspaceLoad, - NoDiagnostics(ForFile("exclude/exclude.go")), // filtered out - NoDiagnostics(ForFile("include/include.go")), // successfully builds - ) - }) -} - -func TestDirectoryFiltersWorkspaceModules(t *testing.T) { - // Define a module include.com which should be in the workspace, plus a - // module exclude.com which should be excluded and therefore come from - // the proxy. 
- const files = ` --- include/go.mod -- -module include.com - -go 1.12 - -require exclude.com v1.0.0 - --- include/go.sum -- -exclude.com v1.0.0 h1:Q5QSfDXY5qyNCBeUiWovUGqcLCRZKoTs9XdBeVz+w1I= -exclude.com v1.0.0/go.mod h1:hFox2uDlNB2s2Jfd9tHlQVfgqUiLVTmh6ZKat4cvnj4= - --- include/include.go -- -package include - -import "exclude.com" - -var _ = exclude.X // satisfied only by the workspace version --- exclude/go.mod -- -module exclude.com - -go 1.12 --- exclude/exclude.go -- -package exclude - -const X = 1 -` - const proxy = ` --- exclude.com@v1.0.0/go.mod -- -module exclude.com - -go 1.12 --- exclude.com@v1.0.0/exclude.go -- -package exclude -` - WithOptions( - Modes(Experimental), - ProxyFiles(proxy), - Settings{"directoryFilters": []string{"-exclude"}}, - ).Run(t, files, func(t *testing.T, env *Env) { - env.Await(Diagnostics(env.AtRegexp("include/include.go", `exclude.(X)`))) - }) -} - -// Test for golang/go#46438: support for '**' in directory filters. -func TestDirectoryFilters_Wildcard(t *testing.T) { - filters := []string{"-**/bye"} - WithOptions( - ProxyFiles(workspaceProxy), - WorkspaceFolders("pkg"), - Settings{ - "directoryFilters": filters, - }, - ).Run(t, workspaceModule, func(t *testing.T, env *Env) { - syms := env.Symbol("Bye") - sort.Slice(syms, func(i, j int) bool { return syms[i].ContainerName < syms[j].ContainerName }) - for _, s := range syms { - if strings.Contains(s.ContainerName, "bye") { - t.Errorf("WorkspaceSymbol: found symbol %q with container %q with filters %v", s.Name, s.ContainerName, filters) - } - } - }) -} - -// Test for golang/go#52993: wildcard directoryFilters should apply to -// goimports scanning as well. 
-func TestDirectoryFilters_ImportScanning(t *testing.T) { - const files = ` --- go.mod -- -module mod.test - -go 1.12 --- main.go -- -package main - -func main() { - bye.Goodbye() -} --- p/bye/bye.go -- -package bye - -func Goodbye() {} -` - - WithOptions( - Settings{ - "directoryFilters": []string{"-**/bye"}, - }, - // This test breaks in 'Experimental' mode, because with - // experimentalWorkspaceModule set we the goimports scan behaves - // differently. - // - // Since this feature is going away (golang/go#52897), don't investigate. - Modes(Default), - ).Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("main.go") - beforeSave := env.BufferText("main.go") - env.OrganizeImports("main.go") - got := env.BufferText("main.go") - if got != beforeSave { - t.Errorf("after organizeImports code action, got modified buffer:\n%s", got) - } - }) -} - -// Test for golang/go#52993: non-wildcard directoryFilters should still be -// applied relative to the workspace folder, not the module root. 
-func TestDirectoryFilters_MultiRootImportScanning(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // uses go.work - - const files = ` --- go.work -- -go 1.18 - -use ( - a - b -) --- a/go.mod -- -module mod1.test - -go 1.18 --- a/main.go -- -package main - -func main() { - hi.Hi() -} --- a/hi/hi.go -- -package hi - -func Hi() {} --- b/go.mod -- -module mod2.test - -go 1.18 --- b/main.go -- -package main - -func main() { - hi.Hi() -} --- b/hi/hi.go -- -package hi - -func Hi() {} -` - - WithOptions( - Settings{ - "directoryFilters": []string{"-hi"}, // this test fails with -**/hi - }, - ).Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("a/main.go") - beforeSave := env.BufferText("a/main.go") - env.OrganizeImports("a/main.go") - got := env.BufferText("a/main.go") - if got == beforeSave { - t.Errorf("after organizeImports code action, got identical buffer:\n%s", got) - } - }) -} diff --git a/gopls/internal/regtest/workspace/quickfix_test.go b/gopls/internal/regtest/workspace/quickfix_test.go deleted file mode 100644 index 995a4988091..00000000000 --- a/gopls/internal/regtest/workspace/quickfix_test.go +++ /dev/null @@ -1,342 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package workspace - -import ( - "strings" - "testing" - - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" - "golang.org/x/tools/internal/testenv" - - . 
"golang.org/x/tools/gopls/internal/lsp/regtest" -) - -func TestQuickFix_UseModule(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // needs go.work - - const files = ` --- go.work -- -go 1.20 - -use ( - ./a -) --- a/go.mod -- -module mod.com/a - -go 1.18 - --- a/main.go -- -package main - -import "mod.com/a/lib" - -func main() { - _ = lib.C -} - --- a/lib/lib.go -- -package lib - -const C = "b" --- b/go.mod -- -module mod.com/b - -go 1.18 - --- b/main.go -- -package main - -import "mod.com/b/lib" - -func main() { - _ = lib.C -} - --- b/lib/lib.go -- -package lib - -const C = "b" -` - - for _, title := range []string{ - "Use this module", - "Use all modules", - } { - t.Run(title, func(t *testing.T) { - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("b/main.go") - var d protocol.PublishDiagnosticsParams - env.AfterChange(ReadDiagnostics("b/main.go", &d)) - fixes := env.GetQuickFixes("b/main.go", d.Diagnostics) - var toApply []protocol.CodeAction - for _, fix := range fixes { - if strings.Contains(fix.Title, title) { - toApply = append(toApply, fix) - } - } - if len(toApply) != 1 { - t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), title, toApply) - } - env.ApplyCodeAction(toApply[0]) - env.AfterChange(NoDiagnostics()) - want := `go 1.20 - -use ( - ./a - ./b -) -` - got := env.ReadWorkspaceFile("go.work") - if diff := compare.Text(want, got); diff != "" { - t.Errorf("unexpeced go.work content:\n%s", diff) - } - }) - }) - } -} - -func TestQuickFix_AddGoWork(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // needs go.work - - const files = ` --- a/go.mod -- -module mod.com/a - -go 1.18 - --- a/main.go -- -package main - -import "mod.com/a/lib" - -func main() { - _ = lib.C -} - --- a/lib/lib.go -- -package lib - -const C = "b" --- b/go.mod -- -module mod.com/b - -go 1.18 - --- b/main.go -- -package main - -import "mod.com/b/lib" - -func main() { - _ = lib.C -} - --- b/lib/lib.go -- -package lib - -const C = "b" -` - - tests 
:= []struct { - name string - file string - title string - want string // expected go.work content, excluding go directive line - }{ - { - "use b", - "b/main.go", - "Add a go.work file using this module", - ` -use ./b -`, - }, - { - "use a", - "a/main.go", - "Add a go.work file using this module", - ` -use ./a -`, - }, - { - "use all", - "a/main.go", - "Add a go.work file using all modules", - ` -use ( - ./a - ./b -) -`, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile(test.file) - var d protocol.PublishDiagnosticsParams - env.AfterChange(ReadDiagnostics(test.file, &d)) - fixes := env.GetQuickFixes(test.file, d.Diagnostics) - var toApply []protocol.CodeAction - for _, fix := range fixes { - if strings.Contains(fix.Title, test.title) { - toApply = append(toApply, fix) - } - } - if len(toApply) != 1 { - t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), test.title, toApply) - } - env.ApplyCodeAction(toApply[0]) - env.AfterChange( - NoDiagnostics(ForFile(test.file)), - ) - - got := env.ReadWorkspaceFile("go.work") - // Ignore the `go` directive, which we assume is on the first line of - // the go.work file. This allows the test to be independent of go version. 
- got = strings.Join(strings.Split(got, "\n")[1:], "\n") - if diff := compare.Text(test.want, got); diff != "" { - t.Errorf("unexpected go.work content:\n%s", diff) - } - }) - }) - } -} - -func TestQuickFix_UnsavedGoWork(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // needs go.work - - const files = ` --- go.work -- -go 1.21 - -use ( - ./a -) --- a/go.mod -- -module mod.com/a - -go 1.18 - --- a/main.go -- -package main - -func main() {} --- b/go.mod -- -module mod.com/b - -go 1.18 - --- b/main.go -- -package main - -func main() {} -` - - for _, title := range []string{ - "Use this module", - "Use all modules", - } { - t.Run(title, func(t *testing.T) { - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("go.work") - env.OpenFile("b/main.go") - env.RegexpReplace("go.work", "go 1.21", "go 1.21 // arbitrary comment") - var d protocol.PublishDiagnosticsParams - env.AfterChange(ReadDiagnostics("b/main.go", &d)) - fixes := env.GetQuickFixes("b/main.go", d.Diagnostics) - var toApply []protocol.CodeAction - for _, fix := range fixes { - if strings.Contains(fix.Title, title) { - toApply = append(toApply, fix) - } - } - if len(toApply) != 1 { - t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), title, toApply) - } - fix := toApply[0] - err := env.Editor.ApplyCodeAction(env.Ctx, fix) - if err == nil { - t.Fatalf("codeAction(%q) succeeded unexpectedly", fix.Title) - } - - if got := err.Error(); !strings.Contains(got, "must save") { - t.Errorf("codeAction(%q) returned error %q, want containing \"must save\"", fix.Title, err) - } - }) - }) - } -} - -func TestQuickFix_GOWORKOff(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // needs go.work - - const files = ` --- go.work -- -go 1.21 - -use ( - ./a -) --- a/go.mod -- -module mod.com/a - -go 1.18 - --- a/main.go -- -package main - -func main() {} --- b/go.mod -- -module mod.com/b - -go 1.18 - --- b/main.go -- -package main - -func main() {} -` - - for _, title := range []string{ - "Use 
this module", - "Use all modules", - } { - t.Run(title, func(t *testing.T) { - WithOptions( - EnvVars{"GOWORK": "off"}, - ).Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("go.work") - env.OpenFile("b/main.go") - var d protocol.PublishDiagnosticsParams - env.AfterChange(ReadDiagnostics("b/main.go", &d)) - fixes := env.GetQuickFixes("b/main.go", d.Diagnostics) - var toApply []protocol.CodeAction - for _, fix := range fixes { - if strings.Contains(fix.Title, title) { - toApply = append(toApply, fix) - } - } - if len(toApply) != 1 { - t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), title, toApply) - } - fix := toApply[0] - err := env.Editor.ApplyCodeAction(env.Ctx, fix) - if err == nil { - t.Fatalf("codeAction(%q) succeeded unexpectedly", fix.Title) - } - - if got := err.Error(); !strings.Contains(got, "GOWORK=off") { - t.Errorf("codeAction(%q) returned error %q, want containing \"GOWORK=off\"", fix.Title, err) - } - }) - }) - } -} diff --git a/gopls/internal/server/assets/favicon.ico b/gopls/internal/server/assets/favicon.ico new file mode 100644 index 00000000000..8d225846dbc Binary files /dev/null and b/gopls/internal/server/assets/favicon.ico differ diff --git a/gopls/internal/server/assets/go-logo-blue.svg b/gopls/internal/server/assets/go-logo-blue.svg new file mode 100644 index 00000000000..da6ea83de1e --- /dev/null +++ b/gopls/internal/server/assets/go-logo-blue.svg @@ -0,0 +1 @@ +<svg height="78" viewBox="0 0 207 78" width="207" xmlns="http://www.w3.org/2000/svg"><g fill="#00acd7" fill-rule="evenodd"><path d="m16.2 24.1c-.4 0-.5-.2-.3-.5l2.1-2.7c.2-.3.7-.5 1.1-.5h35.7c.4 0 .5.3.3.6l-1.7 2.6c-.2.3-.7.6-1 .6z"/><path d="m1.1 33.3c-.4 0-.5-.2-.3-.5l2.1-2.7c.2-.3.7-.5 1.1-.5h45.6c.4 0 .6.3.5.6l-.8 2.4c-.1.4-.5.6-.9.6z"/><path d="m25.3 42.5c-.4 0-.5-.3-.3-.6l1.4-2.5c.2-.3.6-.6 1-.6h20c.4 0 .6.3.6.7l-.2 2.4c0 .4-.4.7-.7.7z"/><g transform="translate(55)"><path d="m74.1 22.3c-6.3 1.6-10.6 2.8-16.8 
4.4-1.5.4-1.6.5-2.9-1-1.5-1.7-2.6-2.8-4.7-3.8-6.3-3.1-12.4-2.2-18.1 1.5-6.8 4.4-10.3 10.9-10.2 19 .1 8 5.6 14.6 13.5 15.7 6.8.9 12.5-1.5 17-6.6.9-1.1 1.7-2.3 2.7-3.7-3.6 0-8.1 0-19.3 0-2.1 0-2.6-1.3-1.9-3 1.3-3.1 3.7-8.3 5.1-10.9.3-.6 1-1.6 2.5-1.6h36.4c-.2 2.7-.2 5.4-.6 8.1-1.1 7.2-3.8 13.8-8.2 19.6-7.2 9.5-16.6 15.4-28.5 17-9.8 1.3-18.9-.6-26.9-6.6-7.4-5.6-11.6-13-12.7-22.2-1.3-10.9 1.9-20.7 8.5-29.3 7.1-9.3 16.5-15.2 28-17.3 9.4-1.7 18.4-.6 26.5 4.9 5.3 3.5 9.1 8.3 11.6 14.1.6.9.2 1.4-1 1.7z"/><path d="m107.2 77.6c-9.1-.2-17.4-2.8-24.4-8.8-5.9-5.1-9.6-11.6-10.8-19.3-1.8-11.3 1.3-21.3 8.1-30.2 7.3-9.6 16.1-14.6 28-16.7 10.2-1.8 19.8-.8 28.5 5.1 7.9 5.4 12.8 12.7 14.1 22.3 1.7 13.5-2.2 24.5-11.5 33.9-6.6 6.7-14.7 10.9-24 12.8-2.7.5-5.4.6-8 .9zm23.8-40.4c-.1-1.3-.1-2.3-.3-3.3-1.8-9.9-10.9-15.5-20.4-13.3-9.3 2.1-15.3 8-17.5 17.4-1.8 7.8 2 15.7 9.2 18.9 5.5 2.4 11 2.1 16.3-.6 7.9-4.1 12.2-10.5 12.7-19.1z" fill-rule="nonzero"/></g></g></svg> \ No newline at end of file diff --git a/gopls/internal/server/call_hierarchy.go b/gopls/internal/server/call_hierarchy.go new file mode 100644 index 00000000000..671d4f8c81c --- /dev/null +++ b/gopls/internal/server/call_hierarchy.go @@ -0,0 +1,59 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package server + +import ( + "context" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" +) + +func (s *server) PrepareCallHierarchy(ctx context.Context, params *protocol.CallHierarchyPrepareParams) ([]protocol.CallHierarchyItem, error) { + ctx, done := event.Start(ctx, "lsp.Server.prepareCallHierarchy") + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + if snapshot.FileKind(fh) != file.Go { + return nil, nil // empty result + } + return golang.PrepareCallHierarchy(ctx, snapshot, fh, params.Position) +} + +func (s *server) IncomingCalls(ctx context.Context, params *protocol.CallHierarchyIncomingCallsParams) ([]protocol.CallHierarchyIncomingCall, error) { + ctx, done := event.Start(ctx, "lsp.Server.incomingCalls") + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.Item.URI) + if err != nil { + return nil, err + } + defer release() + if snapshot.FileKind(fh) != file.Go { + return nil, nil // empty result + } + return golang.IncomingCalls(ctx, snapshot, fh, params.Item.Range.Start) +} + +func (s *server) OutgoingCalls(ctx context.Context, params *protocol.CallHierarchyOutgoingCallsParams) ([]protocol.CallHierarchyOutgoingCall, error) { + ctx, done := event.Start(ctx, "lsp.Server.outgoingCalls") + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.Item.URI) + if err != nil { + return nil, err + } + defer release() + if snapshot.FileKind(fh) != file.Go { + return nil, nil // empty result + } + return golang.OutgoingCalls(ctx, snapshot, fh, params.Item.Range.Start) +} diff --git a/gopls/internal/server/code_action.go b/gopls/internal/server/code_action.go new file mode 100644 index 00000000000..b26f9780c60 --- /dev/null +++ b/gopls/internal/server/code_action.go @@ -0,0 +1,275 @@ +// Copyright 2018 The Go Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +import ( + "context" + "fmt" + "sort" + "strings" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/mod" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/internal/event" +) + +func (s *server) CodeAction(ctx context.Context, params *protocol.CodeActionParams) ([]protocol.CodeAction, error) { + ctx, done := event.Start(ctx, "lsp.Server.codeAction") + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + uri := fh.URI() + + // Determine the supported actions for this file kind. + kind := snapshot.FileKind(fh) + supportedCodeActions, ok := snapshot.Options().SupportedCodeActions[kind] + if !ok { + return nil, fmt.Errorf("no supported code actions for %v file kind", kind) + } + if len(supportedCodeActions) == 0 { + return nil, nil // not an error if there are none supported + } + + // The Only field of the context specifies which code actions the client wants. + // If Only is empty, assume that the client wants all of the non-explicit code actions. + var want map[protocol.CodeActionKind]bool + { + // Explicit Code Actions are opt-in and shouldn't be returned to the client unless + // requested using Only. + // TODO: Add other CodeLenses such as GoGenerate, RegenerateCgo, etc.. 
+ explicit := map[protocol.CodeActionKind]bool{ + protocol.GoTest: true, + } + + if len(params.Context.Only) == 0 { + want = supportedCodeActions + } else { + want = make(map[protocol.CodeActionKind]bool) + for _, only := range params.Context.Only { + for k, v := range supportedCodeActions { + if only == k || strings.HasPrefix(string(k), string(only)+".") { + want[k] = want[k] || v + } + } + want[only] = want[only] || explicit[only] + } + } + } + if len(want) == 0 { + return nil, fmt.Errorf("no supported code action to execute for %s, wanted %v", uri, params.Context.Only) + } + + switch kind { + case file.Mod: + var actions []protocol.CodeAction + + fixes, err := s.codeActionsMatchingDiagnostics(ctx, fh.URI(), snapshot, params.Context.Diagnostics, want) + if err != nil { + return nil, err + } + + // Group vulnerability fixes by their range, and select only the most + // appropriate upgrades. + // + // TODO(rfindley): can this instead be accomplished on the diagnosis side, + // so that code action handling remains uniform? + vulnFixes := make(map[protocol.Range][]protocol.CodeAction) + searchFixes: + for _, fix := range fixes { + for _, diag := range fix.Diagnostics { + if diag.Source == string(cache.Govulncheck) || diag.Source == string(cache.Vulncheck) { + vulnFixes[diag.Range] = append(vulnFixes[diag.Range], fix) + continue searchFixes + } + } + actions = append(actions, fix) + } + + for _, fixes := range vulnFixes { + fixes = mod.SelectUpgradeCodeActions(fixes) + actions = append(actions, fixes...) + } + + return actions, nil + + case file.Go: + // Don't suggest fixes for generated files, since they are generally + // not useful and some editors may apply them automatically on save. 
+ if golang.IsGenerated(ctx, snapshot, uri) { + return nil, nil + } + + actions, err := s.codeActionsMatchingDiagnostics(ctx, uri, snapshot, params.Context.Diagnostics, want) + if err != nil { + return nil, err + } + + moreActions, err := golang.CodeActions(ctx, snapshot, fh, params.Range, params.Context.Diagnostics, want) + if err != nil { + return nil, err + } + actions = append(actions, moreActions...) + + return actions, nil + + default: + // Unsupported file kind for a code action. + return nil, nil + } +} + +// ResolveCodeAction resolves missing Edit information (that is, computes the +// details of the necessary patch) in the given code action using the provided +// Data field of the CodeAction, which should contain the raw json of a protocol.Command. +// +// This should be called by the client before applying code actions, when the +// client has code action resolve support. +// +// This feature allows capable clients to preview and selectively apply the diff +// instead of applying the whole thing unconditionally through workspace/applyEdit. +func (s *server) ResolveCodeAction(ctx context.Context, ca *protocol.CodeAction) (*protocol.CodeAction, error) { + ctx, done := event.Start(ctx, "lsp.Server.resolveCodeAction") + defer done() + + // Only resolve the code action if there is Data provided. 
+ var cmd protocol.Command + if ca.Data != nil { + if err := protocol.UnmarshalJSON(*ca.Data, &cmd); err != nil { + return nil, err + } + } + if cmd.Command != "" { + params := &protocol.ExecuteCommandParams{ + Command: cmd.Command, + Arguments: cmd.Arguments, + } + + handler := &commandHandler{ + s: s, + params: params, + } + edit, err := command.Dispatch(ctx, params, handler) + if err != nil { + + return nil, err + } + var ok bool + if ca.Edit, ok = edit.(*protocol.WorkspaceEdit); !ok { + return nil, fmt.Errorf("unable to resolve code action %q", ca.Title) + } + } + return ca, nil +} + +// codeActionsMatchingDiagnostics fetches code actions for the provided +// diagnostics, by first attempting to unmarshal code actions directly from the +// bundled protocol.Diagnostic.Data field, and failing that by falling back on +// fetching a matching Diagnostic from the set of stored diagnostics for +// this file. +func (s *server) codeActionsMatchingDiagnostics(ctx context.Context, uri protocol.DocumentURI, snapshot *cache.Snapshot, pds []protocol.Diagnostic, want map[protocol.CodeActionKind]bool) ([]protocol.CodeAction, error) { + var actions []protocol.CodeAction + var unbundled []protocol.Diagnostic // diagnostics without bundled code actions in their Data field + for _, pd := range pds { + bundled := cache.BundledQuickFixes(pd) + if len(bundled) > 0 { + for _, fix := range bundled { + if want[fix.Kind] { + actions = append(actions, fix) + } + } + } else { + // No bundled actions: keep searching for a match. + unbundled = append(unbundled, pd) + } + } + + for _, pd := range unbundled { + for _, sd := range s.findMatchingDiagnostics(uri, pd) { + diagActions, err := codeActionsForDiagnostic(ctx, snapshot, sd, &pd, want) + if err != nil { + return nil, err + } + actions = append(actions, diagActions...) 
+ } + } + return actions, nil +} + +func codeActionsForDiagnostic(ctx context.Context, snapshot *cache.Snapshot, sd *cache.Diagnostic, pd *protocol.Diagnostic, want map[protocol.CodeActionKind]bool) ([]protocol.CodeAction, error) { + var actions []protocol.CodeAction + for _, fix := range sd.SuggestedFixes { + if !want[fix.ActionKind] { + continue + } + changes := []protocol.DocumentChanges{} // must be a slice + for uri, edits := range fix.Edits { + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + changes = append(changes, documentChanges(fh, edits)...) + } + actions = append(actions, protocol.CodeAction{ + Title: fix.Title, + Kind: fix.ActionKind, + Edit: &protocol.WorkspaceEdit{ + DocumentChanges: changes, + }, + Command: fix.Command, + Diagnostics: []protocol.Diagnostic{*pd}, + }) + } + return actions, nil +} + +func (s *server) findMatchingDiagnostics(uri protocol.DocumentURI, pd protocol.Diagnostic) []*cache.Diagnostic { + s.diagnosticsMu.Lock() + defer s.diagnosticsMu.Unlock() + + var sds []*cache.Diagnostic + for _, viewDiags := range s.diagnostics[uri].byView { + for _, sd := range viewDiags.diagnostics { + sameDiagnostic := (pd.Message == strings.TrimSpace(sd.Message) && // extra space may have been trimmed when converting to protocol.Diagnostic + protocol.CompareRange(pd.Range, sd.Range) == 0 && + pd.Source == string(sd.Source)) + + if sameDiagnostic { + sds = append(sds, sd) + } + } + } + return sds +} + +func (s *server) getSupportedCodeActions() []protocol.CodeActionKind { + allCodeActionKinds := make(map[protocol.CodeActionKind]struct{}) + for _, kinds := range s.Options().SupportedCodeActions { + for kind := range kinds { + allCodeActionKinds[kind] = struct{}{} + } + } + var result []protocol.CodeActionKind + for kind := range allCodeActionKinds { + result = append(result, kind) + } + sort.Slice(result, func(i, j int) bool { + return result[i] < result[j] + }) + return result +} + +type unit = struct{} + +func 
documentChanges(fh file.Handle, edits []protocol.TextEdit) []protocol.DocumentChanges { + return protocol.TextEditsToDocumentChanges(fh.URI(), fh.Version(), edits) +} diff --git a/gopls/internal/server/code_lens.go b/gopls/internal/server/code_lens.go new file mode 100644 index 00000000000..7e6506c2b65 --- /dev/null +++ b/gopls/internal/server/code_lens.go @@ -0,0 +1,63 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +import ( + "context" + "fmt" + "sort" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/mod" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) CodeLens(ctx context.Context, params *protocol.CodeLensParams) ([]protocol.CodeLens, error) { + ctx, done := event.Start(ctx, "lsp.Server.codeLens", tag.URI.Of(params.TextDocument.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + + var lenses map[command.Command]golang.LensFunc + switch snapshot.FileKind(fh) { + case file.Mod: + lenses = mod.LensFuncs() + case file.Go: + lenses = golang.LensFuncs() + default: + // Unsupported file kind for a code lens. + return nil, nil + } + var result []protocol.CodeLens + for cmd, lf := range lenses { + if !snapshot.Options().Codelenses[string(cmd)] { + continue + } + added, err := lf(ctx, snapshot, fh) + // Code lens is called on every keystroke, so we should just operate in + // a best-effort mode, ignoring errors. + if err != nil { + event.Error(ctx, fmt.Sprintf("code lens %s failed", cmd), err) + continue + } + result = append(result, added...) 
+ } + sort.Slice(result, func(i, j int) bool { + a, b := result[i], result[j] + if cmp := protocol.CompareRange(a.Range, b.Range); cmp != 0 { + return cmp < 0 + } + return a.Command.Command < b.Command.Command + }) + return result, nil +} diff --git a/gopls/internal/server/command.go b/gopls/internal/server/command.go new file mode 100644 index 00000000000..a6a3cddfaf6 --- /dev/null +++ b/gopls/internal/server/command.go @@ -0,0 +1,1431 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "go/ast" + "io" + "os" + "path/filepath" + "regexp" + "runtime" + "runtime/pprof" + "sort" + "strings" + "sync" + + "golang.org/x/mod/modfile" + "golang.org/x/telemetry/counter" + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/debug" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/progress" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/vulncheck" + "golang.org/x/tools/gopls/internal/vulncheck/scan" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/tokeninternal" + "golang.org/x/tools/internal/xcontext" +) + +func (s *server) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCommandParams) (interface{}, error) { + ctx, done := event.Start(ctx, "lsp.Server.executeCommand") + defer done() + + var found bool + for _, name := range 
s.Options().SupportedCommands { + if name == params.Command { + found = true + break + } + } + if !found { + return nil, fmt.Errorf("%s is not a supported command", params.Command) + } + + handler := &commandHandler{ + s: s, + params: params, + } + return command.Dispatch(ctx, params, handler) +} + +type commandHandler struct { + s *server + params *protocol.ExecuteCommandParams +} + +func (h *commandHandler) MaybePromptForTelemetry(ctx context.Context) error { + go h.s.maybePromptForTelemetry(ctx, true) + return nil +} + +func (*commandHandler) AddTelemetryCounters(_ context.Context, args command.AddTelemetryCountersArgs) error { + if len(args.Names) != len(args.Values) { + return fmt.Errorf("Names and Values must have the same length") + } + // invalid counter update requests will be silently dropped. (no audience) + for i, n := range args.Names { + v := args.Values[i] + if n == "" || v < 0 { + continue + } + counter.Add("fwd/"+n, v) + } + return nil +} + +// commandConfig configures common command set-up and execution. +type commandConfig struct { + // TODO(adonovan): whether a command is synchronous or + // asynchronous is part of the server interface contract, not + // a mere implementation detail of the handler. + // Export a (command.Command).IsAsync() property so that + // clients can tell. (The tricky part is ensuring the handler + // remains consistent with the command.Command metadata, as at + // the point were we read the 'async' field below, we no + // longer know that command.Command.) + + async bool // whether to run the command asynchronously. Async commands can only return errors. + requireSave bool // whether all files must be saved for the command to work + progress string // title to use for progress reporting. If empty, no progress will be reported. + forView string // view to resolve to a snapshot; incompatible with forURI + forURI protocol.DocumentURI // URI to resolve to a snapshot. If unset, snapshot will be nil. 
+} + +// commandDeps is evaluated from a commandConfig. Note that not all fields may +// be populated, depending on which configuration is set. See comments in-line +// for details. +type commandDeps struct { + snapshot *cache.Snapshot // present if cfg.forURI was set + fh file.Handle // present if cfg.forURI was set + work *progress.WorkDone // present cfg.progress was set +} + +type commandFunc func(context.Context, commandDeps) error + +// These strings are reported as the final WorkDoneProgressEnd message +// for each workspace/executeCommand request. +const ( + CommandCanceled = "canceled" + CommandFailed = "failed" + CommandCompleted = "completed" +) + +// run performs command setup for command execution, and invokes the given run +// function. If cfg.async is set, run executes the given func in a separate +// goroutine, and returns as soon as setup is complete and the goroutine is +// scheduled. +// +// Invariant: if the resulting error is non-nil, the given run func will +// (eventually) be executed exactly once. 
+func (c *commandHandler) run(ctx context.Context, cfg commandConfig, run commandFunc) (err error) { + if cfg.requireSave { + var unsaved []string + for _, overlay := range c.s.session.Overlays() { + if !overlay.SameContentsOnDisk() { + unsaved = append(unsaved, overlay.URI().Path()) + } + } + if len(unsaved) > 0 { + return fmt.Errorf("All files must be saved first (unsaved: %v).", unsaved) + } + } + var deps commandDeps + var release func() + if cfg.forURI != "" && cfg.forView != "" { + return bug.Errorf("internal error: forURI=%q, forView=%q", cfg.forURI, cfg.forView) + } + if cfg.forURI != "" { + deps.fh, deps.snapshot, release, err = c.s.fileOf(ctx, cfg.forURI) + if err != nil { + return err + } + + } else if cfg.forView != "" { + view, err := c.s.session.View(cfg.forView) + if err != nil { + return err + } + deps.snapshot, release, err = view.Snapshot() + if err != nil { + return err + } + + } else { + release = func() {} + } + // Inv: release() must be called exactly once after this point. + // In the async case, runcmd may outlive run(). 
+ + ctx, cancel := context.WithCancel(xcontext.Detach(ctx)) + if cfg.progress != "" { + deps.work = c.s.progress.Start(ctx, cfg.progress, "Running...", c.params.WorkDoneToken, cancel) + } + runcmd := func() error { + defer release() + defer cancel() + err := run(ctx, deps) + if deps.work != nil { + switch { + case errors.Is(err, context.Canceled): + deps.work.End(ctx, CommandCanceled) + case err != nil: + event.Error(ctx, "command error", err) + deps.work.End(ctx, CommandFailed) + default: + deps.work.End(ctx, CommandCompleted) + } + } + return err + } + if cfg.async { + go func() { + if err := runcmd(); err != nil { + showMessage(ctx, c.s.client, protocol.Error, err.Error()) + } + }() + return nil + } + return runcmd() +} + +func (c *commandHandler) ApplyFix(ctx context.Context, args command.ApplyFixArgs) (*protocol.WorkspaceEdit, error) { + var result *protocol.WorkspaceEdit + err := c.run(ctx, commandConfig{ + // Note: no progress here. Applying fixes should be quick. + forURI: args.URI, + }, func(ctx context.Context, deps commandDeps) error { + edits, err := golang.ApplyFix(ctx, args.Fix, deps.snapshot, deps.fh, args.Range) + if err != nil { + return err + } + changes := []protocol.DocumentChanges{} // must be a slice + for _, edit := range edits { + edit := edit + changes = append(changes, protocol.DocumentChanges{ + TextDocumentEdit: &edit, + }) + } + edit := protocol.WorkspaceEdit{ + DocumentChanges: changes, + } + if args.ResolveEdits { + result = &edit + return nil + } + r, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ + Edit: edit, + }) + if err != nil { + return err + } + if !r.Applied { + return errors.New(r.FailureReason) + } + return nil + }) + return result, err +} + +func (c *commandHandler) RegenerateCgo(ctx context.Context, args command.URIArg) error { + return c.run(ctx, commandConfig{ + progress: "Regenerating Cgo", + }, func(ctx context.Context, _ commandDeps) error { + return c.modifyState(ctx, FromRegenerateCgo, func() 
(*cache.Snapshot, func(), error) { + // Resetting the view causes cgo to be regenerated via `go list`. + v, err := c.s.session.ResetView(ctx, args.URI) + if err != nil { + return nil, nil, err + } + return v.Snapshot() + }) + }) +} + +// modifyState performs an operation that modifies the snapshot state. +// +// It causes a snapshot diagnosis for the provided ModificationSource. +func (c *commandHandler) modifyState(ctx context.Context, source ModificationSource, work func() (*cache.Snapshot, func(), error)) error { + var wg sync.WaitGroup // tracks work done on behalf of this function, incl. diagnostics + wg.Add(1) + defer wg.Done() + + // Track progress on this operation for testing. + if c.s.Options().VerboseWorkDoneProgress { + work := c.s.progress.Start(ctx, DiagnosticWorkTitle(source), "Calculating file diagnostics...", nil, nil) + go func() { + wg.Wait() + work.End(ctx, "Done.") + }() + } + snapshot, release, err := work() + if err != nil { + return err + } + wg.Add(1) + go func() { + c.s.diagnoseSnapshot(snapshot, nil, 0) + release() + wg.Done() + }() + return nil +} + +func (c *commandHandler) CheckUpgrades(ctx context.Context, args command.CheckUpgradesArgs) error { + return c.run(ctx, commandConfig{ + forURI: args.URI, + progress: "Checking for upgrades", + }, func(ctx context.Context, deps commandDeps) error { + return c.modifyState(ctx, FromCheckUpgrades, func() (*cache.Snapshot, func(), error) { + upgrades, err := c.s.getUpgrades(ctx, deps.snapshot, args.URI, args.Modules) + if err != nil { + return nil, nil, err + } + return c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ + ModuleUpgrades: map[protocol.DocumentURI]map[string]string{args.URI: upgrades}, + }) + }) + }) +} + +func (c *commandHandler) AddDependency(ctx context.Context, args command.DependencyArgs) error { + return c.GoGetModule(ctx, args) +} + +func (c *commandHandler) UpgradeDependency(ctx context.Context, args command.DependencyArgs) error { + return 
c.GoGetModule(ctx, args) +} + +func (c *commandHandler) ResetGoModDiagnostics(ctx context.Context, args command.ResetGoModDiagnosticsArgs) error { + return c.run(ctx, commandConfig{ + forURI: args.URI, + }, func(ctx context.Context, deps commandDeps) error { + return c.modifyState(ctx, FromResetGoModDiagnostics, func() (*cache.Snapshot, func(), error) { + return c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ + ModuleUpgrades: map[protocol.DocumentURI]map[string]string{ + deps.fh.URI(): nil, + }, + Vulns: map[protocol.DocumentURI]*vulncheck.Result{ + deps.fh.URI(): nil, + }, + }) + }) + }) +} + +func (c *commandHandler) GoGetModule(ctx context.Context, args command.DependencyArgs) error { + return c.run(ctx, commandConfig{ + progress: "Running go get", + forURI: args.URI, + }, func(ctx context.Context, deps commandDeps) error { + return c.s.runGoModUpdateCommands(ctx, deps.snapshot, args.URI, func(invoke func(...string) (*bytes.Buffer, error)) error { + return runGoGetModule(invoke, args.AddRequire, args.GoCmdArgs) + }) + }) +} + +// TODO(rFindley): UpdateGoSum, Tidy, and Vendor could probably all be one command. 
+func (c *commandHandler) UpdateGoSum(ctx context.Context, args command.URIArgs) error { + return c.run(ctx, commandConfig{ + progress: "Updating go.sum", + }, func(ctx context.Context, _ commandDeps) error { + for _, uri := range args.URIs { + fh, snapshot, release, err := c.s.fileOf(ctx, uri) + if err != nil { + return err + } + defer release() + if err := c.s.runGoModUpdateCommands(ctx, snapshot, fh.URI(), func(invoke func(...string) (*bytes.Buffer, error)) error { + _, err := invoke("list", "all") + return err + }); err != nil { + return err + } + } + return nil + }) +} + +func (c *commandHandler) Tidy(ctx context.Context, args command.URIArgs) error { + return c.run(ctx, commandConfig{ + requireSave: true, + progress: "Running go mod tidy", + }, func(ctx context.Context, _ commandDeps) error { + for _, uri := range args.URIs { + fh, snapshot, release, err := c.s.fileOf(ctx, uri) + if err != nil { + return err + } + defer release() + if err := c.s.runGoModUpdateCommands(ctx, snapshot, fh.URI(), func(invoke func(...string) (*bytes.Buffer, error)) error { + _, err := invoke("mod", "tidy") + return err + }); err != nil { + return err + } + } + return nil + }) +} + +func (c *commandHandler) Vendor(ctx context.Context, args command.URIArg) error { + return c.run(ctx, commandConfig{ + requireSave: true, + progress: "Running go mod vendor", + forURI: args.URI, + }, func(ctx context.Context, deps commandDeps) error { + // Use RunGoCommandPiped here so that we don't compete with any other go + // command invocations. go mod vendor deletes modules.txt before recreating + // it, and therefore can run into file locking issues on Windows if that + // file is in use by another process, such as go list. + // + // If golang/go#44119 is resolved, go mod vendor will instead modify + // modules.txt in-place. In that case we could theoretically allow this + // command to run concurrently. 
+ stderr := new(bytes.Buffer) + err := deps.snapshot.RunGoCommandPiped(ctx, cache.Normal|cache.AllowNetwork, &gocommand.Invocation{ + Verb: "mod", + Args: []string{"vendor"}, + WorkingDir: filepath.Dir(args.URI.Path()), + }, &bytes.Buffer{}, stderr) + if err != nil { + return fmt.Errorf("running go mod vendor failed: %v\nstderr:\n%s", err, stderr.String()) + } + return nil + }) +} + +func (c *commandHandler) EditGoDirective(ctx context.Context, args command.EditGoDirectiveArgs) error { + return c.run(ctx, commandConfig{ + requireSave: true, // if go.mod isn't saved it could cause a problem + forURI: args.URI, + }, func(ctx context.Context, _ commandDeps) error { + fh, snapshot, release, err := c.s.fileOf(ctx, args.URI) + if err != nil { + return err + } + defer release() + if err := c.s.runGoModUpdateCommands(ctx, snapshot, fh.URI(), func(invoke func(...string) (*bytes.Buffer, error)) error { + _, err := invoke("mod", "edit", "-go", args.Version) + return err + }); err != nil { + return err + } + return nil + }) +} + +func (c *commandHandler) RemoveDependency(ctx context.Context, args command.RemoveDependencyArgs) error { + return c.run(ctx, commandConfig{ + progress: "Removing dependency", + forURI: args.URI, + }, func(ctx context.Context, deps commandDeps) error { + // See the documentation for OnlyDiagnostic. + // + // TODO(rfindley): In Go 1.17+, we will be able to use the go command + // without checking if the module is tidy. 
+ if args.OnlyDiagnostic { + return c.s.runGoModUpdateCommands(ctx, deps.snapshot, args.URI, func(invoke func(...string) (*bytes.Buffer, error)) error { + if err := runGoGetModule(invoke, false, []string{args.ModulePath + "@none"}); err != nil { + return err + } + _, err := invoke("mod", "tidy") + return err + }) + } + pm, err := deps.snapshot.ParseMod(ctx, deps.fh) + if err != nil { + return err + } + edits, err := dropDependency(pm, args.ModulePath) + if err != nil { + return err + } + response, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ + Edit: protocol.WorkspaceEdit{ + DocumentChanges: documentChanges(deps.fh, edits), + }, + }) + if err != nil { + return err + } + if !response.Applied { + return fmt.Errorf("edits not applied because of %s", response.FailureReason) + } + return nil + }) +} + +// dropDependency returns the edits to remove the given require from the go.mod +// file. +func dropDependency(pm *cache.ParsedModule, modulePath string) ([]protocol.TextEdit, error) { + // We need a private copy of the parsed go.mod file, since we're going to + // modify it. + copied, err := modfile.Parse("", pm.Mapper.Content, nil) + if err != nil { + return nil, err + } + if err := copied.DropRequire(modulePath); err != nil { + return nil, err + } + copied.Cleanup() + newContent, err := copied.Format() + if err != nil { + return nil, err + } + // Calculate the edits to be made due to the change. 
+ diff := diff.Bytes(pm.Mapper.Content, newContent) + return protocol.EditsFromDiffEdits(pm.Mapper, diff) +} + +func (c *commandHandler) Test(ctx context.Context, uri protocol.DocumentURI, tests, benchmarks []string) error { + return c.RunTests(ctx, command.RunTestsArgs{ + URI: uri, + Tests: tests, + Benchmarks: benchmarks, + }) +} + +func (c *commandHandler) Doc(ctx context.Context, loc protocol.Location) error { + return c.run(ctx, commandConfig{ + progress: "", // the operation should be fast + forURI: loc.URI, + }, func(ctx context.Context, deps commandDeps) error { + pkg, pgf, err := golang.NarrowestPackageForFile(ctx, deps.snapshot, loc.URI) + if err != nil { + return err + } + + // When invoked from a _test.go file, show the + // documentation of the package under test. + pkgpath := pkg.Metadata().PkgPath + if pkg.Metadata().ForTest != "" { + pkgpath = pkg.Metadata().ForTest + } + + // Start web server. + web, err := c.s.getWeb() + if err != nil { + return err + } + + // Compute fragment (e.g. "#Buffer.Len") based on + // enclosing top-level declaration, if exported. + var fragment string + pos, err := pgf.PositionPos(loc.Range.Start) + if err != nil { + return err + } + path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos) + if n := len(path); n > 1 { + switch decl := path[n-2].(type) { + case *ast.FuncDecl: + if decl.Name.IsExported() { + // e.g. "#Println" + fragment = decl.Name.Name + + // method? + if decl.Recv != nil && len(decl.Recv.List) > 0 { + recv := decl.Recv.List[0].Type + if star, ok := recv.(*ast.StarExpr); ok { + recv = star.X // *N -> N + } + if id, ok := recv.(*ast.Ident); ok && id.IsExported() { + // e.g. "#Buffer.Len" + fragment = id.Name + "." + fragment + } else { + fragment = "" + } + } + } + + case *ast.GenDecl: + // path=[... Spec? 
GenDecl File] + for _, spec := range decl.Specs { + if n > 2 && spec == path[n-3] { + var name *ast.Ident + switch spec := spec.(type) { + case *ast.ValueSpec: + // var, const: use first name + name = spec.Names[0] + case *ast.TypeSpec: + name = spec.Name + } + if name != nil && name.IsExported() { + fragment = name.Name + } + break + } + } + } + } + + // Direct the client to open the /pkg page. + url := web.pkgURL(deps.snapshot.View(), pkgpath, fragment) + openClientBrowser(ctx, c.s.client, url) + + return nil + }) +} + +func (c *commandHandler) RunTests(ctx context.Context, args command.RunTestsArgs) error { + return c.run(ctx, commandConfig{ + async: true, + progress: "Running go test", + requireSave: true, + forURI: args.URI, + }, func(ctx context.Context, deps commandDeps) error { + return c.runTests(ctx, deps.snapshot, deps.work, args.URI, args.Tests, args.Benchmarks) + }) +} + +func (c *commandHandler) runTests(ctx context.Context, snapshot *cache.Snapshot, work *progress.WorkDone, uri protocol.DocumentURI, tests, benchmarks []string) error { + // TODO: fix the error reporting when this runs async. + meta, err := golang.NarrowestMetadataForFile(ctx, snapshot, uri) + if err != nil { + return err + } + pkgPath := string(meta.ForTest) + + // create output + buf := &bytes.Buffer{} + ew := progress.NewEventWriter(ctx, "test") + out := io.MultiWriter(ew, progress.NewWorkDoneWriter(ctx, work), buf) + + // Run `go test -run Func` on each test. + var failedTests int + for _, funcName := range tests { + inv := &gocommand.Invocation{ + Verb: "test", + Args: []string{pkgPath, "-v", "-count=1", fmt.Sprintf("-run=^%s$", regexp.QuoteMeta(funcName))}, + WorkingDir: filepath.Dir(uri.Path()), + } + if err := snapshot.RunGoCommandPiped(ctx, cache.Normal, inv, out, out); err != nil { + if errors.Is(err, context.Canceled) { + return err + } + failedTests++ + } + } + + // Run `go test -run=^$ -bench Func` on each test. 
+ var failedBenchmarks int + for _, funcName := range benchmarks { + inv := &gocommand.Invocation{ + Verb: "test", + Args: []string{pkgPath, "-v", "-run=^$", fmt.Sprintf("-bench=^%s$", regexp.QuoteMeta(funcName))}, + WorkingDir: filepath.Dir(uri.Path()), + } + if err := snapshot.RunGoCommandPiped(ctx, cache.Normal, inv, out, out); err != nil { + if errors.Is(err, context.Canceled) { + return err + } + failedBenchmarks++ + } + } + + var title string + if len(tests) > 0 && len(benchmarks) > 0 { + title = "tests and benchmarks" + } else if len(tests) > 0 { + title = "tests" + } else if len(benchmarks) > 0 { + title = "benchmarks" + } else { + return errors.New("No functions were provided") + } + message := fmt.Sprintf("all %s passed", title) + if failedTests > 0 && failedBenchmarks > 0 { + message = fmt.Sprintf("%d / %d tests failed and %d / %d benchmarks failed", failedTests, len(tests), failedBenchmarks, len(benchmarks)) + } else if failedTests > 0 { + message = fmt.Sprintf("%d / %d tests failed", failedTests, len(tests)) + } else if failedBenchmarks > 0 { + message = fmt.Sprintf("%d / %d benchmarks failed", failedBenchmarks, len(benchmarks)) + } + if failedTests > 0 || failedBenchmarks > 0 { + message += "\n" + buf.String() + } + + showMessage(ctx, c.s.client, protocol.Info, message) + + if failedTests > 0 || failedBenchmarks > 0 { + return errors.New("gopls.test command failed") + } + return nil +} + +func (c *commandHandler) Generate(ctx context.Context, args command.GenerateArgs) error { + title := "Running go generate ." + if args.Recursive { + title = "Running go generate ./..." + } + return c.run(ctx, commandConfig{ + requireSave: true, + progress: title, + forURI: args.Dir, + }, func(ctx context.Context, deps commandDeps) error { + er := progress.NewEventWriter(ctx, "generate") + + pattern := "." + if args.Recursive { + pattern = "./..." 
+ } + inv := &gocommand.Invocation{ + Verb: "generate", + Args: []string{"-x", pattern}, + WorkingDir: args.Dir.Path(), + } + stderr := io.MultiWriter(er, progress.NewWorkDoneWriter(ctx, deps.work)) + if err := deps.snapshot.RunGoCommandPiped(ctx, cache.AllowNetwork, inv, er, stderr); err != nil { + return err + } + return nil + }) +} + +func (c *commandHandler) GoGetPackage(ctx context.Context, args command.GoGetPackageArgs) error { + return c.run(ctx, commandConfig{ + forURI: args.URI, + progress: "Running go get", + }, func(ctx context.Context, deps commandDeps) error { + // Run on a throwaway go.mod, otherwise it'll write to the real one. + stdout, err := deps.snapshot.RunGoCommandDirect(ctx, cache.WriteTemporaryModFile|cache.AllowNetwork, &gocommand.Invocation{ + Verb: "list", + Args: []string{"-f", "{{.Module.Path}}@{{.Module.Version}}", args.Pkg}, + WorkingDir: filepath.Dir(args.URI.Path()), + }) + if err != nil { + return err + } + ver := strings.TrimSpace(stdout.String()) + return c.s.runGoModUpdateCommands(ctx, deps.snapshot, args.URI, func(invoke func(...string) (*bytes.Buffer, error)) error { + if args.AddRequire { + if err := addModuleRequire(invoke, []string{ver}); err != nil { + return err + } + } + _, err := invoke(append([]string{"get", "-d"}, args.Pkg)...) 
+ return err + }) + }) +} + +func (s *server) runGoModUpdateCommands(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, run func(invoke func(...string) (*bytes.Buffer, error)) error) error { + newModBytes, newSumBytes, err := snapshot.RunGoModUpdateCommands(ctx, filepath.Dir(uri.Path()), run) + if err != nil { + return err + } + modURI := snapshot.GoModForFile(uri) + sumURI := protocol.URIFromPath(strings.TrimSuffix(modURI.Path(), ".mod") + ".sum") + modEdits, err := collectFileEdits(ctx, snapshot, modURI, newModBytes) + if err != nil { + return err + } + sumEdits, err := collectFileEdits(ctx, snapshot, sumURI, newSumBytes) + if err != nil { + return err + } + return applyFileEdits(ctx, s.client, append(sumEdits, modEdits...)) +} + +// collectFileEdits collects any file edits required to transform the snapshot +// file specified by uri to the provided new content. +// +// If the file is not open, collectFileEdits simply writes the new content to +// disk. +// +// TODO(rfindley): fix this API asymmetry. It should be up to the caller to +// write the file or apply the edits. +func collectFileEdits(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, newContent []byte) ([]protocol.TextDocumentEdit, error) { + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + oldContent, err := fh.Content() + if err != nil && !os.IsNotExist(err) { + return nil, err + } + + if bytes.Equal(oldContent, newContent) { + return nil, nil + } + + // Sending a workspace edit to a closed file causes VS Code to open the + // file and leave it unsaved. We would rather apply the changes directly, + // especially to go.sum, which should be mostly invisible to the user. 
+ if !snapshot.IsOpen(uri) { + err := os.WriteFile(uri.Path(), newContent, 0666) + return nil, err + } + + m := protocol.NewMapper(fh.URI(), oldContent) + diff := diff.Bytes(oldContent, newContent) + edits, err := protocol.EditsFromDiffEdits(m, diff) + if err != nil { + return nil, err + } + return []protocol.TextDocumentEdit{{ + TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{ + Version: fh.Version(), + TextDocumentIdentifier: protocol.TextDocumentIdentifier{ + URI: uri, + }, + }, + Edits: protocol.AsAnnotatedTextEdits(edits), + }}, nil +} + +func applyFileEdits(ctx context.Context, cli protocol.Client, edits []protocol.TextDocumentEdit) error { + if len(edits) == 0 { + return nil + } + response, err := cli.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ + Edit: protocol.WorkspaceEdit{ + DocumentChanges: protocol.TextDocumentEditsToDocumentChanges(edits), + }, + }) + if err != nil { + return err + } + if !response.Applied { + return fmt.Errorf("edits not applied because of %s", response.FailureReason) + } + return nil +} + +func runGoGetModule(invoke func(...string) (*bytes.Buffer, error), addRequire bool, args []string) error { + if addRequire { + if err := addModuleRequire(invoke, args); err != nil { + return err + } + } + _, err := invoke(append([]string{"get", "-d"}, args...)...) + return err +} + +func addModuleRequire(invoke func(...string) (*bytes.Buffer, error), args []string) error { + // Using go get to create a new dependency results in an + // `// indirect` comment we may not want. The only way to avoid it + // is to add the require as direct first. Then we can use go get to + // update go.sum and tidy up. + _, err := invoke(append([]string{"mod", "edit", "-require"}, args...)...) + return err +} + +// TODO(rfindley): inline. 
+func (s *server) getUpgrades(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, modules []string) (map[string]string, error) { + stdout, err := snapshot.RunGoCommandDirect(ctx, cache.Normal|cache.AllowNetwork, &gocommand.Invocation{ + Verb: "list", + Args: append([]string{"-m", "-u", "-json"}, modules...), + ModFlag: "readonly", // necessary when vendor is present (golang/go#66055) + WorkingDir: filepath.Dir(uri.Path()), + }) + if err != nil { + return nil, err + } + + upgrades := map[string]string{} + for dec := json.NewDecoder(stdout); dec.More(); { + mod := &gocommand.ModuleJSON{} + if err := dec.Decode(mod); err != nil { + return nil, err + } + if mod.Update == nil { + continue + } + upgrades[mod.Path] = mod.Update.Version + } + return upgrades, nil +} + +func (c *commandHandler) GCDetails(ctx context.Context, uri protocol.DocumentURI) error { + return c.ToggleGCDetails(ctx, command.URIArg{URI: uri}) +} + +func (c *commandHandler) ToggleGCDetails(ctx context.Context, args command.URIArg) error { + return c.run(ctx, commandConfig{ + requireSave: true, + progress: "Toggling GC Details", + forURI: args.URI, + }, func(ctx context.Context, deps commandDeps) error { + return c.modifyState(ctx, FromToggleGCDetails, func() (*cache.Snapshot, func(), error) { + meta, err := golang.NarrowestMetadataForFile(ctx, deps.snapshot, deps.fh.URI()) + if err != nil { + return nil, nil, err + } + wantDetails := !deps.snapshot.WantGCDetails(meta.ID) // toggle the gc details state + return c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ + GCDetails: map[metadata.PackageID]bool{ + meta.ID: wantDetails, + }, + }) + }) + }) +} + +func (c *commandHandler) ListKnownPackages(ctx context.Context, args command.URIArg) (command.ListKnownPackagesResult, error) { + var result command.ListKnownPackagesResult + err := c.run(ctx, commandConfig{ + progress: "Listing packages", + forURI: args.URI, + }, func(ctx context.Context, deps commandDeps) error 
{ + pkgs, err := golang.KnownPackagePaths(ctx, deps.snapshot, deps.fh) + for _, pkg := range pkgs { + result.Packages = append(result.Packages, string(pkg)) + } + return err + }) + return result, err +} + +func (c *commandHandler) ListImports(ctx context.Context, args command.URIArg) (command.ListImportsResult, error) { + var result command.ListImportsResult + err := c.run(ctx, commandConfig{ + forURI: args.URI, + }, func(ctx context.Context, deps commandDeps) error { + fh, err := deps.snapshot.ReadFile(ctx, args.URI) + if err != nil { + return err + } + pgf, err := deps.snapshot.ParseGo(ctx, fh, parsego.Header) + if err != nil { + return err + } + fset := tokeninternal.FileSetFor(pgf.Tok) + for _, group := range astutil.Imports(fset, pgf.File) { + for _, imp := range group { + if imp.Path == nil { + continue + } + var name string + if imp.Name != nil { + name = imp.Name.Name + } + result.Imports = append(result.Imports, command.FileImport{ + Path: string(metadata.UnquoteImportPath(imp)), + Name: name, + }) + } + } + meta, err := golang.NarrowestMetadataForFile(ctx, deps.snapshot, args.URI) + if err != nil { + return err // e.g. 
cancelled + } + for pkgPath := range meta.DepsByPkgPath { + result.PackageImports = append(result.PackageImports, + command.PackageImport{Path: string(pkgPath)}) + } + sort.Slice(result.PackageImports, func(i, j int) bool { + return result.PackageImports[i].Path < result.PackageImports[j].Path + }) + return nil + }) + return result, err +} + +func (c *commandHandler) AddImport(ctx context.Context, args command.AddImportArgs) error { + return c.run(ctx, commandConfig{ + progress: "Adding import", + forURI: args.URI, + }, func(ctx context.Context, deps commandDeps) error { + edits, err := golang.AddImport(ctx, deps.snapshot, deps.fh, args.ImportPath) + if err != nil { + return fmt.Errorf("could not add import: %v", err) + } + if _, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ + Edit: protocol.WorkspaceEdit{ + DocumentChanges: documentChanges(deps.fh, edits), + }, + }); err != nil { + return fmt.Errorf("could not apply import edits: %v", err) + } + return nil + }) +} + +func (c *commandHandler) StartDebugging(ctx context.Context, args command.DebuggingArgs) (result command.DebuggingResult, _ error) { + addr := args.Addr + if addr == "" { + addr = "localhost:0" + } + di := debug.GetInstance(ctx) + if di == nil { + return result, errors.New("internal error: server has no debugging instance") + } + listenedAddr, err := di.Serve(ctx, addr) + if err != nil { + return result, fmt.Errorf("starting debug server: %w", err) + } + result.URLs = []string{"http://" + listenedAddr} + openClientBrowser(ctx, c.s.client, result.URLs[0]) + return result, nil +} + +func (c *commandHandler) StartProfile(ctx context.Context, args command.StartProfileArgs) (result command.StartProfileResult, _ error) { + file, err := os.CreateTemp("", "gopls-profile-*") + if err != nil { + return result, fmt.Errorf("creating temp profile file: %v", err) + } + + c.s.ongoingProfileMu.Lock() + defer c.s.ongoingProfileMu.Unlock() + + if c.s.ongoingProfile != nil { + file.Close() // 
ignore error + return result, fmt.Errorf("profile already started (for %q)", c.s.ongoingProfile.Name()) + } + + if err := pprof.StartCPUProfile(file); err != nil { + file.Close() // ignore error + return result, fmt.Errorf("starting profile: %v", err) + } + + c.s.ongoingProfile = file + return result, nil +} + +func (c *commandHandler) StopProfile(ctx context.Context, args command.StopProfileArgs) (result command.StopProfileResult, _ error) { + c.s.ongoingProfileMu.Lock() + defer c.s.ongoingProfileMu.Unlock() + + prof := c.s.ongoingProfile + c.s.ongoingProfile = nil + + if prof == nil { + return result, fmt.Errorf("no ongoing profile") + } + + pprof.StopCPUProfile() + if err := prof.Close(); err != nil { + return result, fmt.Errorf("closing profile file: %v", err) + } + result.File = prof.Name() + return result, nil +} + +func (c *commandHandler) FetchVulncheckResult(ctx context.Context, arg command.URIArg) (map[protocol.DocumentURI]*vulncheck.Result, error) { + ret := map[protocol.DocumentURI]*vulncheck.Result{} + err := c.run(ctx, commandConfig{forURI: arg.URI}, func(ctx context.Context, deps commandDeps) error { + if deps.snapshot.Options().Vulncheck == settings.ModeVulncheckImports { + for _, modfile := range deps.snapshot.View().ModFiles() { + res, err := deps.snapshot.ModVuln(ctx, modfile) + if err != nil { + return err + } + ret[modfile] = res + } + } + // Overwrite if there is any govulncheck-based result. + for modfile, result := range deps.snapshot.Vulnerabilities() { + ret[modfile] = result + } + return nil + }) + return ret, err +} + +func (c *commandHandler) RunGovulncheck(ctx context.Context, args command.VulncheckArgs) (command.RunVulncheckResult, error) { + if args.URI == "" { + return command.RunVulncheckResult{}, errors.New("VulncheckArgs is missing URI field") + } + + // Return the workdone token so that clients can identify when this + // vulncheck invocation is complete. 
+ // + // Since the run function executes asynchronously, we use a channel to + // synchronize the start of the run and return the token. + tokenChan := make(chan protocol.ProgressToken, 1) + err := c.run(ctx, commandConfig{ + async: true, // need to be async to be cancellable + progress: "govulncheck", + requireSave: true, + forURI: args.URI, + }, func(ctx context.Context, deps commandDeps) error { + tokenChan <- deps.work.Token() + + workDoneWriter := progress.NewWorkDoneWriter(ctx, deps.work) + dir := filepath.Dir(args.URI.Path()) + pattern := args.Pattern + + result, err := scan.RunGovulncheck(ctx, pattern, deps.snapshot, dir, workDoneWriter) + if err != nil { + return err + } + + snapshot, release, err := c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ + Vulns: map[protocol.DocumentURI]*vulncheck.Result{args.URI: result}, + }) + if err != nil { + return err + } + defer release() + c.s.diagnoseSnapshot(snapshot, nil, 0) + + affecting := make(map[string]bool, len(result.Entries)) + for _, finding := range result.Findings { + if len(finding.Trace) > 1 { // at least 2 frames if callstack exists (vulnerability, entry) + affecting[finding.OSV] = true + } + } + if len(affecting) == 0 { + showMessage(ctx, c.s.client, protocol.Info, "No vulnerabilities found") + return nil + } + affectingOSVs := make([]string, 0, len(affecting)) + for id := range affecting { + affectingOSVs = append(affectingOSVs, id) + } + sort.Strings(affectingOSVs) + + showMessage(ctx, c.s.client, protocol.Warning, fmt.Sprintf("Found %v", strings.Join(affectingOSVs, ", "))) + + return nil + }) + if err != nil { + return command.RunVulncheckResult{}, err + } + select { + case <-ctx.Done(): + return command.RunVulncheckResult{}, ctx.Err() + case token := <-tokenChan: + return command.RunVulncheckResult{Token: token}, nil + } +} + +// MemStats implements the MemStats command. It returns an error as a +// future-proof API, but the resulting error is currently always nil. 
+func (c *commandHandler) MemStats(ctx context.Context) (command.MemStatsResult, error) { + // GC a few times for stable results. + runtime.GC() + runtime.GC() + runtime.GC() + var m runtime.MemStats + runtime.ReadMemStats(&m) + return command.MemStatsResult{ + HeapAlloc: m.HeapAlloc, + HeapInUse: m.HeapInuse, + TotalAlloc: m.TotalAlloc, + }, nil +} + +// WorkspaceStats implements the WorkspaceStats command, reporting information +// about the current state of the loaded workspace for the current session. +func (c *commandHandler) WorkspaceStats(ctx context.Context) (command.WorkspaceStatsResult, error) { + var res command.WorkspaceStatsResult + res.Files = c.s.session.Cache().FileStats() + + for _, view := range c.s.session.Views() { + vs, err := collectViewStats(ctx, view) + if err != nil { + return res, err + } + res.Views = append(res.Views, vs) + } + return res, nil +} + +func collectViewStats(ctx context.Context, view *cache.View) (command.ViewStats, error) { + s, release, err := view.Snapshot() + if err != nil { + return command.ViewStats{}, err + } + defer release() + + allMD, err := s.AllMetadata(ctx) + if err != nil { + return command.ViewStats{}, err + } + allPackages := collectPackageStats(allMD) + + wsMD, err := s.WorkspaceMetadata(ctx) + if err != nil { + return command.ViewStats{}, err + } + workspacePackages := collectPackageStats(wsMD) + + var ids []golang.PackageID + for _, mp := range wsMD { + ids = append(ids, mp.ID) + } + + diags, err := s.PackageDiagnostics(ctx, ids...) 
+ if err != nil { + return command.ViewStats{}, err + } + + ndiags := 0 + for _, d := range diags { + ndiags += len(d) + } + + return command.ViewStats{ + GoCommandVersion: view.GoVersionString(), + AllPackages: allPackages, + WorkspacePackages: workspacePackages, + Diagnostics: ndiags, + }, nil +} + +func collectPackageStats(mps []*metadata.Package) command.PackageStats { + var stats command.PackageStats + stats.Packages = len(mps) + modules := make(map[string]bool) + + for _, mp := range mps { + n := len(mp.CompiledGoFiles) + stats.CompiledGoFiles += n + if n > stats.LargestPackage { + stats.LargestPackage = n + } + if mp.Module != nil { + modules[mp.Module.Path] = true + } + } + stats.Modules = len(modules) + + return stats +} + +// RunGoWorkCommand invokes `go work <args>` with the provided arguments. +// +// args.InitFirst controls whether to first run `go work init`. This allows a +// single command to both create and recursively populate a go.work file -- as +// of writing there is no `go work init -r`. +// +// Some thought went into implementing this command. Unlike the go.mod commands +// above, this command simply invokes the go command and relies on the client +// to notify gopls of file changes via didChangeWatchedFile notifications. +// We could instead run these commands with GOWORK set to a temp file, but that +// poses the following problems: +// - directory locations in the resulting temp go.work file will be computed +// relative to the directory containing that go.work. If the go.work is in a +// tempdir, the directories will need to be translated to/from that dir. +// - it would be simpler to use a temp go.work file in the workspace +// directory, or whichever directory contains the real go.work file, but +// that sets a bad precedent of writing to a user-owned directory. We +// shouldn't start doing that. 
+// - Sending workspace edits to create a go.work file would require using +// the CreateFile resource operation, which would need to be tested in every +// client as we haven't used it before. We don't have time for that right +// now. +// +// Therefore, we simply require that the current go.work file is saved (if it +// exists), and delegate to the go command. +func (c *commandHandler) RunGoWorkCommand(ctx context.Context, args command.RunGoWorkArgs) error { + return c.run(ctx, commandConfig{ + progress: "Running go work command", + forView: args.ViewID, + }, func(ctx context.Context, deps commandDeps) (runErr error) { + snapshot := deps.snapshot + view := snapshot.View() + viewDir := snapshot.Folder().Path() + + if view.Type() != cache.GoWorkView && view.GoWork() != "" { + // If we are not using an existing go.work file, GOWORK must be explicitly off. + // TODO(rfindley): what about GO111MODULE=off? + return fmt.Errorf("cannot modify go.work files when GOWORK=off") + } + + var gowork string + // If the user has explicitly set GOWORK=off, we should warn them + // explicitly and avoid potentially misleading errors below. + if view.GoWork() != "" { + gowork = view.GoWork().Path() + fh, err := snapshot.ReadFile(ctx, view.GoWork()) + if err != nil { + return err // e.g. canceled + } + if !fh.SameContentsOnDisk() { + return fmt.Errorf("must save workspace file %s before running go work commands", view.GoWork()) + } + } else { + if !args.InitFirst { + // If go.work does not exist, we should have detected that and asked + // for InitFirst. 
+ return bug.Errorf("internal error: cannot run go work command: required go.work file not found") + } + gowork = filepath.Join(viewDir, "go.work") + if err := c.invokeGoWork(ctx, viewDir, gowork, []string{"init"}); err != nil { + return fmt.Errorf("running `go work init`: %v", err) + } + } + + return c.invokeGoWork(ctx, viewDir, gowork, args.Args) + }) +} + +func (c *commandHandler) invokeGoWork(ctx context.Context, viewDir, gowork string, args []string) error { + inv := gocommand.Invocation{ + Verb: "work", + Args: args, + WorkingDir: viewDir, + Env: append(os.Environ(), fmt.Sprintf("GOWORK=%s", gowork)), + } + if _, err := c.s.session.GoCommandRunner().Run(ctx, inv); err != nil { + return fmt.Errorf("running go work command: %v", err) + } + return nil +} + +// showMessage causes the client to show a progress or error message. +// +// It reports whether it succeeded. If it fails, it writes an error to +// the server log, so most callers can safely ignore the result. +func showMessage(ctx context.Context, cli protocol.Client, typ protocol.MessageType, message string) bool { + err := cli.ShowMessage(ctx, &protocol.ShowMessageParams{ + Type: typ, + Message: message, + }) + if err != nil { + event.Error(ctx, "client.showMessage: %v", err) + return false + } + return true +} + +// openClientBrowser causes the LSP client to open the specified URL +// in an external browser. +func openClientBrowser(ctx context.Context, cli protocol.Client, url protocol.URI) { + showDocumentImpl(ctx, cli, url, nil) +} + +// openClientEditor causes the LSP client to open the specified document +// and select the indicated range. 
+// +// Note that VS Code 1.87.2 doesn't currently raise the window; this is +// https://github.com/microsoft/vscode/issues/207634 +func openClientEditor(ctx context.Context, cli protocol.Client, loc protocol.Location) { + showDocumentImpl(ctx, cli, protocol.URI(loc.URI), &loc.Range) +} + +func showDocumentImpl(ctx context.Context, cli protocol.Client, url protocol.URI, rangeOpt *protocol.Range) { + // In principle we shouldn't send a showDocument request to a + // client that doesn't support it, as reported by + // ShowDocumentClientCapabilities. But even clients that do + // support it may defer the real work of opening the document + // asynchronously, to avoid deadlocks due to reentrancy. + // + // For example: client sends request to server; server sends + // showDocument to client; client opens editor; editor causes + // new RPC to be sent to server, which is still busy with + // previous request. (This happens in eglot.) + // + // So we can't rely on the success/failure information. + // That's the reason this function doesn't return an error. + + // "External" means run the system-wide handler (e.g. open(1) + // on macOS or xdg-open(1) on Linux) for this URL, ignoring + // TakeFocus and Selection. Note that this may still end up + // opening the same editor (e.g. VSCode) for a file: URL. 
+ res, err := cli.ShowDocument(ctx, &protocol.ShowDocumentParams{ + URI: url, + External: rangeOpt == nil, + TakeFocus: true, + Selection: rangeOpt, // optional + }) + if err != nil { + event.Error(ctx, "client.showDocument: %v", err) + } else if res != nil && !res.Success { + event.Log(ctx, fmt.Sprintf("client declined to open document %v", url)) + } +} + +func (c *commandHandler) ChangeSignature(ctx context.Context, args command.ChangeSignatureArgs) (*protocol.WorkspaceEdit, error) { + var result *protocol.WorkspaceEdit + err := c.run(ctx, commandConfig{ + forURI: args.RemoveParameter.URI, + }, func(ctx context.Context, deps commandDeps) error { + // For now, gopls only supports removing unused parameters. + changes, err := golang.RemoveUnusedParameter(ctx, deps.fh, args.RemoveParameter.Range, deps.snapshot) + if err != nil { + return err + } + edit := protocol.WorkspaceEdit{ + DocumentChanges: changes, + } + if args.ResolveEdits { + result = &edit + return nil + } + r, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ + Edit: edit, + }) + if !r.Applied { + return fmt.Errorf("failed to apply edits: %v", r.FailureReason) + } + + return nil + }) + return result, err +} + +func (c *commandHandler) DiagnoseFiles(ctx context.Context, args command.DiagnoseFilesArgs) error { + return c.run(ctx, commandConfig{ + progress: "Diagnose files", + }, func(ctx context.Context, _ commandDeps) error { + + // TODO(rfindley): even better would be textDocument/diagnostics (golang/go#60122). + // Though note that implementing pull diagnostics may cause some servers to + // request diagnostics in an ad-hoc manner, and break our intentional pacing. 
+ + ctx, done := event.Start(ctx, "lsp.server.DiagnoseFiles") + defer done() + + snapshots := make(map[*cache.Snapshot]bool) + for _, uri := range args.Files { + fh, snapshot, release, err := c.s.fileOf(ctx, uri) + if err != nil { + return err + } + if snapshots[snapshot] || snapshot.FileKind(fh) != file.Go { + release() + continue + } + defer release() + snapshots[snapshot] = true + } + + var wg sync.WaitGroup + for snapshot := range snapshots { + snapshot := snapshot + wg.Add(1) + go func() { + defer wg.Done() + c.s.diagnoseSnapshot(snapshot, nil, 0) + }() + } + wg.Wait() + + return nil + }) +} + +func (c *commandHandler) Views(ctx context.Context) ([]command.View, error) { + var summaries []command.View + for _, view := range c.s.session.Views() { + summaries = append(summaries, command.View{ + Type: view.Type().String(), + Root: view.Root(), + Folder: view.Folder().Dir, + EnvOverlay: view.EnvOverlay(), + }) + } + return summaries, nil +} diff --git a/gopls/internal/server/completion.go b/gopls/internal/server/completion.go new file mode 100644 index 00000000000..54297397644 --- /dev/null +++ b/gopls/internal/server/completion.go @@ -0,0 +1,171 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package server + +import ( + "context" + "fmt" + "strings" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/golang/completion" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/telemetry" + "golang.org/x/tools/gopls/internal/template" + "golang.org/x/tools/gopls/internal/work" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) Completion(ctx context.Context, params *protocol.CompletionParams) (_ *protocol.CompletionList, rerr error) { + recordLatency := telemetry.StartLatencyTimer("completion") + defer func() { + recordLatency(ctx, rerr) + }() + + ctx, done := event.Start(ctx, "lsp.Server.completion", tag.URI.Of(params.TextDocument.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + + var candidates []completion.CompletionItem + var surrounding *completion.Selection + switch snapshot.FileKind(fh) { + case file.Go: + candidates, surrounding, err = completion.Completion(ctx, snapshot, fh, params.Position, params.Context) + case file.Mod: + candidates, surrounding = nil, nil + case file.Work: + cl, err := work.Completion(ctx, snapshot, fh, params.Position) + if err != nil { + break + } + return cl, nil + case file.Tmpl: + var cl *protocol.CompletionList + cl, err = template.Completion(ctx, snapshot, fh, params.Position, params.Context) + if err != nil { + break // use common error handling, candidates==nil + } + return cl, nil + } + if err != nil { + event.Error(ctx, "no completions found", err, tag.Position.Of(params.Position)) + } + if candidates == nil { + complEmpty.Inc() + return &protocol.CompletionList{ + IsIncomplete: true, + Items: []protocol.CompletionItem{}, + }, nil + } + + rng, err := surrounding.Range() + if err != nil { + return nil, err + } + 
+ // When using deep completions/fuzzy matching, report results as incomplete so + // client fetches updated completions after every key stroke. + options := snapshot.Options() + incompleteResults := options.DeepCompletion || options.Matcher == settings.Fuzzy + + items := toProtocolCompletionItems(candidates, rng, options) + if snapshot.FileKind(fh) == file.Go { + s.saveLastCompletion(fh.URI(), fh.Version(), items, params.Position) + } + + if len(items) > 10 { + // TODO(pjw): long completions are ok for field lists + complLong.Inc() + } else { + complShort.Inc() + } + return &protocol.CompletionList{ + IsIncomplete: incompleteResults, + Items: items, + }, nil +} + +func (s *server) saveLastCompletion(uri protocol.DocumentURI, version int32, items []protocol.CompletionItem, pos protocol.Position) { + s.efficacyMu.Lock() + defer s.efficacyMu.Unlock() + s.efficacyVersion = version + s.efficacyURI = uri + s.efficacyPos = pos + s.efficacyItems = items +} + +func toProtocolCompletionItems(candidates []completion.CompletionItem, rng protocol.Range, options *settings.Options) []protocol.CompletionItem { + var ( + items = make([]protocol.CompletionItem, 0, len(candidates)) + numDeepCompletionsSeen int + ) + for i, candidate := range candidates { + // Limit the number of deep completions to not overwhelm the user in cases + // with dozens of deep completion matches. + if candidate.Depth > 0 { + if !options.DeepCompletion { + continue + } + if numDeepCompletionsSeen >= completion.MaxDeepCompletions { + continue + } + numDeepCompletionsSeen++ + } + insertText := candidate.InsertText + if options.InsertTextFormat == protocol.SnippetTextFormat { + insertText = candidate.Snippet() + } + + // This can happen if the client has snippets disabled but the + // candidate only supports snippet insertion. 
+ if insertText == "" { + continue + } + + doc := &protocol.Or_CompletionItem_documentation{ + Value: protocol.MarkupContent{ + Kind: protocol.Markdown, + Value: golang.CommentToMarkdown(candidate.Documentation, options), + }, + } + if options.PreferredContentFormat != protocol.Markdown { + doc.Value = candidate.Documentation + } + item := protocol.CompletionItem{ + Label: candidate.Label, + Detail: candidate.Detail, + Kind: candidate.Kind, + TextEdit: &protocol.TextEdit{ + NewText: insertText, + Range: rng, + }, + InsertTextFormat: &options.InsertTextFormat, + AdditionalTextEdits: candidate.AdditionalTextEdits, + // This is a hack so that the client sorts completion results in the order + // according to their score. This can be removed upon the resolution of + // https://github.com/Microsoft/language-server-protocol/issues/348. + SortText: fmt.Sprintf("%05d", i), + + // Trim operators (VSCode doesn't like weird characters in + // filterText). + FilterText: strings.TrimLeft(candidate.InsertText, "&*"), + + Preselect: i == 0, + Documentation: doc, + Tags: protocol.NonNilSlice(candidate.Tags), + Deprecated: candidate.Deprecated, + } + items = append(items, item) + } + return items +} diff --git a/gopls/internal/server/counters.go b/gopls/internal/server/counters.go new file mode 100644 index 00000000000..dc403faa694 --- /dev/null +++ b/gopls/internal/server/counters.go @@ -0,0 +1,28 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +import "golang.org/x/telemetry/counter" + +// Proposed counters for evaluating gopls code completion. 
+var ( + complEmpty = counter.New("gopls/completion/len:0") // count empty suggestions + complShort = counter.New("gopls/completion/len:<=10") // not empty, not long + complLong = counter.New("gopls/completion/len:>10") // returning more than 10 items + + changeFull = counter.New("gopls/completion/used:unknown") // full file change in didChange + complUnused = counter.New("gopls/completion/used:no") // did not use a completion + complUsed = counter.New("gopls/completion/used:yes") // used a completion + + // exported so tests can verify that counters are incremented + CompletionCounters = []*counter.Counter{ + complEmpty, + complShort, + complLong, + changeFull, + complUnused, + complUsed, + } +) diff --git a/gopls/internal/server/definition.go b/gopls/internal/server/definition.go new file mode 100644 index 00000000000..7a0eb25679b --- /dev/null +++ b/gopls/internal/server/definition.go @@ -0,0 +1,61 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +import ( + "context" + "fmt" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/telemetry" + "golang.org/x/tools/gopls/internal/template" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) Definition(ctx context.Context, params *protocol.DefinitionParams) (_ []protocol.Location, rerr error) { + recordLatency := telemetry.StartLatencyTimer("definition") + defer func() { + recordLatency(ctx, rerr) + }() + + ctx, done := event.Start(ctx, "lsp.Server.definition", tag.URI.Of(params.TextDocument.URI)) + defer done() + + // TODO(rfindley): definition requests should be multiplexed across all views. 
+ fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + switch kind := snapshot.FileKind(fh); kind { + case file.Tmpl: + return template.Definition(snapshot, fh, params.Position) + case file.Go: + return golang.Definition(ctx, snapshot, fh, params.Position) + default: + return nil, fmt.Errorf("can't find definitions for file type %s", kind) + } +} + +func (s *server) TypeDefinition(ctx context.Context, params *protocol.TypeDefinitionParams) ([]protocol.Location, error) { + ctx, done := event.Start(ctx, "lsp.Server.typeDefinition", tag.URI.Of(params.TextDocument.URI)) + defer done() + + // TODO(rfindley): type definition requests should be multiplexed across all views. + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + switch kind := snapshot.FileKind(fh); kind { + case file.Go: + return golang.TypeDefinition(ctx, snapshot, fh, params.Position) + default: + return nil, fmt.Errorf("can't find type definitions for file type %s", kind) + } +} diff --git a/gopls/internal/server/diagnostics.go b/gopls/internal/server/diagnostics.go new file mode 100644 index 00000000000..d5808d42f30 --- /dev/null +++ b/gopls/internal/server/diagnostics.go @@ -0,0 +1,1000 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package server + +import ( + "context" + "crypto/sha256" + "errors" + "fmt" + "os" + "path/filepath" + "runtime" + "sort" + "strings" + "sync" + "time" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/mod" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/template" + "golang.org/x/tools/gopls/internal/util/maps" + "golang.org/x/tools/gopls/internal/work" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/keys" + "golang.org/x/tools/internal/event/tag" +) + +// fileDiagnostics holds the current state of published diagnostics for a file. +type fileDiagnostics struct { + publishedHash file.Hash // hash of the last set of diagnostics published for this URI + mustPublish bool // if set, publish diagnostics even if they haven't changed + + // Orphaned file diagnostics are not necessarily associated with any *View + // (since they are orphaned). Instead, keep track of the modification ID at + // which they were orphaned (see server.lastModificationID). + orphanedAt uint64 // modification ID at which this file was orphaned. + orphanedFileDiagnostics []*cache.Diagnostic + + // Files may have their diagnostics computed by multiple views, and so + // diagnostics are organized by View. See the documentation for update for more + // details about how the set of file diagnostics evolves over time. + byView map[*cache.View]viewDiagnostics +} + +// viewDiagnostics holds a set of file diagnostics computed from a given View. 
+type viewDiagnostics struct { + snapshot uint64 // snapshot sequence ID + version int32 // file version + diagnostics []*cache.Diagnostic +} + +// common types; for brevity +type ( + viewSet = map[*cache.View]unit + diagMap = map[protocol.DocumentURI][]*cache.Diagnostic +) + +// hashDiagnostic computes a hash to identify a diagnostic. +// The hash is for deduplicating within a file, +// so it need not incorporate d.URI. +func hashDiagnostic(d *cache.Diagnostic) file.Hash { + h := sha256.New() + for _, t := range d.Tags { + fmt.Fprintf(h, "tag: %s\n", t) + } + for _, r := range d.Related { + fmt.Fprintf(h, "related: %s %s %s\n", r.Location.URI, r.Message, r.Location.Range) + } + fmt.Fprintf(h, "code: %s\n", d.Code) + fmt.Fprintf(h, "codeHref: %s\n", d.CodeHref) + fmt.Fprintf(h, "message: %s\n", d.Message) + fmt.Fprintf(h, "range: %s\n", d.Range) + fmt.Fprintf(h, "severity: %s\n", d.Severity) + fmt.Fprintf(h, "source: %s\n", d.Source) + if d.BundledFixes != nil { + fmt.Fprintf(h, "fixes: %s\n", *d.BundledFixes) + } + var hash [sha256.Size]byte + h.Sum(hash[:0]) + return hash +} + +func sortDiagnostics(d []*cache.Diagnostic) { + sort.Slice(d, func(i int, j int) bool { + a, b := d[i], d[j] + if r := protocol.CompareRange(a.Range, b.Range); r != 0 { + return r < 0 + } + if a.Source != b.Source { + return a.Source < b.Source + } + return a.Message < b.Message + }) +} + +func (s *server) diagnoseChangedViews(ctx context.Context, modID uint64, lastChange map[*cache.View][]protocol.DocumentURI, cause ModificationSource) { + // Collect views needing diagnosis. + s.modificationMu.Lock() + needsDiagnosis := maps.Keys(s.viewsToDiagnose) + s.modificationMu.Unlock() + + // Diagnose views concurrently. + var wg sync.WaitGroup + for _, v := range needsDiagnosis { + v := v + snapshot, release, err := v.Snapshot() + if err != nil { + s.modificationMu.Lock() + // The View is shut down. Unlike below, no need to check + // s.needsDiagnosis[v], since the view can never be diagnosed. 
+ delete(s.viewsToDiagnose, v) + s.modificationMu.Unlock() + continue + } + + // Collect uris for fast diagnosis. We only care about the most recent + // change here, because this is just an optimization for the case where the + // user is actively editing a single file. + uris := lastChange[v] + if snapshot.Options().DiagnosticsTrigger == settings.DiagnosticsOnSave && cause == FromDidChange { + // The user requested to update the diagnostics only on save. + // Do not diagnose yet. + release() + continue + } + + wg.Add(1) + go func(snapshot *cache.Snapshot, uris []protocol.DocumentURI) { + defer release() + defer wg.Done() + s.diagnoseSnapshot(snapshot, uris, snapshot.Options().DiagnosticsDelay) + s.modificationMu.Lock() + + // Only remove v from s.viewsToDiagnose if the snapshot is not cancelled. + // This ensures that the snapshot was not cloned before its state was + // fully evaluated, and therefore avoids missing a change that was + // irrelevant to an incomplete snapshot. + // + // See the documentation for s.viewsToDiagnose for details. + if snapshot.BackgroundContext().Err() == nil && s.viewsToDiagnose[v] <= modID { + delete(s.viewsToDiagnose, v) + } + s.modificationMu.Unlock() + }(snapshot, uris) + } + + wg.Wait() + + // Diagnose orphaned files for the session. + orphanedFileDiagnostics, err := s.session.OrphanedFileDiagnostics(ctx) + if err == nil { + err = s.updateOrphanedFileDiagnostics(ctx, modID, orphanedFileDiagnostics) + } + if err != nil { + if ctx.Err() == nil { + event.Error(ctx, "warning: while diagnosing orphaned files", err) + } + } +} + +// diagnoseSnapshot computes and publishes diagnostics for the given snapshot. +// +// If delay is non-zero, computing diagnostics does not start until after this +// delay has expired, to allow work to be cancelled by subsequent changes. 
+// +// If changedURIs is non-empty, it is a set of recently changed files that +// should be diagnosed immediately, and onDisk reports whether these file +// changes came from a change to on-disk files. +func (s *server) diagnoseSnapshot(snapshot *cache.Snapshot, changedURIs []protocol.DocumentURI, delay time.Duration) { + ctx := snapshot.BackgroundContext() + ctx, done := event.Start(ctx, "Server.diagnoseSnapshot", snapshot.Labels()...) + defer done() + + if delay > 0 { + // 2-phase diagnostics. + // + // The first phase just parses and type-checks (but + // does not analyze) packages directly affected by + // file modifications. + // + // The second phase runs after the delay, and does everything. + // + // We wait a brief delay before the first phase, to allow higher priority + // work such as autocompletion to acquire the type checking mutex (though + // typically both diagnosing changed files and performing autocompletion + // will be doing the same work: recomputing active packages). + const minDelay = 20 * time.Millisecond + select { + case <-time.After(minDelay): + case <-ctx.Done(): + return + } + + if len(changedURIs) > 0 { + diagnostics, err := s.diagnoseChangedFiles(ctx, snapshot, changedURIs) + if err != nil { + if ctx.Err() == nil { + event.Error(ctx, "warning: while diagnosing changed files", err, snapshot.Labels()...) + } + return + } + s.updateDiagnostics(ctx, snapshot, diagnostics, false) + } + + if delay < minDelay { + delay = 0 + } else { + delay -= minDelay + } + + select { + case <-time.After(delay): + case <-ctx.Done(): + return + } + } + + diagnostics, err := s.diagnose(ctx, snapshot) + if err != nil { + if ctx.Err() == nil { + event.Error(ctx, "warning: while diagnosing snapshot", err, snapshot.Labels()...) 
+ } + return + } + s.updateDiagnostics(ctx, snapshot, diagnostics, true) +} + +func (s *server) diagnoseChangedFiles(ctx context.Context, snapshot *cache.Snapshot, uris []protocol.DocumentURI) (diagMap, error) { + ctx, done := event.Start(ctx, "Server.diagnoseChangedFiles", snapshot.Labels()...) + defer done() + + toDiagnose := make(map[metadata.PackageID]*metadata.Package) + for _, uri := range uris { + // If the file is not open, don't diagnose its package. + // + // We don't care about fast diagnostics for files that are no longer open, + // because the user isn't looking at them. Also, explicitly requesting a + // package can lead to "command-line-arguments" packages if the file isn't + // covered by the current View. By avoiding requesting packages for e.g. + // unrelated file movement, we can minimize these unnecessary packages. + if !snapshot.IsOpen(uri) { + continue + } + // If the file is not known to the snapshot (e.g., if it was deleted), + // don't diagnose it. + if snapshot.FindFile(uri) == nil { + continue + } + + // Don't request type-checking for builtin.go: it's not a real package. + if snapshot.IsBuiltin(uri) { + continue + } + + // Don't diagnose files that are ignored by `go list` (e.g. testdata). + if snapshot.IgnoredFile(uri) { + continue + } + + // Find all packages that include this file and diagnose them in parallel. + meta, err := golang.NarrowestMetadataForFile(ctx, snapshot, uri) + if err != nil { + if ctx.Err() != nil { + return nil, ctx.Err() + } + // TODO(findleyr): we should probably do something with the error here, + // but as of now this can fail repeatedly if load fails, so can be too + // noisy to log (and we'll handle things later in the slow pass). + continue + } + // golang/go#65801: only diagnose changes to workspace packages. Otherwise, + // diagnostics will be unstable, as the slow-path diagnostics will erase + // them. 
+ if snapshot.IsWorkspacePackage(ctx, meta.ID) { + toDiagnose[meta.ID] = meta + } + } + diags, err := snapshot.PackageDiagnostics(ctx, maps.Keys(toDiagnose)...) + if err != nil { + if ctx.Err() == nil { + event.Error(ctx, "warning: diagnostics failed", err, snapshot.Labels()...) + } + return nil, err + } + // golang/go#59587: guarantee that we compute type-checking diagnostics + // for every compiled package file, otherwise diagnostics won't be quickly + // cleared following a fix. + for _, meta := range toDiagnose { + for _, uri := range meta.CompiledGoFiles { + if _, ok := diags[uri]; !ok { + diags[uri] = nil + } + } + } + return diags, nil +} + +func (s *server) diagnose(ctx context.Context, snapshot *cache.Snapshot) (diagMap, error) { + ctx, done := event.Start(ctx, "Server.diagnose", snapshot.Labels()...) + defer done() + + // Wait for a free diagnostics slot. + // TODO(adonovan): opt: shouldn't it be the analysis implementation's + // job to de-dup and limit resource consumption? In any case this + // function spends most its time waiting for awaitLoaded, at + // least initially. + select { + case <-ctx.Done(): + return nil, ctx.Err() + case s.diagnosticsSema <- struct{}{}: + } + defer func() { + <-s.diagnosticsSema + }() + + var ( + diagnosticsMu sync.Mutex + diagnostics = make(diagMap) + ) + // common code for dispatching diagnostics + store := func(operation string, diagsByFile diagMap, err error) { + if err != nil { + if ctx.Err() == nil { + event.Error(ctx, "warning: while "+operation, err, snapshot.Labels()...) + } + return + } + diagnosticsMu.Lock() + defer diagnosticsMu.Unlock() + for uri, diags := range diagsByFile { + diagnostics[uri] = append(diagnostics[uri], diags...) + } + } + + // Diagnostics below are organized by increasing specificity: + // go.work > mod > mod upgrade > mod vuln > package, etc. + + // Diagnose go.work file. 
+ workReports, workErr := work.Diagnostics(ctx, snapshot) + if ctx.Err() != nil { + return nil, ctx.Err() + } + store("diagnosing go.work file", workReports, workErr) + + // Diagnose go.mod file. + modReports, modErr := mod.ParseDiagnostics(ctx, snapshot) + if ctx.Err() != nil { + return nil, ctx.Err() + } + store("diagnosing go.mod file", modReports, modErr) + + // Diagnose go.mod upgrades. + upgradeReports, upgradeErr := mod.UpgradeDiagnostics(ctx, snapshot) + if ctx.Err() != nil { + return nil, ctx.Err() + } + store("diagnosing go.mod upgrades", upgradeReports, upgradeErr) + + // Diagnose vulnerabilities. + vulnReports, vulnErr := mod.VulnerabilityDiagnostics(ctx, snapshot) + if ctx.Err() != nil { + return nil, ctx.Err() + } + store("diagnosing vulnerabilities", vulnReports, vulnErr) + + workspacePkgs, err := snapshot.WorkspaceMetadata(ctx) + if s.shouldIgnoreError(snapshot, err) { + return diagnostics, ctx.Err() + } + + initialErr := snapshot.InitializationError() + if ctx.Err() != nil { + // Don't update initialization status if the context is cancelled. + return nil, ctx.Err() + } + + if initialErr != nil { + store("critical error", initialErr.Diagnostics, nil) + } + + // Show the error as a progress error report so that it appears in the + // status bar. If a client doesn't support progress reports, the error + // will still be shown as a ShowMessage. If there is no error, any running + // error progress reports will be closed. + statusErr := initialErr + if len(snapshot.Overlays()) == 0 { + // Don't report a hanging status message if there are no open files at this + // snapshot. + statusErr = nil + } + s.updateCriticalErrorStatus(ctx, snapshot, statusErr) + + // Diagnose template (.tmpl) files. + tmplReports := template.Diagnostics(snapshot) + // NOTE(rfindley): typeCheckSource is not accurate here. + // (but this will be gone soon anyway). 
+ store("diagnosing templates", tmplReports, nil) + + // If there are no workspace packages, there is nothing to diagnose and + // there are no orphaned files. + if len(workspacePkgs) == 0 { + return diagnostics, nil + } + + var wg sync.WaitGroup // for potentially slow operations below + + // Maybe run go mod tidy (if it has been invalidated). + // + // Since go mod tidy can be slow, we run it concurrently to diagnostics. + wg.Add(1) + go func() { + defer wg.Done() + modTidyReports, err := mod.TidyDiagnostics(ctx, snapshot) + store("running go mod tidy", modTidyReports, err) + }() + + // Run type checking and go/analysis diagnosis of packages in parallel. + // + // For analysis, we use the *widest* package for each open file, + // for two reasons: + // + // - Correctness: some analyzers (e.g. unusedparam) depend + // on it. If applied to a non-test package for which a + // corresponding test package exists, they make assumptions + // that are falsified in the test package, for example that + // all references to unexported symbols are visible to the + // analysis. + // + // - Efficiency: it may yield a smaller covering set of + // PackageIDs for a given set of files. For example, {x.go, + // x_test.go} is covered by the single package x_test using + // "widest". (Using "narrowest", it would be covered only by + // the pair of packages {x, x_test}, Originally we used all + // covering packages, so {x.go} alone would be analyzed + // twice.) + var ( + toDiagnose = make(map[metadata.PackageID]*metadata.Package) + toAnalyze = make(map[metadata.PackageID]*metadata.Package) + + // secondary index, used to eliminate narrower packages. 
+ toAnalyzeWidest = make(map[golang.PackagePath]*metadata.Package) + ) + for _, mp := range workspacePkgs { + var hasNonIgnored, hasOpenFile bool + for _, uri := range mp.CompiledGoFiles { + if !hasNonIgnored && !snapshot.IgnoredFile(uri) { + hasNonIgnored = true + } + if !hasOpenFile && snapshot.IsOpen(uri) { + hasOpenFile = true + } + } + if hasNonIgnored { + toDiagnose[mp.ID] = mp + if hasOpenFile { + if prev, ok := toAnalyzeWidest[mp.PkgPath]; ok { + if len(prev.CompiledGoFiles) >= len(mp.CompiledGoFiles) { + // Previous entry is not narrower; keep it. + continue + } + // Evict previous (narrower) entry. + delete(toAnalyze, prev.ID) + } + toAnalyze[mp.ID] = mp + toAnalyzeWidest[mp.PkgPath] = mp + } + } + } + + wg.Add(1) + go func() { + defer wg.Done() + gcDetailsReports, err := s.gcDetailsDiagnostics(ctx, snapshot, toDiagnose) + store("collecting gc_details", gcDetailsReports, err) + }() + + // Package diagnostics and analysis diagnostics must both be computed and + // merged before they can be reported. + var pkgDiags, analysisDiags diagMap + // Collect package diagnostics. + wg.Add(1) + go func() { + defer wg.Done() + var err error + pkgDiags, err = snapshot.PackageDiagnostics(ctx, maps.Keys(toDiagnose)...) + if err != nil { + event.Error(ctx, "warning: diagnostics failed", err, snapshot.Labels()...) + } + }() + + // Get diagnostics from analysis framework. + // This includes type-error analyzers, which suggest fixes to compiler errors. + wg.Add(1) + go func() { + defer wg.Done() + var err error + // TODO(rfindley): here and above, we should avoid using the first result + // if err is non-nil (though as of today it's OK). + analysisDiags, err = golang.Analyze(ctx, snapshot, toAnalyze, s.progress) + if err != nil { + event.Error(ctx, "warning: analyzing package", err, append(snapshot.Labels(), tag.Package.Of(keys.Join(maps.Keys(toDiagnose))))...) 
+ return + } + }() + + wg.Wait() + + // Merge analysis diagnostics with package diagnostics, and store the + // resulting analysis diagnostics. + for uri, adiags := range analysisDiags { + tdiags := pkgDiags[uri] + var tdiags2, adiags2 []*cache.Diagnostic + combineDiagnostics(tdiags, adiags, &tdiags2, &adiags2) + pkgDiags[uri] = tdiags2 + analysisDiags[uri] = adiags2 + } + store("type checking", pkgDiags, nil) // error reported above + store("analyzing packages", analysisDiags, nil) // error reported above + + return diagnostics, nil +} + +func (s *server) gcDetailsDiagnostics(ctx context.Context, snapshot *cache.Snapshot, toDiagnose map[metadata.PackageID]*metadata.Package) (diagMap, error) { + // Process requested gc_details diagnostics. + // + // TODO(rfindley): this could be improved: + // 1. This should memoize its results if the package has not changed. + // 2. This should not even run gc_details if the package contains unsaved + // files. + // 3. See note below about using ReadFile. + // Consider that these points, in combination with the note below about + // races, suggest that gc_details should be tracked on the Snapshot. + var toGCDetail map[metadata.PackageID]*metadata.Package + for _, mp := range toDiagnose { + if snapshot.WantGCDetails(mp.ID) { + if toGCDetail == nil { + toGCDetail = make(map[metadata.PackageID]*metadata.Package) + } + toGCDetail[mp.ID] = mp + } + } + + diagnostics := make(diagMap) + for _, mp := range toGCDetail { + gcReports, err := golang.GCOptimizationDetails(ctx, snapshot, mp) + if err != nil { + event.Error(ctx, "warning: gc details", err, append(snapshot.Labels(), tag.Package.Of(string(mp.ID)))...) + continue + } + for uri, diags := range gcReports { + // TODO(rfindley): reading here should not be necessary: if a file has + // been deleted we should be notified, and diagnostics will eventually + // become consistent. 
+ fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + // Don't publish gc details for unsaved buffers, since the underlying + // logic operates on the file on disk. + if fh == nil || !fh.SameContentsOnDisk() { + continue + } + diagnostics[uri] = append(diagnostics[uri], diags...) + } + } + return diagnostics, nil +} + +// combineDiagnostics combines and filters list/parse/type diagnostics from +// tdiags with adiags, and appends the two lists to *outT and *outA, +// respectively. +// +// Type-error analyzers produce diagnostics that are redundant +// with type checker diagnostics, but more detailed (e.g. fixes). +// Rather than report two diagnostics for the same problem, +// we combine them by augmenting the type-checker diagnostic +// and discarding the analyzer diagnostic. +// +// If an analysis diagnostic has the same range and message as +// a list/parse/type diagnostic, the suggested fix information +// (et al) of the latter is merged into a copy of the former. +// This handles the case where a type-error analyzer suggests +// a fix to a type error, and avoids duplication. +// +// The use of out-slices, though irregular, allows the caller to +// easily choose whether to keep the results separate or combined. +// +// The arguments are not modified. +func combineDiagnostics(tdiags []*cache.Diagnostic, adiags []*cache.Diagnostic, outT, outA *[]*cache.Diagnostic) { + + // Build index of (list+parse+)type errors. + type key struct { + Range protocol.Range + message string + } + index := make(map[key]int) // maps (Range,Message) to index in tdiags slice + for i, diag := range tdiags { + index[key{diag.Range, diag.Message}] = i + } + + // Filter out analysis diagnostics that match type errors, + // retaining their suggested fix (etc) fields. 
+ for _, diag := range adiags { + if i, ok := index[key{diag.Range, diag.Message}]; ok { + copy := *tdiags[i] + copy.SuggestedFixes = diag.SuggestedFixes + copy.Tags = diag.Tags + tdiags[i] = &copy + continue + } + + *outA = append(*outA, diag) + } + + *outT = append(*outT, tdiags...) +} + +// mustPublishDiagnostics marks the uri as needing publication, independent of +// whether the published contents have changed. +// +// This can be used for ensuring gopls publishes diagnostics after certain file +// events. +func (s *server) mustPublishDiagnostics(uri protocol.DocumentURI) { + s.diagnosticsMu.Lock() + defer s.diagnosticsMu.Unlock() + + if s.diagnostics[uri] == nil { + s.diagnostics[uri] = new(fileDiagnostics) + } + s.diagnostics[uri].mustPublish = true +} + +const WorkspaceLoadFailure = "Error loading workspace" + +// updateCriticalErrorStatus updates the critical error progress notification +// based on err. +// +// If err is nil, or if there are no open files, it clears any existing error +// progress report. +func (s *server) updateCriticalErrorStatus(ctx context.Context, snapshot *cache.Snapshot, err *cache.InitializationError) { + s.criticalErrorStatusMu.Lock() + defer s.criticalErrorStatusMu.Unlock() + + // Remove all newlines so that the error message can be formatted in a + // status bar. + var errMsg string + if err != nil { + errMsg = strings.ReplaceAll(err.MainError.Error(), "\n", " ") + } + + if s.criticalErrorStatus == nil { + if errMsg != "" { + event.Error(ctx, "errors loading workspace", err.MainError, snapshot.Labels()...) + s.criticalErrorStatus = s.progress.Start(ctx, WorkspaceLoadFailure, errMsg, nil, nil) + } + return + } + + // If an error is already shown to the user, update it or mark it as + // resolved.
+ if errMsg == "" { + s.criticalErrorStatus.End(ctx, "Done.") + s.criticalErrorStatus = nil + } else { + s.criticalErrorStatus.Report(ctx, errMsg, 0) + } +} + +// updateDiagnostics records the result of diagnosing a snapshot, and publishes +// any diagnostics that need to be updated on the client. +func (s *server) updateDiagnostics(ctx context.Context, snapshot *cache.Snapshot, diagnostics diagMap, final bool) { + ctx, done := event.Start(ctx, "Server.publishDiagnostics") + defer done() + + s.diagnosticsMu.Lock() + defer s.diagnosticsMu.Unlock() + + // Before updating any diagnostics, check that the context (i.e. snapshot + // background context) is not cancelled. + // + // If not, then we know that we haven't started diagnosing the next snapshot, + // because the previous snapshot is cancelled before the next snapshot is + // returned from Invalidate. + // + // Therefore, even if we publish stale diagnostics here, they should + // eventually be overwritten with accurate diagnostics. + // + // TODO(rfindley): refactor the API to force that snapshots are diagnosed + // after they are created. + if ctx.Err() != nil { + return + } + + // golang/go#65312: since the set of diagnostics depends on the set of views, + // we get the views *after* locking diagnosticsMu. This ensures that + // updateDiagnostics does not incorrectly delete diagnostics that have been + // set for an existing view that was created between the call to + // s.session.Views() and updateDiagnostics. + viewMap := make(viewSet) + for _, v := range s.session.Views() { + viewMap[v] = unit{} + } + + // updateAndPublish updates diagnostics for a file, checking both the latest + // diagnostics for the current snapshot, as well as reconciling the set of + // views. + updateAndPublish := func(uri protocol.DocumentURI, f *fileDiagnostics, diags []*cache.Diagnostic) error { + current, ok := f.byView[snapshot.View()] + // Update the stored diagnostics if: + // 1. 
we've never seen diagnostics for this view, + // 2. diagnostics are for an older snapshot, or + // 3. we're overwriting with final diagnostics + // + // In other words, we shouldn't overwrite existing diagnostics for a + // snapshot with non-final diagnostics. This avoids the race described at + // https://github.com/golang/go/issues/64765#issuecomment-1890144575. + if !ok || current.snapshot < snapshot.SequenceID() || (current.snapshot == snapshot.SequenceID() && final) { + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return err + } + current = viewDiagnostics{ + snapshot: snapshot.SequenceID(), + version: fh.Version(), + diagnostics: diags, + } + if f.byView == nil { + f.byView = make(map[*cache.View]viewDiagnostics) + } + f.byView[snapshot.View()] = current + } + + return s.publishFileDiagnosticsLocked(ctx, viewMap, uri, current.version, f) + } + + seen := make(map[protocol.DocumentURI]bool) + for uri, diags := range diagnostics { + f, ok := s.diagnostics[uri] + if !ok { + f = new(fileDiagnostics) + s.diagnostics[uri] = f + } + seen[uri] = true + if err := updateAndPublish(uri, f, diags); err != nil { + if ctx.Err() != nil { + return + } else { + event.Error(ctx, "updateDiagnostics: failed to deliver diagnostics", err, tag.URI.Of(uri)) + } + } + } + + // TODO(rfindley): perhaps we should clean up files that have no diagnostics. + // One could imagine a large operation generating diagnostics for a great + // number of files, after which gopls has to do more bookkeeping into the + // future. + if final { + for uri, f := range s.diagnostics { + if !seen[uri] { + if err := updateAndPublish(uri, f, nil); err != nil { + if ctx.Err() != nil { + return + } else { + event.Error(ctx, "updateDiagnostics: failed to deliver diagnostics", err, tag.URI.Of(uri)) + } + } + } + } + } +} + +// updateOrphanedFileDiagnostics records and publishes orphaned file +// diagnostics as a given modification time. 
+func (s *server) updateOrphanedFileDiagnostics(ctx context.Context, modID uint64, diagnostics diagMap) error { + views := s.session.Views() + viewSet := make(viewSet) + for _, v := range views { + viewSet[v] = unit{} + } + + s.diagnosticsMu.Lock() + defer s.diagnosticsMu.Unlock() + + for uri, diags := range diagnostics { + f, ok := s.diagnostics[uri] + if !ok { + f = new(fileDiagnostics) + s.diagnostics[uri] = f + } + if f.orphanedAt > modID { + continue + } + f.orphanedAt = modID + f.orphanedFileDiagnostics = diags + // TODO(rfindley): the version of this file is potentially inaccurate; + // nevertheless, it should be eventually consistent, because all + // modifications are diagnosed. + fh, err := s.session.ReadFile(ctx, uri) + if err != nil { + return err + } + if err := s.publishFileDiagnosticsLocked(ctx, viewSet, uri, fh.Version(), f); err != nil { + return err + } + } + + // Clear any stale orphaned file diagnostics. + for uri, f := range s.diagnostics { + if f.orphanedAt < modID { + f.orphanedFileDiagnostics = nil + } + fh, err := s.session.ReadFile(ctx, uri) + if err != nil { + return err + } + if err := s.publishFileDiagnosticsLocked(ctx, viewSet, uri, fh.Version(), f); err != nil { + return err + } + } + return nil +} + +// publishFileDiagnosticsLocked publishes a fileDiagnostics value, while holding s.diagnosticsMu. +// +// If the publication succeeds, it updates f.publishedHash and f.mustPublish. +func (s *server) publishFileDiagnosticsLocked(ctx context.Context, views viewSet, uri protocol.DocumentURI, version int32, f *fileDiagnostics) error { + // We add a disambiguating suffix (e.g. " [darwin,arm64]") to + // each diagnostic that doesn't occur in the default view; + // see golang/go#65496. + type diagSuffix struct { + diag *cache.Diagnostic + suffix string // "" for default build (or orphans) + } + + // diagSuffixes records the set of view suffixes for a given diagnostic. 
+ diagSuffixes := make(map[file.Hash][]diagSuffix) + add := func(diag *cache.Diagnostic, suffix string) { + h := hashDiagnostic(diag) + diagSuffixes[h] = append(diagSuffixes[h], diagSuffix{diag, suffix}) + } + + // Construct the inverse mapping, from diagnostic (hash) to its suffixes (views). + for _, diag := range f.orphanedFileDiagnostics { + add(diag, "") + } + + var allViews []*cache.View + for view, viewDiags := range f.byView { + if _, ok := views[view]; !ok { + delete(f.byView, view) // view no longer exists + continue + } + if viewDiags.version != version { + continue // a payload of diagnostics applies to a specific file version + } + allViews = append(allViews, view) + } + + // Only report diagnostics from the best views for a file. This avoids + // spurious import errors when a view has only a partial set of dependencies + // for a package (golang/go#66425). + // + // It's ok to use the session to derive the eligible views, because we + // publish diagnostics following any state change, so the set of best views + // is eventually consistent. + bestViews, err := cache.BestViews(ctx, s.session, uri, allViews) + if err != nil { + return err + } + + if len(bestViews) == 0 { + // If we have no preferred diagnostics for a given file (i.e., the file is + // not naturally nested within a view), then all diagnostics should be + // considered valid. + // + // This could arise if the user jumps to definition outside the workspace. + // There is no view that owns the file, so its diagnostics are valid from + // any view. + bestViews = allViews + } + + for _, view := range bestViews { + viewDiags := f.byView[view] + // Compute the view's suffix (e.g. " [darwin,arm64]"). 
+ var suffix string + { + var words []string + if view.GOOS() != runtime.GOOS { + words = append(words, view.GOOS()) + } + if view.GOARCH() != runtime.GOARCH { + words = append(words, view.GOARCH()) + } + if len(words) > 0 { + suffix = fmt.Sprintf(" [%s]", strings.Join(words, ",")) + } + } + + for _, diag := range viewDiags.diagnostics { + add(diag, suffix) + } + } + + // De-dup diagnostics across views by hash, and sort. + var ( + hash file.Hash + unique []*cache.Diagnostic + ) + for h, items := range diagSuffixes { + // Sort the items by ascending suffix, so that the + // default view (if present) is first. + // (The others are ordered arbitrarily.) + sort.Slice(items, func(i, j int) bool { + return items[i].suffix < items[j].suffix + }) + + // If the diagnostic was not present in + // the default view, add the view suffix. + first := items[0] + if first.suffix != "" { + diag2 := *first.diag // shallow copy + diag2.Message += first.suffix + first.diag = &diag2 + h = hashDiagnostic(&diag2) // update the hash + } + + hash.XORWith(h) + unique = append(unique, first.diag) + } + sortDiagnostics(unique) + + // Publish, if necessary. 
+ if hash != f.publishedHash || f.mustPublish { + if err := s.client.PublishDiagnostics(ctx, &protocol.PublishDiagnosticsParams{ + Diagnostics: toProtocolDiagnostics(unique), + URI: uri, + Version: version, + }); err != nil { + return err + } + f.publishedHash = hash + f.mustPublish = false + } + return nil +} + +func toProtocolDiagnostics(diagnostics []*cache.Diagnostic) []protocol.Diagnostic { + reports := []protocol.Diagnostic{} + for _, diag := range diagnostics { + pdiag := protocol.Diagnostic{ + // diag.Message might start with \n or \t + Message: strings.TrimSpace(diag.Message), + Range: diag.Range, + Severity: diag.Severity, + Source: string(diag.Source), + Tags: protocol.NonNilSlice(diag.Tags), + RelatedInformation: diag.Related, + Data: diag.BundledFixes, + } + if diag.Code != "" { + pdiag.Code = diag.Code + } + if diag.CodeHref != "" { + pdiag.CodeDescription = &protocol.CodeDescription{Href: diag.CodeHref} + } + reports = append(reports, pdiag) + } + return reports +} + +func (s *server) shouldIgnoreError(snapshot *cache.Snapshot, err error) bool { + if err == nil { // if there is no error at all + return false + } + if errors.Is(err, context.Canceled) { + return true + } + // If the folder has no Go code in it, we shouldn't spam the user with a warning. + // TODO(rfindley): surely it is not correct to walk the folder here just to + // suppress diagnostics, every time we compute diagnostics. + var hasGo bool + _ = filepath.Walk(snapshot.Folder().Path(), func(_ string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !strings.HasSuffix(info.Name(), ".go") { + return nil + } + hasGo = true + return errors.New("done") + }) + return !hasGo +} diff --git a/gopls/internal/server/folding_range.go b/gopls/internal/server/folding_range.go new file mode 100644 index 00000000000..cb9d0cb5d49 --- /dev/null +++ b/gopls/internal/server/folding_range.go @@ -0,0 +1,49 @@ +// Copyright 2019 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +import ( + "context" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) FoldingRange(ctx context.Context, params *protocol.FoldingRangeParams) ([]protocol.FoldingRange, error) { + ctx, done := event.Start(ctx, "lsp.Server.foldingRange", tag.URI.Of(params.TextDocument.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + if snapshot.FileKind(fh) != file.Go { + return nil, nil // empty result + } + ranges, err := golang.FoldingRange(ctx, snapshot, fh, snapshot.Options().LineFoldingOnly) + if err != nil { + return nil, err + } + return toProtocolFoldingRanges(ranges) +} + +func toProtocolFoldingRanges(ranges []*golang.FoldingRangeInfo) ([]protocol.FoldingRange, error) { + result := make([]protocol.FoldingRange, 0, len(ranges)) + for _, info := range ranges { + rng := info.MappedRange.Range() + result = append(result, protocol.FoldingRange{ + StartLine: rng.Start.Line, + StartCharacter: rng.Start.Character, + EndLine: rng.End.Line, + EndCharacter: rng.End.Character, + Kind: string(info.Kind), + }) + } + return result, nil +} diff --git a/gopls/internal/server/format.go b/gopls/internal/server/format.go new file mode 100644 index 00000000000..0e6cfdce6d7 --- /dev/null +++ b/gopls/internal/server/format.go @@ -0,0 +1,38 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package server + +import ( + "context" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/mod" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/work" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) Formatting(ctx context.Context, params *protocol.DocumentFormattingParams) ([]protocol.TextEdit, error) { + ctx, done := event.Start(ctx, "lsp.Server.formatting", tag.URI.Of(params.TextDocument.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + + switch snapshot.FileKind(fh) { + case file.Mod: + return mod.Format(ctx, snapshot, fh) + case file.Go: + return golang.Format(ctx, snapshot, fh) + case file.Work: + return work.Format(ctx, snapshot, fh) + } + return nil, nil // empty result +} diff --git a/gopls/internal/server/general.go b/gopls/internal/server/general.go new file mode 100644 index 00000000000..e40532e4901 --- /dev/null +++ b/gopls/internal/server/general.go @@ -0,0 +1,685 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +// This file defines server methods related to initialization, +// options, shutdown, and exit. 
+ +import ( + "context" + "encoding/json" + "fmt" + "go/build" + "log" + "os" + "path" + "path/filepath" + "sort" + "strings" + "sync" + + "golang.org/x/telemetry/counter" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/debug" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/goversion" + "golang.org/x/tools/gopls/internal/util/maps" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/jsonrpc2" +) + +func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitialize) (*protocol.InitializeResult, error) { + ctx, done := event.Start(ctx, "lsp.Server.initialize") + defer done() + + var clientName string + if params != nil && params.ClientInfo != nil { + clientName = params.ClientInfo.Name + } + recordClientInfo(clientName) + + s.stateMu.Lock() + if s.state >= serverInitializing { + defer s.stateMu.Unlock() + return nil, fmt.Errorf("%w: initialize called while server in %v state", jsonrpc2.ErrInvalidRequest, s.state) + } + s.state = serverInitializing + s.stateMu.Unlock() + + // For uniqueness, use the gopls PID rather than params.ProcessID (the client + // pid). Some clients might start multiple gopls servers, though they + // probably shouldn't. + pid := os.Getpid() + s.tempDir = filepath.Join(os.TempDir(), fmt.Sprintf("gopls-%d.%s", pid, s.session.ID())) + err := os.Mkdir(s.tempDir, 0700) + if err != nil { + // MkdirTemp could fail due to permissions issues. This is a problem with + // the user's environment, but should not block gopls otherwise behaving. + // All usage of s.tempDir should be predicated on having a non-empty + // s.tempDir. 
+ event.Error(ctx, "creating temp dir", err) + s.tempDir = "" + } + s.progress.SetSupportsWorkDoneProgress(params.Capabilities.Window.WorkDoneProgress) + + options := s.Options().Clone() + // TODO(rfindley): remove the error return from handleOptionResults, and + // eliminate this defer. + defer func() { s.SetOptions(options) }() + + if err := s.handleOptionResults(ctx, settings.SetOptions(options, params.InitializationOptions)); err != nil { + return nil, err + } + options.ForClientCapabilities(params.ClientInfo, params.Capabilities) + + if options.ShowBugReports { + // Report the next bug that occurs on the server. + bug.Handle(func(b bug.Bug) { + msg := &protocol.ShowMessageParams{ + Type: protocol.Error, + Message: fmt.Sprintf("A bug occurred on the server: %s\nLocation:%s", b.Description, b.Key), + } + go func() { + if err := s.eventuallyShowMessage(context.Background(), msg); err != nil { + log.Printf("error showing bug: %v", err) + } + }() + }) + } + + folders := params.WorkspaceFolders + if len(folders) == 0 { + if params.RootURI != "" { + folders = []protocol.WorkspaceFolder{{ + URI: string(params.RootURI), + Name: path.Base(params.RootURI.Path()), + }} + } + } + for _, folder := range folders { + if folder.URI == "" { + return nil, fmt.Errorf("empty WorkspaceFolder.URI") + } + if _, err := protocol.ParseDocumentURI(folder.URI); err != nil { + return nil, fmt.Errorf("invalid WorkspaceFolder.URI: %v", err) + } + s.pendingFolders = append(s.pendingFolders, folder) + } + + var codeActionProvider interface{} = true + if ca := params.Capabilities.TextDocument.CodeAction; len(ca.CodeActionLiteralSupport.CodeActionKind.ValueSet) > 0 { + // If the client has specified CodeActionLiteralSupport, + // send the code actions we support. + // + // Using CodeActionOptions is only valid if codeActionLiteralSupport is set. 
+ codeActionProvider = &protocol.CodeActionOptions{ + CodeActionKinds: s.getSupportedCodeActions(), + ResolveProvider: true, + } + } + var renameOpts interface{} = true + if r := params.Capabilities.TextDocument.Rename; r != nil && r.PrepareSupport { + renameOpts = protocol.RenameOptions{ + PrepareProvider: r.PrepareSupport, + } + } + + versionInfo := debug.VersionInfo() + + goplsVersion, err := json.Marshal(versionInfo) + if err != nil { + return nil, err + } + + return &protocol.InitializeResult{ + Capabilities: protocol.ServerCapabilities{ + CallHierarchyProvider: &protocol.Or_ServerCapabilities_callHierarchyProvider{Value: true}, + CodeActionProvider: codeActionProvider, + CodeLensProvider: &protocol.CodeLensOptions{}, // must be non-nil to enable the code lens capability + CompletionProvider: &protocol.CompletionOptions{ + TriggerCharacters: []string{"."}, + }, + DefinitionProvider: &protocol.Or_ServerCapabilities_definitionProvider{Value: true}, + TypeDefinitionProvider: &protocol.Or_ServerCapabilities_typeDefinitionProvider{Value: true}, + ImplementationProvider: &protocol.Or_ServerCapabilities_implementationProvider{Value: true}, + DocumentFormattingProvider: &protocol.Or_ServerCapabilities_documentFormattingProvider{Value: true}, + DocumentSymbolProvider: &protocol.Or_ServerCapabilities_documentSymbolProvider{Value: true}, + WorkspaceSymbolProvider: &protocol.Or_ServerCapabilities_workspaceSymbolProvider{Value: true}, + ExecuteCommandProvider: &protocol.ExecuteCommandOptions{ + Commands: protocol.NonNilSlice(options.SupportedCommands), + }, + FoldingRangeProvider: &protocol.Or_ServerCapabilities_foldingRangeProvider{Value: true}, + HoverProvider: &protocol.Or_ServerCapabilities_hoverProvider{Value: true}, + DocumentHighlightProvider: &protocol.Or_ServerCapabilities_documentHighlightProvider{Value: true}, + DocumentLinkProvider: &protocol.DocumentLinkOptions{}, + InlayHintProvider: protocol.InlayHintOptions{}, + ReferencesProvider: 
&protocol.Or_ServerCapabilities_referencesProvider{Value: true}, + RenameProvider: renameOpts, + SelectionRangeProvider: &protocol.Or_ServerCapabilities_selectionRangeProvider{Value: true}, + SemanticTokensProvider: protocol.SemanticTokensOptions{ + Range: &protocol.Or_SemanticTokensOptions_range{Value: true}, + Full: &protocol.Or_SemanticTokensOptions_full{Value: true}, + Legend: protocol.SemanticTokensLegend{ + TokenTypes: protocol.NonNilSlice(options.SemanticTypes), + TokenModifiers: protocol.NonNilSlice(options.SemanticMods), + }, + }, + SignatureHelpProvider: &protocol.SignatureHelpOptions{ + TriggerCharacters: []string{"(", ","}, + }, + TextDocumentSync: &protocol.TextDocumentSyncOptions{ + Change: protocol.Incremental, + OpenClose: true, + Save: &protocol.SaveOptions{ + IncludeText: false, + }, + }, + Workspace: &protocol.WorkspaceOptions{ + WorkspaceFolders: &protocol.WorkspaceFolders5Gn{ + Supported: true, + ChangeNotifications: "workspace/didChangeWorkspaceFolders", + }, + }, + }, + ServerInfo: &protocol.ServerInfo{ + Name: "gopls", + Version: string(goplsVersion), + }, + }, nil +} + +func (s *server) Initialized(ctx context.Context, params *protocol.InitializedParams) error { + ctx, done := event.Start(ctx, "lsp.Server.initialized") + defer done() + + s.stateMu.Lock() + if s.state >= serverInitialized { + defer s.stateMu.Unlock() + return fmt.Errorf("%w: initialized called while server in %v state", jsonrpc2.ErrInvalidRequest, s.state) + } + s.state = serverInitialized + s.stateMu.Unlock() + + for _, not := range s.notifications { + s.client.ShowMessage(ctx, not) + } + s.notifications = nil + + s.addFolders(ctx, s.pendingFolders) + + s.pendingFolders = nil + s.checkViewGoVersions() + + var registrations []protocol.Registration + options := s.Options() + if options.ConfigurationSupported && options.DynamicConfigurationSupported { + registrations = append(registrations, protocol.Registration{ + ID: "workspace/didChangeConfiguration", + Method: 
"workspace/didChangeConfiguration", + }) + } + if len(registrations) > 0 { + if err := s.client.RegisterCapability(ctx, &protocol.RegistrationParams{ + Registrations: registrations, + }); err != nil { + return err + } + } + + // Ask (maybe) about enabling telemetry. Do this asynchronously, as it's OK + // for users to ignore or dismiss the question. + go s.maybePromptForTelemetry(ctx, options.TelemetryPrompt) + + return nil +} + +// checkViewGoVersions checks whether any Go version used by a view is too old, +// raising a showMessage notification if so. +// +// It should be called after views change. +func (s *server) checkViewGoVersions() { + oldestVersion, fromBuild := go1Point(), true + for _, view := range s.session.Views() { + viewVersion := view.GoVersion() + if oldestVersion == -1 || viewVersion < oldestVersion { + oldestVersion, fromBuild = viewVersion, false + } + if viewVersion >= 0 { + counter.Inc(fmt.Sprintf("gopls/goversion:1.%d", viewVersion)) + } + } + + if msg, isError := goversion.Message(oldestVersion, fromBuild); msg != "" { + mType := protocol.Warning + if isError { + mType = protocol.Error + } + s.eventuallyShowMessage(context.Background(), &protocol.ShowMessageParams{ + Type: mType, + Message: msg, + }) + } +} + +// go1Point returns the x in Go 1.x. If an error occurs extracting the go +// version, it returns -1. +// +// Copied from the testenv package. +func go1Point() int { + for i := len(build.Default.ReleaseTags) - 1; i >= 0; i-- { + var version int + if _, err := fmt.Sscanf(build.Default.ReleaseTags[i], "go1.%d", &version); err != nil { + continue + } + return version + } + return -1 +} + +// addFolders adds the specified list of "folders" (that's Windows for +// directories) to the session. It does not return an error, though it +// may report an error to the client over LSP if one or more folders +// had problems. 
+func (s *server) addFolders(ctx context.Context, folders []protocol.WorkspaceFolder) { + originalViews := len(s.session.Views()) + viewErrors := make(map[protocol.URI]error) + + var ndiagnose sync.WaitGroup // number of unfinished diagnose calls + if s.Options().VerboseWorkDoneProgress { + work := s.progress.Start(ctx, DiagnosticWorkTitle(FromInitialWorkspaceLoad), "Calculating diagnostics for initial workspace load...", nil, nil) + defer func() { + go func() { + ndiagnose.Wait() + work.End(ctx, "Done.") + }() + }() + } + // Only one view gets to have a workspace. + var nsnapshots sync.WaitGroup // number of unfinished snapshot initializations + for _, folder := range folders { + uri, err := protocol.ParseDocumentURI(folder.URI) + if err != nil { + viewErrors[folder.URI] = fmt.Errorf("invalid folder URI: %v", err) + continue + } + work := s.progress.Start(ctx, "Setting up workspace", "Loading packages...", nil, nil) + snapshot, release, err := s.addView(ctx, folder.Name, uri) + if err != nil { + if err == cache.ErrViewExists { + continue + } + viewErrors[folder.URI] = err + work.End(ctx, fmt.Sprintf("Error loading packages: %s", err)) + continue + } + // Inv: release() must be called once. + + // Initialize snapshot asynchronously. + initialized := make(chan struct{}) + nsnapshots.Add(1) + go func() { + snapshot.AwaitInitialized(ctx) + work.End(ctx, "Finished loading packages.") + nsnapshots.Done() + close(initialized) // signal + }() + + // Diagnose the newly created view asynchronously. + ndiagnose.Add(1) + go func() { + s.diagnoseSnapshot(snapshot, nil, 0) + <-initialized + release() + ndiagnose.Done() + }() + } + + // Wait for snapshots to be initialized so that all files are known. + // (We don't need to wait for diagnosis to finish.) + nsnapshots.Wait() + + // Register for file watching notifications, if they are supported. 
+ if err := s.updateWatchedDirectories(ctx); err != nil { + event.Error(ctx, "failed to register for file watching notifications", err) + } + + // Report any errors using the protocol. + if len(viewErrors) > 0 { + errMsg := fmt.Sprintf("Error loading workspace folders (expected %v, got %v)\n", len(folders), len(s.session.Views())-originalViews) + for uri, err := range viewErrors { + errMsg += fmt.Sprintf("failed to load view for %s: %v\n", uri, err) + } + showMessage(ctx, s.client, protocol.Error, errMsg) + } +} + +// updateWatchedDirectories compares the current set of directories to watch +// with the previously registered set of directories. If the set of directories +// has changed, we unregister and re-register for file watching notifications. +// It should be called after the set of views changes. +func (s *server) updateWatchedDirectories(ctx context.Context) error { + patterns := s.session.FileWatchingGlobPatterns(ctx) + + s.watchedGlobPatternsMu.Lock() + defer s.watchedGlobPatternsMu.Unlock() + + // Nothing to do if the set of workspace directories is unchanged. + if maps.SameKeys(s.watchedGlobPatterns, patterns) { + return nil + } + + // If the set of directories to watch has changed, register the updates and + // unregister the previously watched directories. This ordering avoids a + // period where no files are being watched. Still, if a user makes on-disk + // changes before these updates are complete, we may miss them for the new + // directories. 
+ prevID := s.watchRegistrationCount - 1 + if err := s.registerWatchedDirectoriesLocked(ctx, patterns); err != nil { + return err + } + if prevID >= 0 { + return s.client.UnregisterCapability(ctx, &protocol.UnregistrationParams{ + Unregisterations: []protocol.Unregistration{{ + ID: watchedFilesCapabilityID(prevID), + Method: "workspace/didChangeWatchedFiles", + }}, + }) + } + return nil +} + +func watchedFilesCapabilityID(id int) string { + return fmt.Sprintf("workspace/didChangeWatchedFiles-%d", id) +} + +// registerWatchedDirectoriesLocked sends the workspace/didChangeWatchedFiles +// registrations to the client and updates s.watchedDirectories. +// The caller must not subsequently mutate patterns. +func (s *server) registerWatchedDirectoriesLocked(ctx context.Context, patterns map[protocol.RelativePattern]unit) error { + if !s.Options().DynamicWatchedFilesSupported { + return nil + } + + supportsRelativePatterns := s.Options().RelativePatternsSupported + + s.watchedGlobPatterns = patterns + watchers := make([]protocol.FileSystemWatcher, 0, len(patterns)) // must be a slice + val := protocol.WatchChange | protocol.WatchDelete | protocol.WatchCreate + for pattern := range patterns { + var value any + if supportsRelativePatterns && pattern.BaseURI != "" { + value = pattern + } else { + p := pattern.Pattern + if pattern.BaseURI != "" { + p = path.Join(filepath.ToSlash(pattern.BaseURI.Path()), p) + } + value = p + } + watchers = append(watchers, protocol.FileSystemWatcher{ + GlobPattern: protocol.GlobPattern{Value: value}, + Kind: &val, + }) + } + + if err := s.client.RegisterCapability(ctx, &protocol.RegistrationParams{ + Registrations: []protocol.Registration{{ + ID: watchedFilesCapabilityID(s.watchRegistrationCount), + Method: "workspace/didChangeWatchedFiles", + RegisterOptions: protocol.DidChangeWatchedFilesRegistrationOptions{ + Watchers: watchers, + }, + }}, + }); err != nil { + return err + } + s.watchRegistrationCount++ + return nil +} + +// Options returns 
the current server options. +// +// The caller must not modify the result. +func (s *server) Options() *settings.Options { + s.optionsMu.Lock() + defer s.optionsMu.Unlock() + return s.options +} + +// SetOptions sets the current server options. +// +// The caller must not subsequently modify the options. +func (s *server) SetOptions(opts *settings.Options) { + s.optionsMu.Lock() + defer s.optionsMu.Unlock() + s.options = opts +} + +func (s *server) newFolder(ctx context.Context, folder protocol.DocumentURI, name string) (*cache.Folder, error) { + opts := s.Options() + if opts.ConfigurationSupported { + scope := string(folder) + configs, err := s.client.Configuration(ctx, &protocol.ParamConfiguration{ + Items: []protocol.ConfigurationItem{{ + ScopeURI: &scope, + Section: "gopls", + }}, + }, + ) + if err != nil { + return nil, fmt.Errorf("failed to get workspace configuration from client (%s): %v", folder, err) + } + + opts = opts.Clone() + for _, config := range configs { + if err := s.handleOptionResults(ctx, settings.SetOptions(opts, config)); err != nil { + return nil, err + } + } + } + + env, err := cache.FetchGoEnv(ctx, folder, opts) + if err != nil { + return nil, err + } + return &cache.Folder{ + Dir: folder, + Name: name, + Options: opts, + Env: env, + }, nil +} + +// fetchFolderOptions makes a workspace/configuration request for the given +// folder, and populates options with the result. +// +// If folder is "", fetchFolderOptions makes an unscoped request. 
+func (s *server) fetchFolderOptions(ctx context.Context, folder protocol.DocumentURI) (*settings.Options, error) { + opts := s.Options() + if !opts.ConfigurationSupported { + return opts, nil + } + var scopeURI *string + if folder != "" { + scope := string(folder) + scopeURI = &scope + } + configs, err := s.client.Configuration(ctx, &protocol.ParamConfiguration{ + Items: []protocol.ConfigurationItem{{ + ScopeURI: scopeURI, + Section: "gopls", + }}, + }, + ) + if err != nil { + return nil, fmt.Errorf("failed to get workspace configuration from client (%s): %v", folder, err) + } + + opts = opts.Clone() + for _, config := range configs { + if err := s.handleOptionResults(ctx, settings.SetOptions(opts, config)); err != nil { + return nil, err + } + } + return opts, nil +} + +func (s *server) eventuallyShowMessage(ctx context.Context, msg *protocol.ShowMessageParams) error { + s.stateMu.Lock() + defer s.stateMu.Unlock() + if s.state == serverInitialized { + return s.client.ShowMessage(ctx, msg) + } + s.notifications = append(s.notifications, msg) + return nil +} + +func (s *server) handleOptionResults(ctx context.Context, results settings.OptionResults) error { + var warnings, errors []string + for _, result := range results { + switch result.Error.(type) { + case nil: + // nothing to do + case *settings.SoftError: + warnings = append(warnings, result.Error.Error()) + default: + errors = append(errors, result.Error.Error()) + } + } + + // Sort messages, but put errors first. + // + // Having stable content for the message allows clients to de-duplicate. This + // matters because we may send duplicate warnings for clients that support + // dynamic configuration: one for the initial settings, and then more for the + // individual viewsettings. + var msgs []string + msgType := protocol.Warning + if len(errors) > 0 { + msgType = protocol.Error + sort.Strings(errors) + msgs = append(msgs, errors...) 
+ } + if len(warnings) > 0 { + sort.Strings(warnings) + msgs = append(msgs, warnings...) + } + + if len(msgs) > 0 { + // Settings + combined := "Invalid settings: " + strings.Join(msgs, "; ") + params := &protocol.ShowMessageParams{ + Type: msgType, + Message: combined, + } + return s.eventuallyShowMessage(ctx, params) + } + + return nil +} + +// fileOf returns the file for a given URI and its snapshot. +// On success, the returned function must be called to release the snapshot. +func (s *server) fileOf(ctx context.Context, uri protocol.DocumentURI) (file.Handle, *cache.Snapshot, func(), error) { + snapshot, release, err := s.session.SnapshotOf(ctx, uri) + if err != nil { + return nil, nil, nil, err + } + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + release() + return nil, nil, nil, err + } + return fh, snapshot, release, nil +} + +// shutdown implements the 'shutdown' LSP handler. It releases resources +// associated with the server and waits for all ongoing work to complete. +func (s *server) Shutdown(ctx context.Context) error { + ctx, done := event.Start(ctx, "lsp.Server.shutdown") + defer done() + + s.stateMu.Lock() + defer s.stateMu.Unlock() + if s.state < serverInitialized { + event.Log(ctx, "server shutdown without initialization") + } + if s.state != serverShutDown { + // Wait for the webserver (if any) to finish. + if s.web != nil { + s.web.server.Shutdown(ctx) + } + + // drop all the active views + s.session.Shutdown(ctx) + s.state = serverShutDown + if s.tempDir != "" { + if err := os.RemoveAll(s.tempDir); err != nil { + event.Error(ctx, "removing temp dir", err) + } + } + } + return nil +} + +func (s *server) Exit(ctx context.Context) error { + ctx, done := event.Start(ctx, "lsp.Server.exit") + defer done() + + s.stateMu.Lock() + defer s.stateMu.Unlock() + + s.client.Close() + + if s.state != serverShutDown { + // TODO: We should be able to do better than this. 
+ os.Exit(1) + } + // We don't terminate the process on a normal exit, we just allow it to + // close naturally if needed after the connection is closed. + return nil +} + +// recordClientInfo records gopls client info. +func recordClientInfo(clientName string) { + key := "gopls/client:other" + switch clientName { + case "Visual Studio Code": + key = "gopls/client:vscode" + case "Visual Studio Code - Insiders": + key = "gopls/client:vscode-insiders" + case "VSCodium": + key = "gopls/client:vscodium" + case "code-server": + // https://github.com/coder/code-server/blob/3cb92edc76ecc2cfa5809205897d93d4379b16a6/ci/build/build-vscode.sh#L19 + key = "gopls/client:code-server" + case "Eglot": + // https://lists.gnu.org/archive/html/bug-gnu-emacs/2023-03/msg00954.html + key = "gopls/client:eglot" + case "govim": + // https://github.com/govim/govim/pull/1189 + key = "gopls/client:govim" + case "Neovim": + // https://github.com/neovim/neovim/blob/42333ea98dfcd2994ee128a3467dfe68205154cd/runtime/lua/vim/lsp.lua#L1361 + key = "gopls/client:neovim" + case "coc.nvim": + // https://github.com/neoclide/coc.nvim/blob/3dc6153a85ed0f185abec1deb972a66af3fbbfb4/src/language-client/client.ts#L994 + key = "gopls/client:coc.nvim" + case "Sublime Text LSP": + // https://github.com/sublimelsp/LSP/blob/e608f878e7e9dd34aabe4ff0462540fadcd88fcc/plugin/core/sessions.py#L493 + key = "gopls/client:sublimetext" + default: + // Accumulate at least a local counter for an unknown + // client name, but also fall through to count it as + // ":other" for collection. + if clientName != "" { + counter.New(fmt.Sprintf("gopls/client-other:%s", clientName)).Inc() + } + } + counter.Inc(key) +} diff --git a/gopls/internal/server/highlight.go b/gopls/internal/server/highlight.go new file mode 100644 index 00000000000..45eeba77b56 --- /dev/null +++ b/gopls/internal/server/highlight.go @@ -0,0 +1,51 @@ +// Copyright 2019 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +import ( + "context" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/template" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) DocumentHighlight(ctx context.Context, params *protocol.DocumentHighlightParams) ([]protocol.DocumentHighlight, error) { + ctx, done := event.Start(ctx, "lsp.Server.documentHighlight", tag.URI.Of(params.TextDocument.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + + switch snapshot.FileKind(fh) { + case file.Tmpl: + return template.Highlight(ctx, snapshot, fh, params.Position) + case file.Go: + rngs, err := golang.Highlight(ctx, snapshot, fh, params.Position) + if err != nil { + event.Error(ctx, "no highlight", err) + } + return toProtocolHighlight(rngs), nil + } + return nil, nil // empty result +} + +func toProtocolHighlight(rngs []protocol.Range) []protocol.DocumentHighlight { + result := make([]protocol.DocumentHighlight, 0, len(rngs)) + kind := protocol.Text + for _, rng := range rngs { + result = append(result, protocol.DocumentHighlight{ + Kind: kind, + Range: rng, + }) + } + return result +} diff --git a/gopls/internal/server/hover.go b/gopls/internal/server/hover.go new file mode 100644 index 00000000000..1ceede24ed7 --- /dev/null +++ b/gopls/internal/server/hover.go @@ -0,0 +1,47 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package server + +import ( + "context" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/mod" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/telemetry" + "golang.org/x/tools/gopls/internal/template" + "golang.org/x/tools/gopls/internal/work" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) Hover(ctx context.Context, params *protocol.HoverParams) (_ *protocol.Hover, rerr error) { + recordLatency := telemetry.StartLatencyTimer("hover") + defer func() { + recordLatency(ctx, rerr) + }() + + ctx, done := event.Start(ctx, "lsp.Server.hover", tag.URI.Of(params.TextDocument.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + + switch snapshot.FileKind(fh) { + case file.Mod: + return mod.Hover(ctx, snapshot, fh, params.Position) + case file.Go: + return golang.Hover(ctx, snapshot, fh, params.Position) + case file.Tmpl: + return template.Hover(ctx, snapshot, fh, params.Position) + case file.Work: + return work.Hover(ctx, snapshot, fh, params.Position) + } + return nil, nil // empty result +} diff --git a/gopls/internal/server/implementation.go b/gopls/internal/server/implementation.go new file mode 100644 index 00000000000..b462eacee8a --- /dev/null +++ b/gopls/internal/server/implementation.go @@ -0,0 +1,36 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package server + +import ( + "context" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/telemetry" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) Implementation(ctx context.Context, params *protocol.ImplementationParams) (_ []protocol.Location, rerr error) { + recordLatency := telemetry.StartLatencyTimer("implementation") + defer func() { + recordLatency(ctx, rerr) + }() + + ctx, done := event.Start(ctx, "lsp.Server.implementation", tag.URI.Of(params.TextDocument.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + if snapshot.FileKind(fh) != file.Go { + return nil, nil // empty result + } + return golang.Implementation(ctx, snapshot, fh, params.Position) +} diff --git a/gopls/internal/server/inlay_hint.go b/gopls/internal/server/inlay_hint.go new file mode 100644 index 00000000000..88ec783e391 --- /dev/null +++ b/gopls/internal/server/inlay_hint.go @@ -0,0 +1,35 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package server + +import ( + "context" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/mod" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) InlayHint(ctx context.Context, params *protocol.InlayHintParams) ([]protocol.InlayHint, error) { + ctx, done := event.Start(ctx, "lsp.Server.inlayHint", tag.URI.Of(params.TextDocument.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + + switch snapshot.FileKind(fh) { + case file.Mod: + return mod.InlayHint(ctx, snapshot, fh, params.Range) + case file.Go: + return golang.InlayHint(ctx, snapshot, fh, params.Range) + } + return nil, nil // empty result +} diff --git a/gopls/internal/lsp/link.go b/gopls/internal/server/link.go similarity index 81% rename from gopls/internal/lsp/link.go rename to gopls/internal/server/link.go index f04e265a08b..c0a60f22601 100644 --- a/gopls/internal/lsp/link.go +++ b/gopls/internal/server/link.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-package lsp +package server import ( "bytes" @@ -16,37 +16,42 @@ import ( "sync" "golang.org/x/mod/modfile" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/safetoken" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/event/tag" ) -func (s *Server) documentLink(ctx context.Context, params *protocol.DocumentLinkParams) (links []protocol.DocumentLink, err error) { +func (s *server) DocumentLink(ctx context.Context, params *protocol.DocumentLinkParams) (links []protocol.DocumentLink, err error) { ctx, done := event.Start(ctx, "lsp.Server.documentLink") defer done() - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.UnknownKind) - defer release() - if !ok { + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { return nil, err } + defer release() + switch snapshot.FileKind(fh) { - case source.Mod: + case file.Mod: links, err = modLinks(ctx, snapshot, fh) - case source.Go: + case file.Go: links, err = goLinks(ctx, snapshot, fh) } // Don't return errors for document links. 
if err != nil { event.Error(ctx, "failed to compute document links", err, tag.URI.Of(fh.URI())) - return nil, nil + return nil, nil // empty result } - return links, nil + return links, nil // may be empty (for other file types) } -func modLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) ([]protocol.DocumentLink, error) { +func modLinks(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.DocumentLink, error) { pm, err := snapshot.ParseMod(ctx, fh) if err != nil { return nil, err @@ -58,7 +63,7 @@ func modLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandl continue } // See golang/go#36998: don't link to modules matching GOPRIVATE. - if snapshot.View().IsGoPrivatePath(req.Mod.Path) { + if snapshot.IsGoPrivatePath(req.Mod.Path) { continue } dep := []byte(req.Mod.Path) @@ -69,7 +74,7 @@ func modLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandl } // Shift the start position to the location of the // dependency within the require statement. - target := source.BuildLink(snapshot.Options().LinkTarget, "mod/"+req.Mod.String(), "") + target := cache.BuildLink(snapshot.Options().LinkTarget, "mod/"+req.Mod.String(), "") l, err := toProtocolLink(pm.Mapper, target, start+i, start+i+len(dep)) if err != nil { return nil, err @@ -102,9 +107,9 @@ func modLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandl } // goLinks returns the set of hyperlink annotations for the specified Go file. 
-func goLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) ([]protocol.DocumentLink, error) { +func goLinks(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.DocumentLink, error) { - pgf, err := snapshot.ParseGo(ctx, fh, source.ParseFull) + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) if err != nil { return nil, err } @@ -116,35 +121,35 @@ func goLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle // If links are to pkg.go.dev, append module version suffixes. // This requires the import map from the package metadata. Ignore errors. - var depsByImpPath map[source.ImportPath]source.PackageID + var depsByImpPath map[golang.ImportPath]golang.PackageID if strings.ToLower(snapshot.Options().LinkTarget) == "pkg.go.dev" { - if meta, err := source.NarrowestMetadataForFile(ctx, snapshot, fh.URI()); err == nil { + if meta, err := golang.NarrowestMetadataForFile(ctx, snapshot, fh.URI()); err == nil { depsByImpPath = meta.DepsByImpPath } } for _, imp := range pgf.File.Imports { - importPath := source.UnquoteImportPath(imp) + importPath := metadata.UnquoteImportPath(imp) if importPath == "" { continue // bad import } // See golang/go#36998: don't link to modules matching GOPRIVATE. - if snapshot.View().IsGoPrivatePath(string(importPath)) { + if snapshot.IsGoPrivatePath(string(importPath)) { continue } urlPath := string(importPath) // For pkg.go.dev, append module version suffix to package import path. 
- if m := snapshot.Metadata(depsByImpPath[importPath]); m != nil && m.Module != nil && m.Module.Path != "" && m.Module.Version != "" { - urlPath = strings.Replace(urlPath, m.Module.Path, m.Module.Path+"@"+m.Module.Version, 1) + if mp := snapshot.Metadata(depsByImpPath[importPath]); mp != nil && mp.Module != nil && mp.Module.Path != "" && mp.Module.Version != "" { + urlPath = strings.Replace(urlPath, mp.Module.Path, mp.Module.Path+"@"+mp.Module.Version, 1) } start, end, err := safetoken.Offsets(pgf.Tok, imp.Path.Pos(), imp.Path.End()) if err != nil { return nil, err } - targetURL := source.BuildLink(snapshot.Options().LinkTarget, urlPath, "") + targetURL := cache.BuildLink(snapshot.Options().LinkTarget, urlPath, "") // Account for the quotation marks in the positions. l, err := toProtocolLink(pgf.Mapper, targetURL, start+len(`"`), end-len(`"`)) if err != nil { diff --git a/gopls/internal/lsp/prompt.go b/gopls/internal/server/prompt.go similarity index 91% rename from gopls/internal/lsp/prompt.go rename to gopls/internal/server/prompt.go index 976f7c6e09f..a1dba8d234d 100644 --- a/gopls/internal/lsp/prompt.go +++ b/gopls/internal/server/prompt.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package lsp +package server import ( "context" @@ -11,8 +11,8 @@ import ( "path/filepath" "time" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/telemetry" + "golang.org/x/telemetry" + "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/internal/event" ) @@ -35,7 +35,7 @@ const ( // getenv returns the effective environment variable value for the provided // key, looking up the key in the session environment before falling back on // the process environment. 
-func (s *Server) getenv(key string) string { +func (s *server) getenv(key string) string { if v, ok := s.Options().Env[key]; ok { return v } @@ -44,7 +44,7 @@ func (s *Server) getenv(key string) string { // configDir returns the root of the gopls configuration dir. By default this // is os.UserConfigDir/gopls, but it may be overridden for tests. -func (s *Server) configDir() (string, error) { +func (s *server) configDir() (string, error) { if d := s.getenv(GoplsConfigDirEnvvar); d != "" { return d, nil } @@ -57,12 +57,12 @@ func (s *Server) configDir() (string, error) { // telemetryMode returns the current effective telemetry mode. // By default this is x/telemetry.Mode(), but it may be overridden for tests. -func (s *Server) telemetryMode() string { +func (s *server) telemetryMode() string { if fake := s.getenv(FakeTelemetryModefileEnvvar); fake != "" { if data, err := os.ReadFile(fake); err == nil { return string(data) } - return "off" + return "local" } return telemetry.Mode() } @@ -70,7 +70,7 @@ func (s *Server) telemetryMode() string { // setTelemetryMode sets the current telemetry mode. // By default this calls x/telemetry.SetMode, but it may be overridden for // tests. -func (s *Server) setTelemetryMode(mode string) error { +func (s *server) setTelemetryMode(mode string) error { if fake := s.getenv(FakeTelemetryModefileEnvvar); fake != "" { return os.WriteFile(fake, []byte(mode), 0666) } @@ -85,7 +85,7 @@ func (s *Server) setTelemetryMode(mode string) error { // prompting. // If enabled is false, this will not prompt the user in any condition, // but will send work progress reports to help testing. 
-func (s *Server) maybePromptForTelemetry(ctx context.Context, enabled bool) { +func (s *server) maybePromptForTelemetry(ctx context.Context, enabled bool) { if s.Options().VerboseWorkDoneProgress { work := s.progress.Start(ctx, TelemetryPromptWorkTitle, "Checking if gopls should prompt about telemetry...", nil, nil) defer work.End(ctx, "Done.") @@ -95,8 +95,8 @@ func (s *Server) maybePromptForTelemetry(ctx context.Context, enabled bool) { return // prompt is disabled } - if s.telemetryMode() == "on" { - // Telemetry is already on -- nothing to ask about. + if s.telemetryMode() == "on" || s.telemetryMode() == "off" { + // Telemetry is already on or explicitly off -- nothing to ask about. return } @@ -191,12 +191,12 @@ func (s *Server) maybePromptForTelemetry(ctx context.Context, enabled bool) { return } - var prompt = `Go telemetry helps us improve Go by periodically sending anonymous metrics and crash reports to the Go team. Learn more at https://telemetry.go.dev/privacy. + var prompt = `Go telemetry helps us improve Go by periodically sending anonymous metrics and crash reports to the Go team. Learn more at https://go.dev/doc/telemetry. Would you like to enable Go telemetry? ` if s.Options().LinkifyShowMessage { - prompt = `Go telemetry helps us improve Go by periodically sending anonymous metrics and crash reports to the Go team. Learn more at [telemetry.go.dev/privacy](https://telemetry.go.dev/privacy). + prompt = `Go telemetry helps us improve Go by periodically sending anonymous metrics and crash reports to the Go team. Learn more at [go.dev/doc/telemetry](https://go.dev/doc/telemetry). Would you like to enable Go telemetry? ` @@ -219,11 +219,9 @@ Would you like to enable Go telemetry? 
} message := func(typ protocol.MessageType, msg string) { - if err := s.client.ShowMessage(ctx, &protocol.ShowMessageParams{ - Type: typ, - Message: msg, - }); err != nil { - errorf("ShowMessage(unrecognize) failed: %v", err) + if !showMessage(ctx, s.client, typ, msg) { + // Make sure we record that "telemetry prompt failed". + errorf("showMessage failed: %v", err) } } @@ -262,9 +260,9 @@ func telemetryOnMessage(linkify bool) string { To disable telemetry uploading, run %s. ` - var runCmd = "`go run golang.org/x/telemetry/cmd/gotelemetry@latest off`" + var runCmd = "`go run golang.org/x/telemetry/cmd/gotelemetry@latest local`" if linkify { - runCmd = "[gotelemetry off](https://golang.org/x/telemetry/cmd/gotelemetry)" + runCmd = "[gotelemetry local](https://golang.org/x/telemetry/cmd/gotelemetry)" } return fmt.Sprintf(format, runCmd) } diff --git a/gopls/internal/server/prompt_test.go b/gopls/internal/server/prompt_test.go new file mode 100644 index 00000000000..f4484cb6437 --- /dev/null +++ b/gopls/internal/server/prompt_test.go @@ -0,0 +1,82 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package server + +import ( + "path/filepath" + "sync" + "sync/atomic" + "testing" +) + +func TestAcquireFileLock(t *testing.T) { + name := filepath.Join(t.TempDir(), "config.json") + + const concurrency = 100 + var acquired int32 + var releasers [concurrency]func() + defer func() { + for _, r := range releasers { + if r != nil { + r() + } + } + }() + + var wg sync.WaitGroup + for i := range releasers { + i := i + wg.Add(1) + go func() { + defer wg.Done() + + release, ok, err := acquireLockFile(name) + if err != nil { + t.Errorf("Acquire failed: %v", err) + return + } + if ok { + atomic.AddInt32(&acquired, 1) + releasers[i] = release + } + }() + } + + wg.Wait() + + if acquired != 1 { + t.Errorf("Acquire succeeded %d times, expected exactly 1", acquired) + } +} + +func TestReleaseAndAcquireFileLock(t *testing.T) { + name := filepath.Join(t.TempDir(), "config.json") + + acquire := func() (func(), bool) { + t.Helper() + release, ok, err := acquireLockFile(name) + if err != nil { + t.Fatal(err) + } + return release, ok + } + + release, ok := acquire() + if !ok { + t.Fatal("failed to Acquire") + } + if release2, ok := acquire(); ok { + release() + release2() + t.Fatalf("Acquire succeeded unexpectedly") + } + + release() + release3, ok := acquire() + release3() + if !ok { + t.Fatalf("failed to Acquire") + } +} diff --git a/gopls/internal/server/references.go b/gopls/internal/server/references.go new file mode 100644 index 00000000000..cc02d6f16b6 --- /dev/null +++ b/gopls/internal/server/references.go @@ -0,0 +1,40 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package server + +import ( + "context" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/telemetry" + "golang.org/x/tools/gopls/internal/template" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) References(ctx context.Context, params *protocol.ReferenceParams) (_ []protocol.Location, rerr error) { + recordLatency := telemetry.StartLatencyTimer("references") + defer func() { + recordLatency(ctx, rerr) + }() + + ctx, done := event.Start(ctx, "lsp.Server.references", tag.URI.Of(params.TextDocument.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + switch snapshot.FileKind(fh) { + case file.Tmpl: + return template.References(ctx, snapshot, fh, params) + case file.Go: + return golang.References(ctx, snapshot, fh, params.Position, params.Context.IncludeDeclaration) + } + return nil, nil // empty result +} diff --git a/gopls/internal/server/rename.go b/gopls/internal/server/rename.go new file mode 100644 index 00000000000..946cf5092ec --- /dev/null +++ b/gopls/internal/server/rename.go @@ -0,0 +1,98 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package server + +import ( + "context" + "fmt" + "path/filepath" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) Rename(ctx context.Context, params *protocol.RenameParams) (*protocol.WorkspaceEdit, error) { + ctx, done := event.Start(ctx, "lsp.Server.rename", tag.URI.Of(params.TextDocument.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + + if kind := snapshot.FileKind(fh); kind != file.Go { + return nil, fmt.Errorf("cannot rename in file of type %s", kind) + } + + // Because we don't handle directory renaming within golang.Rename, golang.Rename returns + // boolean value isPkgRenaming to determine whether an DocumentChanges of type RenameFile should + // be added to the return protocol.WorkspaceEdit value. + edits, isPkgRenaming, err := golang.Rename(ctx, snapshot, fh, params.Position, params.NewName) + if err != nil { + return nil, err + } + + docChanges := []protocol.DocumentChanges{} // must be a slice + for uri, e := range edits { + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + docChanges = append(docChanges, documentChanges(fh, e)...) + } + if isPkgRenaming { + // Update the last component of the file's enclosing directory. + oldBase := filepath.Dir(fh.URI().Path()) + newURI := filepath.Join(filepath.Dir(oldBase), params.NewName) + docChanges = append(docChanges, protocol.DocumentChanges{ + RenameFile: &protocol.RenameFile{ + Kind: "rename", + OldURI: protocol.URIFromPath(oldBase), + NewURI: protocol.URIFromPath(newURI), + }, + }) + } + return &protocol.WorkspaceEdit{ + DocumentChanges: docChanges, + }, nil +} + +// PrepareRename implements the textDocument/prepareRename handler. 
It may +// return (nil, nil) if there is no rename at the cursor position, but it is +// not desirable to display an error to the user. +// +// TODO(rfindley): why wouldn't we want to show an error to the user, if the +// user initiated a rename request at the cursor? +func (s *server) PrepareRename(ctx context.Context, params *protocol.PrepareRenameParams) (*protocol.PrepareRenamePlaceholder, error) { + ctx, done := event.Start(ctx, "lsp.Server.prepareRename", tag.URI.Of(params.TextDocument.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + + if kind := snapshot.FileKind(fh); kind != file.Go { + return nil, fmt.Errorf("cannot rename in file of type %s", kind) + } + + // Do not return errors here, as it adds clutter. + // Returning a nil result means there is not a valid rename. + item, usererr, err := golang.PrepareRename(ctx, snapshot, fh, params.Position) + if err != nil { + // Return usererr here rather than err, to avoid cluttering the UI with + // internal error details. + return nil, usererr + } + return &protocol.PrepareRenamePlaceholder{ + Range: item.Range, + Placeholder: item.Text, + }, nil +} diff --git a/gopls/internal/lsp/selection_range.go b/gopls/internal/server/selection_range.go similarity index 77% rename from gopls/internal/lsp/selection_range.go rename to gopls/internal/server/selection_range.go index 1cfc0f2d8c1..042812217f3 100644 --- a/gopls/internal/lsp/selection_range.go +++ b/gopls/internal/server/selection_range.go @@ -2,14 +2,16 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-package lsp +package server import ( "context" + "fmt" "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/internal/event" ) @@ -24,17 +26,21 @@ import ( // for multiple cursors, and the entire path up to the whole document is // returned for each cursor to avoid multiple round-trips when the user is // likely to issue this command multiple times in quick succession. -func (s *Server) selectionRange(ctx context.Context, params *protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) { +func (s *server) SelectionRange(ctx context.Context, params *protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) { ctx, done := event.Start(ctx, "lsp.Server.selectionRange") defer done() - snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.UnknownKind) - defer release() - if !ok { + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { return nil, err } + defer release() + + if kind := snapshot.FileKind(fh); kind != file.Go { + return nil, fmt.Errorf("SelectionRange not supported for file of type %s", kind) + } - pgf, err := snapshot.ParseGo(ctx, fh, source.ParseFull) + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) if err != nil { return nil, err } diff --git a/gopls/internal/server/semantic.go b/gopls/internal/server/semantic.go new file mode 100644 index 00000000000..646f9b3d729 --- /dev/null +++ b/gopls/internal/server/semantic.go @@ -0,0 +1,53 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package server + +import ( + "context" + "fmt" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/template" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) SemanticTokensFull(ctx context.Context, params *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) { + return s.semanticTokens(ctx, params.TextDocument, nil) +} + +func (s *server) SemanticTokensRange(ctx context.Context, params *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) { + return s.semanticTokens(ctx, params.TextDocument, ¶ms.Range) +} + +func (s *server) semanticTokens(ctx context.Context, td protocol.TextDocumentIdentifier, rng *protocol.Range) (*protocol.SemanticTokens, error) { + ctx, done := event.Start(ctx, "lsp.Server.semanticTokens", tag.URI.Of(td.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, td.URI) + if err != nil { + return nil, err + } + defer release() + if !snapshot.Options().SemanticTokens { + // return an error, so if the option changes + // the client won't remember the wrong answer + return nil, fmt.Errorf("semantictokens are disabled") + } + + switch snapshot.FileKind(fh) { + case file.Tmpl: + return template.SemanticTokens(ctx, snapshot, fh.URI()) + + case file.Go: + return golang.SemanticTokens(ctx, snapshot, fh, rng) + + default: + // TODO(adonovan): should return an error! + return nil, nil // empty result + } +} diff --git a/gopls/internal/server/server.go b/gopls/internal/server/server.go new file mode 100644 index 00000000000..ae670e7d143 --- /dev/null +++ b/gopls/internal/server/server.go @@ -0,0 +1,398 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Package server defines gopls' implementation of the LSP server +// interface, [protocol.Server]. Call [New] to create an instance. +package server + +import ( + "context" + "crypto/rand" + "embed" + "encoding/base64" + "fmt" + "log" + "net" + "net/http" + "net/url" + "os" + paths "path" + "strconv" + "strings" + "sync" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/progress" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/internal/event" +) + +// New creates an LSP server and binds it to handle incoming client +// messages on the supplied stream. +func New(session *cache.Session, client protocol.ClientCloser, options *settings.Options) protocol.Server { + const concurrentAnalyses = 1 + // If this assignment fails to compile after a protocol + // upgrade, it means that one or more new methods need new + // stub declarations in unimplemented.go. 
+ return &server{ + diagnostics: make(map[protocol.DocumentURI]*fileDiagnostics), + watchedGlobPatterns: nil, // empty + changedFiles: make(map[protocol.DocumentURI]unit), + session: session, + client: client, + diagnosticsSema: make(chan unit, concurrentAnalyses), + progress: progress.NewTracker(client), + options: options, + viewsToDiagnose: make(map[*cache.View]uint64), + } +} + +type serverState int + +const ( + serverCreated = serverState(iota) + serverInitializing // set once the server has received "initialize" request + serverInitialized // set once the server has received "initialized" request + serverShutDown +) + +func (s serverState) String() string { + switch s { + case serverCreated: + return "created" + case serverInitializing: + return "initializing" + case serverInitialized: + return "initialized" + case serverShutDown: + return "shutDown" + } + return fmt.Sprintf("(unknown state: %d)", int(s)) +} + +// server implements the protocol.server interface. +type server struct { + client protocol.ClientCloser + + stateMu sync.Mutex + state serverState + // notifications generated before serverInitialized + notifications []*protocol.ShowMessageParams + + session *cache.Session + + tempDir string + + // changedFiles tracks files for which there has been a textDocument/didChange. + changedFilesMu sync.Mutex + changedFiles map[protocol.DocumentURI]unit + + // folders is only valid between initialize and initialized, and holds the + // set of folders to build views for when we are ready. + // Each has a valid, non-empty 'file'-scheme URI. + pendingFolders []protocol.WorkspaceFolder + + // watchedGlobPatterns is the set of glob patterns that we have requested + // the client watch on disk. It will be updated as the set of directories + // that the server should watch changes. + // The map field may be reassigned but the map is immutable. 
+ watchedGlobPatternsMu sync.Mutex + watchedGlobPatterns map[protocol.RelativePattern]unit + watchRegistrationCount int + + diagnosticsMu sync.Mutex + diagnostics map[protocol.DocumentURI]*fileDiagnostics + + // diagnosticsSema limits the concurrency of diagnostics runs, which can be + // expensive. + diagnosticsSema chan unit + + progress *progress.Tracker + + // When the workspace fails to load, we show its status through a progress + // report with an error message. + criticalErrorStatusMu sync.Mutex + criticalErrorStatus *progress.WorkDone + + // Track an ongoing CPU profile created with the StartProfile command and + // terminated with the StopProfile command. + ongoingProfileMu sync.Mutex + ongoingProfile *os.File // if non-nil, an ongoing profile is writing to this file + + // Track most recently requested options. + optionsMu sync.Mutex + options *settings.Options + + // Track the most recent completion results, for measuring completion efficacy + efficacyMu sync.Mutex + efficacyURI protocol.DocumentURI + efficacyVersion int32 + efficacyItems []protocol.CompletionItem + efficacyPos protocol.Position + + // Web server (for package documentation, etc) associated with this + // LSP server. Opened on demand, and closed during LSP Shutdown. + webOnce sync.Once + web *web + webErr error + + // # Modification tracking and diagnostics + // + // For the purpose of tracking diagnostics, we need a monotonically + // increasing clock. Each time a change occurs on the server, this clock is + // incremented and the previous diagnostics pass is cancelled. When the + // change is processed, the Session (via DidModifyFiles) determines which + // Views are affected by the change and these views are added to the + // viewsToDiagnose set. Then the server calls diagnoseChangedViews + // in a separate goroutine. 
Any Views that successfully complete their + // diagnostics are removed from the viewsToDiagnose set, provided they haven't + // been subsequently marked for re-diagnosis (as determined by the latest + // modificationID referenced by viewsToDiagnose). + // + // In this way, we enforce eventual completeness of the diagnostic set: any + // views requiring diagnosis are diagnosed, though possibly at a later point + // in time. Notably, the logic in Session.DidModifyFiles that determines if a + // view needs diagnosis considers whether any packages in the view were + // invalidated. Consider the following sequence of snapshots for a given view + // V: + // + // C1 C2 + // S1 -> S2 -> S3 + // + // In this case, suppose that S1 was fully type checked, and then two changes + // C1 and C2 occur in rapid succession, to a file in their package graph but + // perhaps not enclosed by V's root. In this case, the logic of + // DidModifyFiles will detect that V needs to be reloaded following C1. In + // order for our eventual consistency to be sound, we need to avoid the race + // where S2 is being diagnosed, C2 arrives, and S3 is not detected as needing + // diagnosis because the relevant package has not yet been computed in S2. To + // achieve this, we only remove V from viewsToDiagnose if the diagnosis of S2 + // completes before C2 is processed, which we can confirm by checking + // S2.BackgroundContext(). + modificationMu sync.Mutex + cancelPrevDiagnostics func() + viewsToDiagnose map[*cache.View]uint64 // View -> modification at which it last required diagnosis + lastModificationID uint64 // incrementing clock +} + +func (s *server) WorkDoneProgressCancel(ctx context.Context, params *protocol.WorkDoneProgressCancelParams) error { + ctx, done := event.Start(ctx, "lsp.Server.workDoneProgressCancel") + defer done() + + return s.progress.Cancel(params.Token) +} + +// web encapsulates the web server associated with an LSP server. 
+// It is used for package documentation and other queries +// where HTML makes more sense than a client editor UI. +// +// Example URL: +// +// http://127.0.0.1:PORT/gopls/SECRET/... +// +// where +// - PORT is the random port number; +// - "gopls" helps the reader guess which program is the server; +// - SECRET is the 64-bit token; and +// - ... is the material part of the endpoint. +// +// Valid endpoints: +// +// open?file=%s&line=%d&col=%d - open a file +// pkg/PKGPATH?view=%s - show doc for package in a given view +type web struct { + server *http.Server + addr url.URL // "http://127.0.0.1:PORT/gopls/SECRET" + mux *http.ServeMux +} + +// getWeb returns the web server associated with this +// LSP server, creating it on first request. +func (s *server) getWeb() (*web, error) { + s.webOnce.Do(func() { + s.web, s.webErr = s.initWeb() + }) + return s.web, s.webErr +} + +// initWeb starts the local web server through which gopls +// serves package documentation and suchlike. +// +// Clients should use [getWeb]. +func (s *server) initWeb() (*web, error) { + // Use 64 random bits as the base of the URL namespace. + // This ensures that URLs are unguessable to any local + // processes that connect to the server, preventing + // exfiltration of source code. + // + // (Note: depending on the LSP client, URLs that are passed to + // it via showDocument and that result in the opening of a + // browser tab may be transiently published through the argv + // array of the open(1) or xdg-open(1) command.) + token := make([]byte, 8) + if _, err := rand.Read(token); err != nil { + return nil, fmt.Errorf("generating secret token: %v", err) + } + + // Pick any free port. + listener, err := net.Listen("tcp", "127.0.0.1:0") + if err != nil { + return nil, err + } + + // -- There should be no early returns after this point. -- + + // The root mux is not authenticated. 
+ rootMux := http.NewServeMux() + rootMux.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) { + http.Error(w, "request URI lacks authentication segment", http.StatusUnauthorized) + }) + rootMux.HandleFunc("/favicon.ico", func(w http.ResponseWriter, req *http.Request) { + http.Redirect(w, req, "/assets/favicon.ico", http.StatusMovedPermanently) + }) + rootMux.HandleFunc("/hang", func(w http.ResponseWriter, req *http.Request) { + // This endpoint hangs until cancelled. + // It is used by JS to detect server disconnect. + <-req.Context().Done() + }) + rootMux.Handle("/assets/", http.FileServer(http.FS(assets))) + + secret := "/gopls/" + base64.RawURLEncoding.EncodeToString(token) + webMux := http.NewServeMux() + rootMux.Handle(secret+"/", http.StripPrefix(secret, webMux)) + + webServer := &http.Server{Addr: listener.Addr().String(), Handler: rootMux} + go func() { + // This should run until LSP Shutdown, at which point + // it will return ErrServerClosed. Any other error + // means it failed to start. + if err := webServer.Serve(listener); err != nil { + if err != http.ErrServerClosed { + log.Print(err) + } + } + }() + + web := &web{ + server: webServer, + addr: url.URL{Scheme: "http", Host: webServer.Addr, Path: secret}, + mux: webMux, + } + + // The /open handler allows the browser to request that the + // LSP client editor open a file; see web.urlToOpen. 
+ webMux.HandleFunc("/open", func(w http.ResponseWriter, req *http.Request) { + if err := req.ParseForm(); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + uri := protocol.URIFromPath(req.Form.Get("file")) + line, _ := strconv.Atoi(req.Form.Get("line")) // 1-based + col, _ := strconv.Atoi(req.Form.Get("col")) // 1-based UTF-8 + posn := protocol.Position{ + Line: uint32(line - 1), + Character: uint32(col - 1), // TODO(adonovan): map to UTF-16 + } + openClientEditor(req.Context(), s.client, protocol.Location{ + URI: uri, + Range: protocol.Range{Start: posn, End: posn}, + }) + }) + + // The /pkg/PATH&view=... handler shows package documentation for PATH. + webMux.Handle("/pkg/", http.StripPrefix("/pkg/", http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { + ctx := req.Context() + if err := req.ParseForm(); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + // Get snapshot of specified view. + view, err := s.session.View(req.Form.Get("view")) + if err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + snapshot, release, err := view.Snapshot() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer release() + + // Find package by path. + var found *metadata.Package + for _, mp := range snapshot.MetadataGraph().Packages { + if string(mp.PkgPath) == req.URL.Path && mp.ForTest == "" { + found = mp + break + } + } + if found == nil { + // TODO(adonovan): what should we do for external test packages? + http.Error(w, "package not found", http.StatusNotFound) + return + } + + // Type-check the package and render its documentation. 
+ pkgs, err := snapshot.TypeCheck(ctx, found.ID) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + pkgURL := func(path golang.PackagePath, fragment string) protocol.URI { + return web.pkgURL(view, path, fragment) + } + content, err := golang.RenderPackageDoc(pkgs[0], web.openURL, pkgURL) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + w.Write(content) + }))) + + return web, nil +} + +// assets holds our static web server content. +// +//go:embed assets/* +var assets embed.FS + +// openURL returns an /open URL that, when visited, causes the client +// editor to open the specified file/line/column (in 1-based UTF-8 +// coordinates). +// +// (Rendering may generate hundreds of positions across files of many +// packages, so don't convert to LSP coordinates yet: wait until the +// URL is opened.) +func (w *web) openURL(filename string, line, col8 int) protocol.URI { + return w.url( + "open", + fmt.Sprintf("file=%s&line=%d&col=%d", url.QueryEscape(filename), line, col8), + "") +} + +// pkgURL returns a /pkg URL for the documentation of the specified package. +// The optional fragment must be of the form "Println" or "Buffer.WriteString". +func (w *web) pkgURL(v *cache.View, path golang.PackagePath, fragment string) protocol.URI { + return w.url( + "pkg/"+string(path), + "view="+url.QueryEscape(v.ID()), + fragment) +} + +// url returns a URL by joining a relative path, an (encoded) query, +// and an (unencoded) fragment onto the authenticated base URL of the +// web server. 
+func (w *web) url(https://github.com/path,%20query,%20fragment%20string) protocol.URI { + url2 := w.addr + url2.Path = paths.Join(url2.Path, strings.TrimPrefix(path, "/")) + url2.RawQuery = query + url2.Fragment = fragment + return protocol.URI(url2.String()) +} diff --git a/gopls/internal/server/signature_help.go b/gopls/internal/server/signature_help.go new file mode 100644 index 00000000000..a10aa56d848 --- /dev/null +++ b/gopls/internal/server/signature_help.go @@ -0,0 +1,40 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +import ( + "context" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) SignatureHelp(ctx context.Context, params *protocol.SignatureHelpParams) (*protocol.SignatureHelp, error) { + ctx, done := event.Start(ctx, "lsp.Server.signatureHelp", tag.URI.Of(params.TextDocument.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + + if snapshot.FileKind(fh) != file.Go { + return nil, nil // empty result + } + + info, activeParameter, err := golang.SignatureHelp(ctx, snapshot, fh, params.Position) + if err != nil { + event.Error(ctx, "no signature help", err, tag.Position.Of(params.Position)) + return nil, nil // sic? There could be many reasons for failure. 
+ } + return &protocol.SignatureHelp{ + Signatures: []protocol.SignatureInformation{*info}, + ActiveParameter: uint32(activeParameter), + }, nil +} diff --git a/gopls/internal/server/symbols.go b/gopls/internal/server/symbols.go new file mode 100644 index 00000000000..3442318b352 --- /dev/null +++ b/gopls/internal/server/symbols.go @@ -0,0 +1,62 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +import ( + "context" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/template" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" +) + +func (s *server) DocumentSymbol(ctx context.Context, params *protocol.DocumentSymbolParams) ([]any, error) { + ctx, done := event.Start(ctx, "lsp.Server.documentSymbol", tag.URI.Of(params.TextDocument.URI)) + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + + var docSymbols []protocol.DocumentSymbol + switch snapshot.FileKind(fh) { + case file.Tmpl: + docSymbols, err = template.DocumentSymbols(snapshot, fh) + case file.Go: + docSymbols, err = golang.DocumentSymbols(ctx, snapshot, fh) + default: + return nil, nil // empty result + } + if err != nil { + event.Error(ctx, "DocumentSymbols failed", err) + return nil, nil // empty result + } + // Convert the symbols to an interface array. + // TODO: Remove this once the lsp deprecates SymbolInformation. + symbols := make([]any, len(docSymbols)) + for i, s := range docSymbols { + if snapshot.Options().HierarchicalDocumentSymbolSupport { + symbols[i] = s + continue + } + // If the client does not support hierarchical document symbols, then + // we need to be backwards compatible for now and return SymbolInformation. 
+ symbols[i] = protocol.SymbolInformation{ + Name: s.Name, + Kind: s.Kind, + Deprecated: s.Deprecated, + Location: protocol.Location{ + URI: params.TextDocument.URI, + Range: s.Range, + }, + } + } + return symbols, nil +} diff --git a/gopls/internal/server/text_synchronization.go b/gopls/internal/server/text_synchronization.go new file mode 100644 index 00000000000..242dd7da553 --- /dev/null +++ b/gopls/internal/server/text_synchronization.go @@ -0,0 +1,409 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +import ( + "bytes" + "context" + "errors" + "fmt" + "path/filepath" + "strings" + "sync" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/tag" + "golang.org/x/tools/internal/jsonrpc2" + "golang.org/x/tools/internal/xcontext" +) + +// ModificationSource identifies the origin of a change. +type ModificationSource int + +const ( + // FromDidOpen is from a didOpen notification. + FromDidOpen = ModificationSource(iota) + + // FromDidChange is from a didChange notification. + FromDidChange + + // FromDidChangeWatchedFiles is from didChangeWatchedFiles notification. + FromDidChangeWatchedFiles + + // FromDidSave is from a didSave notification. + FromDidSave + + // FromDidClose is from a didClose notification. + FromDidClose + + // FromDidChangeConfiguration is from a didChangeConfiguration notification. + FromDidChangeConfiguration + + // FromRegenerateCgo refers to file modifications caused by regenerating + // the cgo sources for the workspace. + FromRegenerateCgo + + // FromInitialWorkspaceLoad refers to the loading of all packages in the + // workspace when the view is first created. 
+ FromInitialWorkspaceLoad + + // FromCheckUpgrades refers to state changes resulting from the CheckUpgrades + // command, which queries module upgrades. + FromCheckUpgrades + + // FromResetGoModDiagnostics refers to state changes resulting from the + // ResetGoModDiagnostics command. + FromResetGoModDiagnostics + + // FromToggleGCDetails refers to state changes resulting from toggling + // gc_details on or off for a package. + FromToggleGCDetails +) + +func (m ModificationSource) String() string { + switch m { + case FromDidOpen: + return "opened files" + case FromDidChange: + return "changed files" + case FromDidChangeWatchedFiles: + return "files changed on disk" + case FromDidSave: + return "saved files" + case FromDidClose: + return "close files" + case FromRegenerateCgo: + return "regenerate cgo" + case FromInitialWorkspaceLoad: + return "initial workspace load" + case FromCheckUpgrades: + return "from check upgrades" + case FromResetGoModDiagnostics: + return "from resetting go.mod diagnostics" + default: + return "unknown file modification" + } +} + +func (s *server) DidOpen(ctx context.Context, params *protocol.DidOpenTextDocumentParams) error { + ctx, done := event.Start(ctx, "lsp.Server.didOpen", tag.URI.Of(params.TextDocument.URI)) + defer done() + + uri := params.TextDocument.URI + // There may not be any matching view in the current session. If that's + // the case, try creating a new view based on the opened file path. + // + // TODO(golang/go#57979): revisit creating a folder here. We should separate + // the logic for managing folders from the logic for managing views. But it + // does make sense to ensure at least one workspace folder the first time a + // file is opened, and we can't do that inside didModifyFiles because we + // don't want to request configuration while holding a lock. 
+ if len(s.session.Views()) == 0 { + dir := filepath.Dir(uri.Path()) + s.addFolders(ctx, []protocol.WorkspaceFolder{{ + URI: string(protocol.URIFromPath(dir)), + Name: filepath.Base(dir), + }}) + } + return s.didModifyFiles(ctx, []file.Modification{{ + URI: uri, + Action: file.Open, + Version: params.TextDocument.Version, + Text: []byte(params.TextDocument.Text), + LanguageID: params.TextDocument.LanguageID, + }}, FromDidOpen) +} + +func (s *server) DidChange(ctx context.Context, params *protocol.DidChangeTextDocumentParams) error { + ctx, done := event.Start(ctx, "lsp.Server.didChange", tag.URI.Of(params.TextDocument.URI)) + defer done() + + uri := params.TextDocument.URI + text, err := s.changedText(ctx, uri, params.ContentChanges) + if err != nil { + return err + } + c := file.Modification{ + URI: uri, + Action: file.Change, + Version: params.TextDocument.Version, + Text: text, + } + if err := s.didModifyFiles(ctx, []file.Modification{c}, FromDidChange); err != nil { + return err + } + return s.warnAboutModifyingGeneratedFiles(ctx, uri) +} + +// warnAboutModifyingGeneratedFiles shows a warning if a user tries to edit a +// generated file for the first time. +func (s *server) warnAboutModifyingGeneratedFiles(ctx context.Context, uri protocol.DocumentURI) error { + s.changedFilesMu.Lock() + _, ok := s.changedFiles[uri] + if !ok { + s.changedFiles[uri] = struct{}{} + } + s.changedFilesMu.Unlock() + + // This file has already been edited before. + if ok { + return nil + } + + // Ideally, we should be able to specify that a generated file should + // be opened as read-only. Tell the user that they should not be + // editing a generated file. + snapshot, release, err := s.session.SnapshotOf(ctx, uri) + if err != nil { + return err + } + isGenerated := golang.IsGenerated(ctx, snapshot, uri) + release() + + if isGenerated { + msg := fmt.Sprintf("Do not edit this file! 
%s is a generated file.", uri.Path()) + showMessage(ctx, s.client, protocol.Warning, msg) + } + return nil +} + +func (s *server) DidChangeWatchedFiles(ctx context.Context, params *protocol.DidChangeWatchedFilesParams) error { + ctx, done := event.Start(ctx, "lsp.Server.didChangeWatchedFiles") + defer done() + + var modifications []file.Modification + for _, change := range params.Changes { + action := changeTypeToFileAction(change.Type) + modifications = append(modifications, file.Modification{ + URI: change.URI, + Action: action, + OnDisk: true, + }) + } + return s.didModifyFiles(ctx, modifications, FromDidChangeWatchedFiles) +} + +func (s *server) DidSave(ctx context.Context, params *protocol.DidSaveTextDocumentParams) error { + ctx, done := event.Start(ctx, "lsp.Server.didSave", tag.URI.Of(params.TextDocument.URI)) + defer done() + + c := file.Modification{ + URI: params.TextDocument.URI, + Action: file.Save, + } + if params.Text != nil { + c.Text = []byte(*params.Text) + } + return s.didModifyFiles(ctx, []file.Modification{c}, FromDidSave) +} + +func (s *server) DidClose(ctx context.Context, params *protocol.DidCloseTextDocumentParams) error { + ctx, done := event.Start(ctx, "lsp.Server.didClose", tag.URI.Of(params.TextDocument.URI)) + defer done() + + return s.didModifyFiles(ctx, []file.Modification{ + { + URI: params.TextDocument.URI, + Action: file.Close, + Version: -1, + Text: nil, + }, + }, FromDidClose) +} + +func (s *server) didModifyFiles(ctx context.Context, modifications []file.Modification, cause ModificationSource) error { + // wg guards two conditions: + // 1. didModifyFiles is complete + // 2. 
the goroutine diagnosing changes on behalf of didModifyFiles is + // complete, if it was started + // + // Both conditions must be satisfied for the purpose of testing: we don't + // want to observe the completion of change processing until we have received + // all diagnostics as well as all server->client notifications done on behalf + // of this function. + var wg sync.WaitGroup + wg.Add(1) + defer wg.Done() + + if s.Options().VerboseWorkDoneProgress { + work := s.progress.Start(ctx, DiagnosticWorkTitle(cause), "Calculating file diagnostics...", nil, nil) + go func() { + wg.Wait() + work.End(ctx, "Done.") + }() + } + + s.stateMu.Lock() + if s.state >= serverShutDown { + // This state check does not prevent races below, and exists only to + // produce a better error message. The actual race to the cache should be + // guarded by Session.viewMu. + s.stateMu.Unlock() + return errors.New("server is shut down") + } + s.stateMu.Unlock() + + // If the set of changes included directories, expand those directories + // to their files. + modifications = s.session.ExpandModificationsToDirectories(ctx, modifications) + + viewsToDiagnose, err := s.session.DidModifyFiles(ctx, modifications) + if err != nil { + return err + } + + // golang/go#50267: diagnostics should be re-sent after each change. + for _, mod := range modifications { + s.mustPublishDiagnostics(mod.URI) + } + + modCtx, modID := s.needsDiagnosis(ctx, viewsToDiagnose) + + wg.Add(1) + go func() { + s.diagnoseChangedViews(modCtx, modID, viewsToDiagnose, cause) + wg.Done() + }() + + // After any file modifications, we need to update our watched files, + // in case something changed. Compute the new set of directories to watch, + // and if it differs from the current set, send updated registrations. + return s.updateWatchedDirectories(ctx) +} + +// needsDiagnosis records the given views as needing diagnosis, returning the +// context and modification id to use for said diagnosis. 
+// +// Only the keys of viewsToDiagnose are used; the changed files are irrelevant. +func (s *server) needsDiagnosis(ctx context.Context, viewsToDiagnose map[*cache.View][]protocol.DocumentURI) (context.Context, uint64) { + s.modificationMu.Lock() + defer s.modificationMu.Unlock() + if s.cancelPrevDiagnostics != nil { + s.cancelPrevDiagnostics() + } + modCtx := xcontext.Detach(ctx) + modCtx, s.cancelPrevDiagnostics = context.WithCancel(modCtx) + s.lastModificationID++ + modID := s.lastModificationID + + for v := range viewsToDiagnose { + if needs, ok := s.viewsToDiagnose[v]; !ok || needs < modID { + s.viewsToDiagnose[v] = modID + } + } + return modCtx, modID +} + +// DiagnosticWorkTitle returns the title of the diagnostic work resulting from a +// file change originating from the given cause. +func DiagnosticWorkTitle(cause ModificationSource) string { + return fmt.Sprintf("diagnosing %v", cause) +} + +func (s *server) changedText(ctx context.Context, uri protocol.DocumentURI, changes []protocol.TextDocumentContentChangeEvent) ([]byte, error) { + if len(changes) == 0 { + return nil, fmt.Errorf("%w: no content changes provided", jsonrpc2.ErrInternal) + } + + // Check if the client sent the full content of the file. + // We accept a full content change even if the server expected incremental changes. 
+ if len(changes) == 1 && changes[0].Range == nil && changes[0].RangeLength == 0 { + changeFull.Inc() + return []byte(changes[0].Text), nil + } + return s.applyIncrementalChanges(ctx, uri, changes) +} + +func (s *server) applyIncrementalChanges(ctx context.Context, uri protocol.DocumentURI, changes []protocol.TextDocumentContentChangeEvent) ([]byte, error) { + fh, err := s.session.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + content, err := fh.Content() + if err != nil { + return nil, fmt.Errorf("%w: file not found (%v)", jsonrpc2.ErrInternal, err) + } + for i, change := range changes { + // TODO(adonovan): refactor to use diff.Apply, which is robust w.r.t. + // out-of-order or overlapping changes---and much more efficient. + + // Make sure to update mapper along with the content. + m := protocol.NewMapper(uri, content) + if change.Range == nil { + return nil, fmt.Errorf("%w: unexpected nil range for change", jsonrpc2.ErrInternal) + } + start, end, err := m.RangeOffsets(*change.Range) + if err != nil { + return nil, err + } + if end < start { + return nil, fmt.Errorf("%w: invalid range for content change", jsonrpc2.ErrInternal) + } + var buf bytes.Buffer + buf.Write(content[:start]) + buf.WriteString(change.Text) + buf.Write(content[end:]) + content = buf.Bytes() + if i == 0 { // only look at the first change if there are several + // TODO(pjw): understand multi-change + s.checkEfficacy(fh.URI(), fh.Version(), change) + } + } + return content, nil +} + +// increment counters if any of the completions look like they were used +func (s *server) checkEfficacy(uri protocol.DocumentURI, version int32, change protocol.TextDocumentContentChangePartial) { + s.efficacyMu.Lock() + defer s.efficacyMu.Unlock() + if s.efficacyURI != uri { + return + } + // gopls increments the version, the test client does not + if version != s.efficacyVersion && version != s.efficacyVersion+1 { + return + } + // does any change at pos match a proposed completion item? 
+ for _, item := range s.efficacyItems { + if item.TextEdit == nil { + continue + } + if item.TextEdit.Range.Start == change.Range.Start { + // the change and the proposed completion start at the same position + if change.RangeLength == 0 && len(change.Text) == 1 { + // a single added character does not count as a completion + continue + } + ix := strings.Index(item.TextEdit.NewText, "$") + if ix < 0 && strings.HasPrefix(change.Text, item.TextEdit.NewText) { + // not a snippet, suggested completion is a prefix of the change + complUsed.Inc() + return + } + if ix > 1 && strings.HasPrefix(change.Text, item.TextEdit.NewText[:ix]) { + // a snippet, suggested completion up to $ marker is a prefix of the change + complUsed.Inc() + return + } + } + } + complUnused.Inc() +} + +func changeTypeToFileAction(ct protocol.FileChangeType) file.Action { + switch ct { + case protocol.Changed: + return file.Change + case protocol.Created: + return file.Create + case protocol.Deleted: + return file.Delete + } + return file.UnknownAction +} diff --git a/gopls/internal/server/unimplemented.go b/gopls/internal/server/unimplemented.go new file mode 100644 index 00000000000..c293ee167a7 --- /dev/null +++ b/gopls/internal/server/unimplemented.go @@ -0,0 +1,159 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +// This file defines the LSP server methods that gopls does not currently implement. 
+ +import ( + "context" + "fmt" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/jsonrpc2" +) + +func (s *server) ColorPresentation(context.Context, *protocol.ColorPresentationParams) ([]protocol.ColorPresentation, error) { + return nil, notImplemented("ColorPresentation") +} + +func (s *server) Declaration(context.Context, *protocol.DeclarationParams) (*protocol.Or_textDocument_declaration, error) { + return nil, notImplemented("Declaration") +} + +func (s *server) Diagnostic(context.Context, *string) (*string, error) { + return nil, notImplemented("Diagnostic") +} + +func (s *server) DiagnosticWorkspace(context.Context, *protocol.WorkspaceDiagnosticParams) (*protocol.WorkspaceDiagnosticReport, error) { + return nil, notImplemented("DiagnosticWorkspace") +} + +func (s *server) DidChangeNotebookDocument(context.Context, *protocol.DidChangeNotebookDocumentParams) error { + return notImplemented("DidChangeNotebookDocument") +} + +func (s *server) DidCloseNotebookDocument(context.Context, *protocol.DidCloseNotebookDocumentParams) error { + return notImplemented("DidCloseNotebookDocument") +} + +func (s *server) DidCreateFiles(context.Context, *protocol.CreateFilesParams) error { + return notImplemented("DidCreateFiles") +} + +func (s *server) DidDeleteFiles(context.Context, *protocol.DeleteFilesParams) error { + return notImplemented("DidDeleteFiles") +} + +func (s *server) DidOpenNotebookDocument(context.Context, *protocol.DidOpenNotebookDocumentParams) error { + return notImplemented("DidOpenNotebookDocument") +} + +func (s *server) DidRenameFiles(context.Context, *protocol.RenameFilesParams) error { + return notImplemented("DidRenameFiles") +} + +func (s *server) DidSaveNotebookDocument(context.Context, *protocol.DidSaveNotebookDocumentParams) error { + return notImplemented("DidSaveNotebookDocument") +} + +func (s *server) DocumentColor(context.Context, *protocol.DocumentColorParams) ([]protocol.ColorInformation, error) { + return 
nil, notImplemented("DocumentColor") +} + +func (s *server) InlineCompletion(context.Context, *protocol.InlineCompletionParams) (*protocol.Or_Result_textDocument_inlineCompletion, error) { + return nil, notImplemented("InlineCompletion") +} + +func (s *server) InlineValue(context.Context, *protocol.InlineValueParams) ([]protocol.InlineValue, error) { + return nil, notImplemented("InlineValue") +} + +func (s *server) LinkedEditingRange(context.Context, *protocol.LinkedEditingRangeParams) (*protocol.LinkedEditingRanges, error) { + return nil, notImplemented("LinkedEditingRange") +} + +func (s *server) Moniker(context.Context, *protocol.MonikerParams) ([]protocol.Moniker, error) { + return nil, notImplemented("Moniker") +} + +func (s *server) OnTypeFormatting(context.Context, *protocol.DocumentOnTypeFormattingParams) ([]protocol.TextEdit, error) { + return nil, notImplemented("OnTypeFormatting") +} + +func (s *server) PrepareTypeHierarchy(context.Context, *protocol.TypeHierarchyPrepareParams) ([]protocol.TypeHierarchyItem, error) { + return nil, notImplemented("PrepareTypeHierarchy") +} + +func (s *server) Progress(context.Context, *protocol.ProgressParams) error { + return notImplemented("Progress") +} + +func (s *server) RangeFormatting(context.Context, *protocol.DocumentRangeFormattingParams) ([]protocol.TextEdit, error) { + return nil, notImplemented("RangeFormatting") +} + +func (s *server) RangesFormatting(context.Context, *protocol.DocumentRangesFormattingParams) ([]protocol.TextEdit, error) { + return nil, notImplemented("RangesFormatting") +} + +func (s *server) Resolve(context.Context, *protocol.InlayHint) (*protocol.InlayHint, error) { + return nil, notImplemented("Resolve") +} + +func (s *server) ResolveCodeLens(context.Context, *protocol.CodeLens) (*protocol.CodeLens, error) { + return nil, notImplemented("ResolveCodeLens") +} + +func (s *server) ResolveCompletionItem(context.Context, *protocol.CompletionItem) (*protocol.CompletionItem, error) { + return 
nil, notImplemented("ResolveCompletionItem") +} + +func (s *server) ResolveDocumentLink(context.Context, *protocol.DocumentLink) (*protocol.DocumentLink, error) { + return nil, notImplemented("ResolveDocumentLink") +} + +func (s *server) ResolveWorkspaceSymbol(context.Context, *protocol.WorkspaceSymbol) (*protocol.WorkspaceSymbol, error) { + return nil, notImplemented("ResolveWorkspaceSymbol") +} + +func (s *server) SemanticTokensFullDelta(context.Context, *protocol.SemanticTokensDeltaParams) (interface{}, error) { + return nil, notImplemented("SemanticTokensFullDelta") +} + +func (s *server) SetTrace(context.Context, *protocol.SetTraceParams) error { + return notImplemented("SetTrace") +} + +func (s *server) Subtypes(context.Context, *protocol.TypeHierarchySubtypesParams) ([]protocol.TypeHierarchyItem, error) { + return nil, notImplemented("Subtypes") +} + +func (s *server) Supertypes(context.Context, *protocol.TypeHierarchySupertypesParams) ([]protocol.TypeHierarchyItem, error) { + return nil, notImplemented("Supertypes") +} + +func (s *server) WillCreateFiles(context.Context, *protocol.CreateFilesParams) (*protocol.WorkspaceEdit, error) { + return nil, notImplemented("WillCreateFiles") +} + +func (s *server) WillDeleteFiles(context.Context, *protocol.DeleteFilesParams) (*protocol.WorkspaceEdit, error) { + return nil, notImplemented("WillDeleteFiles") +} + +func (s *server) WillRenameFiles(context.Context, *protocol.RenameFilesParams) (*protocol.WorkspaceEdit, error) { + return nil, notImplemented("WillRenameFiles") +} + +func (s *server) WillSave(context.Context, *protocol.WillSaveTextDocumentParams) error { + return notImplemented("WillSave") +} + +func (s *server) WillSaveWaitUntil(context.Context, *protocol.WillSaveTextDocumentParams) ([]protocol.TextEdit, error) { + return nil, notImplemented("WillSaveWaitUntil") +} + +func notImplemented(method string) error { + return fmt.Errorf("%w: %q not yet implemented", jsonrpc2.ErrMethodNotFound, method) +} diff 
--git a/gopls/internal/server/workspace.go b/gopls/internal/server/workspace.go new file mode 100644 index 00000000000..1a3c0864d33 --- /dev/null +++ b/gopls/internal/server/workspace.go @@ -0,0 +1,104 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +import ( + "context" + "fmt" + "sync" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" +) + +func (s *server) DidChangeWorkspaceFolders(ctx context.Context, params *protocol.DidChangeWorkspaceFoldersParams) error { + for _, folder := range params.Event.Removed { + dir, err := protocol.ParseDocumentURI(folder.URI) + if err != nil { + return fmt.Errorf("invalid folder %q: %v", folder.URI, err) + } + if !s.session.RemoveView(dir) { + return fmt.Errorf("view %q for %v not found", folder.Name, folder.URI) + } + } + s.addFolders(ctx, params.Event.Added) + return nil +} + +// addView returns a Snapshot and a release function that must be +// called when it is no longer needed. 
+func (s *server) addView(ctx context.Context, name string, dir protocol.DocumentURI) (*cache.Snapshot, func(), error) { + s.stateMu.Lock() + state := s.state + s.stateMu.Unlock() + if state < serverInitialized { + return nil, nil, fmt.Errorf("addView called before server initialized") + } + folder, err := s.newFolder(ctx, dir, name) + if err != nil { + return nil, nil, err + } + _, snapshot, release, err := s.session.NewView(ctx, folder) + return snapshot, release, err +} + +func (s *server) DidChangeConfiguration(ctx context.Context, _ *protocol.DidChangeConfigurationParams) error { + ctx, done := event.Start(ctx, "lsp.Server.didChangeConfiguration") + defer done() + + var wg sync.WaitGroup + wg.Add(1) + defer wg.Done() + if s.Options().VerboseWorkDoneProgress { + work := s.progress.Start(ctx, DiagnosticWorkTitle(FromDidChangeConfiguration), "Calculating diagnostics...", nil, nil) + go func() { + wg.Wait() + work.End(ctx, "Done.") + }() + } + + // Apply any changes to the session-level settings. + options, err := s.fetchFolderOptions(ctx, "") + if err != nil { + return err + } + s.SetOptions(options) + + // Collect options for all workspace folders. + seen := make(map[protocol.DocumentURI]bool) + var newFolders []*cache.Folder + for _, view := range s.session.Views() { + folder := view.Folder() + if seen[folder.Dir] { + continue + } + seen[folder.Dir] = true + newFolder, err := s.newFolder(ctx, folder.Dir, folder.Name) + if err != nil { + return err + } + newFolders = append(newFolders, newFolder) + } + s.session.UpdateFolders(ctx, newFolders) + + // The view set may have been updated above. 
+ viewsToDiagnose := make(map[*cache.View][]protocol.DocumentURI) + for _, view := range s.session.Views() { + viewsToDiagnose[view] = nil + } + + modCtx, modID := s.needsDiagnosis(ctx, viewsToDiagnose) + wg.Add(1) + go func() { + s.diagnoseChangedViews(modCtx, modID, viewsToDiagnose, FromDidChangeConfiguration) + wg.Done() + }() + + // An options change may have affected the detected Go version. + s.checkViewGoVersions() + + return nil +} diff --git a/gopls/internal/server/workspace_symbol.go b/gopls/internal/server/workspace_symbol.go new file mode 100644 index 00000000000..9eafeb015ad --- /dev/null +++ b/gopls/internal/server/workspace_symbol.go @@ -0,0 +1,41 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +import ( + "context" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/telemetry" + "golang.org/x/tools/internal/event" +) + +func (s *server) Symbol(ctx context.Context, params *protocol.WorkspaceSymbolParams) (_ []protocol.SymbolInformation, rerr error) { + recordLatency := telemetry.StartLatencyTimer("symbol") + defer func() { + recordLatency(ctx, rerr) + }() + + ctx, done := event.Start(ctx, "lsp.Server.symbol") + defer done() + + views := s.session.Views() + matcher := s.Options().SymbolMatcher + style := s.Options().SymbolStyle + + var snapshots []*cache.Snapshot + for _, v := range views { + snapshot, release, err := v.Snapshot() + if err != nil { + continue // snapshot is shutting down + } + // If err is non-nil, the snapshot is shutting down. Skip it. 
+ defer release() + snapshots = append(snapshots, snapshot) + } + return golang.WorkspaceSymbols(ctx, matcher, style, snapshots, params.Query) +} diff --git a/gopls/internal/settings/analyzer.go b/gopls/internal/settings/analyzer.go new file mode 100644 index 00000000000..d855aa21a0d --- /dev/null +++ b/gopls/internal/settings/analyzer.go @@ -0,0 +1,53 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package settings + +import ( + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/gopls/internal/protocol" +) + +// Analyzer augments a go/analysis analyzer with additional LSP configuration. +type Analyzer struct { + Analyzer *analysis.Analyzer + + // Enabled reports whether the analyzer is enabled. This value can be + // configured per-analysis in user settings. For staticcheck analyzers, + // the value of the Staticcheck setting overrides this field. + // + // Most clients should use the IsEnabled method. + Enabled bool + + // ActionKinds is the set of kinds of code action this analyzer produces. + // If empty, the set is just QuickFix. + ActionKinds []protocol.CodeActionKind + + // Severity is the severity set for diagnostics reported by this + // analyzer. If left unset it defaults to Warning. + // + // Note: diagnostics with severity protocol.SeverityHint do not show up in + // the VS Code "problems" tab. + Severity protocol.DiagnosticSeverity + + // Tag is extra tags (unnecessary, deprecated, etc) for diagnostics + // reported by this analyzer. + Tag []protocol.DiagnosticTag +} + +func (a *Analyzer) String() string { return a.Analyzer.String() } + +// IsEnabled reports whether this analyzer is enabled by the given options. +func (a Analyzer) IsEnabled(options *Options) bool { + // Staticcheck analyzers can only be enabled when staticcheck is on. 
+ if _, ok := options.StaticcheckAnalyzers[a.Analyzer.Name]; ok { + if !options.Staticcheck { + return false + } + } + if enabled, ok := options.Analyses[a.Analyzer.Name]; ok { + return enabled + } + return a.Enabled +} diff --git a/gopls/internal/lsp/source/api_json.go b/gopls/internal/settings/api_json.go similarity index 75% rename from gopls/internal/lsp/source/api_json.go rename to gopls/internal/settings/api_json.go index b6e5fbd3b85..26db2a9290a 100644 --- a/gopls/internal/lsp/source/api_json.go +++ b/gopls/internal/settings/api_json.go @@ -1,6 +1,6 @@ // Code generated by "golang.org/x/tools/gopls/doc/generate"; DO NOT EDIT. -package source +package settings var GeneratedAPIJSON = &APIJSON{ Options: map[string][]*OptionJSON{ @@ -34,24 +34,17 @@ var GeneratedAPIJSON = &APIJSON{ Hierarchy: "build", }, { - Name: "memoryMode", - Type: "enum", - Doc: "memoryMode controls the tradeoff `gopls` makes between memory usage and\ncorrectness.\n\nValues other than `Normal` are untested and may break in surprising ways.\n", - EnumValues: []EnumValue{ - { - Value: "\"DegradeClosed\"", - Doc: "`\"DegradeClosed\"`: In DegradeClosed mode, `gopls` will collect less information about\npackages without open files. As a result, features like Find\nReferences and Rename will miss results in such packages.\n", - }, - {Value: "\"Normal\""}, - }, - Default: "\"Normal\"", + Name: "memoryMode", + Type: "string", + Doc: "obsolete, no effect\n", + Default: "\"\"", Status: "experimental", Hierarchy: "build", }, { Name: "expandWorkspaceToModule", Type: "bool", - Doc: "expandWorkspaceToModule instructs `gopls` to adjust the scope of the\nworkspace to find the best available module root. `gopls` first looks for\na go.mod file in any parent directory of the workspace folder, expanding\nthe scope to that directory if it exists. 
If no viable parent directory is\nfound, gopls will check if there is exactly one child directory containing\na go.mod file, narrowing the scope to that directory if it exists.\n", + Doc: "expandWorkspaceToModule determines which packages are considered\n\"workspace packages\" when the workspace is using modules.\n\nWorkspace packages affect the scope of workspace-wide operations. Notably,\ngopls diagnoses all packages considered to be part of the workspace after\nevery keystroke, so by setting \"ExpandWorkspaceToModule\" to false, and\nopening a nested workspace directory, you can reduce the amount of work\ngopls has to do to keep your workspace up to date.\n", Default: "true", Status: "experimental", Hierarchy: "build", @@ -221,7 +214,7 @@ var GeneratedAPIJSON = &APIJSON{ { Name: "analyses", Type: "map[string]bool", - Doc: "analyses specify analyses that the user would like to enable or disable.\nA map of the names of analysis passes that should be enabled/disabled.\nA full list of analyzers that gopls uses can be found in\n[analyzers.md](https://github.com/golang/tools/blob/master/gopls/doc/analyzers.md).\n\nExample Usage:\n\n```json5\n...\n\"analyses\": {\n \"unreachable\": false, // Disable the unreachable analyzer.\n \"unusedparams\": true // Enable the unusedparams analyzer.\n}\n...\n```\n", + Doc: "analyses specify analyses that the user would like to enable or disable.\nA map of the names of analysis passes that should be enabled/disabled.\nA full list of analyzers that gopls uses can be found in\n[analyzers.md](https://github.com/golang/tools/blob/master/gopls/doc/analyzers.md).\n\nExample Usage:\n\n```json5\n...\n\"analyses\": {\n \"unreachable\": false, // Disable the unreachable analyzer.\n \"unusedvariable\": true // Enable the unusedvariable analyzer.\n}\n...\n```\n", EnumKeys: EnumKeys{ ValueType: "bool", Keys: []EnumKey{ @@ -287,7 +280,7 @@ var GeneratedAPIJSON = &APIJSON{ }, { Name: "\"deprecated\"", - Doc: "check for use of deprecated 
identifiers\n\nThe deprecated analyzer looks for deprecated symbols and package imports.\n\nSee https://go.dev/wiki/Deprecated to learn about Go's convention\nfor documenting and signaling deprecated identifiers.", + Doc: "check for use of deprecated identifiers\n\nThe deprecated analyzer looks for deprecated symbols and package\nimports.\n\nSee https://go.dev/wiki/Deprecated to learn about Go's convention\nfor documenting and signaling deprecated identifiers.", Default: "true", }, { @@ -310,6 +303,11 @@ var GeneratedAPIJSON = &APIJSON{ Doc: "find structs that would use less memory if their fields were sorted\n\nThis analyzer find structs that can be rearranged to use less memory, and provides\na suggested edit with the most compact order.\n\nNote that there are two different diagnostics reported. One checks struct size,\nand the other reports \"pointer bytes\" used. Pointer bytes is how many bytes of the\nobject that the garbage collector has to potentially scan for pointers, for example:\n\n\tstruct { uint32; string }\n\nhave 16 pointer bytes because the garbage collector has to scan up through the string's\ninner pointer.\n\n\tstruct { string; *uint32 }\n\nhas 24 pointer bytes because it has to scan further through the *uint32.\n\n\tstruct { string; uint32 }\n\nhas 8 because it can stop immediately after the string pointer.\n\nBe aware that the most compact order is not always the most efficient.\nIn rare cases it may cause two variables each updated by its own goroutine\nto occupy the same CPU cache line, inducing a form of memory contention\nknown as \"false sharing\" that slows down both goroutines.\n", Default: "false", }, + { + Name: "\"fillreturns\"", + Doc: "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". 
For example:\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\n\nwill turn into\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.", + Default: "true", + }, { Name: "\"httpresponse\"", Doc: "check for mistakes using HTTP responses\n\nA common mistake when using the net/http package is to defer a function\ncall to close the http.Response Body before checking the error that\ndetermines whether the response is valid:\n\n\tresp, err := http.Head(url)\n\tdefer resp.Body.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// (defer statement belongs here)\n\nThis checker helps uncover latent nil dereference bugs by reporting a\ndiagnostic for such mistakes.", @@ -320,9 +318,14 @@ var GeneratedAPIJSON = &APIJSON{ Doc: "detect impossible interface-to-interface type assertions\n\nThis checker flags type assertions v.(T) and corresponding type-switch cases\nin which the static type V of v is an interface that cannot possibly implement\nthe target interface T. This occurs when V and T contain methods with the same\nname but different signatures. 
Example:\n\n\tvar v interface {\n\t\tRead()\n\t}\n\t_ = v.(io.Reader)\n\nThe Read method in v has a different signature than the Read method in\nio.Reader, so this assertion cannot succeed.", Default: "true", }, + { + Name: "\"infertypeargs\"", + Doc: "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n", + Default: "true", + }, { Name: "\"loopclosure\"", - Doc: "check references to loop variables from within nested functions\n\nThis analyzer reports places where a function literal references the\niteration variable of an enclosing loop, and the loop calls the function\nin such a way (e.g. with go or defer) that it may outlive the loop\niteration and possibly observe the wrong value of the variable.\n\nIn this example, all the deferred functions run after the loop has\ncompleted, so all observe the final value of v.\n\n\tfor _, v := range list {\n\t defer func() {\n\t use(v) // incorrect\n\t }()\n\t}\n\nOne fix is to create a new variable for each iteration of the loop:\n\n\tfor _, v := range list {\n\t v := v // new var per iteration\n\t defer func() {\n\t use(v) // ok\n\t }()\n\t}\n\nThe next example uses a go statement and has a similar problem.\nIn addition, it has a data race because the loop updates v\nconcurrent with the goroutines accessing it.\n\n\tfor _, v := range elem {\n\t go func() {\n\t use(v) // incorrect, and a data race\n\t }()\n\t}\n\nA fix is the same as before. 
The checker also reports problems\nin goroutines started by golang.org/x/sync/errgroup.Group.\nA hard-to-spot variant of this form is common in parallel tests:\n\n\tfunc Test(t *testing.T) {\n\t for _, test := range tests {\n\t t.Run(test.name, func(t *testing.T) {\n\t t.Parallel()\n\t use(test) // incorrect, and a data race\n\t })\n\t }\n\t}\n\nThe t.Parallel() call causes the rest of the function to execute\nconcurrent with the loop.\n\nThe analyzer reports references only in the last statement,\nas it is not deep enough to understand the effects of subsequent\nstatements that might render the reference benign.\n(\"Last statement\" is defined recursively in compound\nstatements such as if, switch, and select.)\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines", + Doc: "check references to loop variables from within nested functions\n\nThis analyzer reports places where a function literal references the\niteration variable of an enclosing loop, and the loop calls the function\nin such a way (e.g. with go or defer) that it may outlive the loop\niteration and possibly observe the wrong value of the variable.\n\nNote: An iteration variable can only outlive a loop iteration in Go versions <=1.21.\nIn Go 1.22 and later, the loop variable lifetimes changed to create a new\niteration variable per loop iteration. 
(See go.dev/issue/60078.)\n\nIn this example, all the deferred functions run after the loop has\ncompleted, so all observe the final value of v [<go1.22].\n\n\tfor _, v := range list {\n\t defer func() {\n\t use(v) // incorrect\n\t }()\n\t}\n\nOne fix is to create a new variable for each iteration of the loop:\n\n\tfor _, v := range list {\n\t v := v // new var per iteration\n\t defer func() {\n\t use(v) // ok\n\t }()\n\t}\n\nAfter Go version 1.22, the previous two for loops are equivalent\nand both are correct.\n\nThe next example uses a go statement and has a similar problem [<go1.22].\nIn addition, it has a data race because the loop updates v\nconcurrent with the goroutines accessing it.\n\n\tfor _, v := range elem {\n\t go func() {\n\t use(v) // incorrect, and a data race\n\t }()\n\t}\n\nA fix is the same as before. The checker also reports problems\nin goroutines started by golang.org/x/sync/errgroup.Group.\nA hard-to-spot variant of this form is common in parallel tests:\n\n\tfunc Test(t *testing.T) {\n\t for _, test := range tests {\n\t t.Run(test.name, func(t *testing.T) {\n\t t.Parallel()\n\t use(test) // incorrect, and a data race\n\t })\n\t }\n\t}\n\nThe t.Parallel() call causes the rest of the function to execute\nconcurrent with the loop [<go1.22].\n\nThe analyzer reports references only in the last statement,\nas it is not deep enough to understand the effects of subsequent\nstatements that might render the reference benign.\n(\"Last statement\" is defined recursively in compound\nstatements such as if, switch, and select.)\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines", Default: "true", }, { @@ -337,7 +340,17 @@ var GeneratedAPIJSON = &APIJSON{ }, { Name: "\"nilness\"", - Doc: "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. 
A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := &v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}", + Doc: "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := &v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}\n\nSometimes the control flow may be quite complex, making bugs hard\nto spot. In the example below, the err.Error expression is\nguaranteed to panic because, after the first return, err must be\nnil. 
The intervening loop is just a distraction.\n\n\t...\n\terr := g.Wait()\n\tif err != nil {\n\t\treturn err\n\t}\n\tpartialSuccess := false\n\tfor _, err := range errs {\n\t\tif err == nil {\n\t\t\tpartialSuccess = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif partialSuccess {\n\t\treportStatus(StatusMessage{\n\t\t\tCode: code.ERROR,\n\t\t\tDetail: err.Error(), // \"nil dereference in dynamic method call\"\n\t\t})\n\t\treturn nil\n\t}\n\n...", + Default: "true", + }, + { + Name: "\"nonewvars\"", + Doc: "suggested fixes for \"no new vars on left side of :=\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no new vars on left side of :=\". For example:\n\n\tz := 1\n\tz := 2\n\nwill turn into\n\n\tz := 1\n\tz = 2", + Default: "true", + }, + { + Name: "\"noresultvalues\"", + Doc: "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\n\tfunc z() { return nil }\n\nwill turn into\n\n\tfunc z() { return }", Default: "true", }, { @@ -357,17 +370,17 @@ var GeneratedAPIJSON = &APIJSON{ }, { Name: "\"simplifycompositelit\"", - Doc: "check for composite literal simplifications\n\nAn array, slice, or map composite literal of the form:\n\t[]T{T{}, T{}}\nwill be simplified to:\n\t[]T{{}, {}}\n\nThis is one of the simplifications that \"gofmt -s\" applies.", + Doc: "check for composite literal simplifications\n\nAn array, slice, or map composite literal of the form:\n\n\t[]T{T{}, T{}}\n\nwill be simplified to:\n\n\t[]T{{}, {}}\n\nThis is one of the simplifications that \"gofmt -s\" applies.", Default: "true", }, { Name: "\"simplifyrange\"", - Doc: "check for range statement simplifications\n\nA range of the form:\n\tfor x, _ = range v {...}\nwill be simplified to:\n\tfor x = range v {...}\n\nA range of the form:\n\tfor _ = range v {...}\nwill be simplified to:\n\tfor range v {...}\n\nThis is one of the simplifications that \"gofmt 
-s\" applies.", + Doc: "check for range statement simplifications\n\nA range of the form:\n\n\tfor x, _ = range v {...}\n\nwill be simplified to:\n\n\tfor x = range v {...}\n\nA range of the form:\n\n\tfor _ = range v {...}\n\nwill be simplified to:\n\n\tfor range v {...}\n\nThis is one of the simplifications that \"gofmt -s\" applies.", Default: "true", }, { Name: "\"simplifyslice\"", - Doc: "check for slice simplifications\n\nA slice expression of the form:\n\ts[a:len(s)]\nwill be simplified to:\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.", + Doc: "check for slice simplifications\n\nA slice expression of the form:\n\n\ts[a:len(s)]\n\nwill be simplified to:\n\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.", Default: "true", }, { @@ -385,6 +398,11 @@ var GeneratedAPIJSON = &APIJSON{ Doc: "check signature of methods of well-known interfaces\n\nSometimes a type may be intended to satisfy an interface but may fail to\ndo so because of a mistake in its method signature.\nFor example, the result of this WriteTo method should be (int64, error),\nnot error, to satisfy io.WriterTo:\n\n\ttype myWriterTo struct{...}\n\tfunc (myWriterTo) WriteTo(w io.Writer) error { ... }\n\nThis check ensures that each method whose name matches one of several\nwell-known interface methods from the standard library has the correct\nsignature for that interface.\n\nChecked method names include:\n\n\tFormat GobEncode GobDecode MarshalJSON MarshalXML\n\tPeek ReadByte ReadFrom ReadRune Scan Seek\n\tUnmarshalJSON UnreadByte UnreadRune WriteByte\n\tWriteTo", Default: "true", }, + { + Name: "\"stdversion\"", + Doc: "report uses of too-new standard library symbols\n\nThe stdversion analyzer reports references to symbols in the standard\nlibrary that were introduced by a Go release higher than the one in\nforce in the referring file. 
(Recall that the file's Go version is\ndefined by the 'go' directive its module's go.mod file, or by a\n\"//go:build go1.X\" build tag at the top of the file.)\n\nThe analyzer does not report a diagnostic for a reference to a \"too\nnew\" field or method of a type that is itself \"too new\", as this may\nhave false positives, for example if fields or methods are accessed\nthrough a type alias that is guarded by a Go version constraint.\n", + Default: "true", + }, { Name: "\"stringintconv\"", Doc: "check for string(int) conversions\n\nThis checker flags conversions of the form string(x) where x is an integer\n(but not byte or rune) type. Such conversions are discouraged because they\nreturn the UTF-8 representation of the Unicode code point x, and not a decimal\nstring representation of x as one might expect. Furthermore, if x denotes an\ninvalid code point, the conversion cannot be statically rejected.\n\nFor conversions that intend on using the code point, consider replacing them\nwith string(rune(x)). 
Otherwise, strconv.Itoa and its equivalents return the\nstring representation of the value in the desired base.", @@ -395,9 +413,14 @@ var GeneratedAPIJSON = &APIJSON{ Doc: "check that struct field tags conform to reflect.StructTag.Get\n\nAlso report certain struct tags (json, xml) used with unexported fields.", Default: "true", }, + { + Name: "\"stubmethods\"", + Doc: "detect missing methods and fix with stub implementations\n\nThis analyzer detects type-checking errors due to missing methods\nin assignments from concrete types to interface types, and offers\na suggested fix that will create a set of stub methods so that\nthe concrete type satisfies the interface.\n\nFor example, this function will not compile because the value\nNegativeErr{} does not implement the \"error\" interface:\n\n\tfunc sqrt(x float64) (float64, error) {\n\t\tif x < 0 {\n\t\t\treturn 0, NegativeErr{} // error: missing method\n\t\t}\n\t\t...\n\t}\n\n\ttype NegativeErr struct{}\n\nThis analyzer will suggest a fix to declare this method:\n\n\t// Error implements error.Error.\n\tfunc (NegativeErr) Error() string {\n\t\tpanic(\"unimplemented\")\n\t}\n\n(At least, it appears to behave that way, but technically it\ndoesn't use the SuggestedFix mechanism and the stub is created by\nlogic in gopls's golang.stub function.)", + Default: "true", + }, { Name: "\"testinggoroutine\"", - Doc: "report calls to (*testing.T).Fatal from goroutines started by a test.\n\nFunctions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and\nSkip{,f,Now} methods of *testing.T, must be called from the test goroutine itself.\nThis checker detects calls to these functions that occur within a goroutine\nstarted by the test. 
For example:\n\n\tfunc TestFoo(t *testing.T) {\n\t go func() {\n\t t.Fatal(\"oops\") // error: (*T).Fatal called from non-test goroutine\n\t }()\n\t}", + Doc: "report calls to (*testing.T).Fatal from goroutines started by a test\n\nFunctions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and\nSkip{,f,Now} methods of *testing.T, must be called from the test goroutine itself.\nThis checker detects calls to these functions that occur within a goroutine\nstarted by the test. For example:\n\n\tfunc TestFoo(t *testing.T) {\n\t go func() {\n\t t.Fatal(\"oops\") // error: (*T).Fatal called from non-test goroutine\n\t }()\n\t}", Default: "true", }, { @@ -410,6 +433,11 @@ var GeneratedAPIJSON = &APIJSON{ Doc: "check for calls of (time.Time).Format or time.Parse with 2006-02-01\n\nThe timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm)\nformat. Internationally, \"yyyy-dd-mm\" does not occur in common calendar date\nstandards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended.", Default: "true", }, + { + Name: "\"undeclaredname\"", + Doc: "suggested fixes for \"undeclared name: <>\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"undeclared name: <>\". 
It will either insert a new statement,\nsuch as:\n\n\t<> :=\n\nor a new function declaration, such as:\n\n\tfunc <>(inferred parameters) {\n\t\tpanic(\"implement me!\")\n\t}", + Default: "true", + }, { Name: "\"unmarshal\"", Doc: "report passing non-pointer or non-interface values to unmarshal\n\nThe unmarshal analysis reports calls to functions such as json.Unmarshal\nin which the argument type is not a pointer or an interface.", @@ -427,63 +455,28 @@ var GeneratedAPIJSON = &APIJSON{ }, { Name: "\"unusedparams\"", - Doc: "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo reduce false positives it ignores:\n- methods\n- parameters that do not have a name or have the name '_' (the blank identifier)\n- functions in test files\n- functions with empty bodies or those with just a return stmt", - Default: "false", + Doc: "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo ensure soundness, it ignores:\n - \"address-taken\" functions, that is, functions that are used as\n a value rather than being called directly; their signatures may\n be required to conform to a func type.\n - exported functions or methods, since they may be address-taken\n in another package.\n - unexported methods whose name matches an interface method\n declared in the same package, since the method's signature\n may be required to conform to the interface type.\n - functions with empty bodies, or containing just a call to panic.\n - parameters that are unnamed, or named \"_\", the blank identifier.\n\nThe analyzer suggests a fix of replacing the parameter name by \"_\",\nbut in such cases a deeper fix can be obtained by invoking the\n\"Refactor: remove unused parameter\" code action, which will\neliminate the parameter entirely, along with all corresponding\narguments at call sites, while taking 
care to preserve any side\neffects in the argument expressions; see\nhttps://github.com/golang/tools/releases/tag/gopls%2Fv0.14.", + Default: "true", }, { Name: "\"unusedresult\"", Doc: "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side\neffects, so it is always a mistake to discard the result. Other\nfunctions may return an error that must not be ignored, or a cleanup\noperation that must be called. This analyzer reports calls to\nfunctions like these when the result of the call is ignored.\n\nThe set of functions may be controlled using flags.", Default: "true", }, - { - Name: "\"unusedwrite\"", - Doc: "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\n\tfunc f(input []T) {\n\t\tfor i, v := range input { // v is a copy\n\t\t\tv.x = i // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\n\tfunc (t T) f() { // t is a copy\n\t\tt.x = i // unused write to field x\n\t}", - Default: "false", - }, - { - Name: "\"useany\"", - Doc: "check for constraints that could be simplified to \"any\"", - Default: "false", - }, - { - Name: "\"fillreturns\"", - Doc: "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". 
For example:\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\nwill turn into\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.\n", - Default: "true", - }, - { - Name: "\"nonewvars\"", - Doc: "suggested fixes for \"no new vars on left side of :=\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no new vars on left side of :=\". For example:\n\tz := 1\n\tz := 2\nwill turn into\n\tz := 1\n\tz = 2\n", - Default: "true", - }, - { - Name: "\"noresultvalues\"", - Doc: "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\tfunc z() { return nil }\nwill turn into\n\tfunc z() { return }\n", - Default: "true", - }, - { - Name: "\"undeclaredname\"", - Doc: "suggested fixes for \"undeclared name: <>\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"undeclared name: <>\". It will either insert a new statement,\nsuch as:\n\n\"<> := \"\n\nor a new function declaration, such as:\n\nfunc <>(inferred parameters) {\n\tpanic(\"implement me!\")\n}\n", - Default: "true", - }, { Name: "\"unusedvariable\"", - Doc: "check for unused variables\n\nThe unusedvariable analyzer suggests fixes for unused variables errors.\n", + Doc: "check for unused variables and suggest fixes", Default: "false", }, { - Name: "\"fillstruct\"", - Doc: "note incomplete struct initializations\n\nThis analyzer provides diagnostics for any struct literals that do not have\nany fields initialized. 
Because the suggested fix for this analysis is\nexpensive to compute, callers should compute it separately, using the\nSuggestedFix function below.\n", - Default: "true", - }, - { - Name: "\"infertypeargs\"", - Doc: "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n", + Name: "\"unusedwrite\"", + Doc: "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\n\tfunc f(input []T) {\n\t\tfor i, v := range input { // v is a copy\n\t\t\tv.x = i // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\n\tfunc (t T) f() { // t is a copy\n\t\tt.x = i // unused write to field x\n\t}", Default: "true", }, { - Name: "\"stubmethods\"", - Doc: "stub methods analyzer\n\nThis analyzer generates method stubs for concrete types\nin order to implement a target interface", - Default: "true", + Name: "\"useany\"", + Doc: "check for constraints that could be simplified to \"any\"", + Default: "false", }, }, }, @@ -741,21 +734,23 @@ var GeneratedAPIJSON = &APIJSON{ }, { Command: "gopls.add_telemetry_counters", - Title: "update the given telemetry counters.", + Title: "Update the given telemetry counters", Doc: "Gopls will prepend \"fwd/\" to all the counters updated using this command\nto avoid conflicts with other counters gopls collects.", ArgDoc: "{\n\t// Names and Values must have the same length.\n\t\"Names\": []string,\n\t\"Values\": []int64,\n}", }, { - Command: 
"gopls.apply_fix", - Title: "Apply a fix", - Doc: "Applies a fix to a region of source code.", - ArgDoc: "{\n\t// The fix to apply.\n\t\"Fix\": string,\n\t// The file URI for the document to fix.\n\t\"URI\": string,\n\t// The document range to scan for fixes.\n\t\"Range\": {\n\t\t\"start\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t\t\"end\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t},\n}", + Command: "gopls.apply_fix", + Title: "Apply a fix", + Doc: "Applies a fix to a region of source code.", + ArgDoc: "{\n\t// The name of the fix to apply.\n\t//\n\t// For fixes suggested by analyzers, this is a string constant\n\t// advertised by the analyzer that matches the Category of\n\t// the analysis.Diagnostic with a SuggestedFix containing no edits.\n\t//\n\t// For fixes suggested by code actions, this is a string agreed\n\t// upon by the code action and golang.ApplyFix.\n\t\"Fix\": string,\n\t// The file URI for the document to fix.\n\t\"URI\": string,\n\t// The document range to scan for fixes.\n\t\"Range\": {\n\t\t\"start\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t\t\"end\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t},\n\t// Whether to resolve and return the edits.\n\t\"ResolveEdits\": bool,\n}", + ResultDoc: "{\n\t// Holds changes to existing resources.\n\t\"changes\": map[golang.org/x/tools/gopls/internal/protocol.DocumentURI][]golang.org/x/tools/gopls/internal/protocol.TextEdit,\n\t// Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes\n\t// are either an array of `TextDocumentEdit`s to express changes to n different text documents\n\t// where each text document edit addresses a specific version of a text document. 
Or it can contain\n\t// above `TextDocumentEdit`s mixed with create, rename and delete file / folder operations.\n\t//\n\t// Whether a client supports versioned document edits is expressed via\n\t// `workspace.workspaceEdit.documentChanges` client capability.\n\t//\n\t// If a client neither supports `documentChanges` nor `workspace.workspaceEdit.resourceOperations` then\n\t// only plain `TextEdit`s using the `changes` property are supported.\n\t\"documentChanges\": []{\n\t\t\"TextDocumentEdit\": {\n\t\t\t\"textDocument\": { ... },\n\t\t\t\"edits\": { ... },\n\t\t},\n\t\t\"RenameFile\": {\n\t\t\t\"kind\": string,\n\t\t\t\"oldUri\": string,\n\t\t\t\"newUri\": string,\n\t\t\t\"options\": { ... },\n\t\t\t\"ResourceOperation\": { ... },\n\t\t},\n\t},\n\t// A map of change annotations that can be referenced in `AnnotatedTextEdit`s or create, rename and\n\t// delete file / folder operations.\n\t//\n\t// Whether clients honor this property depends on the client capability `workspace.changeAnnotationSupport`.\n\t//\n\t// @since 3.16.0\n\t\"changeAnnotations\": map[string]golang.org/x/tools/gopls/internal/protocol.ChangeAnnotation,\n}", }, { - Command: "gopls.change_signature", - Title: "performs a \"change signature\" refactoring.", - Doc: "This command is experimental, currently only supporting parameter removal.\nIts signature will certainly change in the future (pun intended).", - ArgDoc: "{\n\t\"RemoveParameter\": {\n\t\t\"uri\": string,\n\t\t\"range\": {\n\t\t\t\"start\": { ... },\n\t\t\t\"end\": { ... },\n\t\t},\n\t},\n}", + Command: "gopls.change_signature", + Title: "Perform a \"change signature\" refactoring", + Doc: "This command is experimental, currently only supporting parameter removal.\nIts signature will certainly change in the future (pun intended).", + ArgDoc: "{\n\t\"RemoveParameter\": {\n\t\t\"uri\": string,\n\t\t\"range\": {\n\t\t\t\"start\": { ... },\n\t\t\t\"end\": { ... 
},\n\t\t},\n\t},\n\t// Whether to resolve and return the edits.\n\t\"ResolveEdits\": bool,\n}", + ResultDoc: "{\n\t// Holds changes to existing resources.\n\t\"changes\": map[golang.org/x/tools/gopls/internal/protocol.DocumentURI][]golang.org/x/tools/gopls/internal/protocol.TextEdit,\n\t// Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes\n\t// are either an array of `TextDocumentEdit`s to express changes to n different text documents\n\t// where each text document edit addresses a specific version of a text document. Or it can contain\n\t// above `TextDocumentEdit`s mixed with create, rename and delete file / folder operations.\n\t//\n\t// Whether a client supports versioned document edits is expressed via\n\t// `workspace.workspaceEdit.documentChanges` client capability.\n\t//\n\t// If a client neither supports `documentChanges` nor `workspace.workspaceEdit.resourceOperations` then\n\t// only plain `TextEdit`s using the `changes` property are supported.\n\t\"documentChanges\": []{\n\t\t\"TextDocumentEdit\": {\n\t\t\t\"textDocument\": { ... },\n\t\t\t\"edits\": { ... },\n\t\t},\n\t\t\"RenameFile\": {\n\t\t\t\"kind\": string,\n\t\t\t\"oldUri\": string,\n\t\t\t\"newUri\": string,\n\t\t\t\"options\": { ... },\n\t\t\t\"ResourceOperation\": { ... 
},\n\t\t},\n\t},\n\t// A map of change annotations that can be referenced in `AnnotatedTextEdit`s or create, rename and\n\t// delete file / folder operations.\n\t//\n\t// Whether clients honor this property depends on the client capability `workspace.changeAnnotationSupport`.\n\t//\n\t// @since 3.16.0\n\t\"changeAnnotations\": map[string]golang.org/x/tools/gopls/internal/protocol.ChangeAnnotation,\n}", }, { Command: "gopls.check_upgrades", @@ -763,6 +758,18 @@ var GeneratedAPIJSON = &APIJSON{ Doc: "Checks for module upgrades.", ArgDoc: "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// The modules to check.\n\t\"Modules\": []string,\n}", }, + { + Command: "gopls.diagnose_files", + Title: "Cause server to publish diagnostics for the specified files.", + Doc: "This command is needed by the 'gopls {check,fix}' CLI subcommands.", + ArgDoc: "{\n\t\"Files\": []string,\n}", + }, + { + Command: "gopls.doc", + Title: "View package documentation.", + Doc: "Opens the Go package documentation page for the current\npackage in a browser.", + ArgDoc: "{\n\t\"uri\": string,\n\t\"range\": {\n\t\t\"start\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t\t\"end\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t},\n}", + }, { Command: "gopls.edit_go_directive", Title: "Run go mod edit -go=version", @@ -774,7 +781,7 @@ var GeneratedAPIJSON = &APIJSON{ Title: "Get known vulncheck result", Doc: "Fetch the result of latest vulnerability check (`govulncheck`).", ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}", - ResultDoc: "map[golang.org/x/tools/gopls/internal/lsp/protocol.DocumentURI]*golang.org/x/tools/gopls/internal/vulncheck.Result", + ResultDoc: "map[golang.org/x/tools/gopls/internal/protocol.DocumentURI]*golang.org/x/tools/gopls/internal/vulncheck.Result", }, { Command: "gopls.gc_details", @@ -790,7 +797,7 @@ var GeneratedAPIJSON = &APIJSON{ }, { Command: "gopls.go_get_package", - Title: "go get a package", + Title: "'go get' a 
package", Doc: "Runs `go get` to fetch a package.", ArgDoc: "{\n\t// Any document URI within the relevant module.\n\t\"URI\": string,\n\t// The package to go get.\n\t\"Pkg\": string,\n\t\"AddRequire\": bool,\n}", }, @@ -810,12 +817,12 @@ var GeneratedAPIJSON = &APIJSON{ }, { Command: "gopls.maybe_prompt_for_telemetry", - Title: "checks for the right conditions, and then prompts", - Doc: "the user to ask if they want to enable Go telemetry uploading. If the user\nresponds 'Yes', the telemetry mode is set to \"on\".", + Title: "Prompt user to enable telemetry", + Doc: "Checks for the right conditions, and then prompts the user\nto ask if they want to enable Go telemetry uploading. If\nthe user responds 'Yes', the telemetry mode is set to \"on\".", }, { Command: "gopls.mem_stats", - Title: "fetch memory statistics", + Title: "Fetch memory statistics", Doc: "Call runtime.GC multiple times and return memory statistics as reported by\nruntime.MemStats.\n\nThis command is used for benchmarking, and may change in the future.", ResultDoc: "{\n\t\"HeapAlloc\": uint64,\n\t\"HeapInUse\": uint64,\n\t\"TotalAlloc\": uint64,\n}", }, @@ -839,13 +846,13 @@ var GeneratedAPIJSON = &APIJSON{ }, { Command: "gopls.run_go_work_command", - Title: "run `go work [args...]`, and apply the resulting go.work", - Doc: "edits to the current go.work file.", + Title: "Run `go work [args...]`, and apply the resulting go.work", + Doc: "edits to the current go.work file", ArgDoc: "{\n\t\"ViewID\": string,\n\t\"InitFirst\": bool,\n\t\"Args\": []string,\n}", }, { Command: "gopls.run_govulncheck", - Title: "Run vulncheck.", + Title: "Run vulncheck", Doc: "Run vulnerability check (`govulncheck`).", ArgDoc: "{\n\t// Any document in the directory from which govulncheck will run.\n\t\"URI\": string,\n\t// Package pattern. E.g. 
\"\", \".\", \"./...\".\n\t\"Pattern\": string,\n}", ResultDoc: "{\n\t// Token holds the progress token for LSP workDone reporting of the vulncheck\n\t// invocation.\n\t\"Token\": interface{},\n}", @@ -865,14 +872,14 @@ var GeneratedAPIJSON = &APIJSON{ }, { Command: "gopls.start_profile", - Title: "start capturing a profile of gopls' execution.", + Title: "Start capturing a profile of gopls' execution", Doc: "Start a new pprof profile. Before using the resulting file, profiling must\nbe stopped with a corresponding call to StopProfile.\n\nThis command is intended for internal use only, by the gopls benchmark\nrunner.", ArgDoc: "struct{}", ResultDoc: "struct{}", }, { Command: "gopls.stop_profile", - Title: "stop an ongoing profile.", + Title: "Stop an ongoing profile", Doc: "This command is intended for internal use only, by the gopls benchmark\nrunner.", ArgDoc: "struct{}", ResultDoc: "{\n\t// File is the profile file name.\n\t\"File\": string,\n}", @@ -913,9 +920,15 @@ var GeneratedAPIJSON = &APIJSON{ Doc: "Runs `go mod vendor` for a module.", ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}", }, + { + Command: "gopls.views", + Title: "List current Views on the server.", + Doc: "This command is intended for use by gopls tests only.", + ResultDoc: "[]{\n\t\"Type\": string,\n\t\"Root\": string,\n\t\"Folder\": string,\n\t\"EnvOverlay\": []string,\n}", + }, { Command: "gopls.workspace_stats", - Title: "fetch workspace statistics", + Title: "Fetch workspace statistics", Doc: "Query statistics about workspace builds, modules, packages, and files.\n\nThis command is intended for internal use only, by the gopls stats\ncommand.", ResultDoc: "{\n\t\"Files\": {\n\t\t\"Total\": int,\n\t\t\"Largest\": int,\n\t\t\"Errs\": int,\n\t},\n\t\"Views\": []{\n\t\t\"GoCommandVersion\": string,\n\t\t\"AllPackages\": {\n\t\t\t\"Packages\": int,\n\t\t\t\"LargestPackage\": int,\n\t\t\t\"CompiledGoFiles\": int,\n\t\t\t\"Modules\": int,\n\t\t},\n\t\t\"WorkspacePackages\": 
{\n\t\t\t\"Packages\": int,\n\t\t\t\"LargestPackage\": int,\n\t\t\t\"CompiledGoFiles\": int,\n\t\t\t\"Modules\": int,\n\t\t},\n\t\t\"Diagnostics\": int,\n\t},\n}", }, @@ -938,7 +951,7 @@ var GeneratedAPIJSON = &APIJSON{ }, { Lens: "run_govulncheck", - Title: "Run vulncheck.", + Title: "Run vulncheck", Doc: "Run vulnerability check (`govulncheck`).", }, { @@ -1037,7 +1050,8 @@ var GeneratedAPIJSON = &APIJSON{ }, { Name: "deprecated", - Doc: "check for use of deprecated identifiers\n\nThe deprecated analyzer looks for deprecated symbols and package imports.\n\nSee https://go.dev/wiki/Deprecated to learn about Go's convention\nfor documenting and signaling deprecated identifiers.", + Doc: "check for use of deprecated identifiers\n\nThe deprecated analyzer looks for deprecated symbols and package\nimports.\n\nSee https://go.dev/wiki/Deprecated to learn about Go's convention\nfor documenting and signaling deprecated identifiers.", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/deprecated", Default: true, }, { @@ -1049,6 +1063,7 @@ var GeneratedAPIJSON = &APIJSON{ { Name: "embed", Doc: "check //go:embed directive usage\n\nThis analyzer checks that the embed package is imported if //go:embed\ndirectives are present, providing a suggested fix to add the import if\nit is missing.\n\nThis analyzer also checks that //go:embed directives precede the\ndeclaration of a single variable.", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/embeddirective", Default: true, }, { @@ -1062,6 +1077,12 @@ var GeneratedAPIJSON = &APIJSON{ Doc: "find structs that would use less memory if their fields were sorted\n\nThis analyzer find structs that can be rearranged to use less memory, and provides\na suggested edit with the most compact order.\n\nNote that there are two different diagnostics reported. One checks struct size,\nand the other reports \"pointer bytes\" used. 
Pointer bytes is how many bytes of the\nobject that the garbage collector has to potentially scan for pointers, for example:\n\n\tstruct { uint32; string }\n\nhave 16 pointer bytes because the garbage collector has to scan up through the string's\ninner pointer.\n\n\tstruct { string; *uint32 }\n\nhas 24 pointer bytes because it has to scan further through the *uint32.\n\n\tstruct { string; uint32 }\n\nhas 8 because it can stop immediately after the string pointer.\n\nBe aware that the most compact order is not always the most efficient.\nIn rare cases it may cause two variables each updated by its own goroutine\nto occupy the same CPU cache line, inducing a form of memory contention\nknown as \"false sharing\" that slows down both goroutines.\n", URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/fieldalignment", }, + { + Name: "fillreturns", + Doc: "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". 
For example:\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\n\nwill turn into\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/fillreturns", + Default: true, + }, { Name: "httpresponse", Doc: "check for mistakes using HTTP responses\n\nA common mistake when using the net/http package is to defer a function\ncall to close the http.Response Body before checking the error that\ndetermines whether the response is valid:\n\n\tresp, err := http.Head(url)\n\tdefer resp.Body.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// (defer statement belongs here)\n\nThis checker helps uncover latent nil dereference bugs by reporting a\ndiagnostic for such mistakes.", @@ -1074,9 +1095,15 @@ var GeneratedAPIJSON = &APIJSON{ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/ifaceassert", Default: true, }, + { + Name: "infertypeargs", + Doc: "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/infertypeargs", + Default: true, + }, { Name: "loopclosure", - Doc: "check references to loop variables from within nested functions\n\nThis analyzer reports places where a function literal references the\niteration variable of an enclosing loop, and the loop calls the function\nin such a way (e.g. 
with go or defer) that it may outlive the loop\niteration and possibly observe the wrong value of the variable.\n\nIn this example, all the deferred functions run after the loop has\ncompleted, so all observe the final value of v.\n\n\tfor _, v := range list {\n\t defer func() {\n\t use(v) // incorrect\n\t }()\n\t}\n\nOne fix is to create a new variable for each iteration of the loop:\n\n\tfor _, v := range list {\n\t v := v // new var per iteration\n\t defer func() {\n\t use(v) // ok\n\t }()\n\t}\n\nThe next example uses a go statement and has a similar problem.\nIn addition, it has a data race because the loop updates v\nconcurrent with the goroutines accessing it.\n\n\tfor _, v := range elem {\n\t go func() {\n\t use(v) // incorrect, and a data race\n\t }()\n\t}\n\nA fix is the same as before. The checker also reports problems\nin goroutines started by golang.org/x/sync/errgroup.Group.\nA hard-to-spot variant of this form is common in parallel tests:\n\n\tfunc Test(t *testing.T) {\n\t for _, test := range tests {\n\t t.Run(test.name, func(t *testing.T) {\n\t t.Parallel()\n\t use(test) // incorrect, and a data race\n\t })\n\t }\n\t}\n\nThe t.Parallel() call causes the rest of the function to execute\nconcurrent with the loop.\n\nThe analyzer reports references only in the last statement,\nas it is not deep enough to understand the effects of subsequent\nstatements that might render the reference benign.\n(\"Last statement\" is defined recursively in compound\nstatements such as if, switch, and select.)\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines", + Doc: "check references to loop variables from within nested functions\n\nThis analyzer reports places where a function literal references the\niteration variable of an enclosing loop, and the loop calls the function\nin such a way (e.g. 
with go or defer) that it may outlive the loop\niteration and possibly observe the wrong value of the variable.\n\nNote: An iteration variable can only outlive a loop iteration in Go versions <=1.21.\nIn Go 1.22 and later, the loop variable lifetimes changed to create a new\niteration variable per loop iteration. (See go.dev/issue/60078.)\n\nIn this example, all the deferred functions run after the loop has\ncompleted, so all observe the final value of v [<go1.22].\n\n\tfor _, v := range list {\n\t defer func() {\n\t use(v) // incorrect\n\t }()\n\t}\n\nOne fix is to create a new variable for each iteration of the loop:\n\n\tfor _, v := range list {\n\t v := v // new var per iteration\n\t defer func() {\n\t use(v) // ok\n\t }()\n\t}\n\nAfter Go version 1.22, the previous two for loops are equivalent\nand both are correct.\n\nThe next example uses a go statement and has a similar problem [<go1.22].\nIn addition, it has a data race because the loop updates v\nconcurrent with the goroutines accessing it.\n\n\tfor _, v := range elem {\n\t go func() {\n\t use(v) // incorrect, and a data race\n\t }()\n\t}\n\nA fix is the same as before. 
The checker also reports problems\nin goroutines started by golang.org/x/sync/errgroup.Group.\nA hard-to-spot variant of this form is common in parallel tests:\n\n\tfunc Test(t *testing.T) {\n\t for _, test := range tests {\n\t t.Run(test.name, func(t *testing.T) {\n\t t.Parallel()\n\t use(test) // incorrect, and a data race\n\t })\n\t }\n\t}\n\nThe t.Parallel() call causes the rest of the function to execute\nconcurrent with the loop [<go1.22].\n\nThe analyzer reports references only in the last statement,\nas it is not deep enough to understand the effects of subsequent\nstatements that might render the reference benign.\n(\"Last statement\" is defined recursively in compound\nstatements such as if, switch, and select.)\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines", URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/loopclosure", Default: true, }, @@ -1094,10 +1121,22 @@ var GeneratedAPIJSON = &APIJSON{ }, { Name: "nilness", - Doc: "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. 
Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := &v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}", + Doc: "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := &v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}\n\nSometimes the control flow may be quite complex, making bugs hard\nto spot. In the example below, the err.Error expression is\nguaranteed to panic because, after the first return, err must be\nnil. 
The intervening loop is just a distraction.\n\n\t...\n\terr := g.Wait()\n\tif err != nil {\n\t\treturn err\n\t}\n\tpartialSuccess := false\n\tfor _, err := range errs {\n\t\tif err == nil {\n\t\t\tpartialSuccess = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif partialSuccess {\n\t\treportStatus(StatusMessage{\n\t\t\tCode: code.ERROR,\n\t\t\tDetail: err.Error(), // \"nil dereference in dynamic method call\"\n\t\t})\n\t\treturn nil\n\t}\n\n...", URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/nilness", Default: true, }, + { + Name: "nonewvars", + Doc: "suggested fixes for \"no new vars on left side of :=\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no new vars on left side of :=\". For example:\n\n\tz := 1\n\tz := 2\n\nwill turn into\n\n\tz := 1\n\tz = 2", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/nonewvars", + Default: true, + }, + { + Name: "noresultvalues", + Doc: "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\n\tfunc z() { return nil }\n\nwill turn into\n\n\tfunc z() { return }", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/noresultvars", + Default: true, + }, { Name: "printf", Doc: "check consistency of Printf format strings and arguments\n\nThe check applies to calls of the formatting functions such as\n[fmt.Printf] and [fmt.Sprintf], as well as any detected wrappers of\nthose functions.\n\nIn this example, the %d format operator requires an integer operand:\n\n\tfmt.Printf(\"%d\", \"hello\") // fmt.Printf format %d has arg \"hello\" of wrong type string\n\nSee the documentation of the fmt package for the complete set of\nformat operators and their operand types.\n\nTo enable printf checking on a function that is not found by this\nanalyzer's heuristics (for example, because control is obscured by\ndynamic 
method calls), insert a bogus call:\n\n\tfunc MyPrintf(format string, args ...any) {\n\t\tif false {\n\t\t\t_ = fmt.Sprintf(format, args...) // enable printf checker\n\t\t}\n\t\t...\n\t}\n\nThe -funcs flag specifies a comma-separated list of names of additional\nknown formatting functions or methods. If the name contains a period,\nit must denote a specific function using one of the following forms:\n\n\tdir/pkg.Function\n\tdir/pkg.Type.Method\n\t(*dir/pkg.Type).Method\n\nOtherwise the name is interpreted as a case-insensitive unqualified\nidentifier such as \"errorf\". Either way, if a listed name ends in f, the\nfunction is assumed to be Printf-like, taking a format string before the\nargument list. Otherwise it is assumed to be Print-like, taking a list\nof arguments with no format string.", @@ -1117,17 +1156,20 @@ var GeneratedAPIJSON = &APIJSON{ }, { Name: "simplifycompositelit", - Doc: "check for composite literal simplifications\n\nAn array, slice, or map composite literal of the form:\n\t[]T{T{}, T{}}\nwill be simplified to:\n\t[]T{{}, {}}\n\nThis is one of the simplifications that \"gofmt -s\" applies.", + Doc: "check for composite literal simplifications\n\nAn array, slice, or map composite literal of the form:\n\n\t[]T{T{}, T{}}\n\nwill be simplified to:\n\n\t[]T{{}, {}}\n\nThis is one of the simplifications that \"gofmt -s\" applies.", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifycompositelit", Default: true, }, { Name: "simplifyrange", - Doc: "check for range statement simplifications\n\nA range of the form:\n\tfor x, _ = range v {...}\nwill be simplified to:\n\tfor x = range v {...}\n\nA range of the form:\n\tfor _ = range v {...}\nwill be simplified to:\n\tfor range v {...}\n\nThis is one of the simplifications that \"gofmt -s\" applies.", + Doc: "check for range statement simplifications\n\nA range of the form:\n\n\tfor x, _ = range v {...}\n\nwill be simplified to:\n\n\tfor x = range v {...}\n\nA range of 
the form:\n\n\tfor _ = range v {...}\n\nwill be simplified to:\n\n\tfor range v {...}\n\nThis is one of the simplifications that \"gofmt -s\" applies.", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifyrange", Default: true, }, { Name: "simplifyslice", - Doc: "check for slice simplifications\n\nA slice expression of the form:\n\ts[a:len(s)]\nwill be simplified to:\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.", + Doc: "check for slice simplifications\n\nA slice expression of the form:\n\n\ts[a:len(s)]\n\nwill be simplified to:\n\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifyslice", Default: true, }, { @@ -1148,6 +1190,12 @@ var GeneratedAPIJSON = &APIJSON{ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stdmethods", Default: true, }, + { + Name: "stdversion", + Doc: "report uses of too-new standard library symbols\n\nThe stdversion analyzer reports references to symbols in the standard\nlibrary that were introduced by a Go release higher than the one in\nforce in the referring file. (Recall that the file's Go version is\ndefined by the 'go' directive its module's go.mod file, or by a\n\"//go:build go1.X\" build tag at the top of the file.)\n\nThe analyzer does not report a diagnostic for a reference to a \"too\nnew\" field or method of a type that is itself \"too new\", as this may\nhave false positives, for example if fields or methods are accessed\nthrough a type alias that is guarded by a Go version constraint.\n", + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stdversion", + Default: true, + }, { Name: "stringintconv", Doc: "check for string(int) conversions\n\nThis checker flags conversions of the form string(x) where x is an integer\n(but not byte or rune) type. 
Such conversions are discouraged because they\nreturn the UTF-8 representation of the Unicode code point x, and not a decimal\nstring representation of x as one might expect. Furthermore, if x denotes an\ninvalid code point, the conversion cannot be statically rejected.\n\nFor conversions that intend on using the code point, consider replacing them\nwith string(rune(x)). Otherwise, strconv.Itoa and its equivalents return the\nstring representation of the value in the desired base.", @@ -1160,9 +1208,15 @@ var GeneratedAPIJSON = &APIJSON{ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/structtag", Default: true, }, + { + Name: "stubmethods", + Doc: "detect missing methods and fix with stub implementations\n\nThis analyzer detects type-checking errors due to missing methods\nin assignments from concrete types to interface types, and offers\na suggested fix that will create a set of stub methods so that\nthe concrete type satisfies the interface.\n\nFor example, this function will not compile because the value\nNegativeErr{} does not implement the \"error\" interface:\n\n\tfunc sqrt(x float64) (float64, error) {\n\t\tif x < 0 {\n\t\t\treturn 0, NegativeErr{} // error: missing method\n\t\t}\n\t\t...\n\t}\n\n\ttype NegativeErr struct{}\n\nThis analyzer will suggest a fix to declare this method:\n\n\t// Error implements error.Error.\n\tfunc (NegativeErr) Error() string {\n\t\tpanic(\"unimplemented\")\n\t}\n\n(At least, it appears to behave that way, but technically it\ndoesn't use the SuggestedFix mechanism and the stub is created by\nlogic in gopls's golang.stub function.)", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/stubmethods", + Default: true, + }, { Name: "testinggoroutine", - Doc: "report calls to (*testing.T).Fatal from goroutines started by a test.\n\nFunctions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and\nSkip{,f,Now} methods of *testing.T, must be called from the test 
goroutine itself.\nThis checker detects calls to these functions that occur within a goroutine\nstarted by the test. For example:\n\n\tfunc TestFoo(t *testing.T) {\n\t go func() {\n\t t.Fatal(\"oops\") // error: (*T).Fatal called from non-test goroutine\n\t }()\n\t}", + Doc: "report calls to (*testing.T).Fatal from goroutines started by a test\n\nFunctions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and\nSkip{,f,Now} methods of *testing.T, must be called from the test goroutine itself.\nThis checker detects calls to these functions that occur within a goroutine\nstarted by the test. For example:\n\n\tfunc TestFoo(t *testing.T) {\n\t go func() {\n\t t.Fatal(\"oops\") // error: (*T).Fatal called from non-test goroutine\n\t }()\n\t}", URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/testinggoroutine", Default: true, }, @@ -1178,6 +1232,12 @@ var GeneratedAPIJSON = &APIJSON{ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/timeformat", Default: true, }, + { + Name: "undeclaredname", + Doc: "suggested fixes for \"undeclared name: <>\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"undeclared name: <>\". 
It will either insert a new statement,\nsuch as:\n\n\t<> :=\n\nor a new function declaration, such as:\n\n\tfunc <>(inferred parameters) {\n\t\tpanic(\"implement me!\")\n\t}", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/undeclaredname", + Default: true, + }, { Name: "unmarshal", Doc: "report passing non-pointer or non-interface values to unmarshal\n\nThe unmarshal analysis reports calls to functions such as json.Unmarshal\nin which the argument type is not a pointer or an interface.", @@ -1197,8 +1257,10 @@ var GeneratedAPIJSON = &APIJSON{ Default: true, }, { - Name: "unusedparams", - Doc: "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo reduce false positives it ignores:\n- methods\n- parameters that do not have a name or have the name '_' (the blank identifier)\n- functions in test files\n- functions with empty bodies or those with just a return stmt", + Name: "unusedparams", + Doc: "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo ensure soundness, it ignores:\n - \"address-taken\" functions, that is, functions that are used as\n a value rather than being called directly; their signatures may\n be required to conform to a func type.\n - exported functions or methods, since they may be address-taken\n in another package.\n - unexported methods whose name matches an interface method\n declared in the same package, since the method's signature\n may be required to conform to the interface type.\n - functions with empty bodies, or containing just a call to panic.\n - parameters that are unnamed, or named \"_\", the blank identifier.\n\nThe analyzer suggests a fix of replacing the parameter name by \"_\",\nbut in such cases a deeper fix can be obtained by invoking the\n\"Refactor: remove unused parameter\" code action, which 
will\neliminate the parameter entirely, along with all corresponding\narguments at call sites, while taking care to preserve any side\neffects in the argument expressions; see\nhttps://github.com/golang/tools/releases/tag/gopls%2Fv0.14.", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedparams", + Default: true, }, { Name: "unusedresult", @@ -1206,53 +1268,21 @@ var GeneratedAPIJSON = &APIJSON{ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedresult", Default: true, }, - { - Name: "unusedwrite", - Doc: "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\n\tfunc f(input []T) {\n\t\tfor i, v := range input { // v is a copy\n\t\t\tv.x = i // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\n\tfunc (t T) f() { // t is a copy\n\t\tt.x = i // unused write to field x\n\t}", - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedwrite", - }, - { - Name: "useany", - Doc: "check for constraints that could be simplified to \"any\"", - }, - { - Name: "fillreturns", - Doc: "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". 
For example:\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\nwill turn into\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.\n", - Default: true, - }, - { - Name: "nonewvars", - Doc: "suggested fixes for \"no new vars on left side of :=\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no new vars on left side of :=\". For example:\n\tz := 1\n\tz := 2\nwill turn into\n\tz := 1\n\tz = 2\n", - Default: true, - }, - { - Name: "noresultvalues", - Doc: "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\tfunc z() { return nil }\nwill turn into\n\tfunc z() { return }\n", - Default: true, - }, - { - Name: "undeclaredname", - Doc: "suggested fixes for \"undeclared name: <>\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"undeclared name: <>\". It will either insert a new statement,\nsuch as:\n\n\"<> := \"\n\nor a new function declaration, such as:\n\nfunc <>(inferred parameters) {\n\tpanic(\"implement me!\")\n}\n", - Default: true, - }, { Name: "unusedvariable", - Doc: "check for unused variables\n\nThe unusedvariable analyzer suggests fixes for unused variables errors.\n", + Doc: "check for unused variables and suggest fixes", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedvariable", }, { - Name: "fillstruct", - Doc: "note incomplete struct initializations\n\nThis analyzer provides diagnostics for any struct literals that do not have\nany fields initialized. 
Because the suggested fix for this analysis is\nexpensive to compute, callers should compute it separately, using the\nSuggestedFix function below.\n", + Name: "unusedwrite", + Doc: "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\n\tfunc f(input []T) {\n\t\tfor i, v := range input { // v is a copy\n\t\t\tv.x = i // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\n\tfunc (t T) f() { // t is a copy\n\t\tt.x = i // unused write to field x\n\t}", + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedwrite", Default: true, }, { - Name: "infertypeargs", - Doc: "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n", - Default: true, - }, - { - Name: "stubmethods", - Doc: "stub methods analyzer\n\nThis analyzer generates method stubs for concrete types\nin order to implement a target interface", - Default: true, + Name: "useany", + Doc: "check for constraints that could be simplified to \"any\"", + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/useany", }, }, Hints: []*HintJSON{ diff --git a/gopls/internal/settings/default.go b/gopls/internal/settings/default.go new file mode 100644 index 00000000000..e80fa59c270 --- /dev/null +++ b/gopls/internal/settings/default.go @@ -0,0 +1,136 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package settings + +import ( + "sync" + "time" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" +) + +var ( + optionsOnce sync.Once + defaultOptions *Options +) + +// DefaultOptions is the options that are used for Gopls execution independent +// of any externally provided configuration (LSP initialization, command +// invocation, etc.). +func DefaultOptions(overrides ...func(*Options)) *Options { + optionsOnce.Do(func() { + var commands []string + for _, c := range command.Commands { + commands = append(commands, c.ID()) + } + defaultOptions = &Options{ + ClientOptions: ClientOptions{ + InsertTextFormat: protocol.PlainTextTextFormat, + PreferredContentFormat: protocol.Markdown, + ConfigurationSupported: true, + DynamicConfigurationSupported: true, + DynamicRegistrationSemanticTokensSupported: true, + DynamicWatchedFilesSupported: true, + LineFoldingOnly: false, + HierarchicalDocumentSymbolSupport: true, + }, + ServerOptions: ServerOptions{ + SupportedCodeActions: map[file.Kind]map[protocol.CodeActionKind]bool{ + file.Go: { + protocol.SourceFixAll: true, + protocol.SourceOrganizeImports: true, + protocol.QuickFix: true, + protocol.RefactorRewrite: true, + protocol.RefactorInline: true, + protocol.RefactorExtract: true, + protocol.GoDoc: true, + }, + file.Mod: { + protocol.SourceOrganizeImports: true, + protocol.QuickFix: true, + }, + file.Work: {}, + file.Sum: {}, + file.Tmpl: {}, + }, + SupportedCommands: commands, + }, + UserOptions: UserOptions{ + BuildOptions: BuildOptions{ + ExpandWorkspaceToModule: true, + DirectoryFilters: []string{"-**/node_modules"}, + TemplateExtensions: []string{}, + StandaloneTags: []string{"ignore"}, + }, + UIOptions: UIOptions{ + DiagnosticOptions: DiagnosticOptions{ + Annotations: map[Annotation]bool{ + Bounds: true, + Escape: 
true, + Inline: true, + Nil: true, + }, + Vulncheck: ModeVulncheckOff, + DiagnosticsDelay: 1 * time.Second, + DiagnosticsTrigger: DiagnosticsOnEdit, + AnalysisProgressReporting: true, + }, + InlayHintOptions: InlayHintOptions{}, + DocumentationOptions: DocumentationOptions{ + HoverKind: FullDocumentation, + LinkTarget: "pkg.go.dev", + LinksInHover: true, + }, + NavigationOptions: NavigationOptions{ + ImportShortcut: BothShortcuts, + SymbolMatcher: SymbolFastFuzzy, + SymbolStyle: DynamicSymbols, + SymbolScope: AllSymbolScope, + }, + CompletionOptions: CompletionOptions{ + Matcher: Fuzzy, + CompletionBudget: 100 * time.Millisecond, + ExperimentalPostfixCompletions: true, + CompleteFunctionCalls: true, + }, + Codelenses: map[string]bool{ + string(command.Generate): true, + string(command.RegenerateCgo): true, + string(command.Tidy): true, + string(command.GCDetails): false, + string(command.UpgradeDependency): true, + string(command.Vendor): true, + // TODO(hyangah): enable command.RunGovulncheck. + }, + }, + }, + InternalOptions: InternalOptions{ + CompleteUnimported: true, + CompletionDocumentation: true, + DeepCompletion: true, + SubdirWatchPatterns: SubdirWatchPatternsAuto, + ReportAnalysisProgressAfter: 5 * time.Second, + TelemetryPrompt: false, + LinkifyShowMessage: false, + IncludeReplaceInWorkspace: false, + ZeroConfig: true, + }, + Hooks: Hooks{ + URLRegexp: urlRegexp(), + DefaultAnalyzers: analyzers(), + StaticcheckAnalyzers: map[string]*Analyzer{}, + }, + } + }) + options := defaultOptions.Clone() + for _, override := range overrides { + if override != nil { + override(options) + } + } + return options +} diff --git a/gopls/internal/settings/json.go b/gopls/internal/settings/json.go new file mode 100644 index 00000000000..30d8f119252 --- /dev/null +++ b/gopls/internal/settings/json.go @@ -0,0 +1,168 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package settings + +import ( + "fmt" + "io" + "regexp" + "strings" +) + +type APIJSON struct { + Options map[string][]*OptionJSON + Commands []*CommandJSON + Lenses []*LensJSON + Analyzers []*AnalyzerJSON + Hints []*HintJSON +} + +type OptionJSON struct { + Name string + Type string + Doc string + EnumKeys EnumKeys + EnumValues []EnumValue + Default string + Status string + Hierarchy string +} + +func (o *OptionJSON) String() string { + return o.Name +} + +func (o *OptionJSON) Write(w io.Writer) { + fmt.Fprintf(w, "**%v** *%v*\n\n", o.Name, o.Type) + writeStatus(w, o.Status) + enumValues := collectEnums(o) + fmt.Fprintf(w, "%v%v\nDefault: `%v`.\n\n", o.Doc, enumValues, o.Default) +} + +func writeStatus(section io.Writer, status string) { + switch status { + case "": + case "advanced": + fmt.Fprint(section, "**This is an advanced setting and should not be configured by most `gopls` users.**\n\n") + case "debug": + fmt.Fprint(section, "**This setting is for debugging purposes only.**\n\n") + case "experimental": + fmt.Fprint(section, "**This setting is experimental and may be deleted.**\n\n") + default: + fmt.Fprintf(section, "**Status: %s.**\n\n", status) + } +} + +var parBreakRE = regexp.MustCompile("\n{2,}") + +func collectEnums(opt *OptionJSON) string { + var b strings.Builder + write := func(name, doc string) { + if doc != "" { + unbroken := parBreakRE.ReplaceAllString(doc, "\\\n") + fmt.Fprintf(&b, "* %s\n", strings.TrimSpace(unbroken)) + } else { + fmt.Fprintf(&b, "* `%s`\n", name) + } + } + if len(opt.EnumValues) > 0 && opt.Type == "enum" { + b.WriteString("\nMust be one of:\n\n") + for _, val := range opt.EnumValues { + write(val.Value, val.Doc) + } + } else if len(opt.EnumKeys.Keys) > 0 && shouldShowEnumKeysInSettings(opt.Name) { + b.WriteString("\nCan contain any of:\n\n") + for _, val := range opt.EnumKeys.Keys { + write(val.Name, val.Doc) + } + } 
+ return b.String() +} + +func shouldShowEnumKeysInSettings(name string) bool { + // These fields have too many possible options to print. + return !(name == "analyses" || name == "codelenses" || name == "hints") +} + +type EnumKeys struct { + ValueType string + Keys []EnumKey +} + +type EnumKey struct { + Name string + Doc string + Default string +} + +type EnumValue struct { + Value string + Doc string +} + +type CommandJSON struct { + Command string + Title string + Doc string + ArgDoc string + ResultDoc string +} + +func (c *CommandJSON) String() string { + return c.Command +} + +func (c *CommandJSON) Write(w io.Writer) { + fmt.Fprintf(w, "### **%v**\nIdentifier: `%v`\n\n%v\n\n", c.Title, c.Command, c.Doc) + if c.ArgDoc != "" { + fmt.Fprintf(w, "Args:\n\n```\n%s\n```\n\n", c.ArgDoc) + } + if c.ResultDoc != "" { + fmt.Fprintf(w, "Result:\n\n```\n%s\n```\n\n", c.ResultDoc) + } +} + +type LensJSON struct { + Lens string + Title string + Doc string +} + +func (l *LensJSON) String() string { + return l.Title +} + +func (l *LensJSON) Write(w io.Writer) { + fmt.Fprintf(w, "%s (%s): %s", l.Title, l.Lens, l.Doc) +} + +type AnalyzerJSON struct { + Name string + Doc string + URL string + Default bool +} + +func (a *AnalyzerJSON) String() string { + return a.Name +} + +func (a *AnalyzerJSON) Write(w io.Writer) { + fmt.Fprintf(w, "%s (%s): %v", a.Name, a.Doc, a.Default) +} + +type HintJSON struct { + Name string + Doc string + Default bool +} + +func (h *HintJSON) String() string { + return h.Name +} + +func (h *HintJSON) Write(w io.Writer) { + fmt.Fprintf(w, "%s (%s): %v", h.Name, h.Doc, h.Default) +} diff --git a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go new file mode 100644 index 00000000000..750b7b7f119 --- /dev/null +++ b/gopls/internal/settings/settings.go @@ -0,0 +1,1503 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package settings + +import ( + "context" + "fmt" + "path/filepath" + "regexp" + "runtime" + "strings" + "time" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/appends" + "golang.org/x/tools/go/analysis/passes/asmdecl" + "golang.org/x/tools/go/analysis/passes/assign" + "golang.org/x/tools/go/analysis/passes/atomic" + "golang.org/x/tools/go/analysis/passes/atomicalign" + "golang.org/x/tools/go/analysis/passes/bools" + "golang.org/x/tools/go/analysis/passes/buildtag" + "golang.org/x/tools/go/analysis/passes/cgocall" + "golang.org/x/tools/go/analysis/passes/composite" + "golang.org/x/tools/go/analysis/passes/copylock" + "golang.org/x/tools/go/analysis/passes/deepequalerrors" + "golang.org/x/tools/go/analysis/passes/defers" + "golang.org/x/tools/go/analysis/passes/directive" + "golang.org/x/tools/go/analysis/passes/errorsas" + "golang.org/x/tools/go/analysis/passes/fieldalignment" + "golang.org/x/tools/go/analysis/passes/httpresponse" + "golang.org/x/tools/go/analysis/passes/ifaceassert" + "golang.org/x/tools/go/analysis/passes/loopclosure" + "golang.org/x/tools/go/analysis/passes/lostcancel" + "golang.org/x/tools/go/analysis/passes/nilfunc" + "golang.org/x/tools/go/analysis/passes/nilness" + "golang.org/x/tools/go/analysis/passes/printf" + "golang.org/x/tools/go/analysis/passes/shadow" + "golang.org/x/tools/go/analysis/passes/shift" + "golang.org/x/tools/go/analysis/passes/slog" + "golang.org/x/tools/go/analysis/passes/sortslice" + "golang.org/x/tools/go/analysis/passes/stdmethods" + "golang.org/x/tools/go/analysis/passes/stdversion" + "golang.org/x/tools/go/analysis/passes/stringintconv" + "golang.org/x/tools/go/analysis/passes/structtag" + "golang.org/x/tools/go/analysis/passes/testinggoroutine" + "golang.org/x/tools/go/analysis/passes/tests" + "golang.org/x/tools/go/analysis/passes/timeformat" + "golang.org/x/tools/go/analysis/passes/unmarshal" + "golang.org/x/tools/go/analysis/passes/unreachable" + 
"golang.org/x/tools/go/analysis/passes/unsafeptr" + "golang.org/x/tools/go/analysis/passes/unusedresult" + "golang.org/x/tools/go/analysis/passes/unusedwrite" + "golang.org/x/tools/gopls/internal/analysis/deprecated" + "golang.org/x/tools/gopls/internal/analysis/embeddirective" + "golang.org/x/tools/gopls/internal/analysis/fillreturns" + "golang.org/x/tools/gopls/internal/analysis/infertypeargs" + "golang.org/x/tools/gopls/internal/analysis/nonewvars" + "golang.org/x/tools/gopls/internal/analysis/noresultvalues" + "golang.org/x/tools/gopls/internal/analysis/simplifycompositelit" + "golang.org/x/tools/gopls/internal/analysis/simplifyrange" + "golang.org/x/tools/gopls/internal/analysis/simplifyslice" + "golang.org/x/tools/gopls/internal/analysis/stubmethods" + "golang.org/x/tools/gopls/internal/analysis/undeclaredname" + "golang.org/x/tools/gopls/internal/analysis/unusedparams" + "golang.org/x/tools/gopls/internal/analysis/unusedvariable" + "golang.org/x/tools/gopls/internal/analysis/useany" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" +) + +type Annotation string + +const ( + // Nil controls nil checks. + Nil Annotation = "nil" + + // Escape controls diagnostics about escape choices. + Escape Annotation = "escape" + + // Inline controls diagnostics about inlining choices. + Inline Annotation = "inline" + + // Bounds controls bounds checking diagnostics. + Bounds Annotation = "bounds" +) + +// Options holds various configuration that affects Gopls execution, organized +// by the nature or origin of the settings. +type Options struct { + ClientOptions + ServerOptions + UserOptions + InternalOptions + Hooks +} + +// IsAnalyzerEnabled reports whether an analyzer with the given name is +// enabled. +// +// TODO(rfindley): refactor to simplify this function. We no longer need the +// different categories of analyzer. 
+func (opts *Options) IsAnalyzerEnabled(name string) bool { + for _, amap := range []map[string]*Analyzer{opts.DefaultAnalyzers, opts.StaticcheckAnalyzers} { + for _, analyzer := range amap { + if analyzer.Analyzer.Name == name && analyzer.IsEnabled(opts) { + return true + } + } + } + return false +} + +// ClientOptions holds LSP-specific configuration that is provided by the +// client. +type ClientOptions struct { + ClientInfo *protocol.ClientInfo + InsertTextFormat protocol.InsertTextFormat + ConfigurationSupported bool + DynamicConfigurationSupported bool + DynamicRegistrationSemanticTokensSupported bool + DynamicWatchedFilesSupported bool + RelativePatternsSupported bool + PreferredContentFormat protocol.MarkupKind + LineFoldingOnly bool + HierarchicalDocumentSymbolSupport bool + SemanticTypes []string + SemanticMods []string + RelatedInformationSupported bool + CompletionTags bool + CompletionDeprecated bool + SupportedResourceOperations []protocol.ResourceOperationKind + CodeActionResolveOptions []string +} + +// ServerOptions holds LSP-specific configuration that is provided by the +// server. +type ServerOptions struct { + SupportedCodeActions map[file.Kind]map[protocol.CodeActionKind]bool + SupportedCommands []string +} + +type BuildOptions struct { + // BuildFlags is the set of flags passed on to the build system when invoked. + // It is applied to queries like `go list`, which is used when discovering files. + // The most common use is to set `-tags`. + BuildFlags []string + + // Env adds environment variables to external commands run by `gopls`, most notably `go list`. + Env map[string]string + + // DirectoryFilters can be used to exclude unwanted directories from the + // workspace. By default, all directories are included. Filters are an + // operator, `+` to include and `-` to exclude, followed by a path prefix + // relative to the workspace folder. 
They are evaluated in order, and + // the last filter that applies to a path controls whether it is included. + // The path prefix can be empty, so an initial `-` excludes everything. + // + // DirectoryFilters also supports the `**` operator to match 0 or more directories. + // + // Examples: + // + // Exclude node_modules at current depth: `-node_modules` + // + // Exclude node_modules at any depth: `-**/node_modules` + // + // Include only project_a: `-` (exclude everything), `+project_a` + // + // Include only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules` + DirectoryFilters []string + + // TemplateExtensions gives the extensions of file names that are treated + // as template files. (The extension + // is the part of the file name after the final dot.) + TemplateExtensions []string + + // obsolete, no effect + MemoryMode string `status:"experimental"` + + // ExpandWorkspaceToModule determines which packages are considered + // "workspace packages" when the workspace is using modules. + // + // Workspace packages affect the scope of workspace-wide operations. Notably, + // gopls diagnoses all packages considered to be part of the workspace after + // every keystroke, so by setting "ExpandWorkspaceToModule" to false, and + // opening a nested workspace directory, you can reduce the amount of work + // gopls has to do to keep your workspace up to date. + ExpandWorkspaceToModule bool `status:"experimental"` + + // AllowModfileModifications disables -mod=readonly, allowing imports from + // out-of-scope modules. This option will eventually be removed. + AllowModfileModifications bool `status:"experimental"` + + // AllowImplicitNetworkAccess disables GOPROXY=off, allowing implicit module + // downloads rather than requiring user action. This option will eventually + // be removed. 
+ AllowImplicitNetworkAccess bool `status:"experimental"` + + // StandaloneTags specifies a set of build constraints that identify + // individual Go source files that make up the entire main package of an + // executable. + // + // A common example of standalone main files is the convention of using the + // directive `//go:build ignore` to denote files that are not intended to be + // included in any package, for example because they are invoked directly by + // the developer using `go run`. + // + // Gopls considers a file to be a standalone main file if and only if it has + // package name "main" and has a build directive of the exact form + // "//go:build tag" or "// +build tag", where tag is among the list of tags + // configured by this setting. Notably, if the build constraint is more + // complicated than a simple tag (such as the composite constraint + // `//go:build tag && go1.18`), the file is not considered to be a standalone + // main file. + // + // This setting is only supported when gopls is built with Go 1.16 or later. + StandaloneTags []string +} + +type UIOptions struct { + DocumentationOptions + CompletionOptions + NavigationOptions + DiagnosticOptions + InlayHintOptions + + // Codelenses overrides the enabled/disabled state of code lenses. See the + // "Code Lenses" section of the + // [Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#code-lenses) + // for the list of supported lenses. + // + // Example Usage: + // + // ```json5 + // "gopls": { + // ... + // "codelenses": { + // "generate": false, // Don't show the `go generate` lens. + // "gc_details": true // Show a code lens toggling the display of gc's choices. + // } + // ... + // } + // ``` + Codelenses map[string]bool + + // SemanticTokens controls whether the LSP server will send + // semantic tokens to the client. 
+ SemanticTokens bool `status:"experimental"` + + // NoSemanticString turns off the sending of the semantic token 'string' + NoSemanticString bool `status:"experimental"` + + // NoSemanticNumber turns off the sending of the semantic token 'number' + NoSemanticNumber bool `status:"experimental"` +} + +type CompletionOptions struct { + // Placeholders enables placeholders for function parameters or struct + // fields in completion responses. + UsePlaceholders bool + + // CompletionBudget is the soft latency goal for completion requests. Most + // requests finish in a couple milliseconds, but in some cases deep + // completions can take much longer. As we use up our budget we + // dynamically reduce the search scope to ensure we return timely + // results. Zero means unlimited. + CompletionBudget time.Duration `status:"debug"` + + // Matcher sets the algorithm that is used when calculating completion + // candidates. + Matcher Matcher `status:"advanced"` + + // ExperimentalPostfixCompletions enables artificial method snippets + // such as "someSlice.sort!". + ExperimentalPostfixCompletions bool `status:"experimental"` + + // CompleteFunctionCalls enables function call completion. + // + // When completing a statement, or when a function return type matches the + // expected type of the expression being completed, completion may suggest call + // expressions (i.e. may include parentheses). + CompleteFunctionCalls bool +} + +type DocumentationOptions struct { + // HoverKind controls the information that appears in the hover text. + // SingleLine and Structured are intended for use only by authors of editor plugins. + HoverKind HoverKind + + // LinkTarget controls where documentation links go. + // It might be one of: + // + // * `"godoc.org"` + // * `"pkg.go.dev"` + // + // If a company chooses to use its own `godoc.org`, its address can be used as well. + // + // Modules matching the GOPRIVATE environment variable will not have + // documentation links in hover. 
+ LinkTarget string + + // LinksInHover toggles the presence of links to documentation in hover. + LinksInHover bool +} + +type FormattingOptions struct { + // Local is the equivalent of the `goimports -local` flag, which puts + // imports beginning with this string after third-party packages. It should + // be the prefix of the import path whose imports should be grouped + // separately. + Local string + + // Gofumpt indicates if we should run gofumpt formatting. + Gofumpt bool +} + +type DiagnosticOptions struct { + // Analyses specify analyses that the user would like to enable or disable. + // A map of the names of analysis passes that should be enabled/disabled. + // A full list of analyzers that gopls uses can be found in + // [analyzers.md](https://github.com/golang/tools/blob/master/gopls/doc/analyzers.md). + // + // Example Usage: + // + // ```json5 + // ... + // "analyses": { + // "unreachable": false, // Disable the unreachable analyzer. + // "unusedvariable": true // Enable the unusedvariable analyzer. + // } + // ... + // ``` + Analyses map[string]bool + + // Staticcheck enables additional analyses from staticcheck.io. + // These analyses are documented on + // [Staticcheck's website](https://staticcheck.io/docs/checks/). + Staticcheck bool `status:"experimental"` + + // Annotations specifies the various kinds of optimization diagnostics + // that should be reported by the gc_details command. + Annotations map[Annotation]bool `status:"experimental"` + + // Vulncheck enables vulnerability scanning. + Vulncheck VulncheckMode `status:"experimental"` + + // DiagnosticsDelay controls the amount of time that gopls waits + // after the most recent file modification before computing deep diagnostics. + // Simple diagnostics (parsing and type-checking) are always run immediately + // on recently modified packages. + // + // This option must be set to a valid duration string, for example `"250ms"`. 
+ DiagnosticsDelay time.Duration `status:"advanced"` + + // DiagnosticsTrigger controls when to run diagnostics. + DiagnosticsTrigger DiagnosticsTrigger `status:"experimental"` + + // AnalysisProgressReporting controls whether gopls sends progress + // notifications when construction of its index of analysis facts is taking a + // long time. Cancelling these notifications will cancel the indexing task, + // though it will restart after the next change in the workspace. + // + // When a package is opened for the first time and heavyweight analyses such as + // staticcheck are enabled, it can take a while to construct the index of + // analysis facts for all its dependencies. The index is cached in the + // filesystem, so subsequent analysis should be faster. + AnalysisProgressReporting bool +} + +type InlayHintOptions struct { + // Hints specify inlay hints that users want to see. A full list of hints + // that gopls uses can be found in + // [inlayHints.md](https://github.com/golang/tools/blob/master/gopls/doc/inlayHints.md). + Hints map[string]bool `status:"experimental"` +} + +type NavigationOptions struct { + // ImportShortcut specifies whether import statements should link to + // documentation or go to definitions. + ImportShortcut ImportShortcut + + // SymbolMatcher sets the algorithm that is used when finding workspace symbols. + SymbolMatcher SymbolMatcher `status:"advanced"` + + // SymbolStyle controls how symbols are qualified in symbol responses. + // + // Example Usage: + // + // ```json5 + // "gopls": { + // ... + // "symbolStyle": "Dynamic", + // ... + // } + // ``` + SymbolStyle SymbolStyle `status:"advanced"` + + // SymbolScope controls which packages are searched for workspace/symbol + // requests. The default value, "workspace", searches only workspace + // packages. The legacy behavior, "all", causes all loaded packages to be + // searched, including dependencies; this is more expensive and may return + // unwanted results. 
+ SymbolScope SymbolScope +} + +// UserOptions holds custom Gopls configuration (not part of the LSP) that is +// modified by the client. +type UserOptions struct { + BuildOptions + UIOptions + FormattingOptions + + // VerboseOutput enables additional debug logging. + VerboseOutput bool `status:"debug"` +} + +// EnvSlice returns Env as a slice of k=v strings. +func (u *UserOptions) EnvSlice() []string { + var result []string + for k, v := range u.Env { + result = append(result, fmt.Sprintf("%v=%v", k, v)) + } + return result +} + +// SetEnvSlice sets Env from a slice of k=v strings. +func (u *UserOptions) SetEnvSlice(env []string) { + u.Env = map[string]string{} + for _, kv := range env { + split := strings.SplitN(kv, "=", 2) + if len(split) != 2 { + continue + } + u.Env[split[0]] = split[1] + } +} + +// Hooks contains configuration that is provided to the Gopls command by the +// main package. +type Hooks struct { + // LicensesText holds third party licenses for software used by gopls. + LicensesText string + + // Whether staticcheck is supported. + StaticcheckSupported bool + + // URLRegexp is used to find potential URLs in comments/strings. + // + // Not all matches are shown to the user: if the matched URL is not detected + // as valid, it will be skipped. + URLRegexp *regexp.Regexp + + // GofumptFormat allows the gopls module to wire-in a call to + // gofumpt/format.Source. langVersion and modulePath are used for some + // Gofumpt formatting rules -- see the Gofumpt documentation for details. + GofumptFormat func(ctx context.Context, langVersion, modulePath string, src []byte) ([]byte, error) + + DefaultAnalyzers map[string]*Analyzer + StaticcheckAnalyzers map[string]*Analyzer +} + +// InternalOptions contains settings that are not intended for use by the +// average user. These may be settings used by tests or outdated settings that +// will soon be deprecated. Some of these settings may not even be configurable +// by the user. 
+// +// TODO(rfindley): even though these settings are not intended for +// modification, some of them should be surfaced in our documentation. +type InternalOptions struct { + // VerboseWorkDoneProgress controls whether the LSP server should send + // progress reports for all work done outside the scope of an RPC. + // Used by the regression tests. + VerboseWorkDoneProgress bool + + // The following options were previously available to users, but they + // really shouldn't be configured by anyone other than "power users". + + // CompletionDocumentation enables documentation with completion results. + CompletionDocumentation bool + + // CompleteUnimported enables completion for packages that you do not + // currently import. + CompleteUnimported bool + + // DeepCompletion enables the ability to return completions from deep + // inside relevant entities, rather than just the locally accessible ones. + // + // Consider this example: + // + // ```go + // package main + // + // import "fmt" + // + // type wrapString struct { + // str string + // } + // + // func main() { + // x := wrapString{"hello world"} + // fmt.Printf(<>) + // } + // ``` + // + // At the location of the `<>` in this program, deep completion would suggest + // the result `x.str`. + DeepCompletion bool + + // ShowBugReports causes a message to be shown when the first bug is reported + // on the server. + // This option applies only during initialization. + ShowBugReports bool + + // SubdirWatchPatterns configures the file watching glob patterns registered + // by gopls. + // + // Some clients (namely VS Code) do not send workspace/didChangeWatchedFile + // notifications for files contained in a directory when that directory is + // deleted: + // https://github.com/microsoft/vscode/issues/109754 + // + // In this case, gopls would miss important notifications about deleted + // packages. To work around this, gopls registers a watch pattern for each + // directory containing Go files. 
+ // + // Unfortunately, other clients experience performance problems with this + // many watch patterns, so there is no single behavior that works well for + // all clients. + // + // The "subdirWatchPatterns" setting allows configuring this behavior. Its + // default value of "auto" attempts to guess the correct behavior based on + // the client name. We'd love to avoid this specialization, but as described + // above there is no single value that works for all clients. + // + // If any LSP client does not behave well with the default value (for + // example, if like VS Code it drops file notifications), please file an + // issue. + SubdirWatchPatterns SubdirWatchPatterns + + // ReportAnalysisProgressAfter sets the duration for gopls to wait before starting + // progress reporting for ongoing go/analysis passes. + // + // It is intended to be used for testing only. + ReportAnalysisProgressAfter time.Duration + + // TelemetryPrompt controls whether gopls prompts about enabling Go telemetry. + // + // Once the prompt is answered, gopls doesn't ask again, but TelemetryPrompt + // can prevent the question from ever being asked in the first place. + TelemetryPrompt bool + + // LinkifyShowMessage controls whether the client wants gopls + // to linkify links in showMessage. e.g. [go.dev](https://go.dev). + LinkifyShowMessage bool + + // IncludeReplaceInWorkspace controls whether locally replaced modules in a + // go.mod file are treated like workspace modules. + // Or in other words, if a go.mod file with local replaces behaves like a + // go.work file. + IncludeReplaceInWorkspace bool + + // ZeroConfig enables the zero-config algorithm for workspace layout, + // dynamically creating build configurations for different modules, + // directories, and GOOS/GOARCH combinations to cover open files. 
+ ZeroConfig bool +} + +type SubdirWatchPatterns string + +const ( + SubdirWatchPatternsOn SubdirWatchPatterns = "on" + SubdirWatchPatternsOff SubdirWatchPatterns = "off" + SubdirWatchPatternsAuto SubdirWatchPatterns = "auto" +) + +type ImportShortcut string + +const ( + BothShortcuts ImportShortcut = "Both" + LinkShortcut ImportShortcut = "Link" + DefinitionShortcut ImportShortcut = "Definition" +) + +func (s ImportShortcut) ShowLinks() bool { + return s == BothShortcuts || s == LinkShortcut +} + +func (s ImportShortcut) ShowDefinition() bool { + return s == BothShortcuts || s == DefinitionShortcut +} + +type Matcher string + +const ( + Fuzzy Matcher = "Fuzzy" + CaseInsensitive Matcher = "CaseInsensitive" + CaseSensitive Matcher = "CaseSensitive" +) + +// A SymbolMatcher controls the matching of symbols for workspace/symbol +// requests. +type SymbolMatcher string + +const ( + SymbolFuzzy SymbolMatcher = "Fuzzy" + SymbolFastFuzzy SymbolMatcher = "FastFuzzy" + SymbolCaseInsensitive SymbolMatcher = "CaseInsensitive" + SymbolCaseSensitive SymbolMatcher = "CaseSensitive" +) + +// A SymbolStyle controls the formatting of symbols in workspace/symbol results. +type SymbolStyle string + +const ( + // PackageQualifiedSymbols is package qualified symbols i.e. + // "pkg.Foo.Field". + PackageQualifiedSymbols SymbolStyle = "Package" + // FullyQualifiedSymbols is fully qualified symbols, i.e. + // "path/to/pkg.Foo.Field". + FullyQualifiedSymbols SymbolStyle = "Full" + // DynamicSymbols uses whichever qualifier results in the highest scoring + // match for the given symbol query. Here a "qualifier" is any "/" or "." + // delimited suffix of the fully qualified symbol. i.e. "to/pkg.Foo.Field" or + // just "Foo.Field". + DynamicSymbols SymbolStyle = "Dynamic" +) + +// A SymbolScope controls the search scope for workspace/symbol requests. +type SymbolScope string + +const ( + // WorkspaceSymbolScope matches symbols in workspace packages only. 
+ WorkspaceSymbolScope SymbolScope = "workspace" + // AllSymbolScope matches symbols in any loaded package, including + // dependencies. + AllSymbolScope SymbolScope = "all" +) + +type HoverKind string + +const ( + SingleLine HoverKind = "SingleLine" + NoDocumentation HoverKind = "NoDocumentation" + SynopsisDocumentation HoverKind = "SynopsisDocumentation" + FullDocumentation HoverKind = "FullDocumentation" + + // Structured is an experimental setting that returns a structured hover format. + // This format separates the signature from the documentation, so that the client + // can do more manipulation of these fields. + // + // This should only be used by clients that support this behavior. + Structured HoverKind = "Structured" +) + +type VulncheckMode string + +const ( + // Disable vulnerability analysis. + ModeVulncheckOff VulncheckMode = "Off" + // In Imports mode, `gopls` will report vulnerabilities that affect packages + // directly and indirectly used by the analyzed main module. + ModeVulncheckImports VulncheckMode = "Imports" + + // TODO: VulncheckRequire, VulncheckCallgraph +) + +type DiagnosticsTrigger string + +const ( + // Trigger diagnostics on file edit and save. (default) + DiagnosticsOnEdit DiagnosticsTrigger = "Edit" + // Trigger diagnostics only on file save. Events like initial workspace load + // or configuration change will still trigger diagnostics. + DiagnosticsOnSave DiagnosticsTrigger = "Save" + // TODO: support "Manual"? +) + +type OptionResults []OptionResult + +type OptionResult struct { + Name string + Value any + Error error +} + +func SetOptions(options *Options, opts any) OptionResults { + var results OptionResults + switch opts := opts.(type) { + case nil: + case map[string]any: + // If the user's settings contains "allExperiments", set that first, + // and then let them override individual settings independently. 
+ var enableExperiments bool + for name, value := range opts { + if b, ok := value.(bool); name == "allExperiments" && ok && b { + enableExperiments = true + options.EnableAllExperiments() + } + } + seen := map[string]struct{}{} + for name, value := range opts { + results = append(results, options.set(name, value, seen)) + } + // Finally, enable any experimental features that are specified in + // maps, which allows users to individually toggle them on or off. + if enableExperiments { + options.enableAllExperimentMaps() + } + default: + results = append(results, OptionResult{ + Value: opts, + Error: fmt.Errorf("Invalid options type %T", opts), + }) + } + return results +} + +func (o *Options) ForClientCapabilities(clientName *protocol.ClientInfo, caps protocol.ClientCapabilities) { + o.ClientInfo = clientName + // Check if the client supports snippets in completion items. + if caps.Workspace.WorkspaceEdit != nil { + o.SupportedResourceOperations = caps.Workspace.WorkspaceEdit.ResourceOperations + } + if c := caps.TextDocument.Completion; c.CompletionItem.SnippetSupport { + o.InsertTextFormat = protocol.SnippetTextFormat + } + // Check if the client supports configuration messages. + o.ConfigurationSupported = caps.Workspace.Configuration + o.DynamicConfigurationSupported = caps.Workspace.DidChangeConfiguration.DynamicRegistration + o.DynamicRegistrationSemanticTokensSupported = caps.TextDocument.SemanticTokens.DynamicRegistration + o.DynamicWatchedFilesSupported = caps.Workspace.DidChangeWatchedFiles.DynamicRegistration + o.RelativePatternsSupported = caps.Workspace.DidChangeWatchedFiles.RelativePatternSupport + + // Check which types of content format are supported by this client. + if hover := caps.TextDocument.Hover; hover != nil && len(hover.ContentFormat) > 0 { + o.PreferredContentFormat = hover.ContentFormat[0] + } + // Check if the client supports only line folding. 
+ + if fr := caps.TextDocument.FoldingRange; fr != nil { + o.LineFoldingOnly = fr.LineFoldingOnly + } + // Check if the client supports hierarchical document symbols. + o.HierarchicalDocumentSymbolSupport = caps.TextDocument.DocumentSymbol.HierarchicalDocumentSymbolSupport + + // Client's semantic tokens + o.SemanticTypes = caps.TextDocument.SemanticTokens.TokenTypes + o.SemanticMods = caps.TextDocument.SemanticTokens.TokenModifiers + // we don't need Requests, as we support full functionality + // we don't need Formats, as there is only one, for now + + // Check if the client supports diagnostic related information. + o.RelatedInformationSupported = caps.TextDocument.PublishDiagnostics.RelatedInformation + // Check if the client completion support includes tags (preferred) or deprecation + if caps.TextDocument.Completion.CompletionItem.TagSupport != nil && + caps.TextDocument.Completion.CompletionItem.TagSupport.ValueSet != nil { + o.CompletionTags = true + } else if caps.TextDocument.Completion.CompletionItem.DeprecatedSupport { + o.CompletionDeprecated = true + } + + // Check if the client supports code actions resolving. + if caps.TextDocument.CodeAction.DataSupport && caps.TextDocument.CodeAction.ResolveSupport != nil { + o.CodeActionResolveOptions = caps.TextDocument.CodeAction.ResolveSupport.Properties + } +} + +func (o *Options) Clone() *Options { + // TODO(rfindley): has this function gone stale? It appears that there are + // settings that are incorrectly cloned here (such as TemplateExtensions). + result := &Options{ + ClientOptions: o.ClientOptions, + InternalOptions: o.InternalOptions, + Hooks: Hooks{ + StaticcheckSupported: o.StaticcheckSupported, + GofumptFormat: o.GofumptFormat, + URLRegexp: o.URLRegexp, + }, + ServerOptions: o.ServerOptions, + UserOptions: o.UserOptions, + } + // Fully clone any slice or map fields. Only Hooks, ExperimentalOptions, + // and UserOptions can be modified. 
+ copyStringMap := func(src map[string]bool) map[string]bool { + dst := make(map[string]bool) + for k, v := range src { + dst[k] = v + } + return dst + } + result.Analyses = copyStringMap(o.Analyses) + result.Codelenses = copyStringMap(o.Codelenses) + + copySlice := func(src []string) []string { + dst := make([]string, len(src)) + copy(dst, src) + return dst + } + result.SetEnvSlice(o.EnvSlice()) + result.BuildFlags = copySlice(o.BuildFlags) + result.DirectoryFilters = copySlice(o.DirectoryFilters) + result.StandaloneTags = copySlice(o.StandaloneTags) + + copyAnalyzerMap := func(src map[string]*Analyzer) map[string]*Analyzer { + dst := make(map[string]*Analyzer) + for k, v := range src { + dst[k] = v + } + return dst + } + result.DefaultAnalyzers = copyAnalyzerMap(o.DefaultAnalyzers) + result.StaticcheckAnalyzers = copyAnalyzerMap(o.StaticcheckAnalyzers) + return result +} + +func (o *Options) AddStaticcheckAnalyzer(a *analysis.Analyzer, enabled bool, severity protocol.DiagnosticSeverity) { + o.StaticcheckAnalyzers[a.Name] = &Analyzer{ + Analyzer: a, + Enabled: enabled, + Severity: severity, + } +} + +// EnableAllExperiments turns on all of the experimental "off-by-default" +// features offered by gopls. Any experimental features specified in maps +// should be enabled in enableAllExperimentMaps. +func (o *Options) EnableAllExperiments() { + o.SemanticTokens = true +} + +func (o *Options) enableAllExperimentMaps() { + if _, ok := o.Codelenses[string(command.GCDetails)]; !ok { + o.Codelenses[string(command.GCDetails)] = true + } + if _, ok := o.Codelenses[string(command.RunGovulncheck)]; !ok { + o.Codelenses[string(command.RunGovulncheck)] = true + } + if _, ok := o.Analyses[unusedvariable.Analyzer.Name]; !ok { + o.Analyses[unusedvariable.Analyzer.Name] = true + } +} + +// validateDirectoryFilter validates if the filter string +// - is not empty +// - start with either + or - +// - doesn't contain currently unsupported glob operators: *, ? 
+func validateDirectoryFilter(ifilter string) (string, error) { + filter := fmt.Sprint(ifilter) + if filter == "" || (filter[0] != '+' && filter[0] != '-') { + return "", fmt.Errorf("invalid filter %v, must start with + or -", filter) + } + segs := strings.Split(filter[1:], "/") + unsupportedOps := [...]string{"?", "*"} + for _, seg := range segs { + if seg != "**" { + for _, op := range unsupportedOps { + if strings.Contains(seg, op) { + return "", fmt.Errorf("invalid filter %v, operator %v not supported. If you want to have this operator supported, consider filing an issue.", filter, op) + } + } + } + } + + return strings.TrimRight(filepath.FromSlash(filter), "/"), nil +} + +func (o *Options) set(name string, value interface{}, seen map[string]struct{}) OptionResult { + // Flatten the name in case we get options with a hierarchy. + split := strings.Split(name, ".") + name = split[len(split)-1] + + result := OptionResult{Name: name, Value: value} + if _, ok := seen[name]; ok { + result.parseErrorf("duplicate configuration for %s", name) + } + seen[name] = struct{}{} + + switch name { + case "env": + menv, ok := value.(map[string]interface{}) + if !ok { + result.parseErrorf("invalid type %T, expect map", value) + break + } + if o.Env == nil { + o.Env = make(map[string]string) + } + for k, v := range menv { + o.Env[k] = fmt.Sprint(v) + } + + case "buildFlags": + // TODO(rfindley): use asStringSlice. + iflags, ok := value.([]interface{}) + if !ok { + result.parseErrorf("invalid type %T, expect list", value) + break + } + flags := make([]string, 0, len(iflags)) + for _, flag := range iflags { + flags = append(flags, fmt.Sprintf("%s", flag)) + } + o.BuildFlags = flags + + case "directoryFilters": + // TODO(rfindley): use asStringSlice. 
+ ifilters, ok := value.([]interface{}) + if !ok { + result.parseErrorf("invalid type %T, expect list", value) + break + } + var filters []string + for _, ifilter := range ifilters { + filter, err := validateDirectoryFilter(fmt.Sprintf("%v", ifilter)) + if err != nil { + result.parseErrorf("%v", err) + return result + } + filters = append(filters, strings.TrimRight(filepath.FromSlash(filter), "/")) + } + o.DirectoryFilters = filters + + case "memoryMode": + result.deprecated("") + case "completionDocumentation": + result.setBool(&o.CompletionDocumentation) + case "usePlaceholders": + result.setBool(&o.UsePlaceholders) + case "deepCompletion": + result.setBool(&o.DeepCompletion) + case "completeUnimported": + result.setBool(&o.CompleteUnimported) + case "completionBudget": + result.setDuration(&o.CompletionBudget) + case "matcher": + if s, ok := result.asOneOf( + string(Fuzzy), + string(CaseSensitive), + string(CaseInsensitive), + ); ok { + o.Matcher = Matcher(s) + } + + case "symbolMatcher": + if s, ok := result.asOneOf( + string(SymbolFuzzy), + string(SymbolFastFuzzy), + string(SymbolCaseInsensitive), + string(SymbolCaseSensitive), + ); ok { + o.SymbolMatcher = SymbolMatcher(s) + } + + case "symbolStyle": + if s, ok := result.asOneOf( + string(FullyQualifiedSymbols), + string(PackageQualifiedSymbols), + string(DynamicSymbols), + ); ok { + o.SymbolStyle = SymbolStyle(s) + } + + case "symbolScope": + if s, ok := result.asOneOf( + string(WorkspaceSymbolScope), + string(AllSymbolScope), + ); ok { + o.SymbolScope = SymbolScope(s) + } + + case "hoverKind": + if s, ok := result.asOneOf( + string(NoDocumentation), + string(SingleLine), + string(SynopsisDocumentation), + string(FullDocumentation), + string(Structured), + ); ok { + o.HoverKind = HoverKind(s) + } + + case "linkTarget": + result.setString(&o.LinkTarget) + + case "linksInHover": + result.setBool(&o.LinksInHover) + + case "importShortcut": + if s, ok := result.asOneOf(string(BothShortcuts), 
string(LinkShortcut), string(DefinitionShortcut)); ok { + o.ImportShortcut = ImportShortcut(s) + } + + case "analyses": + result.setBoolMap(&o.Analyses) + + case "hints": + result.setBoolMap(&o.Hints) + + case "annotations": + result.setAnnotationMap(&o.Annotations) + + case "vulncheck": + if s, ok := result.asOneOf( + string(ModeVulncheckOff), + string(ModeVulncheckImports), + ); ok { + o.Vulncheck = VulncheckMode(s) + } + + case "codelenses", "codelens": + var lensOverrides map[string]bool + result.setBoolMap(&lensOverrides) + if result.Error == nil { + if o.Codelenses == nil { + o.Codelenses = make(map[string]bool) + } + for lens, enabled := range lensOverrides { + o.Codelenses[lens] = enabled + } + } + + // codelens is deprecated, but still works for now. + // TODO(rstambler): Remove this for the gopls/v0.7.0 release. + if name == "codelens" { + result.deprecated("codelenses") + } + + case "staticcheck": + if v, ok := result.asBool(); ok { + o.Staticcheck = v + if v && !o.StaticcheckSupported { + result.Error = fmt.Errorf("applying setting %q: staticcheck is not supported at %s;"+ + " rebuild gopls with a more recent version of Go", result.Name, runtime.Version()) + } + } + + case "local": + result.setString(&o.Local) + + case "verboseOutput": + result.setBool(&o.VerboseOutput) + + case "verboseWorkDoneProgress": + result.setBool(&o.VerboseWorkDoneProgress) + + case "tempModFile": + result.deprecated("") + + case "showBugReports": + result.setBool(&o.ShowBugReports) + + case "gofumpt": + if v, ok := result.asBool(); ok { + o.Gofumpt = v + if v && o.GofumptFormat == nil { + result.Error = fmt.Errorf("applying setting %q: gofumpt is not supported at %s;"+ + " rebuild gopls with a more recent version of Go", result.Name, runtime.Version()) + } + } + case "completeFunctionCalls": + result.setBool(&o.CompleteFunctionCalls) + + case "semanticTokens": + result.setBool(&o.SemanticTokens) + + case "noSemanticString": + result.setBool(&o.NoSemanticString) + + case 
"noSemanticNumber": + result.setBool(&o.NoSemanticNumber) + + case "expandWorkspaceToModule": + // See golang/go#63536: we can consider deprecating + // expandWorkspaceToModule, but probably need to change the default + // behavior in that case to *not* expand to the module. + result.setBool(&o.ExpandWorkspaceToModule) + + case "experimentalPostfixCompletions": + result.setBool(&o.ExperimentalPostfixCompletions) + + case "experimentalWorkspaceModule": + result.deprecated("") + + case "experimentalTemplateSupport": // TODO(pjw): remove after June 2022 + result.deprecated("") + + case "templateExtensions": + if iexts, ok := value.([]interface{}); ok { + ans := []string{} + for _, x := range iexts { + ans = append(ans, fmt.Sprint(x)) + } + o.TemplateExtensions = ans + break + } + if value == nil { + o.TemplateExtensions = nil + break + } + result.parseErrorf("unexpected type %T not []string", value) + + case "experimentalDiagnosticsDelay": + result.deprecated("diagnosticsDelay") + + case "diagnosticsDelay": + result.setDuration(&o.DiagnosticsDelay) + + case "diagnosticsTrigger": + if s, ok := result.asOneOf( + string(DiagnosticsOnEdit), + string(DiagnosticsOnSave), + ); ok { + o.DiagnosticsTrigger = DiagnosticsTrigger(s) + } + + case "analysisProgressReporting": + result.setBool(&o.AnalysisProgressReporting) + + case "experimentalWatchedFileDelay": + result.deprecated("") + + case "experimentalPackageCacheKey": + result.deprecated("") + + case "allowModfileModifications": + result.softErrorf("gopls setting \"allowModfileModifications\" is deprecated.\nPlease comment on https://go.dev/issue/65546 if this impacts your workflow.") + result.setBool(&o.AllowModfileModifications) + + case "allowImplicitNetworkAccess": + result.setBool(&o.AllowImplicitNetworkAccess) + + case "experimentalUseInvalidMetadata": + result.deprecated("") + + case "standaloneTags": + result.setStringSlice(&o.StandaloneTags) + + case "allExperiments": + // This setting should be handled before all 
of the other options are + // processed, so do nothing here. + + case "newDiff": + result.deprecated("") + + case "subdirWatchPatterns": + if s, ok := result.asOneOf( + string(SubdirWatchPatternsOn), + string(SubdirWatchPatternsOff), + string(SubdirWatchPatternsAuto), + ); ok { + o.SubdirWatchPatterns = SubdirWatchPatterns(s) + } + + case "reportAnalysisProgressAfter": + result.setDuration(&o.ReportAnalysisProgressAfter) + + case "telemetryPrompt": + result.setBool(&o.TelemetryPrompt) + + case "linkifyShowMessage": + result.setBool(&o.LinkifyShowMessage) + + case "includeReplaceInWorkspace": + result.setBool(&o.IncludeReplaceInWorkspace) + + case "zeroConfig": + result.setBool(&o.ZeroConfig) + + // Replaced settings. + case "experimentalDisabledAnalyses": + result.deprecated("analyses") + + case "disableDeepCompletion": + result.deprecated("deepCompletion") + + case "disableFuzzyMatching": + result.deprecated("fuzzyMatching") + + case "wantCompletionDocumentation": + result.deprecated("completionDocumentation") + + case "wantUnimportedCompletions": + result.deprecated("completeUnimported") + + case "fuzzyMatching": + result.deprecated("matcher") + + case "caseSensitiveCompletion": + result.deprecated("matcher") + + // Deprecated settings. + case "wantSuggestedFixes": + result.deprecated("") + + case "noIncrementalSync": + result.deprecated("") + + case "watchFileChanges": + result.deprecated("") + + case "go-diff": + result.deprecated("") + + default: + result.unexpected() + } + return result +} + +// parseErrorf reports an error parsing the current configuration value. +func (r *OptionResult) parseErrorf(msg string, values ...interface{}) { + if false { + _ = fmt.Sprintf(msg, values...) // this causes vet to check this like printf + } + prefix := fmt.Sprintf("parsing setting %q: ", r.Name) + r.Error = fmt.Errorf(prefix+msg, values...) +} + +// A SoftError is an error that does not affect the functionality of gopls. 
+type SoftError struct { + msg string +} + +func (e *SoftError) Error() string { + return e.msg +} + +// deprecated reports the current setting as deprecated. If 'replacement' is +// non-nil, it is suggested to the user. +func (r *OptionResult) deprecated(replacement string) { + msg := fmt.Sprintf("gopls setting %q is deprecated", r.Name) + if replacement != "" { + msg = fmt.Sprintf("%s, use %q instead", msg, replacement) + } + r.Error = &SoftError{msg} +} + +// softErrorf reports a soft error related to the current option. +func (r *OptionResult) softErrorf(format string, args ...any) { + r.Error = &SoftError{fmt.Sprintf(format, args...)} +} + +// unexpected reports that the current setting is not known to gopls. +func (r *OptionResult) unexpected() { + r.Error = fmt.Errorf("unexpected gopls setting %q", r.Name) +} + +func (r *OptionResult) asBool() (bool, bool) { + b, ok := r.Value.(bool) + if !ok { + r.parseErrorf("invalid type %T, expect bool", r.Value) + return false, false + } + return b, true +} + +func (r *OptionResult) setBool(b *bool) { + if v, ok := r.asBool(); ok { + *b = v + } +} + +func (r *OptionResult) setDuration(d *time.Duration) { + if v, ok := r.asString(); ok { + parsed, err := time.ParseDuration(v) + if err != nil { + r.parseErrorf("failed to parse duration %q: %v", v, err) + return + } + *d = parsed + } +} + +func (r *OptionResult) setBoolMap(bm *map[string]bool) { + m := r.asBoolMap() + *bm = m +} + +func (r *OptionResult) setAnnotationMap(bm *map[Annotation]bool) { + all := r.asBoolMap() + if all == nil { + return + } + // Default to everything enabled by default. + m := make(map[Annotation]bool) + for k, enabled := range all { + a, err := asOneOf( + k, + string(Nil), + string(Escape), + string(Inline), + string(Bounds), + ) + if err != nil { + // In case of an error, process any legacy values. 
+ switch k { + case "noEscape": + m[Escape] = false + r.parseErrorf(`"noEscape" is deprecated, set "Escape: false" instead`) + case "noNilcheck": + m[Nil] = false + r.parseErrorf(`"noNilcheck" is deprecated, set "Nil: false" instead`) + case "noInline": + m[Inline] = false + r.parseErrorf(`"noInline" is deprecated, set "Inline: false" instead`) + case "noBounds": + m[Bounds] = false + r.parseErrorf(`"noBounds" is deprecated, set "Bounds: false" instead`) + default: + r.parseErrorf("%v", err) + } + continue + } + m[Annotation(a)] = enabled + } + *bm = m +} + +func (r *OptionResult) asBoolMap() map[string]bool { + all, ok := r.Value.(map[string]interface{}) + if !ok { + r.parseErrorf("invalid type %T for map[string]bool option", r.Value) + return nil + } + m := make(map[string]bool) + for a, enabled := range all { + if e, ok := enabled.(bool); ok { + m[a] = e + } else { + r.parseErrorf("invalid type %T for map key %q", enabled, a) + return m + } + } + return m +} + +func (r *OptionResult) asString() (string, bool) { + b, ok := r.Value.(string) + if !ok { + r.parseErrorf("invalid type %T, expect string", r.Value) + return "", false + } + return b, true +} + +func (r *OptionResult) asStringSlice() ([]string, bool) { + iList, ok := r.Value.([]interface{}) + if !ok { + r.parseErrorf("invalid type %T, expect list", r.Value) + return nil, false + } + var list []string + for _, elem := range iList { + s, ok := elem.(string) + if !ok { + r.parseErrorf("invalid element type %T, expect string", elem) + return nil, false + } + list = append(list, s) + } + return list, true +} + +func (r *OptionResult) asOneOf(options ...string) (string, bool) { + s, ok := r.asString() + if !ok { + return "", false + } + s, err := asOneOf(s, options...) 
+ if err != nil { + r.parseErrorf("%v", err) + } + return s, err == nil +} + +func asOneOf(str string, options ...string) (string, error) { + lower := strings.ToLower(str) + for _, opt := range options { + if strings.ToLower(opt) == lower { + return opt, nil + } + } + return "", fmt.Errorf("invalid option %q for enum", str) +} + +func (r *OptionResult) setString(s *string) { + if v, ok := r.asString(); ok { + *s = v + } +} + +func (r *OptionResult) setStringSlice(s *[]string) { + if v, ok := r.asStringSlice(); ok { + *s = v + } +} + +func analyzers() map[string]*Analyzer { + return map[string]*Analyzer{ + // The traditional vet suite: + appends.Analyzer.Name: {Analyzer: appends.Analyzer, Enabled: true}, + asmdecl.Analyzer.Name: {Analyzer: asmdecl.Analyzer, Enabled: true}, + assign.Analyzer.Name: {Analyzer: assign.Analyzer, Enabled: true}, + atomic.Analyzer.Name: {Analyzer: atomic.Analyzer, Enabled: true}, + bools.Analyzer.Name: {Analyzer: bools.Analyzer, Enabled: true}, + buildtag.Analyzer.Name: {Analyzer: buildtag.Analyzer, Enabled: true}, + cgocall.Analyzer.Name: {Analyzer: cgocall.Analyzer, Enabled: true}, + composite.Analyzer.Name: {Analyzer: composite.Analyzer, Enabled: true}, + copylock.Analyzer.Name: {Analyzer: copylock.Analyzer, Enabled: true}, + defers.Analyzer.Name: {Analyzer: defers.Analyzer, Enabled: true}, + deprecated.Analyzer.Name: { + Analyzer: deprecated.Analyzer, + Enabled: true, + Severity: protocol.SeverityHint, + Tag: []protocol.DiagnosticTag{protocol.Deprecated}, + }, + directive.Analyzer.Name: {Analyzer: directive.Analyzer, Enabled: true}, + errorsas.Analyzer.Name: {Analyzer: errorsas.Analyzer, Enabled: true}, + httpresponse.Analyzer.Name: {Analyzer: httpresponse.Analyzer, Enabled: true}, + ifaceassert.Analyzer.Name: {Analyzer: ifaceassert.Analyzer, Enabled: true}, + loopclosure.Analyzer.Name: {Analyzer: loopclosure.Analyzer, Enabled: true}, + lostcancel.Analyzer.Name: {Analyzer: lostcancel.Analyzer, Enabled: true}, + nilfunc.Analyzer.Name: 
{Analyzer: nilfunc.Analyzer, Enabled: true}, + printf.Analyzer.Name: {Analyzer: printf.Analyzer, Enabled: true}, + shift.Analyzer.Name: {Analyzer: shift.Analyzer, Enabled: true}, + slog.Analyzer.Name: {Analyzer: slog.Analyzer, Enabled: true}, + stdmethods.Analyzer.Name: {Analyzer: stdmethods.Analyzer, Enabled: true}, + stringintconv.Analyzer.Name: {Analyzer: stringintconv.Analyzer, Enabled: true}, + structtag.Analyzer.Name: {Analyzer: structtag.Analyzer, Enabled: true}, + tests.Analyzer.Name: {Analyzer: tests.Analyzer, Enabled: true}, + unmarshal.Analyzer.Name: {Analyzer: unmarshal.Analyzer, Enabled: true}, + unreachable.Analyzer.Name: {Analyzer: unreachable.Analyzer, Enabled: true}, + unsafeptr.Analyzer.Name: {Analyzer: unsafeptr.Analyzer, Enabled: true}, + unusedresult.Analyzer.Name: {Analyzer: unusedresult.Analyzer, Enabled: true}, + + // Non-vet analyzers: + // - some (nilness, unusedwrite) use go/ssa; + // - some (unusedwrite) report bad code but not always a bug, + // so are not suitable for vet. 
+ atomicalign.Analyzer.Name: {Analyzer: atomicalign.Analyzer, Enabled: true}, + deepequalerrors.Analyzer.Name: {Analyzer: deepequalerrors.Analyzer, Enabled: true}, + fieldalignment.Analyzer.Name: {Analyzer: fieldalignment.Analyzer, Enabled: false}, + nilness.Analyzer.Name: {Analyzer: nilness.Analyzer, Enabled: true}, + shadow.Analyzer.Name: {Analyzer: shadow.Analyzer, Enabled: false}, + sortslice.Analyzer.Name: {Analyzer: sortslice.Analyzer, Enabled: true}, + testinggoroutine.Analyzer.Name: {Analyzer: testinggoroutine.Analyzer, Enabled: true}, + unusedparams.Analyzer.Name: {Analyzer: unusedparams.Analyzer, Enabled: true}, + unusedwrite.Analyzer.Name: {Analyzer: unusedwrite.Analyzer, Enabled: true}, + useany.Analyzer.Name: {Analyzer: useany.Analyzer, Enabled: false}, + infertypeargs.Analyzer.Name: { + Analyzer: infertypeargs.Analyzer, + Enabled: true, + Severity: protocol.SeverityHint, + }, + timeformat.Analyzer.Name: {Analyzer: timeformat.Analyzer, Enabled: true}, + embeddirective.Analyzer.Name: {Analyzer: embeddirective.Analyzer, Enabled: true}, + + // gofmt -s suite: + simplifycompositelit.Analyzer.Name: { + Analyzer: simplifycompositelit.Analyzer, + Enabled: true, + ActionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, + }, + simplifyrange.Analyzer.Name: { + Analyzer: simplifyrange.Analyzer, + Enabled: true, + ActionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, + }, + simplifyslice.Analyzer.Name: { + Analyzer: simplifyslice.Analyzer, + Enabled: true, + ActionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, + }, + stdversion.Analyzer.Name: { + Analyzer: stdversion.Analyzer, + Enabled: true, + }, + + // Type error analyzers. + // These analyzers enrich go/types errors with suggested fixes. 
+ fillreturns.Analyzer.Name: {Analyzer: fillreturns.Analyzer, Enabled: true}, + nonewvars.Analyzer.Name: {Analyzer: nonewvars.Analyzer, Enabled: true}, + noresultvalues.Analyzer.Name: {Analyzer: noresultvalues.Analyzer, Enabled: true}, + stubmethods.Analyzer.Name: {Analyzer: stubmethods.Analyzer, Enabled: true}, + undeclaredname.Analyzer.Name: {Analyzer: undeclaredname.Analyzer, Enabled: true}, + // TODO(rfindley): why isn't the 'unusedvariable' analyzer enabled, if it + // is only enhancing type errors with suggested fixes? + // + // In particular, enabling this analyzer could cause unused variables to be + // greyed out, (due to the 'deletions only' fix). That seems like a nice UI + // feature. + unusedvariable.Analyzer.Name: {Analyzer: unusedvariable.Analyzer, Enabled: false}, + } +} + +func urlRegexp() *regexp.Regexp { + // Ensure links are matched as full words, not anywhere. + re := regexp.MustCompile(`\b(http|ftp|https)://([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?\b`) + re.Longest() + return re +} diff --git a/gopls/internal/settings/settings_test.go b/gopls/internal/settings/settings_test.go new file mode 100644 index 00000000000..ebc4f2c41a8 --- /dev/null +++ b/gopls/internal/settings/settings_test.go @@ -0,0 +1,206 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package settings + +import ( + "testing" + "time" +) + +func TestSetOption(t *testing.T) { + tests := []struct { + name string + value interface{} + wantError bool + check func(Options) bool + }{ + { + name: "symbolStyle", + value: "Dynamic", + check: func(o Options) bool { return o.SymbolStyle == DynamicSymbols }, + }, + { + name: "symbolStyle", + value: "", + wantError: true, + check: func(o Options) bool { return o.SymbolStyle == "" }, + }, + { + name: "symbolStyle", + value: false, + wantError: true, + check: func(o Options) bool { return o.SymbolStyle == "" }, + }, + { + name: "symbolMatcher", + value: "caseInsensitive", + check: func(o Options) bool { return o.SymbolMatcher == SymbolCaseInsensitive }, + }, + { + name: "completionBudget", + value: "2s", + check: func(o Options) bool { return o.CompletionBudget == 2*time.Second }, + }, + { + name: "staticcheck", + value: true, + check: func(o Options) bool { return o.Staticcheck == true }, + wantError: true, // o.StaticcheckSupported is unset + }, + { + name: "codelenses", + value: map[string]interface{}{"generate": true}, + check: func(o Options) bool { return o.Codelenses["generate"] }, + }, + { + name: "allExperiments", + value: true, + check: func(o Options) bool { + return true // just confirm that we handle this setting + }, + }, + { + name: "hoverKind", + value: "FullDocumentation", + check: func(o Options) bool { + return o.HoverKind == FullDocumentation + }, + }, + { + name: "hoverKind", + value: "NoDocumentation", + check: func(o Options) bool { + return o.HoverKind == NoDocumentation + }, + }, + { + name: "hoverKind", + value: "SingleLine", + check: func(o Options) bool { + return o.HoverKind == SingleLine + }, + }, + { + name: "hoverKind", + value: "Structured", + check: func(o Options) bool { + return o.HoverKind == Structured + }, + }, + { + name: "ui.documentation.hoverKind", + value: "Structured", + check: func(o Options) bool { + return o.HoverKind == Structured + }, + }, + { + name: 
"matcher", + value: "Fuzzy", + check: func(o Options) bool { + return o.Matcher == Fuzzy + }, + }, + { + name: "matcher", + value: "CaseSensitive", + check: func(o Options) bool { + return o.Matcher == CaseSensitive + }, + }, + { + name: "matcher", + value: "CaseInsensitive", + check: func(o Options) bool { + return o.Matcher == CaseInsensitive + }, + }, + { + name: "env", + value: map[string]interface{}{"testing": "true"}, + check: func(o Options) bool { + v, found := o.Env["testing"] + return found && v == "true" + }, + }, + { + name: "env", + value: []string{"invalid", "input"}, + wantError: true, + check: func(o Options) bool { + return o.Env == nil + }, + }, + { + name: "directoryFilters", + value: []interface{}{"-node_modules", "+project_a"}, + check: func(o Options) bool { + return len(o.DirectoryFilters) == 2 + }, + }, + { + name: "directoryFilters", + value: []interface{}{"invalid"}, + wantError: true, + check: func(o Options) bool { + return len(o.DirectoryFilters) == 0 + }, + }, + { + name: "directoryFilters", + value: []string{"-invalid", "+type"}, + wantError: true, + check: func(o Options) bool { + return len(o.DirectoryFilters) == 0 + }, + }, + { + name: "annotations", + value: map[string]interface{}{ + "Nil": false, + "noBounds": true, + }, + wantError: true, + check: func(o Options) bool { + return !o.Annotations[Nil] && !o.Annotations[Bounds] + }, + }, + { + name: "vulncheck", + value: []interface{}{"invalid"}, + wantError: true, + check: func(o Options) bool { + return o.Vulncheck == "" // For invalid value, default to 'off'. + }, + }, + { + name: "vulncheck", + value: "Imports", + check: func(o Options) bool { + return o.Vulncheck == ModeVulncheckImports // For invalid value, default to 'off'. 
+ }, + }, + { + name: "vulncheck", + value: "imports", + check: func(o Options) bool { + return o.Vulncheck == ModeVulncheckImports + }, + }, + } + + for _, test := range tests { + var opts Options + result := opts.set(test.name, test.value, map[string]struct{}{}) + if (result.Error != nil) != test.wantError { + t.Fatalf("Options.set(%q, %v): result.Error = %v, want error: %t", test.name, test.value, result.Error, test.wantError) + } + // TODO: this could be made much better using cmp.Diff, if that becomes + // available in this module. + if !test.check(opts) { + t.Errorf("Options.set(%q, %v): unexpected result %+v", test.name, test.value, opts) + } + } +} diff --git a/gopls/internal/span/parse.go b/gopls/internal/span/parse.go deleted file mode 100644 index 715d5fe44fd..00000000000 --- a/gopls/internal/span/parse.go +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package span - -import ( - "path/filepath" - "strconv" - "strings" - "unicode/utf8" -) - -// Parse returns the location represented by the input. -// Only file paths are accepted, not URIs. -// The returned span will be normalized, and thus if printed may produce a -// different string. -func Parse(input string) Span { - return ParseInDir(input, ".") -} - -// ParseInDir is like Parse, but interprets paths relative to wd. 
-func ParseInDir(input, wd string) Span { - uri := func(path string) URI { - if !filepath.IsAbs(path) { - path = filepath.Join(wd, path) - } - return URIFromPath(path) - } - // :0:0#0-0:0#0 - valid := input - var hold, offset int - hadCol := false - suf := rstripSuffix(input) - if suf.sep == "#" { - offset = suf.num - suf = rstripSuffix(suf.remains) - } - if suf.sep == ":" { - valid = suf.remains - hold = suf.num - hadCol = true - suf = rstripSuffix(suf.remains) - } - switch { - case suf.sep == ":": - return New(uri(suf.remains), NewPoint(suf.num, hold, offset), Point{}) - case suf.sep == "-": - // we have a span, fall out of the case to continue - default: - // separator not valid, rewind to either the : or the start - return New(uri(valid), NewPoint(hold, 0, offset), Point{}) - } - // only the span form can get here - // at this point we still don't know what the numbers we have mean - // if have not yet seen a : then we might have either a line or a column depending - // on whether start has a column or not - // we build an end point and will fix it later if needed - end := NewPoint(suf.num, hold, offset) - hold, offset = 0, 0 - suf = rstripSuffix(suf.remains) - if suf.sep == "#" { - offset = suf.num - suf = rstripSuffix(suf.remains) - } - if suf.sep != ":" { - // turns out we don't have a span after all, rewind - return New(uri(valid), end, Point{}) - } - valid = suf.remains - hold = suf.num - suf = rstripSuffix(suf.remains) - if suf.sep != ":" { - // line#offset only - return New(uri(valid), NewPoint(hold, 0, offset), end) - } - // we have a column, so if end only had one number, it is also the column - if !hadCol { - end = NewPoint(suf.num, end.v.Line, end.v.Offset) - } - return New(uri(suf.remains), NewPoint(suf.num, hold, offset), end) -} - -type suffix struct { - remains string - sep string - num int -} - -func rstripSuffix(input string) suffix { - if len(input) == 0 { - return suffix{"", "", -1} - } - remains := input - - // Remove optional trailing 
decimal number. - num := -1 - last := strings.LastIndexFunc(remains, func(r rune) bool { return r < '0' || r > '9' }) - if last >= 0 && last < len(remains)-1 { - number, err := strconv.ParseInt(remains[last+1:], 10, 64) - if err == nil { - num = int(number) - remains = remains[:last+1] - } - } - // now see if we have a trailing separator - r, w := utf8.DecodeLastRuneInString(remains) - // TODO(adonovan): this condition is clearly wrong. Should the third byte be '-'? - if r != ':' && r != '#' && r == '#' { - return suffix{input, "", -1} - } - remains = remains[:len(remains)-w] - return suffix{remains, string(r), num} -} diff --git a/gopls/internal/span/span.go b/gopls/internal/span/span.go deleted file mode 100644 index fddc0a42b56..00000000000 --- a/gopls/internal/span/span.go +++ /dev/null @@ -1,249 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package span contains support for representing with positions and ranges in -// text files. -package span - -import ( - "encoding/json" - "fmt" - "go/token" - "path" - "sort" - "strings" - - "golang.org/x/tools/gopls/internal/lsp/safetoken" -) - -// A Span represents a range of text within a source file. The start -// and end points of a valid span may be hold either its byte offset, -// or its (line, column) pair, or both. Columns are measured in bytes. -// -// Spans are appropriate in user interfaces (e.g. command-line tools) -// and tests where a position is notated without access to the content -// of the file. -// -// Use protocol.Mapper to convert between Span and other -// representations, such as go/token (also UTF-8) or the LSP protocol -// (UTF-16). The latter requires access to file contents. -// -// See overview comments at ../lsp/protocol/mapper.go. -type Span struct { - v span -} - -// Point represents a single point within a file. 
-// In general this should only be used as part of a Span, as on its own it -// does not carry enough information. -type Point struct { - v point -} - -// The private span/point types have public fields to support JSON -// encoding, but the public Span/Point types hide these fields by -// defining methods that shadow them. (This is used by a few of the -// command-line tool subcommands, which emit spans and have a -json -// flag.) - -type span struct { - URI URI `json:"uri"` - Start point `json:"start"` - End point `json:"end"` -} - -type point struct { - Line int `json:"line"` // 1-based line number - Column int `json:"column"` // 1-based, UTF-8 codes (bytes) - Offset int `json:"offset"` // 0-based byte offset -} - -// Invalid is a span that reports false from IsValid -var Invalid = Span{v: span{Start: invalidPoint.v, End: invalidPoint.v}} - -var invalidPoint = Point{v: point{Line: 0, Column: 0, Offset: -1}} - -func New(uri URI, start, end Point) Span { - s := Span{v: span{URI: uri, Start: start.v, End: end.v}} - s.v.clean() - return s -} - -func NewPoint(line, col, offset int) Point { - p := Point{v: point{Line: line, Column: col, Offset: offset}} - p.v.clean() - return p -} - -// SortSpans sorts spans into a stable but unspecified order. -func SortSpans(spans []Span) { - sort.SliceStable(spans, func(i, j int) bool { - return compare(spans[i], spans[j]) < 0 - }) -} - -// compare implements a three-valued ordered comparison of Spans. -func compare(a, b Span) int { - // This is a textual comparison. It does not perform path - // cleaning, case folding, resolution of symbolic links, - // testing for existence, or any I/O. 
- if cmp := strings.Compare(string(a.URI()), string(b.URI())); cmp != 0 { - return cmp - } - if cmp := comparePoint(a.v.Start, b.v.Start); cmp != 0 { - return cmp - } - return comparePoint(a.v.End, b.v.End) -} - -func comparePoint(a, b point) int { - if !a.hasPosition() { - if a.Offset < b.Offset { - return -1 - } - if a.Offset > b.Offset { - return 1 - } - return 0 - } - if a.Line < b.Line { - return -1 - } - if a.Line > b.Line { - return 1 - } - if a.Column < b.Column { - return -1 - } - if a.Column > b.Column { - return 1 - } - return 0 -} - -func (s Span) HasPosition() bool { return s.v.Start.hasPosition() } -func (s Span) HasOffset() bool { return s.v.Start.hasOffset() } -func (s Span) IsValid() bool { return s.v.Start.isValid() } -func (s Span) IsPoint() bool { return s.v.Start == s.v.End } -func (s Span) URI() URI { return s.v.URI } -func (s Span) Start() Point { return Point{s.v.Start} } -func (s Span) End() Point { return Point{s.v.End} } -func (s *Span) MarshalJSON() ([]byte, error) { return json.Marshal(&s.v) } -func (s *Span) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &s.v) } - -func (p Point) HasPosition() bool { return p.v.hasPosition() } -func (p Point) HasOffset() bool { return p.v.hasOffset() } -func (p Point) IsValid() bool { return p.v.isValid() } -func (p *Point) MarshalJSON() ([]byte, error) { return json.Marshal(&p.v) } -func (p *Point) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &p.v) } -func (p Point) Line() int { - if !p.v.hasPosition() { - panic(fmt.Errorf("position not set in %v", p.v)) - } - return p.v.Line -} -func (p Point) Column() int { - if !p.v.hasPosition() { - panic(fmt.Errorf("position not set in %v", p.v)) - } - return p.v.Column -} -func (p Point) Offset() int { - if !p.v.hasOffset() { - panic(fmt.Errorf("offset not set in %v", p.v)) - } - return p.v.Offset -} - -func (p point) hasPosition() bool { return p.Line > 0 } -func (p point) hasOffset() bool { return p.Offset >= 0 } -func (p point) isValid() 
bool { return p.hasPosition() || p.hasOffset() } -func (p point) isZero() bool { - return (p.Line == 1 && p.Column == 1) || (!p.hasPosition() && p.Offset == 0) -} - -func (s *span) clean() { - //this presumes the points are already clean - if !s.End.isValid() || (s.End == point{}) { - s.End = s.Start - } -} - -func (p *point) clean() { - if p.Line < 0 { - p.Line = 0 - } - if p.Column <= 0 { - if p.Line > 0 { - p.Column = 1 - } else { - p.Column = 0 - } - } - if p.Offset == 0 && (p.Line > 1 || p.Column > 1) { - p.Offset = -1 - } -} - -// Format implements fmt.Formatter to print the Location in a standard form. -// The format produced is one that can be read back in using Parse. -func (s Span) Format(f fmt.State, c rune) { - fullForm := f.Flag('+') - preferOffset := f.Flag('#') - // we should always have a uri, simplify if it is file format - //TODO: make sure the end of the uri is unambiguous - uri := string(s.v.URI) - if c == 'f' { - uri = path.Base(uri) - } else if !fullForm { - uri = s.v.URI.Filename() - } - fmt.Fprint(f, uri) - if !s.IsValid() || (!fullForm && s.v.Start.isZero() && s.v.End.isZero()) { - return - } - // see which bits of start to write - printOffset := s.HasOffset() && (fullForm || preferOffset || !s.HasPosition()) - printLine := s.HasPosition() && (fullForm || !printOffset) - printColumn := printLine && (fullForm || (s.v.Start.Column > 1 || s.v.End.Column > 1)) - fmt.Fprint(f, ":") - if printLine { - fmt.Fprintf(f, "%d", s.v.Start.Line) - } - if printColumn { - fmt.Fprintf(f, ":%d", s.v.Start.Column) - } - if printOffset { - fmt.Fprintf(f, "#%d", s.v.Start.Offset) - } - // start is written, do we need end? 
- if s.IsPoint() { - return - } - // we don't print the line if it did not change - printLine = fullForm || (printLine && s.v.End.Line > s.v.Start.Line) - fmt.Fprint(f, "-") - if printLine { - fmt.Fprintf(f, "%d", s.v.End.Line) - } - if printColumn { - if printLine { - fmt.Fprint(f, ":") - } - fmt.Fprintf(f, "%d", s.v.End.Column) - } - if printOffset { - fmt.Fprintf(f, "#%d", s.v.End.Offset) - } -} - -// SetRange implements packagestest.rangeSetter, allowing -// gopls' test suites to use Spans instead of Range in parameters. -func (span *Span) SetRange(file *token.File, start, end token.Pos) { - point := func(pos token.Pos) Point { - posn := safetoken.Position(file, pos) - return NewPoint(posn.Line, posn.Column, posn.Offset) - } - *span = New(URIFromPath(file.Name()), point(start), point(end)) -} diff --git a/gopls/internal/span/span_test.go b/gopls/internal/span/span_test.go deleted file mode 100644 index d2aaff12cab..00000000000 --- a/gopls/internal/span/span_test.go +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package span_test - -import ( - "fmt" - "path/filepath" - "strings" - "testing" - - "golang.org/x/tools/gopls/internal/span" -) - -func TestFormat(t *testing.T) { - formats := []string{"%v", "%#v", "%+v"} - - // Element 0 is the input, and the elements 0-2 are the expected - // output in [%v %#v %+v] formats. Thus the first must be in - // canonical form (invariant under span.Parse + fmt.Sprint). - // The '#' form displays offsets; the '+' form outputs a URI. - // If len=4, element 0 is a noncanonical input and 1-3 are expected outputs. 
- for _, test := range [][]string{ - {"C:/file_a", "C:/file_a", "file:///C:/file_a:#0"}, - {"C:/file_b:1:2", "C:/file_b:1:2", "file:///C:/file_b:1:2"}, - {"C:/file_c:1000", "C:/file_c:1000", "file:///C:/file_c:1000:1"}, - {"C:/file_d:14:9", "C:/file_d:14:9", "file:///C:/file_d:14:9"}, - {"C:/file_e:1:2-7", "C:/file_e:1:2-7", "file:///C:/file_e:1:2-1:7"}, - {"C:/file_f:500-502", "C:/file_f:500-502", "file:///C:/file_f:500:1-502:1"}, - {"C:/file_g:3:7-8", "C:/file_g:3:7-8", "file:///C:/file_g:3:7-3:8"}, - {"C:/file_h:3:7-4:8", "C:/file_h:3:7-4:8", "file:///C:/file_h:3:7-4:8"}, - {"C:/file_i:#100", "C:/file_i:#100", "file:///C:/file_i:#100"}, - {"C:/file_j:#26-#28", "C:/file_j:#26-#28", "file:///C:/file_j:#26-0#28"}, // 0#28? - {"C:/file_h:3:7#26-4:8#37", // not canonical - "C:/file_h:3:7-4:8", "C:/file_h:#26-#37", "file:///C:/file_h:3:7#26-4:8#37"}} { - input := test[0] - spn := span.Parse(input) - wants := test[0:3] - if len(test) == 4 { - wants = test[1:4] - } - for i, format := range formats { - want := toPath(wants[i]) - if got := fmt.Sprintf(format, spn); got != want { - t.Errorf("Sprintf(%q, %q) = %q, want %q", format, input, got, want) - } - } - } -} - -func toPath(value string) string { - if strings.HasPrefix(value, "file://") { - return value - } - return filepath.FromSlash(value) -} diff --git a/gopls/internal/span/uri.go b/gopls/internal/span/uri.go deleted file mode 100644 index cf2d66df20b..00000000000 --- a/gopls/internal/span/uri.go +++ /dev/null @@ -1,177 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package span - -import ( - "fmt" - "net/url" - "os" - "path/filepath" - "runtime" - "strings" - "unicode" -) - -const fileScheme = "file" - -// URI represents the full URI for a file. 
-type URI string - -func (uri URI) IsFile() bool { - return strings.HasPrefix(string(uri), "file://") -} - -// Filename returns the file path for the given URI. -// It is an error to call this on a URI that is not a valid filename. -func (uri URI) Filename() string { - filename, err := filename(uri) - if err != nil { - panic(err) - } - return filepath.FromSlash(filename) -} - -func filename(uri URI) (string, error) { - if uri == "" { - return "", nil - } - - // This conservative check for the common case - // of a simple non-empty absolute POSIX filename - // avoids the allocation of a net.URL. - if strings.HasPrefix(string(uri), "file:///") { - rest := string(uri)[len("file://"):] // leave one slash - for i := 0; i < len(rest); i++ { - b := rest[i] - // Reject these cases: - if b < ' ' || b == 0x7f || // control character - b == '%' || b == '+' || // URI escape - b == ':' || // Windows drive letter - b == '@' || b == '&' || b == '?' { // authority or query - goto slow - } - } - return rest, nil - } -slow: - - u, err := url.ParseRequestURI(string(uri)) - if err != nil { - return "", err - } - if u.Scheme != fileScheme { - return "", fmt.Errorf("only file URIs are supported, got %q from %q", u.Scheme, uri) - } - // If the URI is a Windows URI, we trim the leading "/" and uppercase - // the drive letter, which will never be case sensitive. - if isWindowsDriveURIPath(u.Path) { - u.Path = strings.ToUpper(string(u.Path[1])) + u.Path[2:] - } - - return u.Path, nil -} - -// TODO(adonovan): document this function, and any invariants of -// span.URI that it is supposed to establish. -func URIFromURI(s string) URI { - if !strings.HasPrefix(s, "file://") { - return URI(s) - } - - if !strings.HasPrefix(s, "file:///") { - // VS Code sends URLs with only two slashes, which are invalid. golang/go#39789. - s = "file:///" + s[len("file://"):] - } - // Even though the input is a URI, it may not be in canonical form. VS Code - // in particular over-escapes :, @, etc. 
Unescape and re-encode to canonicalize. - path, err := url.PathUnescape(s[len("file://"):]) - if err != nil { - panic(err) - } - - // File URIs from Windows may have lowercase drive letters. - // Since drive letters are guaranteed to be case insensitive, - // we change them to uppercase to remain consistent. - // For example, file:///c:/x/y/z becomes file:///C:/x/y/z. - if isWindowsDriveURIPath(path) { - path = path[:1] + strings.ToUpper(string(path[1])) + path[2:] - } - u := url.URL{Scheme: fileScheme, Path: path} - return URI(u.String()) -} - -// SameExistingFile reports whether two spans denote the -// same existing file by querying the file system. -func SameExistingFile(a, b URI) bool { - fa, err := filename(a) - if err != nil { - return false - } - fb, err := filename(b) - if err != nil { - return false - } - infoa, err := os.Stat(filepath.FromSlash(fa)) - if err != nil { - return false - } - infob, err := os.Stat(filepath.FromSlash(fb)) - if err != nil { - return false - } - return os.SameFile(infoa, infob) -} - -// URIFromPath returns a span URI for the supplied file path. -// -// For empty paths, URIFromPath returns the empty URI "". -// For non-empty paths, URIFromPath returns a uri with the file:// scheme. -func URIFromPath(path string) URI { - if path == "" { - return "" - } - // Handle standard library paths that contain the literal "$GOROOT". - // TODO(rstambler): The go/packages API should allow one to determine a user's $GOROOT. - const prefix = "$GOROOT" - if len(path) >= len(prefix) && strings.EqualFold(prefix, path[:len(prefix)]) { - suffix := path[len(prefix):] - path = runtime.GOROOT() + suffix - } - if !isWindowsDrivePath(path) { - if abs, err := filepath.Abs(path); err == nil { - path = abs - } - } - // Check the file path again, in case it became absolute. 
- if isWindowsDrivePath(path) { - path = "/" + strings.ToUpper(string(path[0])) + path[1:] - } - path = filepath.ToSlash(path) - u := url.URL{ - Scheme: fileScheme, - Path: path, - } - return URI(u.String()) -} - -// isWindowsDrivePath returns true if the file path is of the form used by -// Windows. We check if the path begins with a drive letter, followed by a ":". -// For example: C:/x/y/z. -func isWindowsDrivePath(path string) bool { - if len(path) < 3 { - return false - } - return unicode.IsLetter(rune(path[0])) && path[1] == ':' -} - -// isWindowsDriveURIPath returns true if the file URI is of the format used by -// Windows URIs. The url.Parse package does not specially handle Windows paths -// (see golang/go#6027), so we check if the URI path has a drive prefix (e.g. "/C:"). -func isWindowsDriveURIPath(uri string) bool { - if len(uri) < 4 { - return false - } - return uri[0] == '/' && unicode.IsLetter(rune(uri[1])) && uri[2] == ':' -} diff --git a/gopls/internal/span/uri_test.go b/gopls/internal/span/uri_test.go deleted file mode 100644 index e9904378504..00000000000 --- a/gopls/internal/span/uri_test.go +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !windows -// +build !windows - -package span_test - -import ( - "testing" - - "golang.org/x/tools/gopls/internal/span" -) - -// TestURI tests the conversion between URIs and filenames. The test cases -// include Windows-style URIs and filepaths, but we avoid having OS-specific -// tests by using only forward slashes, assuming that the standard library -// functions filepath.ToSlash and filepath.FromSlash do not need testing. 
-func TestURIFromPath(t *testing.T) { - for _, test := range []struct { - path, wantFile string - wantURI span.URI - }{ - { - path: ``, - wantFile: ``, - wantURI: span.URI(""), - }, - { - path: `C:/Windows/System32`, - wantFile: `C:/Windows/System32`, - wantURI: span.URI("file:///C:/Windows/System32"), - }, - { - path: `C:/Go/src/bob.go`, - wantFile: `C:/Go/src/bob.go`, - wantURI: span.URI("file:///C:/Go/src/bob.go"), - }, - { - path: `c:/Go/src/bob.go`, - wantFile: `C:/Go/src/bob.go`, - wantURI: span.URI("file:///C:/Go/src/bob.go"), - }, - { - path: `/path/to/dir`, - wantFile: `/path/to/dir`, - wantURI: span.URI("file:///path/to/dir"), - }, - { - path: `/a/b/c/src/bob.go`, - wantFile: `/a/b/c/src/bob.go`, - wantURI: span.URI("file:///a/b/c/src/bob.go"), - }, - { - path: `c:/Go/src/bob george/george/george.go`, - wantFile: `C:/Go/src/bob george/george/george.go`, - wantURI: span.URI("file:///C:/Go/src/bob%20george/george/george.go"), - }, - } { - got := span.URIFromPath(test.path) - if got != test.wantURI { - t.Errorf("URIFromPath(%q): got %q, expected %q", test.path, got, test.wantURI) - } - gotFilename := got.Filename() - if gotFilename != test.wantFile { - t.Errorf("Filename(%q): got %q, expected %q", got, gotFilename, test.wantFile) - } - } -} - -func TestURIFromURI(t *testing.T) { - for _, test := range []struct { - inputURI, wantFile string - wantURI span.URI - }{ - { - inputURI: `file:///c:/Go/src/bob%20george/george/george.go`, - wantFile: `C:/Go/src/bob george/george/george.go`, - wantURI: span.URI("file:///C:/Go/src/bob%20george/george/george.go"), - }, - { - inputURI: `file:///C%3A/Go/src/bob%20george/george/george.go`, - wantFile: `C:/Go/src/bob george/george/george.go`, - wantURI: span.URI("file:///C:/Go/src/bob%20george/george/george.go"), - }, - { - inputURI: `file:///path/to/%25p%25ercent%25/per%25cent.go`, - wantFile: `/path/to/%p%ercent%/per%cent.go`, - wantURI: span.URI(`file:///path/to/%25p%25ercent%25/per%25cent.go`), - }, - { - inputURI: 
`file:///C%3A/`, - wantFile: `C:/`, - wantURI: span.URI(`file:///C:/`), - }, - { - inputURI: `file:///`, - wantFile: `/`, - wantURI: span.URI(`file:///`), - }, - { - inputURI: `file://wsl%24/Ubuntu/home/wdcui/repo/VMEnclaves/cvm-runtime`, - wantFile: `/wsl$/Ubuntu/home/wdcui/repo/VMEnclaves/cvm-runtime`, - wantURI: span.URI(`file:///wsl$/Ubuntu/home/wdcui/repo/VMEnclaves/cvm-runtime`), - }, - } { - got := span.URIFromURI(test.inputURI) - if got != test.wantURI { - t.Errorf("NewURI(%q): got %q, expected %q", test.inputURI, got, test.wantURI) - } - gotFilename := got.Filename() - if gotFilename != test.wantFile { - t.Errorf("Filename(%q): got %q, expected %q", got, gotFilename, test.wantFile) - } - } -} diff --git a/gopls/internal/span/uri_windows_test.go b/gopls/internal/span/uri_windows_test.go deleted file mode 100644 index 3891e0d3e77..00000000000 --- a/gopls/internal/span/uri_windows_test.go +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build windows -// +build windows - -package span_test - -import ( - "testing" - - "golang.org/x/tools/gopls/internal/span" -) - -// TestURI tests the conversion between URIs and filenames. The test cases -// include Windows-style URIs and filepaths, but we avoid having OS-specific -// tests by using only forward slashes, assuming that the standard library -// functions filepath.ToSlash and filepath.FromSlash do not need testing. 
-func TestURIFromPath(t *testing.T) { - for _, test := range []struct { - path, wantFile string - wantURI span.URI - }{ - { - path: ``, - wantFile: ``, - wantURI: span.URI(""), - }, - { - path: `C:\Windows\System32`, - wantFile: `C:\Windows\System32`, - wantURI: span.URI("file:///C:/Windows/System32"), - }, - { - path: `C:\Go\src\bob.go`, - wantFile: `C:\Go\src\bob.go`, - wantURI: span.URI("file:///C:/Go/src/bob.go"), - }, - { - path: `c:\Go\src\bob.go`, - wantFile: `C:\Go\src\bob.go`, - wantURI: span.URI("file:///C:/Go/src/bob.go"), - }, - { - path: `\path\to\dir`, - wantFile: `C:\path\to\dir`, - wantURI: span.URI("file:///C:/path/to/dir"), - }, - { - path: `\a\b\c\src\bob.go`, - wantFile: `C:\a\b\c\src\bob.go`, - wantURI: span.URI("file:///C:/a/b/c/src/bob.go"), - }, - { - path: `c:\Go\src\bob george\george\george.go`, - wantFile: `C:\Go\src\bob george\george\george.go`, - wantURI: span.URI("file:///C:/Go/src/bob%20george/george/george.go"), - }, - } { - got := span.URIFromPath(test.path) - if got != test.wantURI { - t.Errorf("URIFromPath(%q): got %q, expected %q", test.path, got, test.wantURI) - } - gotFilename := got.Filename() - if gotFilename != test.wantFile { - t.Errorf("Filename(%q): got %q, expected %q", got, gotFilename, test.wantFile) - } - } -} - -func TestURIFromURI(t *testing.T) { - for _, test := range []struct { - inputURI, wantFile string - wantURI span.URI - }{ - { - inputURI: `file:///c:/Go/src/bob%20george/george/george.go`, - wantFile: `C:\Go\src\bob george\george\george.go`, - wantURI: span.URI("file:///C:/Go/src/bob%20george/george/george.go"), - }, - { - inputURI: `file:///C%3A/Go/src/bob%20george/george/george.go`, - wantFile: `C:\Go\src\bob george\george\george.go`, - wantURI: span.URI("file:///C:/Go/src/bob%20george/george/george.go"), - }, - { - inputURI: `file:///c:/path/to/%25p%25ercent%25/per%25cent.go`, - wantFile: `C:\path\to\%p%ercent%\per%cent.go`, - wantURI: span.URI(`file:///C:/path/to/%25p%25ercent%25/per%25cent.go`), - }, - { 
- inputURI: `file:///C%3A/`, - wantFile: `C:\`, - wantURI: span.URI(`file:///C:/`), - }, - { - inputURI: `file:///`, - wantFile: `\`, - wantURI: span.URI(`file:///`), - }, - } { - got := span.URIFromURI(test.inputURI) - if got != test.wantURI { - t.Errorf("NewURI(%q): got %q, expected %q", test.inputURI, got, test.wantURI) - } - gotFilename := got.Filename() - if gotFilename != test.wantFile { - t.Errorf("Filename(%q): got %q, expected %q", got, gotFilename, test.wantFile) - } - } -} diff --git a/gopls/internal/telemetry/cmd/stacks/stacks.go b/gopls/internal/telemetry/cmd/stacks/stacks.go new file mode 100644 index 00000000000..7123b3d477d --- /dev/null +++ b/gopls/internal/telemetry/cmd/stacks/stacks.go @@ -0,0 +1,302 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The stacks command finds all gopls stack traces reported by +// telemetry in the past 7 days, and reports their associated GitHub +// issue, creating new issues as needed. +package main + +import ( + "bytes" + "encoding/base64" + "encoding/json" + "flag" + "fmt" + "hash/fnv" + "log" + "net/http" + "net/url" + "sort" + "strings" + "time" + + "io" + + "golang.org/x/telemetry" + "golang.org/x/tools/gopls/internal/util/browser" +) + +// flags +var ( + daysFlag = flag.Int("days", 7, "number of previous days of telemetry data to read") +) + +func main() { + log.SetFlags(0) + log.SetPrefix("stacks: ") + flag.Parse() + + // Maps stack text to Version/GoVersion/GOOS/GOARCH string to counter. + stacks := make(map[string]map[string]int64) + var total int + + // Maps stack to a telemetry URL. + stackToURL := make(map[string]string) + + // Read all recent telemetry reports. + t := time.Now() + for i := 0; i < *daysFlag; i++ { + const DateOnly = "2006-01-02" // TODO(adonovan): use time.DateOnly in go1.20. 
+ date := t.Add(-time.Duration(i+1) * 24 * time.Hour).Format(DateOnly) + + url := fmt.Sprintf("https://storage.googleapis.com/prod-telemetry-merged/%s.json", date) + resp, err := http.Get(url) + if err != nil { + log.Fatalf("can't GET %s: %v", url, err) + } + defer resp.Body.Close() + if resp.StatusCode != 200 { + log.Fatalf("GET %s returned %d %s", url, resp.StatusCode, resp.Status) + } + + dec := json.NewDecoder(resp.Body) + for { + var report telemetry.Report + if err := dec.Decode(&report); err != nil { + if err == io.EOF { + break + } + log.Fatal(err) + } + for _, prog := range report.Programs { + if prog.Program == "golang.org/x/tools/gopls" && len(prog.Stacks) > 0 { + total++ + + // Include applicable client names (e.g. vscode, eglot). + var clients []string + var clientSuffix string + for key := range prog.Counters { + client := strings.TrimPrefix(key, "gopls/client:") + if client != key { + clients = append(clients, client) + } + } + sort.Strings(clients) + if len(clients) > 0 { + clientSuffix = " " + strings.Join(clients, ",") + } + + // Ignore @devel versions as they correspond to + // ephemeral (and often numerous) variations of + // the program as we work on a fix to a bug. + if prog.Version == "devel" { + continue + } + info := fmt.Sprintf("%s@%s %s %s/%s%s", + prog.Program, prog.Version, + prog.GoVersion, prog.GOOS, prog.GOARCH, + clientSuffix) + for stack, count := range prog.Stacks { + counts := stacks[stack] + if counts == nil { + counts = make(map[string]int64) + stacks[stack] = counts + } + counts[info] += count + stackToURL[stack] = url + } + } + } + } + } + + // Compute IDs of all stacks. + var stackIDs []string + for stack := range stacks { + stackIDs = append(stackIDs, stackID(stack)) + } + + // Query GitHub for existing GitHub issues. + issuesByStackID := make(map[string]*Issue) + for len(stackIDs) > 0 { + // For some reason GitHub returns 422 UnprocessableEntity + // if we attempt to read more than 6 at once. 
+ batch := stackIDs[:min(6, len(stackIDs))] + stackIDs = stackIDs[len(batch):] + + query := "label:gopls/telemetry-wins in:body " + strings.Join(batch, " OR ") + res, err := searchIssues(query) + if err != nil { + log.Fatalf("GitHub issues query failed: %v", err) + } + for _, issue := range res.Items { + for _, id := range batch { + // Matching is a little fuzzy here + // but base64 will rarely produce + // words that appear in the body + // by chance. + if strings.Contains(issue.Body, id) { + issuesByStackID[id] = issue + } + } + } + } + + fmt.Printf("Found %d stacks in last %v days:\n", total, *daysFlag) + + // For each stack, show existing issue or create a new one. + for stack, counts := range stacks { + id := stackID(stack) + + // Existing issue? + issue, ok := issuesByStackID[id] + if ok { + if issue != nil { + fmt.Printf("#%d: %s [%s]\n", + issue.Number, issue.Title, issue.State) + } else { + // We just created a "New issue" browser tab + // for this stackID. + issuesByStackID[id] = nil // suppress dups + } + continue + } + + // Create new issue. + issuesByStackID[id] = nil // suppress dups + + // Use a heuristic to find a suitable symbol to blame + // in the title: the first public function or method + // of a public type, in gopls, to appear in the stack + // trace. We can always refine it later. 
+ var symbol string + for _, line := range strings.Split(stack, "\n") { + // Look for: + // gopls/.../pkg.Func + // gopls/.../pkg.Type.method + // gopls/.../pkg.(*Type).method + if strings.Contains(line, "internal/util/bug.") { + continue // not interesting + } + if _, rest, ok := strings.Cut(line, "golang.org/x/tools/gopls/"); ok { + if i := strings.IndexByte(rest, '.'); i >= 0 { + rest = rest[i+1:] + rest = strings.TrimPrefix(rest, "(*") + if rest != "" && 'A' <= rest[0] && rest[0] <= 'Z' { + rest, _, _ = strings.Cut(rest, ":") + symbol = " " + rest + break + } + } + } + } + + // Populate the form (title, body, label) + title := fmt.Sprintf("x/tools/gopls:%s bug reported by telemetry", symbol) + body := new(bytes.Buffer) + fmt.Fprintf(body, "This stack `%s` was [reported by telemetry](%s):\n\n", + id, stackToURL[stack]) + fmt.Fprintf(body, "```\n%s\n```\n", stack) + + // Add counts, gopls version, and platform info. + // This isn't very precise but should provide clues. + // + // TODO(adonovan): link each stack (ideally each frame) to source: + // https://cs.opensource.google/go/x/tools/+/gopls/VERSION:gopls/FILE;l=LINE + // (Requires parsing stack, shallow-cloning gopls module at that tag, and + // computing correct line offsets. Would be labor-saving though.) + fmt.Fprintf(body, "```\n") + for info, count := range counts { + fmt.Fprintf(body, "%s (%d)\n", info, count) + } + fmt.Fprintf(body, "```\n\n") + + fmt.Fprintf(body, "Issue created by golang.org/x/tools/gopls/internal/telemetry/cmd/stacks.\n") + + const labels = "gopls,Tools,gopls/telemetry-wins,NeedsInvestigation" + + // Report it. 
+		if !browser.Open("https://github.com/golang/go/issues/new?labels=" + labels + "&title=" + url.QueryEscape(title) + "&body=" + url.QueryEscape(body.String())) {
+			log.Print("Please file a new issue at golang.org/issue/new using this template:\n\n")
+			log.Printf("Title: %s\n", title)
+			log.Printf("Labels: %s\n", labels)
+			log.Printf("Body: %s\n", body)
+		}
+	}
+}
+
+// stackID returns a 32-bit identifier for a stack
+// suitable for use in GitHub issue titles.
+func stackID(stack string) string {
+	// Encode it using base64 (6 bytes) for brevity,
+	// as a single issue's body might contain multiple IDs
+	// if separate issues with same cause were manually de-duped,
+	// e.g. "AAAAAA, BBBBBB"
+	//
+	// https://hbfs.wordpress.com/2012/03/30/finding-collisions:
+	// the chance of a collision is 1 - exp(-n(n-1)/2d) where n
+	// is the number of items and d is the number of distinct values.
+	// So, even with n=10^4 telemetry-reported stacks each identified
+	// by a uint32 (d=2^32), we have a 1% chance of a collision,
+	// which is plenty good enough.
+	h := fnv.New32()
+	io.WriteString(h, stack)
+	return base64.URLEncoding.EncodeToString(h.Sum(nil))[:6]
+}
+
+// -- GitHub search --
+
+// searchIssues queries the GitHub issue tracker.
+func searchIssues(query string) (*IssuesSearchResult, error) {
+	q := url.QueryEscape(query)
+	resp, err := http.Get(IssuesURL + "?q=" + q)
+	if err != nil {
+		return nil, err
+	}
+	if resp.StatusCode != http.StatusOK {
+		resp.Body.Close()
+		return nil, fmt.Errorf("search query failed: %s", resp.Status)
+	}
+	var result IssuesSearchResult
+	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
+		resp.Body.Close()
+		return nil, err
+	}
+	resp.Body.Close()
+	return &result, nil
+}
+
+// See https://developer.github.com/v3/search/#search-issues.
+ +const IssuesURL = "https://api.github.com/search/issues" + +type IssuesSearchResult struct { + TotalCount int `json:"total_count"` + Items []*Issue +} + +type Issue struct { + Number int + HTMLURL string `json:"html_url"` + Title string + State string + User *User + CreatedAt time.Time `json:"created_at"` + Body string // in Markdown format +} + +type User struct { + Login string + HTMLURL string `json:"html_url"` +} + +// -- helpers -- + +func min(x, y int) int { + if x < y { + return x + } else { + return y + } +} diff --git a/gopls/internal/telemetry/latency.go b/gopls/internal/telemetry/latency.go index b0e2da73165..3147ecb9f7f 100644 --- a/gopls/internal/telemetry/latency.go +++ b/gopls/internal/telemetry/latency.go @@ -79,7 +79,7 @@ func getLatencyCounter(operation, bucket string, isError bool) *counter.Counter // StartLatencyTimer starts a timer for the gopls operation with the given // name, and returns a func to stop the timer and record the latency sample. // -// If the context provided to the the resulting func is done, no observation is +// If the context provided to the resulting func is done, no observation is // recorded. func StartLatencyTimer(operation string) func(context.Context, error) { start := time.Now() diff --git a/gopls/internal/telemetry/telemetry.go b/gopls/internal/telemetry/telemetry.go deleted file mode 100644 index dc6f7c23372..00000000000 --- a/gopls/internal/telemetry/telemetry.go +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.19 -// +build go1.19 - -package telemetry - -import ( - "fmt" - - "golang.org/x/telemetry" - "golang.org/x/telemetry/counter" - "golang.org/x/telemetry/upload" - "golang.org/x/tools/gopls/internal/lsp/protocol" -) - -// Mode calls x/telemetry.Mode. 
-func Mode() string { - return telemetry.Mode() -} - -// SetMode calls x/telemetry.SetMode. -func SetMode(mode string) error { - return telemetry.SetMode(mode) -} - -// Start starts telemetry instrumentation. -func Start() { - counter.Open() - // upload only once at startup, hoping that users restart gopls often. - go upload.Run(nil) -} - -// RecordClientInfo records gopls client info. -func RecordClientInfo(params *protocol.ParamInitialize) { - client := "gopls/client:other" - if params != nil && params.ClientInfo != nil { - switch params.ClientInfo.Name { - case "Visual Studio Code": - client = "gopls/client:vscode" - case "Visual Studio Code - Insiders": - client = "gopls/client:vscode-insiders" - case "VSCodium": - client = "gopls/client:vscodium" - case "code-server": - // https://github.com/coder/code-server/blob/3cb92edc76ecc2cfa5809205897d93d4379b16a6/ci/build/build-vscode.sh#L19 - client = "gopls/client:code-server" - case "Eglot": - // https://lists.gnu.org/archive/html/bug-gnu-emacs/2023-03/msg00954.html - client = "gopls/client:eglot" - case "govim": - // https://github.com/govim/govim/pull/1189 - client = "gopls/client:govim" - case "Neovim": - // https://github.com/neovim/neovim/blob/42333ea98dfcd2994ee128a3467dfe68205154cd/runtime/lua/vim/lsp.lua#L1361 - client = "gopls/client:neovim" - case "coc.nvim": - // https://github.com/neoclide/coc.nvim/blob/3dc6153a85ed0f185abec1deb972a66af3fbbfb4/src/language-client/client.ts#L994 - client = "gopls/client:coc.nvim" - case "Sublime Text LSP": - // https://github.com/sublimelsp/LSP/blob/e608f878e7e9dd34aabe4ff0462540fadcd88fcc/plugin/core/sessions.py#L493 - client = "gopls/client:sublimetext" - default: - // at least accumulate the client name locally - counter.New(fmt.Sprintf("gopls/client-other:%s", params.ClientInfo.Name)).Inc() - // but also record client:other - } - } - counter.Inc(client) -} - -// RecordViewGoVersion records the Go minor version number (1.x) used for a view. 
-func RecordViewGoVersion(x int) { - if x < 0 { - return - } - name := fmt.Sprintf("gopls/goversion:1.%d", x) - counter.Inc(name) -} - -// AddForwardedCounters adds the given counters on behalf of clients. -// Names and values must have the same length. -func AddForwardedCounters(names []string, values []int64) { - for i, n := range names { - v := values[i] - if n == "" || v < 0 { - continue // Should we report an error? Who is the audience? - } - counter.Add("fwd/"+n, v) - } -} diff --git a/gopls/internal/telemetry/telemetry_go118.go b/gopls/internal/telemetry/telemetry_go118.go deleted file mode 100644 index 53394002f76..00000000000 --- a/gopls/internal/telemetry/telemetry_go118.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.19 -// +build !go1.19 - -package telemetry - -import "golang.org/x/tools/gopls/internal/lsp/protocol" - -func Mode() string { - return "local" -} - -func SetMode(mode string) error { - return nil -} - -func Start() { -} - -func RecordClientInfo(params *protocol.ParamInitialize) { -} - -func RecordViewGoVersion(x int) { -} - -func AddForwardedCounters(names []string, values []int64) { -} diff --git a/gopls/internal/telemetry/telemetry_test.go b/gopls/internal/telemetry/telemetry_test.go index 25e94f6284f..3493a15d89e 100644 --- a/gopls/internal/telemetry/telemetry_test.go +++ b/gopls/internal/telemetry/telemetry_test.go @@ -18,12 +18,12 @@ import ( "golang.org/x/telemetry/counter" "golang.org/x/telemetry/counter/countertest" // requires go1.21+ - "golang.org/x/tools/gopls/internal/bug" "golang.org/x/tools/gopls/internal/hooks" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . 
"golang.org/x/tools/gopls/internal/lsp/regtest" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" "golang.org/x/tools/gopls/internal/telemetry" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/util/bug" ) func TestMain(m *testing.M) { @@ -59,7 +59,7 @@ func TestTelemetry(t *testing.T) { for i, c := range sessionCounters { count, err := countertest.ReadCounter(c) if err != nil { - t.Fatalf("ReadCounter(%s): %v", c.Name(), err) + continue // counter db not open, or counter not found } initialCounts[i] = count } @@ -74,7 +74,19 @@ func TestTelemetry(t *testing.T) { goversion = strconv.Itoa(env.GoVersion()) addForwardedCounters(env, []string{"vscode/linter:a"}, []int64{1}) const desc = "got a bug" + + // This will increment a counter named something like: + // + // `gopls/bug + // golang.org/x/tools/gopls/internal/util/bug.report:+35 + // golang.org/x/tools/gopls/internal/util/bug.Report:=68 + // golang.org/x/tools/gopls/internal/telemetry_test.TestTelemetry.func2:+4 + // golang.org/x/tools/gopls/internal/test/integration.(*Runner).Run.func1:+87 + // testing.tRunner:+150 + // runtime.goexit:+0` + // bug.Report(desc) // want a stack counter with the trace starting from here. + env.Await(ShownMessage(desc)) }) @@ -113,7 +125,7 @@ func addForwardedCounters(env *Env, names []string, values []int64) { env.ExecuteCommand(&protocol.ExecuteCommandParams{ Command: command.AddTelemetryCounters.ID(), Arguments: args, - }, res) + }, &res) if res != nil { env.T.Errorf("%v failed - %v", command.AddTelemetryCounters.ID(), res) } diff --git a/gopls/internal/template/completion.go b/gopls/internal/template/completion.go new file mode 100644 index 00000000000..dfacefc938e --- /dev/null +++ b/gopls/internal/template/completion.go @@ -0,0 +1,253 @@ +// Copyright 2021 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package template + +import ( + "bytes" + "context" + "fmt" + "go/scanner" + "go/token" + "strings" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" +) + +// information needed for completion +type completer struct { + p *Parsed + pos protocol.Position + offset int // offset of the start of the Token + ctx protocol.CompletionContext + syms map[string]symbol +} + +func Completion(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pos protocol.Position, context protocol.CompletionContext) (*protocol.CompletionList, error) { + all := New(snapshot.Templates()) + var start int // the beginning of the Token (completed or not) + syms := make(map[string]symbol) + var p *Parsed + for fn, fc := range all.files { + // collect symbols from all template files + filterSyms(syms, fc.symbols) + if fn.Path() != fh.URI().Path() { + continue + } + if start = inTemplate(fc, pos); start == -1 { + return nil, nil + } + p = fc + } + if p == nil { + // this cannot happen unless the search missed a template file + return nil, fmt.Errorf("%s not found", fh.Identity().URI.Path()) + } + c := completer{ + p: p, + pos: pos, + offset: start + len(Left), + ctx: context, + syms: syms, + } + return c.complete() +} + +func filterSyms(syms map[string]symbol, ns []symbol) { + for _, xsym := range ns { + switch xsym.kind { + case protocol.Method, protocol.Package, protocol.Boolean, protocol.Namespace, + protocol.Function: + syms[xsym.name] = xsym // we don't care which symbol we get + case protocol.Variable: + if xsym.name != "dot" { + syms[xsym.name] = xsym + } + case protocol.Constant: + if xsym.name == "nil" { + syms[xsym.name] = xsym + } + } + } +} + +// return the starting position of the enclosing token, or -1 if none +func inTemplate(fc *Parsed, pos protocol.Position) int { + // pos is the 
pos-th character. if the cursor is at the beginning + // of the file, pos is 0. That is, we've only seen characters before pos + // 1. pos might be in a Token, return tk.Start + // 2. pos might be after an elided but before a Token, return elided + // 3. return -1 for false + offset := fc.FromPosition(pos) + // this could be a binary search, as the tokens are ordered + for _, tk := range fc.tokens { + if tk.Start < offset && offset <= tk.End { + return tk.Start + } + } + for _, x := range fc.elided { + if x > offset { + // fc.elided is sorted + break + } + // If the interval [x,offset] does not contain Left or Right + // then provide completions. (do we need the test for Right?) + if !bytes.Contains(fc.buf[x:offset], Left) && !bytes.Contains(fc.buf[x:offset], Right) { + return x + } + } + return -1 +} + +var ( + keywords = []string{"if", "with", "else", "block", "range", "template", "end}}", "end"} + globals = []string{"and", "call", "html", "index", "slice", "js", "len", "not", "or", + "urlquery", "printf", "println", "print", "eq", "ne", "le", "lt", "ge", "gt"} +) + +// find the completions. start is the offset of either the Token enclosing pos, or where +// the incomplete token starts. +// The error return is always nil. +func (c *completer) complete() (*protocol.CompletionList, error) { + ans := &protocol.CompletionList{IsIncomplete: true, Items: []protocol.CompletionItem{}} + start := c.p.FromPosition(c.pos) + sofar := c.p.buf[c.offset:start] + if len(sofar) == 0 || sofar[len(sofar)-1] == ' ' || sofar[len(sofar)-1] == '\t' { + return ans, nil + } + // sofar could be parsed by either c.analyzer() or scan(). The latter is precise + // and slower, but fast enough + words := scan(sofar) + // 1. if pattern starts $, show variables + // 2. if pattern starts ., show methods (and . by itself?) + // 3. if len(words) == 1, show firstWords (but if it were a |, show functions and globals) + // 4. ...? (parenthetical expressions, arguments, ...) 
(packages, namespaces, nil?) + if len(words) == 0 { + return nil, nil // if this happens, why were we called? + } + pattern := words[len(words)-1] + if pattern[0] == '$' { + // should we also return a raw "$"? + for _, s := range c.syms { + if s.kind == protocol.Variable && weakMatch(s.name, pattern) > 0 { + ans.Items = append(ans.Items, protocol.CompletionItem{ + Label: s.name, + Kind: protocol.VariableCompletion, + Detail: "Variable", + }) + } + } + return ans, nil + } + if pattern[0] == '.' { + for _, s := range c.syms { + if s.kind == protocol.Method && weakMatch("."+s.name, pattern) > 0 { + ans.Items = append(ans.Items, protocol.CompletionItem{ + Label: s.name, + Kind: protocol.MethodCompletion, + Detail: "Method/member", + }) + } + } + return ans, nil + } + // could we get completion attempts in strings or numbers, and if so, do we care? + // globals + for _, kw := range globals { + if weakMatch(kw, pattern) != 0 { + ans.Items = append(ans.Items, protocol.CompletionItem{ + Label: kw, + Kind: protocol.KeywordCompletion, + Detail: "Function", + }) + } + } + // and functions + for _, s := range c.syms { + if s.kind == protocol.Function && weakMatch(s.name, pattern) != 0 { + ans.Items = append(ans.Items, protocol.CompletionItem{ + Label: s.name, + Kind: protocol.FunctionCompletion, + Detail: "Function", + }) + } + } + // keywords if we're at the beginning + if len(words) <= 1 || len(words[len(words)-2]) == 1 && words[len(words)-2][0] == '|' { + for _, kw := range keywords { + if weakMatch(kw, pattern) != 0 { + ans.Items = append(ans.Items, protocol.CompletionItem{ + Label: kw, + Kind: protocol.KeywordCompletion, + Detail: "keyword", + }) + } + } + } + return ans, nil +} + +// version of c.analyze that uses go/scanner. 
+func scan(buf []byte) []string { + fset := token.NewFileSet() + fp := fset.AddFile("", -1, len(buf)) + var sc scanner.Scanner + sc.Init(fp, buf, func(pos token.Position, msg string) {}, scanner.ScanComments) + ans := make([]string, 0, 10) // preallocating gives a measurable savings + for { + _, tok, lit := sc.Scan() // tok is an int + if tok == token.EOF { + break // done + } else if tok == token.SEMICOLON && lit == "\n" { + continue // don't care, but probably can't happen + } else if tok == token.PERIOD { + ans = append(ans, ".") // lit is empty + } else if tok == token.IDENT && len(ans) > 0 && ans[len(ans)-1] == "." { + ans[len(ans)-1] = "." + lit + } else if tok == token.IDENT && len(ans) > 0 && ans[len(ans)-1] == "$" { + ans[len(ans)-1] = "$" + lit + } else if lit != "" { + ans = append(ans, lit) + } + } + return ans +} + +// pattern is what the user has typed +func weakMatch(choice, pattern string) float64 { + lower := strings.ToLower(choice) + // for now, use only lower-case everywhere + pattern = strings.ToLower(pattern) + // The first char has to match + if pattern[0] != lower[0] { + return 0 + } + // If they start with ., then the second char has to match + from := 1 + if pattern[0] == '.' { + if len(pattern) < 2 { + return 1 // pattern just a ., so it matches + } + if pattern[1] != lower[1] { + return 0 + } + from = 2 + } + // check that all the characters of pattern occur as a subsequence of choice + i, j := from, from + for ; i < len(lower) && j < len(pattern); j++ { + if pattern[j] == lower[i] { + i++ + if i >= len(lower) { + return 0 + } + } + } + if j < len(pattern) { + return 0 + } + return 1 +} diff --git a/gopls/internal/template/completion_test.go b/gopls/internal/template/completion_test.go new file mode 100644 index 00000000000..8e1bdbf0535 --- /dev/null +++ b/gopls/internal/template/completion_test.go @@ -0,0 +1,102 @@ +// Copyright 2021 The Go Authors. All rights reserved. 
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package template

import (
	"log"
	"sort"
	"strings"
	"testing"

	"golang.org/x/tools/gopls/internal/protocol"
)

func init() {
	log.SetFlags(log.Lshortfile)
}

// tparse is a single completion test case: a template with a cursor
// marker and the completion labels expected at that point.
type tparse struct {
	marked string   // ^ shows where to ask for completions. (The user just typed the following character.)
	wanted []string // expected completions
}

// Test completions in templates that parse enough (if completion needs symbols)
// Seen characters up to the ^
func TestParsed(t *testing.T) {
	var tests = []tparse{
		{"{{x}}{{12. xx^", nil}, // https://github.com/golang/go/issues/50430
		{`<table class="chroma" data-new-comment-url="{{if $.PageIsPullFiles}}{{$.Issue.HTMLURL}}/files/reviews/new_comment{{else}}{{$.CommitHTML}}/new_comment^{{end}}">`, nil},
		{"{{i^f}}", []string{"index", "if"}},
		{"{{if .}}{{e^ {{end}}", []string{"eq", "end}}", "else", "end"}},
		{"{{foo}}{{f^", []string{"foo"}},
		{"{{$^}}", []string{"$"}},
		{"{{$x:=4}}{{$^", []string{"$x"}},
		{"{{$x:=4}}{{$ ^ ", []string{}},
		{"{{len .Modified}}{{.^Mo", []string{"Modified"}},
		{"{{len .Modified}}{{.mf^", []string{"Modified"}},
		{"{{$^ }}", []string{"$"}},
		{"{{$a =3}}{{$^", []string{"$a"}},
		// .two is not good here: fix someday
		{`{{.Modified}}{{.^{{if $.one.two}}xxx{{end}}`, []string{"Modified", "one", "two"}},
		{`{{.Modified}}{{.o^{{if $.one.two}}xxx{{end}}`, []string{"one"}},
		{"{{.Modiifed}}{{.one.t^{{if $.one.two}}xxx{{end}}", []string{"two"}},
		{`{{block "foo" .}}{{i^`, []string{"index", "if"}},
		{"{{in^{{Internal}}", []string{"index", "Internal", "if"}},
		// simple number has no completions
		{"{{4^e", []string{}},
		// simple string has no completions
		{"{{`e^", []string{}},
		{"{{`No i^", []string{}}, // example of why go/scanner is used
		{"{{xavier}}{{12. x^", []string{"xavier"}},
	}
	for _, tx := range tests {
		c := testCompleter(t, tx)
		var v []string
		if c != nil {
			ans, _ := c.complete()
			for _, a := range ans.Items {
				v = append(v, a.Label)
			}
		}
		// compare got (v) against wanted as unordered sets
		if len(v) != len(tx.wanted) {
			t.Errorf("%q: got %q, wanted %q %d,%d", tx.marked, v, tx.wanted, len(v), len(tx.wanted))
			continue
		}
		sort.Strings(tx.wanted)
		sort.Strings(v)
		for i := 0; i < len(v); i++ {
			if tx.wanted[i] != v[i] {
				t.Errorf("%q at %d: got %v, wanted %v", tx.marked, i, v, tx.wanted)
				break
			}
		}
	}
}

// testCompleter builds a completer for the template in tx, with the
// cursor at the ^ marker. It returns nil if the cursor is not inside
// a {{...}} action.
func testCompleter(t *testing.T, tx tparse) *completer {
	t.Helper()
	// seen chars up to ^
	col := strings.Index(tx.marked, "^")
	buf := strings.Replace(tx.marked, "^", "", 1)
	p := parseBuffer([]byte(buf))
	pos := protocol.Position{Line: 0, Character: uint32(col)}
	if p.ParseErr != nil {
		log.Printf("%q: %v", tx.marked, p.ParseErr)
	}
	offset := inTemplate(p, pos)
	if offset == -1 {
		return nil
	}
	syms := make(map[string]symbol)
	filterSyms(syms, p.symbols)
	c := &completer{
		p:      p,
		pos:    protocol.Position{Line: 0, Character: uint32(col)},
		offset: offset + len(Left),
		ctx:    protocol.CompletionContext{TriggerKind: protocol.Invoked},
		syms:   syms,
	}
	return c
}

// ----- gopls/internal/template/highlight.go -----

// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+ +package template + +import ( + "context" + "fmt" + "regexp" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" +) + +func Highlight(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, loc protocol.Position) ([]protocol.DocumentHighlight, error) { + buf, err := fh.Content() + if err != nil { + return nil, err + } + p := parseBuffer(buf) + pos := p.FromPosition(loc) + var ans []protocol.DocumentHighlight + if p.ParseErr == nil { + for _, s := range p.symbols { + if s.start <= pos && pos < s.start+s.length { + return markSymbols(p, s) + } + } + } + // these tokens exist whether or not there was a parse error + // (symbols require a successful parse) + for _, tok := range p.tokens { + if tok.Start <= pos && pos < tok.End { + wordAt := findWordAt(p, pos) + if len(wordAt) > 0 { + return markWordInToken(p, wordAt) + } + } + } + // find the 'word' at pos, etc: someday + // until then we get the default action, which doesn't respect word boundaries + return ans, nil +} + +func markSymbols(p *Parsed, sym symbol) ([]protocol.DocumentHighlight, error) { + var ans []protocol.DocumentHighlight + for _, s := range p.symbols { + if s.name == sym.name { + kind := protocol.Read + if s.vardef { + kind = protocol.Write + } + ans = append(ans, protocol.DocumentHighlight{ + Range: p.Range(s.start, s.length), + Kind: kind, + }) + } + } + return ans, nil +} + +// A token is {{...}}, and this marks words in the token that equal the give word +func markWordInToken(p *Parsed, wordAt string) ([]protocol.DocumentHighlight, error) { + var ans []protocol.DocumentHighlight + pat, err := regexp.Compile(fmt.Sprintf(`\b%s\b`, wordAt)) + if err != nil { + return nil, fmt.Errorf("%q: unmatchable word (%v)", wordAt, err) + } + for _, tok := range p.tokens { + got := pat.FindAllIndex(p.buf[tok.Start:tok.End], -1) + for i := 0; i < len(got); i++ { + ans = append(ans, protocol.DocumentHighlight{ + Range: 
p.Range(got[i][0], got[i][1]-got[i][0]), + Kind: protocol.Text, + }) + } + } + return ans, nil +} + +var wordRe = regexp.MustCompile(`[$]?\w+$`) +var moreRe = regexp.MustCompile(`^[$]?\w+`) + +// findWordAt finds the word the cursor is in (meaning in or just before) +func findWordAt(p *Parsed, pos int) string { + if pos >= len(p.buf) { + return "" // can't happen, as we are called with pos < tok.End + } + after := moreRe.Find(p.buf[pos:]) + if len(after) == 0 { + return "" // end of the word + } + got := wordRe.Find(p.buf[:pos+len(after)]) + return string(got) +} diff --git a/gopls/internal/template/implementations.go b/gopls/internal/template/implementations.go new file mode 100644 index 00000000000..19a27620b57 --- /dev/null +++ b/gopls/internal/template/implementations.go @@ -0,0 +1,218 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package template + +import ( + "context" + "fmt" + "regexp" + "strconv" + "time" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/semtok" +) + +// line number (1-based) and message +var errRe = regexp.MustCompile(`template.*:(\d+): (.*)`) + +// Diagnostics returns parse errors. There is only one per file. +// The errors are not always helpful. For instance { {end}} +// will likely point to the end of the file. 
// Diagnostics returns parse errors for all of the snapshot's template
// files. There is only one diagnostic per file. The errors are not
// always helpful: for instance { {end}} will likely point to the end
// of the file.
func Diagnostics(snapshot *cache.Snapshot) map[protocol.DocumentURI][]*cache.Diagnostic {
	diags := make(map[protocol.DocumentURI][]*cache.Diagnostic)
	for uri, fh := range snapshot.Templates() {
		diags[uri] = diagnoseOne(fh)
	}
	return diags
}

// diagnoseOne parses one template file and converts its parse error,
// if any, into a diagnostic positioned from the line number embedded
// in the error text.
func diagnoseOne(fh file.Handle) []*cache.Diagnostic {
	// no need for skipTemplate check, as Diagnose is called on the
	// snapshot's template files
	buf, err := fh.Content()
	if err != nil {
		// Is a Diagnostic with no Range useful? event.Error also?
		msg := fmt.Sprintf("failed to read %s (%v)", fh.URI().Path(), err)
		d := cache.Diagnostic{Message: msg, Severity: protocol.SeverityError, URI: fh.URI(),
			Source: cache.TemplateError}
		return []*cache.Diagnostic{&d}
	}
	p := parseBuffer(buf)
	if p.ParseErr == nil {
		return nil
	}
	// unknownError reports an error whose text did not match errRe.
	unknownError := func(msg string) []*cache.Diagnostic {
		s := fmt.Sprintf("malformed template error %q: %s", p.ParseErr.Error(), msg)
		d := cache.Diagnostic{
			Message: s, Severity: protocol.SeverityError, Range: p.Range(p.nls[0], 1),
			URI: fh.URI(), Source: cache.TemplateError}
		return []*cache.Diagnostic{&d}
	}
	// errors look like `template: :40: unexpected "}" in operand`
	// so the string needs to be parsed
	matches := errRe.FindStringSubmatch(p.ParseErr.Error())
	if len(matches) != 3 {
		msg := fmt.Sprintf("expected 3 matches, got %d (%v)", len(matches), matches)
		return unknownError(msg)
	}
	lineno, err := strconv.Atoi(matches[1])
	if err != nil {
		msg := fmt.Sprintf("couldn't convert %q to int, %v", matches[1], err)
		return unknownError(msg)
	}
	msg := matches[2]
	d := cache.Diagnostic{Message: msg, Severity: protocol.SeverityError,
		Source: cache.TemplateError}
	// NOTE(review): assumes 1 <= lineno <= len(p.nls); a reported line
	// past the last newline would index out of range — TODO confirm.
	start := p.nls[lineno-1]
	if lineno < len(p.nls) {
		// diagnose the whole offending line
		size := p.nls[lineno] - start
		d.Range = p.Range(start, size)
	} else {
		d.Range = p.Range(start, 1)
	}
	return []*cache.Diagnostic{&d}
}

// Definition finds the definitions of the symbol at loc. It
// does not understand scoping (if any) in templates. This code is
// for definitions, type definitions, and implementations.
// Results only for variables and templates.
func Definition(snapshot *cache.Snapshot, fh file.Handle, loc protocol.Position) ([]protocol.Location, error) {
	x, _, err := symAtPosition(fh, loc)
	if err != nil {
		return nil, err
	}
	sym := x.name
	ans := []protocol.Location{}
	// PJW: this is probably a pattern to abstract
	a := New(snapshot.Templates())
	for k, p := range a.files {
		for _, s := range p.symbols {
			if !s.vardef || s.name != sym {
				continue
			}
			ans = append(ans, protocol.Location{URI: k, Range: p.Range(s.start, s.length)})
		}
	}
	return ans, nil
}

// Hover returns a markdown description of the symbol at position,
// or nil if there is no symbol there.
func Hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.Hover, error) {
	sym, p, err := symAtPosition(fh, position)
	if sym == nil || err != nil {
		return nil, err
	}
	ans := protocol.Hover{Range: p.Range(sym.start, sym.length), Contents: protocol.MarkupContent{Kind: protocol.Markdown}}
	switch sym.kind {
	case protocol.Function:
		ans.Contents.Value = fmt.Sprintf("function: %s", sym.name)
	case protocol.Variable:
		ans.Contents.Value = fmt.Sprintf("variable: %s", sym.name)
	case protocol.Constant:
		ans.Contents.Value = fmt.Sprintf("constant %s", sym.name)
	case protocol.Method: // field or method
		ans.Contents.Value = fmt.Sprintf("%s: field or method", sym.name)
	case protocol.Package: // template use, template def (PJW: do we want two?)
		ans.Contents.Value = fmt.Sprintf("template %s\n(add definition)", sym.name)
	case protocol.Namespace:
		ans.Contents.Value = fmt.Sprintf("template %s defined", sym.name)
	case protocol.Number:
		ans.Contents.Value = "number"
	case protocol.String:
		ans.Contents.Value = "string"
	case protocol.Boolean:
		ans.Contents.Value = "boolean"
	default:
		ans.Contents.Value = fmt.Sprintf("oops, sym=%#v", sym)
	}
	return &ans, nil
}

// References returns the locations of all occurrences of the symbol
// at params.Position across the snapshot's template files.
func References(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, params *protocol.ReferenceParams) ([]protocol.Location, error) {
	sym, _, err := symAtPosition(fh, params.Position)
	if sym == nil || err != nil || sym.name == "" {
		return nil, err
	}
	ans := []protocol.Location{}

	a := New(snapshot.Templates())
	for k, p := range a.files {
		for _, s := range p.symbols {
			if s.name != sym.name {
				continue
			}
			if s.vardef && !params.Context.IncludeDeclaration {
				continue
			}
			ans = append(ans, protocol.Location{URI: k, Range: p.Range(s.start, s.length)})
		}
	}
	// do these need to be sorted? (a.files is a map)
	return ans, nil
}

// SemanticTokens returns the semantic tokens (all of kind TokMacro,
// one per {{...}} token, split per-line for multiline tokens) for the
// template file spn.
func SemanticTokens(ctx context.Context, snapshot *cache.Snapshot, spn protocol.DocumentURI) (*protocol.SemanticTokens, error) {
	fh, err := snapshot.ReadFile(ctx, spn)
	if err != nil {
		return nil, err
	}
	buf, err := fh.Content()
	if err != nil {
		return nil, err
	}
	p := parseBuffer(buf)

	var items []semtok.Token
	add := func(line, start, len uint32) {
		if len == 0 {
			return // vscode doesn't like 0-length Tokens
		}
		// TODO(adonovan): don't ignore the rng restriction, if any.
		items = append(items, semtok.Token{
			Line:  line,
			Start: start,
			Len:   len,
			Type:  semtok.TokMacro,
		})
	}

	for _, t := range p.Tokens() {
		if t.Multiline {
			// emit one token per line the {{...}} spans
			la, ca := p.LineCol(t.Start)
			lb, cb := p.LineCol(t.End)
			add(la, ca, p.RuneCount(la, ca, 0))
			for l := la + 1; l < lb; l++ {
				add(l, 0, p.RuneCount(l, 0, 0))
			}
			add(lb, 0, p.RuneCount(lb, 0, cb))
			continue
		}
		sz, err := p.TokenSize(t)
		if err != nil {
			return nil, err
		}
		line, col := p.LineCol(t.Start)
		add(line, col, uint32(sz))
	}
	const noStrings = false
	const noNumbers = false
	ans := &protocol.SemanticTokens{
		Data: semtok.Encode(
			items,
			noStrings,
			noNumbers,
			snapshot.Options().SemanticTypes,
			snapshot.Options().SemanticMods),
		// for small cache, some day. for now, the LSP client ignores this
		// (that is, when the LSP client starts returning these, we can cache)
		ResultID: fmt.Sprintf("%v", time.Now()),
	}
	return ans, nil
}

// still need to do rename, etc

// ----- gopls/internal/template/parse.go -----

// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package template contains code for dealing with templates
package template

// template files are small enough that the code reprocesses them each time
// this may be a bad choice for projects with lots of template files.
import (
	"bytes"
	"context"
	"fmt"
	"io"
	"log"
	"regexp"
	"runtime"
	"sort"
	"text/template"
	"text/template/parse"
	"unicode/utf8"

	"golang.org/x/tools/gopls/internal/file"
	"golang.org/x/tools/gopls/internal/protocol"
	"golang.org/x/tools/internal/event"
)

// Left and Right are the template action delimiters this package
// recognizes.
var (
	Left  = []byte("{{")
	Right = []byte("}}")
)

// Parsed is the result of processing a single template file: its
// tokens, parse result, symbols, and the bookkeeping needed to map
// byte offsets to LSP line/column coordinates.
type Parsed struct {
	buf    []byte   // contents
	lines  [][]byte // needed?, other than for debugging?
	elided []int    // offsets where Left was replaced by blanks

	// tokens are matched Left-Right pairs, computed before trying to parse
	tokens []Token

	// result of parsing
	named    []*template.Template // the template and embedded templates
	ParseErr error
	symbols  []symbol
	stack    []parse.Node // used while computing symbols

	// for mapping from offsets in buf to LSP coordinates
	// See FromPosition() and LineCol()
	nls      []int // offset of newlines before each line (nls[0]==-1)
	lastnl   int   // last line seen
	check    int   // used to decide whether to use lastnl or search through nls
	nonASCII bool  // are there any non-ascii runes in buf?
}

// Token is a single {{...}}. More precisely, Left...Right
type Token struct {
	Start, End int // offset from start of template
	Multiline  bool
}

// All contains the Parse of all the template files
type All struct {
	files map[protocol.DocumentURI]*Parsed
}

// New returns the Parses of the snapshot's tmpl files
// (maybe cache these, but then avoiding import cycles needs code rearrangements)
func New(tmpls map[protocol.DocumentURI]file.Handle) *All {
	all := make(map[protocol.DocumentURI]*Parsed)
	for k, v := range tmpls {
		buf, err := v.Content()
		if err != nil { // PJW: decide what to do with these errors
			log.Printf("failed to read %s (%v)", v.URI().Path(), err)
			continue
		}
		all[k] = parseBuffer(buf)
	}
	return &All{files: all}
}

// parseBuffer tokenizes and parses buf, recording newline offsets,
// tokens, symbols, and any unrecoverable parse error. "function not
// defined" errors are retried with stub functions so that templates
// using custom functions still parse.
func parseBuffer(buf []byte) *Parsed {
	ans := &Parsed{
		buf:   buf,
		check: -1,
		nls:   []int{-1},
	}
	if len(buf) == 0 {
		return ans
	}
	// how to compute allAscii...
	for _, b := range buf {
		if b >= utf8.RuneSelf {
			ans.nonASCII = true
			break
		}
	}
	// ensure a trailing newline so nls always brackets the last line
	if buf[len(buf)-1] != '\n' {
		ans.buf = append(buf, '\n')
	}
	for i, p := range ans.buf {
		if p == '\n' {
			ans.nls = append(ans.nls, i)
		}
	}
	ans.setTokens() // ans.buf may be a new []byte
	ans.lines = bytes.Split(ans.buf, []byte{'\n'})
	t, err := template.New("").Parse(string(ans.buf))
	if err != nil {
		funcs := make(template.FuncMap)
		for t == nil && ans.ParseErr == nil {
			// in 1.17 it may be possible to avoid getting this error
			//  template: :2: function "foo" not defined
			matches := parseErrR.FindStringSubmatch(err.Error())
			if len(matches) == 2 {
				// suppress the error by giving it a function with the right name
				funcs[matches[1]] = func() interface{} { return nil }
				t, err = template.New("").Funcs(funcs).Parse(string(ans.buf))
				continue
			}
			ans.ParseErr = err // unfixed error
			return ans
		}
	}
	ans.named = t.Templates()
	// set the symbols
	for _, t := range ans.named {
		ans.stack = append(ans.stack, t.Root)
		ans.findSymbols()
		if t.Name() != "" {
			// defining a template. The pos is just after {{define...}} (or {{block...}}?)
			at, sz := ans.FindLiteralBefore(int(t.Root.Pos))
			s := symbol{start: at, length: sz, name: t.Name(), kind: protocol.Namespace, vardef: true}
			ans.symbols = append(ans.symbols, s)
		}
	}

	// stable presentation order: by offset, definitions first, then kind
	sort.Slice(ans.symbols, func(i, j int) bool {
		left, right := ans.symbols[i], ans.symbols[j]
		if left.start != right.start {
			return left.start < right.start
		}
		if left.vardef != right.vardef {
			return left.vardef
		}
		return left.kind < right.kind
	})
	return ans
}

// FindLiteralBefore locates the first preceding string literal
// returning its position and length in buf
// or returns -1 if there is none.
// Assume double-quoted string rather than backquoted string for now.
func (p *Parsed) FindLiteralBefore(pos int) (int, int) {
	left, right := -1, -1
	for i := pos - 1; i >= 0; i-- {
		if p.buf[i] != '"' {
			continue
		}
		if right == -1 {
			right = i
			continue
		}
		left = i
		break
	}
	if left == -1 {
		return -1, 0
	}
	// exclude the quotes themselves
	return left + 1, right - left - 1
}

// parseErrR matches text/template's "function ... not defined" errors;
// see parseBuffer's retry loop.
var (
	parseErrR = regexp.MustCompile(`template:.*function "([^"]+)" not defined`)
)

// setTokens scans p.buf with a small state machine, recording each
// matched Left...Right pair as a Token and blanking out any unmatched
// Left (typically the one the user is still typing).
func (p *Parsed) setTokens() {
	const (
		// InRaw and InString only occur inside an action (SeenLeft)
		Start = iota
		InRaw
		InString
		SeenLeft
	)
	state := Start
	var left, oldState int
	for n := 0; n < len(p.buf); n++ {
		c := p.buf[n]
		switch state {
		case InRaw:
			if c == '`' {
				state = oldState
			}
		case InString:
			if c == '"' && !isEscaped(p.buf[:n]) {
				state = oldState
			}
		case SeenLeft:
			if c == '`' {
				oldState = state // it's SeenLeft, but a little clearer this way
				state = InRaw
				continue
			}
			if c == '"' {
				oldState = state
				state = InString
				continue
			}
			if bytes.HasPrefix(p.buf[n:], Right) {
				right := n + len(Right)
				tok := Token{Start: left,
					End:       right,
					Multiline: bytes.Contains(p.buf[left:right], []byte{'\n'}),
				}
				p.tokens = append(p.tokens, tok)
				state = Start
			}
			// If we see (unquoted) Left then the original left is probably the user
			// typing. Suppress the original left
			if bytes.HasPrefix(p.buf[n:], Left) {
				p.elideAt(left)
				left = n
				n += len(Left) - 1 // skip the rest
			}
		case Start:
			if bytes.HasPrefix(p.buf[n:], Left) {
				left = n
				state = SeenLeft
				n += len(Left) - 1 // skip the rest (avoids {{{ bug)
			}
		}
	}
	// this error occurs after typing {{ at the end of the file
	if state != Start {
		// Unclosed Left. remove the Left at left
		p.elideAt(left)
	}
}

// elideAt overwrites the Left delimiter at offset left with blanks,
// copying p.buf first so the caller's buffer is not mutated.
func (p *Parsed) elideAt(left int) {
	if p.elided == nil {
		// p.buf is the same buffer that v.Read() returns, so copy it.
		// (otherwise the next time it's parsed, elided information is lost)
		b := make([]byte, len(p.buf))
		copy(b, p.buf)
		p.buf = b
	}
	for i := 0; i < len(Left); i++ {
		p.buf[left+i] = ' '
	}
	p.elided = append(p.elided, left)
}

// isEscaped reports whether the byte after buf is escaped
// (i.e. preceded by an odd number of backslashes).
func isEscaped(buf []byte) bool {
	backSlashes := 0
	for j := len(buf) - 1; j >= 0 && buf[j] == '\\'; j-- {
		backSlashes++
	}
	return backSlashes%2 == 1
}

// Tokens returns the {{...}} tokens found in the file.
func (p *Parsed) Tokens() []Token {
	return p.tokens
}

// TODO(adonovan): the next 100 lines could perhaps replaced by use of protocol.Mapper.

// utf16len returns the length of buf in UTF-16 code units
// (the unit LSP positions are measured in).
func (p *Parsed) utf16len(buf []byte) int {
	cnt := 0
	if !p.nonASCII {
		return len(buf)
	}
	// we need a utf16len(rune), but we don't have it
	for _, r := range string(buf) {
		cnt++
		if r >= 1<<16 {
			cnt++ // surrogate pair
		}
	}
	return cnt
}

// TokenSize returns the UTF-16 length of a single-line token; it is an
// error to call it on a Multiline token.
func (p *Parsed) TokenSize(t Token) (int, error) {
	if t.Multiline {
		return -1, fmt.Errorf("TokenSize called with Multiline token %#v", t)
	}
	ans := p.utf16len(p.buf[t.Start:t.End])
	return ans, nil
}
// RuneCount counts runes in line l, from col s to e
// (e==0 for end of line; called only for multiline tokens).
func (p *Parsed) RuneCount(l, s, e uint32) uint32 {
	start := p.nls[l] + 1 + int(s)
	end := p.nls[l] + 1 + int(e)
	if e == 0 || end > p.nls[l+1] {
		end = p.nls[l+1] // clamp to end of line
	}
	return uint32(utf8.RuneCount(p.buf[start:end]))
}

// LineCol converts from a 0-based byte offset to 0-based line, col. col in runes
// It caches the last line found (lastnl/check) so sequential forward
// queries don't rescan nls from the start.
func (p *Parsed) LineCol(x int) (uint32, uint32) {
	if x < p.check {
		p.lastnl = 0 // went backwards; restart the scan
	}
	p.check = x
	for i := p.lastnl; i < len(p.nls); i++ {
		if p.nls[i] <= x {
			continue
		}
		p.lastnl = i
		var count int
		if i > 0 && x == p.nls[i-1] { // \n
			count = 0
		} else {
			count = p.utf16len(p.buf[p.nls[i-1]+1 : x])
		}
		return uint32(i - 1), uint32(count)
	}
	if x == len(p.buf)-1 { // trailing \n
		return uint32(len(p.nls) - 1), 0
	}
	// shouldn't happen: log the callers to aid debugging
	for i := 1; i < 4; i++ {
		_, f, l, ok := runtime.Caller(i)
		if !ok {
			break
		}
		log.Printf("%d: %s:%d", i, f, l)
	}

	msg := fmt.Errorf("LineCol off the end, %d of %d, nls=%v, %q", x, len(p.buf), p.nls, p.buf[x:])
	event.Error(context.Background(), "internal error", msg)
	return 0, 0
}

// Position produces a protocol.Position from an offset in the template
func (p *Parsed) Position(pos int) protocol.Position {
	line, col := p.LineCol(pos)
	return protocol.Position{Line: line, Character: col}
}

// Range produces a protocol.Range for the span starting at offset x
// with the given length.
// NOTE(review): assumes the span does not cross a newline — the End
// position is computed on the same line.
func (p *Parsed) Range(x, length int) protocol.Range {
	line, col := p.LineCol(x)
	ans := protocol.Range{
		Start: protocol.Position{Line: line, Character: col},
		End:   protocol.Position{Line: line, Character: col + uint32(length)},
	}
	return ans
}

// FromPosition translates a protocol.Position into an offset into the template
func (p *Parsed) FromPosition(x protocol.Position) int {
	l, c := int(x.Line), int(x.Character)
	if l >= len(p.nls) || p.nls[l]+1 >= len(p.buf) {
		// paranoia to avoid panic. return the largest offset
		return len(p.buf)
	}
	line := p.buf[p.nls[l]+1:]
	cnt := 0
	for w := range string(line) {
		if cnt >= c {
			return w + p.nls[l] + 1
		}
		cnt++
	}
	// do we get here? NO
	pos := int(x.Character) + p.nls[int(x.Line)] + 1
	event.Error(context.Background(), "internal error", fmt.Errorf("surprise %#v", x))
	return pos
}

// symAtPosition parses fh and returns the (first) symbol containing
// loc, along with the parse; it returns an error if no symbol is there.
func symAtPosition(fh file.Handle, loc protocol.Position) (*symbol, *Parsed, error) {
	buf, err := fh.Content()
	if err != nil {
		return nil, nil, err
	}
	p := parseBuffer(buf)
	pos := p.FromPosition(loc)
	syms := p.SymsAtPos(pos)
	if len(syms) == 0 {
		return nil, p, fmt.Errorf("no symbol found")
	}
	if len(syms) > 1 {
		log.Printf("Hover: %d syms, not 1 %v", len(syms), syms)
	}
	sym := syms[0]
	return &sym, p, nil
}

// SymsAtPos returns all symbols whose range contains the offset pos.
func (p *Parsed) SymsAtPos(pos int) []symbol {
	ans := []symbol{}
	for _, s := range p.symbols {
		if s.start <= pos && pos < s.start+s.length {
			ans = append(ans, s)
		}
	}
	return ans
}

// wrNode pairs a Parsed with a writer for debug dumps of parse trees.
type wrNode struct {
	p *Parsed
	w io.Writer
}

// WriteNode is for debugging
func (p *Parsed) WriteNode(w io.Writer, n parse.Node) {
	wr := wrNode{p: p, w: w}
	wr.writeNode(n, "")
}

// writeNode recursively prints n and its children, one line per node,
// with indent growing at each level.
func (wr wrNode) writeNode(n parse.Node, indent string) {
	if n == nil {
		return
	}
	// at formats a parse.Pos as "(offset)line:col"
	at := func(pos parse.Pos) string {
		line, col := wr.p.LineCol(int(pos))
		return fmt.Sprintf("(%d)%v:%v", pos, line, col)
	}
	switch x := n.(type) {
	case *parse.ActionNode:
		fmt.Fprintf(wr.w, "%sActionNode at %s\n", indent, at(x.Pos))
		wr.writeNode(x.Pipe, indent+". ")
	case *parse.BoolNode:
		fmt.Fprintf(wr.w, "%sBoolNode at %s, %v\n", indent, at(x.Pos), x.True)
	case *parse.BranchNode:
		fmt.Fprintf(wr.w, "%sBranchNode at %s\n", indent, at(x.Pos))
		wr.writeNode(x.Pipe, indent+"Pipe. ")
		wr.writeNode(x.List, indent+"List. ")
		wr.writeNode(x.ElseList, indent+"Else. ")
	case *parse.ChainNode:
		fmt.Fprintf(wr.w, "%sChainNode at %s, %v\n", indent, at(x.Pos), x.Field)
	case *parse.CommandNode:
		fmt.Fprintf(wr.w, "%sCommandNode at %s, %d children\n", indent, at(x.Pos), len(x.Args))
		for _, a := range x.Args {
			wr.writeNode(a, indent+". ")
		}
	//case *parse.CommentNode: // 1.16
	case *parse.DotNode:
		fmt.Fprintf(wr.w, "%sDotNode at %s\n", indent, at(x.Pos))
	case *parse.FieldNode:
		fmt.Fprintf(wr.w, "%sFieldNode at %s, %v\n", indent, at(x.Pos), x.Ident)
	case *parse.IdentifierNode:
		fmt.Fprintf(wr.w, "%sIdentifierNode at %s, %v\n", indent, at(x.Pos), x.Ident)
	case *parse.IfNode:
		fmt.Fprintf(wr.w, "%sIfNode at %s\n", indent, at(x.Pos))
		wr.writeNode(&x.BranchNode, indent+". ")
	case *parse.ListNode:
		if x == nil {
			return // nil BranchNode.ElseList
		}
		fmt.Fprintf(wr.w, "%sListNode at %s, %d children\n", indent, at(x.Pos), len(x.Nodes))
		for _, n := range x.Nodes {
			wr.writeNode(n, indent+". ")
		}
	case *parse.NilNode:
		fmt.Fprintf(wr.w, "%sNilNode at %s\n", indent, at(x.Pos))
	case *parse.NumberNode:
		fmt.Fprintf(wr.w, "%sNumberNode at %s, %s\n", indent, at(x.Pos), x.Text)
	case *parse.PipeNode:
		if x == nil {
			return // {{template "xxx"}}
		}
		fmt.Fprintf(wr.w, "%sPipeNode at %s, %d vars, %d cmds, IsAssign:%v\n",
			indent, at(x.Pos), len(x.Decl), len(x.Cmds), x.IsAssign)
		for _, d := range x.Decl {
			wr.writeNode(d, indent+"Decl. ")
		}
		for _, c := range x.Cmds {
			wr.writeNode(c, indent+"Cmd. ")
		}
	case *parse.RangeNode:
		fmt.Fprintf(wr.w, "%sRangeNode at %s\n", indent, at(x.Pos))
		wr.writeNode(&x.BranchNode, indent+". ")
	case *parse.StringNode:
		fmt.Fprintf(wr.w, "%sStringNode at %s, %s\n", indent, at(x.Pos), x.Quoted)
	case *parse.TemplateNode:
		fmt.Fprintf(wr.w, "%sTemplateNode at %s, %s\n", indent, at(x.Pos), x.Name)
		wr.writeNode(x.Pipe, indent+". ")
	case *parse.TextNode:
		fmt.Fprintf(wr.w, "%sTextNode at %s, len %d\n", indent, at(x.Pos), len(x.Text))
	case *parse.VariableNode:
		fmt.Fprintf(wr.w, "%sVariableNode at %s, %v\n", indent, at(x.Pos), x.Ident)
	case *parse.WithNode:
		fmt.Fprintf(wr.w, "%sWithNode at %s\n", indent, at(x.Pos))
		wr.writeNode(&x.BranchNode, indent+". ")
	}
}

// kindNames maps protocol.SymbolKind values (1-based) to their names.
var kindNames = []string{"", "File", "Module", "Namespace", "Package", "Class", "Method", "Property",
	"Field", "Constructor", "Enum", "Interface", "Function", "Variable", "Constant", "String",
	"Number", "Boolean", "Array", "Object", "Key", "Null", "EnumMember", "Struct", "Event",
	"Operator", "TypeParameter"}

// kindStr returns a human-readable name for k, tolerating out-of-range
// values.
func kindStr(k protocol.SymbolKind) string {
	n := int(k)
	if n < 1 || n >= len(kindNames) {
		return fmt.Sprintf("?SymbolKind %d?", n)
	}
	return kindNames[n]
}

// ----- gopls/internal/template/symbols.go -----

// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package template

import (
	"bytes"
	"context"
	"fmt"
	"text/template/parse"
	"unicode/utf8"

	"golang.org/x/tools/gopls/internal/cache"
	"golang.org/x/tools/gopls/internal/file"
	"golang.org/x/tools/gopls/internal/protocol"
	"golang.org/x/tools/internal/event"
)
// symbol is a symbol occurrence in local coordinates, to be translated
// to protocol.DocumentSymbol.
type symbol struct {
	start  int // for sorting
	length int // in runes (unicode code points)
	name   string
	kind   protocol.SymbolKind
	vardef bool // is this a variable definition?
	// do we care about selection range, or children?
	// no children yet, and selection range is the same as range
}

func (s symbol) String() string {
	return fmt.Sprintf("{%d,%d,%s,%s,%v}", s.start, s.length, s.name, s.kind, s.vardef)
}

// fields builds the symbols for a dotted access: one symbol per
// component of flds, located by searching p.buf near the node's
// reported position (for FieldNode, VariableNode, or ChainNode).
func (p *Parsed) fields(flds []string, x parse.Node) []symbol {
	ans := []symbol{}
	// guessing that there are no embedded blanks allowed. The doc is unclear
	lookfor := ""
	switch x.(type) {
	case *parse.FieldNode:
		for _, f := range flds {
			lookfor += "." + f // quadratic, but probably ok
		}
	case *parse.VariableNode:
		lookfor = flds[0]
		for i := 1; i < len(flds); i++ {
			lookfor += "." + flds[i]
		}
	case *parse.ChainNode: // PJW, what are these?
		for _, f := range flds {
			lookfor += "." + f // quadratic, but probably ok
		}
	default:
		// If these happen they will happen even if gopls is restarted
		// and the users does the same thing, so it is better not to panic.
		// context.Background() is used because we don't have access
		// to any other context. [we could, but it would be complicated]
		event.Log(context.Background(), fmt.Sprintf("%T unexpected in fields()", x))
		return nil
	}
	if len(lookfor) == 0 {
		event.Log(context.Background(), fmt.Sprintf("no strings in fields() %#v", x))
		return nil
	}
	startsAt := int(x.Position())
	ix := bytes.Index(p.buf[startsAt:], []byte(lookfor)) // HasPrefix? PJW?
	if ix < 0 || ix > len(lookfor) { // lookfor expected to be at start (or so)
		// probably golang.go/#43388, so back up
		startsAt -= len(flds[0]) + 1
		ix = bytes.Index(p.buf[startsAt:], []byte(lookfor)) // ix might be 1? PJW
		if ix < 0 {
			return ans
		}
	}
	at := ix + startsAt
	for _, f := range flds {
		at += 1 // skip the '.' (or the '$' sigil position for variables)
		kind := protocol.Method
		if f[0] == '$' {
			kind = protocol.Variable
		}
		sym := symbol{name: f, kind: kind, start: at, length: utf8.RuneCount([]byte(f))}
		// A variable is a definition if it appears in the Decl list of
		// the enclosing PipeNode (the node below it on the stack).
		if kind == protocol.Variable && len(p.stack) > 1 {
			if pipe, ok := p.stack[len(p.stack)-2].(*parse.PipeNode); ok {
				for _, y := range pipe.Decl {
					if x == y {
						sym.vardef = true
					}
				}
			}
		}
		ans = append(ans, sym)
		at += len(f)
	}
	return ans
}

// findSymbols walks the parse tree rooted at the top of p.stack,
// appending a symbol for each interesting node. The stack lets fields()
// see the enclosing PipeNode to detect variable definitions.
func (p *Parsed) findSymbols() {
	if len(p.stack) == 0 {
		return
	}
	n := p.stack[len(p.stack)-1]
	pop := func() {
		p.stack = p.stack[:len(p.stack)-1]
	}
	if n == nil { // allowing nil simplifies the code
		pop()
		return
	}
	// nxt pushes nd and recurses
	nxt := func(nd parse.Node) {
		p.stack = append(p.stack, nd)
		p.findSymbols()
	}
	switch x := n.(type) {
	case *parse.ActionNode:
		nxt(x.Pipe)
	case *parse.BoolNode:
		// need to compute the length from the value
		msg := fmt.Sprintf("%v", x.True)
		p.symbols = append(p.symbols, symbol{start: int(x.Pos), length: len(msg), kind: protocol.Boolean})
	case *parse.BranchNode:
		nxt(x.Pipe)
		nxt(x.List)
		nxt(x.ElseList)
	case *parse.ChainNode:
		p.symbols = append(p.symbols, p.fields(x.Field, x)...)
		nxt(x.Node)
	case *parse.CommandNode:
		for _, a := range x.Args {
			nxt(a)
		}
	//case *parse.CommentNode: // go 1.16
	//	log.Printf("implement %d", x.Type())
	case *parse.DotNode:
		sym := symbol{name: "dot", kind: protocol.Variable, start: int(x.Pos), length: 1}
		p.symbols = append(p.symbols, sym)
	case *parse.FieldNode:
		p.symbols = append(p.symbols, p.fields(x.Ident, x)...)
	case *parse.IdentifierNode:
		sym := symbol{name: x.Ident, kind: protocol.Function, start: int(x.Pos),
			length: utf8.RuneCount([]byte(x.Ident))}
		p.symbols = append(p.symbols, sym)
	case *parse.IfNode:
		nxt(&x.BranchNode)
	case *parse.ListNode:
		if x != nil { // wretched typed nils. Node should have an IfNil
			for _, nd := range x.Nodes {
				nxt(nd)
			}
		}
	case *parse.NilNode:
		sym := symbol{name: "nil", kind: protocol.Constant, start: int(x.Pos), length: 3}
		p.symbols = append(p.symbols, sym)
	case *parse.NumberNode:
		// no name; ascii
		p.symbols = append(p.symbols, symbol{start: int(x.Pos), length: len(x.Text), kind: protocol.Number})
	case *parse.PipeNode:
		if x == nil { // {{template "foo"}}
			return
		}
		for _, d := range x.Decl {
			nxt(d)
		}
		for _, c := range x.Cmds {
			nxt(c)
		}
	case *parse.RangeNode:
		nxt(&x.BranchNode)
	case *parse.StringNode:
		// no name
		sz := utf8.RuneCount([]byte(x.Text))
		p.symbols = append(p.symbols, symbol{start: int(x.Pos), length: sz, kind: protocol.String})
	case *parse.TemplateNode: // invoking a template
		// x.Pos points to the quote before the name
		p.symbols = append(p.symbols, symbol{name: x.Name, kind: protocol.Package, start: int(x.Pos) + 1,
			length: utf8.RuneCount([]byte(x.Name))})
		nxt(x.Pipe)
	case *parse.TextNode:
		if len(x.Text) == 1 && x.Text[0] == '\n' {
			break
		}
		// nothing to report, but build one for hover
		sz := utf8.RuneCount(x.Text)
		p.symbols = append(p.symbols, symbol{start: int(x.Pos), length: sz, kind: protocol.Constant})
	case *parse.VariableNode:
		p.symbols = append(p.symbols, p.fields(x.Ident, x)...)
	case *parse.WithNode:
		nxt(&x.BranchNode)

	}
	pop()
}

// DocumentSymbols returns a hierarchy of the symbols defined in a template file.
// (The hierarchy is flat. SymbolInformation might be better.)
func DocumentSymbols(snapshot *cache.Snapshot, fh file.Handle) ([]protocol.DocumentSymbol, error) {
	buf, err := fh.Content()
	if err != nil {
		return nil, err
	}
	p := parseBuffer(buf)
	if p.ParseErr != nil {
		return nil, p.ParseErr
	}
	var ans []protocol.DocumentSymbol
	for _, s := range p.symbols {
		if s.kind == protocol.Constant {
			continue // text runs are not real symbols
		}
		d := kindStr(s.kind)
		if d == "Namespace" {
			d = "Template"
		}
		if s.vardef {
			d += "(def)"
		} else {
			d += "(use)"
		}
		r := p.Range(s.start, s.length)
		y := protocol.DocumentSymbol{
			Name:           s.name,
			Detail:         d,
			Kind:           s.kind,
			Range:          r,
			SelectionRange: r, // or should this be the entire {{...}}?
		}
		ans = append(ans, y)
	}
	return ans, nil
}
"golang.org/x/tools/gopls/internal/cmd" "golang.org/x/tools/gopls/internal/hooks" - "golang.org/x/tools/gopls/internal/lsp/cmd" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/regtest" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/integration/fake" + "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/fakenet" "golang.org/x/tools/internal/jsonrpc2" @@ -57,7 +57,7 @@ const runAsGopls = "_GOPLS_BENCH_RUN_AS_GOPLS" func TestMain(m *testing.M) { bug.PanicOnBugs = true if os.Getenv(runAsGopls) == "true" { - tool.Main(context.Background(), cmd.New("gopls", "", nil, hooks.Options), os.Args[1:]) + tool.Main(context.Background(), cmd.New(hooks.Options), os.Args[1:]) os.Exit(0) } event.SetExporter(nil) // don't log to stderr @@ -109,7 +109,7 @@ func shallowClone(dir, repo, commitish string) error { // connectEditor connects a fake editor session in the given dir, using the // given editor config. 
-func connectEditor(dir string, config fake.EditorConfig, ts servertest.Connector) (*fake.Sandbox, *fake.Editor, *regtest.Awaiter, error) { +func connectEditor(dir string, config fake.EditorConfig, ts servertest.Connector) (*fake.Sandbox, *fake.Editor, *integration.Awaiter, error) { s, err := fake.NewSandbox(&fake.SandboxConfig{ Workdir: dir, GOPROXY: "https://proxy.golang.org/", @@ -118,7 +118,7 @@ func connectEditor(dir string, config fake.EditorConfig, ts servertest.Connector return nil, nil, nil, err } - a := regtest.NewAwaiter(s.Workdir) + a := integration.NewAwaiter(s.Workdir) const skipApplyEdits = false editor, err := fake.NewEditor(s, config).Connect(context.Background(), ts, a.Hooks(), skipApplyEdits) if err != nil { @@ -289,7 +289,7 @@ func (s *SidecarServer) Connect(ctx context.Context) jsonrpc2.Conn { // <repo>.<userSuffix>, and not deleted when the benchmark exits. Otherwise, // the profile is written to a temp file that is deleted after the cpu_seconds // metric has been computed. -func startProfileIfSupported(b *testing.B, env *regtest.Env, name string) func() { +func startProfileIfSupported(b *testing.B, env *integration.Env, name string) func() { if !env.Editor.HasCommand(command.StartProfile.ID()) { return nil } @@ -343,7 +343,7 @@ func totalCPUForProfile(filename string) (time.Duration, error) { // // It may be used to clean up files opened in the shared environment during // benchmarking. 
-func closeBuffer(b *testing.B, env *regtest.Env, name string) { +func closeBuffer(b *testing.B, env *integration.Env, name string) { b.StopTimer() env.CloseBuffer(name) env.AfterChange() diff --git a/gopls/internal/regtest/bench/codeaction_test.go b/gopls/internal/test/integration/bench/codeaction_test.go similarity index 88% rename from gopls/internal/regtest/bench/codeaction_test.go rename to gopls/internal/test/integration/bench/codeaction_test.go index c9ebe48c30d..fe89500da82 100644 --- a/gopls/internal/regtest/bench/codeaction_test.go +++ b/gopls/internal/test/integration/bench/codeaction_test.go @@ -9,7 +9,7 @@ import ( "sync/atomic" "testing" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" ) func BenchmarkCodeAction(b *testing.B) { @@ -41,7 +41,7 @@ func BenchmarkCodeActionFollowingEdit(b *testing.B) { env := getRepo(b, test.repo).sharedEnv(b) env.OpenFile(test.file) defer closeBuffer(b, env, test.file) - env.EditBuffer(test.file, protocol.TextEdit{NewText: "// __REGTEST_PLACEHOLDER_0__\n"}) + env.EditBuffer(test.file, protocol.TextEdit{NewText: "// __TEST_PLACEHOLDER_0__\n"}) env.AfterChange() env.CodeAction(test.file, nil) // pre-warm @@ -60,7 +60,7 @@ func BenchmarkCodeActionFollowingEdit(b *testing.B) { End: protocol.Position{Line: 1, Character: 0}, }, // Increment the placeholder text, to ensure cache misses. - NewText: fmt.Sprintf("// __REGTEST_PLACEHOLDER_%d__\n", edits), + NewText: fmt.Sprintf("// __TEST_PLACEHOLDER_%d__\n", edits), }) env.CodeAction(test.file, nil) } diff --git a/gopls/internal/test/integration/bench/completion_test.go b/gopls/internal/test/integration/bench/completion_test.go new file mode 100644 index 00000000000..bbbba0e3fd1 --- /dev/null +++ b/gopls/internal/test/integration/bench/completion_test.go @@ -0,0 +1,330 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package bench + +import ( + "flag" + "fmt" + "sync/atomic" + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/integration/fake" +) + +var completionGOPATH = flag.String("completion_gopath", "", "if set, use this GOPATH for BenchmarkCompletion") + +type completionBenchOptions struct { + file, locationRegexp string + + // Hooks to run edits before initial completion + setup func(*Env) // run before the benchmark starts + beforeCompletion func(*Env) // run before each completion +} + +// Deprecated: new tests should be expressed in BenchmarkCompletion. +func benchmarkCompletion(options completionBenchOptions, b *testing.B) { + repo := getRepo(b, "tools") + _ = repo.sharedEnv(b) // ensure cache is warm + env := repo.newEnv(b, fake.EditorConfig{}, "completion", false) + defer env.Close() + + // Run edits required for this completion. + if options.setup != nil { + options.setup(env) + } + + // Run a completion to make sure the system is warm. + loc := env.RegexpSearch(options.file, options.locationRegexp) + completions := env.Completion(loc) + + if testing.Verbose() { + fmt.Println("Results:") + for i := 0; i < len(completions.Items); i++ { + fmt.Printf("\t%d. %v\n", i, completions.Items[i]) + } + } + + b.Run("tools", func(b *testing.B) { + if stopAndRecord := startProfileIfSupported(b, env, qualifiedName("tools", "completion")); stopAndRecord != nil { + defer stopAndRecord() + } + + for i := 0; i < b.N; i++ { + if options.beforeCompletion != nil { + options.beforeCompletion(env) + } + env.Completion(loc) + } + }) +} + +// endRangeInBuffer returns the position for last character in the buffer for +// the given file. 
+func endRangeInBuffer(env *Env, name string) protocol.Range { + buffer := env.BufferText(name) + m := protocol.NewMapper("", []byte(buffer)) + rng, err := m.OffsetRange(len(buffer), len(buffer)) + if err != nil { + env.T.Fatal(err) + } + return rng +} + +// Benchmark struct completion in tools codebase. +func BenchmarkStructCompletion(b *testing.B) { + file := "internal/lsp/cache/session.go" + + setup := func(env *Env) { + env.OpenFile(file) + env.EditBuffer(file, protocol.TextEdit{ + Range: endRangeInBuffer(env, file), + NewText: "\nvar testVariable map[string]bool = Session{}.\n", + }) + } + + benchmarkCompletion(completionBenchOptions{ + file: file, + locationRegexp: `var testVariable map\[string\]bool = Session{}(\.)`, + setup: setup, + }, b) +} + +// Benchmark import completion in tools codebase. +func BenchmarkImportCompletion(b *testing.B) { + const file = "internal/lsp/source/completion/completion.go" + benchmarkCompletion(completionBenchOptions{ + file: file, + locationRegexp: `go\/()`, + setup: func(env *Env) { env.OpenFile(file) }, + }, b) +} + +// Benchmark slice completion in tools codebase. +func BenchmarkSliceCompletion(b *testing.B) { + file := "internal/lsp/cache/session.go" + + setup := func(env *Env) { + env.OpenFile(file) + env.EditBuffer(file, protocol.TextEdit{ + Range: endRangeInBuffer(env, file), + NewText: "\nvar testVariable []byte = \n", + }) + } + + benchmarkCompletion(completionBenchOptions{ + file: file, + locationRegexp: `var testVariable \[\]byte (=)`, + setup: setup, + }, b) +} + +// Benchmark deep completion in function call in tools codebase. +func BenchmarkFuncDeepCompletion(b *testing.B) { + file := "internal/lsp/source/completion/completion.go" + fileContent := ` +func (c *completer) _() { + c.inference.kindMatches(c.) 
+} +` + setup := func(env *Env) { + env.OpenFile(file) + originalBuffer := env.BufferText(file) + env.EditBuffer(file, protocol.TextEdit{ + Range: endRangeInBuffer(env, file), + // TODO(rfindley): this is a bug: it should just be fileContent. + NewText: originalBuffer + fileContent, + }) + } + + benchmarkCompletion(completionBenchOptions{ + file: file, + locationRegexp: `func \(c \*completer\) _\(\) {\n\tc\.inference\.kindMatches\((c)`, + setup: setup, + }, b) +} + +type completionTest struct { + repo string + name string + file string // repo-relative file to create + content string // file content + locationRegexp string // regexp for completion +} + +var completionTests = []completionTest{ + { + "tools", + "selector", + "internal/lsp/source/completion/completion2.go", + ` +package completion + +func (c *completer) _() { + c.inference.kindMatches(c.) +} +`, + `func \(c \*completer\) _\(\) {\n\tc\.inference\.kindMatches\((c)`, + }, + { + "tools", + "unimportedident", + "internal/lsp/source/completion/completion2.go", + ` +package completion + +func (c *completer) _() { + lo +} +`, + `lo()`, + }, + { + "tools", + "unimportedselector", + "internal/lsp/source/completion/completion2.go", + ` +package completion + +func (c *completer) _() { + log. +} +`, + `log\.()`, + }, + { + "kubernetes", + "selector", + "pkg/kubelet/kubelet2.go", + ` +package kubelet + +func (kl *Kubelet) _() { + kl. +} +`, + `kl\.()`, + }, + { + "kubernetes", + "identifier", + "pkg/kubelet/kubelet2.go", + ` +package kubelet + +func (kl *Kubelet) _() { + k // here +} +`, + `k() // here`, + }, + { + "oracle", + "selector", + "dataintegration/pivot2.go", + ` +package dataintegration + +func (p *Pivot) _() { + p. +} +`, + `p\.()`, + }, +} + +// Benchmark completion following an arbitrary edit. +// +// Edits force type-checked packages to be invalidated, so we want to measure +// how long it takes before completion results are available. 
+func BenchmarkCompletion(b *testing.B) { + for _, test := range completionTests { + b.Run(fmt.Sprintf("%s_%s", test.repo, test.name), func(b *testing.B) { + for _, followingEdit := range []bool{true, false} { + b.Run(fmt.Sprintf("edit=%v", followingEdit), func(b *testing.B) { + for _, completeUnimported := range []bool{true, false} { + b.Run(fmt.Sprintf("unimported=%v", completeUnimported), func(b *testing.B) { + for _, budget := range []string{"0s", "100ms"} { + b.Run(fmt.Sprintf("budget=%s", budget), func(b *testing.B) { + runCompletion(b, test, followingEdit, completeUnimported, budget) + }) + } + }) + } + }) + } + }) + } +} + +// For optimizing unimported completion, it can be useful to benchmark with a +// huge GOMODCACHE. +var gomodcache = flag.String("gomodcache", "", "optional GOMODCACHE for unimported completion benchmarks") + +func runCompletion(b *testing.B, test completionTest, followingEdit, completeUnimported bool, budget string) { + repo := getRepo(b, test.repo) + gopath := *completionGOPATH + if gopath == "" { + // use a warm GOPATH + sharedEnv := repo.sharedEnv(b) + gopath = sharedEnv.Sandbox.GOPATH() + } + envvars := map[string]string{ + "GOPATH": gopath, + } + + if *gomodcache != "" { + envvars["GOMODCACHE"] = *gomodcache + } + + env := repo.newEnv(b, fake.EditorConfig{ + Env: envvars, + Settings: map[string]interface{}{ + "completeUnimported": completeUnimported, + "completionBudget": budget, + }, + }, "completion", false) + defer env.Close() + + env.CreateBuffer(test.file, "// __TEST_PLACEHOLDER_0__\n"+test.content) + editPlaceholder := func() { + edits := atomic.AddInt64(&editID, 1) + env.EditBuffer(test.file, protocol.TextEdit{ + Range: protocol.Range{ + Start: protocol.Position{Line: 0, Character: 0}, + End: protocol.Position{Line: 1, Character: 0}, + }, + // Increment the placeholder text, to ensure cache misses. 
+ NewText: fmt.Sprintf("// __TEST_PLACEHOLDER_%d__\n", edits), + }) + } + env.AfterChange() + + // Run a completion to make sure the system is warm. + loc := env.RegexpSearch(test.file, test.locationRegexp) + completions := env.Completion(loc) + + if testing.Verbose() { + fmt.Println("Results:") + for i, item := range completions.Items { + fmt.Printf("\t%d. %v\n", i, item) + } + } + + b.ResetTimer() + + if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "completion")); stopAndRecord != nil { + defer stopAndRecord() + } + + for i := 0; i < b.N; i++ { + if followingEdit { + editPlaceholder() + } + loc := env.RegexpSearch(test.file, test.locationRegexp) + env.Completion(loc) + } +} diff --git a/gopls/internal/regtest/bench/definition_test.go b/gopls/internal/test/integration/bench/definition_test.go similarity index 100% rename from gopls/internal/regtest/bench/definition_test.go rename to gopls/internal/test/integration/bench/definition_test.go diff --git a/gopls/internal/regtest/bench/didchange_test.go b/gopls/internal/test/integration/bench/didchange_test.go similarity index 91% rename from gopls/internal/regtest/bench/didchange_test.go rename to gopls/internal/test/integration/bench/didchange_test.go index 56da0ae7a68..22e7ca2a11b 100644 --- a/gopls/internal/regtest/bench/didchange_test.go +++ b/gopls/internal/test/integration/bench/didchange_test.go @@ -10,8 +10,8 @@ import ( "testing" "time" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/integration/fake" ) // Use a global edit counter as bench function may execute multiple times, and @@ -48,7 +48,7 @@ func BenchmarkDidChange(b *testing.B) { defer closeBuffer(b, env, test.file) // Insert the text we'll be modifying at the top of the file. 
- env.EditBuffer(test.file, protocol.TextEdit{NewText: "// __REGTEST_PLACEHOLDER_0__\n"}) + env.EditBuffer(test.file, protocol.TextEdit{NewText: "// __TEST_PLACEHOLDER_0__\n"}) env.AfterChange() b.ResetTimer() @@ -64,7 +64,7 @@ func BenchmarkDidChange(b *testing.B) { End: protocol.Position{Line: 1, Character: 0}, }, // Increment the placeholder text, to ensure cache misses. - NewText: fmt.Sprintf("// __REGTEST_PLACEHOLDER_%d__\n", edits), + NewText: fmt.Sprintf("// __TEST_PLACEHOLDER_%d__\n", edits), }) env.Await(env.StartedChange()) } @@ -108,7 +108,7 @@ func runChangeDiagnosticsBenchmark(b *testing.B, test changeTest, save bool, ope defer env.Close() env.OpenFile(test.file) // Insert the text we'll be modifying at the top of the file. - env.EditBuffer(test.file, protocol.TextEdit{NewText: "// __REGTEST_PLACEHOLDER_0__\n"}) + env.EditBuffer(test.file, protocol.TextEdit{NewText: "// __TEST_PLACEHOLDER_0__\n"}) if save { env.SaveBuffer(test.file) } @@ -130,7 +130,7 @@ func runChangeDiagnosticsBenchmark(b *testing.B, test changeTest, save bool, ope End: protocol.Position{Line: 1, Character: 0}, }, // Increment the placeholder text, to ensure cache misses. 
- NewText: fmt.Sprintf("// __REGTEST_PLACEHOLDER_%d__\n", edits), + NewText: fmt.Sprintf("// __TEST_PLACEHOLDER_%d__\n", edits), }) if save { env.SaveBuffer(test.file) diff --git a/gopls/internal/regtest/bench/doc.go b/gopls/internal/test/integration/bench/doc.go similarity index 100% rename from gopls/internal/regtest/bench/doc.go rename to gopls/internal/test/integration/bench/doc.go diff --git a/gopls/internal/regtest/bench/hover_test.go b/gopls/internal/test/integration/bench/hover_test.go similarity index 100% rename from gopls/internal/regtest/bench/hover_test.go rename to gopls/internal/test/integration/bench/hover_test.go diff --git a/gopls/internal/regtest/bench/implementations_test.go b/gopls/internal/test/integration/bench/implementations_test.go similarity index 100% rename from gopls/internal/regtest/bench/implementations_test.go rename to gopls/internal/test/integration/bench/implementations_test.go diff --git a/gopls/internal/test/integration/bench/iwl_test.go b/gopls/internal/test/integration/bench/iwl_test.go new file mode 100644 index 00000000000..07a5d9070d0 --- /dev/null +++ b/gopls/internal/test/integration/bench/iwl_test.go @@ -0,0 +1,72 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package bench + +import ( + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/integration/fake" +) + +// BenchmarkInitialWorkspaceLoad benchmarks the initial workspace load time for +// a new editing session. 
+func BenchmarkInitialWorkspaceLoad(b *testing.B) { + repoNames := []string{ + "google-cloud-go", + "istio", + "kubernetes", + "kuma", + "oracle", + "pkgsite", + "starlark", + "tools", + "hashiform", + } + for _, repoName := range repoNames { + b.Run(repoName, func(b *testing.B) { + repo := getRepo(b, repoName) + // get the (initialized) shared env to ensure the cache is warm. + // Reuse its GOPATH so that we get cache hits for things in the module + // cache. + sharedEnv := repo.sharedEnv(b) + b.ResetTimer() + + for i := 0; i < b.N; i++ { + doIWL(b, sharedEnv.Sandbox.GOPATH(), repo) + } + }) + } +} + +func doIWL(b *testing.B, gopath string, repo *repo) { + // Exclude the time to set up the env from the benchmark time, as this may + // involve installing gopls and/or checking out the repo dir. + b.StopTimer() + config := fake.EditorConfig{Env: map[string]string{"GOPATH": gopath}} + env := repo.newEnv(b, config, "iwl", true) + defer env.Close() + b.StartTimer() + + // Note: in the future, we may need to open a file in order to cause gopls to + // start loading the workspace. 
+ + env.Await(InitialWorkspaceLoad) + + if env.Editor.HasCommand(command.MemStats.ID()) { + b.StopTimer() + params := &protocol.ExecuteCommandParams{ + Command: command.MemStats.ID(), + } + var memstats command.MemStatsResult + env.ExecuteCommand(params, &memstats) + b.ReportMetric(float64(memstats.HeapAlloc), "alloc_bytes") + b.ReportMetric(float64(memstats.HeapInUse), "in_use_bytes") + b.ReportMetric(float64(memstats.TotalAlloc), "total_alloc_bytes") + b.StartTimer() + } +} diff --git a/gopls/internal/regtest/bench/references_test.go b/gopls/internal/test/integration/bench/references_test.go similarity index 100% rename from gopls/internal/regtest/bench/references_test.go rename to gopls/internal/test/integration/bench/references_test.go diff --git a/gopls/internal/regtest/bench/reload_test.go b/gopls/internal/test/integration/bench/reload_test.go similarity index 96% rename from gopls/internal/regtest/bench/reload_test.go rename to gopls/internal/test/integration/bench/reload_test.go index dbe8827cb09..332809ee1eb 100644 --- a/gopls/internal/regtest/bench/reload_test.go +++ b/gopls/internal/test/integration/bench/reload_test.go @@ -6,7 +6,7 @@ package bench import ( "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + . "golang.org/x/tools/gopls/internal/test/integration" ) // BenchmarkReload benchmarks reloading a file metadata after a change to an import. 
diff --git a/gopls/internal/regtest/bench/rename_test.go b/gopls/internal/test/integration/bench/rename_test.go similarity index 100% rename from gopls/internal/regtest/bench/rename_test.go rename to gopls/internal/test/integration/bench/rename_test.go diff --git a/gopls/internal/regtest/bench/repo_test.go b/gopls/internal/test/integration/bench/repo_test.go similarity index 98% rename from gopls/internal/regtest/bench/repo_test.go rename to gopls/internal/test/integration/bench/repo_test.go index 3a4575e65c4..73390a78885 100644 --- a/gopls/internal/regtest/bench/repo_test.go +++ b/gopls/internal/test/integration/bench/repo_test.go @@ -17,8 +17,8 @@ import ( "testing" "time" - "golang.org/x/tools/gopls/internal/lsp/fake" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/integration/fake" ) // repos holds shared repositories for use in benchmarks. diff --git a/gopls/internal/regtest/bench/stress_test.go b/gopls/internal/test/integration/bench/stress_test.go similarity index 94% rename from gopls/internal/regtest/bench/stress_test.go rename to gopls/internal/test/integration/bench/stress_test.go index 15a2c908158..4ec272f5002 100644 --- a/gopls/internal/regtest/bench/stress_test.go +++ b/gopls/internal/test/integration/bench/stress_test.go @@ -11,10 +11,10 @@ import ( "testing" "time" + "golang.org/x/tools/gopls/internal/cache" "golang.org/x/tools/gopls/internal/hooks" - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/lsprpc" + "golang.org/x/tools/gopls/internal/lsprpc" + "golang.org/x/tools/gopls/internal/test/integration/fake" "golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/jsonrpc2/servertest" ) diff --git a/gopls/internal/regtest/bench/typing_test.go b/gopls/internal/test/integration/bench/typing_test.go similarity index 89% rename from 
gopls/internal/regtest/bench/typing_test.go rename to gopls/internal/test/integration/bench/typing_test.go index 0ce90cd912f..78bd16cef5b 100644 --- a/gopls/internal/regtest/bench/typing_test.go +++ b/gopls/internal/test/integration/bench/typing_test.go @@ -10,7 +10,7 @@ import ( "testing" "time" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" ) // BenchmarkTyping simulates typing steadily in a single file at different @@ -26,7 +26,7 @@ func BenchmarkTyping(b *testing.B) { defer closeBuffer(b, env, test.file) // Insert the text we'll be modifying at the top of the file. - env.EditBuffer(test.file, protocol.TextEdit{NewText: "// __REGTEST_PLACEHOLDER_0__\n"}) + env.EditBuffer(test.file, protocol.TextEdit{NewText: "// __TEST_PLACEHOLDER_0__\n"}) env.AfterChange() delays := []time.Duration{ @@ -49,7 +49,7 @@ func BenchmarkTyping(b *testing.B) { End: protocol.Position{Line: 1, Character: 0}, }, // Increment the placeholder text, to ensure cache misses. - NewText: fmt.Sprintf("// __REGTEST_PLACEHOLDER_%d__\n", edits), + NewText: fmt.Sprintf("// __TEST_PLACEHOLDER_%d__\n", edits), }) <-ticker.C } diff --git a/gopls/internal/regtest/bench/workspace_symbols_test.go b/gopls/internal/test/integration/bench/workspace_symbols_test.go similarity index 100% rename from gopls/internal/regtest/bench/workspace_symbols_test.go rename to gopls/internal/test/integration/bench/workspace_symbols_test.go diff --git a/gopls/internal/test/integration/codelens/codelens_test.go b/gopls/internal/test/integration/codelens/codelens_test.go new file mode 100644 index 00000000000..c1c28dab803 --- /dev/null +++ b/gopls/internal/test/integration/codelens/codelens_test.go @@ -0,0 +1,405 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package codelens + +import ( + "fmt" + "testing" + + "golang.org/x/tools/gopls/internal/hooks" + "golang.org/x/tools/gopls/internal/server" + "golang.org/x/tools/gopls/internal/test/compare" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/util/bug" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/internal/testenv" +) + +func TestMain(m *testing.M) { + bug.PanicOnBugs = true + Main(m, hooks.Options) +} + +func TestDisablingCodeLens(t *testing.T) { + const workspace = ` +-- go.mod -- +module codelens.test + +go 1.12 +-- lib.go -- +package lib + +type Number int + +const ( + Zero Number = iota + One + Two +) + +//` + `go:generate stringer -type=Number +` + tests := []struct { + label string + enabled map[string]bool + wantCodeLens bool + }{ + { + label: "default", + wantCodeLens: true, + }, + { + label: "generate disabled", + enabled: map[string]bool{string(command.Generate): false}, + wantCodeLens: false, + }, + } + for _, test := range tests { + t.Run(test.label, func(t *testing.T) { + WithOptions( + Settings{"codelenses": test.enabled}, + ).Run(t, workspace, func(t *testing.T, env *Env) { + env.OpenFile("lib.go") + lens := env.CodeLens("lib.go") + if gotCodeLens := len(lens) > 0; gotCodeLens != test.wantCodeLens { + t.Errorf("got codeLens: %t, want %t", gotCodeLens, test.wantCodeLens) + } + }) + }) + } +} + +const proxyWithLatest = ` +-- golang.org/x/hello@v1.3.3/go.mod -- +module golang.org/x/hello + +go 1.12 +-- golang.org/x/hello@v1.3.3/hi/hi.go -- +package hi + +var Goodbye error +-- golang.org/x/hello@v1.2.3/go.mod -- +module golang.org/x/hello + +go 1.12 +-- golang.org/x/hello@v1.2.3/hi/hi.go -- +package hi + +var Goodbye error +` + +// This test confirms the full functionality of the code lenses for updating +// dependencies in a go.mod file, when using a go.work file. 
It checks for the +// code lens that suggests an update and then executes the command associated +// with that code lens. A regression test for golang/go#39446. It also checks +// that these code lenses only affect the diagnostics and contents of the +// containing go.mod file. +func TestUpgradeCodelens_Workspace(t *testing.T) { + const shouldUpdateDep = ` +-- go.work -- +go 1.18 + +use ( + ./a + ./b +) +-- a/go.mod -- +module mod.com/a + +go 1.14 + +require golang.org/x/hello v1.2.3 +-- a/go.sum -- +golang.org/x/hello v1.2.3 h1:7Wesfkx/uBd+eFgPrq0irYj/1XfmbvLV8jZ/W7C2Dwg= +golang.org/x/hello v1.2.3/go.mod h1:OgtlzsxVMUUdsdQCIDYgaauCTH47B8T8vofouNJfzgY= +-- a/main.go -- +package main + +import "golang.org/x/hello/hi" + +func main() { + _ = hi.Goodbye +} +-- b/go.mod -- +module mod.com/b + +go 1.14 + +require golang.org/x/hello v1.2.3 +-- b/go.sum -- +golang.org/x/hello v1.2.3 h1:7Wesfkx/uBd+eFgPrq0irYj/1XfmbvLV8jZ/W7C2Dwg= +golang.org/x/hello v1.2.3/go.mod h1:OgtlzsxVMUUdsdQCIDYgaauCTH47B8T8vofouNJfzgY= +-- b/main.go -- +package main + +import ( + "golang.org/x/hello/hi" +) + +func main() { + _ = hi.Goodbye +} +` + + const wantGoModA = `module mod.com/a + +go 1.14 + +require golang.org/x/hello v1.3.3 +` + // Applying the diagnostics or running the codelenses for a/go.mod + // should not change the contents of b/go.mod + const wantGoModB = `module mod.com/b + +go 1.14 + +require golang.org/x/hello v1.2.3 +` + + for _, commandTitle := range []string{ + "Upgrade transitive dependencies", + "Upgrade direct dependencies", + } { + t.Run(commandTitle, func(t *testing.T) { + WithOptions( + ProxyFiles(proxyWithLatest), + ).Run(t, shouldUpdateDep, func(t *testing.T, env *Env) { + env.OpenFile("a/go.mod") + env.OpenFile("b/go.mod") + var lens protocol.CodeLens + var found bool + for _, l := range env.CodeLens("a/go.mod") { + if l.Command.Title == commandTitle { + lens = l + found = true + } + } + if !found { + t.Fatalf("found no command with the title %s", commandTitle) + } + 
if _, err := env.Editor.ExecuteCommand(env.Ctx, &protocol.ExecuteCommandParams{ + Command: lens.Command.Command, + Arguments: lens.Command.Arguments, + }); err != nil { + t.Fatal(err) + } + env.AfterChange() + if got := env.BufferText("a/go.mod"); got != wantGoModA { + t.Fatalf("a/go.mod upgrade failed:\n%s", compare.Text(wantGoModA, got)) + } + if got := env.BufferText("b/go.mod"); got != wantGoModB { + t.Fatalf("b/go.mod changed unexpectedly:\n%s", compare.Text(wantGoModB, got)) + } + }) + }) + } + for _, vendoring := range []bool{false, true} { + t.Run(fmt.Sprintf("Upgrade individual dependency vendoring=%v", vendoring), func(t *testing.T) { + WithOptions( + ProxyFiles(proxyWithLatest), + ).Run(t, shouldUpdateDep, func(t *testing.T, env *Env) { + if vendoring { + env.RunGoCommandInDirWithEnv("a", []string{"GOWORK=off"}, "mod", "vendor") + } + env.AfterChange() + env.OpenFile("a/go.mod") + env.OpenFile("b/go.mod") + + env.ExecuteCodeLensCommand("a/go.mod", command.CheckUpgrades, nil) + d := &protocol.PublishDiagnosticsParams{} + env.OnceMet( + CompletedWork(server.DiagnosticWorkTitle(server.FromCheckUpgrades), 1, true), + Diagnostics(env.AtRegexp("a/go.mod", `require`), WithMessage("can be upgraded")), + ReadDiagnostics("a/go.mod", d), + // We do not want there to be a diagnostic for b/go.mod, + // but there may be some subtlety in timing here, where this + // should always succeed, but may not actually test the correct + // behavior. + NoDiagnostics(env.AtRegexp("b/go.mod", `require`)), + ) + // Check for upgrades in b/go.mod and then clear them. 
+ env.ExecuteCodeLensCommand("b/go.mod", command.CheckUpgrades, nil) + env.OnceMet( + CompletedWork(server.DiagnosticWorkTitle(server.FromCheckUpgrades), 2, true), + Diagnostics(env.AtRegexp("b/go.mod", `require`), WithMessage("can be upgraded")), + ) + env.ExecuteCodeLensCommand("b/go.mod", command.ResetGoModDiagnostics, nil) + env.OnceMet( + CompletedWork(server.DiagnosticWorkTitle(server.FromResetGoModDiagnostics), 1, true), + NoDiagnostics(ForFile("b/go.mod")), + ) + + // Apply the diagnostics to a/go.mod. + env.ApplyQuickFixes("a/go.mod", d.Diagnostics) + env.AfterChange() + if got := env.BufferText("a/go.mod"); got != wantGoModA { + t.Fatalf("a/go.mod upgrade failed:\n%s", compare.Text(wantGoModA, got)) + } + if got := env.BufferText("b/go.mod"); got != wantGoModB { + t.Fatalf("b/go.mod changed unexpectedly:\n%s", compare.Text(wantGoModB, got)) + } + }) + }) + } +} + +func TestUpgradeCodelens_ModVendor(t *testing.T) { + // This test checks the regression of golang/go#66055. The upgrade codelens + // should work in a mod vendor context (the test above using a go.work file + // was not broken). 
+ testenv.NeedsGo1Point(t, 22) + const shouldUpdateDep = ` +-- go.mod -- +module mod.com/a + +go 1.22 + +require golang.org/x/hello v1.2.3 +-- go.sum -- +golang.org/x/hello v1.2.3 h1:7Wesfkx/uBd+eFgPrq0irYj/1XfmbvLV8jZ/W7C2Dwg= +golang.org/x/hello v1.2.3/go.mod h1:OgtlzsxVMUUdsdQCIDYgaauCTH47B8T8vofouNJfzgY= +-- main.go -- +package main + +import "golang.org/x/hello/hi" + +func main() { + _ = hi.Goodbye +} +` + + const wantGoModA = `module mod.com/a + +go 1.22 + +require golang.org/x/hello v1.3.3 +` + + WithOptions( + ProxyFiles(proxyWithLatest), + ).Run(t, shouldUpdateDep, func(t *testing.T, env *Env) { + env.RunGoCommand("mod", "vendor") + env.AfterChange() + env.OpenFile("go.mod") + + env.ExecuteCodeLensCommand("go.mod", command.CheckUpgrades, nil) + d := &protocol.PublishDiagnosticsParams{} + env.OnceMet( + CompletedWork(server.DiagnosticWorkTitle(server.FromCheckUpgrades), 1, true), + Diagnostics(env.AtRegexp("go.mod", `require`), WithMessage("can be upgraded")), + ReadDiagnostics("go.mod", d), + ) + + // Apply the diagnostics to a/go.mod. 
+ env.ApplyQuickFixes("go.mod", d.Diagnostics) + env.AfterChange() + if got := env.BufferText("go.mod"); got != wantGoModA { + t.Fatalf("go.mod upgrade failed:\n%s", compare.Text(wantGoModA, got)) + } + }) +} + +func TestUnusedDependenciesCodelens(t *testing.T) { + const proxy = ` +-- golang.org/x/hello@v1.0.0/go.mod -- +module golang.org/x/hello + +go 1.14 +-- golang.org/x/hello@v1.0.0/hi/hi.go -- +package hi + +var Goodbye error +-- golang.org/x/unused@v1.0.0/go.mod -- +module golang.org/x/unused + +go 1.14 +-- golang.org/x/unused@v1.0.0/nouse/nouse.go -- +package nouse + +var NotUsed error +` + + const shouldRemoveDep = ` +-- go.mod -- +module mod.com + +go 1.14 + +require golang.org/x/hello v1.0.0 +require golang.org/x/unused v1.0.0 +-- go.sum -- +golang.org/x/hello v1.0.0 h1:qbzE1/qT0/zojAMd/JcPsO2Vb9K4Bkeyq0vB2JGMmsw= +golang.org/x/hello v1.0.0/go.mod h1:WW7ER2MRNXWA6c8/4bDIek4Hc/+DofTrMaQQitGXcco= +golang.org/x/unused v1.0.0 h1:LecSbCn5P3vTcxubungSt1Pn4D/WocCaiWOPDC0y0rw= +golang.org/x/unused v1.0.0/go.mod h1:ihoW8SgWzugwwj0N2SfLfPZCxTB1QOVfhMfB5PWTQ8U= +-- main.go -- +package main + +import "golang.org/x/hello/hi" + +func main() { + _ = hi.Goodbye +} +` + WithOptions(ProxyFiles(proxy)).Run(t, shouldRemoveDep, func(t *testing.T, env *Env) { + env.OpenFile("go.mod") + env.ExecuteCodeLensCommand("go.mod", command.Tidy, nil) + env.AfterChange() + got := env.BufferText("go.mod") + const wantGoMod = `module mod.com + +go 1.14 + +require golang.org/x/hello v1.0.0 +` + if got != wantGoMod { + t.Fatalf("go.mod tidy failed:\n%s", compare.Text(wantGoMod, got)) + } + }) +} + +func TestRegenerateCgo(t *testing.T) { + testenv.NeedsTool(t, "cgo") + const workspace = ` +-- go.mod -- +module example.com + +go 1.12 +-- cgo.go -- +package x + +/* +int fortythree() { return 42; } +*/ +import "C" + +func Foo() { + print(C.fortytwo()) +} +` + Run(t, workspace, func(t *testing.T, env *Env) { + // Open the file. We have a nonexistant symbol that will break cgo processing. 
+ env.OpenFile("cgo.go") + env.AfterChange( + Diagnostics(env.AtRegexp("cgo.go", ``), WithMessage("go list failed to return CompiledGoFiles")), + ) + + // Fix the C function name. We haven't regenerated cgo, so nothing should be fixed. + env.RegexpReplace("cgo.go", `int fortythree`, "int fortytwo") + env.SaveBuffer("cgo.go") + env.AfterChange( + Diagnostics(env.AtRegexp("cgo.go", ``), WithMessage("go list failed to return CompiledGoFiles")), + ) + + // Regenerate cgo, fixing the diagnostic. + env.ExecuteCodeLensCommand("cgo.go", command.RegenerateCgo, nil) + env.OnceMet( + CompletedWork(server.DiagnosticWorkTitle(server.FromRegenerateCgo), 1, true), + NoDiagnostics(ForFile("cgo.go")), + ) + }) +} diff --git a/gopls/internal/regtest/codelens/gcdetails_test.go b/gopls/internal/test/integration/codelens/gcdetails_test.go similarity index 83% rename from gopls/internal/regtest/codelens/gcdetails_test.go rename to gopls/internal/test/integration/codelens/gcdetails_test.go index ebb02499512..4d3024defe5 100644 --- a/gopls/internal/regtest/codelens/gcdetails_test.go +++ b/gopls/internal/test/integration/codelens/gcdetails_test.go @@ -9,11 +9,12 @@ import ( "strings" "testing" - "golang.org/x/tools/gopls/internal/bug" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/server" + . 
"golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/integration/fake" + "golang.org/x/tools/gopls/internal/util/bug" ) func TestGCDetails_Toggle(t *testing.T) { @@ -46,7 +47,7 @@ func main() { env.ExecuteCodeLensCommand("main.go", command.GCDetails, nil) d := &protocol.PublishDiagnosticsParams{} env.OnceMet( - Diagnostics(AtPosition("main.go", 5, 13)), + CompletedWork(server.DiagnosticWorkTitle(server.FromToggleGCDetails), 1, true), ReadDiagnostics("main.go", d), ) // Confirm that the diagnostics come from the gc details code lens. @@ -75,7 +76,10 @@ func main() { // Toggle the GC details code lens again so now it should be off. env.ExecuteCodeLensCommand("main.go", command.GCDetails, nil) - env.Await(NoDiagnostics(ForFile("main.go"))) + env.OnceMet( + CompletedWork(server.DiagnosticWorkTitle(server.FromToggleGCDetails), 2, true), + NoDiagnostics(ForFile("main.go")), + ) }) } @@ -98,12 +102,10 @@ go 1.12 ).Run(t, src, func(t *testing.T, env *Env) { env.CreateBuffer("p_test.go", "") - const gcDetailsCommand = "gopls." + string(command.GCDetails) - hasGCDetails := func() bool { lenses := env.CodeLens("p_test.go") // should not crash for _, lens := range lenses { - if lens.Command.Command == gcDetailsCommand { + if lens.Command.Command == command.GCDetails.ID() { return true } } diff --git a/gopls/internal/regtest/completion/completion18_test.go b/gopls/internal/test/integration/completion/completion18_test.go similarity index 95% rename from gopls/internal/regtest/completion/completion18_test.go rename to gopls/internal/test/integration/completion/completion18_test.go index 18e81bc4b34..a35061d693b 100644 --- a/gopls/internal/regtest/completion/completion18_test.go +++ b/gopls/internal/test/integration/completion/completion18_test.go @@ -2,16 +2,13 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build go1.18 -// +build go1.18 - package completion import ( "testing" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + "golang.org/x/tools/gopls/internal/protocol" + . "golang.org/x/tools/gopls/internal/test/integration" ) // test generic receivers diff --git a/gopls/internal/test/integration/completion/completion_test.go b/gopls/internal/test/integration/completion/completion_test.go new file mode 100644 index 00000000000..4498e1b1e55 --- /dev/null +++ b/gopls/internal/test/integration/completion/completion_test.go @@ -0,0 +1,1065 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package completion + +import ( + "fmt" + "sort" + "strings" + "testing" + "time" + + "github.com/google/go-cmp/cmp" + "golang.org/x/telemetry/counter" + "golang.org/x/telemetry/counter/countertest" + "golang.org/x/tools/gopls/internal/hooks" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/server" + . 
"golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/integration/fake" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/internal/testenv" +) + +func TestMain(m *testing.M) { + bug.PanicOnBugs = true + Main(m, hooks.Options) +} + +const proxy = ` +-- example.com@v1.2.3/go.mod -- +module example.com + +go 1.12 +-- example.com@v1.2.3/blah/blah.go -- +package blah + +const Name = "Blah" +-- random.org@v1.2.3/go.mod -- +module random.org + +go 1.12 +-- random.org@v1.2.3/blah/blah.go -- +package hello + +const Name = "Hello" +` + +func TestPackageCompletion(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.12 +-- fruits/apple.go -- +package apple + +fun apple() int { + return 0 +} + +-- fruits/testfile.go -- +// this is a comment + +/* + this is a multiline comment +*/ + +import "fmt" + +func test() {} + +-- fruits/testfile2.go -- +package + +-- fruits/testfile3.go -- +pac +-- 123f_r.u~its-123/testfile.go -- +package + +-- .invalid-dir@-name/testfile.go -- +package +` + var ( + testfile4 = "" + testfile5 = "/*a comment*/ " + testfile6 = "/*a comment*/\n" + ) + for _, tc := range []struct { + name string + filename string + content *string + triggerRegexp string + want []string + editRegexp string + }{ + { + name: "package completion at valid position", + filename: "fruits/testfile.go", + triggerRegexp: "\n()", + want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, + editRegexp: "\n()", + }, + { + name: "package completion in a comment", + filename: "fruits/testfile.go", + triggerRegexp: "th(i)s", + want: nil, + }, + { + name: "package completion in a multiline comment", + filename: "fruits/testfile.go", + triggerRegexp: `\/\*\n()`, + want: nil, + }, + { + name: "package completion at invalid position", + filename: "fruits/testfile.go", + triggerRegexp: "import \"fmt\"\n()", + want: nil, + }, + { + name: "package completion after 
keyword 'package'", + filename: "fruits/testfile2.go", + triggerRegexp: "package()", + want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, + editRegexp: "package\n", + }, + { + name: "package completion with 'pac' prefix", + filename: "fruits/testfile3.go", + triggerRegexp: "pac()", + want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, + editRegexp: "pac", + }, + { + name: "package completion for empty file", + filename: "fruits/testfile4.go", + triggerRegexp: "^$", + content: &testfile4, + want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, + editRegexp: "^$", + }, + { + name: "package completion without terminal newline", + filename: "fruits/testfile5.go", + triggerRegexp: `\*\/ ()`, + content: &testfile5, + want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, + editRegexp: `\*\/ ()`, + }, + { + name: "package completion on terminal newline", + filename: "fruits/testfile6.go", + triggerRegexp: `\*\/\n()`, + content: &testfile6, + want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, + editRegexp: `\*\/\n()`, + }, + // Issue golang/go#44680 + { + name: "package completion for dir name with punctuation", + filename: "123f_r.u~its-123/testfile.go", + triggerRegexp: "package()", + want: []string{"package fruits123", "package fruits123_test", "package main"}, + editRegexp: "package\n", + }, + { + name: "package completion for invalid dir name", + filename: ".invalid-dir@-name/testfile.go", + triggerRegexp: "package()", + want: []string{"package main"}, + editRegexp: "package\n", + }, + } { + t.Run(tc.name, func(t *testing.T) { + Run(t, files, func(t *testing.T, env *Env) { + if tc.content != nil { + env.WriteWorkspaceFile(tc.filename, *tc.content) + 
env.Await(env.DoneWithChangeWatchedFiles()) + } + env.OpenFile(tc.filename) + completions := env.Completion(env.RegexpSearch(tc.filename, tc.triggerRegexp)) + + // Check that the completion item suggestions are in the range + // of the file. {Start,End}.Line are zero-based. + lineCount := len(strings.Split(env.BufferText(tc.filename), "\n")) + for _, item := range completions.Items { + if start := int(item.TextEdit.Range.Start.Line); start > lineCount { + t.Fatalf("unexpected text edit range start line number: got %d, want <= %d", start, lineCount) + } + if end := int(item.TextEdit.Range.End.Line); end > lineCount { + t.Fatalf("unexpected text edit range end line number: got %d, want <= %d", end, lineCount) + } + } + + if tc.want != nil { + expectedLoc := env.RegexpSearch(tc.filename, tc.editRegexp) + for _, item := range completions.Items { + gotRng := item.TextEdit.Range + if expectedLoc.Range != gotRng { + t.Errorf("unexpected completion range for completion item %s: got %v, want %v", + item.Label, gotRng, expectedLoc.Range) + } + } + } + + diff := compareCompletionLabels(tc.want, completions.Items) + if diff != "" { + t.Error(diff) + } + }) + }) + } +} + +func TestPackageNameCompletion(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.12 +-- math/add.go -- +package ma +` + + want := []string{"ma", "ma_test", "main", "math", "math_test"} + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("math/add.go") + completions := env.Completion(env.RegexpSearch("math/add.go", "package ma()")) + + diff := compareCompletionLabels(want, completions.Items) + if diff != "" { + t.Fatal(diff) + } + }) +} + +// TODO(rfindley): audit/clean up call sites for this helper, to ensure +// consistent test errors. 
+func compareCompletionLabels(want []string, gotItems []protocol.CompletionItem) string { + var got []string + for _, item := range gotItems { + got = append(got, item.Label) + if item.Label != item.InsertText && item.TextEdit == nil { + // Label should be the same as InsertText, if InsertText is to be used + return fmt.Sprintf("label not the same as InsertText %#v", item) + } + } + + if len(got) == 0 && len(want) == 0 { + return "" // treat nil and the empty slice as equivalent + } + + if diff := cmp.Diff(want, got); diff != "" { + return fmt.Sprintf("completion item mismatch (-want +got):\n%s", diff) + } + return "" +} + +func TestUnimportedCompletion(t *testing.T) { + const mod = ` +-- go.mod -- +module mod.com + +go 1.14 + +require example.com v1.2.3 +-- go.sum -- +example.com v1.2.3 h1:ihBTGWGjTU3V4ZJ9OmHITkU9WQ4lGdQkMjgyLFk0FaY= +example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= +-- main.go -- +package main + +func main() { + _ = blah +} +-- main2.go -- +package main + +import "example.com/blah" + +func _() { + _ = blah.Hello +} +` + WithOptions( + ProxyFiles(proxy), + ).Run(t, mod, func(t *testing.T, env *Env) { + // Make sure the dependency is in the module cache and accessible for + // unimported completions, and then remove it before proceeding. + env.RemoveWorkspaceFile("main2.go") + env.RunGoCommand("mod", "tidy") + env.Await(env.DoneWithChangeWatchedFiles()) + + // Trigger unimported completions for the example.com/blah package. + env.OpenFile("main.go") + env.Await(env.DoneWithOpen()) + loc := env.RegexpSearch("main.go", "ah") + completions := env.Completion(loc) + if len(completions.Items) == 0 { + t.Fatalf("no completion items") + } + env.AcceptCompletion(loc, completions.Items[0]) // adds blah import to main.go + env.Await(env.DoneWithChange()) + + // Trigger completions once again for the blah.<> selector. 
+ env.RegexpReplace("main.go", "_ = blah", "_ = blah.") + env.Await(env.DoneWithChange()) + loc = env.RegexpSearch("main.go", "\n}") + completions = env.Completion(loc) + if len(completions.Items) != 1 { + t.Fatalf("expected 1 completion item, got %v", len(completions.Items)) + } + item := completions.Items[0] + if item.Label != "Name" { + t.Fatalf("expected completion item blah.Name, got %v", item.Label) + } + env.AcceptCompletion(loc, item) + + // Await the diagnostics to add example.com/blah to the go.mod file. + env.AfterChange( + Diagnostics(env.AtRegexp("main.go", `"example.com/blah"`)), + ) + }) +} + +// Test that completions still work with an undownloaded module, golang/go#43333. +func TestUndownloadedModule(t *testing.T) { + // mod.com depends on example.com, but only in a file that's hidden by a + // build tag, so the IWL won't download example.com. That will cause errors + // in the go list -m call performed by the imports package. + const files = ` +-- go.mod -- +module mod.com + +go 1.14 + +require example.com v1.2.3 +-- go.sum -- +example.com v1.2.3 h1:ihBTGWGjTU3V4ZJ9OmHITkU9WQ4lGdQkMjgyLFk0FaY= +example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= +-- useblah.go -- +// +build hidden + +package pkg +import "example.com/blah" +var _ = blah.Name +-- mainmod/mainmod.go -- +package mainmod + +const Name = "mainmod" +` + WithOptions(ProxyFiles(proxy)).Run(t, files, func(t *testing.T, env *Env) { + env.CreateBuffer("import.go", "package pkg\nvar _ = mainmod.Name\n") + env.SaveBuffer("import.go") + content := env.ReadWorkspaceFile("import.go") + if !strings.Contains(content, `import "mod.com/mainmod`) { + t.Errorf("expected import of mod.com/mainmod in %q", content) + } + }) +} + +// Test that we can doctor the source code enough so the file is +// parseable and completion works as expected. 
+func TestSourceFixup(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.12 +-- foo.go -- +package foo + +func _() { + var s S + if s. +} + +type S struct { + i int +} +` + + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("foo.go") + completions := env.Completion(env.RegexpSearch("foo.go", `if s\.()`)) + diff := compareCompletionLabels([]string{"i"}, completions.Items) + if diff != "" { + t.Fatal(diff) + } + }) +} + +func TestCompletion_Issue45510(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.12 +-- main.go -- +package main + +func _() { + type a *a + var aaaa1, aaaa2 a + var _ a = aaaa + + type b a + var bbbb1, bbbb2 b + var _ b = bbbb +} + +type ( + c *d + d *e + e **c +) + +func _() { + var ( + xxxxc c + xxxxd d + xxxxe e + ) + + var _ c = xxxx + var _ d = xxxx + var _ e = xxxx +} +` + + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + + tests := []struct { + re string + want []string + }{ + {`var _ a = aaaa()`, []string{"aaaa1", "aaaa2"}}, + {`var _ b = bbbb()`, []string{"bbbb1", "bbbb2"}}, + {`var _ c = xxxx()`, []string{"xxxxc", "xxxxd", "xxxxe"}}, + {`var _ d = xxxx()`, []string{"xxxxc", "xxxxd", "xxxxe"}}, + {`var _ e = xxxx()`, []string{"xxxxc", "xxxxd", "xxxxe"}}, + } + for _, tt := range tests { + completions := env.Completion(env.RegexpSearch("main.go", tt.re)) + diff := compareCompletionLabels(tt.want, completions.Items) + if diff != "" { + t.Errorf("%s: %s", tt.re, diff) + } + } + }) +} + +func TestCompletionDeprecation(t *testing.T) { + const files = ` +-- go.mod -- +module test.com + +go 1.16 +-- prog.go -- +package waste +// Deprecated, use newFoof +func fooFunc() bool { + return false +} + +// Deprecated +const badPi = 3.14 + +func doit() { + if fooF + panic() + x := badP +} +` + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("prog.go") + loc := env.RegexpSearch("prog.go", "if fooF") + loc.Range.Start.Character += uint32(protocol.UTF16Len([]byte("if 
fooF"))) + completions := env.Completion(loc) + diff := compareCompletionLabels([]string{"fooFunc"}, completions.Items) + if diff != "" { + t.Error(diff) + } + if completions.Items[0].Tags == nil { + t.Errorf("expected Tags to show deprecation %#v", completions.Items[0].Tags) + } + loc = env.RegexpSearch("prog.go", "= badP") + loc.Range.Start.Character += uint32(protocol.UTF16Len([]byte("= badP"))) + completions = env.Completion(loc) + diff = compareCompletionLabels([]string{"badPi"}, completions.Items) + if diff != "" { + t.Error(diff) + } + if completions.Items[0].Tags == nil { + t.Errorf("expected Tags to show deprecation %#v", completions.Items[0].Tags) + } + }) +} + +func TestUnimportedCompletion_VSCodeIssue1489(t *testing.T) { + const src = ` +-- go.mod -- +module mod.com + +go 1.14 + +-- main.go -- +package main + +import "fmt" + +func main() { + fmt.Println("a") + math.Sqr +} +` + WithOptions( + WindowsLineEndings(), + Settings{"ui.completion.usePlaceholders": true}, + ).Run(t, src, func(t *testing.T, env *Env) { + // Trigger unimported completions for the mod.com package. + env.OpenFile("main.go") + env.Await(env.DoneWithOpen()) + loc := env.RegexpSearch("main.go", "Sqr()") + completions := env.Completion(loc) + if len(completions.Items) == 0 { + t.Fatalf("no completion items") + } + env.AcceptCompletion(loc, completions.Items[0]) + env.Await(env.DoneWithChange()) + got := env.BufferText("main.go") + want := "package main\r\n\r\nimport (\r\n\t\"fmt\"\r\n\t\"math\"\r\n)\r\n\r\nfunc main() {\r\n\tfmt.Println(\"a\")\r\n\tmath.Sqrt(${1:x float64})\r\n}\r\n" + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("unimported completion (-want +got):\n%s", diff) + } + }) +} + +func TestUnimportedCompletionHasPlaceholders60269(t *testing.T) { + // We can't express this as a marker test because it doesn't support AcceptCompletion. 
+ const src = ` +-- go.mod -- +module example.com +go 1.12 + +-- a/a.go -- +package a + +var _ = b.F + +-- b/b.go -- +package b + +func F0(a, b int, c float64) {} +func F1(int, chan *string) {} +func F2[K, V any](map[K]V, chan V) {} // missing type parameters was issue #60959 +func F3[K comparable, V any](map[K]V, chan V) {} +` + WithOptions( + WindowsLineEndings(), + Settings{"ui.completion.usePlaceholders": true}, + ).Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("a/a.go") + env.Await(env.DoneWithOpen()) + + // The table lists the expected completions of b.F as they appear in Items. + const common = "package a\r\n\r\nimport \"example.com/b\"\r\n\r\nvar _ = " + for i, want := range []string{ + common + "b.F0(${1:a int}, ${2:b int}, ${3:c float64})\r\n", + common + "b.F1(${1:_ int}, ${2:_ chan *string})\r\n", + common + "b.F2[${1:K any}, ${2:V any}](${3:_ map[K]V}, ${4:_ chan V})\r\n", + common + "b.F3[${1:K comparable}, ${2:V any}](${3:_ map[K]V}, ${4:_ chan V})\r\n", + } { + loc := env.RegexpSearch("a/a.go", "b.F()") + completions := env.Completion(loc) + if len(completions.Items) == 0 { + t.Fatalf("no completion items") + } + saved := env.BufferText("a/a.go") + env.AcceptCompletion(loc, completions.Items[i]) + env.Await(env.DoneWithChange()) + got := env.BufferText("a/a.go") + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("%d: unimported completion (-want +got):\n%s", i, diff) + } + env.SetBufferContent("a/a.go", saved) // restore + } + }) +} + +func TestPackageMemberCompletionAfterSyntaxError(t *testing.T) { + // This test documents the current broken behavior due to golang/go#58833. 
+ const src = ` +-- go.mod -- +module mod.com + +go 1.14 + +-- main.go -- +package main + +import "math" + +func main() { + math.Sqrt(,0) + math.Ldex +} +` + Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + env.Await(env.DoneWithOpen()) + loc := env.RegexpSearch("main.go", "Ldex()") + completions := env.Completion(loc) + if len(completions.Items) == 0 { + t.Fatalf("no completion items") + } + env.AcceptCompletion(loc, completions.Items[0]) + env.Await(env.DoneWithChange()) + got := env.BufferText("main.go") + // The completion of math.Ldex after the syntax error on the + // previous line is not "math.Ldexp" but "math.Ldexmath.Abs". + // (In VSCode, "Abs" wrongly appears in the completion menu.) + // This is a consequence of poor error recovery in the parser + // causing "math.Ldex" to become a BadExpr. + want := "package main\n\nimport \"math\"\n\nfunc main() {\n\tmath.Sqrt(,0)\n\tmath.Ldexmath.Abs(${1:})\n}\n" + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("unimported completion (-want +got):\n%s", diff) + } + }) +} + +func TestCompleteAllFields(t *testing.T) { + // This test verifies that completion results always include all struct fields. + // See golang/go#53992. + + const src = ` +-- go.mod -- +module mod.com + +go 1.18 + +-- p/p.go -- +package p + +import ( + "fmt" + + . "net/http" + . "runtime" + . "go/types" + . "go/parser" + . "go/ast" +) + +type S struct { + a, b, c, d, e, f, g, h, i, j, k, l, m int + n, o, p, q, r, s, t, u, v, w, x, y, z int +} + +func _() { + var s S + fmt.Println(s.) +} +` + + WithOptions(Settings{ + "completionBudget": "1ns", // must be non-zero as 0 => infinity + }).Run(t, src, func(t *testing.T, env *Env) { + wantFields := make(map[string]bool) + for c := 'a'; c <= 'z'; c++ { + wantFields[string(c)] = true + } + + env.OpenFile("p/p.go") + // Make an arbitrary edit to ensure we're not hitting the cache. 
+ env.EditBuffer("p/p.go", fake.NewEdit(0, 0, 0, 0, fmt.Sprintf("// current time: %v\n", time.Now()))) + loc := env.RegexpSearch("p/p.go", `s\.()`) + completions := env.Completion(loc) + gotFields := make(map[string]bool) + for _, item := range completions.Items { + if item.Kind == protocol.FieldCompletion { + gotFields[item.Label] = true + } + } + + if diff := cmp.Diff(wantFields, gotFields); diff != "" { + t.Errorf("Completion(...) returned mismatching fields (-want +got):\n%s", diff) + } + }) +} + +func TestDefinition(t *testing.T) { + files := ` +-- go.mod -- +module mod.com + +go 1.18 +-- a_test.go -- +package foo +` + tests := []struct { + line string // the sole line in the buffer after the package statement + pat string // the pattern to search for + want []string // expected completions + }{ + {"func T", "T", []string{"TestXxx(t *testing.T)", "TestMain(m *testing.M)"}}, + {"func T()", "T", []string{"TestMain", "Test"}}, + {"func TestM", "TestM", []string{"TestMain(m *testing.M)", "TestM(t *testing.T)"}}, + {"func TestM()", "TestM", []string{"TestMain"}}, + {"func TestMi", "TestMi", []string{"TestMi(t *testing.T)"}}, + {"func TestMi()", "TestMi", nil}, + {"func TestG", "TestG", []string{"TestG(t *testing.T)"}}, + {"func TestG(", "TestG", nil}, + {"func Ben", "B", []string{"BenchmarkXxx(b *testing.B)"}}, + {"func Ben(", "Ben", []string{"Benchmark"}}, + {"func BenchmarkFoo", "BenchmarkFoo", []string{"BenchmarkFoo(b *testing.B)"}}, + {"func BenchmarkFoo(", "BenchmarkFoo", nil}, + {"func Fuz", "F", []string{"FuzzXxx(f *testing.F)"}}, + {"func Fuz(", "Fuz", []string{"Fuzz"}}, + {"func Testx", "Testx", nil}, + {"func TestMe(t *testing.T)", "TestMe", nil}, + {"func Te(t *testing.T)", "Te", []string{"TestMain", "Test"}}, + } + fname := "a_test.go" + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile(fname) + env.Await(env.DoneWithOpen()) + for _, test := range tests { + env.SetBufferContent(fname, "package foo\n"+test.line) + loc := env.RegexpSearch(fname, 
test.pat) + loc.Range.Start.Character += uint32(protocol.UTF16Len([]byte(test.pat))) + completions := env.Completion(loc) + if diff := compareCompletionLabels(test.want, completions.Items); diff != "" { + t.Error(diff) + } + } + }) +} + +// Test that completing a definition replaces source text when applied, golang/go#56852. +func TestDefinitionReplaceRange(t *testing.T) { + const mod = ` +-- go.mod -- +module mod.com + +go 1.17 +` + + tests := []struct { + name string + before, after string + }{ + { + name: "func TestMa", + before: ` +package foo_test + +func TestMa +`, + after: ` +package foo_test + +func TestMain(m *testing.M) +`, + }, + { + name: "func TestSome", + before: ` +package foo_test + +func TestSome +`, + after: ` +package foo_test + +func TestSome(t *testing.T) +`, + }, + { + name: "func Bench", + before: ` +package foo_test + +func Bench +`, + // Note: Snippet with escaped }. + after: ` +package foo_test + +func Benchmark${1:Xxx}(b *testing.B) { + $0 +\} +`, + }, + } + + Run(t, mod, func(t *testing.T, env *Env) { + env.CreateBuffer("foo_test.go", "") + + for _, tst := range tests { + tst.before = strings.Trim(tst.before, "\n") + tst.after = strings.Trim(tst.after, "\n") + env.SetBufferContent("foo_test.go", tst.before) + + loc := env.RegexpSearch("foo_test.go", tst.name) + loc.Range.Start.Character = uint32(protocol.UTF16Len([]byte(tst.name))) + completions := env.Completion(loc) + if len(completions.Items) == 0 { + t.Fatalf("no completion items") + } + + env.AcceptCompletion(loc, completions.Items[0]) + env.Await(env.DoneWithChange()) + if buf := env.BufferText("foo_test.go"); buf != tst.after { + t.Errorf("%s:incorrect completion: got %q, want %q", tst.name, buf, tst.after) + } + } + }) +} + +func TestGoWorkCompletion(t *testing.T) { + const files = ` +-- go.work -- +go 1.18 + +use ./a +use ./a/ba +use ./a/b/ +use ./dir/foo +use ./dir/foobar/ +use ./missing/ +-- a/go.mod -- +-- go.mod -- +-- a/bar/go.mod -- +-- a/b/c/d/e/f/go.mod -- +-- dir/bar -- 
+-- dir/foobar/go.mod -- +` + + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("go.work") + + tests := []struct { + re string + want []string + }{ + {`use ()\.`, []string{".", "./a", "./a/bar", "./dir/foobar"}}, + {`use \.()`, []string{"", "/a", "/a/bar", "/dir/foobar"}}, + {`use \./()`, []string{"a", "a/bar", "dir/foobar"}}, + {`use ./a()`, []string{"", "/b/c/d/e/f", "/bar"}}, + {`use ./a/b()`, []string{"/c/d/e/f", "ar"}}, + {`use ./a/b/()`, []string{`c/d/e/f`}}, + {`use ./a/ba()`, []string{"r"}}, + {`use ./dir/foo()`, []string{"bar"}}, + {`use ./dir/foobar/()`, []string{}}, + {`use ./missing/()`, []string{}}, + } + for _, tt := range tests { + completions := env.Completion(env.RegexpSearch("go.work", tt.re)) + diff := compareCompletionLabels(tt.want, completions.Items) + if diff != "" { + t.Errorf("%s: %s", tt.re, diff) + } + } + }) +} + +func TestBuiltinCompletion(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.18 +-- a.go -- +package a + +func _() { + // here +} +` + + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("a.go") + result := env.Completion(env.RegexpSearch("a.go", `// here`)) + builtins := []string{ + "any", "append", "bool", "byte", "cap", "close", + "comparable", "complex", "complex128", "complex64", "copy", "delete", + "error", "false", "float32", "float64", "imag", "int", "int16", "int32", + "int64", "int8", "len", "make", "new", "panic", "print", "println", "real", + "recover", "rune", "string", "true", "uint", "uint16", "uint32", "uint64", + "uint8", "uintptr", "nil", + } + if testenv.Go1Point() >= 21 { + builtins = append(builtins, "clear", "max", "min") + } + sort.Strings(builtins) + var got []string + + for _, item := range result.Items { + // TODO(rfindley): for flexibility, ignore zero while it is being + // implemented. Remove this if/when zero lands. 
+ if item.Label != "zero" { + got = append(got, item.Label) + } + } + sort.Strings(got) + + if diff := cmp.Diff(builtins, got); diff != "" { + t.Errorf("Completion: unexpected mismatch (-want +got):\n%s", diff) + } + }) +} + +func TestOverlayCompletion(t *testing.T) { + const files = ` +-- go.mod -- +module foo.test + +go 1.18 + +-- foo/foo.go -- +package foo + +type Foo struct{} +` + + Run(t, files, func(t *testing.T, env *Env) { + env.CreateBuffer("nodisk/nodisk.go", ` +package nodisk + +import ( + "foo.test/foo" +) + +func _() { + foo.Foo() +} +`) + list := env.Completion(env.RegexpSearch("nodisk/nodisk.go", "foo.(Foo)")) + want := []string{"Foo"} + var got []string + for _, item := range list.Items { + got = append(got, item.Label) + } + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("Completion: unexpected mismatch (-want +got):\n%s", diff) + } + }) +} + +// Fix for golang/go#60062: unimported completion included "golang.org/toolchain" results. +func TestToolchainCompletions(t *testing.T) { + const files = ` +-- go.mod -- +module foo.test/foo + +go 1.21 + +-- foo.go -- +package foo + +func _() { + os.Open +} + +func _() { + strings +} +` + + const proxy = ` +-- golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64/go.mod -- +module golang.org/toolchain +-- golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64/src/os/os.go -- +package os + +func Open() {} +-- golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64/src/strings/strings.go -- +package strings + +func Join() {} +` + + WithOptions( + ProxyFiles(proxy), + ).Run(t, files, func(t *testing.T, env *Env) { + env.RunGoCommand("mod", "download", "golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64") + env.OpenFile("foo.go") + + for _, pattern := range []string{"os.Open()", "string()"} { + loc := env.RegexpSearch("foo.go", pattern) + res := env.Completion(loc) + for _, item := range res.Items { + if strings.Contains(item.Detail, "golang.org/toolchain") { + t.Errorf("Completion(...) 
returned toolchain item %#v", item) + } + } + } + }) +} + +// show that the efficacy counters get exercised. Fortuntely a small program +// exercises them all +func TestCounters(t *testing.T) { + const files = ` +-- go.mod -- +module foo +go 1.21 +-- x.go -- +package foo + +func main() { +} + +` + WithOptions( + Modes(Default), + ).Run(t, files, func(t *testing.T, env *Env) { + cts := func() map[*counter.Counter]uint64 { + ans := make(map[*counter.Counter]uint64) + for _, c := range server.CompletionCounters { + ans[c], _ = countertest.ReadCounter(c) + } + return ans + } + before := cts() + env.OpenFile("x.go") + env.Await(env.DoneWithOpen()) + saved := env.BufferText("x.go") + lines := strings.Split(saved, "\n") + // make sure the unused counter is exercised + loc := env.RegexpSearch("x.go", "main") + loc.Range.End = loc.Range.Start + env.Completion(loc) // ignore the proposed completions + env.RegexpReplace("x.go", "main", "Main") // completions are unused + env.SetBufferContent("x.go", saved) // restore x.go + // used:no + + // all the action is after 4 characters on line 2 (counting from 0) + for i := 2; i < len(lines); i++ { + l := lines[i] + loc.Range.Start.Line = uint32(i) + for j := 4; j < len(l); j++ { + loc.Range.Start.Character = uint32(j) + loc.Range.End = loc.Range.Start + res := env.Completion(loc) + if len(res.Items) > 0 { + r := res.Items[0] + env.AcceptCompletion(loc, r) + env.SetBufferContent("x.go", saved) + } + } + } + after := cts() + for c := range after { + if after[c] <= before[c] { + t.Errorf("%s did not increase", c.Name()) + } + } + }) +} diff --git a/gopls/internal/test/integration/completion/postfix_snippet_test.go b/gopls/internal/test/integration/completion/postfix_snippet_test.go new file mode 100644 index 00000000000..884be420835 --- /dev/null +++ b/gopls/internal/test/integration/completion/postfix_snippet_test.go @@ -0,0 +1,762 @@ +// Copyright 2021 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package completion + +import ( + "strings" + "testing" + + . "golang.org/x/tools/gopls/internal/test/integration" +) + +func TestPostfixSnippetCompletion(t *testing.T) { + const mod = ` +-- go.mod -- +module mod.com + +go 1.12 +` + + cases := []struct { + name string + before, after string + allowMultipleItem bool + }{ + { + name: "sort", + before: ` +package foo + +func _() { + var foo []int + foo.sort +} +`, + after: ` +package foo + +import "sort" + +func _() { + var foo []int + sort.Slice(foo, func(i, j int) bool { + $0 +}) +} +`, + }, + { + name: "sort_renamed_sort_package", + before: ` +package foo + +import blahsort "sort" + +var j int + +func _() { + var foo []int + foo.sort +} +`, + after: ` +package foo + +import blahsort "sort" + +var j int + +func _() { + var foo []int + blahsort.Slice(foo, func(i, j2 int) bool { + $0 +}) +} +`, + }, + { + name: "last", + before: ` +package foo + +func _() { + var s struct { i []int } + s.i.last +} +`, + after: ` +package foo + +func _() { + var s struct { i []int } + s.i[len(s.i)-1] +} +`, + }, + { + name: "reverse", + before: ` +package foo + +func _() { + var foo []int + foo.reverse +} +`, + after: ` +package foo + +import "slices" + +func _() { + var foo []int + slices.Reverse(foo) +} +`, + }, + { + name: "slice_range", + before: ` +package foo + +func _() { + type myThing struct{} + var foo []myThing + foo.range +} +`, + after: ` +package foo + +func _() { + type myThing struct{} + var foo []myThing + for ${1:}, ${2:} := range foo { + $0 +} +} +`, + }, + { + name: "append_stmt", + before: ` +package foo + +func _() { + var foo []int + foo.append +} +`, + after: ` +package foo + +func _() { + var foo []int + foo = append(foo, $0) +} +`, + }, + { + name: "append_expr", + before: ` +package foo + +func _() { + var foo []int + var _ []int = foo.append +} +`, + after: ` +package foo + +func _() { + var foo []int 
+ var _ []int = append(foo, $0) +} +`, + }, + { + name: "slice_copy", + before: ` +package foo + +func _() { + var foo []int + foo.copy +} +`, + after: ` +package foo + +func _() { + var foo []int + fooCopy := make([]int, len(foo)) +copy(fooCopy, foo) + +} +`, + }, + { + name: "map_range", + before: ` +package foo + +func _() { + var foo map[string]int + foo.range +} +`, + after: ` +package foo + +func _() { + var foo map[string]int + for ${1:}, ${2:} := range foo { + $0 +} +} +`, + }, + { + name: "map_clear", + before: ` +package foo + +func _() { + var foo map[string]int + foo.clear +} +`, + after: ` +package foo + +func _() { + var foo map[string]int + for k := range foo { + delete(foo, k) +} + +} +`, + }, + { + name: "map_keys", + before: ` +package foo + +func _() { + var foo map[string]int + foo.keys +} +`, + after: ` +package foo + +func _() { + var foo map[string]int + keys := make([]string, 0, len(foo)) +for k := range foo { + keys = append(keys, k) +} + +} +`, + }, + { + name: "channel_range", + before: ` +package foo + +func _() { + foo := make(chan int) + foo.range +} +`, + after: ` +package foo + +func _() { + foo := make(chan int) + for ${1:} := range foo { + $0 +} +} +`, + }, + { + name: "var", + before: ` +package foo + +func foo() (int, error) { return 0, nil } + +func _() { + foo().var +} +`, + after: ` +package foo + +func foo() (int, error) { return 0, nil } + +func _() { + ${1:}, ${2:} := foo() +} +`, + allowMultipleItem: true, + }, + { + name: "var_single_value", + before: ` +package foo + +func foo() error { return nil } + +func _() { + foo().var +} +`, + allowMultipleItem: true, + after: ` +package foo + +func foo() error { return nil } + +func _() { + ${1:} := foo() +} +`, + }, + { + name: "var_same_type", + before: ` +package foo + +func foo() (int, int) { return 0, 0 } + +func _() { + foo().var +} +`, + after: ` +package foo + +func foo() (int, int) { return 0, 0 } + +func _() { + ${1:}, ${2:} := foo() +} +`, + }, + { + name: 
"print_scalar", + before: ` +package foo + +func _() { + var foo int + foo.print +} +`, + after: ` +package foo + +import "fmt" + +func _() { + var foo int + fmt.Printf("foo: %v\n", foo) +} +`, + }, + { + name: "print_multi", + before: ` +package foo + +func foo() (int, error) { return 0, nil } + +func _() { + foo().print +} +`, + after: ` +package foo + +import "fmt" + +func foo() (int, error) { return 0, nil } + +func _() { + fmt.Println(foo()) +} +`, + }, + { + name: "string split", + before: ` +package foo + +func foo() []string { + x := "test" + return x.split +}`, + after: ` +package foo + +import "strings" + +func foo() []string { + x := "test" + return strings.Split(x, "$0") +}`, + }, + { + name: "string slice join", + before: ` +package foo + +func foo() string { + x := []string{"a", "test"} + return x.join +}`, + after: ` +package foo + +import "strings" + +func foo() string { + x := []string{"a", "test"} + return strings.Join(x, "$0") +}`, + }, + { + name: "if not nil interface", + before: ` +package foo + +func _() { + var foo error + foo.ifnotnil +} +`, + after: ` +package foo + +func _() { + var foo error + if foo != nil { + $0 +} +} +`, + }, + { + name: "if not nil pointer", + before: ` +package foo + +func _() { + var foo *int + foo.ifnotnil +} +`, + after: ` +package foo + +func _() { + var foo *int + if foo != nil { + $0 +} +} +`, + }, + { + name: "if not nil slice", + before: ` +package foo + +func _() { + var foo []int + foo.ifnotnil +} +`, + after: ` +package foo + +func _() { + var foo []int + if foo != nil { + $0 +} +} +`, + }, + { + name: "if not nil map", + before: ` +package foo + +func _() { + var foo map[string]any + foo.ifnotnil +} +`, + after: ` +package foo + +func _() { + var foo map[string]any + if foo != nil { + $0 +} +} +`, + }, + { + name: "if not nil channel", + before: ` +package foo + +func _() { + var foo chan int + foo.ifnotnil +} +`, + after: ` +package foo + +func _() { + var foo chan int + if foo != nil { + $0 +} +} +`, + 
}, + { + name: "if not nil function", + before: ` +package foo + +func _() { + var foo func() + foo.ifnotnil +} +`, + after: ` +package foo + +func _() { + var foo func() + if foo != nil { + $0 +} +} +`, + }, + { + name: "slice_len", + before: ` +package foo + +func _() { + var foo []int + foo.len +} +`, + after: ` +package foo + +func _() { + var foo []int + len(foo) +} +`, + }, + { + name: "map_len", + before: ` +package foo + +func _() { + var foo map[string]int + foo.len +} +`, + after: ` +package foo + +func _() { + var foo map[string]int + len(foo) +} +`, + }, + { + name: "slice_for", + allowMultipleItem: true, + before: ` +package foo + +func _() { + var foo []int + foo.for +} +`, + after: ` +package foo + +func _() { + var foo []int + for ${1:} := range foo { + $0 +} +} +`, + }, + { + name: "map_for", + allowMultipleItem: true, + before: ` +package foo + +func _() { + var foo map[string]int + foo.for +} +`, + after: ` +package foo + +func _() { + var foo map[string]int + for ${1:} := range foo { + $0 +} +} +`, + }, + { + name: "chan_for", + allowMultipleItem: true, + before: ` +package foo + +func _() { + var foo chan int + foo.for +} +`, + after: ` +package foo + +func _() { + var foo chan int + for ${1:} := range foo { + $0 +} +} +`, + }, + { + name: "slice_forr", + before: ` +package foo + +func _() { + var foo []int + foo.forr +} +`, + after: ` +package foo + +func _() { + var foo []int + for ${1:}, ${2:} := range foo { + $0 +} +} +`, + }, + { + name: "slice_forr", + before: ` +package foo + +func _() { + var foo []int + foo.forr +} +`, + after: ` +package foo + +func _() { + var foo []int + for ${1:}, ${2:} := range foo { + $0 +} +} +`, + }, + { + name: "map_forr", + before: ` +package foo + +func _() { + var foo map[string]int + foo.forr +} +`, + after: ` +package foo + +func _() { + var foo map[string]int + for ${1:}, ${2:} := range foo { + $0 +} +} +`, + }, + } + + r := WithOptions( + Settings{ + "experimentalPostfixCompletions": true, + }, + ) + 
r.Run(t, mod, func(t *testing.T, env *Env) { + env.CreateBuffer("foo.go", "") + + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + c.before = strings.Trim(c.before, "\n") + c.after = strings.Trim(c.after, "\n") + + env.SetBufferContent("foo.go", c.before) + + loc := env.RegexpSearch("foo.go", "\n}") + completions := env.Completion(loc) + if len(completions.Items) < 1 { + t.Fatalf("expected at least one completion, got %v", completions.Items) + } + if !c.allowMultipleItem && len(completions.Items) > 1 { + t.Fatalf("expected one completion, got %v", completions.Items) + } + + env.AcceptCompletion(loc, completions.Items[0]) + + if buf := env.BufferText("foo.go"); buf != c.after { + t.Errorf("\nGOT:\n%s\nEXPECTED:\n%s", buf, c.after) + } + }) + } + }) +} diff --git a/gopls/internal/test/integration/debug/debug_test.go b/gopls/internal/test/integration/debug/debug_test.go new file mode 100644 index 00000000000..255a8e1b90d --- /dev/null +++ b/gopls/internal/test/integration/debug/debug_test.go @@ -0,0 +1,101 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package debug + +import ( + "context" + "encoding/json" + "io" + "net/http" + "strings" + "testing" + + "golang.org/x/tools/gopls/internal/hooks" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/util/bug" +) + +func TestMain(m *testing.M) { + Main(m, hooks.Options) +} + +func TestBugNotification(t *testing.T) { + // Verify that a properly configured session gets notified of a bug on the + // server. 
+ WithOptions( + Modes(Default), // must be in-process to receive the bug report below + Settings{"showBugReports": true}, + ).Run(t, "", func(t *testing.T, env *Env) { + const desc = "got a bug" + bug.Report(desc) + env.Await(ShownMessage(desc)) + }) +} + +// TestStartDebugging executes a gopls.start_debugging command to +// start the internal web server. +func TestStartDebugging(t *testing.T) { + WithOptions( + Modes(Default|Experimental), // doesn't work in Forwarded mode + ).Run(t, "", func(t *testing.T, env *Env) { + // Start a debugging server. + res, err := startDebugging(env.Ctx, env.Editor.Server, &command.DebuggingArgs{ + Addr: "", // any free port + }) + if err != nil { + t.Fatalf("startDebugging: %v", err) + } + + // Assert that the server requested that the + // client show the debug page in a browser. + debugURL := res.URLs[0] + env.Await(ShownDocument(debugURL)) + + // Send a request to the debug server and ensure it responds. + resp, err := http.Get(debugURL) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + data, err := io.ReadAll(resp.Body) + if err != nil { + t.Fatalf("reading HTTP response body: %v", err) + } + const want = "<title>Gopls" + if !strings.Contains(string(data), want) { + t.Errorf("GET %s response does not contain %q: <<%s>>", debugURL, want, data) + } + }) +} + +// startDebugging starts a debugging server. +// TODO(adonovan): move into command package? +func startDebugging(ctx context.Context, server protocol.Server, args *command.DebuggingArgs) (*command.DebuggingResult, error) { + rawArgs, err := command.MarshalArgs(args) + if err != nil { + return nil, err + } + res0, err := server.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ + Command: command.StartDebugging.ID(), + Arguments: rawArgs, + }) + if err != nil { + return nil, err + } + // res0 is the result of a schemaless (map[string]any) JSON decoding. + // Re-encode and decode into the correct Go struct type. 
+ // TODO(adonovan): fix (*serverDispatcher).ExecuteCommand. + data, err := json.Marshal(res0) + if err != nil { + return nil, err + } + var res *command.DebuggingResult + if err := json.Unmarshal(data, &res); err != nil { + return nil, err + } + return res, nil +} diff --git a/gopls/internal/regtest/diagnostics/analysis_test.go b/gopls/internal/test/integration/diagnostics/analysis_test.go similarity index 94% rename from gopls/internal/regtest/diagnostics/analysis_test.go rename to gopls/internal/test/integration/diagnostics/analysis_test.go index 190f5777258..8cb86f8f735 100644 --- a/gopls/internal/regtest/diagnostics/analysis_test.go +++ b/gopls/internal/test/integration/diagnostics/analysis_test.go @@ -8,9 +8,9 @@ import ( "fmt" "testing" - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/protocol" + . "golang.org/x/tools/gopls/internal/test/integration" ) // Test for the timeformat analyzer, following golang/vscode-go#2406. diff --git a/gopls/internal/regtest/diagnostics/builtin_test.go b/gopls/internal/test/integration/diagnostics/builtin_test.go similarity index 92% rename from gopls/internal/regtest/diagnostics/builtin_test.go rename to gopls/internal/test/integration/diagnostics/builtin_test.go index 935a7f9b831..d6828a0df5c 100644 --- a/gopls/internal/regtest/diagnostics/builtin_test.go +++ b/gopls/internal/test/integration/diagnostics/builtin_test.go @@ -8,7 +8,7 @@ import ( "strings" "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + . 
"golang.org/x/tools/gopls/internal/test/integration" ) func TestIssue44866(t *testing.T) { diff --git a/gopls/internal/regtest/diagnostics/diagnostics_test.go b/gopls/internal/test/integration/diagnostics/diagnostics_test.go similarity index 93% rename from gopls/internal/regtest/diagnostics/diagnostics_test.go rename to gopls/internal/test/integration/diagnostics/diagnostics_test.go index 5849c9b5b0d..89c8a14bd37 100644 --- a/gopls/internal/regtest/diagnostics/diagnostics_test.go +++ b/gopls/internal/test/integration/diagnostics/diagnostics_test.go @@ -10,12 +10,13 @@ import ( "os/exec" "testing" - "golang.org/x/tools/gopls/internal/bug" "golang.org/x/tools/gopls/internal/hooks" - "golang.org/x/tools/gopls/internal/lsp" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/server" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/integration/fake" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/goversion" "golang.org/x/tools/internal/testenv" ) @@ -553,8 +554,14 @@ func f() { Run(t, noModule, func(t *testing.T, env *Env) { env.OpenFile("a.go") env.AfterChange( - // Expect the adHocPackagesWarning. - OutstandingWork(lsp.WorkspaceLoadFailure, "outside of a module"), + // AdHoc views are not critical errors, but their missing import + // diagnostics should specifically mention GOROOT or GOPATH (and not + // modules). + NoOutstandingWork(IgnoreTelemetryPromptWork), + Diagnostics( + env.AtRegexp("a.go", `"mod.com`), + WithMessage("GOROOT or GOPATH"), + ), ) // Deleting the import dismisses the warning. 
env.RegexpReplace("a.go", `import "mod.com/hello"`, "") @@ -1161,8 +1168,9 @@ func main() {} }) } -// This tests the functionality of the "limitWorkspaceScope" -func TestLimitWorkspaceScope(t *testing.T) { +// This test verifies that the workspace scope is effectively limited to the +// workspace folder, if expandWorkspaceToModule is set. +func TestExpandWorkspaceToModule(t *testing.T) { const mod = ` -- go.mod -- module mod.com @@ -1198,6 +1206,55 @@ func main() { }) } +// This test verifies that the workspace scope is effectively limited to the +// set of active modules. +// +// We should not get diagnostics or file watching patterns for paths outside of +// the active workspace. +func TestWorkspaceModules(t *testing.T) { + const mod = ` +-- go.work -- +go 1.18 + +use a +-- a/go.mod -- +module mod.com/a + +go 1.12 +-- a/a.go -- +package a + +func _() { + var x int +} +-- b/go.mod -- +module mod.com/b + +go 1.18 +` + WithOptions( + Settings{ + "subdirWatchPatterns": "on", + }, + ).Run(t, mod, func(t *testing.T, env *Env) { + env.OpenFile("a/a.go") + // Writing this file may cause the snapshot to 'know' about the file b, but + // that shouldn't cause it to watch the 'b' directory. + env.WriteWorkspaceFile("b/b.go", `package b + +func _() { + var x int +} +`) + env.AfterChange( + Diagnostics(env.AtRegexp("a/a.go", "x")), + NoDiagnostics(ForFile("b/b.go")), + FileWatchMatching("a$"), + NoFileWatchMatching("b$"), + ) + }) +} + func TestSimplifyCompositeLitDiagnostic(t *testing.T) { const files = ` -- go.mod -- @@ -1331,7 +1388,7 @@ func _() { func TestEnableAllExperiments(t *testing.T) { // Before the oldest supported Go version, gopls sends a warning to upgrade // Go, which fails the expectation below. - testenv.NeedsGo1Point(t, lsp.OldestSupportedGoVersion()) + testenv.NeedsGo1Point(t, goversion.OldestSupported()) const mod = ` -- go.mod -- @@ -1359,9 +1416,6 @@ func b(c bytes.Buffer) { } func TestSwig(t *testing.T) { - // This is fixed in Go 1.17, but not earlier. 
- testenv.NeedsGo1Point(t, 17) - if _, err := exec.LookPath("swig"); err != nil { t.Skip("skipping test: swig not available") } @@ -1488,7 +1542,7 @@ package main Run(t, pkg, func(t *testing.T, env *Env) { env.OpenFile("go.mod") env.AfterChange( - OutstandingWork(lsp.WorkspaceLoadFailure, "unknown directive"), + OutstandingWork(server.WorkspaceLoadFailure, "unknown directive"), ) env.EditBuffer("go.mod", fake.NewEdit(0, 0, 3, 0, `module mod.com @@ -1498,7 +1552,7 @@ go 1.hello // they are saved. env.SaveBufferWithoutActions("go.mod") env.AfterChange( - OutstandingWork(lsp.WorkspaceLoadFailure, "invalid go version"), + OutstandingWork(server.WorkspaceLoadFailure, "invalid go version"), ) env.RegexpReplace("go.mod", "go 1.hello", "go 1.12") env.SaveBufferWithoutActions("go.mod") @@ -1618,12 +1672,17 @@ const B = a.B env.OpenFile("b/b.go") env.AfterChange( // The Go command sometimes tells us about only one of the import cycle - // errors below. For robustness of this test, succeed if we get either. + // errors below. Also, sometimes we get an error during type checking + // instead of during list, due to missing metadata. This is likely due to + // a race. + // For robustness of this test, succeed if we get any reasonable error. // // TODO(golang/go#52904): we should get *both* of these errors. + // TODO(golang/go#64899): we should always get an import cycle error + // rather than a missing metadata error. AnyOf( - Diagnostics(env.AtRegexp("a/a.go", `"mod.test/b"`), WithMessage("import cycle")), - Diagnostics(env.AtRegexp("b/b.go", `"mod.test/a"`), WithMessage("import cycle")), + Diagnostics(env.AtRegexp("a/a.go", `"mod.test/b"`)), + Diagnostics(env.AtRegexp("b/b.go", `"mod.test/a"`)), ), ) env.RegexpReplace("b/b.go", `const B = a\.B`, "") @@ -1721,9 +1780,13 @@ func helloHelper() {} // Expect a diagnostic in a nested module. 
env.OpenFile("nested/hello/hello.go") env.AfterChange( - Diagnostics(env.AtRegexp("nested/hello/hello.go", "helloHelper")), - Diagnostics(env.AtRegexp("nested/hello/hello.go", "package (hello)"), WithMessage("not included in your workspace")), + NoDiagnostics(ForFile("nested/hello/hello.go")), ) + loc := env.GoToDefinition(env.RegexpSearch("nested/hello/hello.go", "helloHelper")) + want := "nested/hello/hello_helper.go" + if got := env.Sandbox.Workdir.URIToPath(loc.URI); got != want { + t.Errorf("Definition() returned %q, want %q", got, want) + } }) } @@ -1849,7 +1912,6 @@ func main() {} } func TestLangVersion(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // Requires types.Config.GoVersion, new in 1.18. const files = ` -- go.mod -- module mod.com @@ -1873,7 +1935,6 @@ const C = 0b10 } func TestNoQuickFixForUndeclaredConstraint(t *testing.T) { - testenv.NeedsGo1Point(t, 18) const files = ` -- go.mod -- module mod.com @@ -1900,7 +1961,6 @@ func F[T C](_ T) { } func TestEditGoDirective(t *testing.T) { - testenv.NeedsGo1Point(t, 18) const files = ` -- go.mod -- module mod.com @@ -1928,7 +1988,6 @@ func F[T any](_ T) { } func TestEditGoDirectiveWorkspace(t *testing.T) { - testenv.NeedsGo1Point(t, 18) const files = ` -- go.mod -- module mod.com @@ -2114,6 +2173,18 @@ func (B) New() {} } func TestDiagnosticsOnlyOnSaveFile(t *testing.T) { + // This functionality is broken because the new orphaned file diagnostics + // logic wants to publish diagnostics for changed files, independent of any + // snapshot diagnostics pass, and this causes stale diagnostics to be + // invalidated. + // + // We can fix this behavior more correctly by also honoring the + // diagnosticsTrigger in DiagnoseOrphanedFiles, but that would require + // resolving configuration that is independent of the snapshot. In other + // words, we need to figure out which cache.Folder.Options applies to the + // changed file, even if it does not have a snapshot. 
+ t.Skip("temporary skip for golang/go#57979: revisit after zero-config logic is in place") + const onlyMod = ` -- go.mod -- module mod.com diff --git a/gopls/internal/regtest/diagnostics/golist_test.go b/gopls/internal/test/integration/diagnostics/golist_test.go similarity index 81% rename from gopls/internal/regtest/diagnostics/golist_test.go rename to gopls/internal/test/integration/diagnostics/golist_test.go index 85b35be024f..8c11246d3e1 100644 --- a/gopls/internal/regtest/diagnostics/golist_test.go +++ b/gopls/internal/test/integration/diagnostics/golist_test.go @@ -7,8 +7,8 @@ package diagnostics import ( "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/cache" + . "golang.org/x/tools/gopls/internal/test/integration" "golang.org/x/tools/internal/testenv" ) @@ -54,16 +54,16 @@ const Q = p.P + 1 InitialWorkspaceLoad, Diagnostics( env.AtRegexp("a/a.go", "import\n()"), - FromSource(string(source.ParseError)), + FromSource(string(cache.ParseError)), ), Diagnostics( AtPosition("c/c.go", 0, 0), - FromSource(string(source.ListError)), + FromSource(string(cache.ListError)), WithMessage("may indicate failure to perform cgo processing"), ), Diagnostics( env.AtRegexp("p/p.go", `"a.com/q"`), - FromSource(string(source.ListError)), + FromSource(string(cache.ListError)), WithMessage("import cycle not allowed"), ), ) diff --git a/gopls/internal/regtest/diagnostics/invalidation_test.go b/gopls/internal/test/integration/diagnostics/invalidation_test.go similarity index 85% rename from gopls/internal/regtest/diagnostics/invalidation_test.go rename to gopls/internal/test/integration/diagnostics/invalidation_test.go index f5097f32d77..395e7619c57 100644 --- a/gopls/internal/regtest/diagnostics/invalidation_test.go +++ b/gopls/internal/test/integration/diagnostics/invalidation_test.go @@ -8,8 +8,8 @@ import ( "fmt" "testing" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . 
"golang.org/x/tools/gopls/internal/lsp/regtest" + "golang.org/x/tools/gopls/internal/protocol" + . "golang.org/x/tools/gopls/internal/test/integration" ) // Test for golang/go#50267: diagnostics should be re-sent after a file is @@ -52,8 +52,8 @@ func _() { }) } -// Test for the "chattyDiagnostics" setting: we should get re-published -// diagnostics after every file change, even if diagnostics did not change. +// Test for the "chatty" diagnostics: gopls should re-send diagnostics for +// changed files after every file change, even if diagnostics did not change. func TestChattyDiagnostics(t *testing.T) { const files = ` -- go.mod -- @@ -70,12 +70,7 @@ func _() { // Irrelevant comment #0 ` - WithOptions( - Settings{ - "chattyDiagnostics": true, - }, - ).Run(t, files, func(_ *testing.T, env *Env) { // Create a new workspace-level directory and empty file. - + Run(t, files, func(_ *testing.T, env *Env) { // Create a new workspace-level directory and empty file. env.OpenFile("main.go") var d protocol.PublishDiagnosticsParams env.AfterChange( diff --git a/gopls/internal/test/integration/diagnostics/undeclared_test.go b/gopls/internal/test/integration/diagnostics/undeclared_test.go new file mode 100644 index 00000000000..5579c0752d7 --- /dev/null +++ b/gopls/internal/test/integration/diagnostics/undeclared_test.go @@ -0,0 +1,73 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package diagnostics + +import ( + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + . 
"golang.org/x/tools/gopls/internal/test/integration" +) + +func TestUndeclaredDiagnostics(t *testing.T) { + src := ` +-- go.mod -- +module mod.com + +go 1.12 +-- a/a.go -- +package a + +func _() int { + return x +} +-- b/b.go -- +package b + +func _() int { + var y int + y = y + return y +} +` + Run(t, src, func(t *testing.T, env *Env) { + isUnnecessary := func(diag protocol.Diagnostic) bool { + for _, tag := range diag.Tags { + if tag == protocol.Unnecessary { + return true + } + } + return false + } + + // 'x' is undeclared, but still necessary. + env.OpenFile("a/a.go") + var adiags protocol.PublishDiagnosticsParams + env.AfterChange( + Diagnostics(env.AtRegexp("a/a.go", "x")), + ReadDiagnostics("a/a.go", &adiags), + ) + if got := len(adiags.Diagnostics); got != 1 { + t.Errorf("len(Diagnostics) = %d, want 1", got) + } + if diag := adiags.Diagnostics[0]; isUnnecessary(diag) { + t.Errorf("%v tagged unnecessary, want necessary", diag) + } + + // 'y = y' is pointless, and should be detected as unnecessary. + env.OpenFile("b/b.go") + var bdiags protocol.PublishDiagnosticsParams + env.AfterChange( + Diagnostics(env.AtRegexp("b/b.go", "y = y")), + ReadDiagnostics("b/b.go", &bdiags), + ) + if got := len(bdiags.Diagnostics); got != 1 { + t.Errorf("len(Diagnostics) = %d, want 1", got) + } + if diag := bdiags.Diagnostics[0]; !isUnnecessary(diag) { + t.Errorf("%v tagged necessary, want unnecessary", diag) + } + }) +} diff --git a/gopls/internal/test/integration/doc.go b/gopls/internal/test/integration/doc.go new file mode 100644 index 00000000000..5599564bb25 --- /dev/null +++ b/gopls/internal/test/integration/doc.go @@ -0,0 +1,156 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package integration provides a framework for writing integration tests of gopls. 
+// +// The behaviors that matter to users, and the scenarios they +// typically describe in bug report, are usually expressed in terms of +// editor interactions. For example: "When I open my editor in this +// directory, navigate to this file, and change this line, I get a +// diagnostic that doesn't make sense". The integration package +// provides an API for gopls maintainers to express these types of +// user interactions in ordinary Go tests, validate them, and run them +// in a variety of execution modes. +// +// # Test package setup +// +// The integration test package uses a couple of uncommon patterns to reduce +// boilerplate in test bodies. First, it is intended to be imported as "." so +// that helpers do not need to be qualified. Second, it requires some setup +// that is currently implemented in the integration.Main function, which must be +// invoked by TestMain. Therefore, a minimal integration testing package looks +// like this: +// +// package feature +// +// import ( +// "fmt" +// "testing" +// +// "golang.org/x/tools/gopls/internal/hooks" +// . "golang.org/x/tools/gopls/internal/test/integration" +// ) +// +// func TestMain(m *testing.M) { +// Main(m, hooks.Options) +// } +// +// # Writing a simple integration test +// +// To run an integration test use the integration.Run function, which accepts a +// txtar-encoded archive defining the initial workspace state. This function +// sets up the workspace in a temporary directory, creates a fake text editor, +// starts gopls, and initializes an LSP session. It then invokes the provided +// test function with an *Env encapsulating the newly created +// environment. Because gopls may be run in various modes (as a sidecar or +// daemon process, with different settings), the test runner may perform this +// process multiple times, re-running the test function each time with a new +// environment. 
+// +// func TestOpenFile(t *testing.T) { +// const files = ` +// -- go.mod -- +// module mod.com +// +// go 1.12 +// -- foo.go -- +// package foo +// ` +// Run(t, files, func(t *testing.T, env *Env) { +// env.OpenFile("foo.go") +// }) +// } +// +// # Configuring integration test execution +// +// The integration package exposes several options that affect the setup process +// described above. To use these options, use the WithOptions function: +// +// WithOptions(opts...).Run(...) +// +// See options.go for a full list of available options. +// +// # Operating on editor state +// +// To operate on editor state within the test body, the Env type provides +// access to the workspace directory (Env.SandBox), text editor (Env.Editor), +// LSP server (Env.Server), and 'awaiter' (Env.Awaiter). +// +// In most cases, operations on these primitive building blocks of the +// integration test environment expect a Context (which should be a child of +// env.Ctx), and return an error. To avoid boilerplate, the Env exposes a set +// of wrappers in wrappers.go for use in scripting: +// +// env.CreateBuffer("c/c.go", "") +// env.EditBuffer("c/c.go", editor.Edit{ +// Text: `package c`, +// }) +// +// These wrappers thread through Env.Ctx, and call t.Fatal on any errors. +// +// # Expressing expectations +// +// The general pattern for an integration test is to script interactions with the +// fake editor and sandbox, and assert that gopls behaves correctly after each +// state change. Unfortunately, this is complicated by the fact that state +// changes are communicated to gopls via unidirectional client->server +// notifications (didOpen, didChange, etc.), and resulting gopls behavior such +// as diagnostics, logs, or messages is communicated back via server->client +// notifications. Therefore, within integration tests we must be able to say "do +// this, and then eventually gopls should do that". 
To achieve this, the +// integration package provides a framework for expressing conditions that must +// eventually be met, in terms of the Expectation type. +// +// To express the assertion that "eventually gopls must meet these +// expectations", use env.Await(...): +// +// env.RegexpReplace("x/x.go", `package x`, `package main`) +// env.Await(env.DiagnosticAtRegexp("x/main.go", `fmt`)) +// +// Await evaluates the provided expectations atomically, whenever the client +// receives a state-changing notification from gopls. See expectation.go for a +// full list of available expectations. +// +// A problem with this model is that if gopls never meets the provided +// expectations, the test runner will hang until the test timeout +// (which defaults to 10m). There are two ways to work around this +// poor behavior: +// +// 1. Use a precondition to define precisely when we expect conditions to be +// met. Gopls provides the OnceMet(precondition, expectations...) pattern +// to express ("once this precondition is met, the following expectations +// must all hold"). To instrument preconditions, gopls uses verbose +// progress notifications to inform the client about ongoing work (see +// CompletedWork). The most common precondition is to wait for gopls to be +// done processing all change notifications, for which the integration package +// provides the AfterChange helper. For example: +// +// // We expect diagnostics to be cleared after gopls is done processing the +// // didSave notification. +// env.SaveBuffer("a/go.mod") +// env.AfterChange(EmptyDiagnostics("a/go.mod")) +// +// 2. Set a shorter timeout during development, if you expect to be breaking +// tests. By setting the environment variable GOPLS_INTEGRATION_TEST_TIMEOUT=5s, +// integration tests will time out after 5 seconds. +// +// # Tips & Tricks +// +// Here are some tips and tricks for working with integration tests: +// +// 1. 
Set the environment variable GOPLS_INTEGRATION_TEST_TIMEOUT=5s during development. +// 2. Run tests with -short. This will only run integration tests in the +// default gopls execution mode. +// 3. Use capture groups to narrow regexp positions. All regular-expression +// based positions (such as DiagnosticAtRegexp) will match the position of +// the first capture group, if any are provided. This can be used to +// identify a specific position in the code for a pattern that may occur in +// multiple places. For example `var (mu) sync.Mutex` matches the position +// of "mu" within the variable declaration. +// 4. Read diagnostics into a variable to implement more complicated +// assertions about diagnostic state in the editor. To do this, use the +// pattern OnceMet(precondition, ReadDiagnostics("file.go", &d)) to capture +// the current diagnostics as soon as the precondition is met. This is +// preferable to accessing the diagnostics directly, as it avoids races. +package integration diff --git a/gopls/internal/lsp/regtest/env.go b/gopls/internal/test/integration/env.go similarity index 94% rename from gopls/internal/lsp/regtest/env.go rename to gopls/internal/test/integration/env.go index 344e5e7a9a3..8dab7d72873 100644 --- a/gopls/internal/lsp/regtest/env.go +++ b/gopls/internal/test/integration/env.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package regtest +package integration import ( "context" @@ -11,8 +11,8 @@ import ( "sync" "testing" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/integration/fake" "golang.org/x/tools/internal/jsonrpc2/servertest" ) @@ -98,17 +98,6 @@ type State struct { work map[protocol.ProgressToken]*workProgress } -// outstandingWork counts started but not complete work items by title. 
-func (s State) outstandingWork() map[string]uint64 { - outstanding := make(map[string]uint64) - for _, work := range s.work { - if !work.complete { - outstanding[work.title]++ - } - } - return outstanding -} - // completedWork counts complete work items by title. func (s State) completedWork() map[string]uint64 { completed := make(map[string]uint64) @@ -152,9 +141,9 @@ func (s State) String() string { b.WriteString("\n") b.WriteString("#### diagnostics:\n") for name, params := range s.diagnostics { - fmt.Fprintf(&b, "\t%s (version %d):\n", name, int(params.Version)) + fmt.Fprintf(&b, "\t%s (version %d):\n", name, params.Version) for _, d := range params.Diagnostics { - fmt.Fprintf(&b, "\t\t(%d, %d) [%s]: %s\n", int(d.Range.Start.Line), int(d.Range.Start.Character), d.Source, d.Message) + fmt.Fprintf(&b, "\t\t%d:%d [%s]: %s\n", d.Range.Start.Line, d.Range.Start.Character, d.Source, d.Message) } } b.WriteString("\n") @@ -311,9 +300,9 @@ func (a *Awaiter) checkConditionsLocked() { } } -// takeDocumentChanges returns any accumulated document changes (from +// TakeDocumentChanges returns any accumulated document changes (from // server ApplyEdit RPC downcalls) and resets the list. -func (a *Awaiter) takeDocumentChanges() []protocol.DocumentChanges { +func (a *Awaiter) TakeDocumentChanges() []protocol.DocumentChanges { a.mu.Lock() defer a.mu.Unlock() diff --git a/gopls/internal/lsp/regtest/env_test.go b/gopls/internal/test/integration/env_test.go similarity index 96% rename from gopls/internal/lsp/regtest/env_test.go rename to gopls/internal/test/integration/env_test.go index e334faa905c..02bacd0f3db 100644 --- a/gopls/internal/lsp/regtest/env_test.go +++ b/gopls/internal/test/integration/env_test.go @@ -2,14 +2,14 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-package regtest +package integration import ( "context" "encoding/json" "testing" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" ) func TestProgressUpdating(t *testing.T) { diff --git a/gopls/internal/lsp/regtest/expectation.go b/gopls/internal/test/integration/expectation.go similarity index 91% rename from gopls/internal/lsp/regtest/expectation.go rename to gopls/internal/test/integration/expectation.go index 7238eb0e832..b749800f675 100644 --- a/gopls/internal/lsp/regtest/expectation.go +++ b/gopls/internal/test/integration/expectation.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package regtest +package integration import ( "fmt" @@ -11,14 +11,14 @@ import ( "strings" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/server" ) var ( // InitialWorkspaceLoad is an expectation that the workspace initial load has // completed. It is verified via workdone reporting. - InitialWorkspaceLoad = CompletedWork(lsp.DiagnosticWorkTitle(lsp.FromInitialWorkspaceLoad), 1, false) + InitialWorkspaceLoad = CompletedWork(server.DiagnosticWorkTitle(server.FromInitialWorkspaceLoad), 1, false) ) // A Verdict is the result of checking an expectation against the current @@ -229,6 +229,23 @@ func ShownDocument(uri protocol.URI) Expectation { } } +// ShownDocuments is an expectation that appends each showDocument +// request into the provided slice, whenever it is evaluated. +// +// It can be used in combination with OnceMet or AfterChange to +// capture the set of showDocument requests when other expectations +// are satisfied. +func ShownDocuments(into *[]*protocol.ShowDocumentParams) Expectation { + check := func(s State) Verdict { + *into = append(*into, s.showDocument...) 
+ return Met + } + return Expectation{ + Check: check, + Description: "read shown documents", + } +} + // NoShownMessage asserts that the editor has not received a ShowMessage. func NoShownMessage(subString string) Expectation { check := func(s State) Verdict { @@ -294,16 +311,16 @@ func ShownMessageRequest(messageRegexp string) Expectation { // track of func (e *Env) DoneDiagnosingChanges() Expectation { stats := e.Editor.Stats() - statsBySource := map[lsp.ModificationSource]uint64{ - lsp.FromDidOpen: stats.DidOpen, - lsp.FromDidChange: stats.DidChange, - lsp.FromDidSave: stats.DidSave, - lsp.FromDidChangeWatchedFiles: stats.DidChangeWatchedFiles, - lsp.FromDidClose: stats.DidClose, - lsp.FromDidChangeConfiguration: stats.DidChangeConfiguration, + statsBySource := map[server.ModificationSource]uint64{ + server.FromDidOpen: stats.DidOpen, + server.FromDidChange: stats.DidChange, + server.FromDidSave: stats.DidSave, + server.FromDidChangeWatchedFiles: stats.DidChangeWatchedFiles, + server.FromDidClose: stats.DidClose, + server.FromDidChangeConfiguration: stats.DidChangeConfiguration, } - var expected []lsp.ModificationSource + var expected []server.ModificationSource for k, v := range statsBySource { if v > 0 { expected = append(expected, k) @@ -317,7 +334,7 @@ func (e *Env) DoneDiagnosingChanges() Expectation { var all []Expectation for _, source := range expected { - all = append(all, CompletedWork(lsp.DiagnosticWorkTitle(source), statsBySource[source], true)) + all = append(all, CompletedWork(server.DiagnosticWorkTitle(source), statsBySource[source], true)) } return AllOf(all...) @@ -340,49 +357,49 @@ func (e *Env) AfterChange(expectations ...Expectation) { // to be completely processed. 
func (e *Env) DoneWithOpen() Expectation { opens := e.Editor.Stats().DidOpen - return CompletedWork(lsp.DiagnosticWorkTitle(lsp.FromDidOpen), opens, true) + return CompletedWork(server.DiagnosticWorkTitle(server.FromDidOpen), opens, true) } // StartedChange expects that the server has at least started processing all // didChange notifications sent from the client. func (e *Env) StartedChange() Expectation { changes := e.Editor.Stats().DidChange - return StartedWork(lsp.DiagnosticWorkTitle(lsp.FromDidChange), changes) + return StartedWork(server.DiagnosticWorkTitle(server.FromDidChange), changes) } // DoneWithChange expects all didChange notifications currently sent by the // editor to be completely processed. func (e *Env) DoneWithChange() Expectation { changes := e.Editor.Stats().DidChange - return CompletedWork(lsp.DiagnosticWorkTitle(lsp.FromDidChange), changes, true) + return CompletedWork(server.DiagnosticWorkTitle(server.FromDidChange), changes, true) } // DoneWithSave expects all didSave notifications currently sent by the editor // to be completely processed. func (e *Env) DoneWithSave() Expectation { saves := e.Editor.Stats().DidSave - return CompletedWork(lsp.DiagnosticWorkTitle(lsp.FromDidSave), saves, true) + return CompletedWork(server.DiagnosticWorkTitle(server.FromDidSave), saves, true) } // StartedChangeWatchedFiles expects that the server has at least started // processing all didChangeWatchedFiles notifications sent from the client. func (e *Env) StartedChangeWatchedFiles() Expectation { changes := e.Editor.Stats().DidChangeWatchedFiles - return StartedWork(lsp.DiagnosticWorkTitle(lsp.FromDidChangeWatchedFiles), changes) + return StartedWork(server.DiagnosticWorkTitle(server.FromDidChangeWatchedFiles), changes) } // DoneWithChangeWatchedFiles expects all didChangeWatchedFiles notifications // currently sent by the editor to be completely processed. 
func (e *Env) DoneWithChangeWatchedFiles() Expectation { changes := e.Editor.Stats().DidChangeWatchedFiles - return CompletedWork(lsp.DiagnosticWorkTitle(lsp.FromDidChangeWatchedFiles), changes, true) + return CompletedWork(server.DiagnosticWorkTitle(server.FromDidChangeWatchedFiles), changes, true) } // DoneWithClose expects all didClose notifications currently sent by the // editor to be completely processed. func (e *Env) DoneWithClose() Expectation { changes := e.Editor.Stats().DidClose - return CompletedWork(lsp.DiagnosticWorkTitle(lsp.FromDidClose), changes, true) + return CompletedWork(server.DiagnosticWorkTitle(server.FromDidClose), changes, true) } // StartedWork expect a work item to have been started >= atLeast times. @@ -500,7 +517,7 @@ func NoOutstandingWork(ignore func(title, msg string) bool) Expectation { // the "begin" notification, work should not be in progress. continue } - if ignore(w.title, w.msg) { + if ignore != nil && ignore(w.title, w.msg) { continue } return Unmet @@ -516,7 +533,7 @@ func NoOutstandingWork(ignore func(title, msg string) bool) Expectation { // IgnoreTelemetryPromptWork may be used in conjunction with NoOutStandingWork // to ignore the telemetry prompt. 
func IgnoreTelemetryPromptWork(title, msg string) bool { - return title == lsp.TelemetryPromptWorkTitle + return title == server.TelemetryPromptWorkTitle } // NoErrorLogs asserts that the client has not received any log messages of @@ -761,7 +778,7 @@ func FromSource(source string) DiagnosticFilter { func (e *Env) AtRegexp(name, pattern string) DiagnosticFilter { loc := e.RegexpSearch(name, pattern) return DiagnosticFilter{ - desc: fmt.Sprintf("at the first position matching %#q in %q", pattern, name), + desc: fmt.Sprintf("at the first position (%v) matching %#q in %q", loc.Range.Start, pattern, name), check: func(diagName string, d protocol.Diagnostic) bool { return diagName == name && d.Range.Start == loc.Range.Start }, diff --git a/gopls/internal/lsp/fake/client.go b/gopls/internal/test/integration/fake/client.go similarity index 88% rename from gopls/internal/lsp/fake/client.go rename to gopls/internal/test/integration/fake/client.go index cedd5884386..f940821eefe 100644 --- a/gopls/internal/lsp/fake/client.go +++ b/gopls/internal/test/integration/fake/client.go @@ -8,9 +8,11 @@ import ( "context" "encoding/json" "fmt" + "path" + "path/filepath" - "golang.org/x/tools/gopls/internal/lsp/glob" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/integration/fake/glob" ) // ClientHooks are a set of optional hooks called during handling of @@ -30,8 +32,9 @@ type ClientHooks struct { OnApplyEdit func(context.Context, *protocol.ApplyWorkspaceEditParams) error } -// Client is an adapter that converts an *Editor into an LSP Client. It mostly -// delegates functionality to hooks that can be configured by tests. +// Client is an implementation of the [protocol.Client] interface +// based on the test's fake [Editor]. It mostly delegates +// functionality to hooks that can be configured by tests. 
type Client struct { editor *Editor hooks ClientHooks @@ -44,6 +47,8 @@ func (c *Client) InlayHintRefresh(context.Context) error { return nil } func (c *Client) DiagnosticRefresh(context.Context) error { return nil } +func (c *Client) FoldingRangeRefresh(context.Context) error { return nil } + func (c *Client) InlineValueRefresh(context.Context) error { return nil } func (c *Client) SemanticTokensRefresh(context.Context) error { return nil } @@ -94,9 +99,12 @@ func (c *Client) WorkspaceFolders(context.Context) ([]protocol.WorkspaceFolder, func (c *Client) Configuration(_ context.Context, p *protocol.ParamConfiguration) ([]interface{}, error) { results := make([]interface{}, len(p.Items)) for i, item := range p.Items { + if item.ScopeURI != nil && *item.ScopeURI == "" { + return nil, fmt.Errorf(`malformed ScopeURI ""`) + } if item.Section == "gopls" { config := c.editor.Config() - results[i] = makeSettings(c.editor.sandbox, config) + results[i] = makeSettings(c.editor.sandbox, config, item.ScopeURI) } } return results, nil @@ -127,8 +135,15 @@ func (c *Client) RegisterCapability(ctx context.Context, params *protocol.Regist } var globs []*glob.Glob for _, watcher := range opts.Watchers { + var globPattern string + switch pattern := watcher.GlobPattern.Value.(type) { + case protocol.Pattern: + globPattern = pattern + case protocol.RelativePattern: + globPattern = path.Join(filepath.ToSlash(pattern.BaseURI.Path()), pattern.Pattern) + } // TODO(rfindley): honor the watch kind. - g, err := glob.Parse(watcher.GlobPattern) + g, err := glob.Parse(globPattern) if err != nil { return fmt.Errorf("error parsing glob pattern %q: %v", watcher.GlobPattern, err) } diff --git a/gopls/internal/test/integration/fake/doc.go b/gopls/internal/test/integration/fake/doc.go new file mode 100644 index 00000000000..e0fc61b9928 --- /dev/null +++ b/gopls/internal/test/integration/fake/doc.go @@ -0,0 +1,20 @@ +// Copyright 2020 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package fake provides a fake implementation of an LSP-enabled +// text editor, its LSP client plugin, and a Sandbox environment for +// use in integration tests. +// +// The Editor type provides a high level API for text editor operations +// (open/modify/save/close a buffer, jump to definition, etc.), and the Client +// type exposes an LSP client for the editor that can be connected to a +// language server. By default, the Editor and Client should be compliant with +// the LSP spec: their intended use is to verify server compliance with the +// spec in a variety of environments. Possible future enhancements of these +// types may allow them to misbehave in configurable ways, but that is not +// their primary use. +// +// The Sandbox type provides a facility for executing tests with a temporary +// directory, module proxy, and GOPATH. +package fake diff --git a/gopls/internal/lsp/fake/edit.go b/gopls/internal/test/integration/fake/edit.go similarity index 77% rename from gopls/internal/lsp/fake/edit.go rename to gopls/internal/test/integration/fake/edit.go index 40762f2ff1a..b06984b3dbc 100644 --- a/gopls/internal/lsp/fake/edit.go +++ b/gopls/internal/test/integration/fake/edit.go @@ -5,8 +5,7 @@ package fake import ( - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/internal/diff" ) @@ -24,19 +23,11 @@ func NewEdit(startLine, startColumn, endLine, endColumn uint32, text string) pro } } -func EditToChangeEvent(e protocol.TextEdit) protocol.TextDocumentContentChangeEvent { - var rng protocol.Range = e.Range - return protocol.TextDocumentContentChangeEvent{ - Range: &rng, - Text: e.NewText, - } -} - // applyEdits applies the edits to a file with the specified lines, // and returns a new slice containing the lines of the patched file. 
// It is a wrapper around diff.Apply; see that function for preconditions. func applyEdits(mapper *protocol.Mapper, edits []protocol.TextEdit, windowsLineEndings bool) ([]byte, error) { - diffEdits, err := source.FromProtocolEdits(mapper, edits) + diffEdits, err := protocol.EditsToDiffEdits(mapper, edits) if err != nil { return nil, err } diff --git a/gopls/internal/lsp/fake/edit_test.go b/gopls/internal/test/integration/fake/edit_test.go similarity index 97% rename from gopls/internal/lsp/fake/edit_test.go rename to gopls/internal/test/integration/fake/edit_test.go index 97e2c73e42d..0d7ac18c414 100644 --- a/gopls/internal/lsp/fake/edit_test.go +++ b/gopls/internal/test/integration/fake/edit_test.go @@ -7,7 +7,7 @@ package fake import ( "testing" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" ) func TestApplyEdits(t *testing.T) { diff --git a/gopls/internal/lsp/fake/editor.go b/gopls/internal/test/integration/fake/editor.go similarity index 87% rename from gopls/internal/lsp/fake/editor.go rename to gopls/internal/test/integration/fake/editor.go index b8e69011d99..224a68c26bd 100644 --- a/gopls/internal/lsp/fake/editor.go +++ b/gopls/internal/test/integration/fake/editor.go @@ -17,17 +17,17 @@ import ( "strings" "sync" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/glob" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/test/integration/fake/glob" + "golang.org/x/tools/gopls/internal/util/pathutil" + "golang.org/x/tools/gopls/internal/util/slices" "golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/jsonrpc2/servertest" "golang.org/x/tools/internal/xcontext" ) -// Editor is a fake editor client. 
It keeps track of client state and can be +// Editor is a fake client editor. It keeps track of client state and can be // used for writing LSP tests. type Editor struct { @@ -39,7 +39,7 @@ type Editor struct { client *Client sandbox *Sandbox - // TODO(adonovan): buffers should be keyed by protocol.DocumentURI. + // TODO(rfindley): buffers should be keyed by protocol.DocumentURI. mu sync.Mutex config EditorConfig // editor configuration buffers map[string]buffer // open buffers (relative path -> buffer content) @@ -73,7 +73,7 @@ func (b buffer) text() string { } // EditorConfig configures the editor's LSP session. This is similar to -// source.UserOptions, but we use a separate type here so that we expose only +// golang.UserOptions, but we use a separate type here so that we expose only // that configuration which we support. // // The zero value for EditorConfig is the default configuration. @@ -82,6 +82,8 @@ type EditorConfig struct { // // Since this can only be set during initialization, changing this field via // Editor.ChangeConfiguration has no effect. + // + // If empty, "fake.Editor" is used. ClientName string // Env holds environment variables to apply on top of the default editor @@ -110,7 +112,13 @@ type EditorConfig struct { FileAssociations map[string]string // Settings holds user-provided configuration for the LSP server. - Settings map[string]interface{} + Settings map[string]any + + // FolderSettings holds user-provided per-folder configuration, if any. + // + // It maps each folder (as a relative path to the sandbox workdir) to its + // configuration mapping (like Settings). + FolderSettings map[string]map[string]any // CapabilitiesJSON holds JSON client capabilities to overlay over the // editor's default client capabilities. @@ -216,7 +224,7 @@ func (e *Editor) Client() *Client { } // makeSettings builds the settings map for use in LSP settings RPCs. 
-func makeSettings(sandbox *Sandbox, config EditorConfig) map[string]interface{} { +func makeSettings(sandbox *Sandbox, config EditorConfig, scopeURI *protocol.URI) map[string]any { env := make(map[string]string) for k, v := range sandbox.GoEnv() { env[k] = v @@ -225,14 +233,14 @@ func makeSettings(sandbox *Sandbox, config EditorConfig) map[string]interface{} env[k] = v } for k, v := range env { - v = strings.ReplaceAll(v, "$SANDBOX_WORKDIR", sandbox.Workdir.RootURI().SpanURI().Filename()) + v = strings.ReplaceAll(v, "$SANDBOX_WORKDIR", sandbox.Workdir.RootURI().Path()) env[k] = v } - settings := map[string]interface{}{ + settings := map[string]any{ "env": env, - // Use verbose progress reporting so that regtests can assert on + // Use verbose progress reporting so that integration tests can assert on // asynchronous operations being completed (such as diagnosing a snapshot). "verboseWorkDoneProgress": true, @@ -248,58 +256,54 @@ func makeSettings(sandbox *Sandbox, config EditorConfig) map[string]interface{} settings[k] = v } + // If the server is requesting configuration for a specific scope, apply + // settings for the nearest folder that has customized settings, if any. 
+ if scopeURI != nil { + var ( + scopePath = protocol.DocumentURI(*scopeURI).Path() + closestDir string // longest dir with settings containing the scope, if any + closestSettings map[string]any // settings for that dir, if any + ) + for relPath, settings := range config.FolderSettings { + dir := sandbox.Workdir.AbsPath(relPath) + if strings.HasPrefix(scopePath+string(filepath.Separator), dir+string(filepath.Separator)) && len(dir) > len(closestDir) { + closestDir = dir + closestSettings = settings + } + } + if closestSettings != nil { + for k, v := range closestSettings { + settings[k] = v + } + } + } + return settings } func (e *Editor) initialize(ctx context.Context) error { config := e.Config() + clientName := config.ClientName + if clientName == "" { + clientName = "fake.Editor" + } + params := &protocol.ParamInitialize{} - if e.config.ClientName != "" { - params.ClientInfo = &protocol.Msg_XInitializeParams_clientInfo{} - params.ClientInfo.Name = e.config.ClientName - params.ClientInfo.Version = "v1.0.0" + params.ClientInfo = &protocol.ClientInfo{ + Name: clientName, + Version: "v1.0.0", } - params.InitializationOptions = makeSettings(e.sandbox, config) + params.InitializationOptions = makeSettings(e.sandbox, config, nil) params.WorkspaceFolders = makeWorkspaceFolders(e.sandbox, config.WorkspaceFolders) - // Set various client capabilities that are sought by gopls. 
- params.Capabilities.Workspace.Configuration = true // support workspace/configuration - params.Capabilities.Window.WorkDoneProgress = true // support window/workDoneProgress - params.Capabilities.TextDocument.Completion.CompletionItem.TagSupport.ValueSet = []protocol.CompletionItemTag{protocol.ComplDeprecated} - params.Capabilities.TextDocument.Completion.CompletionItem.SnippetSupport = true - params.Capabilities.TextDocument.SemanticTokens.Requests.Full.Value = true - params.Capabilities.TextDocument.SemanticTokens.TokenTypes = []string{ - "namespace", "type", "class", "enum", "interface", - "struct", "typeParameter", "parameter", "variable", "property", "enumMember", - "event", "function", "method", "macro", "keyword", "modifier", "comment", - "string", "number", "regexp", "operator", - } - params.Capabilities.TextDocument.SemanticTokens.TokenModifiers = []string{ - "declaration", "definition", "readonly", "static", - "deprecated", "abstract", "async", "modification", "documentation", "defaultLibrary", - } - // The LSP tests have historically enabled this flag, - // but really we should test both ways for older editors. - params.Capabilities.TextDocument.DocumentSymbol.HierarchicalDocumentSymbolSupport = true - // Glob pattern watching is enabled. - params.Capabilities.Workspace.DidChangeWatchedFiles.DynamicRegistration = true - // "rename" operations are used for package renaming. - // - // TODO(rfindley): add support for other resource operations (create, delete, ...) - params.Capabilities.Workspace.WorkspaceEdit = &protocol.WorkspaceEditClientCapabilities{ - ResourceOperations: []protocol.ResourceOperationKind{ - "rename", - }, - } - // Apply capabilities overlay. 
- if config.CapabilitiesJSON != nil { - if err := json.Unmarshal(config.CapabilitiesJSON, ¶ms.Capabilities); err != nil { - return fmt.Errorf("unmarshalling EditorConfig.CapabilitiesJSON: %v", err) - } + capabilities, err := clientCapabilities(config) + if err != nil { + return fmt.Errorf("unmarshalling EditorConfig.CapabilitiesJSON: %v", err) } + params.Capabilities = capabilities - trace := protocol.TraceValues("messages") + trace := protocol.TraceValue("messages") params.Trace = &trace // TODO: support workspace folders. if e.Server != nil { @@ -324,6 +328,50 @@ func (e *Editor) initialize(ctx context.Context) error { return nil } +func clientCapabilities(cfg EditorConfig) (protocol.ClientCapabilities, error) { + var capabilities protocol.ClientCapabilities + // Set various client capabilities that are sought by gopls. + capabilities.Workspace.Configuration = true // support workspace/configuration + capabilities.TextDocument.Completion.CompletionItem.TagSupport = &protocol.CompletionItemTagOptions{} + capabilities.TextDocument.Completion.CompletionItem.TagSupport.ValueSet = []protocol.CompletionItemTag{protocol.ComplDeprecated} + capabilities.TextDocument.Completion.CompletionItem.SnippetSupport = true + capabilities.TextDocument.SemanticTokens.Requests.Full = &protocol.Or_ClientSemanticTokensRequestOptions_full{Value: true} + capabilities.Window.WorkDoneProgress = true // support window/workDoneProgress + capabilities.TextDocument.SemanticTokens.TokenTypes = []string{ + "namespace", "type", "class", "enum", "interface", + "struct", "typeParameter", "parameter", "variable", "property", "enumMember", + "event", "function", "method", "macro", "keyword", "modifier", "comment", + "string", "number", "regexp", "operator", + // Additional types supported by this client: + "label", + } + capabilities.TextDocument.SemanticTokens.TokenModifiers = []string{ + "declaration", "definition", "readonly", "static", + "deprecated", "abstract", "async", "modification", 
"documentation", "defaultLibrary", + } + // The LSP tests have historically enabled this flag, + // but really we should test both ways for older editors. + capabilities.TextDocument.DocumentSymbol.HierarchicalDocumentSymbolSupport = true + // Glob pattern watching is enabled. + capabilities.Workspace.DidChangeWatchedFiles.DynamicRegistration = true + // "rename" operations are used for package renaming. + // + // TODO(rfindley): add support for other resource operations (create, delete, ...) + capabilities.Workspace.WorkspaceEdit = &protocol.WorkspaceEditClientCapabilities{ + ResourceOperations: []protocol.ResourceOperationKind{ + "rename", + }, + } + + // Apply capabilities overlay. + if cfg.CapabilitiesJSON != nil { + if err := json.Unmarshal(cfg.CapabilitiesJSON, &capabilities); err != nil { + return protocol.ClientCapabilities{}, fmt.Errorf("unmarshalling EditorConfig.CapabilitiesJSON: %v", err) + } + } + return capabilities, nil +} + // marshalUnmarshal is a helper to json Marshal and then Unmarshal as a // different type. Used to work around cases where our protocol types are not // specific. @@ -407,7 +455,7 @@ func (e *Editor) onFileChanges(ctx context.Context, evts []protocol.FileEvent) { } var matchedEvts []protocol.FileEvent for _, evt := range evts { - filename := filepath.ToSlash(evt.URI.SpanURI().Filename()) + filename := filepath.ToSlash(evt.URI.Path()) for _, g := range e.watchPatterns { if g.Match(filename) { matchedEvts = append(matchedEvts, evt) @@ -478,7 +526,7 @@ func (e *Editor) createBuffer(ctx context.Context, path string, dirty bool, cont return fmt.Errorf("buffer %q already exists", path) } - uri := e.sandbox.Workdir.URI(path).SpanURI() + uri := e.sandbox.Workdir.URI(path) buf := buffer{ version: 1, path: path, @@ -529,17 +577,17 @@ var defaultFileAssociations = map[string]*regexp.Regexp{ // languageID returns the language identifier for the path p given the user // configured fileAssociations. 
-func languageID(p string, fileAssociations map[string]string) string { +func languageID(p string, fileAssociations map[string]string) protocol.LanguageKind { base := path.Base(p) for lang, re := range fileAssociations { re := regexp.MustCompile(re) if re.MatchString(base) { - return lang + return protocol.LanguageKind(lang) } } for lang, re := range defaultFileAssociations { if re.MatchString(base) { - return lang + return protocol.LanguageKind(lang) } } return "" @@ -787,22 +835,22 @@ func (e *Editor) setBufferContentLocked(ctx context.Context, path string, dirty buf.version++ buf.dirty = dirty e.buffers[path] = buf + // A simple heuristic: if there is only one edit, send it incrementally. // Otherwise, send the entire content. - var evts []protocol.TextDocumentContentChangeEvent + var evt protocol.TextDocumentContentChangeEvent if len(fromEdits) == 1 { - evts = append(evts, EditToChangeEvent(fromEdits[0])) + evt.Range = &fromEdits[0].Range + evt.Text = fromEdits[0].NewText } else { - evts = append(evts, protocol.TextDocumentContentChangeEvent{ - Text: buf.text(), - }) + evt.Text = buf.text() } params := &protocol.DidChangeTextDocumentParams{ TextDocument: protocol.VersionedTextDocumentIdentifier{ Version: int32(buf.version), TextDocumentIdentifier: e.TextDocumentIdentifier(buf.path), }, - ContentChanges: evts, + ContentChanges: []protocol.TextDocumentContentChangeEvent{evt}, } if e.Server != nil { if err := e.Server.DidChange(ctx, params); err != nil { @@ -901,6 +949,21 @@ func (e *Editor) ApplyQuickFixes(ctx context.Context, loc protocol.Location, dia // ApplyCodeAction applies the given code action. func (e *Editor) ApplyCodeAction(ctx context.Context, action protocol.CodeAction) error { + // Resolve the code actions if necessary and supported. 
+ if action.Edit == nil { + editSupport, err := e.EditResolveSupport() + if err != nil { + return err + } + if editSupport { + ca, err := e.Server.ResolveCodeAction(ctx, &action) + if err != nil { + return err + } + action.Edit = ca.Edit + } + } + if action.Edit != nil { for _, change := range action.Edit.DocumentChanges { if change.TextDocumentEdit != nil { @@ -909,7 +972,7 @@ func (e *Editor) ApplyCodeAction(ctx context.Context, action protocol.CodeAction // Skip edits for old versions. continue } - if err := e.EditBuffer(ctx, path, change.TextDocumentEdit.Edits); err != nil { + if err := e.EditBuffer(ctx, path, protocol.AsTextEdits(change.TextDocumentEdit.Edits)); err != nil { return fmt.Errorf("editing buffer %q: %w", path, err) } } @@ -931,11 +994,11 @@ func (e *Editor) ApplyCodeAction(ctx context.Context, action protocol.CodeAction // GetQuickFixes returns the available quick fix code actions. func (e *Editor) GetQuickFixes(ctx context.Context, loc protocol.Location, diagnostics []protocol.Diagnostic) ([]protocol.CodeAction, error) { - return e.getCodeActions(ctx, loc, diagnostics, protocol.QuickFix, protocol.SourceFixAll) + return e.CodeActions(ctx, loc, diagnostics, protocol.QuickFix, protocol.SourceFixAll) } func (e *Editor) applyCodeActions(ctx context.Context, loc protocol.Location, diagnostics []protocol.Diagnostic, only ...protocol.CodeActionKind) (int, error) { - actions, err := e.getCodeActions(ctx, loc, diagnostics, only...) + actions, err := e.CodeActions(ctx, loc, diagnostics, only...) 
if err != nil { return 0, err } @@ -962,7 +1025,7 @@ func (e *Editor) applyCodeActions(ctx context.Context, loc protocol.Location, di return applied, nil } -func (e *Editor) getCodeActions(ctx context.Context, loc protocol.Location, diagnostics []protocol.Diagnostic, only ...protocol.CodeActionKind) ([]protocol.CodeAction, error) { +func (e *Editor) CodeActions(ctx context.Context, loc protocol.Location, diagnostics []protocol.Diagnostic, only ...protocol.CodeActionKind) ([]protocol.CodeAction, error) { if e.Server == nil { return nil, nil } @@ -1054,7 +1117,7 @@ func (e *Editor) RunGenerate(ctx context.Context, dir string) error { } absDir := e.sandbox.Workdir.AbsPath(dir) cmd, err := command.NewGenerateCommand("", command.GenerateArgs{ - Dir: protocol.URIFromSpanURI(span.URIFromPath(absDir)), + Dir: protocol.URIFromPath(absDir), Recursive: false, }) if err != nil { @@ -1068,7 +1131,7 @@ func (e *Editor) RunGenerate(ctx context.Context, dir string) error { return fmt.Errorf("running generate: %v", err) } // Unfortunately we can't simply poll the workdir for file changes here, - // because server-side command may not have completed. In regtests, we can + // because server-side command may not have completed. In integration tests, we can // Await this state change, but here we must delegate that responsibility to // the caller. return nil @@ -1263,7 +1326,7 @@ func (e *Editor) SignatureHelp(ctx context.Context, loc protocol.Location) (*pro } func (e *Editor) RenameFile(ctx context.Context, oldPath, newPath string) error { - closed, opened, err := e.renameBuffers(ctx, oldPath, newPath) + closed, opened, err := e.renameBuffers(oldPath, newPath) if err != nil { return err } @@ -1289,7 +1352,7 @@ func (e *Editor) RenameFile(ctx context.Context, oldPath, newPath string) error // renameBuffers renames in-memory buffers affected by the renaming of // oldPath->newPath, returning the resulting text documents that must be closed // and opened over the LSP. 
-func (e *Editor) renameBuffers(ctx context.Context, oldPath, newPath string) (closed []protocol.TextDocumentIdentifier, opened []protocol.TextDocumentItem, _ error) { +func (e *Editor) renameBuffers(oldPath, newPath string) (closed []protocol.TextDocumentIdentifier, opened []protocol.TextDocumentItem, _ error) { e.mu.Lock() defer e.mu.Unlock() @@ -1303,7 +1366,7 @@ func (e *Editor) renameBuffers(ctx context.Context, oldPath, newPath string) (cl for path := range e.buffers { abs := e.sandbox.Workdir.AbsPath(path) - if oldAbs == abs || source.InDir(oldAbs, abs) { + if oldAbs == abs || pathutil.InDir(oldAbs, abs) { rel, err := filepath.Rel(oldAbs, abs) if err != nil { return nil, nil, fmt.Errorf("filepath.Rel(%q, %q): %v", oldAbs, abs, err) @@ -1360,7 +1423,7 @@ func (e *Editor) applyTextDocumentEdit(ctx context.Context, change protocol.Text return err } } - return e.EditBuffer(ctx, path, change.Edits) + return e.EditBuffer(ctx, path, protocol.AsTextEdits(change.Edits)) } // Config returns the current editor configuration. @@ -1470,6 +1533,14 @@ func (e *Editor) CodeAction(ctx context.Context, loc protocol.Location, diagnost return lens, nil } +func (e *Editor) EditResolveSupport() (bool, error) { + capabilities, err := clientCapabilities(e.Config()) + if err != nil { + return false, err + } + return capabilities.TextDocument.CodeAction.ResolveSupport != nil && slices.Contains(capabilities.TextDocument.CodeAction.ResolveSupport.Properties, "edit"), nil +} + // Hover triggers a hover at the given position in an open buffer. func (e *Editor) Hover(ctx context.Context, loc protocol.Location) (*protocol.MarkupContent, protocol.Location, error) { if err := e.checkBufferLocation(loc); err != nil { @@ -1512,9 +1583,9 @@ func (e *Editor) DocumentHighlight(ctx context.Context, loc protocol.Location) ( return e.Server.DocumentHighlight(ctx, params) } -// SemanticTokens invokes textDocument/semanticTokens/full, and interprets its -// result. 
-func (e *Editor) SemanticTokens(ctx context.Context, path string) ([]SemanticToken, error) { +// SemanticTokensFull invokes textDocument/semanticTokens/full, and interprets +// its result. +func (e *Editor) SemanticTokensFull(ctx context.Context, path string) ([]SemanticToken, error) { p := &protocol.SemanticTokensParams{ TextDocument: protocol.TextDocumentIdentifier{ URI: e.sandbox.Workdir.URI(path), @@ -1531,6 +1602,26 @@ func (e *Editor) SemanticTokens(ctx context.Context, path string) ([]SemanticTok return e.interpretTokens(resp.Data, content), nil } +// SemanticTokensRange invokes textDocument/semanticTokens/range, and +// interprets its result. +func (e *Editor) SemanticTokensRange(ctx context.Context, loc protocol.Location) ([]SemanticToken, error) { + p := &protocol.SemanticTokensRangeParams{ + TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, + Range: loc.Range, + } + resp, err := e.Server.SemanticTokensRange(ctx, p) + if err != nil { + return nil, err + } + path := e.sandbox.Workdir.URIToPath(loc.URI) + // As noted above: buffers should be keyed by protocol.DocumentURI. + content, ok := e.BufferText(path) + if !ok { + return nil, fmt.Errorf("buffer %s is not open", path) + } + return e.interpretTokens(resp.Data, content), nil +} + // A SemanticToken is an interpreted semantic token value. 
type SemanticToken struct { Token string diff --git a/gopls/internal/lsp/fake/editor_test.go b/gopls/internal/test/integration/fake/editor_test.go similarity index 95% rename from gopls/internal/lsp/fake/editor_test.go rename to gopls/internal/test/integration/fake/editor_test.go index cc8a14744d2..68983bda50c 100644 --- a/gopls/internal/lsp/fake/editor_test.go +++ b/gopls/internal/test/integration/fake/editor_test.go @@ -8,7 +8,7 @@ import ( "context" "testing" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" ) const exampleProgram = ` diff --git a/gopls/internal/lsp/glob/glob.go b/gopls/internal/test/integration/fake/glob/glob.go similarity index 100% rename from gopls/internal/lsp/glob/glob.go rename to gopls/internal/test/integration/fake/glob/glob.go diff --git a/gopls/internal/lsp/glob/glob_test.go b/gopls/internal/test/integration/fake/glob/glob_test.go similarity index 97% rename from gopls/internal/lsp/glob/glob_test.go rename to gopls/internal/test/integration/fake/glob/glob_test.go index df602624d9c..8accd908e7a 100644 --- a/gopls/internal/lsp/glob/glob_test.go +++ b/gopls/internal/test/integration/fake/glob/glob_test.go @@ -7,7 +7,7 @@ package glob_test import ( "testing" - "golang.org/x/tools/gopls/internal/lsp/glob" + "golang.org/x/tools/gopls/internal/test/integration/fake/glob" ) func TestParseErrors(t *testing.T) { diff --git a/gopls/internal/lsp/fake/proxy.go b/gopls/internal/test/integration/fake/proxy.go similarity index 100% rename from gopls/internal/lsp/fake/proxy.go rename to gopls/internal/test/integration/fake/proxy.go diff --git a/gopls/internal/lsp/fake/sandbox.go b/gopls/internal/test/integration/fake/sandbox.go similarity index 100% rename from gopls/internal/lsp/fake/sandbox.go rename to gopls/internal/test/integration/fake/sandbox.go diff --git a/gopls/internal/lsp/fake/workdir.go b/gopls/internal/test/integration/fake/workdir.go similarity index 96% rename from 
gopls/internal/lsp/fake/workdir.go rename to gopls/internal/test/integration/fake/workdir.go index 462d54821f1..4d21554d4a8 100644 --- a/gopls/internal/lsp/fake/workdir.go +++ b/gopls/internal/test/integration/fake/workdir.go @@ -18,8 +18,7 @@ import ( "sync" "time" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/internal/robustio" ) @@ -56,7 +55,7 @@ func writeFileData(path string, content []byte, rel RelativeTo) error { } backoff := 1 * time.Millisecond for { - err := os.WriteFile(fp, []byte(content), 0644) + err := os.WriteFile(fp, content, 0644) if err != nil { // This lock file violation is not handled by the robustio package, as it // indicates a real race condition that could be avoided. @@ -129,7 +128,7 @@ func hashFile(data []byte) string { // RootURI returns the root URI for this working directory of this scratch // environment. func (w *Workdir) RootURI() protocol.DocumentURI { - return toURI(string(w.RelativeTo)) + return protocol.URIFromPath(string(w.RelativeTo)) } // AddWatcher registers the given func to be called on any file change. @@ -141,18 +140,13 @@ func (w *Workdir) AddWatcher(watcher func(context.Context, []protocol.FileEvent) // URI returns the URI to a the workdir-relative path. func (w *Workdir) URI(path string) protocol.DocumentURI { - return toURI(w.AbsPath(path)) + return protocol.URIFromPath(w.AbsPath(path)) } // URIToPath converts a uri to a workdir-relative path (or an absolute path, // if the uri is outside of the workdir). func (w *Workdir) URIToPath(uri protocol.DocumentURI) string { - fp := uri.SpanURI().Filename() - return w.RelPath(fp) -} - -func toURI(fp string) protocol.DocumentURI { - return protocol.DocumentURI(span.URIFromPath(fp)) + return w.RelPath(uri.Path()) } // ReadFile reads a text file specified by a workdir-relative path. 
@@ -181,7 +175,7 @@ func (w *Workdir) RegexpSearch(path string, re string) (protocol.Location, error if err != nil { return protocol.Location{}, err } - mapper := protocol.NewMapper(w.URI(path).SpanURI(), content) + mapper := protocol.NewMapper(w.URI(path), content) return regexpLocation(mapper, re) } @@ -281,7 +275,7 @@ func (w *Workdir) RenameFile(ctx context.Context, oldPath, newPath string) error // the error from Rename may be accurate. return renameErr } - if writeErr := writeFileData(newPath, []byte(content), w.RelativeTo); writeErr != nil { + if writeErr := writeFileData(newPath, content, w.RelativeTo); writeErr != nil { // At this point we have tried to actually write the file. // If it still doesn't exist, assume that the error from Rename was accurate: // for example, maybe we don't have permission to create the new path. diff --git a/gopls/internal/lsp/fake/workdir_test.go b/gopls/internal/test/integration/fake/workdir_test.go similarity index 99% rename from gopls/internal/lsp/fake/workdir_test.go rename to gopls/internal/test/integration/fake/workdir_test.go index b45b5339991..153a3576b4e 100644 --- a/gopls/internal/lsp/fake/workdir_test.go +++ b/gopls/internal/test/integration/fake/workdir_test.go @@ -11,7 +11,7 @@ import ( "testing" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" ) const sharedData = ` diff --git a/gopls/internal/lsp/fake/workdir_windows.go b/gopls/internal/test/integration/fake/workdir_windows.go similarity index 100% rename from gopls/internal/lsp/fake/workdir_windows.go rename to gopls/internal/test/integration/fake/workdir_windows.go diff --git a/gopls/internal/regtest/inlayhints/inlayhints_test.go b/gopls/internal/test/integration/inlayhints/inlayhints_test.go similarity index 82% rename from gopls/internal/regtest/inlayhints/inlayhints_test.go rename to gopls/internal/test/integration/inlayhints/inlayhints_test.go index a4b3764a2f8..eab430f23bb 
100644 --- a/gopls/internal/regtest/inlayhints/inlayhints_test.go +++ b/gopls/internal/test/integration/inlayhints/inlayhints_test.go @@ -6,10 +6,10 @@ package inlayhint import ( "testing" - "golang.org/x/tools/gopls/internal/bug" + "golang.org/x/tools/gopls/internal/golang" "golang.org/x/tools/gopls/internal/hooks" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/source" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/util/bug" ) func TestMain(m *testing.M) { @@ -42,12 +42,12 @@ const ( }, { label: "enable const", - enabled: map[string]bool{source.ConstantValues: true}, + enabled: map[string]bool{golang.ConstantValues: true}, wantInlayHint: true, }, { label: "enable parameter names", - enabled: map[string]bool{source.ParameterNames: true}, + enabled: map[string]bool{golang.ParameterNames: true}, wantInlayHint: false, }, } diff --git a/gopls/internal/regtest/misc/call_hierarchy_test.go b/gopls/internal/test/integration/misc/call_hierarchy_test.go similarity index 87% rename from gopls/internal/regtest/misc/call_hierarchy_test.go rename to gopls/internal/test/integration/misc/call_hierarchy_test.go index f0f5d4a4117..4d16dba2b3c 100644 --- a/gopls/internal/regtest/misc/call_hierarchy_test.go +++ b/gopls/internal/test/integration/misc/call_hierarchy_test.go @@ -1,13 +1,14 @@ // Copyright 2021 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. + package misc import ( "testing" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + "golang.org/x/tools/gopls/internal/protocol" + . 
"golang.org/x/tools/gopls/internal/test/integration" ) // Test for golang/go#49125 diff --git a/gopls/internal/regtest/misc/configuration_test.go b/gopls/internal/test/integration/misc/configuration_test.go similarity index 76% rename from gopls/internal/regtest/misc/configuration_test.go rename to gopls/internal/test/integration/misc/configuration_test.go index d74162e225b..39980f353df 100644 --- a/gopls/internal/regtest/misc/configuration_test.go +++ b/gopls/internal/test/integration/misc/configuration_test.go @@ -7,7 +7,7 @@ package misc import ( "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + . "golang.org/x/tools/gopls/internal/test/integration" "golang.org/x/tools/internal/testenv" ) @@ -15,10 +15,10 @@ import ( // Test that enabling and disabling produces the expected results of showing // and hiding staticcheck analysis results. func TestChangeConfiguration(t *testing.T) { - // Staticcheck only supports Go versions >= 1.19. + // Staticcheck only supports Go versions >= 1.20. // Note: keep this in sync with TestStaticcheckWarning. Below this version we // should get an error when setting staticcheck configuration. - testenv.NeedsGo1Point(t, 19) + testenv.NeedsGo1Point(t, 20) const files = ` -- go.mod -- @@ -49,13 +49,55 @@ var FooErr = errors.New("foo") }) } +// Test that clients can configure per-workspace configuration, which is +// queried via the scopeURI of a workspace/configuration request. +// (this was broken in golang/go#65519). 
+func TestWorkspaceConfiguration(t *testing.T) { + const files = ` +-- go.mod -- +module example.com/config + +go 1.18 + +-- a/a.go -- +package a + +import "example.com/config/b" + +func _() { + _ = b.B{2} +} + +-- b/b.go -- +package b + +type B struct { + F int +} +` + + WithOptions( + WorkspaceFolders("a"), + FolderSettings{ + "a": { + "analyses": map[string]bool{ + "composites": false, + }, + }, + }, + ).Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("a/a.go") + env.AfterChange(NoDiagnostics()) + }) +} + // TestMajorOptionsChange is like TestChangeConfiguration, but modifies an // an open buffer before making a major (but inconsequential) change that // causes gopls to recreate the view. // // Gopls should not get confused about buffer content when recreating the view. func TestMajorOptionsChange(t *testing.T) { - testenv.NeedsGo1Point(t, 19) // needs staticcheck + testenv.NeedsGo1Point(t, 20) // needs staticcheck const files = ` -- go.mod -- @@ -96,7 +138,7 @@ var ErrFoo = errors.New("foo") func TestStaticcheckWarning(t *testing.T) { // Note: keep this in sync with TestChangeConfiguration. 
- testenv.SkipAfterGo1Point(t, 16) + testenv.SkipAfterGo1Point(t, 19) const files = ` -- go.mod -- @@ -122,19 +164,6 @@ var FooErr = errors.New("foo") }) } -func TestGofumptWarning(t *testing.T) { - testenv.SkipAfterGo1Point(t, 17) - - WithOptions( - Settings{"gofumpt": true}, - ).Run(t, "", func(t *testing.T, env *Env) { - env.OnceMet( - InitialWorkspaceLoad, - ShownMessage("gofumpt is not supported"), - ) - }) -} - func TestDeprecatedSettings(t *testing.T) { WithOptions( Settings{ @@ -142,7 +171,7 @@ func TestDeprecatedSettings(t *testing.T) { "experimentalWatchedFileDelay": "1s", "experimentalWorkspaceModule": true, "tempModfile": true, - "expandWorkspaceToModule": false, + "allowModfileModifications": true, }, ).Run(t, "", func(t *testing.T, env *Env) { env.OnceMet( @@ -150,8 +179,8 @@ func TestDeprecatedSettings(t *testing.T) { ShownMessage("experimentalWorkspaceModule"), ShownMessage("experimentalUseInvalidMetadata"), ShownMessage("experimentalWatchedFileDelay"), - ShownMessage("https://go.dev/issue/63537"), // issue to remove tempModfile - ShownMessage("https://go.dev/issue/63536"), // issue to remove expandWorkspaceToModule + ShownMessage("tempModfile"), + ShownMessage("allowModfileModifications"), ) }) } diff --git a/gopls/internal/regtest/misc/debugserver_test.go b/gopls/internal/test/integration/misc/debugserver_test.go similarity index 87% rename from gopls/internal/regtest/misc/debugserver_test.go rename to gopls/internal/test/integration/misc/debugserver_test.go index 519f7944790..d1ce21bd47a 100644 --- a/gopls/internal/regtest/misc/debugserver_test.go +++ b/gopls/internal/test/integration/misc/debugserver_test.go @@ -8,10 +8,10 @@ import ( "net/http" "testing" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + . 
"golang.org/x/tools/gopls/internal/test/integration" ) func TestStartDebugging(t *testing.T) { diff --git a/gopls/internal/regtest/misc/definition_test.go b/gopls/internal/test/integration/misc/definition_test.go similarity index 94% rename from gopls/internal/regtest/misc/definition_test.go rename to gopls/internal/test/integration/misc/definition_test.go index d16539f0dbb..6b364e2e9d5 100644 --- a/gopls/internal/regtest/misc/definition_test.go +++ b/gopls/internal/test/integration/misc/definition_test.go @@ -11,9 +11,9 @@ import ( "strings" "testing" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/compare" + . "golang.org/x/tools/gopls/internal/test/integration" ) const internalDefinition = ` @@ -495,9 +495,7 @@ const _ = b.K } // Run 'go mod vendor' outside the editor. - if err := env.Sandbox.RunGoCommand(env.Ctx, ".", "mod", []string{"vendor"}, nil, true); err != nil { - t.Fatalf("go mod vendor: %v", err) - } + env.RunGoCommand("mod", "vendor") // Synchronize changes to watched files. env.Await(env.DoneWithChangeWatchedFiles()) @@ -569,3 +567,31 @@ func TestGoToEmbedDefinition(t *testing.T) { } }) } + +func TestDefinitionOfErrorErrorMethod(t *testing.T) { + const src = `Regression test for a panic in definition of error.Error (of course). 
+golang/go#64086 + +-- go.mod -- +module mod.com +go 1.18 + +-- a.go -- +package a + +func _(err error) { + _ = err.Error() +} + +` + Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("a.go") + + start := env.RegexpSearch("a.go", `Error`) + loc := env.GoToDefinition(start) + + if !strings.HasSuffix(string(loc.URI), "builtin.go") { + t.Errorf("GoToDefinition(err.Error) = %#v, want builtin.go", loc) + } + }) +} diff --git a/gopls/internal/regtest/misc/embed_test.go b/gopls/internal/test/integration/misc/embed_test.go similarity index 92% rename from gopls/internal/regtest/misc/embed_test.go rename to gopls/internal/test/integration/misc/embed_test.go index 021fbfcc06d..894cff9f5a3 100644 --- a/gopls/internal/regtest/misc/embed_test.go +++ b/gopls/internal/test/integration/misc/embed_test.go @@ -1,12 +1,13 @@ // Copyright 2021 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. + package misc import ( "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + . "golang.org/x/tools/gopls/internal/test/integration" ) func TestMissingPatternDiagnostic(t *testing.T) { diff --git a/gopls/internal/regtest/misc/extract_test.go b/gopls/internal/test/integration/misc/extract_test.go similarity index 88% rename from gopls/internal/regtest/misc/extract_test.go rename to gopls/internal/test/integration/misc/extract_test.go index 23efffbb70e..86afb45a49a 100644 --- a/gopls/internal/regtest/misc/extract_test.go +++ b/gopls/internal/test/integration/misc/extract_test.go @@ -1,15 +1,16 @@ // Copyright 2022 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. + package misc import ( "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" + "golang.org/x/tools/gopls/internal/test/compare" + . 
"golang.org/x/tools/gopls/internal/test/integration" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" ) func TestExtractFunction(t *testing.T) { diff --git a/gopls/internal/regtest/misc/failures_test.go b/gopls/internal/test/integration/misc/failures_test.go similarity index 94% rename from gopls/internal/regtest/misc/failures_test.go rename to gopls/internal/test/integration/misc/failures_test.go index b5da9b02e15..81fa17deb9b 100644 --- a/gopls/internal/regtest/misc/failures_test.go +++ b/gopls/internal/test/integration/misc/failures_test.go @@ -7,8 +7,8 @@ package misc import ( "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/compare" ) // This is a slight variant of TestHoverOnError in definition_test.go diff --git a/gopls/internal/test/integration/misc/fix_test.go b/gopls/internal/test/integration/misc/fix_test.go new file mode 100644 index 00000000000..b3d86e1a080 --- /dev/null +++ b/gopls/internal/test/integration/misc/fix_test.go @@ -0,0 +1,161 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package misc + +import ( + "testing" + + "golang.org/x/tools/gopls/internal/test/compare" + . "golang.org/x/tools/gopls/internal/test/integration" + + "golang.org/x/tools/gopls/internal/protocol" +) + +// A basic test for fillstruct, now that it uses a command and supports resolve edits. 
+func TestFillStruct(t *testing.T) { + tc := []struct { + name string + capabilities string + wantCommand bool + }{ + {"default", "{}", true}, + {"no data", `{ "textDocument": {"codeAction": { "resolveSupport": { "properties": ["edit"] } } } }`, true}, + {"resolve support", `{ "textDocument": {"codeAction": { "dataSupport": true, "resolveSupport": { "properties": ["edit"] } } } }`, false}, + } + + const basic = ` +-- go.mod -- +module mod.com + +go 1.14 +-- main.go -- +package main + +type Info struct { + WordCounts map[string]int + Words []string +} + +func Foo() { + _ = Info{} +} +` + + for _, tt := range tc { + t.Run(tt.name, func(t *testing.T) { + runner := WithOptions(CapabilitiesJSON([]byte(tt.capabilities))) + + runner.Run(t, basic, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + fixes, err := env.Editor.CodeActions(env.Ctx, env.RegexpSearch("main.go", "Info{}"), nil, protocol.RefactorRewrite) + if err != nil { + t.Fatal(err) + } + + if len(fixes) != 1 { + t.Fatalf("expected 1 code action, got %v", len(fixes)) + } + if tt.wantCommand { + if fixes[0].Command == nil || fixes[0].Data != nil { + t.Errorf("expected code action to have command not data, got %v", fixes[0]) + } + } else { + if fixes[0].Command != nil || fixes[0].Data == nil { + t.Errorf("expected code action to have data not command, got %v", fixes[0]) + } + } + + // Apply the code action (handles resolving the code action), and check that the result is correct. 
+ if err := env.Editor.RefactorRewrite(env.Ctx, env.RegexpSearch("main.go", "Info{}")); err != nil { + t.Fatal(err) + } + want := `package main + +type Info struct { + WordCounts map[string]int + Words []string +} + +func Foo() { + _ = Info{ + WordCounts: map[string]int{}, + Words: []string{}, + } +} +` + if got := env.BufferText("main.go"); got != want { + t.Fatalf("TestFillStruct failed:\n%s", compare.Text(want, got)) + } + }) + }) + } +} + +func TestFillReturns(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.12 +-- main.go -- +package main + +func Foo() error { + return +} +` + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + var d protocol.PublishDiagnosticsParams + env.AfterChange( + // The error message here changed in 1.18; "return values" covers both forms. + Diagnostics(env.AtRegexp("main.go", `return`), WithMessage("return values")), + ReadDiagnostics("main.go", &d), + ) + var quickFixes []*protocol.CodeAction + for _, act := range env.CodeAction("main.go", d.Diagnostics) { + if act.Kind == protocol.QuickFix { + act := act // remove in go1.22 + quickFixes = append(quickFixes, &act) + } + } + if len(quickFixes) != 1 { + t.Fatalf("expected 1 quick fix, got %d:\n%v", len(quickFixes), quickFixes) + } + env.ApplyQuickFixes("main.go", d.Diagnostics) + env.AfterChange(NoDiagnostics(ForFile("main.go"))) + }) +} + +func TestUnusedParameter_Issue63755(t *testing.T) { + // This test verifies the fix for #63755, where codeActions panicked on parameters + // of functions with no function body. + + // We should not detect parameters as unused for external functions. 
+ + const files = ` +-- go.mod -- +module unused.mod + +go 1.18 + +-- external.go -- +package external + +func External(z int) + +func _() { + External(1) +} + ` + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("external.go") + _, err := env.Editor.CodeAction(env.Ctx, env.RegexpSearch("external.go", "z"), nil) + if err != nil { + t.Fatal(err) + } + // yay, no panic + }) +} diff --git a/gopls/internal/regtest/misc/formatting_test.go b/gopls/internal/test/integration/misc/formatting_test.go similarity index 97% rename from gopls/internal/regtest/misc/formatting_test.go rename to gopls/internal/test/integration/misc/formatting_test.go index 1556bb7f918..1808dbc8791 100644 --- a/gopls/internal/regtest/misc/formatting_test.go +++ b/gopls/internal/test/integration/misc/formatting_test.go @@ -8,8 +8,8 @@ import ( "strings" "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" + "golang.org/x/tools/gopls/internal/test/compare" + . "golang.org/x/tools/gopls/internal/test/integration" "golang.org/x/tools/internal/testenv" ) @@ -303,8 +303,7 @@ func main() { } func TestGofumptFormatting(t *testing.T) { - testenv.NeedsGo1Point(t, 18) - + testenv.NeedsGo1Point(t, 20) // gofumpt requires go 1.20+ // Exercise some gofumpt formatting rules: // - No empty lines following an assignment operator // - Octal integer literals should use the 0o prefix on modules using Go diff --git a/gopls/internal/test/integration/misc/generate_test.go b/gopls/internal/test/integration/misc/generate_test.go new file mode 100644 index 00000000000..548f3bd5f5e --- /dev/null +++ b/gopls/internal/test/integration/misc/generate_test.go @@ -0,0 +1,105 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// TODO(rfindley): figure out why go generate fails on android builders. 
+ +//go:build !android +// +build !android + +package misc + +import ( + "testing" + + . "golang.org/x/tools/gopls/internal/test/integration" +) + +func TestGenerateProgress(t *testing.T) { + const generatedWorkspace = ` +-- go.mod -- +module fake.test + +go 1.14 +-- generate.go -- +// +build ignore + +package main + +import ( + "os" +) + +func main() { + os.WriteFile("generated.go", []byte("package " + os.Args[1] + "\n\nconst Answer = 21"), 0644) +} + +-- lib1/lib.go -- +package lib1 + +//` + `go:generate go run ../generate.go lib1 + +-- lib2/lib.go -- +package lib2 + +//` + `go:generate go run ../generate.go lib2 + +-- main.go -- +package main + +import ( + "fake.test/lib1" + "fake.test/lib2" +) + +func main() { + println(lib1.Answer + lib2.Answer) +} +` + + Run(t, generatedWorkspace, func(t *testing.T, env *Env) { + env.OnceMet( + InitialWorkspaceLoad, + Diagnostics(env.AtRegexp("main.go", "lib1.(Answer)")), + ) + env.RunGenerate("./lib1") + env.RunGenerate("./lib2") + env.AfterChange( + NoDiagnostics(ForFile("main.go")), + ) + }) +} + +func TestGenerateUseNetwork(t *testing.T) { + const proxy = ` +-- example.com@v1.2.3/go.mod -- +module example.com + +go 1.21 +-- example.com@v1.2.3/main.go -- +package main + +func main() { + println("hello world") +} +` + const generatedWorkspace = ` +-- go.mod -- +module fake.test + +go 1.21 +-- main.go -- + +package main + +//go:` + /* hide this string from the go command */ `generate go run example.com@latest + +` + WithOptions(ProxyFiles(proxy)). 
+ Run(t, generatedWorkspace, func(t *testing.T, env *Env) { + env.OnceMet( + InitialWorkspaceLoad, + ) + env.RunGenerate("./") + }) +} diff --git a/gopls/internal/regtest/misc/highlight_test.go b/gopls/internal/test/integration/misc/highlight_test.go similarity index 96% rename from gopls/internal/regtest/misc/highlight_test.go rename to gopls/internal/test/integration/misc/highlight_test.go index 8835d608ecf..9e3dd980464 100644 --- a/gopls/internal/regtest/misc/highlight_test.go +++ b/gopls/internal/test/integration/misc/highlight_test.go @@ -8,8 +8,8 @@ import ( "sort" "testing" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + "golang.org/x/tools/gopls/internal/protocol" + . "golang.org/x/tools/gopls/internal/test/integration" ) func TestWorkspacePackageHighlight(t *testing.T) { diff --git a/gopls/internal/regtest/misc/hover_test.go b/gopls/internal/test/integration/misc/hover_test.go similarity index 95% rename from gopls/internal/regtest/misc/hover_test.go rename to gopls/internal/test/integration/misc/hover_test.go index 7b84f8aa871..3853938f12f 100644 --- a/gopls/internal/regtest/misc/hover_test.go +++ b/gopls/internal/test/integration/misc/hover_test.go @@ -9,9 +9,9 @@ import ( "strings" "testing" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + "golang.org/x/tools/gopls/internal/protocol" + . 
"golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/integration/fake" "golang.org/x/tools/internal/testenv" ) @@ -491,3 +491,26 @@ func TestHoverEmbedDirective(t *testing.T) { } }) } + +func TestHoverBrokenImport_Issue60592(t *testing.T) { + const files = ` +-- go.mod -- +module testdata +go 1.18 + +-- p.go -- +package main + +import foo "a" + +func _() { + foo.Print() +} + +` + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("p.go") + // This request should not crash gopls. + _, _, _ = env.Editor.Hover(env.Ctx, env.RegexpSearch("p.go", "foo[.]")) + }) +} diff --git a/gopls/internal/regtest/misc/import_test.go b/gopls/internal/test/integration/misc/import_test.go similarity index 92% rename from gopls/internal/regtest/misc/import_test.go rename to gopls/internal/test/integration/misc/import_test.go index 30986ba5077..0df3f8dadec 100644 --- a/gopls/internal/regtest/misc/import_test.go +++ b/gopls/internal/test/integration/misc/import_test.go @@ -8,10 +8,10 @@ import ( "testing" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/test/compare" + . "golang.org/x/tools/gopls/internal/test/integration" ) func TestAddImport(t *testing.T) { diff --git a/gopls/internal/regtest/misc/imports_test.go b/gopls/internal/test/integration/misc/imports_test.go similarity index 87% rename from gopls/internal/regtest/misc/imports_test.go rename to gopls/internal/test/integration/misc/imports_test.go index 1e1d303379d..d8f453ee86c 100644 --- a/gopls/internal/regtest/misc/imports_test.go +++ b/gopls/internal/test/integration/misc/imports_test.go @@ -10,11 +10,10 @@ import ( "strings" "testing" - . 
"golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" + "golang.org/x/tools/gopls/internal/test/compare" + . "golang.org/x/tools/gopls/internal/test/integration" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/gopls/internal/protocol" ) // Tests golang/go#38815. @@ -97,15 +96,21 @@ func main() { } ` - // The file remains unchanged, but if there are any CodeActions returned, they confuse vim. - // Therefore check for no CodeActions + // The file remains unchanged, but if there any quick fixes + // are returned, they confuse vim (according to CL 233117). + // Therefore check for no QuickFix CodeActions. Run(t, "", func(t *testing.T, env *Env) { env.CreateBuffer("main.go", vim1) env.OrganizeImports("main.go") - actions := env.CodeAction("main.go", nil) - if len(actions) > 0 { + + // Assert no quick fixes. + for _, act := range env.CodeAction("main.go", nil) { + if act.Kind == protocol.QuickFix { + t.Errorf("unexpected quick fix action: %#v", act) + } + } + if t.Failed() { got := env.BufferText("main.go") - t.Errorf("unexpected actions %#v", actions) if got == vim1 { t.Errorf("no changes") } else { @@ -135,9 +140,12 @@ func main() { Run(t, "", func(t *testing.T, env *Env) { env.CreateBuffer("main.go", vim2) env.OrganizeImports("main.go") - actions := env.CodeAction("main.go", nil) - if len(actions) > 0 { - t.Errorf("unexpected actions %#v", actions) + + // Assert no quick fixes. 
+ for _, act := range env.CodeAction("main.go", nil) { + if act.Kind == protocol.QuickFix { + t.Errorf("unexpected quick-fix action: %#v", act) + } } }) } @@ -241,7 +249,6 @@ func TestA(t *testing.T) { // Test for golang/go#52784 func TestGoWorkImports(t *testing.T) { - testenv.NeedsGo1Point(t, 18) const pkg = ` -- go.work -- go 1.19 diff --git a/gopls/internal/regtest/misc/link_test.go b/gopls/internal/test/integration/misc/link_test.go similarity index 93% rename from gopls/internal/regtest/misc/link_test.go rename to gopls/internal/test/integration/misc/link_test.go index a8f32f31592..53b0f0818f3 100644 --- a/gopls/internal/regtest/misc/link_test.go +++ b/gopls/internal/test/integration/misc/link_test.go @@ -8,7 +8,7 @@ import ( "strings" "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + . "golang.org/x/tools/gopls/internal/test/integration" ) func TestHoverAndDocumentLink(t *testing.T) { @@ -19,9 +19,6 @@ module mod.test go 1.12 require import.test v1.2.3 --- go.sum -- -import.test v1.2.3 h1:Mu4N9BICLJFxwwn8YNg6T3frkFWW1O7evXvo0HiRjBc= -import.test v1.2.3/go.mod h1:KooCN1g237upRg7irU7F+3oADn5tVClU8YYW4I1xhMk= -- main.go -- package main @@ -45,6 +42,7 @@ const Hello = "Hello" ` WithOptions( ProxyFiles(proxy), + WriteGoSum("."), ).Run(t, program, func(t *testing.T, env *Env) { env.OpenFile("main.go") env.OpenFile("go.mod") diff --git a/gopls/internal/test/integration/misc/misc_test.go b/gopls/internal/test/integration/misc/misc_test.go new file mode 100644 index 00000000000..1567044caef --- /dev/null +++ b/gopls/internal/test/integration/misc/misc_test.go @@ -0,0 +1,65 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package misc + +import ( + "strings" + "testing" + + "golang.org/x/tools/gopls/internal/hooks" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/integration" + . 
"golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/util/bug" +) + +func TestMain(m *testing.M) { + bug.PanicOnBugs = true + integration.Main(m, hooks.Options) +} + +// TestDocumentURIFix ensures that a DocumentURI supplied by the +// client is subject to the "fixing" operation documented at +// [protocol.DocumentURI.UnmarshalText]. The details of the fixing are +// tested in the protocol package; here we aim to test only that it +// occurs at all. +func TestDocumentURIFix(t *testing.T) { + const mod = ` +-- go.mod -- +module testdata +go 1.18 + +-- a.go -- +package a + +const K = 1 +` + Run(t, mod, func(t *testing.T, env *Env) { + env.OpenFile("a.go") + loc := env.RegexpSearch("a.go", "K") + path := strings.TrimPrefix(string(loc.URI), "file://") // (absolute) + + check := func() { + t.Helper() + t.Logf("URI = %s", loc.URI) + content, _ := env.Hover(loc) // must succeed + if content == nil || !strings.Contains(content.Value, "const K") { + t.Errorf("wrong content: %#v", content) + } + } + + // Regular URI (e.g. file://$TMPDIR/TestDocumentURIFix/default/work/a.go) + check() + + // URL-encoded path (e.g. contains %2F instead of last /) + loc.URI = protocol.DocumentURI("file://" + strings.Replace(path, "/a.go", "%2Fa.go", 1)) + check() + + // We intentionally do not test further cases (e.g. + // file:// without a third slash) as it would quickly + // get bogged down in irrelevant details of the + // fake editor's own handling of URIs. + }) +} diff --git a/gopls/internal/regtest/misc/multiple_adhoc_test.go b/gopls/internal/test/integration/misc/multiple_adhoc_test.go similarity index 94% rename from gopls/internal/regtest/misc/multiple_adhoc_test.go rename to gopls/internal/test/integration/misc/multiple_adhoc_test.go index 981b74efca0..aba7e987968 100644 --- a/gopls/internal/regtest/misc/multiple_adhoc_test.go +++ b/gopls/internal/test/integration/misc/multiple_adhoc_test.go @@ -7,7 +7,7 @@ package misc import ( "testing" - . 
"golang.org/x/tools/gopls/internal/lsp/regtest" + . "golang.org/x/tools/gopls/internal/test/integration" ) func TestMultipleAdHocPackages(t *testing.T) { diff --git a/gopls/internal/test/integration/misc/prompt_test.go b/gopls/internal/test/integration/misc/prompt_test.go new file mode 100644 index 00000000000..26c0e9322ac --- /dev/null +++ b/gopls/internal/test/integration/misc/prompt_test.go @@ -0,0 +1,232 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package misc + +import ( + "fmt" + "os" + "path/filepath" + "regexp" + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/server" + . "golang.org/x/tools/gopls/internal/test/integration" +) + +// Test that gopls prompts for telemetry only when it is supposed to. +func TestTelemetryPrompt_Conditions(t *testing.T) { + const src = ` +-- go.mod -- +module mod.com + +go 1.12 +-- main.go -- +package main + +func main() { +} +` + + for _, enabled := range []bool{true, false} { + t.Run(fmt.Sprintf("telemetryPrompt=%v", enabled), func(t *testing.T) { + for _, initialMode := range []string{"", "local", "off", "on"} { + t.Run(fmt.Sprintf("initial_mode=%s", initialMode), func(t *testing.T) { + modeFile := filepath.Join(t.TempDir(), "mode") + if initialMode != "" { + if err := os.WriteFile(modeFile, []byte(initialMode), 0666); err != nil { + t.Fatal(err) + } + } + WithOptions( + Modes(Default), // no need to run this in all modes + EnvVars{ + server.GoplsConfigDirEnvvar: t.TempDir(), + server.FakeTelemetryModefileEnvvar: modeFile, + }, + Settings{ + "telemetryPrompt": enabled, + }, + ).Run(t, src, func(t *testing.T, env *Env) { + wantPrompt := enabled && (initialMode == "" || initialMode == "local") + expectation := ShownMessageRequest(".*Would you like to enable Go telemetry?") + if !wantPrompt { + expectation 
= Not(expectation) + } + env.OnceMet( + CompletedWork(server.TelemetryPromptWorkTitle, 1, true), + expectation, + ) + }) + }) + } + }) + } +} + +// Test that responding to the telemetry prompt results in the expected state. +func TestTelemetryPrompt_Response(t *testing.T) { + const src = ` +-- go.mod -- +module mod.com + +go 1.12 +-- main.go -- +package main + +func main() { +} +` + + tests := []struct { + name string // subtest name + response string // response to choose for the telemetry dialog + wantMode string // resulting telemetry mode + wantMsg string // substring contained in the follow-up popup (if empty, no popup is expected) + }{ + {"yes", server.TelemetryYes, "on", "uploading is now enabled"}, + {"no", server.TelemetryNo, "", ""}, + {"empty", "", "", ""}, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modeFile := filepath.Join(t.TempDir(), "mode") + msgRE := regexp.MustCompile(".*Would you like to enable Go telemetry?") + respond := func(m *protocol.ShowMessageRequestParams) (*protocol.MessageActionItem, error) { + if msgRE.MatchString(m.Message) { + for _, item := range m.Actions { + if item.Title == test.response { + return &item, nil + } + } + if test.response != "" { + t.Errorf("action item %q not found", test.response) + } + } + return nil, nil + } + WithOptions( + Modes(Default), // no need to run this in all modes + EnvVars{ + server.GoplsConfigDirEnvvar: t.TempDir(), + server.FakeTelemetryModefileEnvvar: modeFile, + }, + Settings{ + "telemetryPrompt": true, + }, + MessageResponder(respond), + ).Run(t, src, func(t *testing.T, env *Env) { + var postConditions []Expectation + if test.wantMsg != "" { + postConditions = append(postConditions, ShownMessage(test.wantMsg)) + } + env.OnceMet( + CompletedWork(server.TelemetryPromptWorkTitle, 1, true), + postConditions..., + ) + gotMode := "" + if contents, err := os.ReadFile(modeFile); err == nil { + gotMode = string(contents) + } else if !os.IsNotExist(err) { + t.Fatal(err) 
+ } + if gotMode != test.wantMode { + t.Errorf("after prompt, mode=%s, want %s", gotMode, test.wantMode) + } + }) + }) + } +} + +// Test that we stop asking about telemetry after the user ignores the question +// 5 times. +func TestTelemetryPrompt_GivingUp(t *testing.T) { + const src = ` +-- go.mod -- +module mod.com + +go 1.12 +-- main.go -- +package main + +func main() { +} +` + + // For this test, we want to share state across gopls sessions. + modeFile := filepath.Join(t.TempDir(), "mode") + configDir := t.TempDir() + + const maxPrompts = 5 // internal prompt limit defined by gopls + + for i := 0; i < maxPrompts+1; i++ { + WithOptions( + Modes(Default), // no need to run this in all modes + EnvVars{ + server.GoplsConfigDirEnvvar: configDir, + server.FakeTelemetryModefileEnvvar: modeFile, + }, + Settings{ + "telemetryPrompt": true, + }, + ).Run(t, src, func(t *testing.T, env *Env) { + wantPrompt := i < maxPrompts + expectation := ShownMessageRequest(".*Would you like to enable Go telemetry?") + if !wantPrompt { + expectation = Not(expectation) + } + env.OnceMet( + CompletedWork(server.TelemetryPromptWorkTitle, 1, true), + expectation, + ) + }) + } +} + +// Test that gopls prompts for telemetry only when it is supposed to. +func TestTelemetryPrompt_Conditions2(t *testing.T) { + const src = ` +-- go.mod -- +module mod.com + +go 1.12 +-- main.go -- +package main + +func main() { +} +` + modeFile := filepath.Join(t.TempDir(), "mode") + WithOptions( + Modes(Default), // no need to run this in all modes + EnvVars{ + server.GoplsConfigDirEnvvar: t.TempDir(), + server.FakeTelemetryModefileEnvvar: modeFile, + }, + Settings{ + // off because we are testing + // if we can trigger the prompt with command. 
+ "telemetryPrompt": false, + }, + ).Run(t, src, func(t *testing.T, env *Env) { + cmd, err := command.NewMaybePromptForTelemetryCommand("prompt") + if err != nil { + t.Fatal(err) + } + var result error + env.ExecuteCommand(&protocol.ExecuteCommandParams{ + Command: cmd.Command, + }, &result) + if result != nil { + t.Fatal(err) + } + expectation := ShownMessageRequest(".*Would you like to enable Go telemetry?") + env.OnceMet( + CompletedWork(server.TelemetryPromptWorkTitle, 2, true), + expectation, + ) + }) +} diff --git a/gopls/internal/regtest/misc/references_test.go b/gopls/internal/test/integration/misc/references_test.go similarity index 95% rename from gopls/internal/regtest/misc/references_test.go rename to gopls/internal/test/integration/misc/references_test.go index 262284abc3d..73e4fffe3b8 100644 --- a/gopls/internal/regtest/misc/references_test.go +++ b/gopls/internal/test/integration/misc/references_test.go @@ -14,10 +14,9 @@ import ( "testing" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/regtest" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/integration" + . "golang.org/x/tools/gopls/internal/test/integration" ) func TestStdlibReferences(t *testing.T) { @@ -88,7 +87,7 @@ func _() { if err != nil { t.Fatalf("references on (*s).Error failed: %v", err) } - // TODO(adonovan): this test is crying out for marker support in regtests. + // TODO(adonovan): this test is crying out for marker support in integration tests. 
var buf strings.Builder for _, ref := range refs { fmt.Fprintf(&buf, "%s %s\n", env.Sandbox.Workdir.URIToPath(ref.URI), ref.Range) @@ -103,7 +102,6 @@ func _() { } func TestDefsRefsBuiltins(t *testing.T) { - testenv.NeedsGo1Point(t, 17) // for unsafe.{Add,Slice} // TODO(adonovan): add unsafe.{SliceData,String,StringData} in later go versions. const files = ` -- go.mod -- @@ -362,8 +360,6 @@ func _() { // implementations in vendored modules were not found. The actual fix // was the same as for #55995; see TestVendoringInvalidatesMetadata. func TestImplementationsInVendor(t *testing.T) { - t.Skip("golang/go#56169: file watching does not capture vendor dirs") - const proxy = ` -- other.com/b@v1.0.0/go.mod -- module other.com/b @@ -417,9 +413,7 @@ var _ b.B checkVendor(env.Implementations(refLoc), false) // Run 'go mod vendor' outside the editor. - if err := env.Sandbox.RunGoCommand(env.Ctx, ".", "mod", []string{"vendor"}, nil, true); err != nil { - t.Fatalf("go mod vendor: %v", err) - } + env.RunGoCommand("mod", "vendor") // Synchronize changes to watched files. env.Await(env.DoneWithChangeWatchedFiles()) @@ -449,7 +443,6 @@ var _ b.B // a <command-line-arguments> package for packages that otherwise // wouldn't be found from the go.work file. func TestReferencesFromWorkspacePackages59674(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // for go.work support const src = ` -- a/go.mod -- module example.com/a @@ -563,7 +556,7 @@ func (*MyErrorPtr) Error() string { return "" } // relative file name and line number of each location. // Duplicates are not removed. // Standard library filenames are abstracted for robustness. 
-func fileLocations(env *regtest.Env, locs []protocol.Location) []string { +func fileLocations(env *integration.Env, locs []protocol.Location) []string { got := make([]string, 0, len(locs)) for _, loc := range locs { path := env.Sandbox.Workdir.URIToPath(loc.URI) // (slashified) diff --git a/gopls/internal/regtest/misc/rename_test.go b/gopls/internal/test/integration/misc/rename_test.go similarity index 96% rename from gopls/internal/regtest/misc/rename_test.go rename to gopls/internal/test/integration/misc/rename_test.go index ebb02609db9..e3116e1dd2a 100644 --- a/gopls/internal/regtest/misc/rename_test.go +++ b/gopls/internal/test/integration/misc/rename_test.go @@ -9,10 +9,9 @@ import ( "strings" "testing" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" - "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/compare" + . "golang.org/x/tools/gopls/internal/test/integration" ) func TestPrepareRenameMainPackage(t *testing.T) { @@ -52,7 +51,6 @@ func main() { // Test case for golang/go#56227 func TestRenameWithUnsafeSlice(t *testing.T) { - testenv.NeedsGo1Point(t, 17) // unsafe.Slice was added in Go 1.17 const files = ` -- go.mod -- module mod.com @@ -111,7 +109,6 @@ func main() { } func TestPrepareRenameFailWithUnknownModule(t *testing.T) { - testenv.NeedsGo1Point(t, 17) const files = ` go 1.14 -- lib/a.go -- @@ -146,7 +143,6 @@ func main() { // This test ensures that each import of a renamed package // is also renamed if it would otherwise create a conflict. 
func TestRenamePackageWithConflicts(t *testing.T) { - testenv.NeedsGo1Point(t, 17) const files = ` -- go.mod -- module mod.com @@ -193,7 +189,6 @@ func main() { } func TestRenamePackageWithAlias(t *testing.T) { - testenv.NeedsGo1Point(t, 17) const files = ` -- go.mod -- module mod.com @@ -233,7 +228,6 @@ func main() { } func TestRenamePackageWithDifferentDirectoryPath(t *testing.T) { - testenv.NeedsGo1Point(t, 17) const files = ` -- go.mod -- module mod.com @@ -273,7 +267,6 @@ func main() { } func TestRenamePackage(t *testing.T) { - testenv.NeedsGo1Point(t, 17) const files = ` -- go.mod -- module mod.com @@ -425,7 +418,6 @@ package b } func TestRenamePackage_Tests(t *testing.T) { - testenv.NeedsGo1Point(t, 17) const files = ` -- go.mod -- module mod.com @@ -494,7 +486,6 @@ func main() { } func TestRenamePackage_NestedModule(t *testing.T) { - testenv.NeedsGo1Point(t, 18) const files = ` -- go.work -- go 1.18 @@ -576,7 +567,6 @@ func main() { } func TestRenamePackage_DuplicateImport(t *testing.T) { - testenv.NeedsGo1Point(t, 17) const files = ` -- go.mod -- module mod.com @@ -618,7 +608,6 @@ func main() { } func TestRenamePackage_DuplicateBlankImport(t *testing.T) { - testenv.NeedsGo1Point(t, 17) const files = ` -- go.mod -- module mod.com @@ -783,7 +772,6 @@ const _ = bar.Bar + baz.Baz + foox.Foo } func TestRenamePackage_Nesting(t *testing.T) { - testenv.NeedsGo1Point(t, 17) const files = ` -- go.mod -- module mod.com @@ -833,7 +821,6 @@ const C = libx.A + nested.B } func TestRenamePackage_InvalidName(t *testing.T) { - testenv.NeedsGo1Point(t, 17) const files = ` -- go.mod -- module mod.com @@ -860,7 +847,6 @@ const A = 1 + nested.B } func TestRenamePackage_InternalPackage(t *testing.T) { - testenv.NeedsGo1Point(t, 17) const files = ` -- go.mod -- module mod.com diff --git a/gopls/internal/test/integration/misc/semantictokens_test.go b/gopls/internal/test/integration/misc/semantictokens_test.go new file mode 100644 index 00000000000..96d35bf74f1 --- /dev/null +++ 
b/gopls/internal/test/integration/misc/semantictokens_test.go @@ -0,0 +1,239 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package misc + +import ( + "fmt" + "strings" + "testing" + + "github.com/google/go-cmp/cmp" + "golang.org/x/tools/gopls/internal/protocol" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/integration/fake" +) + +func TestBadURICrash_VSCodeIssue1498(t *testing.T) { + const src = ` +-- go.mod -- +module example.com + +go 1.12 + +-- main.go -- +package main + +func main() {} + +` + WithOptions( + Modes(Default), + Settings{"allExperiments": true}, + ).Run(t, src, func(t *testing.T, env *Env) { + params := &protocol.SemanticTokensParams{} + const badURI = "http://foo/" + params.TextDocument.URI = badURI + // This call panicked in the past: golang/vscode-go#1498. + _, err := env.Editor.Server.SemanticTokensFull(env.Ctx, params) + + // Requests to an invalid URI scheme now result in an LSP error. 
+ got := fmt.Sprint(err) + want := `DocumentURI scheme is not 'file': http://foo` + if !strings.Contains(got, want) { + t.Errorf("SemanticTokensFull error is %v, want substring %q", got, want) + } + }) +} + +// fix bug involving type parameters and regular parameters +// (golang/vscode-go#2527) +func TestSemantic_2527(t *testing.T) { + // these are the expected types of identifiers in text order + want := []fake.SemanticToken{ + {Token: "package", TokenType: "keyword"}, + {Token: "foo", TokenType: "namespace"}, + {Token: "// Deprecated (for testing)", TokenType: "comment"}, + {Token: "func", TokenType: "keyword"}, + {Token: "Add", TokenType: "function", Mod: "definition deprecated"}, + {Token: "T", TokenType: "typeParameter", Mod: "definition"}, + {Token: "int", TokenType: "type", Mod: "defaultLibrary"}, + {Token: "target", TokenType: "parameter", Mod: "definition"}, + {Token: "T", TokenType: "typeParameter"}, + {Token: "l", TokenType: "parameter", Mod: "definition"}, + {Token: "T", TokenType: "typeParameter"}, + {Token: "T", TokenType: "typeParameter"}, + {Token: "return", TokenType: "keyword"}, + {Token: "append", TokenType: "function", Mod: "defaultLibrary"}, + {Token: "l", TokenType: "parameter"}, + {Token: "target", TokenType: "parameter"}, + {Token: "for", TokenType: "keyword"}, + {Token: "range", TokenType: "keyword"}, + {Token: "l", TokenType: "parameter"}, + {Token: "// test coverage", TokenType: "comment"}, + {Token: "return", TokenType: "keyword"}, + {Token: "nil", TokenType: "variable", Mod: "readonly defaultLibrary"}, + } + src := ` +-- go.mod -- +module example.com + +go 1.19 +-- main.go -- +package foo +// Deprecated (for testing) +func Add[T int](target T, l []T) []T { + return append(l, target) + for range l {} // test coverage + return nil +} +` + WithOptions( + Modes(Default), + Settings{"semanticTokens": true}, + ).Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + env.AfterChange( + Diagnostics(env.AtRegexp("main.go", "for 
range")), + ) + seen := env.SemanticTokensFull("main.go") + if x := cmp.Diff(want, seen); x != "" { + t.Errorf("Semantic tokens do not match (-want +got):\n%s", x) + } + }) + +} + +// fix inconsistency in TypeParameters +// https://github.com/golang/go/issues/57619 +func TestSemantic_57619(t *testing.T) { + src := ` +-- go.mod -- +module example.com + +go 1.19 +-- main.go -- +package foo +type Smap[K int, V any] struct { + Store map[K]V +} +func (s *Smap[K, V]) Get(k K) (V, bool) { + v, ok := s.Store[k] + return v, ok +} +func New[K int, V any]() Smap[K, V] { + return Smap[K, V]{Store: make(map[K]V)} +} +` + WithOptions( + Modes(Default), + Settings{"semanticTokens": true}, + ).Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + seen := env.SemanticTokensFull("main.go") + for i, s := range seen { + if (s.Token == "K" || s.Token == "V") && s.TokenType != "typeParameter" { + t.Errorf("%d: expected K and V to be type parameters, but got %v", i, s) + } + } + }) +} + +func TestSemanticGoDirectives(t *testing.T) { + src := ` +-- go.mod -- +module example.com + +go 1.19 +-- main.go -- +package foo + +//go:linkname now time.Now +func now() + +//go:noinline +func foo() {} + +// Mentioning go:noinline should not tokenize. 
+ +//go:notadirective +func bar() {} +` + want := []fake.SemanticToken{ + {Token: "package", TokenType: "keyword"}, + {Token: "foo", TokenType: "namespace"}, + + {Token: "//", TokenType: "comment"}, + {Token: "go:linkname", TokenType: "namespace"}, + {Token: "now time.Now", TokenType: "comment"}, + {Token: "func", TokenType: "keyword"}, + {Token: "now", TokenType: "function", Mod: "definition"}, + + {Token: "//", TokenType: "comment"}, + {Token: "go:noinline", TokenType: "namespace"}, + {Token: "func", TokenType: "keyword"}, + {Token: "foo", TokenType: "function", Mod: "definition"}, + + {Token: "// Mentioning go:noinline should not tokenize.", TokenType: "comment"}, + + {Token: "//go:notadirective", TokenType: "comment"}, + {Token: "func", TokenType: "keyword"}, + {Token: "bar", TokenType: "function", Mod: "definition"}, + } + + WithOptions( + Modes(Default), + Settings{"semanticTokens": true}, + ).Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + seen := env.SemanticTokensFull("main.go") + if x := cmp.Diff(want, seen); x != "" { + t.Errorf("Semantic tokens do not match (-want +got):\n%s", x) + } + }) +} + +// Make sure no zero-length tokens occur +func TestSemantic_65254(t *testing.T) { + src := ` +-- go.mod -- +module example.com + +go 1.21 +-- main.go -- +package main + +/* a comment with an + +empty line +*/ + +const bad = ` + + src += "`foo" + ` + ` + "bar`" + want := []fake.SemanticToken{ + {Token: "package", TokenType: "keyword"}, + {Token: "main", TokenType: "namespace"}, + {Token: "/* a comment with an", TokenType: "comment"}, + // --- Note that the zero length line does not show up + {Token: "empty line", TokenType: "comment"}, + {Token: "*/", TokenType: "comment"}, + {Token: "const", TokenType: "keyword"}, + {Token: "bad", TokenType: "variable", Mod: "definition readonly"}, + {Token: "`foo", TokenType: "string"}, + // --- Note the zero length line does not show up + {Token: "\tbar`", TokenType: "string"}, + } + WithOptions( + 
Modes(Default), + Settings{"semanticTokens": true}, + ).Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + seen := env.SemanticTokensFull("main.go") + if x := cmp.Diff(want, seen); x != "" { + t.Errorf("Semantic tokens do not match (-want +got):\n%s", x) + } + }) +} diff --git a/gopls/internal/test/integration/misc/settings_test.go b/gopls/internal/test/integration/misc/settings_test.go new file mode 100644 index 00000000000..c367f9fc357 --- /dev/null +++ b/gopls/internal/test/integration/misc/settings_test.go @@ -0,0 +1,32 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package misc + +import ( + "testing" + + . "golang.org/x/tools/gopls/internal/test/integration" +) + +func TestEmptyDirectoryFilters_Issue51843(t *testing.T) { + const src = ` +-- go.mod -- +module mod.com + +go 1.12 +-- main.go -- +package main + +func main() { +} +` + + WithOptions( + Settings{"directoryFilters": []string{""}}, + ).Run(t, src, func(t *testing.T, env *Env) { + // No need to do anything. Issue golang/go#51843 is triggered by the empty + // directory filter above. + }) +} diff --git a/gopls/internal/regtest/misc/shared_test.go b/gopls/internal/test/integration/misc/shared_test.go similarity index 94% rename from gopls/internal/regtest/misc/shared_test.go rename to gopls/internal/test/integration/misc/shared_test.go index 410a8d32730..7bcfd918dd0 100644 --- a/gopls/internal/regtest/misc/shared_test.go +++ b/gopls/internal/test/integration/misc/shared_test.go @@ -7,8 +7,8 @@ package misc import ( "testing" - "golang.org/x/tools/gopls/internal/lsp/fake" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/integration/fake" ) // Smoke test that simultaneous editing sessions in the same workspace works. 
diff --git a/gopls/internal/regtest/misc/signature_help_test.go b/gopls/internal/test/integration/misc/signature_help_test.go similarity index 92% rename from gopls/internal/regtest/misc/signature_help_test.go rename to gopls/internal/test/integration/misc/signature_help_test.go index fd9f4f07adb..8dffedf48e0 100644 --- a/gopls/internal/regtest/misc/signature_help_test.go +++ b/gopls/internal/test/integration/misc/signature_help_test.go @@ -8,8 +8,8 @@ import ( "testing" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + "golang.org/x/tools/gopls/internal/protocol" + . "golang.org/x/tools/gopls/internal/test/integration" ) func TestSignatureHelpInNonWorkspacePackage(t *testing.T) { diff --git a/gopls/internal/regtest/misc/staticcheck_test.go b/gopls/internal/test/integration/misc/staticcheck_test.go similarity index 78% rename from gopls/internal/regtest/misc/staticcheck_test.go rename to gopls/internal/test/integration/misc/staticcheck_test.go index fa049ab0e5f..bb3aa200dae 100644 --- a/gopls/internal/regtest/misc/staticcheck_test.go +++ b/gopls/internal/test/integration/misc/staticcheck_test.go @@ -5,15 +5,22 @@ package misc import ( + "os" + "strings" "testing" "golang.org/x/tools/internal/testenv" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + . "golang.org/x/tools/gopls/internal/test/integration" ) func TestStaticcheckGenerics(t *testing.T) { - testenv.NeedsGo1Point(t, 19) // generics were introduced in Go 1.18, staticcheck requires go1.19+ + testenv.NeedsGo1Point(t, 20) // staticcheck requires go1.20+ + + // TODO(golang/go#65249): re-enable and fix this test with gotypesalias=1. 
+ if strings.Contains(os.Getenv("GODEBUG"), "gotypesalias=1") { + t.Skipf("staticcheck needs updates for materialized aliases") + } const files = ` -- go.mod -- @@ -78,7 +85,13 @@ var FooErr error = errors.New("foo") // Test for golang/go#56270: an analysis with related info should not panic if // analysis.RelatedInformation.End is not set. func TestStaticcheckRelatedInfo(t *testing.T) { - testenv.NeedsGo1Point(t, 19) // staticcheck is only supported at Go 1.19+ + testenv.NeedsGo1Point(t, 20) // staticcheck is only supported at Go 1.20+ + + // TODO(golang/go#65249): re-enable and fix this test with gotypesalias=1. + if strings.Contains(os.Getenv("GODEBUG"), "gotypesalias=1") { + t.Skipf("staticcheck needs updates for materialized aliases") + } + const files = ` -- go.mod -- module mod.test diff --git a/gopls/internal/test/integration/misc/vendor_test.go b/gopls/internal/test/integration/misc/vendor_test.go new file mode 100644 index 00000000000..f3bed9082b7 --- /dev/null +++ b/gopls/internal/test/integration/misc/vendor_test.go @@ -0,0 +1,102 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package misc + +import ( + "testing" + + . 
"golang.org/x/tools/gopls/internal/test/integration" + + "golang.org/x/tools/gopls/internal/protocol" +) + +const basicProxy = ` +-- golang.org/x/hello@v1.2.3/go.mod -- +module golang.org/x/hello + +go 1.14 +-- golang.org/x/hello@v1.2.3/hi/hi.go -- +package hi + +var Goodbye error +` + +func TestInconsistentVendoring(t *testing.T) { + const pkgThatUsesVendoring = ` +-- go.mod -- +module mod.com + +go 1.14 + +require golang.org/x/hello v1.2.3 +-- go.sum -- +golang.org/x/hello v1.2.3 h1:EcMp5gSkIhaTkPXp8/3+VH+IFqTpk3ZbpOhqk0Ncmho= +golang.org/x/hello v1.2.3/go.mod h1:WW7ER2MRNXWA6c8/4bDIek4Hc/+DofTrMaQQitGXcco= +-- vendor/modules.txt -- +-- a/a1.go -- +package a + +import "golang.org/x/hello/hi" + +func _() { + _ = hi.Goodbye + var q int // hardcode a diagnostic +} +` + WithOptions( + Modes(Default), + ProxyFiles(basicProxy), + ).Run(t, pkgThatUsesVendoring, func(t *testing.T, env *Env) { + env.OpenFile("a/a1.go") + d := &protocol.PublishDiagnosticsParams{} + env.AfterChange( + Diagnostics(env.AtRegexp("go.mod", "module mod.com"), WithMessage("Inconsistent vendoring")), + ReadDiagnostics("go.mod", d), + ) + env.ApplyQuickFixes("go.mod", d.Diagnostics) + + env.AfterChange( + Diagnostics(env.AtRegexp("a/a1.go", `q int`), WithMessage("not used")), + ) + }) +} + +func TestWindowsVendoring_Issue56291(t *testing.T) { + const src = ` +-- go.mod -- +module mod.com + +go 1.14 + +require golang.org/x/hello v1.2.3 +-- go.sum -- +golang.org/x/hello v1.2.3 h1:EcMp5gSkIhaTkPXp8/3+VH+IFqTpk3ZbpOhqk0Ncmho= +golang.org/x/hello v1.2.3/go.mod h1:WW7ER2MRNXWA6c8/4bDIek4Hc/+DofTrMaQQitGXcco= +-- main.go -- +package main + +import "golang.org/x/hello/hi" + +func main() { + _ = hi.Goodbye +} +` + WithOptions( + Modes(Default), + ProxyFiles(basicProxy), + ).Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + env.AfterChange(NoDiagnostics()) + env.RunGoCommand("mod", "tidy") + env.RunGoCommand("mod", "vendor") + env.AfterChange(NoDiagnostics()) + 
env.RegexpReplace("main.go", `import "golang.org/x/hello/hi"`, "") + env.AfterChange( + Diagnostics(env.AtRegexp("main.go", "hi.Goodbye")), + ) + env.SaveBuffer("main.go") + env.AfterChange(NoDiagnostics()) + }) +} diff --git a/gopls/internal/regtest/misc/vuln_test.go b/gopls/internal/test/integration/misc/vuln_test.go similarity index 90% rename from gopls/internal/regtest/misc/vuln_test.go rename to gopls/internal/test/integration/misc/vuln_test.go index 40baf8cb017..f47d06ac7af 100644 --- a/gopls/internal/regtest/misc/vuln_test.go +++ b/gopls/internal/test/integration/misc/vuln_test.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - package misc import ( @@ -13,18 +10,17 @@ import ( "sort" "strings" "testing" + "time" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/test/compare" + . "golang.org/x/tools/gopls/internal/test/integration" "golang.org/x/tools/gopls/internal/vulncheck" - "golang.org/x/tools/gopls/internal/vulncheck/scan" "golang.org/x/tools/gopls/internal/vulncheck/vulntest" - "golang.org/x/tools/internal/testenv" ) func TestRunGovulncheckError(t *testing.T) { @@ -164,12 +160,11 @@ references: ` func TestRunGovulncheckStd(t *testing.T) { - testenv.NeedsGo1Point(t, 18) const files = ` -- go.mod -- module mod.com -go 1.18 +go 1.19 -- main.go -- package main @@ -194,9 +189,9 @@ func main() { // Let the analyzer read vulnerabilities data from the testdata/vulndb. 
"GOVULNDB": db.URI(), // When fetchinging stdlib package vulnerability info, - // behave as if our go version is go1.18 for this testing. + // behave as if our go version is go1.19 for this testing. // The default behavior is to run `go env GOVERSION` (which isn't mutable env var). - scan.GoVersionForVulnTest: "go1.18", + cache.GoVersionForVulnTest: "go1.19", "_GOPLS_TEST_BINARY_RUN_AS_GOPLS": "true", // needed to run `gopls vulncheck`. }, Settings{ @@ -207,28 +202,9 @@ func main() { ).Run(t, files, func(t *testing.T, env *Env) { env.OpenFile("go.mod") - // Test CodeLens is present. - lenses := env.CodeLens("go.mod") - - const wantCommand = "gopls." + string(command.RunGovulncheck) - var gotCodelens = false - var lens protocol.CodeLens - for _, l := range lenses { - if l.Command.Command == wantCommand { - gotCodelens = true - lens = l - break - } - } - if !gotCodelens { - t.Fatal("got no vulncheck codelens") - } // Run Command included in the codelens. var result command.RunVulncheckResult - env.ExecuteCommand(&protocol.ExecuteCommandParams{ - Command: lens.Command.Command, - Arguments: lens.Command.Arguments, - }, &result) + env.ExecuteCodeLensCommand("go.mod", command.RunGovulncheck, &result) env.OnceMet( CompletedProgress(result.Token, nil), @@ -239,9 +215,7 @@ func main() { "go.mod": {IDs: []string{"GOSTDLIB"}, Mode: vulncheck.ModeGovulncheck}}) }) } - func TestFetchVulncheckResultStd(t *testing.T) { - testenv.NeedsGo1Point(t, 18) const files = ` -- go.mod -- module mod.com @@ -272,7 +246,7 @@ func main() { "GOVULNDB": db.URI(), // When fetchinging stdlib package vulnerability info, // behave as if our go version is go1.18 for this testing. - scan.GoVersionForVulnTest: "go1.18", + cache.GoVersionForVulnTest: "go1.18", "_GOPLS_TEST_BINARY_RUN_AS_GOPLS": "true", // needed to run `gopls vulncheck`. 
}, Settings{"ui.diagnostic.vulncheck": "Imports"}, @@ -325,7 +299,7 @@ func testFetchVulncheckResult(t *testing.T, env *Env, want map[string]fetchVulnc ids = append(ids, id) } sort.Strings(ids) - modfile := env.Sandbox.Workdir.RelPath(k.SpanURI().Filename()) + modfile := env.Sandbox.Workdir.RelPath(k.Path()) got[modfile] = fetchVulncheckResult{ IDs: ids, Mode: r.Mode, @@ -457,7 +431,7 @@ func (v VulnData) Vuln1() {} func (v VulnData) Vuln2() {} ` -func vulnTestEnv(vulnsDB, proxyData string) (*vulntest.DB, []RunOption, error) { +func vulnTestEnv(proxyData string) (*vulntest.DB, []RunOption, error) { db, err := vulntest.NewDatabase(context.Background(), []byte(vulnsData)) if err != nil { return nil, nil, nil @@ -473,7 +447,7 @@ func vulnTestEnv(vulnsDB, proxyData string) (*vulntest.DB, []RunOption, error) { // When fetching stdlib package vulnerability info, // behave as if our go version is go1.18 for this testing. // The default behavior is to run `go env GOVERSION` (which isn't mutable env var). - scan.GoVersionForVulnTest: "go1.18", + cache.GoVersionForVulnTest: "go1.18", "_GOPLS_TEST_BINARY_RUN_AS_GOPLS": "true", // needed to run `gopls vulncheck`. 
"GOSUMDB": "off", } @@ -481,9 +455,7 @@ func vulnTestEnv(vulnsDB, proxyData string) (*vulntest.DB, []RunOption, error) { } func TestRunVulncheckPackageDiagnostics(t *testing.T) { - testenv.NeedsGo1Point(t, 18) - - db, opts0, err := vulnTestEnv(vulnsData, proxy1) + db, opts0, err := vulnTestEnv(proxy1) if err != nil { t.Fatal(err) } @@ -511,7 +483,7 @@ func TestRunVulncheckPackageDiagnostics(t *testing.T) { { msg: "golang.org/amod has known vulnerabilities GO-2022-01, GO-2022-03.", severity: protocol.SeverityInformation, - source: string(source.Vulncheck), + source: string(cache.Vulncheck), codeActions: []string{ "Run govulncheck to verify", "Upgrade to v1.0.6", @@ -531,7 +503,7 @@ func TestRunVulncheckPackageDiagnostics(t *testing.T) { { msg: "golang.org/bmod has a vulnerability GO-2022-02.", severity: protocol.SeverityInformation, - source: string(source.Vulncheck), + source: string(cache.Vulncheck), codeActions: []string{ "Run govulncheck to verify", }, @@ -622,15 +594,48 @@ func TestRunVulncheckPackageDiagnostics(t *testing.T) { } } +// TestRunGovulncheck_Expiry checks that govulncheck results expire after a +// certain amount of time. +func TestRunGovulncheck_Expiry(t *testing.T) { + // For this test, set the max age to a duration smaller than the sleep below. + defer func(prev time.Duration) { + cache.MaxGovulncheckResultAge = prev + }(cache.MaxGovulncheckResultAge) + cache.MaxGovulncheckResultAge = 99 * time.Millisecond + + db, opts0, err := vulnTestEnv(proxy1) + if err != nil { + t.Fatal(err) + } + defer db.Clean() + + WithOptions(opts0...).Run(t, workspace1, func(t *testing.T, env *Env) { + env.OpenFile("go.mod") + env.OpenFile("x/x.go") + + var result command.RunVulncheckResult + env.ExecuteCodeLensCommand("go.mod", command.RunGovulncheck, &result) + env.OnceMet( + CompletedProgress(result.Token, nil), + ShownMessage("Found"), + ) + // Sleep long enough for the results to expire. 
+ time.Sleep(100 * time.Millisecond) + // Make an arbitrary edit to force re-diagnosis of the workspace. + env.RegexpReplace("x/x.go", "package x", "package x ") + env.AfterChange( + NoDiagnostics(env.AtRegexp("go.mod", "golang.org/bmod")), + ) + }) +} + func stringify(a interface{}) string { data, _ := json.Marshal(a) return string(data) } func TestRunVulncheckWarning(t *testing.T) { - testenv.NeedsGo1Point(t, 18) - - db, opts, err := vulnTestEnv(vulnsData, proxy1) + db, opts, err := vulnTestEnv(proxy1) if err != nil { t.Fatal(err) } @@ -652,7 +657,8 @@ func TestRunVulncheckWarning(t *testing.T) { ) testFetchVulncheckResult(t, env, map[string]fetchVulncheckResult{ - "go.mod": {IDs: []string{"GO-2022-01", "GO-2022-02", "GO-2022-03"}, Mode: vulncheck.ModeGovulncheck}, + // All vulnerabilities (symbol-level, import-level, module-level) are reported. + "go.mod": {IDs: []string{"GO-2022-01", "GO-2022-02", "GO-2022-03", "GO-2022-04"}, Mode: vulncheck.ModeGovulncheck}, }) env.OpenFile("x/x.go") env.OpenFile("y/y.go") @@ -663,7 +669,7 @@ func TestRunVulncheckWarning(t *testing.T) { { msg: "golang.org/amod has a vulnerability used in the code: GO-2022-01.", severity: protocol.SeverityWarning, - source: string(source.Govulncheck), + source: string(cache.Govulncheck), codeActions: []string{ "Upgrade to v1.0.4", "Upgrade to latest", @@ -673,7 +679,7 @@ func TestRunVulncheckWarning(t *testing.T) { { msg: "golang.org/amod has a vulnerability GO-2022-03 that is not used in the code.", severity: protocol.SeverityInformation, - source: string(source.Govulncheck), + source: string(cache.Govulncheck), codeActions: []string{ "Upgrade to v1.0.6", "Upgrade to latest", @@ -693,7 +699,7 @@ func TestRunVulncheckWarning(t *testing.T) { { msg: "golang.org/bmod has a vulnerability used in the code: GO-2022-02.", severity: protocol.SeverityWarning, - source: string(source.Govulncheck), + source: string(cache.Govulncheck), codeActions: []string{ "Reset govulncheck result", // no fix, but we 
should give an option to reset. }, @@ -785,9 +791,7 @@ func OK() {} // ok. ` func TestGovulncheckInfo(t *testing.T) { - testenv.NeedsGo1Point(t, 18) - - db, opts, err := vulnTestEnv(vulnsData, proxy2) + db, opts, err := vulnTestEnv(proxy2) if err != nil { t.Fatal(err) } @@ -808,7 +812,7 @@ func TestGovulncheckInfo(t *testing.T) { ReadDiagnostics("go.mod", gotDiagnostics), ) - testFetchVulncheckResult(t, env, map[string]fetchVulncheckResult{"go.mod": {IDs: []string{"GO-2022-02"}, Mode: vulncheck.ModeGovulncheck}}) + testFetchVulncheckResult(t, env, map[string]fetchVulncheckResult{"go.mod": {IDs: []string{"GO-2022-02", "GO-2022-04"}, Mode: vulncheck.ModeGovulncheck}}) // wantDiagnostics maps a module path in the require // section of a go.mod to diagnostics that will be returned // when running vulncheck. @@ -818,7 +822,7 @@ func TestGovulncheckInfo(t *testing.T) { { msg: "golang.org/bmod has a vulnerability GO-2022-02 that is not used in the code.", severity: protocol.SeverityInformation, - source: string(source.Govulncheck), + source: string(cache.Govulncheck), codeActions: []string{ "Reset govulncheck result", }, @@ -923,16 +927,6 @@ type vulnDiag struct { source string } -func (i vulnRelatedInfo) less(j vulnRelatedInfo) bool { - if i.Filename != j.Filename { - return i.Filename < j.Filename - } - if i.Line != j.Line { - return i.Line < j.Line - } - return i.Message < j.Message -} - // vulnDiagExpectation maps a module path in the require // section of a go.mod to diagnostics that will be returned // when running vulncheck. diff --git a/gopls/internal/test/integration/misc/webserver_test.go b/gopls/internal/test/integration/misc/webserver_test.go new file mode 100644 index 00000000000..f4fddf7aca0 --- /dev/null +++ b/gopls/internal/test/integration/misc/webserver_test.go @@ -0,0 +1,229 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package misc + +import ( + "html" + "io" + "net/http" + "regexp" + "strings" + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + . "golang.org/x/tools/gopls/internal/test/integration" +) + +// TestWebServer exercises the web server created on demand +// for code actions such as "View package documentation". +func TestWebServer(t *testing.T) { + const files = ` +-- go.mod -- +module example.com + +-- a/a.go -- +package a + +const A = 1 + +// EOF +` + Run(t, files, func(t *testing.T, env *Env) { + // Assert that the HTML page contains the expected const declaration. + // (We may need to make allowances for HTML markup.) + uri1 := viewPkgDoc(t, env, "a/a.go") + doc1 := get(t, uri1) + checkMatch(t, true, doc1, "const A =.*1") + + // Check that edits to the buffer (even unsaved) are + // reflected in the HTML document. + env.RegexpReplace("a/a.go", "// EOF", "func NewFunc() {}") + env.Await(env.DoneDiagnosingChanges()) + doc2 := get(t, uri1) + checkMatch(t, true, doc2, "func NewFunc") + + // TODO(adonovan): assert some basic properties of the + // HTML document using something like + // golang.org/x/pkgsite/internal/testing/htmlcheck. + + // Grab the URL in the HTML source link for NewFunc. + // (We don't have a DOM or JS interpreter so we have + // to know something of the document internals here.) + rx := regexp.MustCompile(`<h3 id='NewFunc'.*httpGET\("(.*)"\)`) + openURL := html.UnescapeString(string(rx.FindSubmatch(doc2)[1])) + + // Fetch the document. Its result isn't important, + // but it must have the side effect of another showDocument + // downcall, this time for a "file:" URL, causing the + // client editor to navigate to the source file. + t.Log("extracted /open URL", openURL) + get(t, openURL) + + // Check that that shown location is that of NewFunc. 
+ shownSource := shownDocument(t, env, "file:") + gotLoc := protocol.Location{ + URI: protocol.DocumentURI(shownSource.URI), // fishy conversion + Range: *shownSource.Selection, + } + t.Log("showDocument(source file) URL:", gotLoc) + wantLoc := env.RegexpSearch("a/a.go", `func ()NewFunc`) + if gotLoc != wantLoc { + t.Errorf("got location %v, want %v", gotLoc, wantLoc) + } + }) +} + +func TestRenderNoPanic66449(t *testing.T) { + // This particular input triggered a latent bug in doc.New + // that would corrupt the AST while filtering out unexported + // symbols such as b, causing nodeHTML to panic. + // Now it doesn't crash. + // + // We also check cross-reference anchors for all symbols. + const files = ` +-- go.mod -- +module example.com + +-- a/a.go -- +package a + +// The 'π' suffix is to elimimate spurious matches with other HTML substrings, +// in particular the random base64 secret tokens that appear in gopls URLs. + +var Vπ, vπ = 0, 0 +const Cπ, cπ = 0, 0 + +func Fπ() +func fπ() + +type Tπ int +type tπ int + +func (Tπ) Mπ() {} +func (Tπ) mπ() {} + +func (tπ) Mπ() {} +func (tπ) mπ() {} +` + Run(t, files, func(t *testing.T, env *Env) { + uri1 := viewPkgDoc(t, env, "a/a.go") + doc := get(t, uri1) + // (Ideally our code rendering would also + // eliminate unexported symbols...) + checkMatch(t, true, doc, "var Vπ, vπ = .*0.*0") + checkMatch(t, true, doc, "const Cπ, cπ = .*0.*0") + + // Unexported funcs/types/... must still be discarded. + checkMatch(t, true, doc, "Fπ") + checkMatch(t, false, doc, "fπ") + checkMatch(t, true, doc, "Tπ") + checkMatch(t, false, doc, "tπ") + + // Also, check that anchors exist (only) for exported symbols. 
+ // exported: + checkMatch(t, true, doc, "<a id='Vπ'") + checkMatch(t, true, doc, "<a id='Cπ'") + checkMatch(t, true, doc, "<h3 id='Tπ'") + checkMatch(t, true, doc, "<h3 id='Fπ'") + checkMatch(t, true, doc, "<h4 id='Tπ.Mπ'") + // unexported: + checkMatch(t, false, doc, "<a id='vπ'") + checkMatch(t, false, doc, "<a id='cπ'") + checkMatch(t, false, doc, "<h3 id='tπ'") + checkMatch(t, false, doc, "<h3 id='fπ'") + checkMatch(t, false, doc, "<h4 id='Tπ.mπ'") + checkMatch(t, false, doc, "<h4 id='tπ.Mπ'") + checkMatch(t, false, doc, "<h4 id='tπ.mπ'") + }) +} + +// viewPkgDoc invokes the "View package documention" code action in +// the specified file. It returns the URI of the document, or fails +// the test. +func viewPkgDoc(t *testing.T, env *Env, filename string) protocol.URI { + env.OpenFile(filename) + + // Invoke the "View package documentation" code + // action to start the server. + var docAction *protocol.CodeAction + actions := env.CodeAction(filename, nil) + for _, act := range actions { + if act.Title == "View package documentation" { + docAction = &act + break + } + } + if docAction == nil { + t.Fatalf("can't find action with Title 'View package documentation', only %#v", + actions) + } + + // Execute the command. + // Its side effect should be a single showDocument request. + params := &protocol.ExecuteCommandParams{ + Command: docAction.Command.Command, + Arguments: docAction.Command.Arguments, + } + var result command.DebuggingResult + env.ExecuteCommand(params, &result) + + doc := shownDocument(t, env, "http:") + if doc == nil { + t.Fatalf("no showDocument call had 'http:' prefix") + } + t.Log("showDocument(package doc) URL:", doc.URI) + return doc.URI +} + +// shownDocument returns the first shown document matching the URI prefix. +// It may be nil. +// +// TODO(adonovan): the integration test framework +// needs a way to reset ShownDocuments so they don't +// accumulate, necessitating the fragile prefix hack. 
+func shownDocument(t *testing.T, env *Env, prefix string) *protocol.ShowDocumentParams { + t.Helper() + var shown []*protocol.ShowDocumentParams + env.Await(ShownDocuments(&shown)) + var first *protocol.ShowDocumentParams + for _, sd := range shown { + if strings.HasPrefix(sd.URI, prefix) { + if first != nil { + t.Errorf("got multiple showDocument requests: %#v", shown) + break + } + first = sd + } + } + return first +} + +// get fetches the content of a document over HTTP. +func get(t *testing.T, url string) []byte { + t.Helper() + resp, err := http.Get(url) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + got, err := io.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + return got +} + +// checkMatch asserts that got matches (or doesn't match, if !want) the pattern. +func checkMatch(t *testing.T, want bool, got []byte, pattern string) { + t.Helper() + if regexp.MustCompile(pattern).Match(got) != want { + if want { + t.Errorf("input did not match wanted pattern %q; got:\n%s", pattern, got) + } else { + t.Errorf("input matched unwanted pattern %q; got:\n%s", pattern, got) + } + } +} diff --git a/gopls/internal/test/integration/misc/workspace_symbol_test.go b/gopls/internal/test/integration/misc/workspace_symbol_test.go new file mode 100644 index 00000000000..9420b146d85 --- /dev/null +++ b/gopls/internal/test/integration/misc/workspace_symbol_test.go @@ -0,0 +1,114 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package misc + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + . 
"golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/settings" +) + +func TestWorkspaceSymbolMissingMetadata(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.17 +-- a.go -- +package p + +const K1 = "a.go" +-- exclude.go -- + +//go:build exclude +// +build exclude + +package exclude + +const K2 = "exclude.go" +` + + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("a.go") + checkSymbols(env, "K", "K1") + + // Opening up an ignored file will result in an overlay with missing + // metadata, but this shouldn't break workspace symbols requests. + env.OpenFile("exclude.go") + checkSymbols(env, "K", "K1") + }) +} + +func TestWorkspaceSymbolSorting(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.17 +-- a/a.go -- +package a + +const ( + Foo = iota + FooBar + Fooey + Fooex + Fooest +) +` + + var symbolMatcher = string(settings.SymbolFastFuzzy) + WithOptions( + Settings{"symbolMatcher": symbolMatcher}, + ).Run(t, files, func(t *testing.T, env *Env) { + checkSymbols(env, "Foo", + "Foo", // prefer exact segment matches first + "FooBar", // ...followed by exact word matches + "Fooex", // shorter than Fooest, FooBar, lexically before Fooey + "Fooey", // shorter than Fooest, Foobar + "Fooest", + ) + }) +} + +func TestWorkspaceSymbolSpecialPatterns(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.17 +-- a/a.go -- +package a + +const ( + AxxBxxCxx + ABC +) +` + + var symbolMatcher = string(settings.SymbolFastFuzzy) + WithOptions( + Settings{"symbolMatcher": symbolMatcher}, + ).Run(t, files, func(t *testing.T, env *Env) { + checkSymbols(env, "ABC", "ABC", "AxxBxxCxx") + checkSymbols(env, "'ABC", "ABC") + checkSymbols(env, "^mod.com", "mod.com/a.ABC", "mod.com/a.AxxBxxCxx") + checkSymbols(env, "^mod.com Axx", "mod.com/a.AxxBxxCxx") + checkSymbols(env, "C$", "ABC") + }) +} + +func checkSymbols(env *Env, query string, want ...string) { + env.T.Helper() + var got []string 
+ for _, info := range env.Symbol(query) { + got = append(got, info.Name) + } + if diff := cmp.Diff(got, want); diff != "" { + env.T.Errorf("unexpected Symbol(%q) result (+want -got):\n%s", query, diff) + } +} diff --git a/gopls/internal/regtest/modfile/modfile_test.go b/gopls/internal/test/integration/modfile/modfile_test.go similarity index 98% rename from gopls/internal/regtest/modfile/modfile_test.go rename to gopls/internal/test/integration/modfile/modfile_test.go index 076366958e6..a71caaebe97 100644 --- a/gopls/internal/regtest/modfile/modfile_test.go +++ b/gopls/internal/test/integration/modfile/modfile_test.go @@ -10,13 +10,12 @@ import ( "strings" "testing" - "golang.org/x/tools/gopls/internal/bug" "golang.org/x/tools/gopls/internal/hooks" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" + "golang.org/x/tools/gopls/internal/test/compare" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/util/bug" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/gopls/internal/protocol" ) func TestMain(m *testing.M) { @@ -241,7 +240,6 @@ require random.org v1.2.3 // Tests that multiple missing dependencies gives good single fixes. func TestMissingDependencyFixesWithGoWork(t *testing.T) { - testenv.NeedsGo1Point(t, 18) const mod = ` -- go.work -- go 1.18 @@ -429,8 +427,9 @@ func main() { {"default", WithOptions(ProxyFiles(proxy), WorkspaceFolders("a"))}, {"nested", WithOptions(ProxyFiles(proxy))}, }.Run(t, mod, func(t *testing.T, env *Env) { - env.OnceMet( - InitialWorkspaceLoad, + // With zero-config gopls, we must open a/main.go to have a View including a/go.mod. 
+ env.OpenFile("a/main.go") + env.AfterChange( Diagnostics(env.AtRegexp("a/go.mod", "require")), ) env.RunGoCommandInDir("a", "mod", "tidy") @@ -493,8 +492,8 @@ var _ = blah.Name env.AfterChange( // We would like for the error to appear in the v2 module, but // as of writing non-workspace packages are not diagnosed. - Diagnostics(env.AtRegexp("a/main.go", `"example.com/blah/v2"`), WithMessage("cannot find module providing")), - Diagnostics(env.AtRegexp("a/go.mod", `require example.com/blah/v2`), WithMessage("cannot find module providing")), + Diagnostics(env.AtRegexp("a/main.go", `"example.com/blah/v2"`), WithMessage("no required module provides")), + Diagnostics(env.AtRegexp("a/go.mod", `require example.com/blah/v2`), WithMessage("no required module provides")), ReadDiagnostics("a/go.mod", &modDiags), ) @@ -701,7 +700,6 @@ func main() { } func TestMultiModuleModDiagnostics(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // uses go.work const mod = ` -- go.work -- go 1.18 @@ -989,7 +987,7 @@ require random.com v1.2.3 ` var diagnostics []protocol.Diagnostic for _, d := range d.Diagnostics { - if d.Range.Start.Line != uint32(pos.Line) { + if d.Range.Start.Line != pos.Line { continue } diagnostics = append(diagnostics, d) @@ -1202,7 +1200,6 @@ import "b.mod.com/aaa" import "fmt" func main() {fmt.Println(aaa.A)} ` - testenv.NeedsGo1Point(t, 18) WithOptions( ProxyFiles(proxy), Modes(Default), diff --git a/gopls/internal/regtest/modfile/tempmodfile_test.go b/gopls/internal/test/integration/modfile/tempmodfile_test.go similarity index 94% rename from gopls/internal/regtest/modfile/tempmodfile_test.go rename to gopls/internal/test/integration/modfile/tempmodfile_test.go index 8b0926ab422..9f8972dc13f 100644 --- a/gopls/internal/regtest/modfile/tempmodfile_test.go +++ b/gopls/internal/test/integration/modfile/tempmodfile_test.go @@ -7,7 +7,7 @@ package modfile import ( "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + . 
"golang.org/x/tools/gopls/internal/test/integration" ) // This test replaces an older, problematic test (golang/go#57784). But it has diff --git a/gopls/internal/test/integration/options.go b/gopls/internal/test/integration/options.go new file mode 100644 index 00000000000..baa13d06ecd --- /dev/null +++ b/gopls/internal/test/integration/options.go @@ -0,0 +1,178 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package integration + +import ( + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/integration/fake" +) + +type runConfig struct { + editor fake.EditorConfig + sandbox fake.SandboxConfig + modes Mode + noLogsOnError bool + writeGoSum []string +} + +func defaultConfig() runConfig { + return runConfig{ + editor: fake.EditorConfig{ + Settings: map[string]interface{}{ + // Shorten the diagnostic delay to speed up test execution (else we'd add + // the default delay to each assertion about diagnostics) + "diagnosticsDelay": "10ms", + }, + }, + } +} + +// A RunOption augments the behavior of the test runner. +type RunOption interface { + set(*runConfig) +} + +type optionSetter func(*runConfig) + +func (f optionSetter) set(opts *runConfig) { + f(opts) +} + +// ProxyFiles configures a file proxy using the given txtar-encoded string. +func ProxyFiles(txt string) RunOption { + return optionSetter(func(opts *runConfig) { + opts.sandbox.ProxyFiles = fake.UnpackTxt(txt) + }) +} + +// WriteGoSum causes the environment to write a go.sum file for the requested +// relative directories (via `go list -mod=mod`), before starting gopls. +// +// Useful for tests that use ProxyFiles, but don't care about crafting the +// go.sum content. 
+func WriteGoSum(dirs ...string) RunOption { + return optionSetter(func(opts *runConfig) { + opts.writeGoSum = dirs + }) +} + +// Modes configures the execution modes that the test should run in. +// +// By default, modes are configured by the test runner. If this option is set, +// it overrides the set of default modes and the test runs in exactly these +// modes. +func Modes(modes Mode) RunOption { + return optionSetter(func(opts *runConfig) { + if opts.modes != 0 { + panic("modes set more than once") + } + opts.modes = modes + }) +} + +// NoLogsOnError turns off dumping the LSP logs on test failures. +func NoLogsOnError() RunOption { + return optionSetter(func(opts *runConfig) { + opts.noLogsOnError = true + }) +} + +// WindowsLineEndings configures the editor to use windows line endings. +func WindowsLineEndings() RunOption { + return optionSetter(func(opts *runConfig) { + opts.editor.WindowsLineEndings = true + }) +} + +// ClientName sets the LSP client name. +func ClientName(name string) RunOption { + return optionSetter(func(opts *runConfig) { + opts.editor.ClientName = name + }) +} + +// CapabilitiesJSON sets the capabalities json. +func CapabilitiesJSON(capabilities []byte) RunOption { + return optionSetter(func(opts *runConfig) { + opts.editor.CapabilitiesJSON = capabilities + }) +} + +// Settings sets user-provided configuration for the LSP server. +// +// As a special case, the env setting must not be provided via Settings: use +// EnvVars instead. +type Settings map[string]interface{} + +func (s Settings) set(opts *runConfig) { + if opts.editor.Settings == nil { + opts.editor.Settings = make(map[string]interface{}) + } + for k, v := range s { + opts.editor.Settings[k] = v + } +} + +// WorkspaceFolders configures the workdir-relative workspace folders to send +// to the LSP server. By default the editor sends a single workspace folder +// corresponding to the workdir root. 
To explicitly configure no workspace +// folders, use WorkspaceFolders with no arguments. +func WorkspaceFolders(relFolders ...string) RunOption { + if len(relFolders) == 0 { + // Use an empty non-nil slice to signal explicitly no folders. + relFolders = []string{} + } + + return optionSetter(func(opts *runConfig) { + opts.editor.WorkspaceFolders = relFolders + }) +} + +// FolderSettings defines per-folder workspace settings, keyed by relative path +// to the folder. +// +// Use in conjunction with WorkspaceFolders to have different settings for +// different folders. +type FolderSettings map[string]Settings + +func (fs FolderSettings) set(opts *runConfig) { + // Re-use the Settings type, for symmetry, but translate back into maps for + // the editor config. + folders := make(map[string]map[string]any) + for k, v := range fs { + folders[k] = v + } + opts.editor.FolderSettings = folders +} + +// EnvVars sets environment variables for the LSP session. When applying these +// variables to the session, the special string $SANDBOX_WORKDIR is replaced by +// the absolute path to the sandbox working directory. +type EnvVars map[string]string + +func (e EnvVars) set(opts *runConfig) { + if opts.editor.Env == nil { + opts.editor.Env = make(map[string]string) + } + for k, v := range e { + opts.editor.Env[k] = v + } +} + +// InGOPATH configures the workspace working directory to be GOPATH, rather +// than a separate working directory for use with modules. +func InGOPATH() RunOption { + return optionSetter(func(opts *runConfig) { + opts.sandbox.InGoPath = true + }) +} + +// MessageResponder configures the editor to respond to +// window/showMessageRequest messages using the provided function. 
+func MessageResponder(f func(*protocol.ShowMessageRequestParams) (*protocol.MessageActionItem, error)) RunOption { + return optionSetter(func(opts *runConfig) { + opts.editor.MessageResponder = f + }) +} diff --git a/gopls/internal/test/integration/regtest.go b/gopls/internal/test/integration/regtest.go new file mode 100644 index 00000000000..c183cfde061 --- /dev/null +++ b/gopls/internal/test/integration/regtest.go @@ -0,0 +1,185 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package integration + +import ( + "context" + "flag" + "fmt" + "os" + "runtime" + "testing" + "time" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cmd" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/memoize" + "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/internal/tool" +) + +var ( + runSubprocessTests = flag.Bool("enable_gopls_subprocess_tests", false, "run integration tests against a gopls subprocess (default: in-process)") + goplsBinaryPath = flag.String("gopls_test_binary", "", "path to the gopls binary for use as a remote, for use with the -enable_gopls_subprocess_tests flag") + timeout = flag.Duration("timeout", defaultTimeout(), "if nonzero, default timeout for each integration test; defaults to GOPLS_INTEGRATION_TEST_TIMEOUT") + skipCleanup = flag.Bool("skip_cleanup", false, "whether to skip cleaning up temp directories") + printGoroutinesOnFailure = flag.Bool("print_goroutines", false, "whether to print goroutines info on failure") + printLogs = flag.Bool("print_logs", false, "whether to print LSP logs") +) + +func defaultTimeout() time.Duration { + s := os.Getenv("GOPLS_INTEGRATION_TEST_TIMEOUT") + if s == "" { + return 0 + } + d, err := time.ParseDuration(s) + if err != nil { + fmt.Fprintf(os.Stderr, "invalid GOPLS_INTEGRATION_TEST_TIMEOUT 
%q: %v\n", s, err) + os.Exit(2) + } + return d +} + +var runner *Runner + +// The integrationTestRunner interface abstracts the Run operation, +// enables decorators for various optional features. +type integrationTestRunner interface { + Run(t *testing.T, files string, f TestFunc) +} + +func Run(t *testing.T, files string, f TestFunc) { + runner.Run(t, files, f) +} + +func WithOptions(opts ...RunOption) configuredRunner { + return configuredRunner{opts: opts} +} + +type configuredRunner struct { + opts []RunOption +} + +func (r configuredRunner) Run(t *testing.T, files string, f TestFunc) { + // Print a warning if the test's temporary directory is not + // suitable as a workspace folder, as this may lead to + // otherwise-cryptic failures. This situation typically occurs + // when an arbitrary string (e.g. "foo.") is used as a subtest + // name, on a platform with filename restrictions (e.g. no + // trailing period on Windows). + tmp := t.TempDir() + if err := cache.CheckPathValid(tmp); err != nil { + t.Logf("Warning: testing.T.TempDir(%s) is not valid as a workspace folder: %s", + tmp, err) + } + + runner.Run(t, files, f, r.opts...) +} + +type RunMultiple []struct { + Name string + Runner integrationTestRunner +} + +func (r RunMultiple) Run(t *testing.T, files string, f TestFunc) { + for _, runner := range r { + t.Run(runner.Name, func(t *testing.T) { + runner.Runner.Run(t, files, f) + }) + } +} + +// DefaultModes returns the default modes to run for each regression test (they +// may be reconfigured by the tests themselves). +func DefaultModes() Mode { + modes := Default + if !testing.Short() { + modes |= Experimental | Forwarded + } + if *runSubprocessTests { + modes |= SeparateProcess + } + return modes +} + +var runFromMain = false // true if Main has been called + +// Main sets up and tears down the shared integration test state. 
+func Main(m *testing.M, hook func(*settings.Options)) { + runFromMain = true + + // golang/go#54461: enable additional debugging around hanging Go commands. + gocommand.DebugHangingGoCommands = true + + // If this magic environment variable is set, run gopls instead of the test + // suite. See the documentation for runTestAsGoplsEnvvar for more details. + if os.Getenv(runTestAsGoplsEnvvar) == "true" { + tool.Main(context.Background(), cmd.New(hook), os.Args[1:]) + os.Exit(0) + } + + if !testenv.HasExec() { + fmt.Printf("skipping all tests: exec not supported on %s/%s\n", runtime.GOOS, runtime.GOARCH) + os.Exit(0) + } + testenv.ExitIfSmallMachine() + + // Disable GOPACKAGESDRIVER, as it can cause spurious test failures. + os.Setenv("GOPACKAGESDRIVER", "off") + + if skipReason := checkBuilder(); skipReason != "" { + fmt.Printf("Skipping all tests: %s\n", skipReason) + os.Exit(0) + } + + if err := testenv.HasTool("go"); err != nil { + fmt.Println("Missing go command") + os.Exit(1) + } + + flag.Parse() + + runner = &Runner{ + DefaultModes: DefaultModes(), + Timeout: *timeout, + PrintGoroutinesOnFailure: *printGoroutinesOnFailure, + SkipCleanup: *skipCleanup, + OptionsHook: hook, + store: memoize.NewStore(memoize.NeverEvict), + } + + runner.goplsPath = *goplsBinaryPath + if runner.goplsPath == "" { + var err error + runner.goplsPath, err = os.Executable() + if err != nil { + panic(fmt.Sprintf("finding test binary path: %v", err)) + } + } + + dir, err := os.MkdirTemp("", "gopls-test-") + if err != nil { + panic(fmt.Errorf("creating temp directory: %v", err)) + } + runner.tempDir = dir + + var code int + defer func() { + if err := runner.Close(); err != nil { + fmt.Fprintf(os.Stderr, "closing test runner: %v\n", err) + // Cleanup is broken in go1.12 and earlier, and sometimes flakes on + // Windows due to file locking, but this is OK for our CI. + // + // Fail on go1.13+, except for windows and android which have shutdown problems. 
+ if testenv.Go1Point() >= 13 && runtime.GOOS != "windows" && runtime.GOOS != "android" { + os.Exit(1) + } + } + os.Exit(code) + }() + code = m.Run() +} diff --git a/gopls/internal/lsp/regtest/runner.go b/gopls/internal/test/integration/runner.go similarity index 83% rename from gopls/internal/lsp/regtest/runner.go rename to gopls/internal/test/integration/runner.go index e4aa2f312fa..d66df2f8044 100644 --- a/gopls/internal/lsp/regtest/runner.go +++ b/gopls/internal/test/integration/runner.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package regtest +package integration import ( "bytes" @@ -11,6 +11,7 @@ import ( "io" "net" "os" + "os/exec" "path/filepath" "runtime" "runtime/pprof" @@ -19,14 +20,12 @@ import ( "testing" "time" - exec "golang.org/x/sys/execabs" - - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/debug" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/lsprpc" - "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/debug" + "golang.org/x/tools/gopls/internal/lsprpc" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/test/integration/fake" "golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/jsonrpc2/servertest" "golang.org/x/tools/internal/memoize" @@ -107,11 +106,11 @@ func (m Mode) String() string { // state. 
type Runner struct { // Configuration - DefaultModes Mode // modes to run for each test - Timeout time.Duration // per-test timeout, if set - PrintGoroutinesOnFailure bool // whether to dump goroutines on test failure - SkipCleanup bool // if set, don't delete test data directories when the test exits - OptionsHook func(*source.Options) // if set, use these options when creating gopls sessions + DefaultModes Mode // modes to run for each test + Timeout time.Duration // per-test timeout, if set + PrintGoroutinesOnFailure bool // whether to dump goroutines on test failure + SkipCleanup bool // if set, don't delete test data directories when the test exits + OptionsHook func(*settings.Options) // if set, use these options when creating gopls sessions // Immutable state shared across test invocations goplsPath string // path to the gopls executable (for SeparateProcess mode) @@ -136,14 +135,19 @@ type TestFunc func(t *testing.T, env *Env) func (r *Runner) Run(t *testing.T, files string, test TestFunc, opts ...RunOption) { // TODO(rfindley): this function has gotten overly complicated, and warrants // refactoring. - t.Helper() - checkBuilder(t) - testenv.NeedsGoPackages(t) + + if !runFromMain { + // Main performs various setup precondition checks. + // While it could theoretically be made OK for a Runner to be used outside + // of Main, it is simpler to enforce that we only use the Runner from + // integration test suites. + t.Fatal("integration.Runner.Run must be run from integration.Main") + } tests := []struct { name string mode Mode - getServer func(func(*source.Options)) jsonrpc2.StreamServer + getServer func(func(*settings.Options)) jsonrpc2.StreamServer }{ {"default", Default, r.defaultServer}, {"forwarded", Forwarded, r.forwardedServer}, @@ -183,7 +187,7 @@ func (r *Runner) Run(t *testing.T, files string, test TestFunc, opts ...RunOptio } // TODO(rfindley): do we need an instance at all? Can it be removed? 
- ctx = debug.WithInstance(ctx, "", "off") + ctx = debug.WithInstance(ctx, "off") rootDir := filepath.Join(r.tempDir, filepath.FromSlash(t.Name())) if err := os.MkdirAll(rootDir, 0755); err != nil { @@ -211,6 +215,13 @@ func (r *Runner) Run(t *testing.T, files string, test TestFunc, opts ...RunOptio } }() + // Write the go.sum file for the requested directories, before starting the server. + for _, dir := range config.writeGoSum { + if err := sandbox.RunGoCommand(context.Background(), dir, "list", []string{"-mod=mod", "./..."}, []string{"GOWORK=off"}, true); err != nil { + t.Fatal(err) + } + } + ss := tc.getServer(r.OptionsHook) framer := jsonrpc2.NewRawStream @@ -236,7 +247,7 @@ func (r *Runner) Run(t *testing.T, files string, test TestFunc, opts ...RunOptio if t.Failed() && r.PrintGoroutinesOnFailure { pprof.Lookup("goroutine").WriteTo(os.Stderr, 1) } - if t.Failed() || *printLogs { + if (t.Failed() && !config.noLogsOnError) || *printLogs { ls.printBuffers(t.Name(), os.Stderr) } // For tests that failed due to a timeout, don't fail to shutdown @@ -272,16 +283,17 @@ var longBuilders = map[string]string{ "windows-arm-zx2c4": "", } -func checkBuilder(t *testing.T) { - t.Helper() +// TODO(rfindley): inline into Main. +func checkBuilder() string { builder := os.Getenv("GO_BUILDER_NAME") if reason, ok := longBuilders[builder]; ok && testing.Short() { if reason != "" { - t.Skipf("Skipping %s with -short due to %s", builder, reason) + return fmt.Sprintf("skipping %s with -short due to %s", builder, reason) } else { - t.Skipf("Skipping %s with -short", builder) + return fmt.Sprintf("skipping %s with -short", builder) } } + return "" } type loggingFramer struct { @@ -333,13 +345,13 @@ func (s *loggingFramer) printBuffers(testname string, w io.Writer) { } // defaultServer handles the Default execution mode. 
-func (r *Runner) defaultServer(optsHook func(*source.Options)) jsonrpc2.StreamServer { +func (r *Runner) defaultServer(optsHook func(*settings.Options)) jsonrpc2.StreamServer { return lsprpc.NewStreamServer(cache.New(r.store), false, optsHook) } // experimentalServer handles the Experimental execution mode. -func (r *Runner) experimentalServer(optsHook func(*source.Options)) jsonrpc2.StreamServer { - options := func(o *source.Options) { +func (r *Runner) experimentalServer(optsHook func(*settings.Options)) jsonrpc2.StreamServer { + options := func(o *settings.Options) { optsHook(o) o.EnableAllExperiments() } @@ -347,10 +359,10 @@ func (r *Runner) experimentalServer(optsHook func(*source.Options)) jsonrpc2.Str } // forwardedServer handles the Forwarded execution mode. -func (r *Runner) forwardedServer(optsHook func(*source.Options)) jsonrpc2.StreamServer { +func (r *Runner) forwardedServer(optsHook func(*settings.Options)) jsonrpc2.StreamServer { r.tsOnce.Do(func() { ctx := context.Background() - ctx = debug.WithInstance(ctx, "", "off") + ctx = debug.WithInstance(ctx, "off") ss := lsprpc.NewStreamServer(cache.New(nil), false, optsHook) r.ts = servertest.NewTCPServer(ctx, ss, nil) }) @@ -363,13 +375,13 @@ func (r *Runner) forwardedServer(optsHook func(*source.Options)) jsonrpc2.Stream const runTestAsGoplsEnvvar = "_GOPLS_TEST_BINARY_RUN_AS_GOPLS" // separateProcessServer handles the SeparateProcess execution mode. 
-func (r *Runner) separateProcessServer(optsHook func(*source.Options)) jsonrpc2.StreamServer { +func (r *Runner) separateProcessServer(optsHook func(*settings.Options)) jsonrpc2.StreamServer { if runtime.GOOS != "linux" { panic("separate process execution mode is only supported on linux") } r.startRemoteOnce.Do(func() { - socketDir, err := os.MkdirTemp(r.tempDir, "gopls-regtest-socket") + socketDir, err := os.MkdirTemp(r.tempDir, "gopls-test-socket") if err != nil { r.remoteErr = err return @@ -404,7 +416,7 @@ func (r *Runner) separateProcessServer(optsHook func(*source.Options)) jsonrpc2. return newForwarder("unix", r.remoteSocket) } -func newForwarder(network, address string) *lsprpc.Forwarder { +func newForwarder(network, address string) jsonrpc2.StreamServer { server, err := lsprpc.NewForwarder(network+";"+address, nil) if err != nil { // This should never happen, as we are passing an explicit address. diff --git a/gopls/internal/test/integration/template/template_test.go b/gopls/internal/test/integration/template/template_test.go new file mode 100644 index 00000000000..28ea9182284 --- /dev/null +++ b/gopls/internal/test/integration/template/template_test.go @@ -0,0 +1,231 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package template + +import ( + "strings" + "testing" + + "golang.org/x/tools/gopls/internal/hooks" + "golang.org/x/tools/gopls/internal/protocol" + . 
"golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/util/bug" +) + +func TestMain(m *testing.M) { + bug.PanicOnBugs = true + Main(m, hooks.Options) +} + +func TestMultilineTokens(t *testing.T) { + // 51731: panic: runtime error: slice bounds out of range [38:3] + const files = ` +-- go.mod -- +module mod.com + +go 1.17 +-- hi.tmpl -- +{{if (foÜx .X.Y)}}😀{{$A := + "hi" + }}{{.Z $A}}{{else}} +{{$A.X 12}} +{{foo (.X.Y) 23 ($A.Z)}} +{{end}} +` + WithOptions( + Settings{ + "templateExtensions": []string{"tmpl"}, + "semanticTokens": true, + }, + ).Run(t, files, func(t *testing.T, env *Env) { + var p protocol.SemanticTokensParams + p.TextDocument.URI = env.Sandbox.Workdir.URI("hi.tmpl") + toks, err := env.Editor.Server.SemanticTokensFull(env.Ctx, &p) + if err != nil { + t.Errorf("semantic token failed: %v", err) + } + if toks == nil || len(toks.Data) == 0 { + t.Errorf("got no semantic tokens") + } + }) +} + +func TestTemplatesFromExtensions(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.12 +-- hello.tmpl -- +{{range .Planets}} +Hello {{}} <-- missing body +{{end}} +` + WithOptions( + Settings{ + "templateExtensions": []string{"tmpl"}, + "semanticTokens": true, + }, + ).Run(t, files, func(t *testing.T, env *Env) { + // TODO: can we move this diagnostic onto {{}}? 
+ var diags protocol.PublishDiagnosticsParams + env.OnceMet( + InitialWorkspaceLoad, + Diagnostics(env.AtRegexp("hello.tmpl", "()Hello {{}}")), + ReadDiagnostics("hello.tmpl", &diags), + ) + d := diags.Diagnostics // issue 50786: check for Source + if len(d) != 1 { + t.Errorf("expected 1 diagnostic, got %d", len(d)) + return + } + if d[0].Source != "template" { + t.Errorf("expected Source 'template', got %q", d[0].Source) + } + // issue 50801 (even broken templates could return some semantic tokens) + var p protocol.SemanticTokensParams + p.TextDocument.URI = env.Sandbox.Workdir.URI("hello.tmpl") + toks, err := env.Editor.Server.SemanticTokensFull(env.Ctx, &p) + if err != nil { + t.Errorf("semantic token failed: %v", err) + } + if toks == nil || len(toks.Data) == 0 { + t.Errorf("got no semantic tokens") + } + + env.WriteWorkspaceFile("hello.tmpl", "{{range .Planets}}\nHello {{.}}\n{{end}}") + env.AfterChange(NoDiagnostics(ForFile("hello.tmpl"))) + }) +} + +func TestTemplatesObserveDirectoryFilters(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.12 +-- a/a.tmpl -- +A {{}} <-- missing body +-- b/b.tmpl -- +B {{}} <-- missing body +` + + WithOptions( + Settings{ + "directoryFilters": []string{"-b"}, + "templateExtensions": []string{"tmpl"}, + }, + ).Run(t, files, func(t *testing.T, env *Env) { + env.OnceMet( + InitialWorkspaceLoad, + Diagnostics(env.AtRegexp("a/a.tmpl", "()A")), + NoDiagnostics(ForFile("b/b.tmpl")), + ) + }) +} + +func TestTemplatesFromLangID(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.12 +` + + Run(t, files, func(t *testing.T, env *Env) { + env.CreateBuffer("hello.tmpl", "") + env.AfterChange( + NoDiagnostics(ForFile("hello.tmpl")), // Don't get spurious errors for empty templates. 
+ ) + env.SetBufferContent("hello.tmpl", "{{range .Planets}}\nHello {{}}\n{{end}}") + env.Await(Diagnostics(env.AtRegexp("hello.tmpl", "()Hello {{}}"))) + env.RegexpReplace("hello.tmpl", "{{}}", "{{.}}") + env.Await(NoDiagnostics(ForFile("hello.tmpl"))) + }) +} + +func TestClosingTemplatesMakesDiagnosticsDisappear(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.12 +-- hello.tmpl -- +{{range .Planets}} +Hello {{}} <-- missing body +{{end}} +` + + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("hello.tmpl") + env.AfterChange( + Diagnostics(env.AtRegexp("hello.tmpl", "()Hello {{}}")), + ) + // Since we don't have templateExtensions configured, closing hello.tmpl + // should make its diagnostics disappear. + env.CloseBuffer("hello.tmpl") + env.AfterChange( + NoDiagnostics(ForFile("hello.tmpl")), + ) + }) +} + +func TestMultipleSuffixes(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com + +go 1.12 +-- b.gotmpl -- +{{define "A"}}goo{{end}} +-- a.tmpl -- +{{template "A"}} +` + + WithOptions( + Settings{ + "templateExtensions": []string{"tmpl", "gotmpl"}, + }, + ).Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("a.tmpl") + x := env.RegexpSearch("a.tmpl", `A`) + loc := env.GoToDefinition(x) + refs := env.References(loc) + if len(refs) != 2 { + t.Fatalf("got %v reference(s), want 2", len(refs)) + } + // make sure we got one from b.gotmpl + want := env.Sandbox.Workdir.URI("b.gotmpl") + if refs[0].URI != want && refs[1].URI != want { + t.Errorf("failed to find reference to %s", shorten(want)) + for i, r := range refs { + t.Logf("%d: URI:%s %v", i, shorten(r.URI), r.Range) + } + } + + content, nloc := env.Hover(loc) + if loc != nloc { + t.Errorf("loc? 
got %v, wanted %v", nloc, loc) + } + if content.Value != "template A defined" { + t.Errorf("got %s, wanted 'template A defined", content.Value) + } + }) +} + +// shorten long URIs +func shorten(fn protocol.DocumentURI) string { + if len(fn) <= 20 { + return string(fn) + } + pieces := strings.Split(string(fn), "/") + if len(pieces) < 2 { + return string(fn) + } + j := len(pieces) + return pieces[j-2] + "/" + pieces[j-1] +} + +// Hover needs tests diff --git a/gopls/internal/regtest/watch/setting_test.go b/gopls/internal/test/integration/watch/setting_test.go similarity index 96% rename from gopls/internal/regtest/watch/setting_test.go rename to gopls/internal/test/integration/watch/setting_test.go index 9ed7fdeaa83..48ef97668f4 100644 --- a/gopls/internal/regtest/watch/setting_test.go +++ b/gopls/internal/test/integration/watch/setting_test.go @@ -2,13 +2,13 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package regtest +package watch import ( "fmt" "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + . "golang.org/x/tools/gopls/internal/test/integration" ) func TestSubdirWatchPatterns(t *testing.T) { diff --git a/gopls/internal/regtest/watch/watch_test.go b/gopls/internal/test/integration/watch/watch_test.go similarity index 97% rename from gopls/internal/regtest/watch/watch_test.go rename to gopls/internal/test/integration/watch/watch_test.go index dccf869653d..fab302ff149 100644 --- a/gopls/internal/regtest/watch/watch_test.go +++ b/gopls/internal/test/integration/watch/watch_test.go @@ -2,17 +2,17 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package regtest +package watch import ( "testing" - "golang.org/x/tools/gopls/internal/bug" "golang.org/x/tools/gopls/internal/hooks" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + . 
"golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/util/bug" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/integration/fake" ) func TestMain(m *testing.M) { @@ -258,7 +258,7 @@ func _() { } // Add a new method to an interface and implement it. -// Inspired by the structure of internal/lsp/source and internal/lsp/cache. +// Inspired by the structure of internal/golang and internal/cache. func TestCreateImplementation(t *testing.T) { const pkg = ` -- go.mod -- diff --git a/gopls/internal/regtest/workspace/adhoc_test.go b/gopls/internal/test/integration/workspace/adhoc_test.go similarity index 86% rename from gopls/internal/regtest/workspace/adhoc_test.go rename to gopls/internal/test/integration/workspace/adhoc_test.go index d726242c48d..3d451dd5f08 100644 --- a/gopls/internal/regtest/workspace/adhoc_test.go +++ b/gopls/internal/test/integration/workspace/adhoc_test.go @@ -7,15 +7,12 @@ package workspace import ( "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/internal/testenv" + . "golang.org/x/tools/gopls/internal/test/integration" ) // Test for golang/go#57209: editing a file in an ad-hoc package should not // trigger conflicting diagnostics. 
func TestAdhoc_Edits(t *testing.T) { - testenv.NeedsGo1Point(t, 18) - const files = ` -- a.go -- package foo diff --git a/gopls/internal/regtest/workspace/broken_test.go b/gopls/internal/test/integration/workspace/broken_test.go similarity index 93% rename from gopls/internal/regtest/workspace/broken_test.go rename to gopls/internal/test/integration/workspace/broken_test.go index baa6ec1384a..8f00be775e4 100644 --- a/gopls/internal/regtest/workspace/broken_test.go +++ b/gopls/internal/test/integration/workspace/broken_test.go @@ -8,8 +8,8 @@ import ( "strings" "testing" - "golang.org/x/tools/gopls/internal/lsp" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + "golang.org/x/tools/gopls/internal/server" + . "golang.org/x/tools/gopls/internal/test/integration" "golang.org/x/tools/internal/testenv" ) @@ -98,7 +98,7 @@ const CompleteMe = 222 ).Run(t, src, func(t *testing.T, env *Env) { env.OpenFile("package1/main.go") env.AfterChange( - OutstandingWork(lsp.WorkspaceLoadFailure, `module example.com/foo appears multiple times in workspace`), + OutstandingWork(server.WorkspaceLoadFailure, `module example.com/foo appears multiple times in workspace`), ) // Remove the redundant vendored copy of example.com. 
@@ -170,6 +170,8 @@ const F = named.D - 3 } func TestMultipleModules_Warning(t *testing.T) { + t.Skip("temporary skip for golang/go#57979: revisit after zero-config logic is in place") + msgForVersion := func(ver int) string { if ver >= 18 { return `gopls was not able to find modules in your workspace.` @@ -208,7 +210,7 @@ package b env.AfterChange( Diagnostics(env.AtRegexp("a/a.go", "package a")), Diagnostics(env.AtRegexp("b/go.mod", "module b.com")), - OutstandingWork(lsp.WorkspaceLoadFailure, msg), + OutstandingWork(server.WorkspaceLoadFailure, msg), ) // Changing the workspace folders to the valid modules should resolve @@ -240,7 +242,7 @@ package b // Diagnostics(env.AtRegexp("a/a.go", "package a")), // Diagnostics(env.AtRegexp("b/go.mod", "module b.com")), Diagnostics(env.AtRegexp("b/b.go", "package b")), - OutstandingWork(lsp.WorkspaceLoadFailure, msg), + OutstandingWork(server.WorkspaceLoadFailure, msg), ) }) }) diff --git a/gopls/internal/test/integration/workspace/directoryfilters_test.go b/gopls/internal/test/integration/workspace/directoryfilters_test.go new file mode 100644 index 00000000000..6eec8377233 --- /dev/null +++ b/gopls/internal/test/integration/workspace/directoryfilters_test.go @@ -0,0 +1,207 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package workspace + +import ( + "sort" + "strings" + "testing" + + . "golang.org/x/tools/gopls/internal/test/integration" +) + +// This file contains regression tests for the directoryFilters setting. 
+// +// TODO: +// - consolidate some of these tests into a single test +// - add more tests for changing directory filters + +func TestDirectoryFilters(t *testing.T) { + WithOptions( + ProxyFiles(workspaceProxy), + WorkspaceFolders("pkg"), + Settings{ + "directoryFilters": []string{"-inner"}, + }, + ).Run(t, workspaceModule, func(t *testing.T, env *Env) { + syms := env.Symbol("Hi") + sort.Slice(syms, func(i, j int) bool { return syms[i].ContainerName < syms[j].ContainerName }) + for _, s := range syms { + if strings.Contains(s.ContainerName, "inner") { + t.Errorf("WorkspaceSymbol: found symbol %q with container %q, want \"inner\" excluded", s.Name, s.ContainerName) + } + } + }) +} + +func TestDirectoryFiltersLoads(t *testing.T) { + // exclude, and its error, should be excluded from the workspace. + const files = ` +-- go.mod -- +module example.com + +go 1.12 +-- exclude/exclude.go -- +package exclude + +const _ = Nonexistant +` + + WithOptions( + Settings{"directoryFilters": []string{"-exclude"}}, + ).Run(t, files, func(t *testing.T, env *Env) { + env.OnceMet( + InitialWorkspaceLoad, + NoDiagnostics(ForFile("exclude/x.go")), + ) + }) +} + +func TestDirectoryFiltersTransitiveDep(t *testing.T) { + // Even though exclude is excluded from the workspace, it should + // still be importable as a non-workspace package. 
+ const files = ` +-- go.mod -- +module example.com + +go 1.12 +-- include/include.go -- +package include +import "example.com/exclude" + +const _ = exclude.X +-- exclude/exclude.go -- +package exclude + +const _ = Nonexistant // should be ignored, since this is a non-workspace package +const X = 1 +` + + WithOptions( + Settings{"directoryFilters": []string{"-exclude"}}, + ).Run(t, files, func(t *testing.T, env *Env) { + env.OnceMet( + InitialWorkspaceLoad, + NoDiagnostics(ForFile("exclude/exclude.go")), // filtered out + NoDiagnostics(ForFile("include/include.go")), // successfully builds + ) + }) +} + +// Test for golang/go#46438: support for '**' in directory filters. +func TestDirectoryFilters_Wildcard(t *testing.T) { + filters := []string{"-**/bye"} + WithOptions( + ProxyFiles(workspaceProxy), + WorkspaceFolders("pkg"), + Settings{ + "directoryFilters": filters, + }, + ).Run(t, workspaceModule, func(t *testing.T, env *Env) { + syms := env.Symbol("Bye") + sort.Slice(syms, func(i, j int) bool { return syms[i].ContainerName < syms[j].ContainerName }) + for _, s := range syms { + if strings.Contains(s.ContainerName, "bye") { + t.Errorf("WorkspaceSymbol: found symbol %q with container %q with filters %v", s.Name, s.ContainerName, filters) + } + } + }) +} + +// Test for golang/go#52993: wildcard directoryFilters should apply to +// goimports scanning as well. 
+func TestDirectoryFilters_ImportScanning(t *testing.T) { + const files = ` +-- go.mod -- +module mod.test + +go 1.12 +-- main.go -- +package main + +func main() { + bye.Goodbye() + hi.Hello() +} +-- p/bye/bye.go -- +package bye + +func Goodbye() {} +-- hi/hi.go -- +package hi + +func Hello() {} +` + + WithOptions( + Settings{ + "directoryFilters": []string{"-**/bye", "-hi"}, + }, + ).Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + beforeSave := env.BufferText("main.go") + env.OrganizeImports("main.go") + got := env.BufferText("main.go") + if got != beforeSave { + t.Errorf("after organizeImports code action, got modified buffer:\n%s", got) + } + }) +} + +// Test for golang/go#52993: non-wildcard directoryFilters should still be +// applied relative to the workspace folder, not the module root. +func TestDirectoryFilters_MultiRootImportScanning(t *testing.T) { + const files = ` +-- go.work -- +go 1.18 + +use ( + a + b +) +-- a/go.mod -- +module mod1.test + +go 1.18 +-- a/main.go -- +package main + +func main() { + hi.Hi() +} +-- a/hi/hi.go -- +package hi + +func Hi() {} +-- b/go.mod -- +module mod2.test + +go 1.18 +-- b/main.go -- +package main + +func main() { + hi.Hi() +} +-- b/hi/hi.go -- +package hi + +func Hi() {} +` + + WithOptions( + Settings{ + "directoryFilters": []string{"-hi"}, // this test fails with -**/hi + }, + ).Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("a/main.go") + beforeSave := env.BufferText("a/main.go") + env.OrganizeImports("a/main.go") + got := env.BufferText("a/main.go") + if got == beforeSave { + t.Errorf("after organizeImports code action, got identical buffer:\n%s", got) + } + }) +} diff --git a/gopls/internal/regtest/workspace/fromenv_test.go b/gopls/internal/test/integration/workspace/fromenv_test.go similarity index 93% rename from gopls/internal/regtest/workspace/fromenv_test.go rename to gopls/internal/test/integration/workspace/fromenv_test.go index 3913a4476f0..bc909c7deca 100644 --- 
a/gopls/internal/regtest/workspace/fromenv_test.go +++ b/gopls/internal/test/integration/workspace/fromenv_test.go @@ -9,14 +9,11 @@ import ( "path/filepath" "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/internal/testenv" + . "golang.org/x/tools/gopls/internal/test/integration" ) // Test that setting go.work via environment variables or settings works. func TestUseGoWorkOutsideTheWorkspace(t *testing.T) { - testenv.NeedsGo1Point(t, 18) - // As discussed in // https://github.com/golang/go/issues/59458#issuecomment-1513794691, we must // use \-separated paths in go.work use directives for this test to work diff --git a/gopls/internal/test/integration/workspace/goversion_test.go b/gopls/internal/test/integration/workspace/goversion_test.go new file mode 100644 index 00000000000..b6604afe6b3 --- /dev/null +++ b/gopls/internal/test/integration/workspace/goversion_test.go @@ -0,0 +1,62 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package workspace + +import ( + "flag" + "os" + "runtime" + "testing" + + . "golang.org/x/tools/gopls/internal/test/integration" +) + +var go121bin = flag.String("go121bin", "", "bin directory containing go 1.21 or later") + +// TODO(golang/go#65917): delete this test once we no longer support building +// gopls with older Go versions. +func TestCanHandlePatchVersions(t *testing.T) { + // This test verifies the fixes for golang/go#66195 and golang/go#66636 -- + // that gopls does not crash when encountering a go version with a patch + // number in the go.mod file. + // + // This is tricky to test, because the regression requires that gopls is + // built with an older go version, and then the environment is upgraded to + // have a more recent go. To set up this scenario, the test requires a path + // to a bin directory containing go1.21 or later. 
+ if *go121bin == "" { + t.Skip("-go121bin directory is not set") + } + + if runtime.GOOS != "linux" && runtime.GOOS != "darwin" { + t.Skip("requires linux or darwin") // for PATH separator + } + + path := os.Getenv("PATH") + t.Setenv("PATH", *go121bin+":"+path) + + const files = ` +-- go.mod -- +module example.com/bar + +go 1.21.1 + +-- p.go -- +package bar + +type I interface { string } +` + + WithOptions( + EnvVars{ + "PATH": path, + }, + ).Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("p.go") + env.AfterChange( + NoDiagnostics(ForFile("p.go")), + ) + }) +} diff --git a/gopls/internal/regtest/workspace/metadata_test.go b/gopls/internal/test/integration/workspace/metadata_test.go similarity index 94% rename from gopls/internal/regtest/workspace/metadata_test.go rename to gopls/internal/test/integration/workspace/metadata_test.go index e5da300870a..59dfec3ad97 100644 --- a/gopls/internal/regtest/workspace/metadata_test.go +++ b/gopls/internal/test/integration/workspace/metadata_test.go @@ -8,8 +8,7 @@ import ( "strings" "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/internal/testenv" + . "golang.org/x/tools/gopls/internal/test/integration" ) // TODO(rfindley): move workspace tests related to metadata bugs into this @@ -42,8 +41,6 @@ const C = 42 // Test that moving ignoring a file via build constraints causes diagnostics to // be resolved. 
func TestIgnoreFile(t *testing.T) { - testenv.NeedsGo1Point(t, 17) // needs native overlays and support for go:build directives - const src = ` -- go.mod -- module mod.test @@ -88,14 +85,12 @@ func main() {} // packages for bar.go env.RegexpReplace("bar.go", "ignore", "excluded") env.AfterChange( - Diagnostics(env.AtRegexp("bar.go", "package (main)"), WithMessage("not included in your workspace")), + Diagnostics(env.AtRegexp("bar.go", "package (main)"), WithMessage("excluded due to its build tags")), ) }) } func TestReinitializeRepeatedly(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // uses go.work - const multiModule = ` -- go.work -- go 1.18 @@ -174,8 +169,6 @@ func Hello() int { // Test for golang/go#59458. With lazy module loading, we may not need // transitively required modules. func TestNestedModuleLoading_Issue59458(t *testing.T) { - testenv.NeedsGo1Point(t, 17) // needs lazy module loading - // In this test, module b.com/nested requires b.com/other, which in turn // requires b.com, but b.com/nested does not reach b.com through the package // graph. Therefore, b.com/nested does not need b.com on 1.17 and later, diff --git a/gopls/internal/regtest/workspace/misspelling_test.go b/gopls/internal/test/integration/workspace/misspelling_test.go similarity index 94% rename from gopls/internal/regtest/workspace/misspelling_test.go rename to gopls/internal/test/integration/workspace/misspelling_test.go index 0419a116344..ddca05c860e 100644 --- a/gopls/internal/regtest/workspace/misspelling_test.go +++ b/gopls/internal/test/integration/workspace/misspelling_test.go @@ -8,8 +8,8 @@ import ( "runtime" "testing" - . "golang.org/x/tools/gopls/internal/lsp/regtest" - "golang.org/x/tools/gopls/internal/lsp/tests/compare" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/compare" ) // Test for golang/go#57081. 
diff --git a/gopls/internal/test/integration/workspace/multi_folder_test.go b/gopls/internal/test/integration/workspace/multi_folder_test.go new file mode 100644 index 00000000000..6adc1f8d5ce --- /dev/null +++ b/gopls/internal/test/integration/workspace/multi_folder_test.go @@ -0,0 +1,128 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package workspace + +import ( + "testing" + + . "golang.org/x/tools/gopls/internal/test/integration" +) + +// TODO(rfindley): update the marker tests to support the concept of multiple +// workspace folders, and move this there. +func TestMultiView_Diagnostics(t *testing.T) { + // In the past, gopls would only diagnose one View at a time + // (the last to have changed). + // + // This test verifies that gopls can maintain diagnostics for multiple Views. + const files = ` + +-- a/go.mod -- +module golang.org/lsptests/a + +go 1.20 +-- a/a.go -- +package a + +func _() { + x := 1 // unused +} +-- b/go.mod -- +module golang.org/lsptests/b + +go 1.20 +-- b/b.go -- +package b + +func _() { + y := 2 // unused +} +` + + WithOptions( + WorkspaceFolders("a", "b"), + ).Run(t, files, func(t *testing.T, env *Env) { + env.OnceMet( + InitialWorkspaceLoad, + Diagnostics(env.AtRegexp("a/a.go", "x")), + Diagnostics(env.AtRegexp("b/b.go", "y")), + ) + }) +} + +func TestMultiView_LocalReplace(t *testing.T) { + // This is a regression test for #66145, where gopls attempted to load a + // package in a locally replaced module as a workspace package, resulting in + // spurious import diagnostics because the module graph had been pruned. + + const proxy = ` +-- example.com/c@v1.2.3/go.mod -- +module example.com/c + +go 1.20 + +-- example.com/c@v1.2.3/c.go -- +package c + +const C = 3 + +` + // In the past, gopls would only diagnose one View at a time + // (the last to have changed). 
+ // + // This test verifies that gopls can maintain diagnostics for multiple Views. + const files = ` +-- a/go.mod -- +module golang.org/lsptests/a + +go 1.20 + +require golang.org/lsptests/b v1.2.3 + +replace golang.org/lsptests/b => ../b + +-- a/a.go -- +package a + +import "golang.org/lsptests/b" + +const A = b.B - 1 + +-- b/go.mod -- +module golang.org/lsptests/b + +go 1.20 + +require example.com/c v1.2.3 + +-- b/go.sum -- +example.com/c v1.2.3 h1:hsOPhoHQLZPEn7l3kNya3fR3SfqW0/rafZMP8ave6fg= +example.com/c v1.2.3/go.mod h1:4uG6Y5qX88LrEd4KfRoiguHZIbdLKUEHD1wXqPyrHcA= +-- b/b.go -- +package b + +const B = 2 + +-- b/unrelated/u.go -- +package unrelated + +import "example.com/c" + +const U = c.C +` + + WithOptions( + WorkspaceFolders("a", "b"), + ProxyFiles(proxy), + ).Run(t, files, func(t *testing.T, env *Env) { + // Opening unrelated first ensures that when we compute workspace packages + // for the "a" workspace, it includes the unrelated package, which will be + // unloadable from a as there is no a/go.sum. + env.OpenFile("b/unrelated/u.go") + env.AfterChange() + env.OpenFile("a/a.go") + env.AfterChange(NoDiagnostics()) + }) +} diff --git a/gopls/internal/test/integration/workspace/quickfix_test.go b/gopls/internal/test/integration/workspace/quickfix_test.go new file mode 100644 index 00000000000..6f7c8e854d0 --- /dev/null +++ b/gopls/internal/test/integration/workspace/quickfix_test.go @@ -0,0 +1,458 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package workspace + +import ( + "strings" + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/compare" + + . 
"golang.org/x/tools/gopls/internal/test/integration" +) + +func TestQuickFix_UseModule(t *testing.T) { + t.Skip("temporary skip for golang/go#57979: with zero-config gopls these files are no longer orphaned") + + const files = ` +-- go.work -- +go 1.20 + +use ( + ./a +) +-- a/go.mod -- +module mod.com/a + +go 1.18 + +-- a/main.go -- +package main + +import "mod.com/a/lib" + +func main() { + _ = lib.C +} + +-- a/lib/lib.go -- +package lib + +const C = "b" +-- b/go.mod -- +module mod.com/b + +go 1.18 + +-- b/main.go -- +package main + +import "mod.com/b/lib" + +func main() { + _ = lib.C +} + +-- b/lib/lib.go -- +package lib + +const C = "b" +` + + for _, title := range []string{ + "Use this module", + "Use all modules", + } { + t.Run(title, func(t *testing.T) { + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("b/main.go") + var d protocol.PublishDiagnosticsParams + env.AfterChange(ReadDiagnostics("b/main.go", &d)) + fixes := env.GetQuickFixes("b/main.go", d.Diagnostics) + var toApply []protocol.CodeAction + for _, fix := range fixes { + if strings.Contains(fix.Title, title) { + toApply = append(toApply, fix) + } + } + if len(toApply) != 1 { + t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), title, toApply) + } + env.ApplyCodeAction(toApply[0]) + env.AfterChange(NoDiagnostics()) + want := `go 1.20 + +use ( + ./a + ./b +) +` + got := env.ReadWorkspaceFile("go.work") + if diff := compare.Text(want, got); diff != "" { + t.Errorf("unexpeced go.work content:\n%s", diff) + } + }) + }) + } +} + +func TestQuickFix_AddGoWork(t *testing.T) { + t.Skip("temporary skip for golang/go#57979: with zero-config gopls these files are no longer orphaned") + + const files = ` +-- a/go.mod -- +module mod.com/a + +go 1.18 + +-- a/main.go -- +package main + +import "mod.com/a/lib" + +func main() { + _ = lib.C +} + +-- a/lib/lib.go -- +package lib + +const C = "b" +-- b/go.mod -- +module mod.com/b + +go 1.18 + +-- b/main.go -- +package main + 
+import "mod.com/b/lib" + +func main() { + _ = lib.C +} + +-- b/lib/lib.go -- +package lib + +const C = "b" +` + + tests := []struct { + name string + file string + title string + want string // expected go.work content, excluding go directive line + }{ + { + "use b", + "b/main.go", + "Add a go.work file using this module", + ` +use ./b +`, + }, + { + "use a", + "a/main.go", + "Add a go.work file using this module", + ` +use ./a +`, + }, + { + "use all", + "a/main.go", + "Add a go.work file using all modules", + ` +use ( + ./a + ./b +) +`, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile(test.file) + var d protocol.PublishDiagnosticsParams + env.AfterChange(ReadDiagnostics(test.file, &d)) + fixes := env.GetQuickFixes(test.file, d.Diagnostics) + var toApply []protocol.CodeAction + for _, fix := range fixes { + if strings.Contains(fix.Title, test.title) { + toApply = append(toApply, fix) + } + } + if len(toApply) != 1 { + t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), test.title, toApply) + } + env.ApplyCodeAction(toApply[0]) + env.AfterChange( + NoDiagnostics(ForFile(test.file)), + ) + + got := env.ReadWorkspaceFile("go.work") + // Ignore the `go` directive, which we assume is on the first line of + // the go.work file. This allows the test to be independent of go version. 
+ got = strings.Join(strings.Split(got, "\n")[1:], "\n") + if diff := compare.Text(test.want, got); diff != "" { + t.Errorf("unexpected go.work content:\n%s", diff) + } + }) + }) + } +} + +func TestQuickFix_UnsavedGoWork(t *testing.T) { + t.Skip("temporary skip for golang/go#57979: with zero-config gopls these files are no longer orphaned") + + const files = ` +-- go.work -- +go 1.21 + +use ( + ./a +) +-- a/go.mod -- +module mod.com/a + +go 1.18 + +-- a/main.go -- +package main + +func main() {} +-- b/go.mod -- +module mod.com/b + +go 1.18 + +-- b/main.go -- +package main + +func main() {} +` + + for _, title := range []string{ + "Use this module", + "Use all modules", + } { + t.Run(title, func(t *testing.T) { + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("go.work") + env.OpenFile("b/main.go") + env.RegexpReplace("go.work", "go 1.21", "go 1.21 // arbitrary comment") + var d protocol.PublishDiagnosticsParams + env.AfterChange(ReadDiagnostics("b/main.go", &d)) + fixes := env.GetQuickFixes("b/main.go", d.Diagnostics) + var toApply []protocol.CodeAction + for _, fix := range fixes { + if strings.Contains(fix.Title, title) { + toApply = append(toApply, fix) + } + } + if len(toApply) != 1 { + t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), title, toApply) + } + fix := toApply[0] + err := env.Editor.ApplyCodeAction(env.Ctx, fix) + if err == nil { + t.Fatalf("codeAction(%q) succeeded unexpectedly", fix.Title) + } + + if got := err.Error(); !strings.Contains(got, "must save") { + t.Errorf("codeAction(%q) returned error %q, want containing \"must save\"", fix.Title, err) + } + }) + }) + } +} + +func TestQuickFix_GOWORKOff(t *testing.T) { + t.Skip("temporary skip for golang/go#57979: with zero-config gopls these files are no longer orphaned") + + const files = ` +-- go.work -- +go 1.21 + +use ( + ./a +) +-- a/go.mod -- +module mod.com/a + +go 1.18 + +-- a/main.go -- +package main + +func main() {} +-- b/go.mod -- +module 
mod.com/b + +go 1.18 + +-- b/main.go -- +package main + +func main() {} +` + + for _, title := range []string{ + "Use this module", + "Use all modules", + } { + t.Run(title, func(t *testing.T) { + WithOptions( + EnvVars{"GOWORK": "off"}, + ).Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("go.work") + env.OpenFile("b/main.go") + var d protocol.PublishDiagnosticsParams + env.AfterChange(ReadDiagnostics("b/main.go", &d)) + fixes := env.GetQuickFixes("b/main.go", d.Diagnostics) + var toApply []protocol.CodeAction + for _, fix := range fixes { + if strings.Contains(fix.Title, title) { + toApply = append(toApply, fix) + } + } + if len(toApply) != 1 { + t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), title, toApply) + } + fix := toApply[0] + err := env.Editor.ApplyCodeAction(env.Ctx, fix) + if err == nil { + t.Fatalf("codeAction(%q) succeeded unexpectedly", fix.Title) + } + + if got := err.Error(); !strings.Contains(got, "GOWORK=off") { + t.Errorf("codeAction(%q) returned error %q, want containing \"GOWORK=off\"", fix.Title, err) + } + }) + }) + } +} + +func TestStubMethods64087(t *testing.T) { + // We can't use the @fix or @suggestedfixerr or @codeactionerr + // because the error now reported by the corrected logic + // is internal and silently causes no fix to be offered. + // + // See also the similar TestStubMethods64545 below. + + const files = ` +This is a regression test for a panic (issue #64087) in stub methods. + +The illegal expression int("") caused a "cannot convert" error that +spuriously triggered the "stub methods" in a function whose return +statement had too many operands, leading to an out-of-bounds index. + +-- go.mod -- +module mod.com +go 1.18 + +-- a.go -- +package a + +func f() error { + return nil, myerror{int("")} +} + +type myerror struct{any} +` + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("a.go") + + // Expect a "wrong result count" diagnostic. 
+ var d protocol.PublishDiagnosticsParams + env.AfterChange(ReadDiagnostics("a.go", &d)) + + // In no particular order, we expect: + // "...too many return values..." (compiler) + // "...cannot convert..." (compiler) + // and possibly: + // "...too many return values..." (fillreturns) + // We check only for the first of these. + found := false + for i, diag := range d.Diagnostics { + t.Logf("Diagnostics[%d] = %q (%s)", i, diag.Message, diag.Source) + if strings.Contains(diag.Message, "too many return") { + found = true + } + } + if !found { + t.Fatalf("Expected WrongResultCount diagnostic not found.") + } + + // GetQuickFixes should not panic (the original bug). + fixes := env.GetQuickFixes("a.go", d.Diagnostics) + + // We should not be offered a "stub methods" fix. + for _, fix := range fixes { + if strings.Contains(fix.Title, "Implement error") { + t.Errorf("unexpected 'stub methods' fix: %#v", fix) + } + } + }) +} + +func TestStubMethods64545(t *testing.T) { + // We can't use the @fix or @suggestedfixerr or @codeactionerr + // because the error now reported by the corrected logic + // is internal and silently causes no fix to be offered. + // + // TODO(adonovan): we may need to generalize this test and + // TestStubMethods64087 if this happens a lot. + + const files = ` +This is a regression test for a panic (issue #64545) in stub methods. + +The illegal expression int("") caused a "cannot convert" error that +spuriously triggered the "stub methods" in a function whose var +spec had no RHS values, leading to an out-of-bounds index. + +-- go.mod -- +module mod.com +go 1.18 + +-- a.go -- +package a + +var _ [int("")]byte +` + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("a.go") + + // Expect a "cannot convert" diagnostic, and perhaps others. 
+ var d protocol.PublishDiagnosticsParams + env.AfterChange(ReadDiagnostics("a.go", &d)) + + found := false + for i, diag := range d.Diagnostics { + t.Logf("Diagnostics[%d] = %q (%s)", i, diag.Message, diag.Source) + if strings.Contains(diag.Message, "cannot convert") { + found = true + } + } + if !found { + t.Fatalf("Expected 'cannot convert' diagnostic not found.") + } + + // GetQuickFixes should not panic (the original bug). + fixes := env.GetQuickFixes("a.go", d.Diagnostics) + + // We should not be offered a "stub methods" fix. + for _, fix := range fixes { + if strings.Contains(fix.Title, "Implement error") { + t.Errorf("unexpected 'stub methods' fix: %#v", fix) + } + } + }) +} diff --git a/gopls/internal/regtest/workspace/standalone_test.go b/gopls/internal/test/integration/workspace/standalone_test.go similarity index 98% rename from gopls/internal/regtest/workspace/standalone_test.go rename to gopls/internal/test/integration/workspace/standalone_test.go index 3e0ea40345d..d837899f7fb 100644 --- a/gopls/internal/regtest/workspace/standalone_test.go +++ b/gopls/internal/test/integration/workspace/standalone_test.go @@ -9,8 +9,8 @@ import ( "testing" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/lsp/protocol" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + "golang.org/x/tools/gopls/internal/protocol" + . "golang.org/x/tools/gopls/internal/test/integration" ) func TestStandaloneFiles(t *testing.T) { diff --git a/gopls/internal/test/integration/workspace/std_test.go b/gopls/internal/test/integration/workspace/std_test.go new file mode 100644 index 00000000000..9c021fef4f3 --- /dev/null +++ b/gopls/internal/test/integration/workspace/std_test.go @@ -0,0 +1,73 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package workspace + +import ( + "os/exec" + "path/filepath" + "runtime" + "strings" + "testing" + + . 
"golang.org/x/tools/gopls/internal/test/integration" +) + +func TestStdWorkspace(t *testing.T) { + // This test checks that we actually load workspace packages when opening + // GOROOT. + // + // In golang/go#65801, we failed to do this because go/packages returns nil + // Module for std and cmd. + // + // Because this test loads std as a workspace, it may be slow on smaller + // builders. + if testing.Short() { + t.Skip("skipping with -short: loads GOROOT") + } + + // The test also fails on Windows because an absolute path does not match + // (likely a misspelling due to slashes). + // TODO(rfindley): investigate and fix this on windows. + if runtime.GOOS == "windows" { + t.Skip("skipping on windows: fails due to misspelled paths") + } + + // Query GOROOT. This is slightly more precise than e.g. runtime.GOROOT, as + // it queries the go command in the environment. + goroot, err := exec.Command("go", "env", "GOROOT").Output() + if err != nil { + t.Fatal(err) + } + stdDir := filepath.Join(strings.TrimSpace(string(goroot)), "src") + WithOptions( + Modes(Default), // This test may be slow. No reason to run it multiple times. + WorkspaceFolders(stdDir), + ).Run(t, "", func(t *testing.T, env *Env) { + // Find parser.ParseFile. Query with `'` to get an exact match. + syms := env.Symbol("'go/parser.ParseFile") + if len(syms) != 1 { + t.Fatalf("got %d symbols, want exactly 1. Symbols:\n%v", len(syms), syms) + } + parserPath := syms[0].Location.URI.Path() + env.OpenFile(parserPath) + + // Find the reference to ast.File from the signature of ParseFile. This + // helps guard against matching a comment. + astFile := env.RegexpSearch(parserPath, `func ParseFile\(.*ast\.(File)`) + refs := env.References(astFile) + + // If we've successfully loaded workspace packages for std, we should find + // a reference in go/types. 
+ foundGoTypesReference := false + for _, ref := range refs { + if strings.Contains(string(ref.URI), "go/types") { + foundGoTypesReference = true + } + } + if !foundGoTypesReference { + t.Errorf("references(ast.File) did not return a go/types reference. Refs:\n%v", refs) + } + }) +} diff --git a/gopls/internal/test/integration/workspace/vendor_test.go b/gopls/internal/test/integration/workspace/vendor_test.go new file mode 100644 index 00000000000..f14cf539de0 --- /dev/null +++ b/gopls/internal/test/integration/workspace/vendor_test.go @@ -0,0 +1,67 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package workspace + +import ( + "testing" + + . "golang.org/x/tools/gopls/internal/test/integration" +) + +func TestWorkspacePackagesExcludesVendor(t *testing.T) { + // This test verifies that packages in the vendor directory are not workspace + // packages. This would be an easy mistake for gopls to make, since mod + // vendoring excludes go.mod files, and therefore the nearest go.mod file for + // vendored packages is often the workspace mod file. + const proxy = ` +-- other.com/b@v1.0.0/go.mod -- +module other.com/b + +go 1.18 + +-- other.com/b@v1.0.0/b.go -- +package b + +type B int + +func _() { + var V int // unused +} +` + const src = ` +-- go.mod -- +module example.com/a +go 1.14 +require other.com/b v1.0.0 + +-- go.sum -- +other.com/b v1.0.0 h1:ct1+0RPozzMvA2rSYnVvIfr/GDHcd7oVnw147okdi3g= +other.com/b v1.0.0/go.mod h1:bfTSZo/4ZtAQJWBYScopwW6n9Ctfsl2mi8nXsqjDXR8= + +-- a.go -- +package a + +import "other.com/b" + +var _ b.B + +` + WithOptions( + ProxyFiles(proxy), + Modes(Default), + ).Run(t, src, func(t *testing.T, env *Env) { + env.RunGoCommand("mod", "vendor") + // Uncomment for updated go.sum contents. 
+ // env.DumpGoSum(".") + env.OpenFile("a.go") + env.AfterChange( + NoDiagnostics(), // as b is not a workspace package + ) + env.GoToDefinition(env.RegexpSearch("a.go", `b\.(B)`)) + env.AfterChange( + Diagnostics(env.AtRegexp("vendor/other.com/b/b.go", "V"), WithMessage("not used")), + ) + }) +} diff --git a/gopls/internal/regtest/workspace/workspace_test.go b/gopls/internal/test/integration/workspace/workspace_test.go similarity index 79% rename from gopls/internal/regtest/workspace/workspace_test.go rename to gopls/internal/test/integration/workspace/workspace_test.go index fa04a41ddbc..e2819404dfa 100644 --- a/gopls/internal/regtest/workspace/workspace_test.go +++ b/gopls/internal/test/integration/workspace/workspace_test.go @@ -7,19 +7,20 @@ package workspace import ( "context" "fmt" - "path/filepath" + "sort" "strings" "testing" - "golang.org/x/tools/gopls/internal/bug" + "github.com/google/go-cmp/cmp" "golang.org/x/tools/gopls/internal/hooks" - "golang.org/x/tools/gopls/internal/lsp" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/integration/fake" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/goversion" "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/testenv" - . "golang.org/x/tools/gopls/internal/lsp/regtest" + . 
"golang.org/x/tools/gopls/internal/test/integration" ) func TestMain(m *testing.M) { @@ -172,7 +173,7 @@ func TestReloadOnlyOnce(t *testing.T) { ProxyFiles(workspaceProxy), WorkspaceFolders("pkg"), ).Run(t, workspaceModule, func(t *testing.T, env *Env) { - dir := env.Sandbox.Workdir.URI("goodbye").SpanURI().Filename() + dir := env.Sandbox.Workdir.URI("goodbye").Path() goModWithReplace := fmt.Sprintf(`%s replace random.org => %s `, env.ReadWorkspaceFile("pkg/go.mod"), dir) @@ -206,9 +207,7 @@ package b func Hello() {} ` -func TestAutomaticWorkspaceModule_Interdependent(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // uses go.work - const multiModule = ` +const multiModule = ` -- moda/a/go.mod -- module a.com @@ -237,6 +236,8 @@ func Hello() int { var x int } ` + +func TestAutomaticWorkspaceModule_Interdependent(t *testing.T) { WithOptions( ProxyFiles(workspaceModuleProxy), ).Run(t, multiModule, func(t *testing.T, env *Env) { @@ -250,6 +251,25 @@ func Hello() int { }) } +func TestWorkspaceVendoring(t *testing.T) { + testenv.NeedsGo1Point(t, 22) + WithOptions( + ProxyFiles(workspaceModuleProxy), + ).Run(t, multiModule, func(t *testing.T, env *Env) { + env.RunGoCommand("work", "init") + env.RunGoCommand("work", "use", "moda/a") + env.AfterChange() + env.OpenFile("moda/a/a.go") + env.RunGoCommand("work", "vendor") + env.AfterChange() + loc := env.GoToDefinition(env.RegexpSearch("moda/a/a.go", "b.(Hello)")) + const want = "vendor/b.com/b/b.go" + if got := env.Sandbox.Workdir.URIToPath(loc.URI); got != want { + t.Errorf("Definition: got location %q, want %q", got, want) + } + }) +} + func TestModuleWithExclude(t *testing.T) { const proxy = ` -- c.com@v1.2.3/go.mod -- @@ -279,7 +299,7 @@ module b.com go 1.12 ` - const multiModule = ` + const files = ` -- go.mod -- module a.com @@ -298,7 +318,7 @@ func main() { ` WithOptions( ProxyFiles(proxy), - ).Run(t, multiModule, func(t *testing.T, env *Env) { + ).Run(t, files, func(t *testing.T, env *Env) { env.OnceMet( 
InitialWorkspaceLoad, Diagnostics(env.AtRegexp("main.go", "x")), @@ -312,7 +332,6 @@ func main() { // TODO(golang/go#55331): delete this placeholder along with experimental // workspace module. func TestDeleteModule_Interdependent(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // uses go.work const multiModule = ` -- go.work -- go 1.18 @@ -379,7 +398,6 @@ func Hello() int { // Tests that the version of the module used changes after it has been added // to the workspace. func TestCreateModule_Interdependent(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // uses go.work const multiModule = ` -- go.work -- go 1.18 @@ -444,7 +462,6 @@ func Hello() int { // This test confirms that a gopls workspace can recover from initialization // with one invalid module. func TestOneBrokenModule(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // uses go.work const multiModule = ` -- go.work -- go 1.18 @@ -508,7 +525,6 @@ module example.com/bar } func TestUseGoWork(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // uses go.work // This test validates certain functionality related to using a go.work // file to specify workspace modules. const multiModule = ` @@ -643,7 +659,10 @@ use ( // This fails if guarded with a OnceMet(DoneWithSave(), ...), because it is // delayed (and therefore not synchronous with the change). - env.Await(NoDiagnostics(ForFile("modb/go.mod"))) + // + // Note: this check used to assert on NoDiagnostics, but with zero-config + // gopls we still have diagnostics. + env.Await(Diagnostics(ForFile("modb/go.mod"), WithMessage("example.com is not used"))) // Test Formatting. 
env.SetBufferContent("go.work", `go 1.18 @@ -670,8 +689,6 @@ use ( } func TestUseGoWorkDiagnosticMissingModule(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // uses go.work - const files = ` -- go.work -- go 1.18 @@ -703,7 +720,6 @@ module example.com/bar } func TestUseGoWorkDiagnosticSyntaxError(t *testing.T) { - testenv.NeedsGo1Point(t, 18) const files = ` -- go.work -- go 1.18 @@ -721,8 +737,6 @@ replace } func TestUseGoWorkHover(t *testing.T) { - testenv.NeedsGo1Point(t, 18) - const files = ` -- go.work -- go 1.18 @@ -758,7 +772,6 @@ module example.com/bar/baz } func TestExpandToGoWork(t *testing.T) { - testenv.NeedsGo1Point(t, 18) const workspace = ` -- moda/a/go.mod -- module a.com @@ -807,6 +820,51 @@ use ( }) } +func TestInnerGoWork(t *testing.T) { + // This test checks that gopls honors a go.work file defined + // inside a go module (golang/go#63917). + const workspace = ` +-- go.mod -- +module a.com + +require b.com v1.2.3 +-- a/go.work -- +go 1.18 + +use ( + .. + ../b +) +-- a/a.go -- +package a + +import "b.com/b" + +var _ = b.B +-- b/go.mod -- +module b.com/b + +-- b/b.go -- +package b + +const B = 0 +` + WithOptions( + // This doesn't work if we open the outer module. I'm not sure it should, + // since the go.work file does not apply to the entire module, just a + // subdirectory. + WorkspaceFolders("a"), + ).Run(t, workspace, func(t *testing.T, env *Env) { + env.OpenFile("a/a.go") + loc := env.GoToDefinition(env.RegexpSearch("a/a.go", "b.(B)")) + got := env.Sandbox.Workdir.URIToPath(loc.URI) + want := "b/b.go" + if got != want { + t.Errorf("Definition(b.B): got %q, want %q", got, want) + } + }) +} + func TestNonWorkspaceFileCreation(t *testing.T) { const files = ` -- work/go.mod -- @@ -832,7 +890,6 @@ var _ = fmt.Printf } func TestGoWork_V2Module(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // uses go.work // When using a go.work, we must have proxy content even if it is replaced. 
const proxy = ` -- b.com/v2@v2.1.9/go.mod -- @@ -923,7 +980,6 @@ func TestMultiModule_OneBrokenModule(t *testing.T) { // missing go.sum with diagnostics. With go.work files, this doesn't work: // the go.command will happily write go.work.sum. t.Skip("golang/go#57509: go.mod diagnostics do not work in go.work mode") - testenv.NeedsGo1Point(t, 18) // uses go.work const files = ` -- go.work -- go 1.18 @@ -985,106 +1041,6 @@ func main() { }) } -// Sometimes users may have their module cache within the workspace. -// We shouldn't consider any module in the module cache to be in the workspace. -func TestGOMODCACHEInWorkspace(t *testing.T) { - const mod = ` --- a/go.mod -- -module a.com - -go 1.12 --- a/a.go -- -package a - -func _() {} --- a/c/c.go -- -package c --- gopath/src/b/b.go -- -package b --- gopath/pkg/mod/example.com/go.mod -- -module example.com - -go 1.12 --- gopath/pkg/mod/example.com/main.go -- -package main -` - WithOptions( - EnvVars{"GOPATH": filepath.FromSlash("$SANDBOX_WORKDIR/gopath")}, - Modes(Default), - ).Run(t, mod, func(t *testing.T, env *Env) { - env.Await( - // Confirm that the build configuration is seen as valid, - // even though there are technically multiple go.mod files in the - // worskpace. - LogMatching(protocol.Info, ".*valid build configuration = true.*", 1, false), - ) - }) -} - -func TestAddAndRemoveGoWork(t *testing.T) { - testenv.NeedsGo1Point(t, 18) - // Use a workspace with a module in the root directory to exercise the case - // where a go.work is added to the existing root directory. This verifies - // that we're detecting changes to the module source, not just the root - // directory. 
- const nomod = ` --- go.mod -- -module a.com - -go 1.16 --- main.go -- -package main - -func main() {} --- b/go.mod -- -module b.com - -go 1.16 --- b/main.go -- -package main - -func main() {} -` - WithOptions( - Modes(Default), - ).Run(t, nomod, func(t *testing.T, env *Env) { - env.OpenFile("main.go") - env.OpenFile("b/main.go") - // Since b/main.go is not in the workspace, it should have a warning on its - // package declaration. - env.AfterChange( - NoDiagnostics(ForFile("main.go")), - Diagnostics(env.AtRegexp("b/main.go", "package (main)")), - ) - env.WriteWorkspaceFile("go.work", `go 1.16 - -use ( - . - b -) -`) - env.AfterChange(NoDiagnostics()) - // Removing the go.work file should put us back where we started. - env.RemoveWorkspaceFile("go.work") - - // TODO(golang/go#57558, golang/go#57508): file watching is asynchronous, - // and we must wait for the view to be reconstructed before touching - // b/main.go, so that the new view "knows" about b/main.go. This is simply - // a bug, but awaiting the change here avoids it. - env.Await(env.DoneWithChangeWatchedFiles()) - - // TODO(rfindley): fix this bug: reopening b/main.go is necessary here - // because we no longer "see" the file in any view. - env.CloseBuffer("b/main.go") - env.OpenFile("b/main.go") - - env.AfterChange( - NoDiagnostics(ForFile("main.go")), - Diagnostics(env.AtRegexp("b/main.go", "package (main)")), - ) - }) -} - // Tests the fix for golang/go#52500. func TestChangeTestVariant_Issue52500(t *testing.T) { const src = ` @@ -1132,8 +1088,6 @@ func (Server) Foo() {} // Test for golang/go#48929. func TestClearNonWorkspaceDiagnostics(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // uses go.work - const ws = ` -- go.work -- go 1.18 @@ -1171,16 +1125,139 @@ import ( env.AfterChange( Diagnostics(env.AtRegexp("a/main.go", "V"), WithMessage("not used")), ) + // Here, diagnostics are added because of zero-config gopls. + // In the past, they were added simply due to diagnosing changed files. 
+ // (see TestClearNonWorkspaceDiagnostics_NoView below for a + // reimplementation of that test). + if got, want := len(env.Views()), 2; got != want { + t.Errorf("after opening a/main.go, got %d views, want %d", got, want) + } env.CloseBuffer("a/main.go") + env.AfterChange( + NoDiagnostics(ForFile("a/main.go")), + ) + if got, want := len(env.Views()), 1; got != want { + t.Errorf("after closing a/main.go, got %d views, want %d", got, want) + } + }) +} + +// This test is like TestClearNonWorkspaceDiagnostics, but bypasses the +// zero-config algorithm by opening a nested workspace folder. +// +// We should still compute diagnostics correctly for open packages. +func TestClearNonWorkspaceDiagnostics_NoView(t *testing.T) { + const ws = ` +-- a/go.mod -- +module example.com/a + +go 1.18 + +require example.com/b v1.2.3 + +replace example.com/b => ../b + +-- a/a.go -- +package a + +import "example.com/b" + +func _() { + V := b.B // unused +} + +-- b/go.mod -- +module b + +go 1.18 + +-- b/b.go -- +package b + +const B = 2 + +func _() { + var V int // unused +} + +-- b/b2.go -- +package b + +const B2 = B + +-- c/c.go -- +package main + +func main() { + var V int // unused +} +` + WithOptions( + WorkspaceFolders("a"), + ).Run(t, ws, func(t *testing.T, env *Env) { + env.OpenFile("a/a.go") + env.AfterChange( + Diagnostics(env.AtRegexp("a/a.go", "V"), WithMessage("not used")), + NoDiagnostics(ForFile("b/b.go")), + NoDiagnostics(ForFile("c/c.go")), + ) + env.OpenFile("b/b.go") + env.AfterChange( + Diagnostics(env.AtRegexp("a/a.go", "V"), WithMessage("not used")), + Diagnostics(env.AtRegexp("b/b.go", "V"), WithMessage("not used")), + NoDiagnostics(ForFile("c/c.go")), + ) - // Make an arbitrary edit because gopls explicitly diagnoses a/main.go - // whenever it is "changed". + // Opening b/b.go should not result in a new view, because b is not + // contained in a workspace folder. // - // TODO(rfindley): it should not be necessary to make another edit here. 
- // Gopls should be smart enough to avoid diagnosing a. - env.RegexpReplace("b/main.go", "package b", "package b // a package") + // Yet we should get diagnostics for b, because it is open. + if got, want := len(env.Views()), 1; got != want { + t.Errorf("after opening b/b.go, got %d views, want %d", got, want) + } + env.CloseBuffer("b/b.go") env.AfterChange( - NoDiagnostics(ForFile("a/main.go")), + Diagnostics(env.AtRegexp("a/a.go", "V"), WithMessage("not used")), + NoDiagnostics(ForFile("b/b.go")), + NoDiagnostics(ForFile("c/c.go")), + ) + + // We should get references in the b package. + bUse := env.RegexpSearch("a/a.go", `b\.(B)`) + refs := env.References(bUse) + wantRefs := []string{"a/a.go", "b/b.go", "b/b2.go"} + var gotRefs []string + for _, ref := range refs { + gotRefs = append(gotRefs, env.Sandbox.Workdir.URIToPath(ref.URI)) + } + sort.Strings(gotRefs) + if diff := cmp.Diff(wantRefs, gotRefs); diff != "" { + t.Errorf("references(b.B) mismatch (-want +got)\n%s", diff) + } + + // Opening c/c.go should also not result in a new view, yet we should get + // orphaned file diagnostics. + env.OpenFile("c/c.go") + env.AfterChange( + Diagnostics(env.AtRegexp("a/a.go", "V"), WithMessage("not used")), + NoDiagnostics(ForFile("b/b.go")), + Diagnostics(env.AtRegexp("c/c.go", "V"), WithMessage("not used")), + ) + if got, want := len(env.Views()), 1; got != want { + t.Errorf("after opening c/c.go, got %d views, want %d", got, want) + } + + env.CloseBuffer("c/c.go") + env.AfterChange( + Diagnostics(env.AtRegexp("a/a.go", "V"), WithMessage("not used")), + NoDiagnostics(ForFile("b/b.go")), + NoDiagnostics(ForFile("c/c.go")), + ) + env.CloseBuffer("a/a.go") + env.AfterChange( + Diagnostics(env.AtRegexp("a/a.go", "V"), WithMessage("not used")), + NoDiagnostics(ForFile("b/b.go")), + NoDiagnostics(ForFile("c/c.go")), ) }) } @@ -1189,7 +1266,7 @@ import ( // supported. 
func TestOldGoNotification_SupportedVersion(t *testing.T) { v := goVersion(t) - if v < lsp.OldestSupportedGoVersion() { + if v < goversion.OldestSupported() { t.Skipf("go version 1.%d is unsupported", v) } @@ -1206,7 +1283,7 @@ func TestOldGoNotification_SupportedVersion(t *testing.T) { // legacy Go versions (see also TestOldGoNotification_Fake) func TestOldGoNotification_UnsupportedVersion(t *testing.T) { v := goVersion(t) - if v >= lsp.OldestSupportedGoVersion() { + if v >= goversion.OldestSupported() { t.Skipf("go version 1.%d is supported", v) } @@ -1226,15 +1303,15 @@ func TestOldGoNotification_Fake(t *testing.T) { // oldest supported Go version here, we can at least ensure that the // ShowMessage pop-up works. ctx := context.Background() - goversion, err := gocommand.GoVersion(ctx, gocommand.Invocation{}, &gocommand.Runner{}) + version, err := gocommand.GoVersion(ctx, gocommand.Invocation{}, &gocommand.Runner{}) if err != nil { t.Fatal(err) } - defer func(t []lsp.GoVersionSupport) { - lsp.GoVersionTable = t - }(lsp.GoVersionTable) - lsp.GoVersionTable = []lsp.GoVersionSupport{ - {GoVersion: goversion, InstallGoplsVersion: "v1.0.0"}, + defer func(t []goversion.Support) { + goversion.Supported = t + }(goversion.Supported) + goversion.Supported = []goversion.Support{ + {GoVersion: version, InstallGoplsVersion: "v1.0.0"}, } Run(t, "", func(t *testing.T, env *Env) { @@ -1256,3 +1333,23 @@ func goVersion(t *testing.T) int { } return goversion } + +func TestGoworkMutation(t *testing.T) { + WithOptions( + ProxyFiles(workspaceModuleProxy), + ).Run(t, multiModule, func(t *testing.T, env *Env) { + env.RunGoCommand("work", "init") + env.RunGoCommand("work", "use", "-r", ".") + env.AfterChange( + Diagnostics(env.AtRegexp("moda/a/a.go", "x")), + Diagnostics(env.AtRegexp("modb/b/b.go", "x")), + NoDiagnostics(env.AtRegexp("moda/a/a.go", `b\.Hello`)), + ) + env.RunGoCommand("work", "edit", "-dropuse", "modb") + env.Await( + Diagnostics(env.AtRegexp("moda/a/a.go", "x")), + 
NoDiagnostics(env.AtRegexp("modb/b/b.go", "x")), + Diagnostics(env.AtRegexp("moda/a/a.go", `b\.Hello`)), + ) + }) +} diff --git a/gopls/internal/test/integration/workspace/zero_config_test.go b/gopls/internal/test/integration/workspace/zero_config_test.go new file mode 100644 index 00000000000..57498831a7d --- /dev/null +++ b/gopls/internal/test/integration/workspace/zero_config_test.go @@ -0,0 +1,326 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package workspace + +import ( + "strings" + "testing" + + "github.com/google/go-cmp/cmp" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/protocol/command" + + . "golang.org/x/tools/gopls/internal/test/integration" +) + +func TestAddAndRemoveGoWork(t *testing.T) { + // Use a workspace with a module in the root directory to exercise the case + // where a go.work is added to the existing root directory. This verifies + // that we're detecting changes to the module source, not just the root + // directory. + const nomod = ` +-- go.mod -- +module a.com + +go 1.16 +-- main.go -- +package main + +func main() {} +-- b/go.mod -- +module b.com + +go 1.16 +-- b/main.go -- +package main + +func main() {} +` + WithOptions( + Modes(Default), + ).Run(t, nomod, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + env.OpenFile("b/main.go") + + summary := func(typ cache.ViewType, root, folder string) command.View { + return command.View{ + Type: typ.String(), + Root: env.Sandbox.Workdir.URI(root), + Folder: env.Sandbox.Workdir.URI(folder), + } + } + checkViews := func(want ...command.View) { + got := env.Views() + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("SummarizeViews() mismatch (-want +got):\n%s", diff) + } + } + + // Zero-config gopls makes this work. 
+ env.AfterChange( + NoDiagnostics(ForFile("main.go")), + NoDiagnostics(env.AtRegexp("b/main.go", "package (main)")), + ) + checkViews(summary(cache.GoModView, ".", "."), summary(cache.GoModView, "b", ".")) + + env.WriteWorkspaceFile("go.work", `go 1.16 + +use ( + . + b +) +`) + env.AfterChange(NoDiagnostics()) + checkViews(summary(cache.GoWorkView, ".", ".")) + + // Removing the go.work file should put us back where we started. + env.RemoveWorkspaceFile("go.work") + + // Again, zero-config gopls makes this work. + env.AfterChange( + NoDiagnostics(ForFile("main.go")), + NoDiagnostics(env.AtRegexp("b/main.go", "package (main)")), + ) + checkViews(summary(cache.GoModView, ".", "."), summary(cache.GoModView, "b", ".")) + + // Close and reopen b, to ensure the views are adjusted accordingly. + env.CloseBuffer("b/main.go") + env.AfterChange() + checkViews(summary(cache.GoModView, ".", ".")) + + env.OpenFile("b/main.go") + env.AfterChange() + checkViews(summary(cache.GoModView, ".", "."), summary(cache.GoModView, "b", ".")) + }) +} + +func TestOpenAndClosePorts(t *testing.T) { + // This test checks that as we open and close files requiring a different + // port, the set of Views is adjusted accordingly. 
+	const files = `
+-- go.mod --
+module a.com/a
+
+go 1.20
+
+-- a_linux.go --
+package a
+
+-- a_darwin.go --
+package a
+
+-- a_windows.go --
+package a
+`
+
+	WithOptions(
+		EnvVars{
+			"GOOS": "linux", // assume that linux is the default GOOS
+		},
+	).Run(t, files, func(t *testing.T, env *Env) {
+		summary := func(envOverlay ...string) command.View {
+			return command.View{
+				Type:       cache.GoModView.String(),
+				Root:       env.Sandbox.Workdir.URI("."),
+				Folder:     env.Sandbox.Workdir.URI("."),
+				EnvOverlay: envOverlay,
+			}
+		}
+		checkViews := func(want ...command.View) {
+			got := env.Views()
+			if diff := cmp.Diff(want, got); diff != "" {
+				t.Errorf("SummarizeViews() mismatch (-want +got):\n%s", diff)
+			}
+		}
+		checkViews(summary())
+		env.OpenFile("a_linux.go")
+		checkViews(summary())
+		env.OpenFile("a_darwin.go")
+		checkViews(
+			summary(),
+			summary("GOARCH=amd64", "GOOS=darwin"),
+		)
+		env.OpenFile("a_windows.go")
+		checkViews(
+			summary(),
+			summary("GOARCH=amd64", "GOOS=darwin"),
+			summary("GOARCH=amd64", "GOOS=windows"),
+		)
+		env.CloseBuffer("a_darwin.go")
+		checkViews(
+			summary(),
+			summary("GOARCH=amd64", "GOOS=windows"),
+		)
+		env.CloseBuffer("a_linux.go")
+		checkViews(
+			summary(),
+			summary("GOARCH=amd64", "GOOS=windows"),
+		)
+		env.CloseBuffer("a_windows.go")
+		checkViews(summary())
+	})
+}
+
+func TestCriticalErrorsInOrphanedFiles(t *testing.T) {
+	// This test checks that critical errors (such as a broken go.mod file)
+	// are surfaced as diagnostics on open files in the workspace.
+	const files = `
+-- go.mod --
+modul golang.org/lsptests/broken
+
+go 1.20
+
+-- a.go --
+package broken
+
+const C = 0
+`
+
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("a.go")
+		env.AfterChange(
+			Diagnostics(env.AtRegexp("go.mod", "modul")),
+			Diagnostics(env.AtRegexp("a.go", "broken"), WithMessage("initialization failed")),
+		)
+	})
+}
+
+func TestGoModReplace(t *testing.T) {
+	// This test checks that we treat locally replaced modules as workspace
+	// modules, according to the "includeReplaceInWorkspace" setting.
+	const files = `
+-- moda/go.mod --
+module golang.org/a
+
+require golang.org/b v1.2.3
+
+replace golang.org/b => ../modb
+
+go 1.20
+
+-- moda/a.go --
+package a
+
+import "golang.org/b"
+
+const A = b.B
+
+-- modb/go.mod --
+module golang.org/b
+
+go 1.20
+
+-- modb/b.go --
+package b
+
+const B = 1
+`
+
+	for useReplace, expectation := range map[bool]Expectation{
+		true:  FileWatchMatching("modb"),
+		false: NoFileWatchMatching("modb"),
+	} {
+		WithOptions(
+			WorkspaceFolders("moda"),
+			Settings{
+				"includeReplaceInWorkspace": useReplace,
+			},
+		).Run(t, files, func(t *testing.T, env *Env) {
+			env.OnceMet(
+				InitialWorkspaceLoad,
+				expectation,
+			)
+		})
+	}
+}
+
+func TestDisableZeroConfig(t *testing.T) {
+	// This test checks that the zeroConfig setting can be used to disable
+	// the zero-config algorithm, resulting in a single ad-hoc view.
+ const files = ` +-- moda/go.mod -- +module golang.org/a + +go 1.20 + +-- moda/a.go -- +package a + +-- modb/go.mod -- +module golang.org/b + +go 1.20 + +-- modb/b.go -- +package b + +` + + WithOptions( + Settings{"zeroConfig": false}, + ).Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("moda/a.go") + env.OpenFile("modb/b.go") + env.AfterChange() + if got := env.Views(); len(got) != 1 || got[0].Type != cache.AdHocView.String() { + t.Errorf("Views: got %v, want one adhoc view", got) + } + }) +} + +func TestVendorExcluded(t *testing.T) { + // Test that we don't create Views for vendored modules. + // + // We construct the vendor directory manually here, as `go mod vendor` will + // omit the go.mod file. This synthesizes the setup of Kubernetes, where the + // entire module is vendored through a symlinked directory. + const src = ` +-- go.mod -- +module example.com/a + +go 1.18 + +require other.com/b v1.0.0 + +-- a.go -- +package a +import "other.com/b" +var _ b.B + +-- vendor/modules.txt -- +# other.com/b v1.0.0 +## explicit; go 1.14 +other.com/b + +-- vendor/other.com/b/go.mod -- +module other.com/b +go 1.14 + +-- vendor/other.com/b/b.go -- +package b +type B int + +func _() { + var V int // unused +} +` + WithOptions( + Modes(Default), + ).Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("a.go") + env.AfterChange(NoDiagnostics()) + loc := env.GoToDefinition(env.RegexpSearch("a.go", `b\.(B)`)) + if !strings.Contains(string(loc.URI), "/vendor/") { + t.Fatalf("Definition(b.B) = %v, want vendored location", loc.URI) + } + env.AfterChange( + Diagnostics(env.AtRegexp("vendor/other.com/b/b.go", "V"), WithMessage("not used")), + ) + + if views := env.Views(); len(views) != 1 { + t.Errorf("After opening /vendor/, got %d views, want 1. 
Views:\n%v", len(views), views) + } + }) +} diff --git a/gopls/internal/lsp/regtest/wrappers.go b/gopls/internal/test/integration/wrappers.go similarity index 92% rename from gopls/internal/lsp/regtest/wrappers.go rename to gopls/internal/test/integration/wrappers.go index a2c6a1eea7e..ce51208d0a3 100644 --- a/gopls/internal/lsp/regtest/wrappers.go +++ b/gopls/internal/test/integration/wrappers.go @@ -2,15 +2,15 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package regtest +package integration import ( "encoding/json" "path" - "golang.org/x/tools/gopls/internal/lsp/command" - "golang.org/x/tools/gopls/internal/lsp/fake" - "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/test/integration/fake" "golang.org/x/tools/internal/xcontext" ) @@ -269,7 +269,7 @@ func (e *Env) RunGenerate(dir string) { e.T.Fatal(err) } e.Await(NoOutstandingWork(IgnoreTelemetryPromptWork)) - // Ideally the fake.Workspace would handle all synthetic file watching, but + // Ideally the editor.Workspace would handle all synthetic file watching, but // we help it out here as we need to wait for the generate command to // complete before checking the filesystem. e.CheckForFileChanges() @@ -314,14 +314,14 @@ func (e *Env) GoVersion() int { } // DumpGoSum prints the correct go.sum contents for dir in txtar format, -// for use in creating regtests. +// for use in creating integration tests. 
func (e *Env) DumpGoSum(dir string) { e.T.Helper() - if err := e.Sandbox.RunGoCommand(e.Ctx, dir, "list", []string{"-mod=mod", "..."}, nil, true); err != nil { + if err := e.Sandbox.RunGoCommand(e.Ctx, dir, "list", []string{"-mod=mod", "./..."}, nil, true); err != nil { e.T.Fatal(err) } - sumFile := path.Join(dir, "/go.sum") + sumFile := path.Join(dir, "go.sum") e.T.Log("\n\n-- " + sumFile + " --\n" + e.ReadWorkspaceFile(sumFile)) e.T.Fatal("see contents above") } @@ -393,6 +393,20 @@ func (e *Env) ExecuteCommand(params *protocol.ExecuteCommandParams, result inter } } +// Views returns the server's views. +func (e *Env) Views() []command.View { + var summaries []command.View + cmd, err := command.NewViewsCommand("") + if err != nil { + e.T.Fatal(err) + } + e.ExecuteCommand(&protocol.ExecuteCommandParams{ + Command: cmd.Command, + Arguments: cmd.Arguments, + }, &summaries) + return summaries +} + // StartProfile starts a CPU profile with the given name, using the // gopls.start_profile custom command. It calls t.Fatal on any error. // @@ -514,7 +528,7 @@ func (e *Env) AcceptCompletion(loc protocol.Location, item protocol.CompletionIt } } -// CodeAction calls testDocument/codeAction for the given path, and calls +// CodeAction calls textDocument/codeAction for the given path, and calls // t.Fatal if there are errors. func (e *Env) CodeAction(path string, diagnostics []protocol.Diagnostic) []protocol.CodeAction { e.T.Helper() @@ -543,6 +557,28 @@ func (e *Env) ChangeWorkspaceFolders(newFolders ...string) { } } +// SemanticTokensFull invokes textDocument/semanticTokens/full, calling t.Fatal +// on any error. +func (e *Env) SemanticTokensFull(path string) []fake.SemanticToken { + e.T.Helper() + toks, err := e.Editor.SemanticTokensFull(e.Ctx, path) + if err != nil { + e.T.Fatal(err) + } + return toks +} + +// SemanticTokensRange invokes textDocument/semanticTokens/range, calling t.Fatal +// on any error. 
+func (e *Env) SemanticTokensRange(loc protocol.Location) []fake.SemanticToken { + e.T.Helper() + toks, err := e.Editor.SemanticTokensRange(e.Ctx, loc) + if err != nil { + e.T.Fatal(err) + } + return toks +} + // Close shuts down the editor session and cleans up the sandbox directory, // calling t.Error on any error. func (e *Env) Close() { diff --git a/gopls/internal/test/marker/doc.go b/gopls/internal/test/marker/doc.go new file mode 100644 index 00000000000..243b7749142 --- /dev/null +++ b/gopls/internal/test/marker/doc.go @@ -0,0 +1,361 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package marker defines a framework for running "marker" tests, each +defined by a file in the testdata subdirectory. + +Use this command to run the tests: + + $ go test ./gopls/internal/test/marker [-update] + +A marker test uses the '//@' marker syntax of the x/tools/go/expect package +to annotate source code with various information such as locations and +arguments of LSP operations to be executed by the test. The syntax following +'@' is parsed as a comma-separated list of ordinary Go function calls, for +example + + //@foo(a, "b", 3),bar(0) + +and delegates to a corresponding function to perform LSP-related operations. +See the Marker types documentation below for a list of supported markers. + +Each call argument is converted to the type of the corresponding parameter of +the designated function. The conversion logic may use the surrounding context, +such as the position or nearby text. See the Argument conversion section below +for the full set of special conversions. As a special case, the blank +identifier '_' is treated as the zero value of the parameter type. + +The test runner collects test cases by searching the given directory for +files with the .txt extension. 
Each file is interpreted as a txtar archive, +which is extracted to a temporary directory. The relative path to the .txt +file is used as the subtest name. The preliminary section of the file +(before the first archive entry) is a free-form comment. + +# Special files + +There are several types of file within the test archive that are given special +treatment by the test runner: + + - "skip": the presence of this file causes the test to be skipped, with + the file content used as the skip message. + + - "flags": this file is treated as a whitespace-separated list of flags + that configure the MarkerTest instance. Supported flags: + -min_go=go1.20 sets the minimum Go version for the test; + -cgo requires that CGO_ENABLED is set and the cgo tool is available + -write_sumfile=a,b,c instructs the test runner to generate go.sum files + in these directories before running the test. + -skip_goos=a,b,c instructs the test runner to skip the test for the + listed GOOS values. + -skip_goarch=a,b,c does the same for GOARCH. + -ignore_extra_diags suppresses errors for unmatched diagnostics + TODO(rfindley): using build constraint expressions for -skip_go{os,arch} would + be clearer. + -filter_builtins=false disables the filtering of builtins from + completion results. + -filter_keywords=false disables the filtering of keywords from + completion results. + TODO(rfindley): support flag values containing whitespace. + + - "settings.json": this file is parsed as JSON, and used as the + session configuration (see gopls/doc/settings.md) + + - "capabilities.json": this file is parsed as JSON client capabilities, + and applied as an overlay over the default editor client capabilities. + see https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#clientCapabilities + for more details. + + - "env": this file is parsed as a list of VAR=VALUE fields specifying the + editor environment. 
+ + - Golden files: Within the archive, file names starting with '@' are + treated as "golden" content, and are not written to disk, but instead are + made available to test methods expecting an argument of type *Golden, + using the identifier following '@'. For example, if the first parameter of + Foo were of type *Golden, the test runner would convert the identifier a + in the call @foo(a, "b", 3) into a *Golden by collecting golden file + data starting with "@a/". As a special case, for tests that only need one + golden file, the data contained in the file "@a" is indexed in the *Golden + value by the empty string "". + + - proxy files: any file starting with proxy/ is treated as a Go proxy + file. If present, these files are written to a separate temporary + directory and GOPROXY is set to file://<proxy directory>. + +# Marker types + +Markers are of two kinds. A few are "value markers" (e.g. @item), which are +processed in a first pass and each computes a value that may be referred to +by name later. Most are "action markers", which are processed in a second +pass and take some action such as testing an LSP operation; they may refer +to values computed by value markers. + +The following markers are supported within marker tests: + + - acceptcompletion(location, label, golden): specifies that accepting the + completion candidate produced at the given location with provided label + results in the given golden state. + + - codeaction(start, end, kind, golden, ...titles): specifies a code action + to request for the given range. To support multi-line ranges, the range + is defined to be between start.Start and end.End. The golden directory + contains changed file content after the code action is applied. + If titles are provided, they are used to filter the matching code + action. + + TODO(rfindley): consolidate with codeactionedit, via a @loc2 marker that + allows binding multi-line locations. 
+ + - codeactionedit(range, kind, golden, ...titles): a shorter form of + codeaction. Invokes a code action of the given kind for the given + in-line range, and compares the resulting formatted unified *edits* + (notably, not the full file content) with the golden directory. + + - codeactionerr(start, end, kind, wantError): specifies a codeaction that + fails with an error that matches the expectation. + + - codelens(location, title): specifies that a codelens is expected at the + given location, with given title. Must be used in conjunction with + @codelenses. + + - codelenses(): specifies that textDocument/codeLens should be run for the + current document, with results compared to the @codelens annotations in + the current document. + + - complete(location, ...items): specifies expected completion results at + the given location. Must be used in conjunction with @item. + + - diag(location, regexp): specifies an expected diagnostic matching the + given regexp at the given location. The test runner requires + a 1:1 correspondence between observed diagnostics and diag annotations. + The diagnostics source and kind fields are ignored, to reduce fuss. + + The specified location must match the start position of the diagnostic, + but end positions are ignored. + + TODO(adonovan): in the older marker framework, the annotation asserted + two additional fields (source="compiler", kind="error"). Restore them? + + - def(src, dst location): performs a textDocument/definition request at + the src location, and check the result points to the dst location. + + - documentLink(golden): asserts that textDocument/documentLink returns + links as described by the golden file. + + - foldingrange(golden): performs a textDocument/foldingRange for the + current document, and compare with the golden content, which is the + original source annotated with numbered tags delimiting the resulting + ranges (e.g. <1 kind="..."> ... </1>). 
+ + - format(golden): performs a textDocument/format request for the enclosing + file, and compare against the named golden file. If the formatting + request succeeds, the golden file must contain the resulting formatted + source. If the formatting request fails, the golden file must contain + the error message. + + - highlight(src location, dsts ...location): makes a + textDocument/highlight request at the given src location, which should + highlight the provided dst locations. + + - hover(src, dst location, sm stringMatcher): performs a textDocument/hover + at the src location, and checks that the result is the dst location, with + matching hover content. + + - hovererr(src, sm stringMatcher): performs a textDocument/hover at the src + location, and checks that the error matches the given stringMatcher. + + - implementations(src location, want ...location): makes a + textDocument/implementation query at the src location and + checks that the resulting set of locations matches want. + + - incomingcalls(src location, want ...location): makes a + callHierarchy/incomingCalls query at the src location, and checks that + the set of call.From locations matches want. + + - item(label, details, kind): defines a completion item with the provided + fields. This information is not positional, and therefore @item markers + may occur anywhere in the source. Used in conjunction with @complete, + snippet, or rank. + + TODO(rfindley): rethink whether floating @item annotations are the best + way to specify completion results. + + - loc(name, location): specifies the name for a location in the source. These + locations may be referenced by other markers. + + - outgoingcalls(src location, want ...location): makes a + callHierarchy/outgoingCalls query at the src location, and checks that + the set of call.To locations matches want. 
+
+  - preparerename(src, spn, placeholder): asserts that a textDocument/prepareRename
+    request at the src location expands to the spn location, with given
+    placeholder. If placeholder is "", this is treated as a negative
+    assertion and prepareRename should return nil.
+
+  - rename(location, new, golden): specifies a renaming of the
+    identifier at the specified location to the new name.
+    The golden directory contains the transformed files.
+
+  - renameerr(location, new, wantError): specifies a renaming that
+    fails with an error that matches the expectation.
+
+  - signature(location, label, active): specifies that
+    signatureHelp at the given location should match the provided string, with
+    the active parameter (an index) highlighted.
+
+  - suggestedfix(location, regexp, golden): like diag, the location and
+    regexp identify an expected diagnostic. This diagnostic must have
+    exactly one associated code action of the specified kind.
+    This action is executed for its editing effects on the source files.
+    Like rename, the golden directory contains the expected transformed files.
+
+  - suggestedfixerr(location, regexp, kind, wantError): specifies that the
+    suggestedfix operation should fail with an error that matches the expectation.
+    (Failures in the computation to offer a fix do not generally result
+    in LSP errors, so this marker is not appropriate for testing them.)
+
+  - rank(location, ...completionItem): executes a textDocument/completion
+    request at the given location, and verifies that each expected
+    completion item occurs in the results, in the expected order. Other
+    unexpected completion items may occur in the results.
+    TODO(rfindley): this exists for compatibility with the old marker tests.
+    Replace this with rankl, and rename.
+    A "!" prefix on a label asserts that the symbol is not a
+    completion candidate.
+
+  - rankl(location, ...label): like rank, but only cares about completion
+    item labels.
+ + - refs(location, want ...location): executes a textDocument/references + request at the first location and asserts that the result is the set of + 'want' locations. The first want location must be the declaration + (assumedly unique). + + - snippet(location, completionItem, snippet): executes a + textDocument/completion request at the location, and searches for a + result with label matching that of the provided completion item + (TODO(rfindley): accept a label rather than a completion item). Check + the result snippet matches the provided snippet. + + - symbol(golden): makes a textDocument/documentSymbol request + for the enclosing file, formats the response with one symbol + per line, sorts it, and compares against the named golden file. + Each line is of the form: + + dotted.symbol.name kind "detail" +n lines + + where the "+n lines" part indicates that the declaration spans + several lines. The test otherwise makes no attempt to check + location information. There is no point to using more than one + @symbol marker in a given file. + + - token(location, tokenType, mod): makes a textDocument/semanticTokens/range + request at the given location, and asserts that the result includes + exactly one token with the given token type and modifier string. + + - workspacesymbol(query, golden): makes a workspace/symbol request for the + given query, formats the response with one symbol per line, and compares + against the named golden file. As workspace symbols are by definition a + workspace-wide request, the location of the workspace symbol marker does + not matter. 
Each line is of the form: + + location name kind + +# Argument conversion + +Marker arguments are first parsed by the go/expect package, which accepts +the following tokens as defined by the Go spec: + - string, int64, float64, and rune literals + - true and false + - nil + - identifiers (type expect.Identifier) + - regular expressions, denoted the two tokens re"abc" (type *regexp.Regexp) + +These values are passed as arguments to the corresponding parameter of the +test function. Additional value conversions may occur for these argument -> +parameter type pairs: + - string->regexp: the argument is parsed as a regular expressions. + - string->location: the argument is converted to the location of the first + instance of the argument in the partial line preceding the note. + - regexp->location: the argument is converted to the location of the first + match for the argument in the partial line preceding the note. If the + regular expression contains exactly one subgroup, the position of the + subgroup is used rather than the position of the submatch. + - name->location: the argument is replaced by the named location. + - name->Golden: the argument is used to look up golden content prefixed by + @<argument>. + - {string,regexp,identifier}->stringMatcher: a stringMatcher type + specifies an expected string, either in the form of a substring + that must be present, a regular expression that it must match, or an + identifier (e.g. foo) such that the archive entry @foo exists and + contains the exact expected string. + stringMatchers are used by some markers to match positive results + (outputs) and by other markers to match error messages. + +# Example + +Here is a complete example: + + This test checks hovering over constants. 
+ + -- a.go -- + package a + + const abc = 0x2a //@hover("b", "abc", abc),hover(" =", "abc", abc) + + -- @abc -- + ```go + const abc untyped int = 42 + ``` + + @hover("b", "abc", abc),hover(" =", "abc", abc) + +In this example, the @hover annotation tells the test runner to run the +hoverMarker function, which has parameters: + + (mark marker, src, dsc protocol.Location, g *Golden). + +The first argument holds the test context, including fake editor with open +files, and sandboxed directory. + +Argument converters translate the "b" and "abc" arguments into locations by +interpreting each one as a substring (or as a regular expression, if of the +form re"a|b") and finding the location of its first occurrence on the preceding +portion of the line, and the abc identifier into a the golden content contained +in the file @abc. Then the hoverMarker method executes a textDocument/hover LSP +request at the src position, and ensures the result spans "abc", with the +markdown content from @abc. (Note that the markdown content includes the expect +annotation as the doc comment.) + +The next hover on the same line asserts the same result, but initiates the +hover immediately after "abc" in the source. This tests that we find the +preceding identifier when hovering. + +# Updating golden files + +To update golden content in the test archive, it is easier to regenerate +content automatically rather than edit it by hand. To do this, run the +tests with the -update flag. Only tests that actually run will be updated. + +In some cases, golden content will vary by Go version (for example, gopls +produces different markdown at Go versions before the 1.19 go/doc update). +By convention, the golden content in test archives should match the output +at Go tip. Each test function can normalize golden content for older Go +versions. + +Note that -update does not cause missing @diag or @loc markers to be added. + +# TODO + + - Rename the files .txtar. 
+ - Provide some means by which locations in the standard library + (or builtin.go) can be named, so that, for example, we can we + can assert that MyError implements the built-in error type. + - If possible, improve handling for optional arguments. Rather than have + multiple variations of a marker, it would be nice to support a more + flexible signature: can codeaction, codeactionedit, codeactionerr, and + suggestedfix be consolidated? +*/ +package marker diff --git a/gopls/internal/test/marker/marker_test.go b/gopls/internal/test/marker/marker_test.go new file mode 100644 index 00000000000..6a4859662ed --- /dev/null +++ b/gopls/internal/test/marker/marker_test.go @@ -0,0 +1,2326 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package marker + +// This file defines the marker test framework. +// See doc.go for extensive documentation. + +import ( + "bytes" + "context" + "encoding/json" + "flag" + "fmt" + "go/token" + "go/types" + "io/fs" + "log" + "os" + "path" + "path/filepath" + "reflect" + "regexp" + "runtime" + "sort" + "strings" + "testing" + + "github.com/google/go-cmp/cmp" + + "golang.org/x/tools/go/expect" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/debug" + "golang.org/x/tools/gopls/internal/hooks" + "golang.org/x/tools/gopls/internal/lsprpc" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/compare" + "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/integration/fake" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/gopls/internal/util/slices" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/diff/myers" + "golang.org/x/tools/internal/jsonrpc2" + "golang.org/x/tools/internal/jsonrpc2/servertest" + 
"golang.org/x/tools/internal/testenv" + "golang.org/x/tools/txtar" +) + +var update = flag.Bool("update", false, "if set, update test data during marker tests") + +func TestMain(m *testing.M) { + bug.PanicOnBugs = true + testenv.ExitIfSmallMachine() + // Disable GOPACKAGESDRIVER, as it can cause spurious test failures. + os.Setenv("GOPACKAGESDRIVER", "off") + os.Exit(m.Run()) +} + +// Test runs the marker tests from the testdata directory. +// +// See package documentation for details on how marker tests work. +// +// These tests were inspired by (and in many places copied from) a previous +// iteration of the marker tests built on top of the packagestest framework. +// Key design decisions motivating this reimplementation are as follows: +// - The old tests had a single global session, causing interaction at a +// distance and several awkward workarounds. +// - The old tests could not be safely parallelized, because certain tests +// manipulated the server options +// - Relatedly, the old tests did not have a logic grouping of assertions into +// a single unit, resulting in clusters of files serving clusters of +// entangled assertions. +// - The old tests used locations in the source as test names and as the +// identity of golden content, meaning that a single edit could change the +// name of an arbitrary number of subtests, and making it difficult to +// manually edit golden content. +// - The old tests did not hew closely to LSP concepts, resulting in, for +// example, each marker implementation doing its own position +// transformations, and inventing its own mechanism for configuration. +// - The old tests had an ad-hoc session initialization process. The integration +// test environment has had more time devoted to its initialization, and has a +// more convenient API. +// - The old tests lacked documentation, and often had failures that were hard +// to understand. By starting from scratch, we can revisit these aspects. 
+func Test(t *testing.T) { + if testing.Short() { + builder := os.Getenv("GO_BUILDER_NAME") + // Note that HasPrefix(builder, "darwin-" only matches legacy builders. + // LUCI builder names start with x_tools-goN.NN. + // We want to exclude solaris on both legacy and LUCI builders, as + // it is timing out. + if strings.HasPrefix(builder, "darwin-") || strings.Contains(builder, "solaris") { + t.Skip("golang/go#64473: skipping with -short: this test is too slow on darwin and solaris builders") + } + } + // The marker tests must be able to run go/packages.Load. + testenv.NeedsGoPackages(t) + + const dir = "testdata" + tests, err := loadMarkerTests(dir) + if err != nil { + t.Fatal(err) + } + + // Opt: use a shared cache. + cache := cache.New(nil) + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + if test.skipReason != "" { + t.Skip(test.skipReason) + } + if slices.Contains(test.skipGOOS, runtime.GOOS) { + t.Skipf("skipping on %s due to -skip_goos", runtime.GOOS) + } + if slices.Contains(test.skipGOARCH, runtime.GOARCH) { + t.Skipf("skipping on %s due to -skip_goos", runtime.GOOS) + } + + // TODO(rfindley): it may be more useful to have full support for build + // constraints. 
+ if test.minGoVersion != "" { + var go1point int + if _, err := fmt.Sscanf(test.minGoVersion, "go1.%d", &go1point); err != nil { + t.Fatalf("parsing -min_go version: %v", err) + } + testenv.NeedsGo1Point(t, go1point) + } + if test.cgo { + testenv.NeedsTool(t, "cgo") + } + config := fake.EditorConfig{ + Settings: test.settings, + CapabilitiesJSON: test.capabilities, + Env: test.env, + } + if _, ok := config.Settings["diagnosticsDelay"]; !ok { + if config.Settings == nil { + config.Settings = make(map[string]any) + } + config.Settings["diagnosticsDelay"] = "10ms" + } + // inv: config.Settings != nil + + run := &markerTestRun{ + test: test, + env: newEnv(t, cache, test.files, test.proxyFiles, test.writeGoSum, config), + settings: config.Settings, + values: make(map[expect.Identifier]any), + diags: make(map[protocol.Location][]protocol.Diagnostic), + extraNotes: make(map[protocol.DocumentURI]map[string][]*expect.Note), + } + // TODO(rfindley): make it easier to clean up the integration test environment. + defer run.env.Editor.Shutdown(context.Background()) // ignore error + defer run.env.Sandbox.Close() // ignore error + + // Open all files so that we operate consistently with LSP clients, and + // (pragmatically) so that we have a Mapper available via the fake + // editor. + // + // This also allows avoiding mutating the editor state in tests. + for file := range test.files { + run.env.OpenFile(file) + } + // Wait for the didOpen notifications to be processed, then collect + // diagnostics. 
+ var diags map[string]*protocol.PublishDiagnosticsParams + run.env.AfterChange(integration.ReadAllDiagnostics(&diags)) + for path, params := range diags { + uri := run.env.Sandbox.Workdir.URI(path) + for _, diag := range params.Diagnostics { + loc := protocol.Location{ + URI: uri, + Range: protocol.Range{ + Start: diag.Range.Start, + End: diag.Range.Start, // ignore end positions + }, + } + run.diags[loc] = append(run.diags[loc], diag) + } + } + + var markers []marker + for _, note := range test.notes { + mark := marker{run: run, note: note} + if fn, ok := valueMarkerFuncs[note.Name]; ok { + fn(mark) + } else if _, ok := actionMarkerFuncs[note.Name]; ok { + markers = append(markers, mark) // save for later + } else { + uri := mark.uri() + if run.extraNotes[uri] == nil { + run.extraNotes[uri] = make(map[string][]*expect.Note) + } + run.extraNotes[uri][note.Name] = append(run.extraNotes[uri][note.Name], note) + } + } + + // Invoke each remaining marker in the test. + for _, mark := range markers { + actionMarkerFuncs[mark.note.Name](mark) + } + + // Any remaining (un-eliminated) diagnostics are an error. + if !test.ignoreExtraDiags { + for loc, diags := range run.diags { + for _, diag := range diags { + t.Errorf("%s: unexpected diagnostic: %q", run.fmtLoc(loc), diag.Message) + } + } + } + + // TODO(rfindley): use these for whole-file marker tests. + for uri, extras := range run.extraNotes { + for name, extra := range extras { + if len(extra) > 0 { + t.Errorf("%s: %d unused %q markers", run.env.Sandbox.Workdir.URIToPath(uri), len(extra), name) + } + } + } + + formatted, err := formatTest(test) + if err != nil { + t.Errorf("formatTest: %v", err) + } else if *update { + filename := filepath.Join(dir, test.name) + if err := os.WriteFile(filename, formatted, 0644); err != nil { + t.Error(err) + } + } else if !t.Failed() { + // Verify that the testdata has not changed. 
+ // + // Only check this if the test hasn't already failed, otherwise we'd + // report duplicate mismatches of golden data. + // Otherwise, verify that formatted content matches. + if diff := compare.NamedText("formatted", "on-disk", string(formatted), string(test.content)); diff != "" { + t.Errorf("formatted test does not match on-disk content:\n%s", diff) + } + } + }) + } + + if abs, err := filepath.Abs(dir); err == nil && t.Failed() { + t.Logf("(Filenames are relative to %s.)", abs) + } +} + +// A marker holds state for the execution of a single @marker +// annotation in the source. +type marker struct { + run *markerTestRun + note *expect.Note +} + +// ctx returns the mark context. +func (m marker) ctx() context.Context { return m.run.env.Ctx } + +// T returns the testing.TB for this mark. +func (m marker) T() testing.TB { return m.run.env.T } + +// server returns the LSP server for the marker test run. +func (m marker) editor() *fake.Editor { return m.run.env.Editor } + +// server returns the LSP server for the marker test run. +func (m marker) server() protocol.Server { return m.run.env.Editor.Server } + +// uri returns the URI of the file containing the marker. +func (mark marker) uri() protocol.DocumentURI { + return mark.run.env.Sandbox.Workdir.URI(mark.run.test.fset.File(mark.note.Pos).Name()) +} + +// document returns a protocol.TextDocumentIdentifier for the current file. +func (mark marker) document() protocol.TextDocumentIdentifier { + return protocol.TextDocumentIdentifier{URI: mark.uri()} +} + +// path returns the relative path to the file containing the marker. +func (mark marker) path() string { + return mark.run.env.Sandbox.Workdir.RelPath(mark.run.test.fset.File(mark.note.Pos).Name()) +} + +// mapper returns a *protocol.Mapper for the current file. 
+func (mark marker) mapper() *protocol.Mapper { + mapper, err := mark.editor().Mapper(mark.path()) + if err != nil { + mark.T().Fatalf("failed to get mapper for current mark: %v", err) + } + return mapper +} + +// errorf reports an error with a prefix indicating the position of the marker note. +// +// It formats the error message using mark.sprintf. +func (mark marker) errorf(format string, args ...any) { + mark.T().Helper() + msg := mark.sprintf(format, args...) + // TODO(adonovan): consider using fmt.Fprintf(os.Stderr)+t.Fail instead of + // t.Errorf to avoid reporting uninteresting positions in the Go source of + // the driver. However, this loses the order of stderr wrt "FAIL: TestFoo" + // subtest dividers. + mark.T().Errorf("%s: %s", mark.run.fmtPos(mark.note.Pos), msg) +} + +// valueMarkerFunc returns a wrapper around a function that allows it to be +// called during the processing of value markers (e.g. @value(v, 123)) with marker +// arguments converted to function parameters. The provided function's first +// parameter must be of type 'marker', and it must return a value. +// +// Unlike action markers, which are executed for actions such as test +// assertions, value markers are all evaluated first, and each computes +// a value that is recorded by its identifier, which is the marker's first +// argument. These values may be referred to from an action marker by +// this identifier, e.g. @action(... , v, ...). +// +// For example, given a fn with signature +// +// func(mark marker, label, details, kind string) CompletionItem +// +// The result of valueMarkerFunc can associated with @item notes, and invoked +// as follows: +// +// //@item(FooCompletion, "Foo", "func() int", "func") +// +// The provided fn should not mutate the test environment. 
+func valueMarkerFunc(fn any) func(marker) { + ftype := reflect.TypeOf(fn) + if ftype.NumIn() == 0 || ftype.In(0) != markerType { + panic(fmt.Sprintf("value marker function %#v must accept marker as its first argument", ftype)) + } + if ftype.NumOut() != 1 { + panic(fmt.Sprintf("value marker function %#v must have exactly 1 result", ftype)) + } + + return func(mark marker) { + if len(mark.note.Args) == 0 || !is[expect.Identifier](mark.note.Args[0]) { + mark.errorf("first argument to a value marker function must be an identifier") + return + } + id := mark.note.Args[0].(expect.Identifier) + if alt, ok := mark.run.values[id]; ok { + mark.errorf("%s already declared as %T", id, alt) + return + } + args := append([]any{mark}, mark.note.Args[1:]...) + argValues, err := convertArgs(mark, ftype, args) + if err != nil { + mark.errorf("converting args: %v", err) + return + } + results := reflect.ValueOf(fn).Call(argValues) + mark.run.values[id] = results[0].Interface() + } +} + +// actionMarkerFunc returns a wrapper around a function that allows it to be +// called during the processing of action markers (e.g. @action("abc", 123)) +// with marker arguments converted to function parameters. The provided +// function's first parameter must be of type 'marker', and it must not return +// any values. +// +// The provided fn should not mutate the test environment. +func actionMarkerFunc(fn any) func(marker) { + ftype := reflect.TypeOf(fn) + if ftype.NumIn() == 0 || ftype.In(0) != markerType { + panic(fmt.Sprintf("action marker function %#v must accept marker as its first argument", ftype)) + } + if ftype.NumOut() != 0 { + panic(fmt.Sprintf("action marker function %#v cannot have results", ftype)) + } + + return func(mark marker) { + args := append([]any{mark}, mark.note.Args...) 
+ argValues, err := convertArgs(mark, ftype, args) + if err != nil { + mark.errorf("converting args: %v", err) + return + } + reflect.ValueOf(fn).Call(argValues) + } +} + +func convertArgs(mark marker, ftype reflect.Type, args []any) ([]reflect.Value, error) { + var ( + argValues []reflect.Value + pnext int // next param index + p reflect.Type // current param + ) + for i, arg := range args { + if i < ftype.NumIn() { + p = ftype.In(pnext) + pnext++ + } else if p == nil || !ftype.IsVariadic() { + // The actual number of arguments expected by the mark varies, depending + // on whether this is a value marker or an action marker. + // + // Since this error indicates a bug, probably OK to have an imprecise + // error message here. + return nil, fmt.Errorf("too many arguments to %s", mark.note.Name) + } + elemType := p + if ftype.IsVariadic() && pnext == ftype.NumIn() { + elemType = p.Elem() + } + var v reflect.Value + if id, ok := arg.(expect.Identifier); ok && id == "_" { + v = reflect.Zero(elemType) + } else { + a, err := convert(mark, arg, elemType) + if err != nil { + return nil, err + } + v = reflect.ValueOf(a) + } + argValues = append(argValues, v) + } + // Check that we have sufficient arguments. If the function is variadic, we + // do not need arguments for the final parameter. + if pnext < ftype.NumIn()-1 || pnext == ftype.NumIn()-1 && !ftype.IsVariadic() { + // Same comment as above: OK to be vague here. + return nil, fmt.Errorf("not enough arguments to %s", mark.note.Name) + } + return argValues, nil +} + +// is reports whether arg is a T. +func is[T any](arg any) bool { + _, ok := arg.(T) + return ok +} + +// Supported value marker functions. See [valueMarkerFunc] for more details. +var valueMarkerFuncs = map[string]func(marker){ + "loc": valueMarkerFunc(locMarker), + "item": valueMarkerFunc(completionItemMarker), +} + +// Supported action marker functions. See [actionMarkerFunc] for more details. 
+var actionMarkerFuncs = map[string]func(marker){ + "acceptcompletion": actionMarkerFunc(acceptCompletionMarker), + "codeaction": actionMarkerFunc(codeActionMarker), + "codeactionedit": actionMarkerFunc(codeActionEditMarker), + "codeactionerr": actionMarkerFunc(codeActionErrMarker), + "codelenses": actionMarkerFunc(codeLensesMarker), + "complete": actionMarkerFunc(completeMarker), + "def": actionMarkerFunc(defMarker), + "diag": actionMarkerFunc(diagMarker), + "documentlink": actionMarkerFunc(documentLinkMarker), + "foldingrange": actionMarkerFunc(foldingRangeMarker), + "format": actionMarkerFunc(formatMarker), + "highlight": actionMarkerFunc(highlightMarker), + "hover": actionMarkerFunc(hoverMarker), + "hovererr": actionMarkerFunc(hoverErrMarker), + "implementation": actionMarkerFunc(implementationMarker), + "incomingcalls": actionMarkerFunc(incomingCallsMarker), + "inlayhints": actionMarkerFunc(inlayhintsMarker), + "outgoingcalls": actionMarkerFunc(outgoingCallsMarker), + "preparerename": actionMarkerFunc(prepareRenameMarker), + "rank": actionMarkerFunc(rankMarker), + "rankl": actionMarkerFunc(ranklMarker), + "refs": actionMarkerFunc(refsMarker), + "rename": actionMarkerFunc(renameMarker), + "renameerr": actionMarkerFunc(renameErrMarker), + "selectionrange": actionMarkerFunc(selectionRangeMarker), + "signature": actionMarkerFunc(signatureMarker), + "snippet": actionMarkerFunc(snippetMarker), + "suggestedfix": actionMarkerFunc(suggestedfixMarker), + "suggestedfixerr": actionMarkerFunc(suggestedfixErrMarker), + "symbol": actionMarkerFunc(symbolMarker), + "token": actionMarkerFunc(tokenMarker), + "typedef": actionMarkerFunc(typedefMarker), + "workspacesymbol": actionMarkerFunc(workspaceSymbolMarker), +} + +// markerTest holds all the test data extracted from a test txtar archive. +// +// See the documentation for RunMarkerTests for more information on the archive +// format. 
+type markerTest struct { + name string // relative path to the txtar file in the testdata dir + fset *token.FileSet // fileset used for parsing notes + content []byte // raw test content + archive *txtar.Archive // original test archive + settings map[string]any // gopls settings + capabilities []byte // content of capabilities.json file + env map[string]string // editor environment + proxyFiles map[string][]byte // proxy content + files map[string][]byte // data files from the archive (excluding special files) + notes []*expect.Note // extracted notes from data files + golden map[expect.Identifier]*Golden // extracted golden content, by identifier name + + skipReason string // the skip reason extracted from the "skip" archive file + flags []string // flags extracted from the special "flags" archive file. + + // Parsed flags values. + minGoVersion string + cgo bool + writeGoSum []string // comma separated dirs to write go sum for + skipGOOS []string // comma separated GOOS values to skip + skipGOARCH []string // comma separated GOARCH values to skip + ignoreExtraDiags bool + filterBuiltins bool + filterKeywords bool +} + +// flagSet returns the flagset used for parsing the special "flags" file in the +// test archive. 
+func (t *markerTest) flagSet() *flag.FlagSet { + flags := flag.NewFlagSet(t.name, flag.ContinueOnError) + flags.StringVar(&t.minGoVersion, "min_go", "", "if set, the minimum go1.X version required for this test") + flags.BoolVar(&t.cgo, "cgo", false, "if set, requires cgo (both the cgo tool and CGO_ENABLED=1)") + flags.Var((*stringListValue)(&t.writeGoSum), "write_sumfile", "if set, write the sumfile for these directories") + flags.Var((*stringListValue)(&t.skipGOOS), "skip_goos", "if set, skip this test on these GOOS values") + flags.Var((*stringListValue)(&t.skipGOARCH), "skip_goarch", "if set, skip this test on these GOARCH values") + flags.BoolVar(&t.ignoreExtraDiags, "ignore_extra_diags", false, "if set, suppress errors for unmatched diagnostics") + flags.BoolVar(&t.filterBuiltins, "filter_builtins", true, "if set, filter builtins from completion results") + flags.BoolVar(&t.filterKeywords, "filter_keywords", true, "if set, filter keywords from completion results") + return flags +} + +// stringListValue implements flag.Value. +type stringListValue []string + +func (l *stringListValue) Set(s string) error { + if s != "" { + for _, d := range strings.Split(s, ",") { + *l = append(*l, strings.TrimSpace(d)) + } + } + return nil +} + +func (l stringListValue) String() string { + return strings.Join([]string(l), ",") +} + +func (t *markerTest) getGolden(id expect.Identifier) *Golden { + golden, ok := t.golden[id] + // If there was no golden content for this identifier, we must create one + // to handle the case where -update is set: we need a place to store + // the updated content. + if !ok { + golden = &Golden{id: id} + + // TODO(adonovan): the separation of markerTest (the + // static aspects) from markerTestRun (the dynamic + // ones) is evidently bogus because here we modify + // markerTest during execution. Let's merge the two. + t.golden[id] = golden + } + return golden +} + +// Golden holds extracted golden content for a single @<name> prefix. 
+// +// When -update is set, golden captures the updated golden contents for later +// writing. +type Golden struct { + id expect.Identifier + data map[string][]byte // key "" => @id itself + updated map[string][]byte +} + +// Get returns golden content for the given name, which corresponds to the +// relative path following the golden prefix @<name>/. For example, to access +// the content of @foo/path/to/result.json from the Golden associated with +// @foo, name should be "path/to/result.json". +// +// If -update is set, the given update function will be called to get the +// updated golden content that should be written back to testdata. +// +// Marker functions must use this method instead of accessing data entries +// directly otherwise the -update operation will delete those entries. +// +// TODO(rfindley): rethink the logic here. We may want to separate Get and Set, +// and not delete golden content that isn't set. +func (g *Golden) Get(t testing.TB, name string, updated []byte) ([]byte, bool) { + if existing, ok := g.updated[name]; ok { + // Multiple tests may reference the same golden data, but if they do they + // must agree about its expected content. + if diff := compare.NamedText("existing", "updated", string(existing), string(updated)); diff != "" { + t.Errorf("conflicting updates for golden data %s/%s:\n%s", g.id, name, diff) + } + } + if g.updated == nil { + g.updated = make(map[string][]byte) + } + g.updated[name] = updated + if *update { + return updated, true + } + + res, ok := g.data[name] + return res, ok +} + +// loadMarkerTests walks the given dir looking for .txt files, which it +// interprets as a txtar archive. +// +// See the documentation for RunMarkerTests for more details on the test data +// archive. 
+func loadMarkerTests(dir string) ([]*markerTest, error) { + var tests []*markerTest + err := filepath.WalkDir(dir, func(path string, _ fs.DirEntry, err error) error { + if strings.HasSuffix(path, ".txt") { + content, err := os.ReadFile(path) + if err != nil { + return err + } + + name := strings.TrimPrefix(path, dir+string(filepath.Separator)) + test, err := loadMarkerTest(name, content) + if err != nil { + return fmt.Errorf("%s: %v", path, err) + } + tests = append(tests, test) + } + return err + }) + return tests, err +} + +func loadMarkerTest(name string, content []byte) (*markerTest, error) { + archive := txtar.Parse(content) + if len(archive.Files) == 0 { + return nil, fmt.Errorf("txtar file has no '-- filename --' sections") + } + if bytes.Contains(archive.Comment, []byte("\n-- ")) { + // This check is conservative, but the comment is only a comment. + return nil, fmt.Errorf("ill-formed '-- filename --' header in comment") + } + test := &markerTest{ + name: name, + fset: token.NewFileSet(), + content: content, + archive: archive, + files: make(map[string][]byte), + golden: make(map[expect.Identifier]*Golden), + } + for _, file := range archive.Files { + switch { + case file.Name == "skip": + reason := strings.ReplaceAll(string(file.Data), "\n", " ") + reason = strings.TrimSpace(reason) + test.skipReason = reason + + case file.Name == "flags": + test.flags = strings.Fields(string(file.Data)) + + case file.Name == "settings.json": + if err := json.Unmarshal(file.Data, &test.settings); err != nil { + return nil, err + } + + case file.Name == "capabilities.json": + test.capabilities = file.Data // lazily unmarshalled by the editor + + case file.Name == "env": + test.env = make(map[string]string) + fields := strings.Fields(string(file.Data)) + for _, field := range fields { + key, value, ok := strings.Cut(field, "=") + if !ok { + return nil, fmt.Errorf("env vars must be formatted as var=value, got %q", field) + } + test.env[key] = value + } + + case 
strings.HasPrefix(file.Name, "@"): // golden content + idstring, name, _ := strings.Cut(file.Name[len("@"):], "/") + id := expect.Identifier(idstring) + // Note that a file.Name of just "@id" gives (id, name) = ("id", ""). + if _, ok := test.golden[id]; !ok { + test.golden[id] = &Golden{ + id: id, + data: make(map[string][]byte), + } + } + test.golden[id].data[name] = file.Data + + case strings.HasPrefix(file.Name, "proxy/"): + name := file.Name[len("proxy/"):] + if test.proxyFiles == nil { + test.proxyFiles = make(map[string][]byte) + } + test.proxyFiles[name] = file.Data + + default: // ordinary file content + notes, err := expect.Parse(test.fset, file.Name, file.Data) + if err != nil { + return nil, fmt.Errorf("parsing notes in %q: %v", file.Name, err) + } + + // Reject common misspelling: "// @mark". + // TODO(adonovan): permit "// @" within a string. Detect multiple spaces. + if i := bytes.Index(file.Data, []byte("// @")); i >= 0 { + line := 1 + bytes.Count(file.Data[:i], []byte("\n")) + return nil, fmt.Errorf("%s:%d: unwanted space before marker (// @)", file.Name, line) + } + + // The 'go list' command doesn't work correct with modules named + // testdata", so don't allow it as a module name (golang/go#65406). + // (Otherwise files within it will end up in an ad hoc + // package, "command-line-arguments/$TMPDIR/...".) + if filepath.Base(file.Name) == "go.mod" && + bytes.Contains(file.Data, []byte("module testdata")) { + return nil, fmt.Errorf("'testdata' is not a valid module name") + } + + test.notes = append(test.notes, notes...) + test.files[file.Name] = file.Data + } + + // Print a warning if we see what looks like "-- filename --" + // without the second "--". It's not necessarily wrong, + // but it should almost never appear in our test inputs. 
+ if bytes.Contains(file.Data, []byte("\n-- ")) { + log.Printf("ill-formed '-- filename --' header in %s?", file.Name) + } + } + + // Parse flags after loading files, as they may have been set by the "flags" + // file. + if err := test.flagSet().Parse(test.flags); err != nil { + return nil, fmt.Errorf("parsing flags: %v", err) + } + + return test, nil +} + +// formatTest formats the test as a txtar archive. +func formatTest(test *markerTest) ([]byte, error) { + arch := &txtar.Archive{ + Comment: test.archive.Comment, + } + + updatedGolden := make(map[string][]byte) + for id, g := range test.golden { + for name, data := range g.updated { + filename := "@" + path.Join(string(id), name) // name may be "" + updatedGolden[filename] = data + } + } + + // Preserve the original ordering of archive files. + for _, file := range test.archive.Files { + switch file.Name { + // Preserve configuration files exactly as they were. They must have parsed + // if we got this far. + case "skip", "flags", "settings.json", "capabilities.json", "env": + arch.Files = append(arch.Files, file) + default: + if _, ok := test.files[file.Name]; ok { // ordinary file + arch.Files = append(arch.Files, file) + } else if strings.HasPrefix(file.Name, "proxy/") { // proxy file + arch.Files = append(arch.Files, file) + } else if data, ok := updatedGolden[file.Name]; ok { // golden file + arch.Files = append(arch.Files, txtar.File{Name: file.Name, Data: data}) + delete(updatedGolden, file.Name) + } + } + } + + // ...followed by any new golden files. + var newGoldenFiles []txtar.File + for filename, data := range updatedGolden { + // TODO(rfindley): it looks like this implicitly removes trailing newlines + // from golden content. Is there any way to fix that? Perhaps we should + // just make the diff tolerant of missing newlines? + newGoldenFiles = append(newGoldenFiles, txtar.File{Name: filename, Data: data}) + } + // Sort new golden files lexically. 
+ sort.Slice(newGoldenFiles, func(i, j int) bool { + return newGoldenFiles[i].Name < newGoldenFiles[j].Name + }) + arch.Files = append(arch.Files, newGoldenFiles...) + + return txtar.Format(arch), nil +} + +// newEnv creates a new environment for a marker test. +// +// TODO(rfindley): simplify and refactor the construction of testing +// environments across integration tests, marker tests, and benchmarks. +func newEnv(t *testing.T, cache *cache.Cache, files, proxyFiles map[string][]byte, writeGoSum []string, config fake.EditorConfig) *integration.Env { + sandbox, err := fake.NewSandbox(&fake.SandboxConfig{ + RootDir: t.TempDir(), + Files: files, + ProxyFiles: proxyFiles, + }) + if err != nil { + t.Fatal(err) + } + + for _, dir := range writeGoSum { + if err := sandbox.RunGoCommand(context.Background(), dir, "list", []string{"-mod=mod", "..."}, []string{"GOWORK=off"}, true); err != nil { + t.Fatal(err) + } + } + + // Put a debug instance in the context to prevent logging to stderr. + // See associated TODO in runner.go: we should revisit this pattern. + ctx := context.Background() + ctx = debug.WithInstance(ctx, "off") + + awaiter := integration.NewAwaiter(sandbox.Workdir) + ss := lsprpc.NewStreamServer(cache, false, hooks.Options) + server := servertest.NewPipeServer(ss, jsonrpc2.NewRawStream) + const skipApplyEdits = true // capture edits but don't apply them + editor, err := fake.NewEditor(sandbox, config).Connect(ctx, server, awaiter.Hooks(), skipApplyEdits) + if err != nil { + sandbox.Close() // ignore error + t.Fatal(err) + } + if err := awaiter.Await(ctx, integration.InitialWorkspaceLoad); err != nil { + sandbox.Close() // ignore error + t.Fatal(err) + } + return &integration.Env{ + T: t, + Ctx: ctx, + Editor: editor, + Sandbox: sandbox, + Awaiter: awaiter, + } +} + +// A markerTestRun holds the state of one run of a marker test archive. 
+type markerTestRun struct { + test *markerTest + env *integration.Env + settings map[string]any + + // Collected information. + // Each @diag/@suggestedfix marker eliminates an entry from diags. + values map[expect.Identifier]any + diags map[protocol.Location][]protocol.Diagnostic // diagnostics by position; location end == start + + // Notes that weren't associated with a top-level marker func. They may be + // consumed by another marker (e.g. @codelenses collects @codelens markers). + // Any notes that aren't consumed are flagged as an error. + extraNotes map[protocol.DocumentURI]map[string][]*expect.Note +} + +// sprintf returns a formatted string after applying pre-processing to +// arguments of the following types: +// - token.Pos: formatted using (*markerTestRun).fmtPos +// - protocol.Location: formatted using (*markerTestRun).fmtLoc +func (c *marker) sprintf(format string, args ...any) string { + if false { + _ = fmt.Sprintf(format, args...) // enable vet printf checker + } + var args2 []any + for _, arg := range args { + switch arg := arg.(type) { + case token.Pos: + args2 = append(args2, c.run.fmtPos(arg)) + case protocol.Location: + args2 = append(args2, c.run.fmtLoc(arg)) + default: + args2 = append(args2, arg) + } + } + return fmt.Sprintf(format, args2...) +} + +// fmtLoc formats the given pos in the context of the test, using +// archive-relative paths for files and including the line number in the full +// archive file. 
+func (run *markerTestRun) fmtPos(pos token.Pos) string { + file := run.test.fset.File(pos) + if file == nil { + run.env.T.Errorf("position %d not in test fileset", pos) + return "<invalid location>" + } + m, err := run.env.Editor.Mapper(file.Name()) + if err != nil { + run.env.T.Errorf("%s", err) + return "<invalid location>" + } + loc, err := m.PosLocation(file, pos, pos) + if err != nil { + run.env.T.Errorf("Mapper(%s).PosLocation failed: %v", file.Name(), err) + } + return run.fmtLoc(loc) +} + +// fmtLoc formats the given location in the context of the test, using +// archive-relative paths for files and including the line number in the full +// archive file. +func (run *markerTestRun) fmtLoc(loc protocol.Location) string { + formatted := run.fmtLocDetails(loc, true) + if formatted == "" { + run.env.T.Errorf("unable to find %s in test archive", loc) + return "<invalid location>" + } + return formatted +} + +// See fmtLoc. If includeTxtPos is not set, the position in the full archive +// file is omitted. +// +// If the location cannot be found within the archive, fmtLocDetails returns "". 
+func (run *markerTestRun) fmtLocDetails(loc protocol.Location, includeTxtPos bool) string { + if loc == (protocol.Location{}) { + return "" + } + lines := bytes.Count(run.test.archive.Comment, []byte("\n")) + var name string + for _, f := range run.test.archive.Files { + lines++ // -- separator -- + uri := run.env.Sandbox.Workdir.URI(f.Name) + if uri == loc.URI { + name = f.Name + break + } + lines += bytes.Count(f.Data, []byte("\n")) + } + if name == "" { + return "" + } + m, err := run.env.Editor.Mapper(name) + if err != nil { + run.env.T.Errorf("internal error: %v", err) + return "<invalid location>" + } + start, end, err := m.RangeOffsets(loc.Range) + if err != nil { + run.env.T.Errorf("error formatting location %s: %v", loc, err) + return "<invalid location>" + } + var ( + startLine, startCol8 = m.OffsetLineCol8(start) + endLine, endCol8 = m.OffsetLineCol8(end) + ) + innerSpan := fmt.Sprintf("%d:%d", startLine, startCol8) // relative to the embedded file + outerSpan := fmt.Sprintf("%d:%d", lines+startLine, startCol8) // relative to the archive file + if start != end { + if endLine == startLine { + innerSpan += fmt.Sprintf("-%d", endCol8) + outerSpan += fmt.Sprintf("-%d", endCol8) + } else { + innerSpan += fmt.Sprintf("-%d:%d", endLine, endCol8) + outerSpan += fmt.Sprintf("-%d:%d", lines+endLine, endCol8) + } + } + + if includeTxtPos { + return fmt.Sprintf("%s:%s (%s:%s)", name, innerSpan, run.test.name, outerSpan) + } else { + return fmt.Sprintf("%s:%s", name, innerSpan) + } +} + +// ---- converters ---- + +// converter is the signature of argument converters. +// A converter should return an error rather than calling marker.errorf(). +// +// type converter func(marker, any) (any, error) + +// Types with special conversions. 
+var ( + goldenType = reflect.TypeOf(&Golden{}) + locationType = reflect.TypeOf(protocol.Location{}) + markerType = reflect.TypeOf(marker{}) + stringMatcherType = reflect.TypeOf(stringMatcher{}) +) + +func convert(mark marker, arg any, paramType reflect.Type) (any, error) { + // Handle stringMatcher and golden parameters before resolving identifiers, + // because golden content lives in a separate namespace from other + // identifiers. + switch paramType { + case stringMatcherType: + return convertStringMatcher(mark, arg) + case goldenType: + id, ok := arg.(expect.Identifier) + if !ok { + return nil, fmt.Errorf("invalid input type %T: golden key must be an identifier", arg) + } + return mark.run.test.getGolden(id), nil + } + if id, ok := arg.(expect.Identifier); ok { + if arg, ok := mark.run.values[id]; ok { + if !reflect.TypeOf(arg).AssignableTo(paramType) { + return nil, fmt.Errorf("cannot convert %v (%T) to %s", arg, arg, paramType) + } + return arg, nil + } + } + if paramType == locationType { + return convertLocation(mark, arg) + } + if reflect.TypeOf(arg).AssignableTo(paramType) { + return arg, nil // no conversion required + } + return nil, fmt.Errorf("cannot convert %v (%T) to %s", arg, arg, paramType) +} + +// convertLocation converts a string or regexp argument into the protocol +// location corresponding to the first position of the string (or first match +// of the regexp) in the line preceding the note. 
+func convertLocation(mark marker, arg any) (protocol.Location, error) { + switch arg := arg.(type) { + case string: + startOff, preceding, m, err := linePreceding(mark.run, mark.note.Pos) + if err != nil { + return protocol.Location{}, err + } + idx := bytes.Index(preceding, []byte(arg)) + if idx < 0 { + return protocol.Location{}, fmt.Errorf("substring %q not found in %q", arg, preceding) + } + off := startOff + idx + return m.OffsetLocation(off, off+len(arg)) + case *regexp.Regexp: + return findRegexpInLine(mark.run, mark.note.Pos, arg) + default: + return protocol.Location{}, fmt.Errorf("cannot convert argument type %T to location (must be a string to match the preceding line)", arg) + } +} + +// findRegexpInLine searches the partial line preceding pos for a match for the +// regular expression re, returning a location spanning the first match. If re +// contains exactly one subgroup, the position of this subgroup match is +// returned rather than the position of the full match. +func findRegexpInLine(run *markerTestRun, pos token.Pos, re *regexp.Regexp) (protocol.Location, error) { + startOff, preceding, m, err := linePreceding(run, pos) + if err != nil { + return protocol.Location{}, err + } + + matches := re.FindSubmatchIndex(preceding) + if len(matches) == 0 { + return protocol.Location{}, fmt.Errorf("no match for regexp %q found in %q", re, string(preceding)) + } + var start, end int + switch len(matches) { + case 2: + // no subgroups: return the range of the regexp expression + start, end = matches[0], matches[1] + case 4: + // one subgroup: return its range + start, end = matches[2], matches[3] + default: + return protocol.Location{}, fmt.Errorf("invalid location regexp %q: expect either 0 or 1 subgroups, got %d", re, len(matches)/2-1) + } + + return m.OffsetLocation(start+startOff, end+startOff) +} + +func linePreceding(run *markerTestRun, pos token.Pos) (int, []byte, *protocol.Mapper, error) { + file := run.test.fset.File(pos) + posn := 
safetoken.Position(file, pos) + lineStart := file.LineStart(posn.Line) + startOff, endOff, err := safetoken.Offsets(file, lineStart, pos) + if err != nil { + return 0, nil, nil, err + } + m, err := run.env.Editor.Mapper(file.Name()) + if err != nil { + return 0, nil, nil, err + } + return startOff, m.Content[startOff:endOff], m, nil +} + +// convertStringMatcher converts a string, regexp, or identifier +// argument into a stringMatcher. The string is a substring of the +// expected error, the regexp is a pattern than matches the expected +// error, and the identifier is a golden file containing the expected +// error. +func convertStringMatcher(mark marker, arg any) (stringMatcher, error) { + switch arg := arg.(type) { + case string: + return stringMatcher{substr: arg}, nil + case *regexp.Regexp: + return stringMatcher{pattern: arg}, nil + case expect.Identifier: + golden := mark.run.test.getGolden(arg) + return stringMatcher{golden: golden}, nil + default: + return stringMatcher{}, fmt.Errorf("cannot convert %T to wantError (want: string, regexp, or identifier)", arg) + } +} + +// A stringMatcher represents an expectation of a specific string value. +// +// It may be indicated in one of three ways, in 'expect' notation: +// - an identifier 'foo', to compare (exactly) with the contents of the golden +// section @foo; +// - a pattern expression re"ab.*c", to match against a regular expression; +// - a string literal "abc", to check for a substring. +type stringMatcher struct { + golden *Golden + pattern *regexp.Regexp + substr string +} + +func (sc stringMatcher) String() string { + if sc.golden != nil { + return fmt.Sprintf("content from @%s entry", sc.golden.id) + } else if sc.pattern != nil { + return fmt.Sprintf("content matching %#q", sc.pattern) + } else { + return fmt.Sprintf("content with substring %q", sc.substr) + } +} + +// checkErr asserts that the given error matches the stringMatcher's expectations. 
+func (sc stringMatcher) checkErr(mark marker, err error) { + if err == nil { + mark.errorf("@%s succeeded unexpectedly, want %v", mark.note.Name, sc) + return + } + sc.check(mark, err.Error()) +} + +// check asserts that the given content matches the stringMatcher's expectations. +func (sc stringMatcher) check(mark marker, got string) { + if sc.golden != nil { + compareGolden(mark, []byte(got), sc.golden) + } else if sc.pattern != nil { + // Content must match the regular expression pattern. + if !sc.pattern.MatchString(got) { + mark.errorf("got %q, does not match pattern %#q", got, sc.pattern) + } + + } else if !strings.Contains(got, sc.substr) { + // Content must contain the expected substring. + mark.errorf("got %q, want substring %q", got, sc.substr) + } +} + +// checkChangedFiles compares the files changed by an operation with their expected (golden) state. +func checkChangedFiles(mark marker, changed map[string][]byte, golden *Golden) { + // Check changed files match expectations. + for filename, got := range changed { + if want, ok := golden.Get(mark.T(), filename, got); !ok { + mark.errorf("%s: unexpected change to file %s; got:\n%s", + mark.note.Name, filename, got) + + } else if string(got) != string(want) { + mark.errorf("%s: wrong file content for %s: got:\n%s\nwant:\n%s\ndiff:\n%s", + mark.note.Name, filename, got, want, + compare.Bytes(want, got)) + } + } + + // Report unmet expectations. + for filename := range golden.data { + if _, ok := changed[filename]; !ok { + want, _ := golden.Get(mark.T(), filename, nil) + mark.errorf("%s: missing change to file %s; want:\n%s", + mark.note.Name, filename, want) + } + } +} + +// checkDiffs computes unified diffs for each changed file, and compares with +// the diff content stored in the given golden directory. 
+func checkDiffs(mark marker, changed map[string][]byte, golden *Golden) { + diffs := make(map[string]string) + for name, after := range changed { + before := mark.run.env.FileContent(name) + // TODO(golang/go#64023): switch back to diff.Strings. + // The attached issue is only one obstacle to switching. + // Another is that different diff algorithms produce + // different results, so if we commit diffs in test + // expectations, then we need to either (1) state + // which diff implementation they use and never change + // it, or (2) don't compare diffs, but instead apply + // the "want" diff and check that it produces the + // "got" output. Option 2 is more robust, as it allows + // the test expectation to use any valid diff. + edits := myers.ComputeEdits(before, string(after)) + d, err := diff.ToUnified("before", "after", before, edits, 0) + if err != nil { + // Can't happen: edits are consistent. + log.Fatalf("internal error in diff.ToUnified: %v", err) + } + // Trim the unified header from diffs, as it is unnecessary and repetitive. + difflines := strings.Split(d, "\n") + if len(difflines) >= 2 && strings.HasPrefix(difflines[1], "+++") { + diffs[name] = strings.Join(difflines[2:], "\n") + } else { + diffs[name] = d + } + } + // Check changed files match expectations. + for filename, got := range diffs { + if want, ok := golden.Get(mark.T(), filename, []byte(got)); !ok { + mark.errorf("%s: unexpected change to file %s; got diff:\n%s", + mark.note.Name, filename, got) + + } else if got != string(want) { + mark.errorf("%s: wrong diff for %s:\n\ngot:\n%s\n\nwant:\n%s\n", + mark.note.Name, filename, got, want) + } + } + // Report unmet expectations. 
+ for filename := range golden.data { + if _, ok := changed[filename]; !ok { + want, _ := golden.Get(mark.T(), filename, nil) + mark.errorf("%s: missing change to file %s; want:\n%s", + mark.note.Name, filename, want) + } + } +} + +// ---- marker functions ---- + +// TODO(rfindley): consolidate documentation of these markers. They are already +// documented above, so much of the documentation here is redundant. + +// completionItem is a simplified summary of a completion item. +type completionItem struct { + Label, Detail, Kind, Documentation string +} + +func completionItemMarker(mark marker, label string, other ...string) completionItem { + if len(other) > 3 { + mark.errorf("too many arguments to @item: expect at most 4") + } + item := completionItem{ + Label: label, + } + if len(other) > 0 { + item.Detail = other[0] + } + if len(other) > 1 { + item.Kind = other[1] + } + if len(other) > 2 { + item.Documentation = other[2] + } + return item +} + +func rankMarker(mark marker, src protocol.Location, items ...completionItem) { + // Separate positive and negative items (expectations). + var pos, neg []completionItem + for _, item := range items { + if strings.HasPrefix(item.Label, "!") { + neg = append(neg, item) + } else { + pos = append(pos, item) + } + } + + // Collect results that are present in items, preserving their order. 
+ list := mark.run.env.Completion(src) + var got []string + for _, g := range list.Items { + for _, w := range pos { + if g.Label == w.Label { + got = append(got, g.Label) + break + } + } + for _, w := range neg { + if g.Label == w.Label[len("!"):] { + mark.errorf("got unwanted completion: %s", g.Label) + break + } + } + } + var want []string + for _, w := range pos { + want = append(want, w.Label) + } + if diff := cmp.Diff(want, got); diff != "" { + mark.errorf("completion rankings do not match (-want +got):\n%s", diff) + } +} + +func ranklMarker(mark marker, src protocol.Location, labels ...string) { + // Separate positive and negative labels (expectations). + var pos, neg []string + for _, label := range labels { + if strings.HasPrefix(label, "!") { + neg = append(neg, label[len("!"):]) + } else { + pos = append(pos, label) + } + } + + // Collect results that are present in items, preserving their order. + list := mark.run.env.Completion(src) + var got []string + for _, g := range list.Items { + if slices.Contains(pos, g.Label) { + got = append(got, g.Label) + } else if slices.Contains(neg, g.Label) { + mark.errorf("got unwanted completion: %s", g.Label) + } + } + if diff := cmp.Diff(pos, got); diff != "" { + mark.errorf("completion rankings do not match (-want +got):\n%s", diff) + } +} + +func snippetMarker(mark marker, src protocol.Location, item completionItem, want string) { + list := mark.run.env.Completion(src) + var ( + found bool + got string + all []string // for errors + ) + items := filterBuiltinsAndKeywords(mark, list.Items) + for _, i := range items { + all = append(all, i.Label) + if i.Label == item.Label { + found = true + if i.TextEdit != nil { + got = i.TextEdit.NewText + } + break + } + } + if !found { + mark.errorf("no completion item found matching %s (got: %v)", item.Label, all) + return + } + if got != want { + mark.errorf("snippets do not match: got %q, want %q", got, want) + } +} + +// completeMarker implements the @complete marker, 
running +// textDocument/completion at the given src location and asserting that the +// results match the expected results. +func completeMarker(mark marker, src protocol.Location, want ...completionItem) { + list := mark.run.env.Completion(src) + items := filterBuiltinsAndKeywords(mark, list.Items) + var got []completionItem + for i, item := range items { + simplified := completionItem{ + Label: item.Label, + Detail: item.Detail, + Kind: fmt.Sprint(item.Kind), + } + if item.Documentation != nil { + switch v := item.Documentation.Value.(type) { + case string: + simplified.Documentation = v + case protocol.MarkupContent: + simplified.Documentation = strings.TrimSpace(v.Value) // trim newlines + } + } + // Support short-hand notation: if Detail, Kind, or Documentation are omitted from the + // item, don't match them. + if i < len(want) { + if want[i].Detail == "" { + simplified.Detail = "" + } + if want[i].Kind == "" { + simplified.Kind = "" + } + if want[i].Documentation == "" { + simplified.Documentation = "" + } + } + got = append(got, simplified) + } + if len(want) == 0 { + want = nil // got is nil if empty + } + if diff := cmp.Diff(want, got); diff != "" { + mark.errorf("Completion(...) returned unexpect results (-want +got):\n%s", diff) + } +} + +// filterBuiltinsAndKeywords filters out builtins and keywords from completion +// results. +// +// It over-approximates, and does not detect if builtins are shadowed. 
+func filterBuiltinsAndKeywords(mark marker, items []protocol.CompletionItem) []protocol.CompletionItem { + keep := 0 + for _, item := range items { + if mark.run.test.filterKeywords && item.Kind == protocol.KeywordCompletion { + continue + } + if mark.run.test.filterBuiltins && types.Universe.Lookup(item.Label) != nil { + continue + } + items[keep] = item + keep++ + } + return items[:keep] +} + +// acceptCompletionMarker implements the @acceptCompletion marker, running +// textDocument/completion at the given src location and accepting the +// candidate with the given label. The resulting source must match the provided +// golden content. +func acceptCompletionMarker(mark marker, src protocol.Location, label string, golden *Golden) { + list := mark.run.env.Completion(src) + var selected *protocol.CompletionItem + for _, item := range list.Items { + if item.Label == label { + selected = &item + break + } + } + if selected == nil { + mark.errorf("Completion(...) did not return an item labeled %q", label) + return + } + filename := mark.path() + mapper := mark.mapper() + patched, _, err := protocol.ApplyEdits(mapper, append([]protocol.TextEdit{*selected.TextEdit}, selected.AdditionalTextEdits...)) + + if err != nil { + mark.errorf("ApplyProtocolEdits failed: %v", err) + return + } + changes := map[string][]byte{filename: patched} + // Check the file state. + checkChangedFiles(mark, changes, golden) +} + +// defMarker implements the @def marker, running textDocument/definition at +// the given src location and asserting that there is exactly one resulting +// location, matching dst. +// +// TODO(rfindley): support a variadic destination set. 
+func defMarker(mark marker, src, dst protocol.Location) { + got := mark.run.env.GoToDefinition(src) + if got != dst { + mark.errorf("definition location does not match:\n\tgot: %s\n\twant %s", + mark.run.fmtLoc(got), mark.run.fmtLoc(dst)) + } +} + +func typedefMarker(mark marker, src, dst protocol.Location) { + got := mark.run.env.TypeDefinition(src) + if got != dst { + mark.errorf("type definition location does not match:\n\tgot: %s\n\twant %s", + mark.run.fmtLoc(got), mark.run.fmtLoc(dst)) + } +} + +func foldingRangeMarker(mark marker, g *Golden) { + env := mark.run.env + ranges, err := mark.server().FoldingRange(env.Ctx, &protocol.FoldingRangeParams{ + TextDocument: mark.document(), + }) + if err != nil { + mark.errorf("foldingRange failed: %v", err) + return + } + var edits []protocol.TextEdit + insert := func(line, char uint32, text string) { + pos := protocol.Position{Line: line, Character: char} + edits = append(edits, protocol.TextEdit{ + Range: protocol.Range{ + Start: pos, + End: pos, + }, + NewText: text, + }) + } + for i, rng := range ranges { + insert(rng.StartLine, rng.StartCharacter, fmt.Sprintf("<%d kind=%q>", i, rng.Kind)) + insert(rng.EndLine, rng.EndCharacter, fmt.Sprintf("</%d>", i)) + } + filename := mark.path() + mapper, err := env.Editor.Mapper(filename) + if err != nil { + mark.errorf("Editor.Mapper(%s) failed: %v", filename, err) + return + } + got, _, err := protocol.ApplyEdits(mapper, edits) + if err != nil { + mark.errorf("ApplyProtocolEdits failed: %v", err) + return + } + want, _ := g.Get(mark.T(), "", got) + if diff := compare.Bytes(want, got); diff != "" { + mark.errorf("foldingRange mismatch:\n%s", diff) + } +} + +// formatMarker implements the @format marker. 
+func formatMarker(mark marker, golden *Golden) { + edits, err := mark.server().Formatting(mark.ctx(), &protocol.DocumentFormattingParams{ + TextDocument: mark.document(), + }) + var got []byte + if err != nil { + got = []byte(err.Error() + "\n") // all golden content is newline terminated + } else { + env := mark.run.env + filename := mark.path() + mapper, err := env.Editor.Mapper(filename) + if err != nil { + mark.errorf("Editor.Mapper(%s) failed: %v", filename, err) + } + + got, _, err = protocol.ApplyEdits(mapper, edits) + if err != nil { + mark.errorf("ApplyProtocolEdits failed: %v", err) + return + } + } + + compareGolden(mark, got, golden) +} + +func highlightMarker(mark marker, src protocol.Location, dsts ...protocol.Location) { + highlights := mark.run.env.DocumentHighlight(src) + var got []protocol.Range + for _, h := range highlights { + got = append(got, h.Range) + } + + var want []protocol.Range + for _, d := range dsts { + want = append(want, d.Range) + } + + sortRanges := func(s []protocol.Range) { + sort.Slice(s, func(i, j int) bool { + return protocol.CompareRange(s[i], s[j]) < 0 + }) + } + + sortRanges(got) + sortRanges(want) + + if diff := cmp.Diff(want, got); diff != "" { + mark.errorf("DocumentHighlight(%v) mismatch (-want +got):\n%s", src, diff) + } +} + +func hoverMarker(mark marker, src, dst protocol.Location, sc stringMatcher) { + content, gotDst := mark.run.env.Hover(src) + if gotDst != dst { + mark.errorf("hover location does not match:\n\tgot: %s\n\twant %s)", mark.run.fmtLoc(gotDst), mark.run.fmtLoc(dst)) + } + gotMD := "" + if content != nil { + gotMD = content.Value + } + sc.check(mark, gotMD) +} + +func hoverErrMarker(mark marker, src protocol.Location, em stringMatcher) { + _, _, err := mark.editor().Hover(mark.ctx(), src) + em.checkErr(mark, err) +} + +// locMarker implements the @loc marker. It is executed before other +// markers, so that locations are available. 
+func locMarker(mark marker, loc protocol.Location) protocol.Location { return loc } + +// diagMarker implements the @diag marker. It eliminates diagnostics from +// the observed set in mark.test. +func diagMarker(mark marker, loc protocol.Location, re *regexp.Regexp) { + if _, ok := removeDiagnostic(mark, loc, re); !ok { + mark.errorf("no diagnostic at %v matches %q", loc, re) + } +} + +// removeDiagnostic looks for a diagnostic matching loc at the given position. +// +// If found, it returns (diag, true), and eliminates the matched diagnostic +// from the unmatched set. +// +// If not found, it returns (protocol.Diagnostic{}, false). +func removeDiagnostic(mark marker, loc protocol.Location, re *regexp.Regexp) (protocol.Diagnostic, bool) { + loc.Range.End = loc.Range.Start // diagnostics ignore end position. + diags := mark.run.diags[loc] + for i, diag := range diags { + if re.MatchString(diag.Message) { + mark.run.diags[loc] = append(diags[:i], diags[i+1:]...) + return diag, true + } + } + return protocol.Diagnostic{}, false +} + +// renameMarker implements the @rename(location, new, golden) marker. +func renameMarker(mark marker, loc protocol.Location, newName string, golden *Golden) { + changed, err := rename(mark.run.env, loc, newName) + if err != nil { + mark.errorf("rename failed: %v. (Use @renameerr for expected errors.)", err) + return + } + checkDiffs(mark, changed, golden) +} + +// renameErrMarker implements the @renamererr(location, new, error) marker. 
+func renameErrMarker(mark marker, loc protocol.Location, newName string, wantErr stringMatcher) { + _, err := rename(mark.run.env, loc, newName) + wantErr.checkErr(mark, err) +} + +func selectionRangeMarker(mark marker, loc protocol.Location, g *Golden) { + ranges, err := mark.server().SelectionRange(mark.ctx(), &protocol.SelectionRangeParams{ + TextDocument: mark.document(), + Positions: []protocol.Position{loc.Range.Start}, + }) + if err != nil { + mark.errorf("SelectionRange failed: %v", err) + return + } + var buf bytes.Buffer + m := mark.mapper() + for i, path := range ranges { + fmt.Fprintf(&buf, "Ranges %d:", i) + rng := path + for { + s, e, err := m.RangeOffsets(rng.Range) + if err != nil { + mark.errorf("RangeOffsets failed: %v", err) + return + } + + var snippet string + if e-s < 30 { + snippet = string(m.Content[s:e]) + } else { + snippet = string(m.Content[s:s+15]) + "..." + string(m.Content[e-15:e]) + } + + fmt.Fprintf(&buf, "\n\t%v %q", rng.Range, strings.ReplaceAll(snippet, "\n", "\\n")) + + if rng.Parent == nil { + break + } + rng = *rng.Parent + } + buf.WriteRune('\n') + } + compareGolden(mark, buf.Bytes(), g) +} + +func tokenMarker(mark marker, loc protocol.Location, tokenType, mod string) { + tokens := mark.run.env.SemanticTokensRange(loc) + if len(tokens) != 1 { + mark.errorf("got %d tokens, want 1", len(tokens)) + return + } + tok := tokens[0] + if tok.TokenType != tokenType { + mark.errorf("token type = %q, want %q", tok.TokenType, tokenType) + } + if tok.Mod != mod { + mark.errorf("token mod = %q, want %q", tok.Mod, mod) + } +} + +func signatureMarker(mark marker, src protocol.Location, label string, active int64) { + got := mark.run.env.SignatureHelp(src) + if label == "" { + // A null result is expected. + // (There's no point having a @signatureerr marker + // because the server handler suppresses all errors.) 
+ if got != nil && len(got.Signatures) > 0 { + mark.errorf("signatureHelp = %v, want 0 signatures", got) + } + return + } + if got == nil || len(got.Signatures) != 1 { + mark.errorf("signatureHelp = %v, want exactly 1 signature", got) + return + } + if got := got.Signatures[0].Label; got != label { + mark.errorf("signatureHelp: got label %q, want %q", got, label) + } + if got := int64(got.ActiveParameter); got != active { + mark.errorf("signatureHelp: got active parameter %d, want %d", got, active) + } +} + +// rename returns the new contents of the files that would be modified +// by renaming the identifier at loc to newName. +func rename(env *integration.Env, loc protocol.Location, newName string) (map[string][]byte, error) { + // We call Server.Rename directly, instead of + // env.Editor.Rename(env.Ctx, loc, newName) + // to isolate Rename from PrepareRename, and because we don't + // want to modify the file system in a scenario with multiple + // @rename markers. + + editMap, err := env.Editor.Server.Rename(env.Ctx, &protocol.RenameParams{ + TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, + Position: loc.Range.Start, + NewName: newName, + }) + if err != nil { + return nil, err + } + + fileChanges := make(map[string][]byte) + if err := applyDocumentChanges(env, editMap.DocumentChanges, fileChanges); err != nil { + return nil, fmt.Errorf("applying document changes: %v", err) + } + return fileChanges, nil +} + +// applyDocumentChanges applies the given document changes to the editor buffer +// content, recording the resulting contents in the fileChanges map. It is an +// error for a change to an edit a file that is already present in the +// fileChanges map. 
+func applyDocumentChanges(env *integration.Env, changes []protocol.DocumentChanges, fileChanges map[string][]byte) error { + getMapper := func(path string) (*protocol.Mapper, error) { + if _, ok := fileChanges[path]; ok { + return nil, fmt.Errorf("internal error: %s is already edited", path) + } + return env.Editor.Mapper(path) + } + + for _, change := range changes { + if change.RenameFile != nil { + // rename + oldFile := env.Sandbox.Workdir.URIToPath(change.RenameFile.OldURI) + mapper, err := getMapper(oldFile) + if err != nil { + return err + } + newFile := env.Sandbox.Workdir.URIToPath(change.RenameFile.NewURI) + fileChanges[newFile] = mapper.Content + } else { + // edit + filename := env.Sandbox.Workdir.URIToPath(change.TextDocumentEdit.TextDocument.URI) + mapper, err := getMapper(filename) + if err != nil { + return err + } + patched, _, err := protocol.ApplyEdits(mapper, protocol.AsTextEdits(change.TextDocumentEdit.Edits)) + if err != nil { + return err + } + fileChanges[filename] = patched + } + } + + return nil +} + +func codeActionMarker(mark marker, start, end protocol.Location, actionKind string, g *Golden, titles ...string) { + // Request the range from start.Start to end.End. + loc := start + loc.Range.End = end.Range.End + + // Apply the fix it suggests. + changed, err := codeAction(mark.run.env, loc.URI, loc.Range, actionKind, nil, titles) + if err != nil { + mark.errorf("codeAction failed: %v", err) + return + } + + // Check the file state. 
+ checkChangedFiles(mark, changed, g) +} + +func codeActionEditMarker(mark marker, loc protocol.Location, actionKind string, g *Golden, titles ...string) { + changed, err := codeAction(mark.run.env, loc.URI, loc.Range, actionKind, nil, titles) + if err != nil { + mark.errorf("codeAction failed: %v", err) + return + } + + checkDiffs(mark, changed, g) +} + +func codeActionErrMarker(mark marker, start, end protocol.Location, actionKind string, wantErr stringMatcher) { + loc := start + loc.Range.End = end.Range.End + _, err := codeAction(mark.run.env, loc.URI, loc.Range, actionKind, nil, nil) + wantErr.checkErr(mark, err) +} + +// codeLensesMarker runs the @codelenses() marker, collecting @codelens marks +// in the current file and comparing with the result of the +// textDocument/codeLens RPC. +func codeLensesMarker(mark marker) { + type codeLens struct { + Range protocol.Range + Title string + } + + lenses := mark.run.env.CodeLens(mark.path()) + var got []codeLens + for _, lens := range lenses { + title := "" + if lens.Command != nil { + title = lens.Command.Title + } + got = append(got, codeLens{lens.Range, title}) + } + + var want []codeLens + mark.consumeExtraNotes("codelens", actionMarkerFunc(func(_ marker, loc protocol.Location, title string) { + want = append(want, codeLens{loc.Range, title}) + })) + + for _, s := range [][]codeLens{got, want} { + sort.Slice(s, func(i, j int) bool { + li, lj := s[i], s[j] + if c := protocol.CompareRange(li.Range, lj.Range); c != 0 { + return c < 0 + } + return li.Title < lj.Title + }) + } + + if diff := cmp.Diff(want, got); diff != "" { + mark.errorf("codelenses: unexpected diff (-want +got):\n%s", diff) + } +} + +func documentLinkMarker(mark marker, g *Golden) { + var b bytes.Buffer + links := mark.run.env.DocumentLink(mark.path()) + for _, l := range links { + if l.Target == nil { + mark.errorf("%s: nil link target", l.Range) + continue + } + loc := protocol.Location{URI: mark.uri(), Range: l.Range} + fmt.Fprintln(&b, 
mark.run.fmtLocDetails(loc, false), *l.Target) + } + + compareGolden(mark, b.Bytes(), g) +} + +// consumeExtraNotes runs the provided func for each extra note with the given +// name, and deletes all matching notes. +func (mark marker) consumeExtraNotes(name string, f func(marker)) { + uri := mark.uri() + notes := mark.run.extraNotes[uri][name] + delete(mark.run.extraNotes[uri], name) + + for _, note := range notes { + f(marker{run: mark.run, note: note}) + } +} + +// suggestedfixMarker implements the @suggestedfix(location, regexp, +// kind, golden) marker. It acts like @diag(location, regexp), to set +// the expectation of a diagnostic, but then it applies the first code +// action of the specified kind suggested by the matched diagnostic. +func suggestedfixMarker(mark marker, loc protocol.Location, re *regexp.Regexp, golden *Golden) { + loc.Range.End = loc.Range.Start // diagnostics ignore end position. + // Find and remove the matching diagnostic. + diag, ok := removeDiagnostic(mark, loc, re) + if !ok { + mark.errorf("no diagnostic at %v matches %q", loc, re) + return + } + + // Apply the fix it suggests. + changed, err := codeAction(mark.run.env, loc.URI, diag.Range, "quickfix", &diag, nil) + if err != nil { + mark.errorf("suggestedfix failed: %v. (Use @suggestedfixerr for expected errors.)", err) + return + } + + // Check the file state. + checkDiffs(mark, changed, golden) +} + +func suggestedfixErrMarker(mark marker, loc protocol.Location, re *regexp.Regexp, wantErr stringMatcher) { + loc.Range.End = loc.Range.Start // diagnostics ignore end position. + // Find and remove the matching diagnostic. + diag, ok := removeDiagnostic(mark, loc, re) + if !ok { + mark.errorf("no diagnostic at %v matches %q", loc, re) + return + } + + // Apply the fix it suggests. 
+ _, err := codeAction(mark.run.env, loc.URI, diag.Range, "quickfix", &diag, nil) + wantErr.checkErr(mark, err) +} + +// codeAction executes a textDocument/codeAction request for the specified +// location and kind. If diag is non-nil, it is used as the code action +// context. +// +// The resulting map contains resulting file contents after the code action is +// applied. Currently, this function does not support code actions that return +// edits directly; it only supports code action commands. +func codeAction(env *integration.Env, uri protocol.DocumentURI, rng protocol.Range, actionKind string, diag *protocol.Diagnostic, titles []string) (map[string][]byte, error) { + changes, err := codeActionChanges(env, uri, rng, actionKind, diag, titles) + if err != nil { + return nil, err + } + fileChanges := make(map[string][]byte) + if err := applyDocumentChanges(env, changes, fileChanges); err != nil { + return nil, fmt.Errorf("applying document changes: %v", err) + } + return fileChanges, nil +} + +// codeActionChanges executes a textDocument/codeAction request for the +// specified location and kind, and captures the resulting document changes. +// If diag is non-nil, it is used as the code action context. +// If titles is non-empty, the code action title must be present among the provided titles. +func codeActionChanges(env *integration.Env, uri protocol.DocumentURI, rng protocol.Range, actionKind string, diag *protocol.Diagnostic, titles []string) ([]protocol.DocumentChanges, error) { + // Request all code actions that apply to the diagnostic. + // (The protocol supports filtering using Context.Only={actionKind} + // but we can give a better error if we don't filter.) 
+ params := &protocol.CodeActionParams{ + TextDocument: protocol.TextDocumentIdentifier{URI: uri}, + Range: rng, + Context: protocol.CodeActionContext{ + Only: nil, // => all kinds + }, + } + if diag != nil { + params.Context.Diagnostics = []protocol.Diagnostic{*diag} + } + + actions, err := env.Editor.Server.CodeAction(env.Ctx, params) + if err != nil { + return nil, err + } + + // Find the sole candidates CodeAction of the specified kind (e.g. refactor.rewrite). + var candidates []protocol.CodeAction + for _, act := range actions { + if act.Kind == protocol.CodeActionKind(actionKind) { + if len(titles) > 0 { + for _, f := range titles { + if act.Title == f { + candidates = append(candidates, act) + break + } + } + } else { + candidates = append(candidates, act) + } + } + } + if len(candidates) != 1 { + for _, act := range actions { + env.T.Logf("found CodeAction Kind=%s Title=%q", act.Kind, act.Title) + } + return nil, fmt.Errorf("found %d CodeActions of kind %s matching filters %v for this diagnostic, want 1", len(candidates), actionKind, titles) + } + action := candidates[0] + + // Apply the codeAction. + // + // Spec: + // "If a code action provides an edit and a command, first the edit is + // executed and then the command." + // An action may specify an edit and/or a command, to be + // applied in that order. But since applyDocumentChanges(env, + // action.Edit.DocumentChanges) doesn't compose, for now we + // assert that actions return one or the other. + + // Resolve code action edits first if the client has resolve support + // and the code action has no edits. 
+ if action.Edit == nil { + editSupport, err := env.Editor.EditResolveSupport() + if err != nil { + return nil, err + } + if editSupport { + resolved, err := env.Editor.Server.ResolveCodeAction(env.Ctx, &action) + if err != nil { + return nil, err + } + action.Edit = resolved.Edit + } + } + + if action.Edit != nil { + if action.Edit.Changes != nil { + env.T.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Edit.Changes", action.Kind, action.Title) + } + if action.Edit.DocumentChanges != nil { + if action.Command != nil { + env.T.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Command", action.Kind, action.Title) + } + return action.Edit.DocumentChanges, nil + } + } + + if action.Command != nil { + // This is a typical CodeAction command: + // + // Title: "Implement error" + // Command: gopls.apply_fix + // Arguments: [{"Fix":"stub_methods","URI":".../a.go","Range":...}}] + // + // The client makes an ExecuteCommand RPC to the server, + // which dispatches it to the ApplyFix handler. + // ApplyFix dispatches to the "stub_methods" suggestedfix hook (the meat). + // The server then makes an ApplyEdit RPC to the client, + // whose Awaiter hook gathers the edits instead of applying them. + + _ = env.Awaiter.TakeDocumentChanges() // reset (assuming Env is confined to this thread) + + if _, err := env.Editor.Server.ExecuteCommand(env.Ctx, &protocol.ExecuteCommandParams{ + Command: action.Command.Command, + Arguments: action.Command.Arguments, + }); err != nil { + return nil, err + } + return env.Awaiter.TakeDocumentChanges(), nil + } + + return nil, nil +} + +// refsMarker implements the @refs marker. 
+func refsMarker(mark marker, src protocol.Location, want ...protocol.Location) { + refs := func(includeDeclaration bool, want []protocol.Location) error { + got, err := mark.server().References(mark.ctx(), &protocol.ReferenceParams{ + TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src), + Context: protocol.ReferenceContext{ + IncludeDeclaration: includeDeclaration, + }, + }) + if err != nil { + return err + } + + return compareLocations(mark, got, want) + } + + for _, includeDeclaration := range []bool{false, true} { + // Ignore first 'want' location if we didn't request the declaration. + // TODO(adonovan): don't assume a single declaration: + // there may be >1 if corresponding methods are considered. + want := want + if !includeDeclaration && len(want) > 0 { + want = want[1:] + } + if err := refs(includeDeclaration, want); err != nil { + mark.errorf("refs(includeDeclaration=%t) failed: %v", + includeDeclaration, err) + } + } +} + +// implementationMarker implements the @implementation marker. 
+func implementationMarker(mark marker, src protocol.Location, want ...protocol.Location) { + got, err := mark.server().Implementation(mark.ctx(), &protocol.ImplementationParams{ + TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src), + }) + if err != nil { + mark.errorf("implementation at %s failed: %v", src, err) + return + } + if err := compareLocations(mark, got, want); err != nil { + mark.errorf("implementation: %v", err) + } +} + +func itemLocation(item protocol.CallHierarchyItem) protocol.Location { + return protocol.Location{ + URI: item.URI, + Range: item.Range, + } +} + +func incomingCallsMarker(mark marker, src protocol.Location, want ...protocol.Location) { + getCalls := func(item protocol.CallHierarchyItem) ([]protocol.Location, error) { + calls, err := mark.server().IncomingCalls(mark.ctx(), &protocol.CallHierarchyIncomingCallsParams{Item: item}) + if err != nil { + return nil, err + } + var locs []protocol.Location + for _, call := range calls { + locs = append(locs, itemLocation(call.From)) + } + return locs, nil + } + callHierarchy(mark, src, getCalls, want) +} + +func outgoingCallsMarker(mark marker, src protocol.Location, want ...protocol.Location) { + getCalls := func(item protocol.CallHierarchyItem) ([]protocol.Location, error) { + calls, err := mark.server().OutgoingCalls(mark.ctx(), &protocol.CallHierarchyOutgoingCallsParams{Item: item}) + if err != nil { + return nil, err + } + var locs []protocol.Location + for _, call := range calls { + locs = append(locs, itemLocation(call.To)) + } + return locs, nil + } + callHierarchy(mark, src, getCalls, want) +} + +type callHierarchyFunc = func(protocol.CallHierarchyItem) ([]protocol.Location, error) + +func callHierarchy(mark marker, src protocol.Location, getCalls callHierarchyFunc, want []protocol.Location) { + items, err := mark.server().PrepareCallHierarchy(mark.ctx(), &protocol.CallHierarchyPrepareParams{ + TextDocumentPositionParams: 
protocol.LocationTextDocumentPositionParams(src), + }) + if err != nil { + mark.errorf("PrepareCallHierarchy failed: %v", err) + return + } + if nitems := len(items); nitems != 1 { + mark.errorf("PrepareCallHierarchy returned %d items, want exactly 1", nitems) + return + } + if loc := itemLocation(items[0]); loc != src { + mark.errorf("PrepareCallHierarchy found call %v, want %v", loc, src) + return + } + calls, err := getCalls(items[0]) + if err != nil { + mark.errorf("call hierarchy failed: %v", err) + return + } + if calls == nil { + calls = []protocol.Location{} + } + // TODO(rfindley): why aren't call hierarchy results stable? + sortLocs := func(locs []protocol.Location) { + sort.Slice(locs, func(i, j int) bool { + return protocol.CompareLocation(locs[i], locs[j]) < 0 + }) + } + sortLocs(want) + sortLocs(calls) + if d := cmp.Diff(want, calls); d != "" { + mark.errorf("call hierarchy: unexpected results (-want +got):\n%s", d) + } +} + +func inlayhintsMarker(mark marker, g *Golden) { + hints := mark.run.env.InlayHints(mark.path()) + + // Map inlay hints to text edits. 
+ edits := make([]protocol.TextEdit, len(hints)) + for i, hint := range hints { + var paddingLeft, paddingRight string + if hint.PaddingLeft { + paddingLeft = " " + } + if hint.PaddingRight { + paddingRight = " " + } + edits[i] = protocol.TextEdit{ + Range: protocol.Range{Start: hint.Position, End: hint.Position}, + NewText: fmt.Sprintf("<%s%s%s>", paddingLeft, hint.Label[0].Value, paddingRight), + } + } + + m := mark.mapper() + got, _, err := protocol.ApplyEdits(m, edits) + if err != nil { + mark.errorf("ApplyProtocolEdits: %v", err) + return + } + + compareGolden(mark, got, g) +} + +func prepareRenameMarker(mark marker, src, spn protocol.Location, placeholder string) { + params := &protocol.PrepareRenameParams{ + TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src), + } + got, err := mark.server().PrepareRename(mark.ctx(), params) + if err != nil { + mark.T().Fatal(err) + } + if placeholder == "" { + if got != nil { + mark.errorf("PrepareRename(...) = %v, want nil", got) + } + return + } + want := &protocol.PrepareRenameResult{Range: spn.Range, Placeholder: placeholder} + if diff := cmp.Diff(want, got); diff != "" { + mark.errorf("mismatching PrepareRename result:\n%s", diff) + } +} + +// symbolMarker implements the @symbol marker. +func symbolMarker(mark marker, golden *Golden) { + // Retrieve information about all symbols in this file. + symbols, err := mark.server().DocumentSymbol(mark.ctx(), &protocol.DocumentSymbolParams{ + TextDocument: protocol.TextDocumentIdentifier{URI: mark.uri()}, + }) + if err != nil { + mark.errorf("DocumentSymbol request failed: %v", err) + return + } + + // Format symbols one per line, sorted (in effect) by first column, a dotted name. + var lines []string + for _, symbol := range symbols { + // Each result element is a union of (legacy) + // SymbolInformation and (new) DocumentSymbol, + // so we ascertain which one and then transcode. 
+ data, err := json.Marshal(symbol) + if err != nil { + mark.T().Fatal(err) + } + if _, ok := symbol.(map[string]any)["location"]; ok { + // This case is not reached because Editor initialization + // enables HierarchicalDocumentSymbolSupport. + // TODO(adonovan): test this too. + var sym protocol.SymbolInformation + if err := json.Unmarshal(data, &sym); err != nil { + mark.T().Fatal(err) + } + mark.errorf("fake Editor doesn't support SymbolInformation") + + } else { + var sym protocol.DocumentSymbol // new hierarchical hotness + if err := json.Unmarshal(data, &sym); err != nil { + mark.T().Fatal(err) + } + + // Print each symbol in the response tree. + var visit func(sym protocol.DocumentSymbol, prefix []string) + visit = func(sym protocol.DocumentSymbol, prefix []string) { + var out strings.Builder + out.WriteString(strings.Join(prefix, ".")) + fmt.Fprintf(&out, " %q", sym.Detail) + if delta := sym.Range.End.Line - sym.Range.Start.Line; delta > 0 { + fmt.Fprintf(&out, " +%d lines", delta) + } + lines = append(lines, out.String()) + + for _, child := range sym.Children { + visit(child, append(prefix, child.Name)) + } + } + visit(sym, []string{sym.Name}) + } + } + sort.Strings(lines) + lines = append(lines, "") // match trailing newline in .txtar file + got := []byte(strings.Join(lines, "\n")) + + // Compare with golden. + want, ok := golden.Get(mark.T(), "", got) + if !ok { + mark.errorf("%s: missing golden file @%s", mark.note.Name, golden.id) + } else if diff := cmp.Diff(string(got), string(want)); diff != "" { + mark.errorf("%s: unexpected output: got:\n%s\nwant:\n%s\ndiff:\n%s", + mark.note.Name, got, want, diff) + } +} + +// compareLocations returns an error message if got and want are not +// the same set of locations. The marker is used only for fmtLoc. 
+func compareLocations(mark marker, got, want []protocol.Location) error { + toStrings := func(locs []protocol.Location) []string { + strs := make([]string, len(locs)) + for i, loc := range locs { + strs[i] = mark.run.fmtLoc(loc) + } + sort.Strings(strs) + return strs + } + if diff := cmp.Diff(toStrings(want), toStrings(got)); diff != "" { + return fmt.Errorf("incorrect result locations: (got %d, want %d):\n%s", + len(got), len(want), diff) + } + return nil +} + +func workspaceSymbolMarker(mark marker, query string, golden *Golden) { + params := &protocol.WorkspaceSymbolParams{ + Query: query, + } + + gotSymbols, err := mark.server().Symbol(mark.ctx(), params) + if err != nil { + mark.errorf("Symbol(%q) failed: %v", query, err) + return + } + var got bytes.Buffer + for _, s := range gotSymbols { + // Omit the txtar position of the symbol location; otherwise edits to the + // txtar archive lead to unexpected failures. + loc := mark.run.fmtLocDetails(s.Location, false) + // TODO(rfindley): can we do better here, by detecting if the location is + // relative to GOROOT? + if loc == "" { + loc = "<unknown>" + } + fmt.Fprintf(&got, "%s %s %s\n", loc, s.Name, s.Kind) + } + + compareGolden(mark, got.Bytes(), golden) +} + +// compareGolden compares the content of got with that of g.Get(""), reporting +// errors on any mismatch. +// +// TODO(rfindley): use this helper in more places. +func compareGolden(mark marker, got []byte, g *Golden) { + want, ok := g.Get(mark.T(), "", got) + if !ok { + mark.errorf("missing golden file @%s", g.id) + return + } + // Normalize newline termination: archive files (i.e. Golden content) can't + // contain non-newline terminated files, except in the special case where the + // file is completely empty. + // + // Note that txtar partitions a contiguous byte slice, so we must copy before + // appending. 
+ normalize := func(s []byte) []byte { + if n := len(s); n > 0 && s[n-1] != '\n' { + s = append(s[:n:n], '\n') // don't mutate array + } + return s + } + got = normalize(got) + want = normalize(want) + if diff := compare.Bytes(want, got); diff != "" { + mark.errorf("%s does not match @%s:\n%s", mark.note.Name, g.id, diff) + } +} diff --git a/gopls/internal/test/marker/testdata/callhierarchy/callhierarchy.txt b/gopls/internal/test/marker/testdata/callhierarchy/callhierarchy.txt new file mode 100644 index 00000000000..2621f6709fc --- /dev/null +++ b/gopls/internal/test/marker/testdata/callhierarchy/callhierarchy.txt @@ -0,0 +1,94 @@ +This test checks call hierarchy queries. + +-ignore_extra_diags due to the initialization cycle. + +-- flags -- +-ignore_extra_diags + +-- go.mod -- +module golang.org/lsptests/callhierarchy + +-- incoming/incoming.go -- +package incoming + +import "golang.org/lsptests/callhierarchy" + +// A is exported to test incoming calls across packages +func A() { //@loc(incomingA, "A") + callhierarchy.D() +} + +-- outgoing/outgoing.go -- +package outgoing + +// B is exported to test outgoing calls across packages +func B() { //@loc(outgoingB, "B") +} + +-- hierarchy.go -- +package callhierarchy + +import "golang.org/lsptests/callhierarchy/outgoing" + +func a() { //@loc(hierarchyA, "a") + D() +} + +func b() { //@loc(hierarchyB, "b") + D() +} + +// C is an exported function +func C() { //@loc(hierarchyC, "C") + D() + D() +} + +// To test hierarchy across function literals +var x = func() { //@loc(hierarchyLiteral, "func"),loc(hierarchyLiteralOut, "x") + D() +} + +// D is exported to test incoming/outgoing calls across packages +func D() { //@loc(hierarchyD, "D"),incomingcalls(hierarchyD, hierarchyA, hierarchyB, hierarchyC, hierarchyLiteral, incomingA),outgoingcalls(hierarchyD, hierarchyE, hierarchyF, hierarchyG, hierarchyLiteralOut, outgoingB, hierarchyFoo, hierarchyH, hierarchyI, hierarchyJ, hierarchyK) + e() + x() + F() + outgoing.B() + foo := 
func() {} //@loc(hierarchyFoo, "foo"),incomingcalls(hierarchyFoo, hierarchyD),outgoingcalls(hierarchyFoo) + foo() + + func() { + g() + }() + + var i Interface = impl{} + i.H() + i.I() + + s := Struct{} + s.J() + s.K() +} + +func e() {} //@loc(hierarchyE, "e") + +// F is an exported function +func F() {} //@loc(hierarchyF, "F") + +func g() {} //@loc(hierarchyG, "g") + +type Interface interface { + H() //@loc(hierarchyH, "H") + I() //@loc(hierarchyI, "I") +} + +type impl struct{} + +func (i impl) H() {} +func (i impl) I() {} + +type Struct struct { + J func() //@loc(hierarchyJ, "J") + K func() //@loc(hierarchyK, "K") +} diff --git a/gopls/internal/test/marker/testdata/codeaction/change_quote.txt b/gopls/internal/test/marker/testdata/codeaction/change_quote.txt new file mode 100644 index 00000000000..0fa144c1e56 --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/change_quote.txt @@ -0,0 +1,69 @@ +This test checks the behavior of the 'change quote' code action. + +-- flags -- +-ignore_extra_diags + +-- go.mod -- +module golang.org/lsptests/changequote + +go 1.18 + +-- a.go -- +package changequote + +import ( + "fmt" +) + +func foo() { + var s string + s = "hello" //@codeactionedit(`"`, "refactor.rewrite", a1, "Convert to raw string literal") + s = `hello` //@codeactionedit("`", "refactor.rewrite", a2, "Convert to interpreted string literal") + s = "hello\tworld" //@codeactionedit(`"`, "refactor.rewrite", a3, "Convert to raw string literal") + s = `hello world` //@codeactionedit("`", "refactor.rewrite", a4, "Convert to interpreted string literal") + s = "hello\nworld" //@codeactionedit(`"`, "refactor.rewrite", a5, "Convert to raw string literal") + // add a comment to avoid affect diff compute + s = `hello +world` //@codeactionedit("`", "refactor.rewrite", a6, "Convert to interpreted string literal") + s = "hello\"world" //@codeactionedit(`"`, "refactor.rewrite", a7, "Convert to raw string literal") + s = `hello"world` //@codeactionedit("`", 
"refactor.rewrite", a8, "Convert to interpreted string literal") + s = "hello\x1bworld" //@codeactionerr(`"`, "", "refactor.rewrite", re"found 0 CodeActions") + s = "hello`world" //@codeactionerr(`"`, "", "refactor.rewrite", re"found 0 CodeActions") + s = "hello\x7fworld" //@codeactionerr(`"`, "", "refactor.rewrite", re"found 0 CodeActions") + fmt.Println(s) +} + +-- @a1/a.go -- +@@ -9 +9 @@ +- s = "hello" //@codeactionedit(`"`, "refactor.rewrite", a1, "Convert to raw string literal") ++ s = `hello` //@codeactionedit(`"`, "refactor.rewrite", a1, "Convert to raw string literal") +-- @a2/a.go -- +@@ -10 +10 @@ +- s = `hello` //@codeactionedit("`", "refactor.rewrite", a2, "Convert to interpreted string literal") ++ s = "hello" //@codeactionedit("`", "refactor.rewrite", a2, "Convert to interpreted string literal") +-- @a3/a.go -- +@@ -11 +11 @@ +- s = "hello\tworld" //@codeactionedit(`"`, "refactor.rewrite", a3, "Convert to raw string literal") ++ s = `hello world` //@codeactionedit(`"`, "refactor.rewrite", a3, "Convert to raw string literal") +-- @a4/a.go -- +@@ -12 +12 @@ +- s = `hello world` //@codeactionedit("`", "refactor.rewrite", a4, "Convert to interpreted string literal") ++ s = "hello\tworld" //@codeactionedit("`", "refactor.rewrite", a4, "Convert to interpreted string literal") +-- @a5/a.go -- +@@ -13 +13,2 @@ +- s = "hello\nworld" //@codeactionedit(`"`, "refactor.rewrite", a5, "Convert to raw string literal") ++ s = `hello ++world` //@codeactionedit(`"`, "refactor.rewrite", a5, "Convert to raw string literal") +-- @a6/a.go -- +@@ -15,2 +15 @@ +- s = `hello +-world` //@codeactionedit("`", "refactor.rewrite", a6, "Convert to interpreted string literal") ++ s = "hello\nworld" //@codeactionedit("`", "refactor.rewrite", a6, "Convert to interpreted string literal") +-- @a7/a.go -- +@@ -17 +17 @@ +- s = "hello\"world" //@codeactionedit(`"`, "refactor.rewrite", a7, "Convert to raw string literal") ++ s = `hello"world` //@codeactionedit(`"`, "refactor.rewrite", a7, 
"Convert to raw string literal") +-- @a8/a.go -- +@@ -18 +18 @@ +- s = `hello"world` //@codeactionedit("`", "refactor.rewrite", a8, "Convert to interpreted string literal") ++ s = "hello\"world" //@codeactionedit("`", "refactor.rewrite", a8, "Convert to interpreted string literal") diff --git a/gopls/internal/test/marker/testdata/codeaction/extract-variadic-63287.txt b/gopls/internal/test/marker/testdata/codeaction/extract-variadic-63287.txt new file mode 100644 index 00000000000..d5dbe931226 --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/extract-variadic-63287.txt @@ -0,0 +1,28 @@ +This test exercises extract on a variadic function. +It is a regression test for bug #63287 in which +the final paramater's "..." would go missing. + +-- go.mod -- +module example.com +go 1.18 + +-- a/a.go -- +package a + +//@codeactionedit(block, "refactor.extract", out, "Extract function") + +func _() { + var logf func(string, ...any) + { println(logf) } //@loc(block, re`{.*}`) +} + +-- @out/a/a.go -- +@@ -7 +7 @@ +- { println(logf) } //@loc(block, re`{.*}`) ++ { newFunction(logf) } //@loc(block, re`{.*}`) +@@ -10 +10,4 @@ ++func newFunction(logf func( string, ...any)) { ++ println(logf) ++} ++ +-- end -- diff --git a/gopls/internal/regtest/marker/testdata/codeaction/extract_method.txt b/gopls/internal/test/marker/testdata/codeaction/extract_method.txt similarity index 89% rename from gopls/internal/regtest/marker/testdata/codeaction/extract_method.txt rename to gopls/internal/test/marker/testdata/codeaction/extract_method.txt index 3fc9f58923a..943a3ac672c 100644 --- a/gopls/internal/regtest/marker/testdata/codeaction/extract_method.txt +++ b/gopls/internal/test/marker/testdata/codeaction/extract_method.txt @@ -43,51 +43,43 @@ func (a A) Add() int { } -- @func1/basic.go -- ---- before -+++ after -@@ -22 +22,5 @@ +@@ -22 +22 @@ - return a.x < a.y //@loc(A_XLessThanYP, re`return.*a\.y`) + return newFunction(a) //@loc(A_XLessThanYP, re`return.*a\.y`) -+} -+ +@@ -25 
+25,4 @@ +func newFunction(a *A) bool { + return a.x < a.y ++} ++ -- @func2/basic.go -- ---- before -+++ after -@@ -26,2 +26,7 @@ +@@ -26 +26 @@ - sum := a.x + a.y //@loc(A_AddP1, re`sum.*a\.y`) + sum := newFunction(a) //@loc(A_AddP1, re`sum.*a\.y`) -- return sum //@loc(A_AddP2, re`return.*sum`) -+ return sum //@loc(A_AddP2, re`return.*sum`) -+} -+ +@@ -30 +30,5 @@ +func newFunction(a *A) int { + sum := a.x + a.y + return sum ++} ++ -- @func3/basic.go -- ---- before -+++ after -@@ -27 +27,5 @@ +@@ -27 +27 @@ - return sum //@loc(A_AddP2, re`return.*sum`) + return newFunction(sum) //@loc(A_AddP2, re`return.*sum`) -+} -+ +@@ -30 +30,4 @@ +func newFunction(sum int) int { + return sum ++} ++ -- @func4/basic.go -- ---- before -+++ after -@@ -31 +31,5 @@ +@@ -31 +31 @@ - return a.x < a.y //@loc(A_XLessThanY, re`return.*a\.y`) + return newFunction(a) //@loc(A_XLessThanY, re`return.*a\.y`) -+} -+ +@@ -34 +34,4 @@ +func newFunction(a A) bool { + return a.x < a.y ++} ++ -- @func5/basic.go -- ---- before -+++ after @@ -35 +35 @@ - sum := a.x + a.y //@loc(A_Add1, re`sum.*a\.y`) + sum := newFunction(a) //@loc(A_Add1, re`sum.*a\.y`) @@ -98,8 +90,6 @@ func (a A) Add() int { +} + -- @func6/basic.go -- ---- before -+++ after @@ -36 +36 @@ - return sum //@loc(A_Add2, re`return.*sum`) + return newFunction(sum) //@loc(A_Add2, re`return.*sum`) @@ -109,51 +99,43 @@ func (a A) Add() int { +} + -- @meth1/basic.go -- ---- before -+++ after -@@ -22 +22,5 @@ +@@ -22 +22 @@ - return a.x < a.y //@loc(A_XLessThanYP, re`return.*a\.y`) + return a.newMethod() //@loc(A_XLessThanYP, re`return.*a\.y`) -+} -+ +@@ -25 +25,4 @@ +func (a *A) newMethod() bool { + return a.x < a.y ++} ++ -- @meth2/basic.go -- ---- before -+++ after -@@ -26,2 +26,7 @@ +@@ -26 +26 @@ - sum := a.x + a.y //@loc(A_AddP1, re`sum.*a\.y`) + sum := a.newMethod() //@loc(A_AddP1, re`sum.*a\.y`) -- return sum //@loc(A_AddP2, re`return.*sum`) -+ return sum //@loc(A_AddP2, re`return.*sum`) -+} -+ +@@ -30 +30,5 @@ +func (a *A) newMethod() 
int { + sum := a.x + a.y + return sum ++} ++ -- @meth3/basic.go -- ---- before -+++ after -@@ -27 +27,5 @@ +@@ -27 +27 @@ - return sum //@loc(A_AddP2, re`return.*sum`) + return a.newMethod(sum) //@loc(A_AddP2, re`return.*sum`) -+} -+ +@@ -30 +30,4 @@ +func (*A) newMethod(sum int) int { + return sum ++} ++ -- @meth4/basic.go -- ---- before -+++ after -@@ -31 +31,5 @@ +@@ -31 +31 @@ - return a.x < a.y //@loc(A_XLessThanY, re`return.*a\.y`) + return a.newMethod() //@loc(A_XLessThanY, re`return.*a\.y`) -+} -+ +@@ -34 +34,4 @@ +func (a A) newMethod() bool { + return a.x < a.y ++} ++ -- @meth5/basic.go -- ---- before -+++ after @@ -35 +35 @@ - sum := a.x + a.y //@loc(A_Add1, re`sum.*a\.y`) + sum := a.newMethod() //@loc(A_Add1, re`sum.*a\.y`) @@ -164,8 +146,6 @@ func (a A) Add() int { +} + -- @meth6/basic.go -- ---- before -+++ after @@ -36 +36 @@ - return sum //@loc(A_Add2, re`return.*sum`) + return a.newMethod(sum) //@loc(A_Add2, re`return.*sum`) @@ -201,18 +181,15 @@ func (b *B) LongList(ctx context.Context) (int, error) { return p1 + p2 + p3, ctx.Err() //@loc(B_LongList, re`return.*ctx\.Err\(\)`) } -- @contextMeth1/context.go -- ---- before -+++ after -@@ -17 +17,5 @@ +@@ -17 +17 @@ - return sum, ctx.Err() //@loc(B_AddP, re`return.*ctx\.Err\(\)`) + return b.newMethod(ctx, sum) //@loc(B_AddP, re`return.*ctx\.Err\(\)`) -+} -+ +@@ -20 +20,4 @@ +func (*B) newMethod(ctx context.Context, sum int) (int, error) { + return sum, ctx.Err() ++} ++ -- @contextMeth2/context.go -- ---- before -+++ after @@ -24 +24 @@ - return p1 + p2 + p3, ctx.Err() //@loc(B_LongList, re`return.*ctx\.Err\(\)`) + return b.newMethod(ctx, p1, p2, p3) //@loc(B_LongList, re`return.*ctx\.Err\(\)`) @@ -222,8 +199,6 @@ func (b *B) LongList(ctx context.Context) (int, error) { + return p1 + p2 + p3, ctx.Err() +} -- @contextFunc2/context.go -- ---- before -+++ after @@ -24 +24 @@ - return p1 + p2 + p3, ctx.Err() //@loc(B_LongList, re`return.*ctx\.Err\(\)`) + return newFunction(ctx, p1, p2, p3) 
//@loc(B_LongList, re`return.*ctx\.Err\(\)`) @@ -233,12 +208,11 @@ func (b *B) LongList(ctx context.Context) (int, error) { + return p1 + p2 + p3, ctx.Err() +} -- @contextFunc1/context.go -- ---- before -+++ after -@@ -17 +17,5 @@ +@@ -17 +17 @@ - return sum, ctx.Err() //@loc(B_AddP, re`return.*ctx\.Err\(\)`) + return newFunction(ctx, sum) //@loc(B_AddP, re`return.*ctx\.Err\(\)`) -+} -+ +@@ -20 +20,4 @@ +func newFunction(ctx context.Context, sum int) (int, error) { + return sum, ctx.Err() ++} ++ diff --git a/gopls/internal/regtest/marker/testdata/codeaction/extract_variable.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable.txt similarity index 92% rename from gopls/internal/regtest/marker/testdata/codeaction/extract_variable.txt rename to gopls/internal/test/marker/testdata/codeaction/extract_variable.txt index 81226aed157..685b4ff9372 100644 --- a/gopls/internal/regtest/marker/testdata/codeaction/extract_variable.txt +++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable.txt @@ -1,4 +1,5 @@ This test checks the behavior of the 'extract variable' code action. +See extract_variable_resolve.txt for the same test with resolve support. 
-- flags -- -ignore_extra_diags @@ -12,17 +13,11 @@ func _() { } -- @basic_lit1/basic_lit.go -- ---- before -+++ after -@@ -3,2 +3,3 @@ --func _() { -+func _() { -+ x := 1 +@@ -4 +4,2 @@ - var _ = 1 + 2 //@codeactionedit("1", "refactor.extract", basic_lit1) ++ x := 1 + var _ = x + 2 //@codeactionedit("1", "refactor.extract", basic_lit1) -- @basic_lit2/basic_lit.go -- ---- before -+++ after @@ -5 +5,2 @@ - var _ = 3 + 4 //@codeactionedit("3 + 4", "refactor.extract", basic_lit2) + x := 3 + 4 @@ -39,15 +34,11 @@ func _() { } -- @func_call1/func_call.go -- ---- before -+++ after @@ -6 +6,2 @@ - x0 := append([]int{}, 1) //@codeactionedit("append([]int{}, 1)", "refactor.extract", func_call1) + x := append([]int{}, 1) + x0 := x //@codeactionedit("append([]int{}, 1)", "refactor.extract", func_call1) -- @func_call2/func_call.go -- ---- before -+++ after @@ -8 +8,2 @@ - b, err := strconv.Atoi(str) //@codeactionedit("strconv.Atoi(str)", "refactor.extract", func_call2) + x, x1 := strconv.Atoi(str) @@ -68,15 +59,11 @@ func _() { } -- @scope1/scope.go -- ---- before -+++ after @@ -8 +8,2 @@ - y := ast.CompositeLit{} //@codeactionedit("ast.CompositeLit{}", "refactor.extract", scope1) + x := ast.CompositeLit{} + y := x //@codeactionedit("ast.CompositeLit{}", "refactor.extract", scope1) -- @scope2/scope.go -- ---- before -+++ after @@ -11 +11,2 @@ - x1 := !false //@codeactionedit("!false", "refactor.extract", scope2) + x := !false diff --git a/gopls/internal/test/marker/testdata/codeaction/extract_variable_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable_resolve.txt new file mode 100644 index 00000000000..dc6ad787afb --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable_resolve.txt @@ -0,0 +1,81 @@ +This test checks the behavior of the 'extract variable' code action, with resolve support. +See extract_variable.txt for the same test without resolve support. 
+ +-- capabilities.json -- +{ + "textDocument": { + "codeAction": { + "dataSupport": true, + "resolveSupport": { + "properties": ["edit"] + } + } + } +} +-- flags -- +-ignore_extra_diags + +-- basic_lit.go -- +package extract + +func _() { + var _ = 1 + 2 //@codeactionedit("1", "refactor.extract", basic_lit1) + var _ = 3 + 4 //@codeactionedit("3 + 4", "refactor.extract", basic_lit2) +} + +-- @basic_lit1/basic_lit.go -- +@@ -4 +4,2 @@ +- var _ = 1 + 2 //@codeactionedit("1", "refactor.extract", basic_lit1) ++ x := 1 ++ var _ = x + 2 //@codeactionedit("1", "refactor.extract", basic_lit1) +-- @basic_lit2/basic_lit.go -- +@@ -5 +5,2 @@ +- var _ = 3 + 4 //@codeactionedit("3 + 4", "refactor.extract", basic_lit2) ++ x := 3 + 4 ++ var _ = x //@codeactionedit("3 + 4", "refactor.extract", basic_lit2) +-- func_call.go -- +package extract + +import "strconv" + +func _() { + x0 := append([]int{}, 1) //@codeactionedit("append([]int{}, 1)", "refactor.extract", func_call1) + str := "1" + b, err := strconv.Atoi(str) //@codeactionedit("strconv.Atoi(str)", "refactor.extract", func_call2) +} + +-- @func_call1/func_call.go -- +@@ -6 +6,2 @@ +- x0 := append([]int{}, 1) //@codeactionedit("append([]int{}, 1)", "refactor.extract", func_call1) ++ x := append([]int{}, 1) ++ x0 := x //@codeactionedit("append([]int{}, 1)", "refactor.extract", func_call1) +-- @func_call2/func_call.go -- +@@ -8 +8,2 @@ +- b, err := strconv.Atoi(str) //@codeactionedit("strconv.Atoi(str)", "refactor.extract", func_call2) ++ x, x1 := strconv.Atoi(str) ++ b, err := x, x1 //@codeactionedit("strconv.Atoi(str)", "refactor.extract", func_call2) +-- scope.go -- +package extract + +import "go/ast" + +func _() { + x0 := 0 + if true { + y := ast.CompositeLit{} //@codeactionedit("ast.CompositeLit{}", "refactor.extract", scope1) + } + if true { + x1 := !false //@codeactionedit("!false", "refactor.extract", scope2) + } +} + +-- @scope1/scope.go -- +@@ -8 +8,2 @@ +- y := ast.CompositeLit{} //@codeactionedit("ast.CompositeLit{}", 
"refactor.extract", scope1) ++ x := ast.CompositeLit{} ++ y := x //@codeactionedit("ast.CompositeLit{}", "refactor.extract", scope1) +-- @scope2/scope.go -- +@@ -11 +11,2 @@ +- x1 := !false //@codeactionedit("!false", "refactor.extract", scope2) ++ x := !false ++ x1 := x //@codeactionedit("!false", "refactor.extract", scope2) diff --git a/gopls/internal/regtest/marker/testdata/codeaction/fill_struct.txt b/gopls/internal/test/marker/testdata/codeaction/fill_struct.txt similarity index 93% rename from gopls/internal/regtest/marker/testdata/codeaction/fill_struct.txt rename to gopls/internal/test/marker/testdata/codeaction/fill_struct.txt index c5398ead279..deac1d78507 100644 --- a/gopls/internal/regtest/marker/testdata/codeaction/fill_struct.txt +++ b/gopls/internal/test/marker/testdata/codeaction/fill_struct.txt @@ -1,4 +1,5 @@ This test checks the behavior of the 'fill struct' code action. +See fill_struct_resolve.txt for same test with resolve support. -- flags -- -ignore_extra_diags @@ -45,16 +46,12 @@ var _ = nestedStruct{} //@codeactionedit("}", "refactor.rewrite", a3) var _ = data.B{} //@codeactionedit("}", "refactor.rewrite", a4) -- @a1/a.go -- ---- before -+++ after @@ -11 +11,3 @@ -var _ = basicStruct{} //@codeactionedit("}", "refactor.rewrite", a1) +var _ = basicStruct{ + foo: 0, +} //@codeactionedit("}", "refactor.rewrite", a1) -- @a2/a.go -- ---- before -+++ after @@ -18 +18,4 @@ -var _ = twoArgStruct{} //@codeactionedit("}", "refactor.rewrite", a2) +var _ = twoArgStruct{ @@ -62,8 +59,6 @@ var _ = data.B{} //@codeactionedit("}", "refactor.rewrite", a4) + bar: "", +} //@codeactionedit("}", "refactor.rewrite", a2) -- @a3/a.go -- ---- before -+++ after @@ -25 +25,4 @@ -var _ = nestedStruct{} //@codeactionedit("}", "refactor.rewrite", a3) +var _ = nestedStruct{ @@ -71,8 +66,6 @@ var _ = data.B{} //@codeactionedit("}", "refactor.rewrite", a4) + basic: basicStruct{}, +} //@codeactionedit("}", "refactor.rewrite", a3) -- @a4/a.go -- ---- before -+++ after @@ -27 
+27,3 @@ -var _ = data.B{} //@codeactionedit("}", "refactor.rewrite", a4) +var _ = data.B{ @@ -97,11 +90,11 @@ type funStruct struct { var _ = funStruct{} //@codeactionedit("}", "refactor.rewrite", a22) -type funStructCompex struct { +type funStructComplex struct { fn func(i int, s string) (string, int) } -var _ = funStructCompex{} //@codeactionedit("}", "refactor.rewrite", a23) +var _ = funStructComplex{} //@codeactionedit("}", "refactor.rewrite", a23) type funStructEmpty struct { fn func() @@ -110,8 +103,6 @@ type funStructEmpty struct { var _ = funStructEmpty{} //@codeactionedit("}", "refactor.rewrite", a24) -- @a21/a2.go -- ---- before -+++ after @@ -11 +11,7 @@ -var _ = typedStruct{} //@codeactionedit("}", "refactor.rewrite", a21) +var _ = typedStruct{ @@ -122,8 +113,6 @@ var _ = funStructEmpty{} //@codeactionedit("}", "refactor.rewrite", a24) + a: [2]string{}, +} //@codeactionedit("}", "refactor.rewrite", a21) -- @a22/a2.go -- ---- before -+++ after @@ -17 +17,4 @@ -var _ = funStruct{} //@codeactionedit("}", "refactor.rewrite", a22) +var _ = funStruct{ @@ -131,17 +120,13 @@ var _ = funStructEmpty{} //@codeactionedit("}", "refactor.rewrite", a24) + }, +} //@codeactionedit("}", "refactor.rewrite", a22) -- @a23/a2.go -- ---- before -+++ after @@ -23 +23,4 @@ --var _ = funStructCompex{} //@codeactionedit("}", "refactor.rewrite", a23) -+var _ = funStructCompex{ +-var _ = funStructComplex{} //@codeactionedit("}", "refactor.rewrite", a23) ++var _ = funStructComplex{ + fn: func(i int, s string) (string, int) { + }, +} //@codeactionedit("}", "refactor.rewrite", a23) -- @a24/a2.go -- ---- before -+++ after @@ -29 +29,4 @@ -var _ = funStructEmpty{} //@codeactionedit("}", "refactor.rewrite", a24) +var _ = funStructEmpty{ @@ -192,8 +177,6 @@ var _ = []ast.BasicLit{ var _ = []ast.BasicLit{{}} //@codeactionedit("}", "refactor.rewrite", a35) -- @a31/a3.go -- ---- before -+++ after @@ -17 +17,4 @@ -var _ = Bar{} //@codeactionedit("}", "refactor.rewrite", a31) +var _ = Bar{ @@ 
-201,8 +184,6 @@ var _ = []ast.BasicLit{{}} //@codeactionedit("}", "refactor.rewrite", a35) + Y: &Foo{}, +} //@codeactionedit("}", "refactor.rewrite", a31) -- @a32/a3.go -- ---- before -+++ after @@ -28 +28,9 @@ -var _ = importedStruct{} //@codeactionedit("}", "refactor.rewrite", a32) +var _ = importedStruct{ @@ -215,8 +196,6 @@ var _ = []ast.BasicLit{{}} //@codeactionedit("}", "refactor.rewrite", a35) + st: ast.CompositeLit{}, +} //@codeactionedit("}", "refactor.rewrite", a32) -- @a33/a3.go -- ---- before -+++ after @@ -36 +36,5 @@ -var _ = pointerBuiltinStruct{} //@codeactionedit("}", "refactor.rewrite", a33) +var _ = pointerBuiltinStruct{ @@ -225,8 +204,6 @@ var _ = []ast.BasicLit{{}} //@codeactionedit("}", "refactor.rewrite", a35) + i: new(int), +} //@codeactionedit("}", "refactor.rewrite", a33) -- @a34/a3.go -- ---- before -+++ after @@ -39 +39,5 @@ - {}, //@codeactionedit("}", "refactor.rewrite", a34) + { @@ -235,8 +212,6 @@ var _ = []ast.BasicLit{{}} //@codeactionedit("}", "refactor.rewrite", a35) + Value: "", + }, //@codeactionedit("}", "refactor.rewrite", a34) -- @a35/a3.go -- ---- before -+++ after @@ -42 +42,5 @@ -var _ = []ast.BasicLit{{}} //@codeactionedit("}", "refactor.rewrite", a35) +var _ = []ast.BasicLit{{ @@ -286,24 +261,18 @@ func fill() { } -- @a41/a4.go -- ---- before -+++ after @@ -25 +25,3 @@ - var _ = iStruct{} //@codeactionedit("}", "refactor.rewrite", a41) + var _ = iStruct{ + X: x, + } //@codeactionedit("}", "refactor.rewrite", a41) -- @a42/a4.go -- ---- before -+++ after @@ -28 +28,3 @@ - var _ = sStruct{} //@codeactionedit("}", "refactor.rewrite", a42) + var _ = sStruct{ + str: s, + } //@codeactionedit("}", "refactor.rewrite", a42) -- @a43/a4.go -- ---- before -+++ after @@ -35 +35,5 @@ - var _ = multiFill{} //@codeactionedit("}", "refactor.rewrite", a43) + var _ = multiFill{ @@ -312,8 +281,6 @@ func fill() { + arr: []int{}, + } //@codeactionedit("}", "refactor.rewrite", a43) -- @a45/a4.go -- ---- before -+++ after @@ -38 +38,3 @@ - 
var _ = assignStruct{} //@codeactionedit("}", "refactor.rewrite", a45) + var _ = assignStruct{ @@ -348,8 +315,6 @@ func fill() { } -- @fill_struct1/fill_struct.go -- ---- before -+++ after @@ -20 +20,7 @@ - a := StructA{} //@codeactionedit("}", "refactor.rewrite", fill_struct1) + a := StructA{ @@ -360,24 +325,18 @@ func fill() { + StructB: StructB{}, + } //@codeactionedit("}", "refactor.rewrite", fill_struct1) -- @fill_struct2/fill_struct.go -- ---- before -+++ after @@ -21 +21,3 @@ - b := StructA2{} //@codeactionedit("}", "refactor.rewrite", fill_struct2) + b := StructA2{ + B: &StructB{}, + } //@codeactionedit("}", "refactor.rewrite", fill_struct2) -- @fill_struct3/fill_struct.go -- ---- before -+++ after @@ -22 +22,3 @@ - c := StructA3{} //@codeactionedit("}", "refactor.rewrite", fill_struct3) + c := StructA3{ + B: StructB{}, + } //@codeactionedit("}", "refactor.rewrite", fill_struct3) -- @fill_struct4/fill_struct.go -- ---- before -+++ after @@ -24 +24,3 @@ - _ = StructA3{} //@codeactionedit("}", "refactor.rewrite", fill_struct4) + _ = StructA3{ @@ -399,8 +358,6 @@ func fill() { _ := StructAnon{} //@codeactionedit("}", "refactor.rewrite", fill_struct_anon) } -- @fill_struct_anon/fill_struct_anon.go -- ---- before -+++ after @@ -13 +13,5 @@ - _ := StructAnon{} //@codeactionedit("}", "refactor.rewrite", fill_struct_anon) + _ := StructAnon{ @@ -426,8 +383,6 @@ func nested() { } -- @fill_nested/fill_struct_nested.go -- ---- before -+++ after @@ -13 +13,3 @@ - StructC: StructC{}, //@codeactionedit("}", "refactor.rewrite", fill_nested) + StructC: StructC{ @@ -447,16 +402,12 @@ func unexported() { _ = h2.Client{} //@codeactionedit("}", "refactor.rewrite", fill_struct_package2) } -- @fill_struct_package1/fill_struct_package.go -- ---- before -+++ after @@ -10 +10,3 @@ - a := data.B{} //@codeactionedit("}", "refactor.rewrite", fill_struct_package1) + a := data.B{ + ExportedInt: 0, + } //@codeactionedit("}", "refactor.rewrite", fill_struct_package1) -- 
@fill_struct_package2/fill_struct_package.go -- ---- before -+++ after @@ -11 +11,7 @@ - _ = h2.Client{} //@codeactionedit("}", "refactor.rewrite", fill_struct_package2) + _ = h2.Client{ @@ -493,22 +444,18 @@ func fill() { } -- @fill_struct_partial1/fill_struct_partial.go -- ---- before -+++ after @@ -16 +16,3 @@ - PrefilledInt: 5, + PrefilledInt: 5, + UnfilledInt: 0, + StructPartialB: StructPartialB{}, -- @fill_struct_partial2/fill_struct_partial.go -- ---- before -+++ after @@ -19,4 +19,2 @@ - /* this comment should disappear */ -+ PrefilledInt: 7, - PrefilledInt: 7, // This comment should be blown away. - /* As should - this one */ ++ PrefilledInt: 7, + UnfilledInt: 0, -- fill_struct_spaces.go -- package fillstruct @@ -522,8 +469,6 @@ func spaces() { } -- @fill_struct_spaces/fill_struct_spaces.go -- ---- before -+++ after @@ -8 +8,3 @@ - d := StructD{} //@codeactionedit("}", "refactor.rewrite", fill_struct_spaces) + d := StructD{ @@ -544,8 +489,6 @@ func fill() { } -- @fill_struct_unsafe/fill_struct_unsafe.go -- ---- before -+++ after @@ -11 +11,4 @@ - _ := unsafeStruct{} //@codeactionedit("}", "refactor.rewrite", fill_struct_unsafe) + _ := unsafeStruct{ @@ -588,16 +531,12 @@ func _[T any]() { _ = S{} //@codeactionedit("}", "refactor.rewrite", typeparams5) } -- @typeparams1/typeparams.go -- ---- before -+++ after @@ -11 +11,3 @@ -var _ = basicStructWithTypeParams[int]{} //@codeactionedit("}", "refactor.rewrite", typeparams1) +var _ = basicStructWithTypeParams[int]{ + foo: 0, +} //@codeactionedit("}", "refactor.rewrite", typeparams1) -- @typeparams2/typeparams.go -- ---- before -+++ after @@ -18 +18,4 @@ -var _ = twoArgStructWithTypeParams[string, int]{} //@codeactionedit("}", "refactor.rewrite", typeparams2) +var _ = twoArgStructWithTypeParams[string, int]{ @@ -605,15 +544,9 @@ func _[T any]() { + bar: 0, +} //@codeactionedit("}", "refactor.rewrite", typeparams2) -- @typeparams3/typeparams.go -- ---- before -+++ after -@@ -20 +20,2 @@ --var _ = 
twoArgStructWithTypeParams[int, string]{ -+var _ = twoArgStructWithTypeParams[int, string]{ +@@ -21 +21 @@ + foo: 0, -- @typeparams4/typeparams.go -- ---- before -+++ after @@ -29 +29,4 @@ -var _ = nestedStructWithTypeParams{} //@codeactionedit("}", "refactor.rewrite", typeparams4) +var _ = nestedStructWithTypeParams{ @@ -621,10 +554,22 @@ func _[T any]() { + basic: basicStructWithTypeParams{}, +} //@codeactionedit("}", "refactor.rewrite", typeparams4) -- @typeparams5/typeparams.go -- ---- before -+++ after @@ -33 +33,3 @@ - _ = S{} //@codeactionedit("}", "refactor.rewrite", typeparams5) + _ = S{ + t: *new(T), + } //@codeactionedit("}", "refactor.rewrite", typeparams5) +-- issue63921.go -- +package fillstruct + +// Test for golang/go#63921: fillstruct panicked with invalid fields. +type invalidStruct struct { + F int + Undefined +} + +func _() { + // Note: the golden content for issue63921 is empty: fillstruct produces no + // edits, but does not panic. + invalidStruct{} //@codeactionedit("}", "refactor.rewrite", issue63921) +} diff --git a/gopls/internal/test/marker/testdata/codeaction/fill_struct_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/fill_struct_resolve.txt new file mode 100644 index 00000000000..e553d1c5993 --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/fill_struct_resolve.txt @@ -0,0 +1,586 @@ +This test checks the behavior of the 'fill struct' code action, with resolve support. +See fill_struct.txt for same test without resolve support. 
+ +-- capabilities.json -- +{ + "textDocument": { + "codeAction": { + "dataSupport": true, + "resolveSupport": { + "properties": ["edit"] + } + } + } +} +-- flags -- +-ignore_extra_diags + +-- go.mod -- +module golang.org/lsptests/fillstruct + +go 1.18 + +-- data/data.go -- +package data + +type B struct { + ExportedInt int + unexportedInt int +} + +-- a.go -- +package fillstruct + +import ( + "golang.org/lsptests/fillstruct/data" +) + +type basicStruct struct { + foo int +} + +var _ = basicStruct{} //@codeactionedit("}", "refactor.rewrite", a1) + +type twoArgStruct struct { + foo int + bar string +} + +var _ = twoArgStruct{} //@codeactionedit("}", "refactor.rewrite", a2) + +type nestedStruct struct { + bar string + basic basicStruct +} + +var _ = nestedStruct{} //@codeactionedit("}", "refactor.rewrite", a3) + +var _ = data.B{} //@codeactionedit("}", "refactor.rewrite", a4) +-- @a1/a.go -- +@@ -11 +11,3 @@ +-var _ = basicStruct{} //@codeactionedit("}", "refactor.rewrite", a1) ++var _ = basicStruct{ ++ foo: 0, ++} //@codeactionedit("}", "refactor.rewrite", a1) +-- @a2/a.go -- +@@ -18 +18,4 @@ +-var _ = twoArgStruct{} //@codeactionedit("}", "refactor.rewrite", a2) ++var _ = twoArgStruct{ ++ foo: 0, ++ bar: "", ++} //@codeactionedit("}", "refactor.rewrite", a2) +-- @a3/a.go -- +@@ -25 +25,4 @@ +-var _ = nestedStruct{} //@codeactionedit("}", "refactor.rewrite", a3) ++var _ = nestedStruct{ ++ bar: "", ++ basic: basicStruct{}, ++} //@codeactionedit("}", "refactor.rewrite", a3) +-- @a4/a.go -- +@@ -27 +27,3 @@ +-var _ = data.B{} //@codeactionedit("}", "refactor.rewrite", a4) ++var _ = data.B{ ++ ExportedInt: 0, ++} //@codeactionedit("}", "refactor.rewrite", a4) +-- a2.go -- +package fillstruct + +type typedStruct struct { + m map[string]int + s []int + c chan int + c1 <-chan int + a [2]string +} + +var _ = typedStruct{} //@codeactionedit("}", "refactor.rewrite", a21) + +type funStruct struct { + fn func(i int) int +} + +var _ = funStruct{} //@codeactionedit("}", 
"refactor.rewrite", a22) + +type funStructComplex struct { + fn func(i int, s string) (string, int) +} + +var _ = funStructComplex{} //@codeactionedit("}", "refactor.rewrite", a23) + +type funStructEmpty struct { + fn func() +} + +var _ = funStructEmpty{} //@codeactionedit("}", "refactor.rewrite", a24) + +-- @a21/a2.go -- +@@ -11 +11,7 @@ +-var _ = typedStruct{} //@codeactionedit("}", "refactor.rewrite", a21) ++var _ = typedStruct{ ++ m: map[string]int{}, ++ s: []int{}, ++ c: make(chan int), ++ c1: make(<-chan int), ++ a: [2]string{}, ++} //@codeactionedit("}", "refactor.rewrite", a21) +-- @a22/a2.go -- +@@ -17 +17,4 @@ +-var _ = funStruct{} //@codeactionedit("}", "refactor.rewrite", a22) ++var _ = funStruct{ ++ fn: func(i int) int { ++ }, ++} //@codeactionedit("}", "refactor.rewrite", a22) +-- @a23/a2.go -- +@@ -23 +23,4 @@ +-var _ = funStructComplex{} //@codeactionedit("}", "refactor.rewrite", a23) ++var _ = funStructComplex{ ++ fn: func(i int, s string) (string, int) { ++ }, ++} //@codeactionedit("}", "refactor.rewrite", a23) +-- @a24/a2.go -- +@@ -29 +29,4 @@ +-var _ = funStructEmpty{} //@codeactionedit("}", "refactor.rewrite", a24) ++var _ = funStructEmpty{ ++ fn: func() { ++ }, ++} //@codeactionedit("}", "refactor.rewrite", a24) +-- a3.go -- +package fillstruct + +import ( + "go/ast" + "go/token" +) + +type Foo struct { + A int +} + +type Bar struct { + X *Foo + Y *Foo +} + +var _ = Bar{} //@codeactionedit("}", "refactor.rewrite", a31) + +type importedStruct struct { + m map[*ast.CompositeLit]ast.Field + s []ast.BadExpr + a [3]token.Token + c chan ast.EmptyStmt + fn func(ast_decl ast.DeclStmt) ast.Ellipsis + st ast.CompositeLit +} + +var _ = importedStruct{} //@codeactionedit("}", "refactor.rewrite", a32) + +type pointerBuiltinStruct struct { + b *bool + s *string + i *int +} + +var _ = pointerBuiltinStruct{} //@codeactionedit("}", "refactor.rewrite", a33) + +var _ = []ast.BasicLit{ + {}, //@codeactionedit("}", "refactor.rewrite", a34) +} + +var _ = 
[]ast.BasicLit{{}} //@codeactionedit("}", "refactor.rewrite", a35) +-- @a31/a3.go -- +@@ -17 +17,4 @@ +-var _ = Bar{} //@codeactionedit("}", "refactor.rewrite", a31) ++var _ = Bar{ ++ X: &Foo{}, ++ Y: &Foo{}, ++} //@codeactionedit("}", "refactor.rewrite", a31) +-- @a32/a3.go -- +@@ -28 +28,9 @@ +-var _ = importedStruct{} //@codeactionedit("}", "refactor.rewrite", a32) ++var _ = importedStruct{ ++ m: map[*ast.CompositeLit]ast.Field{}, ++ s: []ast.BadExpr{}, ++ a: [3]token.Token{}, ++ c: make(chan ast.EmptyStmt), ++ fn: func(ast_decl ast.DeclStmt) ast.Ellipsis { ++ }, ++ st: ast.CompositeLit{}, ++} //@codeactionedit("}", "refactor.rewrite", a32) +-- @a33/a3.go -- +@@ -36 +36,5 @@ +-var _ = pointerBuiltinStruct{} //@codeactionedit("}", "refactor.rewrite", a33) ++var _ = pointerBuiltinStruct{ ++ b: new(bool), ++ s: new(string), ++ i: new(int), ++} //@codeactionedit("}", "refactor.rewrite", a33) +-- @a34/a3.go -- +@@ -39 +39,5 @@ +- {}, //@codeactionedit("}", "refactor.rewrite", a34) ++ { ++ ValuePos: 0, ++ Kind: 0, ++ Value: "", ++ }, //@codeactionedit("}", "refactor.rewrite", a34) +-- @a35/a3.go -- +@@ -42 +42,5 @@ +-var _ = []ast.BasicLit{{}} //@codeactionedit("}", "refactor.rewrite", a35) ++var _ = []ast.BasicLit{{ ++ ValuePos: 0, ++ Kind: 0, ++ Value: "", ++}} //@codeactionedit("}", "refactor.rewrite", a35) +-- a4.go -- +package fillstruct + +import "go/ast" + +type iStruct struct { + X int +} + +type sStruct struct { + str string +} + +type multiFill struct { + num int + strin string + arr []int +} + +type assignStruct struct { + n ast.Node +} + +func fill() { + var x int + var _ = iStruct{} //@codeactionedit("}", "refactor.rewrite", a41) + + var s string + var _ = sStruct{} //@codeactionedit("}", "refactor.rewrite", a42) + + var n int + _ = []int{} + if true { + arr := []int{1, 2} + } + var _ = multiFill{} //@codeactionedit("}", "refactor.rewrite", a43) + + var node *ast.CompositeLit + var _ = assignStruct{} //@codeactionedit("}", "refactor.rewrite", a45) +} + 
+-- @a41/a4.go -- +@@ -25 +25,3 @@ +- var _ = iStruct{} //@codeactionedit("}", "refactor.rewrite", a41) ++ var _ = iStruct{ ++ X: x, ++ } //@codeactionedit("}", "refactor.rewrite", a41) +-- @a42/a4.go -- +@@ -28 +28,3 @@ +- var _ = sStruct{} //@codeactionedit("}", "refactor.rewrite", a42) ++ var _ = sStruct{ ++ str: s, ++ } //@codeactionedit("}", "refactor.rewrite", a42) +-- @a43/a4.go -- +@@ -35 +35,5 @@ +- var _ = multiFill{} //@codeactionedit("}", "refactor.rewrite", a43) ++ var _ = multiFill{ ++ num: n, ++ strin: s, ++ arr: []int{}, ++ } //@codeactionedit("}", "refactor.rewrite", a43) +-- @a45/a4.go -- +@@ -38 +38,3 @@ +- var _ = assignStruct{} //@codeactionedit("}", "refactor.rewrite", a45) ++ var _ = assignStruct{ ++ n: node, ++ } //@codeactionedit("}", "refactor.rewrite", a45) +-- fill_struct.go -- +package fillstruct + +type StructA struct { + unexportedIntField int + ExportedIntField int + MapA map[int]string + Array []int + StructB +} + +type StructA2 struct { + B *StructB +} + +type StructA3 struct { + B StructB +} + +func fill() { + a := StructA{} //@codeactionedit("}", "refactor.rewrite", fill_struct1) + b := StructA2{} //@codeactionedit("}", "refactor.rewrite", fill_struct2) + c := StructA3{} //@codeactionedit("}", "refactor.rewrite", fill_struct3) + if true { + _ = StructA3{} //@codeactionedit("}", "refactor.rewrite", fill_struct4) + } +} + +-- @fill_struct1/fill_struct.go -- +@@ -20 +20,7 @@ +- a := StructA{} //@codeactionedit("}", "refactor.rewrite", fill_struct1) ++ a := StructA{ ++ unexportedIntField: 0, ++ ExportedIntField: 0, ++ MapA: map[int]string{}, ++ Array: []int{}, ++ StructB: StructB{}, ++ } //@codeactionedit("}", "refactor.rewrite", fill_struct1) +-- @fill_struct2/fill_struct.go -- +@@ -21 +21,3 @@ +- b := StructA2{} //@codeactionedit("}", "refactor.rewrite", fill_struct2) ++ b := StructA2{ ++ B: &StructB{}, ++ } //@codeactionedit("}", "refactor.rewrite", fill_struct2) +-- @fill_struct3/fill_struct.go -- +@@ -22 +22,3 @@ +- c := 
StructA3{} //@codeactionedit("}", "refactor.rewrite", fill_struct3) ++ c := StructA3{ ++ B: StructB{}, ++ } //@codeactionedit("}", "refactor.rewrite", fill_struct3) +-- @fill_struct4/fill_struct.go -- +@@ -24 +24,3 @@ +- _ = StructA3{} //@codeactionedit("}", "refactor.rewrite", fill_struct4) ++ _ = StructA3{ ++ B: StructB{}, ++ } //@codeactionedit("}", "refactor.rewrite", fill_struct4) +-- fill_struct_anon.go -- +package fillstruct + +type StructAnon struct { + a struct{} + b map[string]interface{} + c map[string]struct { + d int + e bool + } +} + +func fill() { + _ := StructAnon{} //@codeactionedit("}", "refactor.rewrite", fill_struct_anon) +} +-- @fill_struct_anon/fill_struct_anon.go -- +@@ -13 +13,5 @@ +- _ := StructAnon{} //@codeactionedit("}", "refactor.rewrite", fill_struct_anon) ++ _ := StructAnon{ ++ a: struct{}{}, ++ b: map[string]interface{}{}, ++ c: map[string]struct{d int; e bool}{}, ++ } //@codeactionedit("}", "refactor.rewrite", fill_struct_anon) +-- fill_struct_nested.go -- +package fillstruct + +type StructB struct { + StructC +} + +type StructC struct { + unexportedInt int +} + +func nested() { + c := StructB{ + StructC: StructC{}, //@codeactionedit("}", "refactor.rewrite", fill_nested) + } +} + +-- @fill_nested/fill_struct_nested.go -- +@@ -13 +13,3 @@ +- StructC: StructC{}, //@codeactionedit("}", "refactor.rewrite", fill_nested) ++ StructC: StructC{ ++ unexportedInt: 0, ++ }, //@codeactionedit("}", "refactor.rewrite", fill_nested) +-- fill_struct_package.go -- +package fillstruct + +import ( + h2 "net/http" + + "golang.org/lsptests/fillstruct/data" +) + +func unexported() { + a := data.B{} //@codeactionedit("}", "refactor.rewrite", fill_struct_package1) + _ = h2.Client{} //@codeactionedit("}", "refactor.rewrite", fill_struct_package2) +} +-- @fill_struct_package1/fill_struct_package.go -- +@@ -10 +10,3 @@ +- a := data.B{} //@codeactionedit("}", "refactor.rewrite", fill_struct_package1) ++ a := data.B{ ++ ExportedInt: 0, ++ } 
//@codeactionedit("}", "refactor.rewrite", fill_struct_package1) +-- @fill_struct_package2/fill_struct_package.go -- +@@ -11 +11,7 @@ +- _ = h2.Client{} //@codeactionedit("}", "refactor.rewrite", fill_struct_package2) ++ _ = h2.Client{ ++ Transport: nil, ++ CheckRedirect: func(req *h2.Request, via []*h2.Request) error { ++ }, ++ Jar: nil, ++ Timeout: 0, ++ } //@codeactionedit("}", "refactor.rewrite", fill_struct_package2) +-- fill_struct_partial.go -- +package fillstruct + +type StructPartialA struct { + PrefilledInt int + UnfilledInt int + StructPartialB +} + +type StructPartialB struct { + PrefilledInt int + UnfilledInt int +} + +func fill() { + a := StructPartialA{ + PrefilledInt: 5, + } //@codeactionedit("}", "refactor.rewrite", fill_struct_partial1) + b := StructPartialB{ + /* this comment should disappear */ + PrefilledInt: 7, // This comment should be blown away. + /* As should + this one */ + } //@codeactionedit("}", "refactor.rewrite", fill_struct_partial2) +} + +-- @fill_struct_partial1/fill_struct_partial.go -- +@@ -16 +16,3 @@ +- PrefilledInt: 5, ++ PrefilledInt: 5, ++ UnfilledInt: 0, ++ StructPartialB: StructPartialB{}, +-- @fill_struct_partial2/fill_struct_partial.go -- +@@ -19,4 +19,2 @@ +- /* this comment should disappear */ +- PrefilledInt: 7, // This comment should be blown away. 
+- /* As should +- this one */ ++ PrefilledInt: 7, ++ UnfilledInt: 0, +-- fill_struct_spaces.go -- +package fillstruct + +type StructD struct { + ExportedIntField int +} + +func spaces() { + d := StructD{} //@codeactionedit("}", "refactor.rewrite", fill_struct_spaces) +} + +-- @fill_struct_spaces/fill_struct_spaces.go -- +@@ -8 +8,3 @@ +- d := StructD{} //@codeactionedit("}", "refactor.rewrite", fill_struct_spaces) ++ d := StructD{ ++ ExportedIntField: 0, ++ } //@codeactionedit("}", "refactor.rewrite", fill_struct_spaces) +-- fill_struct_unsafe.go -- +package fillstruct + +import "unsafe" + +type unsafeStruct struct { + x int + p unsafe.Pointer +} + +func fill() { + _ := unsafeStruct{} //@codeactionedit("}", "refactor.rewrite", fill_struct_unsafe) +} + +-- @fill_struct_unsafe/fill_struct_unsafe.go -- +@@ -11 +11,4 @@ +- _ := unsafeStruct{} //@codeactionedit("}", "refactor.rewrite", fill_struct_unsafe) ++ _ := unsafeStruct{ ++ x: 0, ++ p: nil, ++ } //@codeactionedit("}", "refactor.rewrite", fill_struct_unsafe) +-- typeparams.go -- +package fillstruct + +type emptyStructWithTypeParams[A any] struct{} + +var _ = emptyStructWithTypeParams[int]{} // no suggested fix + +type basicStructWithTypeParams[T any] struct { + foo T +} + +var _ = basicStructWithTypeParams[int]{} //@codeactionedit("}", "refactor.rewrite", typeparams1) + +type twoArgStructWithTypeParams[F, B any] struct { + foo F + bar B +} + +var _ = twoArgStructWithTypeParams[string, int]{} //@codeactionedit("}", "refactor.rewrite", typeparams2) + +var _ = twoArgStructWithTypeParams[int, string]{ + bar: "bar", +} //@codeactionedit("}", "refactor.rewrite", typeparams3) + +type nestedStructWithTypeParams struct { + bar string + basic basicStructWithTypeParams[int] +} + +var _ = nestedStructWithTypeParams{} //@codeactionedit("}", "refactor.rewrite", typeparams4) + +func _[T any]() { + type S struct{ t T } + _ = S{} //@codeactionedit("}", "refactor.rewrite", typeparams5) +} +-- @typeparams1/typeparams.go -- +@@ -11 
+11,3 @@ +-var _ = basicStructWithTypeParams[int]{} //@codeactionedit("}", "refactor.rewrite", typeparams1) ++var _ = basicStructWithTypeParams[int]{ ++ foo: 0, ++} //@codeactionedit("}", "refactor.rewrite", typeparams1) +-- @typeparams2/typeparams.go -- +@@ -18 +18,4 @@ +-var _ = twoArgStructWithTypeParams[string, int]{} //@codeactionedit("}", "refactor.rewrite", typeparams2) ++var _ = twoArgStructWithTypeParams[string, int]{ ++ foo: "", ++ bar: 0, ++} //@codeactionedit("}", "refactor.rewrite", typeparams2) +-- @typeparams3/typeparams.go -- +@@ -21 +21 @@ ++ foo: 0, +-- @typeparams4/typeparams.go -- +@@ -29 +29,4 @@ +-var _ = nestedStructWithTypeParams{} //@codeactionedit("}", "refactor.rewrite", typeparams4) ++var _ = nestedStructWithTypeParams{ ++ bar: "", ++ basic: basicStructWithTypeParams{}, ++} //@codeactionedit("}", "refactor.rewrite", typeparams4) +-- @typeparams5/typeparams.go -- +@@ -33 +33,3 @@ +- _ = S{} //@codeactionedit("}", "refactor.rewrite", typeparams5) ++ _ = S{ ++ t: *new(T), ++ } //@codeactionedit("}", "refactor.rewrite", typeparams5) +-- issue63921.go -- +package fillstruct + +// Test for golang/go#63921: fillstruct panicked with invalid fields. +type invalidStruct struct { + F int + Undefined +} + +func _() { + // Note: the golden content for issue63921 is empty: fillstruct produces no + // edits, but does not panic. + invalidStruct{} //@codeactionedit("}", "refactor.rewrite", issue63921) +} diff --git a/gopls/internal/test/marker/testdata/codeaction/fill_switch.txt b/gopls/internal/test/marker/testdata/codeaction/fill_switch.txt new file mode 100644 index 00000000000..2c1b19e130c --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/fill_switch.txt @@ -0,0 +1,105 @@ +This test checks the behavior of the 'fill switch' code action. +See fill_switch_resolve.txt for same test with resolve support. 
+ +-- flags -- +-ignore_extra_diags + +-- go.mod -- +module golang.org/lsptests/fillswitch + +go 1.18 + +-- data/data.go -- +package data + +type TypeB int + +const ( + TypeBOne TypeB = iota + TypeBTwo + TypeBThree +) + +-- a.go -- +package fillswitch + +import ( + "golang.org/lsptests/fillswitch/data" +) + +type typeA int + +const ( + typeAOne typeA = iota + typeATwo + typeAThree +) + +type notification interface { + isNotification() +} + +type notificationOne struct{} + +func (notificationOne) isNotification() {} + +type notificationTwo struct{} + +func (notificationTwo) isNotification() {} + +func doSwitch() { + var b data.TypeB + switch b { + case data.TypeBOne: //@codeactionedit(":", "refactor.rewrite", a1) + } + + var a typeA + switch a { + case typeAThree: //@codeactionedit(":", "refactor.rewrite", a2) + } + + var n notification + switch n.(type) { //@codeactionedit("{", "refactor.rewrite", a3) + } + + switch nt := n.(type) { //@codeactionedit("{", "refactor.rewrite", a4) + } + + var s struct { + a typeA + } + + switch s.a { + case typeAThree: //@codeactionedit(":", "refactor.rewrite", a5) + } +} +-- @a1/a.go -- +@@ -31 +31,4 @@ ++ case data.TypeBThree: ++ case data.TypeBTwo: ++ default: ++ panic(fmt.Sprintf("unexpected data.TypeB: %#v", b)) +-- @a2/a.go -- +@@ -36 +36,4 @@ ++ case typeAOne: ++ case typeATwo: ++ default: ++ panic(fmt.Sprintf("unexpected fillswitch.typeA: %#v", a)) +-- @a3/a.go -- +@@ -40 +40,4 @@ ++ case notificationOne: ++ case notificationTwo: ++ default: ++ panic(fmt.Sprintf("unexpected fillswitch.notification: %#v", n)) +-- @a4/a.go -- +@@ -43 +43,4 @@ ++ case notificationOne: ++ case notificationTwo: ++ default: ++ panic(fmt.Sprintf("unexpected fillswitch.notification: %#v", nt)) +-- @a5/a.go -- +@@ -51 +51,4 @@ ++ case typeAOne: ++ case typeATwo: ++ default: ++ panic(fmt.Sprintf("unexpected fillswitch.typeA: %#v", s.a)) diff --git a/gopls/internal/test/marker/testdata/codeaction/fill_switch_resolve.txt 
b/gopls/internal/test/marker/testdata/codeaction/fill_switch_resolve.txt new file mode 100644 index 00000000000..504acd6043e --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/fill_switch_resolve.txt @@ -0,0 +1,116 @@ +This test checks the behavior of the 'fill switch' code action, with resolve support. +See fill_switch.txt for same test without resolve support. + +-- capabilities.json -- +{ + "textDocument": { + "codeAction": { + "dataSupport": true, + "resolveSupport": { + "properties": ["edit"] + } + } + } +} +-- flags -- +-ignore_extra_diags + +-- go.mod -- +module golang.org/lsptests/fillswitch + +go 1.18 + +-- data/data.go -- +package data + +type TypeB int + +const ( + TypeBOne TypeB = iota + TypeBTwo + TypeBThree +) + +-- a.go -- +package fillswitch + +import ( + "golang.org/lsptests/fillswitch/data" +) + +type typeA int + +const ( + typeAOne typeA = iota + typeATwo + typeAThree +) + +type notification interface { + isNotification() +} + +type notificationOne struct{} + +func (notificationOne) isNotification() {} + +type notificationTwo struct{} + +func (notificationTwo) isNotification() {} + +func doSwitch() { + var b data.TypeB + switch b { + case data.TypeBOne: //@codeactionedit(":", "refactor.rewrite", a1) + } + + var a typeA + switch a { + case typeAThree: //@codeactionedit(":", "refactor.rewrite", a2) + } + + var n notification + switch n.(type) { //@codeactionedit("{", "refactor.rewrite", a3) + } + + switch nt := n.(type) { //@codeactionedit("{", "refactor.rewrite", a4) + } + + var s struct { + a typeA + } + + switch s.a { + case typeAThree: //@codeactionedit(":", "refactor.rewrite", a5) + } +} +-- @a1/a.go -- +@@ -31 +31,4 @@ ++ case data.TypeBThree: ++ case data.TypeBTwo: ++ default: ++ panic(fmt.Sprintf("unexpected data.TypeB: %#v", b)) +-- @a2/a.go -- +@@ -36 +36,4 @@ ++ case typeAOne: ++ case typeATwo: ++ default: ++ panic(fmt.Sprintf("unexpected fillswitch.typeA: %#v", a)) +-- @a3/a.go -- +@@ -40 +40,4 @@ ++ case 
notificationOne: ++ case notificationTwo: ++ default: ++ panic(fmt.Sprintf("unexpected fillswitch.notification: %#v", n)) +-- @a4/a.go -- +@@ -43 +43,4 @@ ++ case notificationOne: ++ case notificationTwo: ++ default: ++ panic(fmt.Sprintf("unexpected fillswitch.notification: %#v", nt)) +-- @a5/a.go -- +@@ -51 +51,4 @@ ++ case typeAOne: ++ case typeATwo: ++ default: ++ panic(fmt.Sprintf("unexpected fillswitch.typeA: %#v", s.a)) diff --git a/gopls/internal/regtest/marker/testdata/codeaction/functionextraction.txt b/gopls/internal/test/marker/testdata/codeaction/functionextraction.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/codeaction/functionextraction.txt rename to gopls/internal/test/marker/testdata/codeaction/functionextraction.txt diff --git a/gopls/internal/regtest/marker/testdata/codeaction/functionextraction_issue44813.txt b/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue44813.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/codeaction/functionextraction_issue44813.txt rename to gopls/internal/test/marker/testdata/codeaction/functionextraction_issue44813.txt diff --git a/gopls/internal/test/marker/testdata/codeaction/grouplines.txt b/gopls/internal/test/marker/testdata/codeaction/grouplines.txt new file mode 100644 index 00000000000..8d1134c5d6c --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/grouplines.txt @@ -0,0 +1,206 @@ +This test exercises the refactoring of putting arguments, return values, and composite literal elements into a +single line. 
+ +-- go.mod -- +module unused.mod + +go 1.18 + +-- func_arg/func_arg.go -- +package func_arg + +func A( + a string, + b, c int64, + x int /*@codeaction("x", "x", "refactor.rewrite", func_arg)*/, + y int, +) (r1 string, r2, r3 int64, r4 int, r5 int) { + return a, b, c, x, y +} + +-- @func_arg/func_arg/func_arg.go -- +package func_arg + +func A(a string, b, c int64, x int /*@codeaction("x", "x", "refactor.rewrite", func_arg)*/, y int) (r1 string, r2, r3 int64, r4 int, r5 int) { + return a, b, c, x, y +} + +-- func_ret/func_ret.go -- +package func_ret + +func A(a string, b, c int64, x int, y int) ( + r1 string /*@codeaction("r1", "r1", "refactor.rewrite", func_ret)*/, + r2, r3 int64, + r4 int, + r5 int, +) { + return a, b, c, x, y +} + +-- @func_ret/func_ret/func_ret.go -- +package func_ret + +func A(a string, b, c int64, x int, y int) (r1 string /*@codeaction("r1", "r1", "refactor.rewrite", func_ret)*/, r2, r3 int64, r4 int, r5 int) { + return a, b, c, x, y +} + +-- functype_arg/functype_arg.go -- +package functype_arg + +type A func( + a string, + b, c int64, + x int /*@codeaction("x", "x", "refactor.rewrite", functype_arg)*/, + y int, +) (r1 string, r2, r3 int64, r4 int, r5 int) + +-- @functype_arg/functype_arg/functype_arg.go -- +package functype_arg + +type A func(a string, b, c int64, x int /*@codeaction("x", "x", "refactor.rewrite", functype_arg)*/, y int) (r1 string, r2, r3 int64, r4 int, r5 int) + +-- functype_ret/functype_ret.go -- +package functype_ret + +type A func(a string, b, c int64, x int, y int) ( + r1 string /*@codeaction("r1", "r1", "refactor.rewrite", functype_ret)*/, + r2, r3 int64, + r4 int, + r5 int, +) + +-- @functype_ret/functype_ret/functype_ret.go -- +package functype_ret + +type A func(a string, b, c int64, x int, y int) (r1 string /*@codeaction("r1", "r1", "refactor.rewrite", functype_ret)*/, r2, r3 int64, r4 int, r5 int) + +-- func_call/func_call.go -- +package func_call + +import "fmt" + +func a() { + fmt.Println( + 1 
/*@codeaction("1", "1", "refactor.rewrite", func_call)*/, + 2, + 3, + fmt.Sprintf("hello %d", 4), + ) +} + +-- @func_call/func_call/func_call.go -- +package func_call + +import "fmt" + +func a() { + fmt.Println(1 /*@codeaction("1", "1", "refactor.rewrite", func_call)*/, 2, 3, fmt.Sprintf("hello %d", 4)) +} + +-- indent/indent.go -- +package indent + +import "fmt" + +func a() { + fmt.Println( + 1, + 2, + 3, + fmt.Sprintf( + "hello %d" /*@codeaction("hello", "hello", "refactor.rewrite", indent, "Join parameters into one line")*/, + 4, + )) +} + +-- @indent/indent/indent.go -- +package indent + +import "fmt" + +func a() { + fmt.Println( + 1, + 2, + 3, + fmt.Sprintf("hello %d" /*@codeaction("hello", "hello", "refactor.rewrite", indent, "Join parameters into one line")*/, 4)) +} + +-- structelts/structelts.go -- +package structelts + +type A struct{ + a int + b int +} + +func a() { + _ = A{ + a: 1, + b: 2 /*@codeaction("b", "b", "refactor.rewrite", structelts)*/, + } +} + +-- @structelts/structelts/structelts.go -- +package structelts + +type A struct{ + a int + b int +} + +func a() { + _ = A{a: 1, b: 2 /*@codeaction("b", "b", "refactor.rewrite", structelts)*/} +} + +-- sliceelts/sliceelts.go -- +package sliceelts + +func a() { + _ = []int{ + 1 /*@codeaction("1", "1", "refactor.rewrite", sliceelts)*/, + 2, + } +} + +-- @sliceelts/sliceelts/sliceelts.go -- +package sliceelts + +func a() { + _ = []int{1 /*@codeaction("1", "1", "refactor.rewrite", sliceelts)*/, 2} +} + +-- mapelts/mapelts.go -- +package mapelts + +func a() { + _ = map[string]int{ + "a": 1 /*@codeaction("1", "1", "refactor.rewrite", mapelts)*/, + "b": 2, + } +} + +-- @mapelts/mapelts/mapelts.go -- +package mapelts + +func a() { + _ = map[string]int{"a": 1 /*@codeaction("1", "1", "refactor.rewrite", mapelts)*/, "b": 2} +} + +-- starcomment/starcomment.go -- +package starcomment + +func A( + /*1*/ x /*2*/ string /*3*/ /*@codeaction("x", "x", "refactor.rewrite", starcomment)*/, + /*4*/ y /*5*/ int /*6*/, +) 
(string, int) { + return x, y +} + +-- @starcomment/starcomment/starcomment.go -- +package starcomment + +func A(/*1*/ x /*2*/ string /*3*/ /*@codeaction("x", "x", "refactor.rewrite", starcomment)*/, /*4*/ y /*5*/ int /*6*/) (string, int) { + return x, y +} + diff --git a/gopls/internal/test/marker/testdata/codeaction/import-shadows-builtin.txt b/gopls/internal/test/marker/testdata/codeaction/import-shadows-builtin.txt new file mode 100644 index 00000000000..aeb86a22686 --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/import-shadows-builtin.txt @@ -0,0 +1,55 @@ +This is a regression test for bug #63592 in "organize imports" whereby +the new imports would shadow predeclared names. + +In the original example, the conflict was between predeclared error +type and the unfortunately named package github.com/coreos/etcd/error, +but this example uses a package with the ludicrous name of complex128. + +The new behavior is that we will not attempt to import packages +that shadow predeclared names. (Ideally we would do that only if +the predeclared name is actually referenced in the file, which +complex128 happens to be in this example, but that's a trickier +analysis than the internal/imports package is game for.) + +The name complex127 works as usual. 
+ +-- go.mod -- +module example.com +go 1.18 + +-- complex128/a.go -- +package complex128 + +var V int + +-- complex127/a.go -- +package complex127 + +var V int + +-- main.go -- +package main + +import () //@codeaction("import", "", "source.organizeImports", out) + +func main() { + complex128.V() //@diag("V", re"type complex128 has no field") + complex127.V() //@diag("complex127", re"(undeclared|undefined)") +} + +func _() { + var _ complex128 = 1 + 2i +} +-- @out/main.go -- +package main + +import "example.com/complex127" //@codeaction("import", "", "source.organizeImports", out) + +func main() { + complex128.V() //@diag("V", re"type complex128 has no field") + complex127.V() //@diag("complex127", re"(undeclared|undefined)") +} + +func _() { + var _ complex128 = 1 + 2i +} diff --git a/gopls/internal/regtest/marker/testdata/codeaction/imports.txt b/gopls/internal/test/marker/testdata/codeaction/imports.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/codeaction/imports.txt rename to gopls/internal/test/marker/testdata/codeaction/imports.txt diff --git a/gopls/internal/test/marker/testdata/codeaction/infertypeargs.txt b/gopls/internal/test/marker/testdata/codeaction/infertypeargs.txt new file mode 100644 index 00000000000..b622efdc358 --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/infertypeargs.txt @@ -0,0 +1,25 @@ +This test verifies the infertypeargs refactoring. 
+ +-- go.mod -- +module mod.test/infertypeargs + +go 1.18 + +-- p.go -- +package infertypeargs + +func app[S interface{ ~[]E }, E interface{}](s S, e E) S { + return append(s, e) +} + +func _() { + _ = app[[]int] + _ = app[[]int, int] + _ = app[[]int]([]int{}, 0) //@suggestedfix("[[]int]", re"unnecessary type arguments", infer) + _ = app([]int{}, 0) +} + +-- @infer/p.go -- +@@ -10 +10 @@ +- _ = app[[]int]([]int{}, 0) //@suggestedfix("[[]int]", re"unnecessary type arguments", infer) ++ _ = app([]int{}, 0) //@suggestedfix("[[]int]", re"unnecessary type arguments", infer) diff --git a/gopls/internal/test/marker/testdata/codeaction/inline.txt b/gopls/internal/test/marker/testdata/codeaction/inline.txt new file mode 100644 index 00000000000..0c5bcb41658 --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/inline.txt @@ -0,0 +1,24 @@ +This is a minimal test of the refactor.inline code action, without resolve support. +See inline_resolve.txt for same test with resolve support. + +-- go.mod -- +module example.com/codeaction +go 1.18 + +-- a/a.go -- +package a + +func _() { + println(add(1, 2)) //@codeaction("add", ")", "refactor.inline", inline) +} + +func add(x, y int) int { return x + y } + +-- @inline/a/a.go -- +package a + +func _() { + println(1 + 2) //@codeaction("add", ")", "refactor.inline", inline) +} + +func add(x, y int) int { return x + y } diff --git a/gopls/internal/test/marker/testdata/codeaction/inline_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/inline_resolve.txt new file mode 100644 index 00000000000..02c27e6505b --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/inline_resolve.txt @@ -0,0 +1,35 @@ +This is a minimal test of the refactor.inline code actions, with resolve support. +See inline.txt for same test without resolve support. 
+ +-- capabilities.json -- +{ + "textDocument": { + "codeAction": { + "dataSupport": true, + "resolveSupport": { + "properties": ["edit"] + } + } + } +} +-- go.mod -- +module example.com/codeaction +go 1.18 + +-- a/a.go -- +package a + +func _() { + println(add(1, 2)) //@codeaction("add", ")", "refactor.inline", inline) +} + +func add(x, y int) int { return x + y } + +-- @inline/a/a.go -- +package a + +func _() { + println(1 + 2) //@codeaction("add", ")", "refactor.inline", inline) +} + +func add(x, y int) int { return x + y } diff --git a/gopls/internal/test/marker/testdata/codeaction/invertif.txt b/gopls/internal/test/marker/testdata/codeaction/invertif.txt new file mode 100644 index 00000000000..57e77530844 --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/invertif.txt @@ -0,0 +1,218 @@ +This test exercises the 'invert if condition' code action. + +-- p.go -- +package invertif + +import ( + "fmt" + "os" +) + +func Boolean() { + b := true + if b { //@codeactionedit("if b", "refactor.rewrite", boolean) + fmt.Println("A") + } else { + fmt.Println("B") + } +} + +func BooleanFn() { + if os.IsPathSeparator('X') { //@codeactionedit("if os.IsPathSeparator('X')", "refactor.rewrite", boolean_fn) + fmt.Println("A") + } else { + fmt.Println("B") + } +} + +// Note that the comment here jumps to the wrong location. +func DontRemoveParens() { + a := false + b := true + if !(a || + b) { //@codeactionedit("b", "refactor.rewrite", dont_remove_parens) + fmt.Println("A") + } else { + fmt.Println("B") + } +} + +func ElseIf() { + // No inversion expected when there's not else clause + if len(os.Args) > 2 { + fmt.Println("A") + } + + // No inversion expected for else-if, that would become unreadable + if len(os.Args) > 2 { + fmt.Println("A") + } else if os.Args[0] == "X" { //@codeactionedit(re"if os.Args.0. 
== .X.", "refactor.rewrite", else_if) + fmt.Println("B") + } else { + fmt.Println("C") + } +} + +func GreaterThan() { + if len(os.Args) > 2 { //@codeactionedit("i", "refactor.rewrite", greater_than) + fmt.Println("A") + } else { + fmt.Println("B") + } +} + +func NotBoolean() { + b := true + if !b { //@codeactionedit("if !b", "refactor.rewrite", not_boolean) + fmt.Println("A") + } else { + fmt.Println("B") + } +} + +func RemoveElse() { + if true { //@codeactionedit("if true", "refactor.rewrite", remove_else) + fmt.Println("A") + } else { + fmt.Println("B") + return + } + + fmt.Println("C") +} + +func RemoveParens() { + b := true + if !(b) { //@codeactionedit("if", "refactor.rewrite", remove_parens) + fmt.Println("A") + } else { + fmt.Println("B") + } +} + +func Semicolon() { + if _, err := fmt.Println("x"); err != nil { //@codeactionedit("if", "refactor.rewrite", semicolon) + fmt.Println("A") + } else { + fmt.Println("B") + } +} + +func SemicolonAnd() { + if n, err := fmt.Println("x"); err != nil && n > 0 { //@codeactionedit("f", "refactor.rewrite", semicolon_and) + fmt.Println("A") + } else { + fmt.Println("B") + } +} + +func SemicolonOr() { + if n, err := fmt.Println("x"); err != nil || n < 5 { //@codeactionedit(re"if n, err := fmt.Println..x..; err != nil .. 
n < 5", "refactor.rewrite", semicolon_or) + fmt.Println("A") + } else { + fmt.Println("B") + } +} + +-- @boolean/p.go -- +@@ -10,3 +10 @@ +- if b { //@codeactionedit("if b", "refactor.rewrite", boolean) +- fmt.Println("A") +- } else { ++ if !b { +@@ -14 +12,2 @@ ++ } else { //@codeactionedit("if b", "refactor.rewrite", boolean) ++ fmt.Println("A") +-- @boolean_fn/p.go -- +@@ -18,3 +18 @@ +- if os.IsPathSeparator('X') { //@codeactionedit("if os.IsPathSeparator('X')", "refactor.rewrite", boolean_fn) +- fmt.Println("A") +- } else { ++ if !os.IsPathSeparator('X') { +@@ -22 +20,2 @@ ++ } else { //@codeactionedit("if os.IsPathSeparator('X')", "refactor.rewrite", boolean_fn) ++ fmt.Println("A") +-- @dont_remove_parens/p.go -- +@@ -29,4 +29,2 @@ +- if !(a || +- b) { //@codeactionedit("b", "refactor.rewrite", dont_remove_parens) +- fmt.Println("A") +- } else { ++ if (a || ++ b) { +@@ -34 +32,2 @@ ++ } else { //@codeactionedit("b", "refactor.rewrite", dont_remove_parens) ++ fmt.Println("A") +-- @else_if/p.go -- +@@ -46,3 +46 @@ +- } else if os.Args[0] == "X" { //@codeactionedit(re"if os.Args.0. == .X.", "refactor.rewrite", else_if) +- fmt.Println("B") +- } else { ++ } else if os.Args[0] != "X" { +@@ -50 +48,2 @@ ++ } else { //@codeactionedit(re"if os.Args.0. 
== .X.", "refactor.rewrite", else_if) ++ fmt.Println("B") +-- @greater_than/p.go -- +@@ -54,3 +54 @@ +- if len(os.Args) > 2 { //@codeactionedit("i", "refactor.rewrite", greater_than) +- fmt.Println("A") +- } else { ++ if len(os.Args) <= 2 { +@@ -58 +56,2 @@ ++ } else { //@codeactionedit("i", "refactor.rewrite", greater_than) ++ fmt.Println("A") +-- @not_boolean/p.go -- +@@ -63,3 +63 @@ +- if !b { //@codeactionedit("if !b", "refactor.rewrite", not_boolean) +- fmt.Println("A") +- } else { ++ if b { +@@ -67 +65,2 @@ ++ } else { //@codeactionedit("if !b", "refactor.rewrite", not_boolean) ++ fmt.Println("A") +-- @remove_else/p.go -- +@@ -71,3 +71 @@ +- if true { //@codeactionedit("if true", "refactor.rewrite", remove_else) +- fmt.Println("A") +- } else { ++ if false { +@@ -78 +76,3 @@ ++ //@codeactionedit("if true", "refactor.rewrite", remove_else) ++ fmt.Println("A") ++ +-- @remove_parens/p.go -- +@@ -83,3 +83 @@ +- if !(b) { //@codeactionedit("if", "refactor.rewrite", remove_parens) +- fmt.Println("A") +- } else { ++ if b { +@@ -87 +85,2 @@ ++ } else { //@codeactionedit("if", "refactor.rewrite", remove_parens) ++ fmt.Println("A") +-- @semicolon/p.go -- +@@ -91,3 +91 @@ +- if _, err := fmt.Println("x"); err != nil { //@codeactionedit("if", "refactor.rewrite", semicolon) +- fmt.Println("A") +- } else { ++ if _, err := fmt.Println("x"); err == nil { +@@ -95 +93,2 @@ ++ } else { //@codeactionedit("if", "refactor.rewrite", semicolon) ++ fmt.Println("A") +-- @semicolon_and/p.go -- +@@ -99,3 +99 @@ +- if n, err := fmt.Println("x"); err != nil && n > 0 { //@codeactionedit("f", "refactor.rewrite", semicolon_and) +- fmt.Println("A") +- } else { ++ if n, err := fmt.Println("x"); err == nil || n <= 0 { +@@ -103 +101,2 @@ ++ } else { //@codeactionedit("f", "refactor.rewrite", semicolon_and) ++ fmt.Println("A") +-- @semicolon_or/p.go -- +@@ -107,3 +107 @@ +- if n, err := fmt.Println("x"); err != nil || n < 5 { //@codeactionedit(re"if n, err := fmt.Println..x..; err != nil .. 
n < 5", "refactor.rewrite", semicolon_or) +- fmt.Println("A") +- } else { ++ if n, err := fmt.Println("x"); err == nil && n >= 5 { +@@ -111 +109,2 @@ ++ } else { //@codeactionedit(re"if n, err := fmt.Println..x..; err != nil .. n < 5", "refactor.rewrite", semicolon_or) ++ fmt.Println("A") diff --git a/gopls/internal/test/marker/testdata/codeaction/issue64558.txt b/gopls/internal/test/marker/testdata/codeaction/issue64558.txt new file mode 100644 index 00000000000..59aaffba371 --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/issue64558.txt @@ -0,0 +1,14 @@ +Test of an inlining failure due to an ill-typed input program (#64558). + +-- go.mod -- +module example.com +go 1.18 + +-- a/a.go -- +package a + +func _() { + f(1, 2) //@ diag("2", re"too many arguments"), codeactionerr("f", ")", "refactor.inline", re`inlining failed \("args/params mismatch"\), likely because inputs were ill-typed`) +} + +func f(int) {} diff --git a/gopls/internal/regtest/marker/testdata/codeaction/removeparam.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam.txt similarity index 86% rename from gopls/internal/regtest/marker/testdata/codeaction/removeparam.txt rename to gopls/internal/test/marker/testdata/codeaction/removeparam.txt index ad2289284d8..25ec6ae1d96 100644 --- a/gopls/internal/regtest/marker/testdata/codeaction/removeparam.txt +++ b/gopls/internal/test/marker/testdata/codeaction/removeparam.txt @@ -1,4 +1,5 @@ This test exercises the refactoring to remove unused parameters. +See removeparam_resolve.txt for same test with resolve support. 
-- go.mod -- module unused.mod @@ -98,7 +99,7 @@ func _() { -- field/field.go -- package field -func Field(x int, field int) { //@codeaction("int", "int", "refactor.rewrite", field) +func Field(x int, field int) { //@codeaction("int", "int", "refactor.rewrite", field, "Refactor: remove unused parameter") } func _() { @@ -107,7 +108,7 @@ func _() { -- @field/field/field.go -- package field -func Field(field int) { //@codeaction("int", "int", "refactor.rewrite", field) +func Field(field int) { //@codeaction("int", "int", "refactor.rewrite", field, "Refactor: remove unused parameter") } func _() { @@ -161,7 +162,7 @@ func i() []any -- ellipsis2/ellipsis2.go -- package ellipsis2 -func Ellipsis2(_, _ int, rest ...int) { //@codeaction("_", "_", "refactor.rewrite", ellipsis2) +func Ellipsis2(_, _ int, rest ...int) { //@codeaction("_", "_", "refactor.rewrite", ellipsis2, "Refactor: remove unused parameter") } func _() { @@ -175,7 +176,7 @@ func h() (int, int) -- @ellipsis2/ellipsis2/ellipsis2.go -- package ellipsis2 -func Ellipsis2(_ int, rest ...int) { //@codeaction("_", "_", "refactor.rewrite", ellipsis2) +func Ellipsis2(_ int, rest ...int) { //@codeaction("_", "_", "refactor.rewrite", ellipsis2, "Refactor: remove unused parameter") } func _() { @@ -202,7 +203,7 @@ func _() { -- effects/effects.go -- package effects -func effects(x, y int) int { //@codeaction("y", "y", "refactor.rewrite", effects) +func effects(x, y int) int { //@ diag("y", re"unused"), codeaction("y", "y", "refactor.rewrite", effects) return x } @@ -216,7 +217,7 @@ func _() { -- @effects/effects/effects.go -- package effects -func effects(x int) int { //@codeaction("y", "y", "refactor.rewrite", effects) +func effects(x int) int { //@ diag("y", re"unused"), codeaction("y", "y", "refactor.rewrite", effects) return x } diff --git a/gopls/internal/regtest/marker/testdata/codeaction/removeparam_formatting.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_formatting.txt similarity index 100% 
rename from gopls/internal/regtest/marker/testdata/codeaction/removeparam_formatting.txt rename to gopls/internal/test/marker/testdata/codeaction/removeparam_formatting.txt diff --git a/gopls/internal/regtest/marker/testdata/codeaction/removeparam_funcvalue.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_funcvalue.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/codeaction/removeparam_funcvalue.txt rename to gopls/internal/test/marker/testdata/codeaction/removeparam_funcvalue.txt diff --git a/gopls/internal/regtest/marker/testdata/codeaction/removeparam_imports.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/codeaction/removeparam_imports.txt rename to gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt diff --git a/gopls/internal/test/marker/testdata/codeaction/removeparam_method.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_method.txt new file mode 100644 index 00000000000..8d09afb84dc --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_method.txt @@ -0,0 +1,123 @@ +This test verifies that gopls can remove unused parameters from methods. + +Specifically, check +1. basic removal of unused parameters, when the receiver is named, locally and + across package boundaries +2. handling of unnamed receivers + +-- go.mod -- +module example.com/rm + +go 1.20 + +-- basic.go -- +package rm + +type Basic int + +func (t Basic) Foo(x int) { //@codeaction("x", "x", "refactor.rewrite", basic) +} + +func _(b Basic) { + b.Foo(1) + // TODO(rfindley): methodexprs should not get rewritten as methods. 
+ Basic.Foo(1, 2) +} + +-- basicuse/p.go -- +package basicuse + +import "example.com/rm" + +func _() { + x := new(rm.Basic) + x.Foo(sideEffects()) + rm.Basic.Foo(1,2) +} + +func sideEffects() int + +-- @basic/basic.go -- +package rm + +type Basic int + +func (t Basic) Foo() { //@codeaction("x", "x", "refactor.rewrite", basic) +} + +func _(b Basic) { + b.Foo() + // TODO(rfindley): methodexprs should not get rewritten as methods. + Basic(1).Foo() +} +-- @basic/basicuse/p.go -- +package basicuse + +import "example.com/rm" + +func _() { + x := new(rm.Basic) + var ( + t rm.Basic = *x + _ int = sideEffects() + ) + t.Foo() + rm.Basic(1).Foo() +} + +func sideEffects() int +-- missingrecv.go -- +package rm + +type Missing struct{} + +var r2 int + +func (Missing) M(a, b, c, r0 int) (r1 int) { //@codeaction("b", "b", "refactor.rewrite", missingrecv) + return a + c +} + +func _() { + m := &Missing{} + _ = m.M(1, 2, 3, 4) +} + +-- missingrecvuse/p.go -- +package missingrecvuse + +import "example.com/rm" + +func _() { + x := rm.Missing{} + x.M(1, sideEffects(), 3, 4) +} + +func sideEffects() int + +-- @missingrecv/missingrecv.go -- +package rm + +type Missing struct{} + +var r2 int + +func (Missing) M(a, c, r0 int) (r1 int) { //@codeaction("b", "b", "refactor.rewrite", missingrecv) + return a + c +} + +func _() { + m := &Missing{} + _ = (*m).M(1, 3, 4) +} +-- @missingrecv/missingrecvuse/p.go -- +package missingrecvuse + +import "example.com/rm" + +func _() { + x := rm.Missing{} + var _ int = sideEffects() + x.M(1, 3, 4) +} + +func sideEffects() int diff --git a/gopls/internal/test/marker/testdata/codeaction/removeparam_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_resolve.txt new file mode 100644 index 00000000000..c67e8a5d039 --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_resolve.txt @@ -0,0 +1,258 @@ +This test exercises the refactoring to remove unused parameters, with resolve support. 
+See removeparam.txt for same test without resolve support. + +-- capabilities.json -- +{ + "textDocument": { + "codeAction": { + "dataSupport": true, + "resolveSupport": { + "properties": ["edit"] + } + } + } +} +-- go.mod -- +module unused.mod + +go 1.18 + +-- a/a.go -- +package a + +func A(x, unused int) int { //@codeaction("unused", "unused", "refactor.rewrite", a) + return x +} + +-- @a/a/a.go -- +package a + +func A(x int) int { //@codeaction("unused", "unused", "refactor.rewrite", a) + return x +} + +-- a/a2.go -- +package a + +func _() { + A(1, 2) +} + +-- a/a_test.go -- +package a + +func _() { + A(1, 2) +} + +-- a/a_x_test.go -- +package a_test + +import "unused.mod/a" + +func _() { + a.A(1, 2) +} + +-- b/b.go -- +package b + +import "unused.mod/a" + +func f() int { + return 1 +} + +func g() int { + return 2 +} + +func _() { + a.A(f(), 1) +} + +-- @a/a/a2.go -- +package a + +func _() { + A(1) +} +-- @a/a/a_test.go -- +package a + +func _() { + A(1) +} +-- @a/a/a_x_test.go -- +package a_test + +import "unused.mod/a" + +func _() { + a.A(1) +} +-- @a/b/b.go -- +package b + +import "unused.mod/a" + +func f() int { + return 1 +} + +func g() int { + return 2 +} + +func _() { + a.A(f()) +} +-- field/field.go -- +package field + +func Field(x int, field int) { //@codeaction("int", "int", "refactor.rewrite", field, "Refactor: remove unused parameter") +} + +func _() { + Field(1, 2) +} +-- @field/field/field.go -- +package field + +func Field(field int) { //@codeaction("int", "int", "refactor.rewrite", field, "Refactor: remove unused parameter") +} + +func _() { + Field(2) +} +-- ellipsis/ellipsis.go -- +package ellipsis + +func Ellipsis(...any) { //@codeaction("any", "any", "refactor.rewrite", ellipsis) +} + +func _() { + // TODO(rfindley): investigate the broken formatting resulting from these inlinings. + Ellipsis() + Ellipsis(1) + Ellipsis(1, 2) + Ellipsis(1, f(), g()) + Ellipsis(h()) + Ellipsis(i()...) 
+} + +func f() int +func g() int +func h() (int, int) +func i() []any + +-- @ellipsis/ellipsis/ellipsis.go -- +package ellipsis + +func Ellipsis() { //@codeaction("any", "any", "refactor.rewrite", ellipsis) +} + +func _() { + // TODO(rfindley): investigate the broken formatting resulting from these inlinings. + Ellipsis() + Ellipsis() + Ellipsis() + var _ []any = []any{1, f(), g()} + Ellipsis() + func(_ ...any) { + Ellipsis() + }(h()) + var _ []any = i() + Ellipsis() +} + +func f() int +func g() int +func h() (int, int) +func i() []any +-- ellipsis2/ellipsis2.go -- +package ellipsis2 + +func Ellipsis2(_, _ int, rest ...int) { //@codeaction("_", "_", "refactor.rewrite", ellipsis2, "Refactor: remove unused parameter") +} + +func _() { + Ellipsis2(1,2,3) + Ellipsis2(h()) + Ellipsis2(1,2, []int{3, 4}...) +} + +func h() (int, int) + +-- @ellipsis2/ellipsis2/ellipsis2.go -- +package ellipsis2 + +func Ellipsis2(_ int, rest ...int) { //@codeaction("_", "_", "refactor.rewrite", ellipsis2, "Refactor: remove unused parameter") +} + +func _() { + Ellipsis2(2, []int{3}...) + func(_, blank0 int, rest ...int) { + Ellipsis2(blank0, rest...) + }(h()) + Ellipsis2(2, []int{3, 4}...) 
+} + +func h() (int, int) +-- overlapping/overlapping.go -- +package overlapping + +func Overlapping(i int) int { //@codeactionerr(re"(i) int", re"(i) int", "refactor.rewrite", re"overlapping") + return 0 +} + +func _() { + x := Overlapping(Overlapping(0)) + _ = x +} + +-- effects/effects.go -- +package effects + +func effects(x, y int) int { //@codeaction("y", "y", "refactor.rewrite", effects), diag("y", re"unused") + return x +} + +func f() int +func g() int + +func _() { + effects(f(), g()) + effects(f(), g()) +} +-- @effects/effects/effects.go -- +package effects + +func effects(x int) int { //@codeaction("y", "y", "refactor.rewrite", effects), diag("y", re"unused") + return x +} + +func f() int +func g() int + +func _() { + var x, _ int = f(), g() + effects(x) + { + var x, _ int = f(), g() + effects(x) + } +} +-- recursive/recursive.go -- +package recursive + +func Recursive(x int) int { //@codeaction("x", "x", "refactor.rewrite", recursive) + return Recursive(1) +} + +-- @recursive/recursive/recursive.go -- +package recursive + +func Recursive() int { //@codeaction("x", "x", "refactor.rewrite", recursive) + return Recursive() +} diff --git a/gopls/internal/test/marker/testdata/codeaction/removeparam_satisfies.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_satisfies.txt new file mode 100644 index 00000000000..e4d8eb9e74b --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_satisfies.txt @@ -0,0 +1,62 @@ +This test verifies that gopls can remove unused parameters from methods, +when that method satisfies an interface. + +For now, we just update static calls. In the future, we should compute the set +of dynamic calls that must change (and therefore, the set of concrete functions +that must be modified), in order to produce the desired outcome for our users. + +Doing so would be more complicated, so for now this test simply records the +current behavior. 
+ +-- go.mod -- +module example.com/rm + +go 1.20 + +-- p.go -- +package rm + +type T int + +func (t T) Foo(x int) { //@codeaction("x", "x", "refactor.rewrite", basic) +} + +-- use/use.go -- +package use + +import "example.com/rm" + +type Fooer interface{ + Foo(int) +} + +var _ Fooer = rm.T(0) + +func _() { + var x rm.T + x.Foo(1) +} +-- @basic/p.go -- +package rm + +type T int + +func (t T) Foo() { //@codeaction("x", "x", "refactor.rewrite", basic) +} + +-- @basic/use/use.go -- +package use + +import "example.com/rm" + +type Fooer interface { + Foo(int) +} + +var _ Fooer = rm.T(0) + +func _() { + var x rm.T + var t rm.T = x + t.Foo() +} diff --git a/gopls/internal/test/marker/testdata/codeaction/removeparam_witherrs.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_witherrs.txt new file mode 100644 index 00000000000..60080028f0e --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_witherrs.txt @@ -0,0 +1,11 @@ +This test checks that we can't remove parameters for packages with errors. + +-- p.go -- +package p + +func foo(unused int) { //@codeactionerr("unused", "unused", "refactor.rewrite", re"found 0") +} + +func _() { + foo("") //@diag(`""`, re"cannot use") +} diff --git a/gopls/internal/test/marker/testdata/codeaction/splitlines.txt b/gopls/internal/test/marker/testdata/codeaction/splitlines.txt new file mode 100644 index 00000000000..a02e39505e5 --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/splitlines.txt @@ -0,0 +1,223 @@ +This test exercises the refactoring of putting arguments, return values, and composite literal elements +into separate lines. 
+ +-- go.mod -- +module unused.mod + +go 1.18 + +-- func_arg/func_arg.go -- +package func_arg + +func A(a string, b, c int64, x int, y int) (r1 string, r2, r3 int64, r4 int, r5 int) { //@codeaction("x", "x", "refactor.rewrite", func_arg) + return a, b, c, x, y +} + +-- @func_arg/func_arg/func_arg.go -- +package func_arg + +func A( + a string, + b, c int64, + x int, + y int, +) (r1 string, r2, r3 int64, r4 int, r5 int) { //@codeaction("x", "x", "refactor.rewrite", func_arg) + return a, b, c, x, y +} + +-- func_ret/func_ret.go -- +package func_ret + +func A(a string, b, c int64, x int, y int) (r1 string, r2, r3 int64, r4 int, r5 int) { //@codeaction("r1", "r1", "refactor.rewrite", func_ret) + return a, b, c, x, y +} + +-- @func_ret/func_ret/func_ret.go -- +package func_ret + +func A(a string, b, c int64, x int, y int) ( + r1 string, + r2, r3 int64, + r4 int, + r5 int, +) { //@codeaction("r1", "r1", "refactor.rewrite", func_ret) + return a, b, c, x, y +} + +-- functype_arg/functype_arg.go -- +package functype_arg + +type A func(a string, b, c int64, x int, y int) (r1 string, r2, r3 int64, r4 int, r5 int) //@codeaction("x", "x", "refactor.rewrite", functype_arg) + +-- @functype_arg/functype_arg/functype_arg.go -- +package functype_arg + +type A func( + a string, + b, c int64, + x int, + y int, +) (r1 string, r2, r3 int64, r4 int, r5 int) //@codeaction("x", "x", "refactor.rewrite", functype_arg) + +-- functype_ret/functype_ret.go -- +package functype_ret + +type A func(a string, b, c int64, x int, y int) (r1 string, r2, r3 int64, r4 int, r5 int) //@codeaction("r1", "r1", "refactor.rewrite", functype_ret) + +-- @functype_ret/functype_ret/functype_ret.go -- +package functype_ret + +type A func(a string, b, c int64, x int, y int) ( + r1 string, + r2, r3 int64, + r4 int, + r5 int, +) //@codeaction("r1", "r1", "refactor.rewrite", functype_ret) + +-- func_call/func_call.go -- +package func_call + +import "fmt" + +func a() { + fmt.Println(1, 2, 3, fmt.Sprintf("hello %d", 4)) 
//@codeaction("1", "1", "refactor.rewrite", func_call) +} + +-- @func_call/func_call/func_call.go -- +package func_call + +import "fmt" + +func a() { + fmt.Println( + 1, + 2, + 3, + fmt.Sprintf("hello %d", 4), + ) //@codeaction("1", "1", "refactor.rewrite", func_call) +} + +-- indent/indent.go -- +package indent + +import "fmt" + +func a() { + fmt.Println(1, 2, 3, fmt.Sprintf("hello %d", 4)) //@codeaction("hello", "hello", "refactor.rewrite", indent, "Split parameters into separate lines") +} + +-- @indent/indent/indent.go -- +package indent + +import "fmt" + +func a() { + fmt.Println(1, 2, 3, fmt.Sprintf( + "hello %d", + 4, + )) //@codeaction("hello", "hello", "refactor.rewrite", indent, "Split parameters into separate lines") +} + +-- indent2/indent2.go -- +package indent2 + +import "fmt" + +func a() { + fmt. + Println(1, 2, 3, fmt.Sprintf("hello %d", 4)) //@codeaction("1", "1", "refactor.rewrite", indent2, "Split parameters into separate lines") +} + +-- @indent2/indent2/indent2.go -- +package indent2 + +import "fmt" + +func a() { + fmt. 
+ Println( + 1, + 2, + 3, + fmt.Sprintf("hello %d", 4), + ) //@codeaction("1", "1", "refactor.rewrite", indent2, "Split parameters into separate lines") +} + +-- structelts/structelts.go -- +package structelts + +type A struct{ + a int + b int +} + +func a() { + _ = A{a: 1, b: 2} //@codeaction("b", "b", "refactor.rewrite", structelts) +} + +-- @structelts/structelts/structelts.go -- +package structelts + +type A struct{ + a int + b int +} + +func a() { + _ = A{ + a: 1, + b: 2, + } //@codeaction("b", "b", "refactor.rewrite", structelts) +} + +-- sliceelts/sliceelts.go -- +package sliceelts + +func a() { + _ = []int{1, 2} //@codeaction("1", "1", "refactor.rewrite", sliceelts) +} + +-- @sliceelts/sliceelts/sliceelts.go -- +package sliceelts + +func a() { + _ = []int{ + 1, + 2, + } //@codeaction("1", "1", "refactor.rewrite", sliceelts) +} + +-- mapelts/mapelts.go -- +package mapelts + +func a() { + _ = map[string]int{"a": 1, "b": 2} //@codeaction("1", "1", "refactor.rewrite", mapelts) +} + +-- @mapelts/mapelts/mapelts.go -- +package mapelts + +func a() { + _ = map[string]int{ + "a": 1, + "b": 2, + } //@codeaction("1", "1", "refactor.rewrite", mapelts) +} + +-- starcomment/starcomment.go -- +package starcomment + +func A(/*1*/ x /*2*/ string /*3*/, /*4*/ y /*5*/ int /*6*/) (string, int) { //@codeaction("x", "x", "refactor.rewrite", starcomment) + return x, y +} + +-- @starcomment/starcomment/starcomment.go -- +package starcomment + +func A( + /*1*/ x /*2*/ string /*3*/, + /*4*/ y /*5*/ int /*6*/, +) (string, int) { //@codeaction("x", "x", "refactor.rewrite", starcomment) + return x, y +} + diff --git a/gopls/internal/regtest/marker/testdata/codelens/generate.txt b/gopls/internal/test/marker/testdata/codelens/generate.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/codelens/generate.txt rename to gopls/internal/test/marker/testdata/codelens/generate.txt diff --git a/gopls/internal/regtest/marker/testdata/codelens/test.txt 
b/gopls/internal/test/marker/testdata/codelens/test.txt similarity index 82% rename from gopls/internal/regtest/marker/testdata/codelens/test.txt rename to gopls/internal/test/marker/testdata/codelens/test.txt index 90782bddef9..ba68cf019df 100644 --- a/gopls/internal/regtest/marker/testdata/codelens/test.txt +++ b/gopls/internal/test/marker/testdata/codelens/test.txt @@ -22,7 +22,8 @@ func TestMain(m *testing.M) {} // no code lens for TestMain func TestFuncWithCodeLens(t *testing.T) { //@codelens(re"()func", "run test") } -func thisShouldNotHaveACodeLens(t *testing.T) { +func thisShouldNotHaveACodeLens(t *testing.T) { //@diag("t ", re"unused parameter") + println() // nonempty body => "unused parameter" } func BenchmarkFuncWithCodeLens(b *testing.B) { //@codelens(re"()func", "run benchmark") diff --git a/gopls/internal/regtest/marker/testdata/completion/address.txt b/gopls/internal/test/marker/testdata/completion/address.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/address.txt rename to gopls/internal/test/marker/testdata/completion/address.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/anon.txt b/gopls/internal/test/marker/testdata/completion/anon.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/anon.txt rename to gopls/internal/test/marker/testdata/completion/anon.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/append.txt b/gopls/internal/test/marker/testdata/completion/append.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/append.txt rename to gopls/internal/test/marker/testdata/completion/append.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/assign.txt b/gopls/internal/test/marker/testdata/completion/assign.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/assign.txt rename to gopls/internal/test/marker/testdata/completion/assign.txt diff --git 
a/gopls/internal/test/marker/testdata/completion/bad.txt b/gopls/internal/test/marker/testdata/completion/bad.txt new file mode 100644 index 00000000000..30a96afb043 --- /dev/null +++ b/gopls/internal/test/marker/testdata/completion/bad.txt @@ -0,0 +1,68 @@ +This test exercises completion in the presence of type errors. + +Note: this test was ported from the old marker tests, which did not enable +unimported completion. Enabling it causes matches in e.g. crypto/rand. + +-- settings.json -- +{ + "completeUnimported": false +} + +-- go.mod -- +module bad.test + +go 1.18 + +-- bad/bad0.go -- +package bad + +func stuff() { //@item(stuff, "stuff", "func()", "func") + x := "heeeeyyyy" + random2(x) //@diag("x", re"cannot use x \\(variable of type string\\) as int value in argument to random2") + random2(1) //@complete("dom", random, random2, random3) + y := 3 //@diag("y", re"y.*declared (and|but) not used") +} + +type bob struct { //@item(bob, "bob", "struct{...}", "struct") + x int +} + +func _() { + var q int + _ = &bob{ + f: q, //@diag("f: q", re"unknown field f in struct literal") + } +} + +-- bad/bad1.go -- +package bad + +// See #36637 +type stateFunc func() stateFunc //@item(stateFunc, "stateFunc", "func() stateFunc", "type") + +var a unknown //@item(global_a, "a", "unknown", "var"),diag("unknown", re"(undeclared name|undefined): unknown") + +func random() int { //@item(random, "random", "func() int", "func") + //@complete("", global_a, bob, random, random2, random3, stateFunc, stuff) + return 0 +} + +func random2(y int) int { //@item(random2, "random2", "func(y int) int", "func"),item(bad_y_param, "y", "int", "var") + x := 6 //@item(x, "x", "int", "var"),diag("x", re"x.*declared (and|but) not used") + var q blah //@item(q, "q", "blah", "var"),diag("q", re"q.*declared (and|but) not used"),diag("blah", re"(undeclared name|undefined): blah") + var t **blob //@item(t, "t", "**blob", "var"),diag("t", re"t.*declared (and|but) not used"),diag("blob", re"(undeclared 
name|undefined): blob") + //@complete("", q, t, x, bad_y_param, global_a, bob, random, random2, random3, stateFunc, stuff) + + return y +} + +func random3(y ...int) { //@item(random3, "random3", "func(y ...int)", "func"),item(y_variadic_param, "y", "[]int", "var") + //@complete("", y_variadic_param, global_a, bob, random, random2, random3, stateFunc, stuff) + + var ch chan (favType1) //@item(ch, "ch", "chan (favType1)", "var"),diag("ch", re"ch.*declared (and|but) not used"),diag("favType1", re"(undeclared name|undefined): favType1") + var m map[keyType]int //@item(m, "m", "map[keyType]int", "var"),diag("m", re"m.*declared (and|but) not used"),diag("keyType", re"(undeclared name|undefined): keyType") + var arr []favType2 //@item(arr, "arr", "[]favType2", "var"),diag("arr", re"arr.*declared (and|but) not used"),diag("favType2", re"(undeclared name|undefined): favType2") + var fn1 func() badResult //@item(fn1, "fn1", "func() badResult", "var"),diag("fn1", re"fn1.*declared (and|but) not used"),diag("badResult", re"(undeclared name|undefined): badResult") + var fn2 func(badParam) //@item(fn2, "fn2", "func(badParam)", "var"),diag("fn2", re"fn2.*declared (and|but) not used"),diag("badParam", re"(undeclared name|undefined): badParam") + //@complete("", arr, ch, fn1, fn2, m, y_variadic_param, global_a, bob, random, random2, random3, stateFunc, stuff) +} diff --git a/gopls/internal/regtest/marker/testdata/completion/basic_lit.txt b/gopls/internal/test/marker/testdata/completion/basic_lit.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/basic_lit.txt rename to gopls/internal/test/marker/testdata/completion/basic_lit.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/builtins.txt b/gopls/internal/test/marker/testdata/completion/builtins.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/builtins.txt rename to gopls/internal/test/marker/testdata/completion/builtins.txt diff --git 
a/gopls/internal/regtest/marker/testdata/completion/casesensitive.txt b/gopls/internal/test/marker/testdata/completion/casesensitive.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/casesensitive.txt rename to gopls/internal/test/marker/testdata/completion/casesensitive.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/cast.txt b/gopls/internal/test/marker/testdata/completion/cast.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/cast.txt rename to gopls/internal/test/marker/testdata/completion/cast.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/channel.txt b/gopls/internal/test/marker/testdata/completion/channel.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/channel.txt rename to gopls/internal/test/marker/testdata/completion/channel.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/comment.txt b/gopls/internal/test/marker/testdata/completion/comment.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/comment.txt rename to gopls/internal/test/marker/testdata/completion/comment.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/complit.txt b/gopls/internal/test/marker/testdata/completion/complit.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/complit.txt rename to gopls/internal/test/marker/testdata/completion/complit.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/constant.txt b/gopls/internal/test/marker/testdata/completion/constant.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/constant.txt rename to gopls/internal/test/marker/testdata/completion/constant.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/danglingstmt.txt b/gopls/internal/test/marker/testdata/completion/danglingstmt.txt similarity index 100% rename from 
gopls/internal/regtest/marker/testdata/completion/danglingstmt.txt rename to gopls/internal/test/marker/testdata/completion/danglingstmt.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/deep.txt b/gopls/internal/test/marker/testdata/completion/deep.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/deep.txt rename to gopls/internal/test/marker/testdata/completion/deep.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/deep2.txt b/gopls/internal/test/marker/testdata/completion/deep2.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/deep2.txt rename to gopls/internal/test/marker/testdata/completion/deep2.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/errors.txt b/gopls/internal/test/marker/testdata/completion/errors.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/errors.txt rename to gopls/internal/test/marker/testdata/completion/errors.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/field_list.txt b/gopls/internal/test/marker/testdata/completion/field_list.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/field_list.txt rename to gopls/internal/test/marker/testdata/completion/field_list.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/foobarbaz.txt b/gopls/internal/test/marker/testdata/completion/foobarbaz.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/foobarbaz.txt rename to gopls/internal/test/marker/testdata/completion/foobarbaz.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/func_rank.txt b/gopls/internal/test/marker/testdata/completion/func_rank.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/func_rank.txt rename to gopls/internal/test/marker/testdata/completion/func_rank.txt diff --git 
a/gopls/internal/regtest/marker/testdata/completion/func_sig.txt b/gopls/internal/test/marker/testdata/completion/func_sig.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/func_sig.txt rename to gopls/internal/test/marker/testdata/completion/func_sig.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/func_snippets.txt b/gopls/internal/test/marker/testdata/completion/func_snippets.txt similarity index 88% rename from gopls/internal/regtest/marker/testdata/completion/func_snippets.txt rename to gopls/internal/test/marker/testdata/completion/func_snippets.txt index efbc393f30f..01316342b7f 100644 --- a/gopls/internal/regtest/marker/testdata/completion/func_snippets.txt +++ b/gopls/internal/test/marker/testdata/completion/func_snippets.txt @@ -28,5 +28,5 @@ func Identity[P ~int](p P) P { //@item(Identity, "Identity", "", "") func _() { _ = NewSyncM //@snippet(" //", NewSyncMap, "NewSyncMap[${1:K comparable}, ${2:V any}]()") - _ = Identi //@snippet(" //", Identity, "Identity[${1:P ~int}](${2:p P})") + _ = Identi //@snippet(" //", Identity, "Identity(${1:p P})") } diff --git a/gopls/internal/regtest/marker/testdata/completion/func_value.txt b/gopls/internal/test/marker/testdata/completion/func_value.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/func_value.txt rename to gopls/internal/test/marker/testdata/completion/func_value.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/fuzzy.txt b/gopls/internal/test/marker/testdata/completion/fuzzy.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/fuzzy.txt rename to gopls/internal/test/marker/testdata/completion/fuzzy.txt diff --git a/gopls/internal/test/marker/testdata/completion/imported-std.txt b/gopls/internal/test/marker/testdata/completion/imported-std.txt new file mode 100644 index 00000000000..5f4520f6b6a --- /dev/null +++ 
b/gopls/internal/test/marker/testdata/completion/imported-std.txt @@ -0,0 +1,61 @@ +Test of imported completions respecting the effective Go version of the file. + +(See "un-" prefixed file for same test of unimported completions.) + +These symbols below were introduced to go/types in go1.22: + + Alias + Info.FileVersions + (Checker).PkgNameOf + +The underlying logic depends on versions.FileVersion, which only +behaves correctly in go1.22. (When go1.22 is assured, we can remove +the min_go flag but leave the test inputs unchanged.) + +-- flags -- +-ignore_extra_diags -min_go=go1.22 + +-- go.mod -- +module example.com + +go 1.21 + +-- a/a.go -- +package a + +import "go/ast" +import "go/token" +import "go/types" + +// package-level decl +var _ = types.Sat //@rankl("Sat", "Satisfies") +var _ = types.Ali //@rankl("Ali", "!Alias") + +// field +var _ = new(types.Info).Use //@rankl("Use", "Uses") +var _ = new(types.Info).Fil //@rankl("Fil", "!FileVersions") + +// method +var _ = new(types.Checker).Obje //@rankl("Obje", "ObjectOf") +var _ = new(types.Checker).PkgN //@rankl("PkgN", "!PkgNameOf") + +-- b/b.go -- +//go:build go1.22 + +package a + +import "go/ast" +import "go/token" +import "go/types" + +// package-level decl +var _ = types.Sat //@rankl("Sat", "Satisfies") +var _ = types.Ali //@rankl("Ali", "Alias") + +// field +var _ = new(types.Info).Use //@rankl("Use", "Uses") +var _ = new(types.Info).Fil //@rankl("Fil", "FileVersions") + +// method +var _ = new(types.Checker).Obje //@rankl("Obje", "ObjectOf") +var _ = new(types.Checker).PkgN //@rankl("PkgN", "PkgNameOf") diff --git a/gopls/internal/regtest/marker/testdata/completion/index.txt b/gopls/internal/test/marker/testdata/completion/index.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/index.txt rename to gopls/internal/test/marker/testdata/completion/index.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/interfacerank.txt 
b/gopls/internal/test/marker/testdata/completion/interfacerank.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/interfacerank.txt rename to gopls/internal/test/marker/testdata/completion/interfacerank.txt diff --git a/gopls/internal/test/marker/testdata/completion/issue51783.txt b/gopls/internal/test/marker/testdata/completion/issue51783.txt new file mode 100644 index 00000000000..074259ca713 --- /dev/null +++ b/gopls/internal/test/marker/testdata/completion/issue51783.txt @@ -0,0 +1,47 @@ +Regression test for "completion gives unneeded generic type +instantiation snippet", #51783. + +Type parameters that can be inferred from the arguments +are not part of the offered completion snippet. + +-- flags -- +-ignore_extra_diags + +-- a.go -- +package a + +// identity has a single simple type parameter. +// The completion omits the instantiation. +func identity[T any](x T) T + +// clone has a second type parameter that is nonetheless constrained by the parameter. +// The completion omits the instantiation. +func clone[S ~[]E, E any](s S) S + +// unconstrained has a type parameter constrained only by the result. +// The completion suggests instantiation. +func unconstrained[X, Y any](x X) Y + +// partial has three type parameters, +// only the last two of which may be omitted as they +// are constrained by the arguments. 
+func partial[R any, S ~[]E, E any](s S) R + +//@item(identity, "identity", "details", "kind") +//@item(clone, "clone", "details", "kind") +//@item(unconstrained, "unconstrained", "details", "kind") +//@item(partial, "partial", "details", "kind") + +func _() { + _ = identity //@snippet("identity", identity, "identity(${1:})") + + _ = clone //@snippet("clone", clone, "clone(${1:})") + + _ = unconstrained //@snippet("unconstrained", unconstrained, "unconstrained[${1:}](${2:})") + + _ = partial //@snippet("partial", partial, "partial[${1:}](${2:})") + + // Result-type inference permits us to omit Y in this (rare) case, + // but completion doesn't support that. + var _ int = unconstrained //@snippet("unconstrained", unconstrained, "unconstrained[${1:}](${2:})") +} diff --git a/gopls/internal/regtest/marker/testdata/completion/issue56505.txt b/gopls/internal/test/marker/testdata/completion/issue56505.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/issue56505.txt rename to gopls/internal/test/marker/testdata/completion/issue56505.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/issue59096.txt b/gopls/internal/test/marker/testdata/completion/issue59096.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/issue59096.txt rename to gopls/internal/test/marker/testdata/completion/issue59096.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/issue60545.txt b/gopls/internal/test/marker/testdata/completion/issue60545.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/issue60545.txt rename to gopls/internal/test/marker/testdata/completion/issue60545.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/issue62141.txt b/gopls/internal/test/marker/testdata/completion/issue62141.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/issue62141.txt rename to 
gopls/internal/test/marker/testdata/completion/issue62141.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/issue62560.txt b/gopls/internal/test/marker/testdata/completion/issue62560.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/issue62560.txt rename to gopls/internal/test/marker/testdata/completion/issue62560.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/issue62676.txt b/gopls/internal/test/marker/testdata/completion/issue62676.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/issue62676.txt rename to gopls/internal/test/marker/testdata/completion/issue62676.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/keywords.txt b/gopls/internal/test/marker/testdata/completion/keywords.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/keywords.txt rename to gopls/internal/test/marker/testdata/completion/keywords.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/labels.txt b/gopls/internal/test/marker/testdata/completion/labels.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/labels.txt rename to gopls/internal/test/marker/testdata/completion/labels.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/lit.txt b/gopls/internal/test/marker/testdata/completion/lit.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/lit.txt rename to gopls/internal/test/marker/testdata/completion/lit.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/maps.txt b/gopls/internal/test/marker/testdata/completion/maps.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/maps.txt rename to gopls/internal/test/marker/testdata/completion/maps.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/multi_return.txt 
b/gopls/internal/test/marker/testdata/completion/multi_return.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/multi_return.txt rename to gopls/internal/test/marker/testdata/completion/multi_return.txt diff --git a/gopls/internal/test/marker/testdata/completion/nested_complit.txt b/gopls/internal/test/marker/testdata/completion/nested_complit.txt new file mode 100644 index 00000000000..264ae77eab8 --- /dev/null +++ b/gopls/internal/test/marker/testdata/completion/nested_complit.txt @@ -0,0 +1,26 @@ +This test checks completion of nested composite literals; + +Parser recovery changed in Go 1.20, so this test requires at least that +version for consistency. + +-- flags -- +-ignore_extra_diags +-min_go=go1.20 + +-- nested_complit.go -- +package nested_complit + +type ncFoo struct {} //@item(structNCFoo, "ncFoo", "struct{...}", "struct") + +type ncBar struct { //@item(structNCBar, "ncBar", "struct{...}", "struct") + baz []ncFoo +} + +func _() { + _ = []ncFoo{} //@item(litNCFoo, "[]ncFoo{}", "", "var") + _ = make([]ncFoo, 0) //@item(makeNCFoo, "make([]ncFoo, 0)", "", "func") + + _ := ncBar{ + baz: [] //@complete(" //", litNCFoo, makeNCFoo, structNCBar, structNCFoo) + } +} diff --git a/gopls/internal/test/marker/testdata/completion/postfix.txt b/gopls/internal/test/marker/testdata/completion/postfix.txt new file mode 100644 index 00000000000..9b54b578f4c --- /dev/null +++ b/gopls/internal/test/marker/testdata/completion/postfix.txt @@ -0,0 +1,131 @@ +These tests check that postfix completions do and do not show up in certain +cases. Tests for the postfix completion contents are implemented as ad-hoc +integration tests. 
+ +-- flags -- +-ignore_extra_diags + +-- go.mod -- +module golang.org/lsptests/snippets + +go 1.18 + +-- postfix.go -- +package snippets + +import ( + "strconv" +) + +func _() { + var foo []int + foo.append //@rank(" //", postfixAppend) + + []int{}.append //@complete(" //") + + []int{}.last //@complete(" //") + + + foo.copy //@rank(" //", postfixCopy) + + var s struct{ i []int } + s.i.copy //@rank(" //", postfixCopy) + + var _ []int = s.i.copy //@complete(" //") + + var blah func() []int + blah().append //@complete(" //") +} + +func _() { + /* append! */ //@item(postfixAppend, "append!", "append and re-assign slice", "snippet") + /* copy! */ //@item(postfixCopy, "copy!", "duplicate slice", "snippet") + /* for! */ //@item(postfixFor, "for!", "range over slice by index", "snippet") + /* forr! */ //@item(postfixForr, "forr!", "range over slice by index and value", "snippet") + /* last! */ //@item(postfixLast, "last!", "s[len(s)-1]", "snippet") + /* len! */ //@item(postfixLen, "len!", "len(s)", "snippet") + /* print! */ //@item(postfixPrint, "print!", "print to stdout", "snippet") + /* range! */ //@item(postfixRange, "range!", "range over slice", "snippet") + /* reverse! */ //@item(postfixReverse, "reverse!", "reverse slice", "snippet") + /* sort! */ //@item(postfixSort, "sort!", "sort.Slice()", "snippet") + /* var! */ //@item(postfixVar, "var!", "assign to variable", "snippet") + /* ifnotnil! */ //@item(postfixIfNotNil, "ifnotnil!", "if expr != nil", "snippet") + + var foo []int + foo. 
//@complete(" //", postfixAppend, postfixCopy, postfixFor, postfixForr, postfixIfNotNil, postfixLast, postfixLen, postfixPrint, postfixRange, postfixReverse, postfixSort, postfixVar) + foo = nil + + foo.append //@snippet(" //", postfixAppend, "foo = append(foo, $0)") + foo.copy //snippet(" //", postfixCopy, "fooCopy := make([]int, len(foo))\ncopy($fooCopy, foo)\n") + foo.fo //@snippet(" //", postfixFor, "for ${1:} := range foo {\n\t$0\n}") + foo.forr //@snippet(" //", postfixForr, "for ${1:}, ${2:} := range foo {\n\t$0\n}") + foo.last //@snippet(" //", postfixLast, "foo[len(foo)-1]") + foo.len //@snippet(" //", postfixLen, "len(foo)") + foo.print //@snippet(" //", postfixPrint, `fmt.Printf("foo: %v\n", foo)`) + foo.rang //@snippet(" //", postfixRange, "for ${1:}, ${2:} := range foo {\n\t$0\n}") + foo.reverse //@snippet(" //", postfixReverse, "slices.Reverse(foo)") + foo.sort //@snippet(" //", postfixSort, "sort.Slice(foo, func(i, j int) bool {\n\t$0\n})") + foo.va //@snippet(" //", postfixVar, "${1:} := foo") + foo.ifnotnil //@snippet(" //", postfixIfNotNil, "if foo != nil {\n\t$0\n}") +} + +func _() { + /* for! */ //@item(postfixForMap, "for!", "range over map by key", "snippet") + /* forr! */ //@item(postfixForrMap, "forr!", "range over map by key and value", "snippet") + /* range! */ //@item(postfixRangeMap, "range!", "range over map", "snippet") + /* clear! */ //@item(postfixClear, "clear!", "clear map contents", "snippet") + /* keys! */ //@item(postfixKeys, "keys!", "create slice of keys", "snippet") + + var foo map[int]int + foo. 
//@complete(" //", postfixClear, postfixForMap, postfixForrMap, postfixIfNotNil, postfixKeys, postfixLen, postfixPrint, postfixRangeMap, postfixVar) + + foo = nil + + foo.fo //@snippet(" //", postfixFor, "for ${1:} := range foo {\n\t$0\n}") + foo.forr //@snippet(" //", postfixForr, "for ${1:}, ${2:} := range foo {\n\t$0\n}") + foo.rang //@snippet(" //", postfixRange, "for ${1:}, ${2:} := range foo {\n\t$0\n}") + foo.clear //@snippet(" //", postfixClear, "for k := range foo {\n\tdelete(foo, k)\n}\n") + foo.keys //@snippet(" //", postfixKeys, "keys := make([]int, 0, len(foo))\nfor k := range foo {\n\tkeys = append(keys, k)\n}\n") +} + +func _() { + /* for! */ //@item(postfixForChannel, "for!", "range over channel", "snippet") + /* range! */ //@item(postfixRangeChannel, "range!", "range over channel", "snippet") + + var foo chan int + foo. //@complete(" //", postfixForChannel, postfixIfNotNil, postfixLen, postfixPrint, postfixRangeChannel, postfixVar) + + foo = nil + + foo.fo //@snippet(" //", postfixForChannel, "for ${1:} := range foo {\n\t$0\n}") + foo.rang //@snippet(" //", postfixRangeChannel, "for ${1:} := range foo {\n\t$0\n}") +} + +type T struct { + Name string +} + +func _() (string, T, map[string]string, error) { + /* iferr! */ //@item(postfixIfErr, "iferr!", "check error and return", "snippet") + /* variferr! */ //@item(postfixVarIfErr, "variferr!", "assign variables and check error", "snippet") + /* var! */ //@item(postfixVars, "var!", "assign to variables", "snippet") + + strconv.Atoi("32"). 
//@complete(" //", postfixIfErr, postfixPrint, postfixVars, postfixVarIfErr) + + var err error + err.iferr //@snippet(" //", postfixIfErr, "if err != nil {\n\treturn \"\", T{}, nil, ${1:}\n}\n") + + strconv.Atoi("32").iferr //@snippet(" //", postfixIfErr, "if _, err := strconv.Atoi(\"32\"); err != nil {\n\treturn \"\", T{}, nil, ${1:}\n}\n") + + strconv.Atoi("32").variferr //@snippet(" //", postfixVarIfErr, "${1:}, ${2:} := strconv.Atoi(\"32\")\nif ${2:} != nil {\n\treturn \"\", T{}, nil, ${3:}\n}\n") + + // test function return multiple errors + var foo func() (error, error) + foo().iferr //@snippet(" //", postfixIfErr, "if _, err := foo(); err != nil {\n\treturn \"\", T{}, nil, ${1:}\n}\n") + foo().variferr //@snippet(" //", postfixVarIfErr, "${1:}, ${2:} := foo()\nif ${2:} != nil {\n\treturn \"\", T{}, nil, ${3:}\n}\n") + + // test function just return error + var bar func() error + bar().iferr //@snippet(" //", postfixIfErr, "if err := bar(); err != nil {\n\treturn \"\", T{}, nil, ${1:}\n}\n") + bar().variferr //@snippet(" //", postfixVarIfErr, "${1:} := bar()\nif ${1:} != nil {\n\treturn \"\", T{}, nil, ${2:}\n}\n") +} diff --git a/gopls/internal/test/marker/testdata/completion/postfix_placeholder.txt b/gopls/internal/test/marker/testdata/completion/postfix_placeholder.txt new file mode 100644 index 00000000000..7569f130466 --- /dev/null +++ b/gopls/internal/test/marker/testdata/completion/postfix_placeholder.txt @@ -0,0 +1,83 @@ +These tests check postfix completions when usePlaceholders is enabled + +-- flags -- +-ignore_extra_diags + +-- settings.json -- +{ + "usePlaceholders": true +} + +-- go.mod -- +module golang.org/lsptests/snippets + +go 1.18 + +-- postfix.go -- +package snippets + +import ( + "strconv" +) + +func _() { + /* for! */ //@item(postfixFor, "for!", "range over slice by index", "snippet") + /* forr! */ //@item(postfixForr, "forr!", "range over slice by index and value", "snippet") + /* range! 
*/ //@item(postfixRange, "range!", "range over slice", "snippet") + /* var! */ //@item(postfixVar, "var!", "assign to variable", "snippet") + + var foo []int + + foo.fo //@snippet(" //", postfixFor, "for ${1:i} := range foo {\n\t$0\n}") + foo.forr //@snippet(" //", postfixForr, "for ${1:i}, ${2:v} := range foo {\n\t$0\n}") + foo.rang //@snippet(" //", postfixRange, "for ${1:i}, ${2:v} := range foo {\n\t$0\n}") + foo.va //@snippet(" //", postfixVar, "${1:i} := foo") +} + +func _() { + /* for! */ //@item(postfixForMap, "for!", "range over map by key", "snippet") + /* forr! */ //@item(postfixForrMap, "forr!", "range over map by key and value", "snippet") + /* range! */ //@item(postfixRangeMap, "range!", "range over map", "snippet") + + var foo map[int]int + + foo.fo //@snippet(" //", postfixFor, "for ${1:k} := range foo {\n\t$0\n}") + foo.forr //@snippet(" //", postfixForr, "for ${1:k}, ${2:v} := range foo {\n\t$0\n}") + foo.rang //@snippet(" //", postfixRange, "for ${1:k}, ${2:v} := range foo {\n\t$0\n}") +} + +func _() { + /* for! */ //@item(postfixForChannel, "for!", "range over channel", "snippet") + /* range! */ //@item(postfixRangeChannel, "range!", "range over channel", "snippet") + + var foo chan int + + foo.fo //@snippet(" //", postfixForChannel, "for ${1:e} := range foo {\n\t$0\n}") + foo.rang //@snippet(" //", postfixRangeChannel, "for ${1:e} := range foo {\n\t$0\n}") +} + +type T struct { + Name string +} + +func _() (string, T, map[string]string, error) { + /* iferr! */ //@item(postfixIfErr, "iferr!", "check error and return", "snippet") + /* variferr! */ //@item(postfixVarIfErr, "variferr!", "assign variables and check error", "snippet") + /* var! 
*/ //@item(postfixVars, "var!", "assign to variables", "snippet") + + + var err error + err.iferr //@snippet(" //", postfixIfErr, "if err != nil {\n\treturn \"\", T{}, nil, ${1:err}\n}\n") + strconv.Atoi("32").iferr //@snippet(" //", postfixIfErr, "if _, err := strconv.Atoi(\"32\"); err != nil {\n\treturn \"\", T{}, nil, ${1:err}\n}\n") + strconv.Atoi("32").variferr //@snippet(" //", postfixVarIfErr, "${1:i}, ${2:err} := strconv.Atoi(\"32\")\nif ${2:err} != nil {\n\treturn \"\", T{}, nil, ${3:${2:err}}\n}\n") + + // test function return multiple errors + var foo func() (error, error) + foo().iferr //@snippet(" //", postfixIfErr, "if _, err := foo(); err != nil {\n\treturn \"\", T{}, nil, ${1:err}\n}\n") + foo().variferr //@snippet(" //", postfixVarIfErr, "${1:err2}, ${2:err} := foo()\nif ${2:err} != nil {\n\treturn \"\", T{}, nil, ${3:${2:err}}\n}\n") + + // test function just return error + var bar func() error + bar().iferr //@snippet(" //", postfixIfErr, "if err := bar(); err != nil {\n\treturn \"\", T{}, nil, ${1:err}\n}\n") + bar().variferr //@snippet(" //", postfixVarIfErr, "${1:err2} := bar()\nif ${1:err2} != nil {\n\treturn \"\", T{}, nil, ${2:${1:err2}}\n}\n") +} diff --git a/gopls/internal/regtest/marker/testdata/completion/printf.txt b/gopls/internal/test/marker/testdata/completion/printf.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/printf.txt rename to gopls/internal/test/marker/testdata/completion/printf.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/rank.txt b/gopls/internal/test/marker/testdata/completion/rank.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/rank.txt rename to gopls/internal/test/marker/testdata/completion/rank.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/snippet.txt b/gopls/internal/test/marker/testdata/completion/snippet.txt similarity index 100% rename from 
gopls/internal/regtest/marker/testdata/completion/snippet.txt rename to gopls/internal/test/marker/testdata/completion/snippet.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/snippet_placeholder.txt b/gopls/internal/test/marker/testdata/completion/snippet_placeholder.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/snippet_placeholder.txt rename to gopls/internal/test/marker/testdata/completion/snippet_placeholder.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/statements.txt b/gopls/internal/test/marker/testdata/completion/statements.txt similarity index 91% rename from gopls/internal/regtest/marker/testdata/completion/statements.txt rename to gopls/internal/test/marker/testdata/completion/statements.txt index d013fefa5d6..9856d938ea3 100644 --- a/gopls/internal/regtest/marker/testdata/completion/statements.txt +++ b/gopls/internal/test/marker/testdata/completion/statements.txt @@ -119,3 +119,16 @@ func BenchmarkErr(b *testing.B) { _, err := os.Open("foo") //@snippet("", stmtOneIfErrBFatal, "if err != nil {\n\tb.Fatal(err)\n\\}") } + +-- return.go -- +package statements + +//@item(stmtReturnZeroValues, `return 0, "", nil`) + +func foo() (int, string, error) { + ret //@snippet(" ", stmtReturnZeroValues, "return ${1:0}, ${2:\"\"}, ${3:nil}") +} + +func bar() (int, string, error) { + return //@snippet(" ", stmtReturnZeroValues, "return ${1:0}, ${2:\"\"}, ${3:nil}") +} diff --git a/gopls/internal/test/marker/testdata/completion/testy.txt b/gopls/internal/test/marker/testdata/completion/testy.txt new file mode 100644 index 00000000000..a7a9e1ce36c --- /dev/null +++ b/gopls/internal/test/marker/testdata/completion/testy.txt @@ -0,0 +1,61 @@ + +-- flags -- +-ignore_extra_diags + +-- go.mod -- +module testy.test + +go 1.18 + +-- types/types.go -- +package types + + +-- signature/signature.go -- +package signature + +type Alias = int + +-- snippets/snippets.go -- +package snippets + +import ( + 
"testy.test/signature" + t "testy.test/types" +) + +func X(_ map[signature.Alias]t.CoolAlias) (map[signature.Alias]t.CoolAlias) { + return nil +} + +-- testy/testy.go -- +package testy + +func a() { //@item(funcA, "a", "func()", "func") + //@complete("", funcA) +} + + +-- testy/testy_test.go -- +package testy + +import ( + "testing" + + sig "testy.test/signature" + "testy.test/snippets" +) + +func TestSomething(t *testing.T) { //@item(TestSomething, "TestSomething(t *testing.T)", "", "func") + var x int //@loc(testyX, "x"), diag("x", re"x.*declared (and|but) not used") + a() //@loc(testyA, "a") +} + +func _() { + _ = snippets.X(nil) //@signature("nil", "X(_ map[sig.Alias]types.CoolAlias) map[sig.Alias]types.CoolAlias", 0) + var _ sig.Alias +} + +func issue63578(err error) { + err.Error() //@signature(")", "Error()", 0) +} diff --git a/gopls/internal/regtest/marker/testdata/completion/type_assert.txt b/gopls/internal/test/marker/testdata/completion/type_assert.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/type_assert.txt rename to gopls/internal/test/marker/testdata/completion/type_assert.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/type_mods.txt b/gopls/internal/test/marker/testdata/completion/type_mods.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/type_mods.txt rename to gopls/internal/test/marker/testdata/completion/type_mods.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/type_params.txt b/gopls/internal/test/marker/testdata/completion/type_params.txt similarity index 94% rename from gopls/internal/regtest/marker/testdata/completion/type_params.txt rename to gopls/internal/test/marker/testdata/completion/type_params.txt index 185d77f9911..8e2f5d7e401 100644 --- a/gopls/internal/regtest/marker/testdata/completion/type_params.txt +++ b/gopls/internal/test/marker/testdata/completion/type_params.txt @@ -52,8 +52,7 @@ func returnTP[A int | 
float64](a A) A { //@item(returnTP, "returnTP", "something } func _() { - // disabled - see issue #54822 - var _ int = returnTP // snippet(" //", returnTP, "returnTP[${1:}](${2:})") + var _ int = returnTP //@snippet(" //", returnTP, "returnTP(${1:})") var aa int //@item(tpInt, "aa", "int", "var") var ab float64 //@item(tpFloat, "ab", "float64", "var") diff --git a/gopls/internal/test/marker/testdata/completion/unimported-std.txt b/gopls/internal/test/marker/testdata/completion/unimported-std.txt new file mode 100644 index 00000000000..5eb996a487e --- /dev/null +++ b/gopls/internal/test/marker/testdata/completion/unimported-std.txt @@ -0,0 +1,49 @@ +Test of unimported completions respecting the effective Go version of the file. + +(See unprefixed file for same test of imported completions.) + +These symbols below were introduced to go/types in go1.22: + + Alias + Info.FileVersions + (Checker).PkgNameOf + +The underlying logic depends on versions.FileVersion, which only +behaves correctly in go1.22. (When go1.22 is assured, we can remove +the min_go flag but leave the test inputs unchanged.) + +-- flags -- +-ignore_extra_diags -min_go=go1.22 + +-- go.mod -- +module example.com + +go 1.21 + +-- a/a.go -- +package a + +// package-level func +var _ = types.Sat //@rankl("Sat", "Satisfies") +var _ = types.Ali //@rankl("Ali", "!Alias") + +// (We don't offer completions of methods +// of types from unimported packages, so the fact that +// we don't implement std version filtering isn't evident.) 
+ +// field +var _ = new(types.Info).Use //@rankl("Use", "!Uses") +var _ = new(types.Info).Fil //@rankl("Fil", "!FileVersions") + +// method +var _ = new(types.Checker).Obje //@rankl("Obje", "!ObjectOf") +var _ = new(types.Checker).PkgN //@rankl("PkgN", "!PkgNameOf") + +-- b/b.go -- +//go:build go1.22 + +package a + +// package-level decl +var _ = types.Sat //@rankl("Sat", "Satisfies") +var _ = types.Ali //@rankl("Ali", "Alias") diff --git a/gopls/internal/regtest/marker/testdata/completion/unimported.txt b/gopls/internal/test/marker/testdata/completion/unimported.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/unimported.txt rename to gopls/internal/test/marker/testdata/completion/unimported.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/unresolved.txt b/gopls/internal/test/marker/testdata/completion/unresolved.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/unresolved.txt rename to gopls/internal/test/marker/testdata/completion/unresolved.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/unsafe.txt b/gopls/internal/test/marker/testdata/completion/unsafe.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/unsafe.txt rename to gopls/internal/test/marker/testdata/completion/unsafe.txt diff --git a/gopls/internal/regtest/marker/testdata/completion/variadic.txt b/gopls/internal/test/marker/testdata/completion/variadic.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/completion/variadic.txt rename to gopls/internal/test/marker/testdata/completion/variadic.txt diff --git a/gopls/internal/test/marker/testdata/configuration/static.txt b/gopls/internal/test/marker/testdata/configuration/static.txt new file mode 100644 index 00000000000..c84b55db117 --- /dev/null +++ b/gopls/internal/test/marker/testdata/configuration/static.txt @@ -0,0 +1,41 @@ +This test confirms that gopls honors configuration 
even if the client does not +support dynamic configuration. + +-- capabilities.json -- +{ + "configuration": false +} + +-- settings.json -- +{ + "usePlaceholders": true, + "analyses": { + "composites": false + } +} + +-- go.mod -- +module example.com/config + +go 1.18 + +-- a/a.go -- +package a + +import "example.com/config/b" + +func Identity[P ~int](p P) P { //@item(Identity, "Identity", "", "") + return p +} + +func _() { + _ = b.B{2} + _ = Identi //@snippet(" //", Identity, "Identity(${1:p P})"), diag("Ident", re"(undefined|undeclared)") +} + +-- b/b.go -- +package b + +type B struct { + F int +} diff --git a/gopls/internal/regtest/marker/testdata/definition/cgo.txt b/gopls/internal/test/marker/testdata/definition/cgo.txt similarity index 94% rename from gopls/internal/regtest/marker/testdata/definition/cgo.txt rename to gopls/internal/test/marker/testdata/definition/cgo.txt index 6d108a46656..777285b242b 100644 --- a/gopls/internal/regtest/marker/testdata/definition/cgo.txt +++ b/gopls/internal/test/marker/testdata/definition/cgo.txt @@ -38,7 +38,7 @@ func _() { Example() //@hover("ample", "Example", hoverExample), def("ample", cgoexample), complete("ample", cgoexampleItem) } --- @hoverExample/hover.md -- +-- @hoverExample -- ```go func Example() ``` @@ -54,7 +54,7 @@ import ( func _() { cgo.Example() //@hover("ample", "Example", hoverImportedExample), def("ample", cgoexample), complete("ample", cgoexampleItem) } --- @hoverImportedExample/hover.md -- +-- @hoverImportedExample -- ```go func cgo.Example() ``` diff --git a/gopls/internal/test/marker/testdata/definition/embed.txt b/gopls/internal/test/marker/testdata/definition/embed.txt new file mode 100644 index 00000000000..4bda1d71ebc --- /dev/null +++ b/gopls/internal/test/marker/testdata/definition/embed.txt @@ -0,0 +1,275 @@ +This test checks definition and hover operations over embedded fields and methods. + +Its size expectations assume a 64-bit machine, +and correct sizes information requires go1.21. 
+ +-- flags -- +-skip_goarch=386,arm +-min_go=go1.21 + +-- go.mod -- +module mod.com + +go 1.18 + +-- a/a.go -- +package a + +type A string //@loc(AString, "A") + +func (_ A) Hi() {} //@loc(AHi, "Hi") + +type S struct { + Field int //@loc(SField, "Field") + R // embed a struct + H // embed an interface +} + +type R struct { + Field2 int //@loc(RField2, "Field2") +} + +func (r R) Hey() {} //@loc(RHey, "Hey") + +type H interface { //@loc(H, "H") + Goodbye() //@loc(HGoodbye, "Goodbye") +} + +type I interface { //@loc(I, "I") + B() //@loc(IB, "B") + J +} + +type J interface { //@loc(J, "J") + Hello() //@loc(JHello, "Hello") +} + +-- b/b.go -- +package b + +import "mod.com/a" //@loc(AImport, re"\".*\"") + +type embed struct { + F int //@loc(F, "F") +} + +func (embed) M() //@loc(M, "M") + +type Embed struct { + embed + *a.A + a.I + a.S +} + +func _() { + e := Embed{} + e.Hi() //@def("Hi", AHi),hover("Hi", "Hi", AHi) + e.B() //@def("B", IB),hover("B", "B", IB) + _ = e.Field //@def("Field", SField),hover("Field", "Field", SField) + _ = e.Field2 //@def("Field2", RField2),hover("Field2", "Field2", RField2) + e.Hello() //@def("Hello", JHello),hover("Hello", "Hello",JHello) + e.Hey() //@def("Hey", RHey),hover("Hey", "Hey", RHey) + e.Goodbye() //@def("Goodbye", HGoodbye),hover("Goodbye", "Goodbye", HGoodbye) + e.M() //@def("M", M),hover("M", "M", M) + _ = e.F //@def("F", F),hover("F", "F", F) +} + +type aAlias = a.A //@loc(aAlias, "aAlias") + +type S1 struct { //@loc(S1, "S1") + F1 int //@loc(S1F1, "F1") + S2 //@loc(S1S2, "S2"),def("S2", S2),hover("S2", "S2", S2) + a.A //@def("A", AString),hover("A", "A", aA) + aAlias //@def("a", aAlias),hover("a", "aAlias", aAlias) +} + +type S2 struct { //@loc(S2, "S2") + F1 string //@loc(S2F1, "F1") + F2 int //@loc(S2F2, "F2") + *a.A //@def("A", AString),def("a",AImport) +} + +type S3 struct { + F1 struct { + a.A //@def("A", AString) + } +} + +func Bar() { + var x S1 //@def("S1", S1),hover("S1", "S1", S1) + _ = x.S2 //@def("S2", 
S1S2),hover("S2", "S2", S1S2) + _ = x.F1 //@def("F1", S1F1),hover("F1", "F1", S1F1) + _ = x.F2 //@def("F2", S2F2),hover("F2", "F2", S2F2) + _ = x.S2.F1 //@def("F1", S2F1),hover("F1", "F1", S2F1) +} + +-- b/c.go -- +package b + +var _ = S1{ //@def("S1", S1),hover("S1", "S1", S1) + F1: 99, //@def("F1", S1F1),hover("F1", "F1", S1F1) +} + +-- @AHi -- +```go +func (a.A) Hi() +``` + +[`(a.A).Hi` on pkg.go.dev](https://pkg.go.dev/mod.com/a#A.Hi) +-- @F -- +```go +field F int +``` + +@loc(F, "F") + + +[`(b.Embed).F` on pkg.go.dev](https://pkg.go.dev/mod.com/b#Embed.F) +-- @HGoodbye -- +```go +func (a.H) Goodbye() +``` + +@loc(HGoodbye, "Goodbye") + + +[`(a.H).Goodbye` on pkg.go.dev](https://pkg.go.dev/mod.com/a#H.Goodbye) +-- @IB -- +```go +func (a.I) B() +``` + +@loc(IB, "B") + + +[`(a.I).B` on pkg.go.dev](https://pkg.go.dev/mod.com/a#I.B) +-- @JHello -- +```go +func (a.J) Hello() +``` + +@loc(JHello, "Hello") + + +[`(a.J).Hello` on pkg.go.dev](https://pkg.go.dev/mod.com/a#J.Hello) +-- @M -- +```go +func (embed) M() +``` + +[`(b.Embed).M` on pkg.go.dev](https://pkg.go.dev/mod.com/b#Embed.M) +-- @RField2 -- +```go +field Field2 int +``` + +@loc(RField2, "Field2") + + +[`(a.R).Field2` on pkg.go.dev](https://pkg.go.dev/mod.com/a#R.Field2) +-- @RHey -- +```go +func (r a.R) Hey() +``` + +[`(a.R).Hey` on pkg.go.dev](https://pkg.go.dev/mod.com/a#R.Hey) +-- @S1 -- +```go +type S1 struct { + F1 int //@loc(S1F1, "F1") + S2 //@loc(S1S2, "S2"),def("S2", S2),hover("S2", "S2", S2) + a.A //@def("A", AString),hover("A", "A", aA) + aAlias //@def("a", aAlias),hover("a", "aAlias", aAlias) +} +``` + +```go +// Embedded fields: +F2 int // through S2 +``` + +[`b.S1` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S1) +-- @S1F1 -- +```go +field F1 int +``` + +@loc(S1F1, "F1") + + +[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S1.F1) +-- @S1S2 -- +```go +field S2 S2 +``` + +@loc(S1S2, "S2"),def("S2", S2),hover("S2", "S2", S2) + + +[`(b.S1).S2` on 
pkg.go.dev](https://pkg.go.dev/mod.com/b#S1.S2) +-- @S2 -- +```go +type S2 struct { // size=32 (0x20) + F1 string //@loc(S2F1, "F1") + F2 int //@loc(S2F2, "F2") + *a.A //@def("A", AString),def("a",AImport) +} +``` + +```go +func (a.A) Hi() +``` + +[`b.S2` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2) +-- @S2F1 -- +```go +field F1 string +``` + +@loc(S2F1, "F1") + + +[`(b.S2).F1` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2.F1) +-- @S2F2 -- +```go +field F2 int +``` + +@loc(S2F2, "F2") + + +[`(b.S2).F2` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2.F2) +-- @SField -- +```go +field Field int +``` + +@loc(SField, "Field") + + +[`(a.S).Field` on pkg.go.dev](https://pkg.go.dev/mod.com/a#S.Field) +-- @aA -- +```go +type A string // size=16 (0x10) +``` + +@loc(AString, "A") + + +```go +func (a.A) Hi() +``` + +[`a.A` on pkg.go.dev](https://pkg.go.dev/mod.com/a#A) +-- @aAlias -- +```go +type aAlias = a.A // size=16 (0x10) +``` + +@loc(aAlias, "aAlias") + + +```go +func (a.A) Hi() +``` diff --git a/gopls/internal/regtest/marker/testdata/definition/import.txt b/gopls/internal/test/marker/testdata/definition/import.txt similarity index 92% rename from gopls/internal/regtest/marker/testdata/definition/import.txt rename to gopls/internal/test/marker/testdata/definition/import.txt index 9e5e5929aa9..2ae95a8c29b 100644 --- a/gopls/internal/regtest/marker/testdata/definition/import.txt +++ b/gopls/internal/test/marker/testdata/definition/import.txt @@ -1,4 +1,5 @@ This test checks definition and hover over imports. + -- go.mod -- module mod.com @@ -29,7 +30,7 @@ func _() { DoFoo() //@hover("DoFoo", "DoFoo", DoFoo) } --- @DoFoo/hover.md -- +-- @DoFoo -- ```go func DoFoo() ``` @@ -38,13 +39,13 @@ DoFoo does foo. 
[`foo.DoFoo` on pkg.go.dev](https://pkg.go.dev/mod.com/foo#DoFoo) --- @FooVar/hover.md -- +-- @FooVar -- ```go var _ Foo ``` variable of type foo.Foo --- @myFoo/hover.md -- +-- @myFoo -- ```go package myFoo ("mod.com/foo") ``` diff --git a/gopls/internal/regtest/marker/testdata/definition/misc.txt b/gopls/internal/test/marker/testdata/definition/misc.txt similarity index 84% rename from gopls/internal/regtest/marker/testdata/definition/misc.txt rename to gopls/internal/test/marker/testdata/definition/misc.txt index 48f5d340c43..7c6bc27f316 100644 --- a/gopls/internal/regtest/marker/testdata/definition/misc.txt +++ b/gopls/internal/test/marker/testdata/definition/misc.txt @@ -1,8 +1,15 @@ This test exercises miscellaneous definition and hover requests. + +Its size expectations assume a 64-bit machine. + -- go.mod -- module mod.com go 1.16 + +-- flags -- +-skip_goarch=386,arm + -- a.go -- package a //@loc(aPackage, re"package (a)"),hover(aPackage, aPackage, aPackage) @@ -118,44 +125,44 @@ func _() { var p Pos _ = p.Sum() //@def("()", PosSum),hover("()", `Sum`, hoverSum) } --- @aPackage/hover.md -- --- @hoverDeclBlocka/hover.md -- +-- @aPackage -- +-- @hoverDeclBlocka -- ```go -type a struct { +type a struct { // size=16 (0x10) x string } ``` 1st type declaration block --- @hoverDeclBlockb/hover.md -- +-- @hoverDeclBlockb -- ```go -type b struct{} +type b struct{} // size=0 ``` b has a comment --- @hoverDeclBlockc/hover.md -- +-- @hoverDeclBlockc -- ```go -type c struct { +type c struct { // size=16 (0x10) f string } ``` c is a struct --- @hoverDeclBlockd/hover.md -- +-- @hoverDeclBlockd -- ```go -type d string +type d string // size=16 (0x10) ``` 3rd type declaration block --- @hoverDeclBlocke/hover.md -- +-- @hoverDeclBlocke -- ```go -type e struct { +type e struct { // size=8 f float64 } ``` e has a comment --- @hoverH/hover.md -- +-- @hoverH -- ```go type H interface { Goodbye() @@ -163,7 +170,7 @@ type H interface { ``` [`a.H` on 
pkg.go.dev](https://pkg.go.dev/mod.com#H) --- @hoverI/hover.md -- +-- @hoverI -- ```go type I interface { B() @@ -171,8 +178,12 @@ type I interface { } ``` +```go +func (J) Hello() +``` + [`a.I` on pkg.go.dev](https://pkg.go.dev/mod.com#I) --- @hoverJ/hover.md -- +-- @hoverJ -- ```go type J interface { Hello() @@ -180,49 +191,49 @@ type J interface { ``` [`a.J` on pkg.go.dev](https://pkg.go.dev/mod.com#J) --- @hoverSum/hover.md -- +-- @hoverSum -- ```go -func (*Pos).Sum() int +func (p *Pos) Sum() int ``` [`(a.Pos).Sum` on pkg.go.dev](https://pkg.go.dev/mod.com#Pos.Sum) --- @hoverTestA/hover.md -- +-- @hoverTestA -- ```go func TestA(t *testing.T) ``` --- @hoverfield/hover.md -- +-- @hoverfield -- ```go field field string ``` --- @hoverg/hover.md -- +-- @hoverg -- ```go const g untyped int = 1 ``` When I hover on g, I should see this comment. --- @hoverh/hover.md -- +-- @hoverh -- ```go const h untyped int = 2 ``` Constant block. When I hover on h, I should see this comment. --- @hoverpx/hover.md -- +-- @hoverpx -- ```go field x int ``` @loc(PosX, "x"),loc(PosY, "y") --- @hoverx/hover.md -- +-- @hoverx -- ```go var x string ``` x is a variable. --- @hovery/hover.md -- +-- @hovery -- ```go var y int ``` --- @hoverz/hover.md -- +-- @hoverz -- ```go var z string ``` diff --git a/gopls/internal/test/marker/testdata/definition/standalone.txt b/gopls/internal/test/marker/testdata/definition/standalone.txt new file mode 100644 index 00000000000..6af1149184d --- /dev/null +++ b/gopls/internal/test/marker/testdata/definition/standalone.txt @@ -0,0 +1,42 @@ +This test checks the behavior of standalone packages, in particular documenting +our failure to support test files as standalone packages (golang/go#64233). 
+ +-- go.mod -- +module golang.org/lsptests/a + +go 1.20 + +-- a.go -- +package a + +func F() {} //@loc(F, "F") + +-- standalone.go -- +//go:build ignore +package main + +import "golang.org/lsptests/a" + +func main() { + a.F() //@def("F", F) +} + +-- standalone_test.go -- +//go:build ignore +package main //@diag("main", re"No packages found") + +import "golang.org/lsptests/a" + +func main() { + a.F() //@hovererr("F", "no package") +} + +-- standalone_x_test.go -- +//go:build ignore +package main_test //@diag("main", re"No packages found") + +import "golang.org/lsptests/a" + +func main() { + a.F() //@hovererr("F", "no package") +} diff --git a/gopls/internal/test/marker/testdata/diagnostics/addgowork.txt b/gopls/internal/test/marker/testdata/diagnostics/addgowork.txt new file mode 100644 index 00000000000..5fbd890e65f --- /dev/null +++ b/gopls/internal/test/marker/testdata/diagnostics/addgowork.txt @@ -0,0 +1,51 @@ +This test demonstrates diagnostics for adding a go.work file. + +Quick-fixes change files on disk, so are tested by integration tests. + +TODO(rfindley): improve the "cannot find package" import errors. + +-- skip -- +These diagnostics are no longer produced, because in golang/go#57979 +(zero-config gopls) we made gopls function independent of a go.work file. +Preserving this test as we may want to re-enable the code actions go manage +a go.work file. + +Note that in go.dev/issue/60584#issuecomment-1622238115, this test was flaky. +However, critical error logic has since been rewritten. 
+ +-- a/go.mod -- +module mod.com/a + +go 1.18 + +-- a/main.go -- +package main //@diag("main", re"add a go.work file") + +import "mod.com/a/lib" //@diag("\"mod.com", re"cannot find package") + +func main() { + _ = lib.C +} + +-- a/lib/lib.go -- +package lib //@diag("lib", re"add a go.work file") + +const C = "b" +-- b/go.mod -- +module mod.com/b + +go 1.18 + +-- b/main.go -- +package main //@diag("main", re"add a go.work file") + +import "mod.com/b/lib" //@diag("\"mod.com", re"cannot find package") + +func main() { + _ = lib.C +} + +-- b/lib/lib.go -- +package lib //@diag("lib", re"add a go.work file") + +const C = "b" diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/analyzers.txt b/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt similarity index 86% rename from gopls/internal/regtest/marker/testdata/diagnostics/analyzers.txt rename to gopls/internal/test/marker/testdata/diagnostics/analyzers.txt index 837a1163a52..38f8a81eb59 100644 --- a/gopls/internal/regtest/marker/testdata/diagnostics/analyzers.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt @@ -13,7 +13,6 @@ package analyzer import ( "fmt" - "log/slog" "sync" "testing" "time" @@ -34,11 +33,6 @@ func printfWrapper(format string, args ...interface{}) { fmt.Printf(format, args...) 
} -// slog -func _() { - slog.Info("msg", 1) //@diag("1", re`slog.Info arg "1" should be a string or a slog.Attr`) -} - // tests func Testbad(t *testing.T) { //@diag("", re"Testbad has malformed name: first letter after 'Test' must not be lowercase") } @@ -55,3 +49,20 @@ func _(ptr *int) { _ = *ptr //@diag("*ptr", re"nil dereference in load") } } + +// unusedwrite +func _(s struct{x int}) { + s.x = 1 //@diag("x", re"unused write to field x") +} + +-- bad_test_go121.go -- +//go:build go1.21 + +package analyzer + +import "log/slog" + +// slog +func _() { + slog.Info("msg", 1) //@diag("1", re`slog.Info arg "1" should be a string or a slog.Attr`) +} diff --git a/gopls/internal/test/marker/testdata/diagnostics/excludedfile.txt b/gopls/internal/test/marker/testdata/diagnostics/excludedfile.txt new file mode 100644 index 00000000000..ae3045b338d --- /dev/null +++ b/gopls/internal/test/marker/testdata/diagnostics/excludedfile.txt @@ -0,0 +1,36 @@ +This test demonstrates diagnostics for various forms of file exclusion. + +Note: this test used to also check the errors when a file was excluded due to +an inactive module, or mismatching GOOS/GOARCH, comment, but with zero-config +gopls (golang/go#57979) and improved build tag support (golang/go#29202), we no +longer get these errors. + +-- go.work -- +go 1.21 + +use ( + ./a +) +-- a/go.mod -- +module mod.com/a + +go 1.18 + +-- a/a.go -- +package a + +-- a/a_plan9.go -- +package a // Not excluded, due to improved build tag support. + +-- a/a_ignored.go -- +//go:build skip +package a //@diag(re"package (a)", re"excluded due to its build tags") + +-- b/go.mod -- +module mod.com/b + +go 1.18 + +-- b/b.go -- +package b // Not excluded, due to zero-config gopls. 
+ diff --git a/gopls/internal/test/marker/testdata/diagnostics/generated.txt b/gopls/internal/test/marker/testdata/diagnostics/generated.txt new file mode 100644 index 00000000000..7352f13aa94 --- /dev/null +++ b/gopls/internal/test/marker/testdata/diagnostics/generated.txt @@ -0,0 +1,21 @@ +Test of "undeclared" diagnostic in generated code. + +-- go.mod -- +module example.com +go 1.12 + +-- generated.go -- +package generated + +// Code generated by generator.go. DO NOT EDIT. + +func _() { + var y int //@diag("y", re"y.*declared (and|but) not used") +} + +-- generator.go -- +package generated + +func _() { + var x int //@diag("x", re"x.*declared (and|but) not used") +} diff --git a/gopls/internal/test/marker/testdata/diagnostics/initcycle.txt b/gopls/internal/test/marker/testdata/diagnostics/initcycle.txt new file mode 100644 index 00000000000..f306bccf52c --- /dev/null +++ b/gopls/internal/test/marker/testdata/diagnostics/initcycle.txt @@ -0,0 +1,17 @@ +This test verifies that gopls spreads initialization cycle errors across +multiple declarations. + +We set -ignore_extra_diags due to golang/go#65877: gopls produces redundant +diagnostics for initialization cycles. 
+ +-- flags -- +-ignore_extra_diags + +-- p.go -- +package p + +var X = Y //@diag("X", re"initialization cycle") + +var Y = Z //@diag("Y", re"initialization cycle") + +var Z = X //@diag("Z", re"initialization cycle") diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/issue56943.txt b/gopls/internal/test/marker/testdata/diagnostics/issue56943.txt similarity index 86% rename from gopls/internal/regtest/marker/testdata/diagnostics/issue56943.txt rename to gopls/internal/test/marker/testdata/diagnostics/issue56943.txt index f0b114bc42b..9695c0db0a2 100644 --- a/gopls/internal/regtest/marker/testdata/diagnostics/issue56943.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/issue56943.txt @@ -12,7 +12,7 @@ import ( ) func main() { - var a int //@diag(re"(a) int", re"a declared.*not used") + var a int //@diag(re"(a) int", re"a.*declared.*not used") var _ ast.Expr = node{} //@diag("node{}", re"missing.*exprNode") } diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/issue59005.txt b/gopls/internal/test/marker/testdata/diagnostics/issue59005.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/diagnostics/issue59005.txt rename to gopls/internal/test/marker/testdata/diagnostics/issue59005.txt diff --git a/gopls/internal/test/marker/testdata/diagnostics/issue60544.txt b/gopls/internal/test/marker/testdata/diagnostics/issue60544.txt new file mode 100644 index 00000000000..6b8d6ce0ad2 --- /dev/null +++ b/gopls/internal/test/marker/testdata/diagnostics/issue60544.txt @@ -0,0 +1,9 @@ +This test exercises a crash due to treatment of "comparable" in methodset +calculation (golang/go#60544). 
+ +-- main.go -- +package main + +type X struct{} + +func (X) test(x comparable) {} //@diag("comparable", re"outside a type constraint") diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/issue60605.txt b/gopls/internal/test/marker/testdata/diagnostics/issue60605.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/diagnostics/issue60605.txt rename to gopls/internal/test/marker/testdata/diagnostics/issue60605.txt diff --git a/gopls/internal/test/marker/testdata/diagnostics/issue64547.txt b/gopls/internal/test/marker/testdata/diagnostics/issue64547.txt new file mode 100644 index 00000000000..3f3e13bdf67 --- /dev/null +++ b/gopls/internal/test/marker/testdata/diagnostics/issue64547.txt @@ -0,0 +1,14 @@ +This test checks the fix for golang/go#64547: the lostcancel analyzer reports +diagnostics that overflow the file. + +-- p.go -- +package p + +import "context" + +func _() { + _, cancel := context.WithCancel(context.Background()) //@diag("_, cancel", re"not used on all paths") + if false { + cancel() + } +} //@diag("}", re"may be reached without using the cancel") diff --git a/gopls/internal/test/marker/testdata/diagnostics/osarch_suffix.txt b/gopls/internal/test/marker/testdata/diagnostics/osarch_suffix.txt new file mode 100644 index 00000000000..95336085b2f --- /dev/null +++ b/gopls/internal/test/marker/testdata/diagnostics/osarch_suffix.txt @@ -0,0 +1,46 @@ +This test verifies that we add an [os,arch] suffix to each diagnostic +that doesn't appear in the default build (=runtime.{GOOS,GOARCH}). + +See golang/go#65496. + +The two p/*.go files below are written to trigger the same diagnostic +(range, message, source, etc) but varying only by URI. + +In the q test, a single location in the common code q.go has two +diagnostics, one of which is tagged. + +This test would fail on openbsd/mips64 because it will be +the same as the default build, so we skip that platform. 
+ +-- flags -- +-skip_goos=openbsd + +-- go.mod -- +module example.com + +-- p/p.go -- +package p + +var _ fmt.Stringer //@diag("fmt", re"unde.*: fmt$") + +-- p/p_openbsd_mips64.go -- +package p + +var _ fmt.Stringer //@diag("fmt", re"unde.*: fmt \\[openbsd,mips64\\]") + +-- q/q_default.go -- +//+build !openbsd && !mips64 + +package q + +func f(int) int + +-- q/q_openbsd_mips64.go -- +package q + +func f(string) int + +-- q/q.go -- +package q + +var _ = f() //@ diag(")", re`.*want \(string\) \[openbsd,mips64\]`), diag(")", re`.*want \(int\)$`) diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/parseerr.txt b/gopls/internal/test/marker/testdata/diagnostics/parseerr.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/diagnostics/parseerr.txt rename to gopls/internal/test/marker/testdata/diagnostics/parseerr.txt diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/rundespiteerrors.txt b/gopls/internal/test/marker/testdata/diagnostics/rundespiteerrors.txt similarity index 77% rename from gopls/internal/regtest/marker/testdata/diagnostics/rundespiteerrors.txt rename to gopls/internal/test/marker/testdata/diagnostics/rundespiteerrors.txt index 70e4ebba980..b14f4dfabd0 100644 --- a/gopls/internal/regtest/marker/testdata/diagnostics/rundespiteerrors.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/rundespiteerrors.txt @@ -1,16 +1,10 @@ This test verifies that analyzers without RunDespiteErrors are not executed on a package containing type errors (see issue #54762). -We require go1.18 because the range of the `1 + ""` go/types error -changed then, and the new @diag marker is quite particular. 
- -- go.mod -- module example.com go 1.12 --- flags -- --min_go=go1.18 - -- a.go -- package a diff --git a/gopls/internal/test/marker/testdata/diagnostics/stdversion.txt b/gopls/internal/test/marker/testdata/diagnostics/stdversion.txt new file mode 100644 index 00000000000..652ddd6b56a --- /dev/null +++ b/gopls/internal/test/marker/testdata/diagnostics/stdversion.txt @@ -0,0 +1,89 @@ +Test of "too new" diagnostics from the stdversion analyzer. + +This test references go1.21 symbols from std, but the analyzer itself +depends on the go1.22 behavior of versions.FileVersion. + +See also go/analysis/passes/stdversion/testdata/test.txtar, +which runs the same test in the analysistest framework. + +-- flags -- +-min_go=go1.22 + +-- go.mod -- +module example.com + +go 1.21 + +-- a/a.go -- +package a + +import "go/types" + +func _() { + // old package-level type + var _ types.Info // ok: defined by go1.0 + + // new field of older type + _ = new(types.Info).FileVersions //@diag("FileVersions", re`types.FileVersions requires go1.22 or later \(module is go1.21\)`) + + // new method of older type + _ = new(types.Info).PkgNameOf //@diag("PkgNameOf", re`types.PkgNameOf requires go1.22 or later \(module is go1.21\)`) + + // new package-level type + var a types.Alias //@diag("Alias", re`types.Alias requires go1.22 or later \(module is go1.21\)`) + + // new method of new type + a.Underlying() // no diagnostic +} + +-- sub/go.mod -- +module example.com/sub + +go 1.21 + +-- sub/sub.go -- +package sub + +import "go/types" + +func _() { + // old package-level type + var _ types.Info // ok: defined by go1.0 + + // new field of older type + _ = new(types.Info).FileVersions //@diag("FileVersions", re`types.FileVersions requires go1.22 or later \(module is go1.21\)`) + + // new method of older type + _ = new(types.Info).PkgNameOf //@diag("PkgNameOf", re`types.PkgNameOf requires go1.22 or later \(module is go1.21\)`) + + // new package-level type + var a types.Alias //@diag("Alias", 
re`types.Alias requires go1.22 or later \(module is go1.21\)`) + + // new method of new type + a.Underlying() // no diagnostic +} + +-- sub/tagged.go -- +//go:build go1.22 + +package sub + +import "go/types" + +func _() { + // old package-level type + var _ types.Info + + // new field of older type + _ = new(types.Info).FileVersions + + // new method of older type + _ = new(types.Info).PkgNameOf + + // new package-level type + var a types.Alias + + // new method of new type + a.Underlying() +} + diff --git a/gopls/internal/test/marker/testdata/diagnostics/strangefiles.txt b/gopls/internal/test/marker/testdata/diagnostics/strangefiles.txt new file mode 100644 index 00000000000..cc2ad8061ca --- /dev/null +++ b/gopls/internal/test/marker/testdata/diagnostics/strangefiles.txt @@ -0,0 +1,21 @@ +This test checks diagnostics on files that are strange for one reason or +another. + +Note(rfindley): ported from the old marker tests. I'm not sure why these were +written originally. + +-ignore_extra_diags is required because the marker framework fails for +noparse.go, and we therefore can't match the EOF error. 
+ +-- flags -- +-ignore_extra_diags + +-- go.mod -- +module golang.org/lsptests + +go 1.18 +-- %percent/perc%ent.go -- +package percent //@diag("percent", re"No packages") + +-- noparse/noparse.go -- + diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/typeerr.txt b/gopls/internal/test/marker/testdata/diagnostics/typeerr.txt similarity index 97% rename from gopls/internal/regtest/marker/testdata/diagnostics/typeerr.txt rename to gopls/internal/test/marker/testdata/diagnostics/typeerr.txt index c14b9d734ba..686b05c371e 100644 --- a/gopls/internal/regtest/marker/testdata/diagnostics/typeerr.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/typeerr.txt @@ -23,8 +23,6 @@ func f(x int) { } -- @fix/typeerr.go -- ---- before -+++ after @@ -6 +6 @@ - x := 123 //@diag(re"x := 123", re"no new variables"), suggestedfix(re"():", re"no new variables", fix) + x = 123 //@diag(re"x := 123", re"no new variables"), suggestedfix(re"():", re"no new variables", fix) diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/useinternal.txt b/gopls/internal/test/marker/testdata/diagnostics/useinternal.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/diagnostics/useinternal.txt rename to gopls/internal/test/marker/testdata/diagnostics/useinternal.txt diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/usemodule.txt b/gopls/internal/test/marker/testdata/diagnostics/usemodule.txt similarity index 77% rename from gopls/internal/regtest/marker/testdata/diagnostics/usemodule.txt rename to gopls/internal/test/marker/testdata/diagnostics/usemodule.txt index 35d2e43bf23..699a4166692 100644 --- a/gopls/internal/regtest/marker/testdata/diagnostics/usemodule.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/usemodule.txt @@ -1,10 +1,11 @@ This test demonstrates diagnostics for a module that is missing from the go.work file. -Quick-fixes change files on disk, so are tested by regtests. 
+Quick-fixes change files on disk, so are tested by integration tests. --- flags -- --min_go=go1.18 +-- skip -- +Temporary skip due to golang/go#57979, with zero-config gopls, these modules +are no longer orphaned. -- go.work -- go 1.21 diff --git a/gopls/internal/test/marker/testdata/fixedbugs/issue59318.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue59318.txt new file mode 100644 index 00000000000..8a738718940 --- /dev/null +++ b/gopls/internal/test/marker/testdata/fixedbugs/issue59318.txt @@ -0,0 +1,20 @@ +Previously, this test verifies that we can load multiple orphaned files as +command-line-arguments packages. In the distant past, we would load only one +because go/packages returns at most one command-line-arguments package per +query. + +With zero-config gopls, these packages are successfully loaded as ad-hoc +packages. + +-- a/main.go -- +package main + +func main() { + var a int //@diag(re"var (a)", re"not used") +} +-- b/main.go -- +package main + +func main() { + var b int //@diag(re"var (b)", re"not used") +} diff --git a/gopls/internal/regtest/marker/testdata/fixedbugs/issue59944.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue59944.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/fixedbugs/issue59944.txt rename to gopls/internal/test/marker/testdata/fixedbugs/issue59944.txt diff --git a/gopls/internal/test/marker/testdata/fixedbugs/issue66109.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue66109.txt new file mode 100644 index 00000000000..c73390066ae --- /dev/null +++ b/gopls/internal/test/marker/testdata/fixedbugs/issue66109.txt @@ -0,0 +1,25 @@ +This test exercises the crash in golang/go#66109: a dangling reference due to +test variants of a command-line-arguments package. 
+ +-- flags -- +-min_go=go1.22 + +-- go.mod -- +module example.com/tools + +go 1.22 + +-- tools_test.go -- +//go:build tools + +package tools //@diag("tools", re"No packages found") + +import ( + _ "example.com/tools/tool" +) + +-- tool/tool.go -- +package main + +func main() { +} diff --git a/gopls/internal/test/marker/testdata/fixedbugs/issue66250.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue66250.txt new file mode 100644 index 00000000000..748d19de6d4 --- /dev/null +++ b/gopls/internal/test/marker/testdata/fixedbugs/issue66250.txt @@ -0,0 +1,17 @@ +This bug checks the fix for golang/go#66250. Package references should not +crash when one package file lacks a package name. + +TODO(rfindley): the -ignore_extra_diags flag is only necessary because of +problems matching diagnostics in the broken file, likely due to poor parser +recovery. + +-- flags -- +-ignore_extra_diags + +-- a.go -- +package x //@refs("x", "x") + +-- b.go -- + +func _() { +} diff --git a/gopls/internal/regtest/marker/testdata/foldingrange/a.txt b/gopls/internal/test/marker/testdata/foldingrange/a.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/foldingrange/a.txt rename to gopls/internal/test/marker/testdata/foldingrange/a.txt diff --git a/gopls/internal/regtest/marker/testdata/foldingrange/a_lineonly.txt b/gopls/internal/test/marker/testdata/foldingrange/a_lineonly.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/foldingrange/a_lineonly.txt rename to gopls/internal/test/marker/testdata/foldingrange/a_lineonly.txt diff --git a/gopls/internal/regtest/marker/testdata/foldingrange/bad.txt b/gopls/internal/test/marker/testdata/foldingrange/bad.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/foldingrange/bad.txt rename to gopls/internal/test/marker/testdata/foldingrange/bad.txt diff --git a/gopls/internal/regtest/marker/testdata/format/format.txt b/gopls/internal/test/marker/testdata/format/format.txt 
similarity index 84% rename from gopls/internal/regtest/marker/testdata/format/format.txt rename to gopls/internal/test/marker/testdata/format/format.txt index b1437386768..75b8997860a 100644 --- a/gopls/internal/regtest/marker/testdata/format/format.txt +++ b/gopls/internal/test/marker/testdata/format/format.txt @@ -39,11 +39,11 @@ func hello() { - var x int //@diag("x", re"x declared (and|but) not used") + var x int //@diag("x", re"x.*declared (and|but) not used") } func hi() { - runtime.GOROOT() + runtime.NumCPU() fmt.Printf("") log.Printf("") @@ -59,11 +59,11 @@ import ( func hello() { - var x int //@diag("x", re"x declared (and|but) not used") + var x int //@diag("x", re"x.*declared (and|but) not used") } func hi() { - runtime.GOROOT() + runtime.NumCPU() fmt.Printf("") log.Printf("") diff --git a/gopls/internal/regtest/marker/testdata/format/issue59554.txt b/gopls/internal/test/marker/testdata/format/issue59554.txt similarity index 90% rename from gopls/internal/regtest/marker/testdata/format/issue59554.txt rename to gopls/internal/test/marker/testdata/format/issue59554.txt index 1e49e3884d7..816c9d1e06f 100644 --- a/gopls/internal/regtest/marker/testdata/format/issue59554.txt +++ b/gopls/internal/test/marker/testdata/format/issue59554.txt @@ -4,15 +4,16 @@ directives. Note that gofumpt is needed for this test case, as it reformats var decls into short var decls. -Note that gofumpt requires Go 1.18. +Note that gofumpt requires Go 1.20. 
-- flags -- --min_go=go1.18 +-min_go=go1.20 -- settings.json -- { "formatting.gofumpt": true } + -- main.go -- package main //@format(main) diff --git a/gopls/internal/regtest/marker/testdata/format/noparse.txt b/gopls/internal/test/marker/testdata/format/noparse.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/format/noparse.txt rename to gopls/internal/test/marker/testdata/format/noparse.txt diff --git a/gopls/internal/test/marker/testdata/highlight/controlflow.txt b/gopls/internal/test/marker/testdata/highlight/controlflow.txt new file mode 100644 index 00000000000..25cc9394a47 --- /dev/null +++ b/gopls/internal/test/marker/testdata/highlight/controlflow.txt @@ -0,0 +1,71 @@ +This test verifies document highlighting for control flow. + +-- go.mod -- +module mod.com + +go 1.18 + +-- p.go -- +package p + +-- issue60589.go -- +package p + +// This test verifies that control flow lighlighting correctly +// accounts for multi-name result parameters. +// In golang/go#60589, it did not. + +func _() (foo int, bar, baz string) { //@ loc(func, "func"), loc(foo, "foo"), loc(fooint, "foo int"), loc(int, "int"), loc(bar, "bar"), loc(beforebaz, " baz"), loc(baz, "baz"), loc(barbazstring, "bar, baz string"), loc(beforestring, re`() string`), loc(string, "string") + return 0, "1", "2" //@ loc(return, `return 0, "1", "2"`), loc(l0, "0"), loc(l1, `"1"`), loc(l2, `"2"`) +} + +// Assertions, expressed here to avoid clutter above. +// Note that when the cursor is over the field type, there is some +// (likely harmless) redundancy. 
+ +//@ highlight(func, func, return) +//@ highlight(foo, foo, l0) +//@ highlight(int, fooint, int, l0) +//@ highlight(bar, bar, l1) +//@ highlight(beforebaz) +//@ highlight(baz, baz, l2) +//@ highlight(beforestring, baz, l2) +//@ highlight(string, barbazstring, string, l1, l2) +//@ highlight(l0, foo, l0) +//@ highlight(l1, bar, l1) +//@ highlight(l2, baz, l2) + +// Check that duplicate result names do not cause +// inaccurate highlighting. + +func _() (x, x int32) { //@ loc(x1, re`\((x)`), loc(x2, re`(x) int`), diag(x1, re"redeclared"), diag(x2, re"redeclared") + return 1, 2 //@ loc(one, "1"), loc(two, "2") +} + +//@ highlight(one, one, x1) +//@ highlight(two, two, x2) +//@ highlight(x1, x1, one) +//@ highlight(x2, x2, two) + +-- issue65516.go -- +package p + +// This test checks that gopls doesn't crash while highlighting +// functions with no body (golang/go#65516). + +func Foo() (int, string) //@highlight("int", "int"), highlight("func", "func") + +-- issue65952.go -- +package p + +// This test checks that gopls doesn't crash while highlighting +// return values in functions with no results. + +func _() { + return 0 //@highlight("0", "0"), diag("0", re"too many return") +} + +func _() () { + // TODO(golang/go#65966): fix the triplicate diagnostics here. 
+ return 0 //@highlight("0", "0"), diag("0", re"too many return"), diag("0", re"too many return"), diag("0", re"too many return") +} diff --git a/gopls/internal/regtest/marker/testdata/highlight/highlight.txt b/gopls/internal/test/marker/testdata/highlight/highlight.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/highlight/highlight.txt rename to gopls/internal/test/marker/testdata/highlight/highlight.txt diff --git a/gopls/internal/regtest/marker/testdata/highlight/issue60435.txt b/gopls/internal/test/marker/testdata/highlight/issue60435.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/highlight/issue60435.txt rename to gopls/internal/test/marker/testdata/highlight/issue60435.txt diff --git a/gopls/internal/test/marker/testdata/highlight/switchbreak.txt b/gopls/internal/test/marker/testdata/highlight/switchbreak.txt new file mode 100644 index 00000000000..b486ad1d80d --- /dev/null +++ b/gopls/internal/test/marker/testdata/highlight/switchbreak.txt @@ -0,0 +1,21 @@ +This is a regression test for issue 65752: a break in a switch should +highlight the switch, not the enclosing loop. + +-- a.go -- +package a + +func _(x any) { + for { + // type switch + switch x.(type) { //@loc(tswitch, "switch") + default: + break //@highlight("break", tswitch, "break") + } + + // value switch + switch { //@loc(vswitch, "switch") + default: + break //@highlight("break", vswitch, "break") + } + } +} diff --git a/gopls/internal/regtest/marker/testdata/hover/basiclit.txt b/gopls/internal/test/marker/testdata/hover/basiclit.txt similarity index 88% rename from gopls/internal/regtest/marker/testdata/hover/basiclit.txt rename to gopls/internal/test/marker/testdata/hover/basiclit.txt index 911de1c7471..9c26b2a2f07 100644 --- a/gopls/internal/regtest/marker/testdata/hover/basiclit.txt +++ b/gopls/internal/test/marker/testdata/hover/basiclit.txt @@ -1,4 +1,5 @@ This test checks gopls behavior when hovering over basic literals. 
+ -- basiclit.go -- package basiclit @@ -53,29 +54,29 @@ func _() { _ = 0X1234567890 //@hover("0X1234567890", "0X1234567890", hexNumber) _ = 0x1000000000000000000 //@hover("0x1", "0x1000000000000000000", bigHex) ) --- @bigHex/hover.md -- +-- @bigHex -- 4722366482869645213696 --- @binaryNumber/hover.md -- +-- @binaryNumber -- 9 --- @control/hover.md -- +-- @control -- U+0007, control --- @hexNumber/hover.md -- +-- @hexNumber -- 78187493520 --- @latinA/hover.md -- +-- @latinA -- 'a', U+0061, LATIN SMALL LETTER A --- @latinAHex/hover.md -- +-- @latinAHex -- 97, 'a', U+0061, LATIN SMALL LETTER A --- @leftCurly/hover.md -- +-- @leftCurly -- '{', U+007B, LEFT CURLY BRACKET --- @octalNumber/hover.md -- +-- @octalNumber -- 63 --- @summation/hover.md -- +-- @summation -- '∑', U+2211, N-ARY SUMMATION --- @summationHex/hover.md -- +-- @summationHex -- 8721, '∑', U+2211, N-ARY SUMMATION --- @tilde/hover.md -- +-- @tilde -- '~', U+007E, TILDE --- @waterWave/hover.md -- +-- @waterWave -- '🌊', U+1F30A, WATER WAVE --- @waterWaveHex/hover.md -- +-- @waterWaveHex -- 127754, '🌊', U+1F30A, WATER WAVE diff --git a/gopls/internal/regtest/marker/testdata/hover/const.txt b/gopls/internal/test/marker/testdata/hover/const.txt similarity index 88% rename from gopls/internal/regtest/marker/testdata/hover/const.txt rename to gopls/internal/test/marker/testdata/hover/const.txt index 6effaf49de6..179ff155357 100644 --- a/gopls/internal/regtest/marker/testdata/hover/const.txt +++ b/gopls/internal/test/marker/testdata/hover/const.txt @@ -1,8 +1,5 @@ This test checks hovering over constants. 
--- flags -- --min_go=go1.17 - -- go.mod -- module mod.com @@ -81,7 +78,7 @@ func _() { _ = math.Log2E //@hover("Log2E", "Log2E", log2eConst) } --- @bX/hover.md -- +-- @bX -- ```go const X untyped int = 0 ``` @@ -90,63 +87,63 @@ const X untyped int = 0 [`c.X` on pkg.go.dev](https://pkg.go.dev/mod.com#X) --- @dur/hover.md -- +-- @dur -- ```go const dur time.Duration = 15*time.Minute + 10*time.Second + 350*time.Millisecond // 15m10.35s ``` dur is a constant of type time.Duration. --- @decimalConst/hover.md -- +-- @decimalConst -- ```go const decimal untyped int = 153 ``` no inline comment --- @hexConst/hover.md -- +-- @hexConst -- ```go const hex untyped int = 0xe34e // 58190 ``` --- @binConst/hover.md -- +-- @binConst -- ```go const bin untyped int = 0b1001001 // 73 ``` --- @numberWithUnderscoreConst/hover.md -- +-- @numberWithUnderscoreConst -- ```go const numberWithUnderscore int64 = 10_000_000_000 // 10000000000 ``` --- @octalConst/hover.md -- +-- @octalConst -- ```go const octal untyped int = 0o777 // 511 ``` --- @exprConst/hover.md -- +-- @exprConst -- ```go const expr untyped int = 2 << (0b111&0b101 - 2) // 16 ``` --- @boolConst/hover.md -- +-- @boolConst -- ```go const boolean untyped bool = (55 - 3) == (26 * 2) // true ``` --- @ln10Const/hover.md -- +-- @ln10Const -- ```go const ln10 untyped float = 2.30258509299404568401799145468436420760110148862877297603332790 // 2.30259 ``` --- @aIota/hover.md -- +-- @aIota -- ```go const a untyped int = 1 << iota // 1 ``` --- @bIota/hover.md -- +-- @bIota -- ```go const b untyped int = 2 ``` --- @strConst/hover.md -- +-- @strConst -- ```go const str untyped string = "hello world" ``` --- @longStrConst/hover.md -- +-- @longStrConst -- ```go const longStr untyped string = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Curabitur e... 
``` --- @log2eConst/hover.md -- +-- @log2eConst -- ```go const math.Log2E untyped float = 1 / Ln2 // 1.4427 ``` diff --git a/gopls/internal/test/marker/testdata/hover/embed.txt b/gopls/internal/test/marker/testdata/hover/embed.txt new file mode 100644 index 00000000000..1dc3fcbfa12 --- /dev/null +++ b/gopls/internal/test/marker/testdata/hover/embed.txt @@ -0,0 +1,57 @@ +This test checks that hover reports accessible embedded fields +(after the doc comment and before the accessible methods). + +-- go.mod -- +module example.com + +go 1.18 + +-- q/q.go -- +package q + +type Q struct { + One int + two string + q2[chan int] +} + +type q2[T any] struct { + Three *T + four string +} + +-- p.go -- +package p + +import "example.com/q" + +// doc +type P struct { + q.Q +} + +func (P) m() {} + +var p P //@hover("P", "P", P) + +-- @P -- +```go +type P struct { + q.Q +} +``` + +doc + + +```go +// Embedded fields: +One int // through Q +Three *chan int // through Q.q2 +``` + +```go +func (P) m() +``` + +[`p.P` on pkg.go.dev](https://pkg.go.dev/example.com#P) diff --git a/gopls/internal/test/marker/testdata/hover/generics.txt b/gopls/internal/test/marker/testdata/hover/generics.txt new file mode 100644 index 00000000000..86e2b2ce17b --- /dev/null +++ b/gopls/internal/test/marker/testdata/hover/generics.txt @@ -0,0 +1,85 @@ +This file contains tests for hovering over generic Go code. + +Requires go1.20+ for the new go/doc/comment package, and a change in Go 1.20 +that affected the formatting of constraint interfaces. + +-- flags -- +-min_go=go1.20 + +-- go.mod -- +// A go.mod is require for correct pkgsite links. +// TODO(rfindley): don't link to ad-hoc or command-line-arguments packages! 
+module mod.com + +go 1.18 + +-- generics.go -- +package generics + +type value[T any] struct { //hover("lue", "value", value),hover("T", "T", valueT) + val T //@hover("T", "T", valuevalT) + Q int64 //@hover("Q", "Q", valueQ) +} + +type Value[T any] struct { //@hover("T", "T", ValueT) + val T //@hover("T", "T", ValuevalT) + Q int64 //@hover("Q", "Q", ValueQ) +} + +func F[P interface{ ~int | string }]() { //@hover("P", "P", Ptparam) + var _ P //@hover("P","P",Pvar) +} + +-- inferred.go -- +package generics + +func app[S interface{ ~[]E }, E interface{}](s S, e E) S { + return append(s, e) +} + +func _() { + _ = app[[]int] //@hover("app", "app", appint) + _ = app[[]int, int] //@hover("app", "app", appint) + _ = app[[]int]([]int{}, 0) //@hover("app", "app", appint), diag("[[]int]", re"unnecessary") + _ = app([]int{}, 0) //@hover("app", "app", appint) +} + +-- @ValueQ -- +```go +field Q int64 // size=8 +``` + +@hover("Q", "Q", ValueQ) + + +[`(generics.Value).Q` on pkg.go.dev](https://pkg.go.dev/mod.com#Value.Q) +-- @ValueT -- +```go +type parameter T any +``` +-- @ValuevalT -- +```go +type parameter T any +``` +-- @appint -- +```go +func app(s []int, e int) []int // func[S interface{~[]E}, E interface{}](s S, e E) S +``` +-- @valueQ -- +```go +field Q int64 // size=8 +``` + +@hover("Q", "Q", valueQ) +-- @valuevalT -- +```go +type parameter T any +``` +-- @Ptparam -- +```go +type parameter P interface{~int | string} +``` +-- @Pvar -- +```go +type parameter P interface{~int | string} +``` diff --git a/gopls/internal/regtest/marker/testdata/hover/godef.txt b/gopls/internal/test/marker/testdata/hover/godef.txt similarity index 88% rename from gopls/internal/regtest/marker/testdata/hover/godef.txt rename to gopls/internal/test/marker/testdata/hover/godef.txt index e6a67616302..9b2e7ec2ce3 100644 --- a/gopls/internal/regtest/marker/testdata/hover/godef.txt +++ b/gopls/internal/test/marker/testdata/hover/godef.txt @@ -1,6 +1,14 @@ This test was ported from 'godef' in the old 
marker tests. It tests various hover and definition requests. +Requires go1.19+ for the new go/doc/comment package. + +TODO(adonovan): figure out why this test also fails +without -min_go=go1.20. Or just wait... + +-- flags -- +-min_go=go1.19 + -- flags -- -min_go=go1.20 @@ -20,11 +28,11 @@ func TestA2(t *testing.T) { //@hover("TestA2", "TestA2", TestA2) Nonexistant() //@diag("Nonexistant", re"(undeclared name|undefined): Nonexistant") } --- @TestA2/hover.md -- +-- @TestA2 -- ```go func TestA2(t *testing.T) ``` --- @ember/hover.md -- +-- @ember -- ```go field Member string ``` @@ -88,13 +96,13 @@ func (n *NextThing) Method3() int { var nextThing NextThing //@hover("NextThing", "NextThing", NextThing), def("NextThing", NextThing) --- @ings/hover.md -- +-- @ings -- ```go func Things(val []string) []Thing ``` [`a.Things` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Things) --- @ther/hover.md -- +-- @ther -- ```go var Other Thing ``` @@ -103,35 +111,47 @@ var Other Thing [`a.Other` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Other) --- @a/hover.md -- --- @ing/hover.md -- +-- @a -- +-- @ing -- ```go type Thing struct { Member string //@loc(Member, "Member") } +``` -func (Thing).Method(i int) string -func (*Thing).Method2(i int, j int) (error, string) -func (Thing).Method3() -func (*Thing).private() +```go +func (t Thing) Method(i int) string +func (t *Thing) Method2(i int, j int) (error, string) +func (t Thing) Method3() +func (t *Thing) private() ``` [`a.Thing` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Thing) --- @NextThing/hover.md -- +-- @NextThing -- ```go type NextThing struct { Thing Value int } +``` -func (*NextThing).Method3() int -func (NextThing).another() string +```go +// Embedded fields: +Member string // through Thing +``` + +```go +func (t Thing) Method(i int) string +func (t *Thing) Method2(i int, j int) (error, string) +func (n *NextThing) Method3() int +func (n NextThing) another() string +func (t *Thing) private() ``` [`a.NextThing` on 
pkg.go.dev](https://pkg.go.dev/godef.test/a#NextThing) --- @eth/hover.md -- +-- @eth -- ```go -func (Thing).Method(i int) string +func (t Thing) Method(i int) string ``` [`(a.Thing).Method` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Thing.Method) @@ -152,15 +172,15 @@ func TypeStuff() { } } --- @inty/hover.md -- +-- @inty -- ```go var y int ``` --- @stringy/hover.md -- +-- @stringy -- ```go var y string ``` --- @y/hover.md -- +-- @y -- ```go var y interface{} ``` @@ -312,37 +332,37 @@ func _() { _ = r[0].x //@def("x", returnX) _ = r[0].y //@def("y", returnY) } --- @hoverStructKeyX/hover.md -- +-- @hoverStructKeyX -- ```go field x string ``` X value field --- @hoverStructKeyY/hover.md -- +-- @hoverStructKeyY -- ```go field y string ``` Y key field --- @nestedNumber/hover.md -- +-- @nestedNumber -- ```go field number int64 ``` nested number --- @nestedString/hover.md -- +-- @nestedString -- ```go field str string ``` nested string --- @openMethod/hover.md -- +-- @openMethod -- ```go -func (interface).open() error +func (interface) open() error ``` open method comment --- @nestedMap/hover.md -- +-- @nestedMap -- ```go field m map[string]float64 ``` @@ -380,15 +400,15 @@ func _() { fmt.Println(x) //@hover("x", "x", xInt) } } --- @xInt/hover.md -- +-- @xInt -- ```go var x int ``` --- @xInterface/hover.md -- +-- @xInterface -- ```go var x interface{} ``` --- @xString/hover.md -- +-- @xString -- ```go var x string ``` diff --git a/gopls/internal/regtest/marker/testdata/hover/goprivate.txt b/gopls/internal/test/marker/testdata/hover/goprivate.txt similarity index 86% rename from gopls/internal/regtest/marker/testdata/hover/goprivate.txt rename to gopls/internal/test/marker/testdata/hover/goprivate.txt index 4c309ef38cf..202b4a11314 100644 --- a/gopls/internal/regtest/marker/testdata/hover/goprivate.txt +++ b/gopls/internal/test/marker/testdata/hover/goprivate.txt @@ -1,4 +1,5 @@ This test checks that links in hover obey GOPRIVATE. 
+ -- env -- GOPRIVATE=mod.com -- go.mod -- @@ -13,15 +14,15 @@ package lib // GOPRIVATE should also match nested packages. type L struct{} //@hover("L", "L", L) --- @L/hover.md -- +-- @L -- ```go -type L struct{} +type L struct{} // size=0 ``` GOPRIVATE should also match nested packages. --- @T/hover.md -- +-- @T -- ```go -type T struct{} +type T struct{} // size=0 ``` T should not be linked, as it is private. diff --git a/gopls/internal/regtest/marker/testdata/hover/hover.txt b/gopls/internal/test/marker/testdata/hover/hover.txt similarity index 89% rename from gopls/internal/regtest/marker/testdata/hover/hover.txt rename to gopls/internal/test/marker/testdata/hover/hover.txt index f6a54329a6c..b5e4a88434b 100644 --- a/gopls/internal/regtest/marker/testdata/hover/hover.txt +++ b/gopls/internal/test/marker/testdata/hover/hover.txt @@ -1,4 +1,5 @@ This test demonstrates some features of the new marker test runner. + -- a.go -- package a @@ -13,17 +14,17 @@ func _() { println(x) //@hover("x", "x", xint),hover(")", "x", xint) } } --- @abc/hover.md -- +-- @abc -- ```go const abc untyped int = 0x2a // 42 ``` @hover("b", "abc", abc),hover(" =", "abc", abc) --- @x/hover.md -- +-- @x -- ```go var x interface{} ``` --- @xint/hover.md -- +-- @xint -- ```go var x int ``` diff --git a/gopls/internal/test/marker/testdata/hover/issues.txt b/gopls/internal/test/marker/testdata/hover/issues.txt new file mode 100644 index 00000000000..6212964dff2 --- /dev/null +++ b/gopls/internal/test/marker/testdata/hover/issues.txt @@ -0,0 +1,22 @@ +This test verifies fixes for various issues reported for hover. + +-- go.mod -- +module golang.org/lsptests + +-- issue64239/p.go -- +package issue64239 + +// golang/go#64239: hover fails for objects in the unsafe package. + +import "unsafe" + +var _ = unsafe.Sizeof(struct{}{}) //@hover("Sizeof", "Sizeof", "`Sizeof` on pkg.go.dev") + +-- issue64237/p.go -- +package issue64237 + +// golang/go#64237: hover panics for broken imports. 
+ +import "golang.org/lsptests/nonexistant" //@diag("\"golang", re"could not import") + +var _ = nonexistant.Value //@hovererr("nonexistant", "no package data") diff --git a/gopls/internal/test/marker/testdata/hover/linkable.txt b/gopls/internal/test/marker/testdata/hover/linkable.txt new file mode 100644 index 00000000000..fefedbceab6 --- /dev/null +++ b/gopls/internal/test/marker/testdata/hover/linkable.txt @@ -0,0 +1,134 @@ +This test checks that we correctly determine pkgsite links for various +identifiers. + +We should only produce links that work, meaning the object is reachable via the +package's public API. + +-- go.mod -- +module mod.com + +go 1.18 +-- p.go -- +package p + +type E struct { + Embed int64 +} + +// T is in the package scope, and so should be linkable. +type T struct{ //@hover("T", "T", T) + // Only exported fields should be linkable + + f int64 //@hover("f", "f", f) + F int64 //@hover("F", "F", F) + + E + + // TODO(rfindley): is the link here correct? It ignores N. + N struct { + // Nested fields should also be linkable. + Nested int64 //@hover("Nested", "Nested", Nested) + } +} +// M is an exported method, and so should be linkable. +func (T) M() {} + +// m is not exported, and so should not be linkable. +func (T) m() {} + +func _() { + var t T + + // Embedded fields should be linkable. + _ = t.Embed //@hover("Embed", "Embed", Embed) + + // Local variables should not be linkable, even if they are capitalized. + var X int64 //@hover("X", "X", X) + _ = X + + // Local types should not be linkable, even if they are capitalized. + type Local struct { //@hover("Local", "Local", Local) + E + } + + // But the embedded field should still be linkable. 
+ var l Local + _ = l.Embed //@hover("Embed", "Embed", Embed) +} +-- @Embed -- +```go +field Embed int64 +``` + +[`(p.E).Embed` on pkg.go.dev](https://pkg.go.dev/mod.com#E.Embed) +-- @F -- +```go +field F int64 // size=8, offset=8 +``` + +@hover("F", "F", F) + + +[`(p.T).F` on pkg.go.dev](https://pkg.go.dev/mod.com#T.F) +-- @Local -- +```go +type Local struct { // size=8 + E +} +``` + +Local types should not be linkable, even if they are capitalized. + + +```go +// Embedded fields: +Embed int64 // through E +``` +-- @Nested -- +```go +field Nested int64 // size=8, offset=0 +``` + +Nested fields should also be linkable. +-- @T -- +```go +type T struct { // size=32 (0x20) + f int64 //@hover("f", "f", f) + F int64 //@hover("F", "F", F) + + E + + // TODO(rfindley): is the link here correct? It ignores N. + N struct { + // Nested fields should also be linkable. + Nested int64 //@hover("Nested", "Nested", Nested) + } +} +``` + +T is in the package scope, and so should be linkable. + + +```go +// Embedded fields: +Embed int64 // through E +``` + +```go +func (T) M() +func (T) m() +``` + +[`p.T` on pkg.go.dev](https://pkg.go.dev/mod.com#T) +-- @X -- +```go +var X int64 +``` + +Local variables should not be linkable, even if they are capitalized. +-- @f -- +```go +field f int64 // size=8, offset=0 +``` + +@hover("f", "f", f) diff --git a/gopls/internal/regtest/marker/testdata/hover/linkable_generics.txt b/gopls/internal/test/marker/testdata/hover/linkable_generics.txt similarity index 87% rename from gopls/internal/regtest/marker/testdata/hover/linkable_generics.txt rename to gopls/internal/test/marker/testdata/hover/linkable_generics.txt index be8e9e5cd7a..0b7ade7965e 100644 --- a/gopls/internal/regtest/marker/testdata/hover/linkable_generics.txt +++ b/gopls/internal/test/marker/testdata/hover/linkable_generics.txt @@ -1,8 +1,5 @@ This file contains tests for documentation links to generic code in hover. 
--- flags -- --min_go=go1.18 - -- go.mod -- module mod.com @@ -40,7 +37,7 @@ func (GT[P]) M(p P) { //@hover("GT", "GT", GTrecv),hover("M","M", M),hover(re"p func GF[P any] (p P) { //@hover("GF", "GF", GF) } --- @F/hover.md -- +-- @F -- ```go field F P ``` @@ -49,23 +46,21 @@ field F P [`(generic.GT).F` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT.F) --- @FP/hover.md -- +-- @FP -- ```go type parameter P any ``` --- @GF/hover.md -- +-- @GF -- ```go func GF[P any](p P) ``` [`generic.GF` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GF) --- @GT/hover.md -- +-- @GT -- ```go type GT[P any] struct { F P //@hover("F", "F", F),hover("P", "P", FP) } - -func (GT[P]).M(p P) ``` Hovering over type parameters should link to documentation. @@ -73,18 +68,20 @@ Hovering over type parameters should link to documentation. TODO(rfindley): should it? We should probably link to the type. +```go +func (GT[P]) M(p P) +``` + [`generic.GT` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT) --- @GTP/hover.md -- +-- @GTP -- ```go type parameter P any ``` --- @GTrecv/hover.md -- +-- @GTrecv -- ```go type GT[P any] struct { F P //@hover("F", "F", F),hover("P", "P", FP) } - -func (GT[P]).M(p P) ``` Hovering over type parameters should link to documentation. @@ -92,34 +89,38 @@ Hovering over type parameters should link to documentation. TODO(rfindley): should it? We should probably link to the type. 
+```go +func (GT[P]) M(p P) +``` + [`generic.GT` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT) --- @M/hover.md -- +-- @M -- ```go -func (GT[P]).M(p P) +func (GT[P]) M(p P) ``` [`(generic.GT).M` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT.M) --- @f/hover.md -- +-- @f -- ```go var f func(p int) ``` --- @fGF/hover.md -- +-- @fGF -- ```go func generic.GF(p int) // func[P any](p P) ``` [`generic.GF` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GF) --- @pP/hover.md -- +-- @pP -- ```go type parameter P any ``` --- @x/hover.md -- +-- @x -- ```go var x generic.GT[int] ``` @hover("GT", "GT", xGT) --- @xF/hover.md -- +-- @xF -- ```go field F int ``` @@ -128,13 +129,11 @@ field F int [`(generic.GT).F` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT.F) --- @xGT/hover.md -- +-- @xGT -- ```go type GT[P any] struct { F P //@hover("F", "F", F),hover("P", "P", FP) } - -func (generic.GT[P]).M(p P) ``` Hovering over type parameters should link to documentation. @@ -142,4 +141,8 @@ Hovering over type parameters should link to documentation. TODO(rfindley): should it? We should probably link to the type. +```go +func (generic.GT[P]) M(p P) +``` + [`generic.GT` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT) diff --git a/gopls/internal/regtest/marker/testdata/hover/linkname.txt b/gopls/internal/test/marker/testdata/hover/linkname.txt similarity index 95% rename from gopls/internal/regtest/marker/testdata/hover/linkname.txt rename to gopls/internal/test/marker/testdata/hover/linkname.txt index 0d244c4e73c..8bb2eeb33cd 100644 --- a/gopls/internal/regtest/marker/testdata/hover/linkname.txt +++ b/gopls/internal/test/marker/testdata/hover/linkname.txt @@ -1,4 +1,5 @@ This test check hover on the 2nd argument in go:linkname directives. 
+ -- go.mod -- module mod.com @@ -21,7 +22,7 @@ func bar() string { return "foo by bar" } --- @bar/hover.md -- +-- @bar -- ```go func bar() string ``` diff --git a/gopls/internal/test/marker/testdata/hover/methods.txt b/gopls/internal/test/marker/testdata/hover/methods.txt new file mode 100644 index 00000000000..8af22494f75 --- /dev/null +++ b/gopls/internal/test/marker/testdata/hover/methods.txt @@ -0,0 +1,71 @@ +This test checks the formatting of the list of accessible methods. + +Observe that: +- interface methods that appear in the syntax are not repeated + in the method set of the type; +- promoted methods of structs are shown; +- receiver variables are correctly named; +- receiver variables have a pointer type if appropriate; +- only accessible methods are shown. + +-- go.mod -- +module example.com + +-- lib/lib.go -- +package lib + +type I interface { + A() + b() + J +} + +type J interface { C() } + +type S struct { I } +func (s S) A() {} +func (s S) b() {} +func (s *S) PA() {} +func (s *S) pb() {} + +-- a/a.go -- +package a + +import "example.com/lib" + +var _ lib.I //@hover("I", "I", I) +var _ lib.J //@hover("J", "J", J) +var _ lib.S //@hover("S", "S", S) + +-- @I -- +```go +type I interface { + A() + b() + J +} +``` + +```go +func (lib.J) C() +``` + +[`lib.I` on pkg.go.dev](https://pkg.go.dev/example.com/lib#I) +-- @J -- +```go +type J interface{ C() } +``` + +[`lib.J` on pkg.go.dev](https://pkg.go.dev/example.com/lib#J) +-- @S -- +```go +type S struct{ I } +``` + +```go +func (s lib.S) A() +func (lib.J) C() +func (s *lib.S) PA() +``` + +[`lib.S` on pkg.go.dev](https://pkg.go.dev/example.com/lib#S) diff --git a/gopls/internal/test/marker/testdata/hover/sizeoffset.txt b/gopls/internal/test/marker/testdata/hover/sizeoffset.txt new file mode 100644 index 00000000000..62f3b76dd60 --- /dev/null +++ b/gopls/internal/test/marker/testdata/hover/sizeoffset.txt @@ -0,0 +1,117 @@ +This test checks that hover reports the sizes of vars/types, +and the offsets of 
struct fields. + +Notes: +- this only works on the declaring identifier, not on refs. +- the size of a type is undefined if it depends on type parameters. +- the offset of a field is undefined if it or any preceding field + has undefined size/alignment. +- the test's size expectations assumes a 64-bit machine. +- requires go1.22 because size information was inaccurate before. + +-- flags -- +-skip_goarch=386,arm +-min_go=go1.22 + +-- go.mod -- +module example.com + +go 1.18 +-- a.go -- +package a + +type T struct { //@ hover("T", "T", T) + a int //@ hover("a", "a", a) + U U //@ hover("U", "U", U) + y, z int //@ hover("y", "y", y), hover("z", "z", z) +} + +type U struct { + slice []string +} + +type G[T any] struct { + p T //@ hover("p", "p", p) + q int //@ hover("q", "q", q) +} + +var _ struct { + Gint G[int] //@ hover("Gint", "Gint", Gint) + Gstring G[string] //@ hover("Gstring", "Gstring", Gstring) +} + +type wasteful struct { //@ hover("wasteful", "wasteful", wasteful) + a bool + b [2]string + c bool +} + +-- @T -- +```go +type T struct { // size=48 (0x30) + a int //@ hover("a", "a", a) + U U //@ hover("U", "U", U) + y, z int //@ hover("y", "y", y), hover("z", "z", z) +} +``` + +[`a.T` on pkg.go.dev](https://pkg.go.dev/example.com#T) +-- @wasteful -- +```go +type wasteful struct { // size=48 (0x30) (29% wasted) + a bool + b [2]string + c bool +} +``` +-- @a -- +```go +field a int // size=8, offset=0 +``` + +@ hover("a", "a", a) +-- @U -- +```go +field U U // size=24 (0x18), offset=8 +``` + +@ hover("U", "U", U) + + +[`(a.T).U` on pkg.go.dev](https://pkg.go.dev/example.com#T.U) +-- @y -- +```go +field y int // size=8, offset=32 (0x20) +``` + +@ hover("y", "y", y), hover("z", "z", z) +-- @z -- +```go +field z int // size=8, offset=40 (0x28) +``` + +@ hover("y", "y", y), hover("z", "z", z) +-- @p -- +```go +field p T +``` + +@ hover("p", "p", p) +-- @q -- +```go +field q int // size=8 +``` + +@ hover("q", "q", q) +-- @Gint -- +```go +field Gint G[int] // size=16 
(0x10), offset=0 +``` + +@ hover("Gint", "Gint", Gint) +-- @Gstring -- +```go +field Gstring G[string] // size=24 (0x18), offset=16 (0x10) +``` + +@ hover("Gstring", "Gstring", Gstring) diff --git a/gopls/internal/regtest/marker/testdata/hover/std.txt b/gopls/internal/test/marker/testdata/hover/std.txt similarity index 89% rename from gopls/internal/regtest/marker/testdata/hover/std.txt rename to gopls/internal/test/marker/testdata/hover/std.txt index a526b5211eb..c0db135f6b1 100644 --- a/gopls/internal/regtest/marker/testdata/hover/std.txt +++ b/gopls/internal/test/marker/testdata/hover/std.txt @@ -6,14 +6,17 @@ synopsis does not. In the future we may need to limit this test to the latest Go version to avoid documentation churn. + -- settings.json -- { "hoverKind": "SynopsisDocumentation" } + -- go.mod -- module mod.com go 1.18 + -- std.go -- package std @@ -36,31 +39,31 @@ func _() { var typ *types.Named //@hover("types", "types", hoverTypes) typ.Obj().Name() //@hover("Name", "Name", hoverName) } --- @hoverLock/hover.md -- +-- @hoverLock -- ```go -func (*sync.Mutex).Lock() +func (m *sync.Mutex) Lock() ``` Lock locks m. [`(sync.Mutex).Lock` on pkg.go.dev](https://pkg.go.dev/sync#Mutex.Lock) --- @hoverName/hover.md -- +-- @hoverName -- ```go -func (*types.object).Name() string +func (obj *types.object) Name() string ``` Name returns the object's (package-local, unqualified) name. 
[`(types.TypeName).Name` on pkg.go.dev](https://pkg.go.dev/go/types#TypeName.Name) --- @hoverTypes/hover.md -- +-- @hoverTypes -- ```go package types ("go/types") ``` [`types` on pkg.go.dev](https://pkg.go.dev/go/types) --- @hovermake/hover.md -- +-- @hovermake -- ```go func make(t Type, size ...int) Type ``` @@ -69,7 +72,7 @@ The make built-in function allocates and initializes an object of type slice, ma [`make` on pkg.go.dev](https://pkg.go.dev/builtin#make) --- @hoverstring/hover.md -- +-- @hoverstring -- ```go type string string ``` diff --git a/gopls/internal/regtest/marker/testdata/implementation/basic.txt b/gopls/internal/test/marker/testdata/implementation/basic.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/implementation/basic.txt rename to gopls/internal/test/marker/testdata/implementation/basic.txt diff --git a/gopls/internal/test/marker/testdata/implementation/generics.txt b/gopls/internal/test/marker/testdata/implementation/generics.txt new file mode 100644 index 00000000000..4a6c31b22f8 --- /dev/null +++ b/gopls/internal/test/marker/testdata/implementation/generics.txt @@ -0,0 +1,31 @@ +Test of 'implementation' query on generic types. 
+ +-- go.mod -- +module example.com +go 1.18 + +-- implementation/implementation.go -- +package implementation + +type GenIface[T any] interface { //@loc(GenIface, "GenIface"),implementation("GenIface", GC) + F(int, string, T) //@loc(GenIfaceF, "F"),implementation("F", GCF) +} + +type GenConc[U any] int //@loc(GenConc, "GenConc"),implementation("GenConc", GI) + +func (GenConc[V]) F(int, string, V) {} //@loc(GenConcF, "F"),implementation("F", GIF) + +type GenConcString struct{ GenConc[string] } //@loc(GenConcString, "GenConcString"),implementation(GenConcString, GIString) + +-- other/other.go -- +package other + +type GI[T any] interface { //@loc(GI, "GI"),implementation("GI", GenConc) + F(int, string, T) //@loc(GIF, "F"),implementation("F", GenConcF) +} + +type GIString GI[string] //@loc(GIString, "GIString"),implementation("GIString", GenConcString) + +type GC[U any] int //@loc(GC, "GC"),implementation("GC", GenIface) + +func (GC[V]) F(int, string, V) {} //@loc(GCF, "F"),implementation("F", GenIfaceF) diff --git a/gopls/internal/regtest/marker/testdata/implementation/issue43655.txt b/gopls/internal/test/marker/testdata/implementation/issue43655.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/implementation/issue43655.txt rename to gopls/internal/test/marker/testdata/implementation/issue43655.txt diff --git a/gopls/internal/test/marker/testdata/inlayhints/inlayhints.txt b/gopls/internal/test/marker/testdata/inlayhints/inlayhints.txt new file mode 100644 index 00000000000..e690df72c1c --- /dev/null +++ b/gopls/internal/test/marker/testdata/inlayhints/inlayhints.txt @@ -0,0 +1,405 @@ + +-- flags -- +-ignore_extra_diags + +-- settings.json -- +{ + "hints": { + "assignVariableTypes": true, + "compositeLiteralFields": true, + "compositeLiteralTypes": true, + "constantValues": true, + "functionTypeParameters": true, + "parameterNames": true, + "rangeVariabletypes": true + } +} + +-- composite_literals.go -- +package inlayHint 
//@inlayhints(complit) + +import "fmt" + +func fieldNames() { + for _, c := range []struct { + in, want string + }{ + struct{ in, want string }{"Hello, world", "dlrow ,olleH"}, + {"Hello, 世界", "界世 ,olleH"}, + {"", ""}, + } { + fmt.Println(c.in == c.want) + } +} + +func fieldNamesPointers() { + for _, c := range []*struct { + in, want string + }{ + &struct{ in, want string }{"Hello, world", "dlrow ,olleH"}, + {"Hello, 世界", "界世 ,olleH"}, + {"", ""}, + } { + fmt.Println(c.in == c.want) + } +} + +-- @complit -- +package inlayHint //@inlayhints(complit) + +import "fmt" + +func fieldNames() { + for _, c := range []struct { + in, want string + }{ + struct{ in, want string }{<in: >"Hello, world", <want: >"dlrow ,olleH"}, + <struct{in string; want string}>{<in: >"Hello, 世界", <want: >"界世 ,olleH"}, + <struct{in string; want string}>{<in: >"", <want: >""}, + } { + fmt.Println(<a...: >c.in == c.want) + } +} + +func fieldNamesPointers() { + for _, c := range []*struct { + in, want string + }{ + &struct{ in, want string }{<in: >"Hello, world", <want: >"dlrow ,olleH"}, + <&struct{in string; want string}>{<in: >"Hello, 世界", <want: >"界世 ,olleH"}, + <&struct{in string; want string}>{<in: >"", <want: >""}, + } { + fmt.Println(<a...: >c.in == c.want) + } +} + +-- constant_values.go -- +package inlayHint //@inlayhints(values) + +const True = true + +type Kind int + +const ( + KindNone Kind = iota + KindPrint + KindPrintf + KindErrorf +) + +const ( + u = iota * 4 + v float64 = iota * 42 + w = iota * 42 +) + +const ( + a, b = 1, 2 + c, d + e, f = 5 * 5, "hello" + "world" + g, h + i, j = true, f +) + +// No hint +const ( + Int = 3 + Float = 3.14 + Bool = true + Rune = '3' + Complex = 2.7i + String = "Hello, world!" +) + +var ( + varInt = 3 + varFloat = 3.14 + varBool = true + varRune = '3' + '4' + varComplex = 2.7i + varString = "Hello, world!" 
+) + +-- @values -- +package inlayHint //@inlayhints(values) + +const True = true + +type Kind int + +const ( + KindNone Kind = iota< = 0> + KindPrint< = 1> + KindPrintf< = 2> + KindErrorf< = 3> +) + +const ( + u = iota * 4< = 0> + v float64 = iota * 42< = 42> + w = iota * 42< = 84> +) + +const ( + a, b = 1, 2 + c, d< = 1, 2> + e, f = 5 * 5, "hello" + "world"< = 25, "helloworld"> + g, h< = 25, "helloworld"> + i, j = true, f< = true, "helloworld"> +) + +// No hint +const ( + Int = 3 + Float = 3.14 + Bool = true + Rune = '3' + Complex = 2.7i + String = "Hello, world!" +) + +var ( + varInt = 3 + varFloat = 3.14 + varBool = true + varRune = '3' + '4' + varComplex = 2.7i + varString = "Hello, world!" +) + +-- parameter_names.go -- +package inlayHint //@inlayhints(parameters) + +import "fmt" + +func hello(name string) string { + return "Hello " + name +} + +func helloWorld() string { + return hello("World") +} + +type foo struct{} + +func (*foo) bar(baz string, qux int) int { + if baz != "" { + return qux + 1 + } + return qux +} + +func kase(foo int, bar bool, baz ...string) { + fmt.Println(foo, bar, baz) +} + +func kipp(foo string, bar, baz string) { + fmt.Println(foo, bar, baz) +} + +func plex(foo, bar string, baz string) { + fmt.Println(foo, bar, baz) +} + +func tars(foo string, bar, baz string) { + fmt.Println(foo, bar, baz) +} + +func foobar() { + var x foo + x.bar("", 1) + kase(0, true, "c", "d", "e") + kipp("a", "b", "c") + plex("a", "b", "c") + tars("a", "b", "c") + foo, bar, baz := "a", "b", "c" + kipp(foo, bar, baz) + plex("a", bar, baz) + tars(foo+foo, (bar), "c") + +} + +-- @parameters -- +package inlayHint //@inlayhints(parameters) + +import "fmt" + +func hello(name string) string { + return "Hello " + name +} + +func helloWorld() string { + return hello(<name: >"World") +} + +type foo struct{} + +func (*foo) bar(baz string, qux int) int { + if baz != "" { + return qux + 1 + } + return qux +} + +func kase(foo int, bar bool, baz ...string) { + 
fmt.Println(<a...: >foo, bar, baz) +} + +func kipp(foo string, bar, baz string) { + fmt.Println(<a...: >foo, bar, baz) +} + +func plex(foo, bar string, baz string) { + fmt.Println(<a...: >foo, bar, baz) +} + +func tars(foo string, bar, baz string) { + fmt.Println(<a...: >foo, bar, baz) +} + +func foobar() { + var x foo + x.bar(<baz: >"", <qux: >1) + kase(<foo: >0, <bar: >true, <baz...: >"c", "d", "e") + kipp(<foo: >"a", <bar: >"b", <baz: >"c") + plex(<foo: >"a", <bar: >"b", <baz: >"c") + tars(<foo: >"a", <bar: >"b", <baz: >"c") + foo< string>, bar< string>, baz< string> := "a", "b", "c" + kipp(foo, bar, baz) + plex(<foo: >"a", bar, baz) + tars(<foo: >foo+foo, <bar: >(bar), <baz: >"c") + +} + +-- type_params.go -- +package inlayHint //@inlayhints(typeparams) + +func main() { + ints := map[string]int64{ + "first": 34, + "second": 12, + } + + floats := map[string]float64{ + "first": 35.98, + "second": 26.99, + } + + SumIntsOrFloats[string, int64](ints) + SumIntsOrFloats[string, float64](floats) + + SumIntsOrFloats(ints) + SumIntsOrFloats(floats) + + SumNumbers(ints) + SumNumbers(floats) +} + +type Number interface { + int64 | float64 +} + +func SumIntsOrFloats[K comparable, V int64 | float64](m map[K]V) V { + var s V + for _, v := range m { + s += v + } + return s +} + +func SumNumbers[K comparable, V Number](m map[K]V) V { + var s V + for _, v := range m { + s += v + } + return s +} + +-- @typeparams -- +package inlayHint //@inlayhints(typeparams) + +func main() { + ints< map[string]int64> := map[string]int64{ + "first": 34, + "second": 12, + } + + floats< map[string]float64> := map[string]float64{ + "first": 35.98, + "second": 26.99, + } + + SumIntsOrFloats[string, int64](<m: >ints) + SumIntsOrFloats[string, float64](<m: >floats) + + SumIntsOrFloats<[string, int64]>(<m: >ints) + SumIntsOrFloats<[string, float64]>(<m: >floats) + + SumNumbers<[string, int64]>(<m: >ints) + SumNumbers<[string, float64]>(<m: >floats) +} + +type Number interface { + int64 | float64 +} + 
+func SumIntsOrFloats[K comparable, V int64 | float64](m map[K]V) V { + var s V + for _, v := range m { + s += v + } + return s +} + +func SumNumbers[K comparable, V Number](m map[K]V) V { + var s V + for _, v := range m { + s += v + } + return s +} + +-- variable_types.go -- +package inlayHint //@inlayhints(vartypes) + +func assignTypes() { + i, j := 0, len([]string{})-1 + println(i, j) +} + +func rangeTypes() { + for k, v := range []string{} { + println(k, v) + } +} + +func funcLitType() { + myFunc := func(a string) string { return "" } +} + +func compositeLitType() { + foo := map[string]interface{}{"": ""} +} + +-- @vartypes -- +package inlayHint //@inlayhints(vartypes) + +func assignTypes() { + i< int>, j< int> := 0, len([]string{})-1 + println(i, j) +} + +func rangeTypes() { + for k, v := range []string{} { + println(k, v) + } +} + +func funcLitType() { + myFunc< func(a string) string> := func(a string) string { return "" } +} + +func compositeLitType() { + foo< map[string]interface{}> := map[string]interface{}{"": ""} +} + diff --git a/gopls/internal/regtest/marker/testdata/links/links.txt b/gopls/internal/test/marker/testdata/links/links.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/links/links.txt rename to gopls/internal/test/marker/testdata/links/links.txt diff --git a/gopls/internal/regtest/marker/testdata/references/crosspackage.txt b/gopls/internal/test/marker/testdata/references/crosspackage.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/references/crosspackage.txt rename to gopls/internal/test/marker/testdata/references/crosspackage.txt diff --git a/gopls/internal/regtest/marker/testdata/references/imports.txt b/gopls/internal/test/marker/testdata/references/imports.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/references/imports.txt rename to gopls/internal/test/marker/testdata/references/imports.txt diff --git 
a/gopls/internal/regtest/marker/testdata/references/interfaces.txt b/gopls/internal/test/marker/testdata/references/interfaces.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/references/interfaces.txt rename to gopls/internal/test/marker/testdata/references/interfaces.txt diff --git a/gopls/internal/regtest/marker/testdata/references/intrapackage.txt b/gopls/internal/test/marker/testdata/references/intrapackage.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/references/intrapackage.txt rename to gopls/internal/test/marker/testdata/references/intrapackage.txt diff --git a/gopls/internal/regtest/marker/testdata/references/issue58506.txt b/gopls/internal/test/marker/testdata/references/issue58506.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/references/issue58506.txt rename to gopls/internal/test/marker/testdata/references/issue58506.txt diff --git a/gopls/internal/regtest/marker/testdata/references/issue59851.txt b/gopls/internal/test/marker/testdata/references/issue59851.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/references/issue59851.txt rename to gopls/internal/test/marker/testdata/references/issue59851.txt diff --git a/gopls/internal/regtest/marker/testdata/references/issue60369.txt b/gopls/internal/test/marker/testdata/references/issue60369.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/references/issue60369.txt rename to gopls/internal/test/marker/testdata/references/issue60369.txt diff --git a/gopls/internal/regtest/marker/testdata/references/issue60622.txt b/gopls/internal/test/marker/testdata/references/issue60622.txt similarity index 92% rename from gopls/internal/regtest/marker/testdata/references/issue60622.txt rename to gopls/internal/test/marker/testdata/references/issue60622.txt index 803ec8b3500..45d7ec58023 100644 --- a/gopls/internal/regtest/marker/testdata/references/issue60622.txt +++ 
b/gopls/internal/test/marker/testdata/references/issue60622.txt @@ -1,9 +1,6 @@ Regression test for 'references' bug golang/go#60622: references to methods of generics were missing. --- flags -- --min_go=go1.18 - -- go.mod -- module example.com go 1.18 diff --git a/gopls/internal/regtest/marker/testdata/references/issue60676.txt b/gopls/internal/test/marker/testdata/references/issue60676.txt similarity index 93% rename from gopls/internal/regtest/marker/testdata/references/issue60676.txt rename to gopls/internal/test/marker/testdata/references/issue60676.txt index 98f608ee10c..5cef978927f 100644 --- a/gopls/internal/regtest/marker/testdata/references/issue60676.txt +++ b/gopls/internal/test/marker/testdata/references/issue60676.txt @@ -5,9 +5,6 @@ shared by types from multiple packages. See golang/go#60676. Note that the marker test runner awaits the initial workspace load, so export data should be populated at the time references are requested. --- flags -- --min_go=go1.18 - -- go.mod -- module mod.test @@ -58,10 +55,11 @@ import "mod.test/b" func _() { x := b.B{ - F: 42, //@refs("F", FDef, "F") + F: 42, //@refs("F", FDef, "F", Fuse) } x.G = "hi" //@refs("G", GDef, "G") _ = x.E //@refs("E", EDef, "E") + _ = x.F //@loc(Fuse, "F") } func _(y b.BI) { diff --git a/gopls/internal/regtest/marker/testdata/references/issue61618.txt b/gopls/internal/test/marker/testdata/references/issue61618.txt similarity index 96% rename from gopls/internal/regtest/marker/testdata/references/issue61618.txt rename to gopls/internal/test/marker/testdata/references/issue61618.txt index 6027d448048..47dc02ef793 100644 --- a/gopls/internal/regtest/marker/testdata/references/issue61618.txt +++ b/gopls/internal/test/marker/testdata/references/issue61618.txt @@ -1,9 +1,6 @@ Regression test for 'references' bug golang/go#61618: references to instantiated fields were missing. 
--- flags -- --min_go=go1.18 - -- go.mod -- module example.com go 1.18 diff --git a/gopls/internal/regtest/marker/testdata/references/shadow.txt b/gopls/internal/test/marker/testdata/references/shadow.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/references/shadow.txt rename to gopls/internal/test/marker/testdata/references/shadow.txt diff --git a/gopls/internal/regtest/marker/testdata/references/test.txt b/gopls/internal/test/marker/testdata/references/test.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/references/test.txt rename to gopls/internal/test/marker/testdata/references/test.txt diff --git a/gopls/internal/regtest/marker/testdata/references/typeswitch.txt b/gopls/internal/test/marker/testdata/references/typeswitch.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/references/typeswitch.txt rename to gopls/internal/test/marker/testdata/references/typeswitch.txt diff --git a/gopls/internal/test/marker/testdata/rename/bad.txt b/gopls/internal/test/marker/testdata/rename/bad.txt new file mode 100644 index 00000000000..c596ad13c92 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/bad.txt @@ -0,0 +1,19 @@ +This test checks that rename fails in the presence of errors. + +-- go.mod -- +module golang.org/lsptests/bad + +go 1.18 + +-- bad.go -- +package bad + +type myStruct struct { +} + +func (s *myStruct) sFunc() bool { //@renameerr("sFunc", "rFunc", re"not possible") + return s.Bad //@diag("Bad", re"no field or method") +} + +-- bad_test.go -- +package bad diff --git a/gopls/internal/test/marker/testdata/rename/basic.txt b/gopls/internal/test/marker/testdata/rename/basic.txt new file mode 100644 index 00000000000..618f9593668 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/basic.txt @@ -0,0 +1,35 @@ +This test performs basic coverage of 'rename' within a single package. 
+ +-- basic.go -- +package p + +func f(x int) { println(x) } //@rename("x", "y", xToy) + +-- @xToy/basic.go -- +@@ -3 +3 @@ +-func f(x int) { println(x) } //@rename("x", "y", xToy) ++func f(y int) { println(y) } //@rename("x", "y", xToy) +-- alias.go -- +package p + +// from golang/go#61625 +type LongNameHere struct{} +type A = LongNameHere //@rename("A", "B", AToB) +func Foo() A + +-- errors.go -- +package p + +func _(x []int) { //@renameerr("_", "blank", `can't rename "_"`) + x = append(x, 1) //@renameerr("append", "blank", "built in and cannot be renamed") + x = nil //@renameerr("nil", "blank", "built in and cannot be renamed") + x = nil //@renameerr("x", "x", "old and new names are the same: x") + _ = 1 //@renameerr("1", "x", "no identifier found") +} + +-- @AToB/alias.go -- +@@ -5,2 +5,2 @@ +-type A = LongNameHere //@rename("A", "B", AToB) +-func Foo() A ++type B = LongNameHere //@rename("A", "B", AToB) ++func Foo() B diff --git a/gopls/internal/regtest/marker/testdata/rename/conflict.txt b/gopls/internal/test/marker/testdata/rename/conflict.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/rename/conflict.txt rename to gopls/internal/test/marker/testdata/rename/conflict.txt diff --git a/gopls/internal/test/marker/testdata/rename/crosspkg.txt b/gopls/internal/test/marker/testdata/rename/crosspkg.txt new file mode 100644 index 00000000000..c60930b0114 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/crosspkg.txt @@ -0,0 +1,72 @@ +This test checks cross-package renaming. 
+ +-- go.mod -- +module golang.org/lsptests/rename + +go 1.18 + +-- crosspkg/crosspkg.go -- +package crosspkg + +func Foo() { //@rename("Foo", "Dolphin", FooToDolphin) + +} + +var Bar int //@rename("Bar", "Tomato", BarToTomato) + +-- crosspkg/another/another.go -- +package another + +type ( + I interface{ F() } + C struct{ I } +) + +func (C) g() + +func _() { + var x I = C{} + x.F() //@rename("F", "G", FToG) +} + +-- crosspkg/other/other.go -- +package other + +import "golang.org/lsptests/rename/crosspkg" + +func Other() { + crosspkg.Bar //@diag("crosspkg", re"not used") + crosspkg.Foo() //@rename("Foo", "Flamingo", FooToFlamingo) +} + +-- @BarToTomato/crosspkg/crosspkg.go -- +@@ -7 +7 @@ +-var Bar int //@rename("Bar", "Tomato", BarToTomato) ++var Tomato int //@rename("Bar", "Tomato", BarToTomato) +-- @BarToTomato/crosspkg/other/other.go -- +@@ -6 +6 @@ +- crosspkg.Bar //@diag("crosspkg", re"not used") ++ crosspkg.Tomato //@diag("crosspkg", re"not used") +-- @FToG/crosspkg/another/another.go -- +@@ -4 +4 @@ +- I interface{ F() } ++ I interface{ G() } +@@ -12 +12 @@ +- x.F() //@rename("F", "G", FToG) ++ x.G() //@rename("F", "G", FToG) +-- @FooToDolphin/crosspkg/crosspkg.go -- +@@ -3 +3 @@ +-func Foo() { //@rename("Foo", "Dolphin", FooToDolphin) ++func Dolphin() { //@rename("Foo", "Dolphin", FooToDolphin) +-- @FooToDolphin/crosspkg/other/other.go -- +@@ -7 +7 @@ +- crosspkg.Foo() //@rename("Foo", "Flamingo", FooToFlamingo) ++ crosspkg.Dolphin() //@rename("Foo", "Flamingo", FooToFlamingo) +-- @FooToFlamingo/crosspkg/crosspkg.go -- +@@ -3 +3 @@ +-func Foo() { //@rename("Foo", "Dolphin", FooToDolphin) ++func Flamingo() { //@rename("Foo", "Dolphin", FooToDolphin) +-- @FooToFlamingo/crosspkg/other/other.go -- +@@ -7 +7 @@ +- crosspkg.Foo() //@rename("Foo", "Flamingo", FooToFlamingo) ++ crosspkg.Flamingo() //@rename("Foo", "Flamingo", FooToFlamingo) diff --git a/gopls/internal/test/marker/testdata/rename/doclink.txt b/gopls/internal/test/marker/testdata/rename/doclink.txt 
new file mode 100644 index 00000000000..d4e9f96891e --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/doclink.txt @@ -0,0 +1,180 @@ +This test checks that doc links are also handled correctly (golang/go#64495). + +-- go.mod -- +module example.com + +go 1.21 + +-- a/a.go -- +package a + +// Foo just for test [Foo] +// reference others objects [A] [B] [C] [C.F] [C.PF] +func Foo() {} //@rename("Foo", "Bar", FooToBar) + +const A = 1 //@rename("A", "AA", AToAA) + +var B = 1 //@rename("B", "BB", BToBB) + +type C int //@rename("C", "CC", CToCC) + +func (C) F() {} //@rename("F", "FF", FToFF) + +func (*C) PF() {} //@rename("PF", "PFF", PFToPFF) + +// D just for test [*D] +type D int //@rename("D", "DD", DToDD) + +// E test generic type doc link [E] [E.Foo] +type E[T any] struct { //@rename("E", "EE", EToEE) + Field T +} + +func (E[T]) Foo() {} //@rename("Foo", "Bar", EFooToEBar) + +-- b/b.go -- +package b + +import aa "example.com/a" //@rename("aa", "a", pkgRename) + +// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +// reference pointer type [*aa.D] +// reference generic type links [aa.E] [aa.E.Foo] +func FooBar() { + aa.Foo() + var e aa.E[int] + e.Foo() +} + + +-- @FooToBar/a/a.go -- +@@ -3 +3 @@ +-// Foo just for test [Foo] ++// Bar just for test [Bar] +@@ -5 +5 @@ +-func Foo() {} //@rename("Foo", "Bar", FooToBar) ++func Bar() {} //@rename("Foo", "Bar", FooToBar) +-- @FooToBar/b/b.go -- +@@ -5 +5 @@ +-// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] ++// FooBar just for test [aa.Bar] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +@@ -9 +9 @@ +- aa.Foo() ++ aa.Bar() +-- @AToAA/a/a.go -- +@@ -4 +4 @@ +-// reference others objects [A] [B] [C] [C.F] [C.PF] ++// reference others objects [AA] [B] [C] [C.F] [C.PF] +@@ -7 +7 @@ +-const A = 1 //@rename("A", "AA", AToAA) ++const AA = 1 //@rename("A", "AA", AToAA) +-- @AToAA/b/b.go -- +@@ -5 +5 @@ +-// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] ++// 
FooBar just for test [aa.Foo] [aa.AA] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +-- @BToBB/a/a.go -- +@@ -4 +4 @@ +-// reference others objects [A] [B] [C] [C.F] [C.PF] ++// reference others objects [A] [BB] [C] [C.F] [C.PF] +@@ -9 +9 @@ +-var B = 1 //@rename("B", "BB", BToBB) ++var BB = 1 //@rename("B", "BB", BToBB) +-- @BToBB/b/b.go -- +@@ -5 +5 @@ +-// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] ++// FooBar just for test [aa.Foo] [aa.A] [aa.BB] [aa.C] [aa.C.F] [aa.C.PF] +-- @CToCC/a/a.go -- +@@ -4 +4 @@ +-// reference others objects [A] [B] [C] [C.F] [C.PF] ++// reference others objects [A] [B] [CC] [CC.F] [CC.PF] +@@ -11 +11 @@ +-type C int //@rename("C", "CC", CToCC) ++type CC int //@rename("C", "CC", CToCC) +@@ -13 +13 @@ +-func (C) F() {} //@rename("F", "FF", FToFF) ++func (CC) F() {} //@rename("F", "FF", FToFF) +@@ -15 +15 @@ +-func (*C) PF() {} //@rename("PF", "PFF", PFToPFF) ++func (*CC) PF() {} //@rename("PF", "PFF", PFToPFF) +-- @CToCC/b/b.go -- +@@ -5 +5 @@ +-// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] ++// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.CC] [aa.CC.F] [aa.CC.PF] +-- @FToFF/a/a.go -- +@@ -4 +4 @@ +-// reference others objects [A] [B] [C] [C.F] [C.PF] ++// reference others objects [A] [B] [C] [C.FF] [C.PF] +@@ -13 +13 @@ +-func (C) F() {} //@rename("F", "FF", FToFF) ++func (C) FF() {} //@rename("F", "FF", FToFF) +-- @FToFF/b/b.go -- +@@ -5 +5 @@ +-// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] ++// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.FF] [aa.C.PF] +-- @PFToPFF/a/a.go -- +@@ -4 +4 @@ +-// reference others objects [A] [B] [C] [C.F] [C.PF] ++// reference others objects [A] [B] [C] [C.F] [C.PFF] +@@ -15 +15 @@ +-func (*C) PF() {} //@rename("PF", "PFF", PFToPFF) ++func (*C) PFF() {} //@rename("PF", "PFF", PFToPFF) +-- @PFToPFF/b/b.go -- +@@ -5 +5 @@ +-// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] ++// FooBar just for test 
[aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PFF] +-- @pkgRename/b/b.go -- +@@ -3 +3 @@ +-import aa "example.com/a" //@rename("aa", "a", pkgRename) ++import "example.com/a" //@rename("aa", "a", pkgRename) +@@ -5,3 +5,3 @@ +-// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +-// reference pointer type [*aa.D] +-// reference generic type links [aa.E] [aa.E.Foo] ++// FooBar just for test [a.Foo] [a.A] [a.B] [a.C] [a.C.F] [a.C.PF] ++// reference pointer type [*a.D] ++// reference generic type links [a.E] [a.E.Foo] +@@ -9,2 +9,2 @@ +- aa.Foo() +- var e aa.E[int] ++ a.Foo() ++ var e a.E[int] +-- @DToDD/a/a.go -- +@@ -17,2 +17,2 @@ +-// D just for test [*D] +-type D int //@rename("D", "DD", DToDD) ++// DD just for test [*DD] ++type DD int //@rename("D", "DD", DToDD) +-- @DToDD/b/b.go -- +@@ -6 +6 @@ +-// reference pointer type [*aa.D] ++// reference pointer type [*aa.DD] +-- @EToEE/a/a.go -- +@@ -20,2 +20,2 @@ +-// E test generic type doc link [E] [E.Foo] +-type E[T any] struct { //@rename("E", "EE", EToEE) ++// EE test generic type doc link [EE] [EE.Foo] ++type EE[T any] struct { //@rename("E", "EE", EToEE) +@@ -25 +25 @@ +-func (E[T]) Foo() {} //@rename("Foo", "Bar", EFooToEBar) ++func (EE[T]) Foo() {} //@rename("Foo", "Bar", EFooToEBar) +-- @EToEE/b/b.go -- +@@ -7 +7 @@ +-// reference generic type links [aa.E] [aa.E.Foo] ++// reference generic type links [aa.EE] [aa.EE.Foo] +@@ -10 +10 @@ +- var e aa.E[int] ++ var e aa.EE[int] +-- @EFooToEBar/a/a.go -- +@@ -20 +20 @@ +-// E test generic type doc link [E] [E.Foo] ++// E test generic type doc link [E] [E.Bar] +@@ -25 +25 @@ +-func (E[T]) Foo() {} //@rename("Foo", "Bar", EFooToEBar) ++func (E[T]) Bar() {} //@rename("Foo", "Bar", EFooToEBar) +-- @EFooToEBar/b/b.go -- +@@ -7 +7 @@ +-// reference generic type links [aa.E] [aa.E.Foo] ++// reference generic type links [aa.E] [aa.E.Bar] +@@ -11 +11 @@ +- e.Foo() ++ e.Bar() diff --git a/gopls/internal/test/marker/testdata/rename/embed.txt 
b/gopls/internal/test/marker/testdata/rename/embed.txt new file mode 100644 index 00000000000..8e6009e42ca --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/embed.txt @@ -0,0 +1,33 @@ +This test exercises renaming of types used as embedded fields. + +-- go.mod -- +module example.com +go 1.12 + +-- a/a.go -- +package a + +type A int //@rename("A", "A2", type) + +-- b/b.go -- +package b + +import "example.com/a" + +type B struct { a.A } //@renameerr("A", "A3", errAnonField) + +var _ = new(B).A //@renameerr("A", "A4", errAnonField) + +-- @errAnonField -- +can't rename embedded fields: rename the type directly or name the field +-- @type/a/a.go -- +@@ -3 +3 @@ +-type A int //@rename("A", "A2", type) ++type A2 int //@rename("A", "A2", type) +-- @type/b/b.go -- +@@ -5 +5 @@ +-type B struct { a.A } //@renameerr("A", "A3", errAnonField) ++type B struct { a.A2 } //@renameerr("A", "A3", errAnonField) +@@ -7 +7 @@ +-var _ = new(B).A //@renameerr("A", "A4", errAnonField) ++var _ = new(B).A2 //@renameerr("A", "A4", errAnonField) diff --git a/gopls/internal/test/marker/testdata/rename/generics.txt b/gopls/internal/test/marker/testdata/rename/generics.txt new file mode 100644 index 00000000000..0f57570a5fb --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/generics.txt @@ -0,0 +1,185 @@ +This test exercises various renaming features on generic code. + +Fixed bugs: + +- golang/go#61614: renaming a method of a type in a package that uses type + parameter composite lits used to panic, because previous iterations of the + satisfy analysis did not account for this language feature. + +- golang/go#61635: renaming type parameters did not work when they were + capitalized and the package was imported by another package. 
+ +-- go.mod -- +module example.com +go 1.20 + +-- a.go -- +package a + +type I int + +func (I) m() {} //@rename("m", "M", mToM) + +func _[P ~[]int]() { + _ = P{} +} + +-- @mToM/a.go -- +@@ -5 +5 @@ +-func (I) m() {} //@rename("m", "M", mToM) ++func (I) M() {} //@rename("m", "M", mToM) +-- g.go -- +package a + +type S[P any] struct { //@rename("P", "Q", PToQ) + P P + F func(P) P +} + +func F[R any](r R) { + var _ R //@rename("R", "S", RToS) +} + +-- @PToQ/g.go -- +@@ -3,3 +3,3 @@ +-type S[P any] struct { //@rename("P", "Q", PToQ) +- P P +- F func(P) P ++type S[Q any] struct { //@rename("P", "Q", PToQ) ++ P Q ++ F func(Q) Q +-- @RToS/g.go -- +@@ -8,2 +8,2 @@ +-func F[R any](r R) { +- var _ R //@rename("R", "S", RToS) ++func F[S any](r S) { ++ var _ S //@rename("R", "S", RToS) +-- issue61635/p.go -- +package issue61635 + +type builder[S ~[]F, F ~string] struct { //@rename("S", "T", SToT) + name string + elements S + elemData map[F][]ElemData[F] + // other fields... +} + +type ElemData[F ~string] struct { + Name F + // other fields... 
+} + +type BuilderImpl[S ~[]F, F ~string] struct{ builder[S, F] } + +-- importer/i.go -- +package importer + +import "example.com/issue61635" // importing is necessary to repro golang/go#61635 + +var _ issue61635.ElemData[string] + +-- @SToT/issue61635/p.go -- +@@ -3 +3 @@ +-type builder[S ~[]F, F ~string] struct { //@rename("S", "T", SToT) ++type builder[T ~[]F, F ~string] struct { //@rename("S", "T", SToT) +@@ -5 +5 @@ +- elements S ++ elements T +-- instances/type.go -- +package instances + +type R[P any] struct { //@rename("R", "u", Rtou) + Next *R[P] //@rename("R", "s", RTos) +} + +func (rv R[P]) Do(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) + var x R[P] + return rv.Do(x) //@rename("Do", "Do2", DoToDo2) +} + +func _() { + var x R[int] //@rename("R", "r", RTor) + x = x.Do(x) +} + +-- @RTos/instances/type.go -- +@@ -3,2 +3,2 @@ +-type R[P any] struct { //@rename("R", "u", Rtou) +- Next *R[P] //@rename("R", "s", RTos) ++type s[P any] struct { //@rename("R", "u", Rtou) ++ Next *s[P] //@rename("R", "s", RTos) +@@ -7,2 +7,2 @@ +-func (rv R[P]) Do(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) +- var x R[P] ++func (rv s[P]) Do(s[P]) s[P] { //@rename("Do", "Do1", DoToDo1) ++ var x s[P] +@@ -13 +13 @@ +- var x R[int] //@rename("R", "r", RTor) ++ var x s[int] //@rename("R", "r", RTor) +-- @Rtou/instances/type.go -- +@@ -3,2 +3,2 @@ +-type R[P any] struct { //@rename("R", "u", Rtou) +- Next *R[P] //@rename("R", "s", RTos) ++type u[P any] struct { //@rename("R", "u", Rtou) ++ Next *u[P] //@rename("R", "s", RTos) +@@ -7,2 +7,2 @@ +-func (rv R[P]) Do(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) +- var x R[P] ++func (rv u[P]) Do(u[P]) u[P] { //@rename("Do", "Do1", DoToDo1) ++ var x u[P] +@@ -13 +13 @@ +- var x R[int] //@rename("R", "r", RTor) ++ var x u[int] //@rename("R", "r", RTor) +-- @DoToDo1/instances/type.go -- +@@ -7 +7 @@ +-func (rv R[P]) Do(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) ++func (rv R[P]) Do1(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) +@@ -9 +9 @@ +- 
return rv.Do(x) //@rename("Do", "Do2", DoToDo2) ++ return rv.Do1(x) //@rename("Do", "Do2", DoToDo2) +@@ -14 +14 @@ +- x = x.Do(x) ++ x = x.Do1(x) +-- @DoToDo2/instances/type.go -- +@@ -7 +7 @@ +-func (rv R[P]) Do(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) ++func (rv R[P]) Do2(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) +@@ -9 +9 @@ +- return rv.Do(x) //@rename("Do", "Do2", DoToDo2) ++ return rv.Do2(x) //@rename("Do", "Do2", DoToDo2) +@@ -14 +14 @@ +- x = x.Do(x) ++ x = x.Do2(x) +-- instances/func.go -- +package instances + +func Foo[P any](p P) { //@rename("Foo", "Bar", FooToBar) + Foo(p) //@rename("Foo", "Baz", FooToBaz) +} + +-- @FooToBar/instances/func.go -- +@@ -3,2 +3,2 @@ +-func Foo[P any](p P) { //@rename("Foo", "Bar", FooToBar) +- Foo(p) //@rename("Foo", "Baz", FooToBaz) ++func Bar[P any](p P) { //@rename("Foo", "Bar", FooToBar) ++ Bar(p) //@rename("Foo", "Baz", FooToBaz) +-- @FooToBaz/instances/func.go -- +@@ -3,2 +3,2 @@ +-func Foo[P any](p P) { //@rename("Foo", "Bar", FooToBar) +- Foo(p) //@rename("Foo", "Baz", FooToBaz) ++func Baz[P any](p P) { //@rename("Foo", "Bar", FooToBar) ++ Baz(p) //@rename("Foo", "Baz", FooToBaz) +-- @RTor/instances/type.go -- +@@ -3,2 +3,2 @@ +-type R[P any] struct { //@rename("R", "u", Rtou) +- Next *R[P] //@rename("R", "s", RTos) ++type r[P any] struct { //@rename("R", "u", Rtou) ++ Next *r[P] //@rename("R", "s", RTos) +@@ -7,2 +7,2 @@ +-func (rv R[P]) Do(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) +- var x R[P] ++func (rv r[P]) Do(r[P]) r[P] { //@rename("Do", "Do1", DoToDo1) ++ var x r[P] +@@ -13 +13 @@ +- var x R[int] //@rename("R", "r", RTor) ++ var x r[int] //@rename("R", "r", RTor) diff --git a/gopls/internal/test/marker/testdata/rename/generics_basic.txt b/gopls/internal/test/marker/testdata/rename/generics_basic.txt new file mode 100644 index 00000000000..16b0a00c87b --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/generics_basic.txt @@ -0,0 +1,107 @@ +This test exercise basic renaming of generic 
code. + +-- embedded.go -- +package a + +type foo[P any] int //@rename("foo", "bar", fooTobar) + +var x struct{ foo[int] } + +var _ = x.foo + +-- @fooTobar/embedded.go -- +@@ -3 +3 @@ +-type foo[P any] int //@rename("foo", "bar", fooTobar) ++type bar[P any] int //@rename("foo", "bar", fooTobar) +@@ -5 +5 @@ +-var x struct{ foo[int] } ++var x struct{ bar[int] } +@@ -7 +7 @@ +-var _ = x.foo ++var _ = x.bar +-- generics.go -- +package a + +type G[P any] struct { + F int +} + +func (G[_]) M() {} + +func F[P any](P) { + var p P //@rename("P", "Q", PToQ) + _ = p +} + +func _() { + var x G[int] //@rename("G", "H", GToH) + _ = x.F //@rename("F", "K", FToK) + x.M() //@rename("M", "N", MToN) + + var y G[string] + _ = y.F + y.M() +} + +-- @FToK/generics.go -- +@@ -4 +4 @@ +- F int ++ K int +@@ -16 +16 @@ +- _ = x.F //@rename("F", "K", FToK) ++ _ = x.K //@rename("F", "K", FToK) +@@ -20 +20 @@ +- _ = y.F ++ _ = y.K +-- @GToH/generics.go -- +@@ -3 +3 @@ +-type G[P any] struct { ++type H[P any] struct { +@@ -7 +7 @@ +-func (G[_]) M() {} ++func (H[_]) M() {} +@@ -15 +15 @@ +- var x G[int] //@rename("G", "H", GToH) ++ var x H[int] //@rename("G", "H", GToH) +@@ -19 +19 @@ +- var y G[string] ++ var y H[string] +-- @MToN/generics.go -- +@@ -7 +7 @@ +-func (G[_]) M() {} ++func (G[_]) N() {} +@@ -17 +17 @@ +- x.M() //@rename("M", "N", MToN) ++ x.N() //@rename("M", "N", MToN) +@@ -21 +21 @@ +- y.M() ++ y.N() +-- @PToQ/generics.go -- +@@ -9,2 +9,2 @@ +-func F[P any](P) { +- var p P //@rename("P", "Q", PToQ) ++func F[Q any](Q) { ++ var p Q //@rename("P", "Q", PToQ) +-- unions.go -- +package a + +type T string //@rename("T", "R", TToR) + +type C interface { + T | ~int //@rename("T", "S", TToS) +} + +-- @TToR/unions.go -- +@@ -3 +3 @@ +-type T string //@rename("T", "R", TToR) ++type R string //@rename("T", "R", TToR) +@@ -6 +6 @@ +- T | ~int //@rename("T", "S", TToS) ++ R | ~int //@rename("T", "S", TToS) +-- @TToS/unions.go -- +@@ -3 +3 @@ +-type T string //@rename("T", "R", TToR) ++type S 
string //@rename("T", "R", TToR) +@@ -6 +6 @@ +- T | ~int //@rename("T", "S", TToS) ++ S | ~int //@rename("T", "S", TToS) diff --git a/gopls/internal/test/marker/testdata/rename/issue39614.txt b/gopls/internal/test/marker/testdata/rename/issue39614.txt new file mode 100644 index 00000000000..d6d9c241ba7 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/issue39614.txt @@ -0,0 +1,18 @@ + +-- flags -- +-ignore_extra_diags + +-- p.go -- +package issue39614 + +func fn() { + var foo bool //@rename("foo", "bar", fooTobar) + make(map[string]bool + if true { + } +} + +-- @fooTobar/p.go -- +@@ -4 +4 @@ +- var foo bool //@rename("foo", "bar", fooTobar) ++ var bar bool //@rename("foo", "bar", fooTobar) diff --git a/gopls/internal/test/marker/testdata/rename/issue42134.txt b/gopls/internal/test/marker/testdata/rename/issue42134.txt new file mode 100644 index 00000000000..05fee50bed9 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/issue42134.txt @@ -0,0 +1,80 @@ +Regression test for #42134, +"rename fails to update doc comment for local variable of function type" + +-- 1.go -- +package issue42134 + +func _() { + // foo computes things. + foo := func() {} + + foo() //@rename("foo", "bar", fooTobar) +} +-- @fooTobar/1.go -- +@@ -4,2 +4,2 @@ +- // foo computes things. +- foo := func() {} ++ // bar computes things. ++ bar := func() {} +@@ -7 +7 @@ +- foo() //@rename("foo", "bar", fooTobar) ++ bar() //@rename("foo", "bar", fooTobar) +-- 2.go -- +package issue42134 + +import "fmt" + +func _() { + // minNumber is a min number. + // Second line. + minNumber := min(1, 2) + fmt.Println(minNumber) //@rename("minNumber", "res", minNumberTores) +} + +func min(a, b int) int { return a + b } +-- @minNumberTores/2.go -- +@@ -6 +6 @@ +- // minNumber is a min number. ++ // res is a min number. 
+@@ -8,2 +8,2 @@ +- minNumber := min(1, 2) +- fmt.Println(minNumber) //@rename("minNumber", "res", minNumberTores) ++ res := min(1, 2) ++ fmt.Println(res) //@rename("minNumber", "res", minNumberTores) +-- 3.go -- +package issue42134 + +func _() { + /* + tests contains test cases + */ + tests := []struct { //@rename("tests", "testCases", testsTotestCases) + in, out string + }{} + _ = tests +} +-- @testsTotestCases/3.go -- +@@ -5 +5 @@ +- tests contains test cases ++ testCases contains test cases +@@ -7 +7 @@ +- tests := []struct { //@rename("tests", "testCases", testsTotestCases) ++ testCases := []struct { //@rename("tests", "testCases", testsTotestCases) +@@ -10 +10 @@ +- _ = tests ++ _ = testCases +-- 4.go -- +package issue42134 + +func _() { + // a is equal to 5. Comment must stay the same + + a := 5 + _ = a //@rename("a", "b", aTob) +} +-- @aTob/4.go -- +@@ -6,2 +6,2 @@ +- a := 5 +- _ = a //@rename("a", "b", aTob) ++ b := 5 ++ _ = b //@rename("a", "b", aTob) diff --git a/gopls/internal/test/marker/testdata/rename/issue43616.txt b/gopls/internal/test/marker/testdata/rename/issue43616.txt new file mode 100644 index 00000000000..19cfac4a435 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/issue43616.txt @@ -0,0 +1,21 @@ +This test verifies the fix for golang/go#43616: renaming mishandles embedded +fields. 
+ +-- p.go -- +package issue43616 + +type foo int //@rename("foo", "bar", fooToBar),preparerename("oo","foo","foo") + +var x struct{ foo } //@renameerr("foo", "baz", "rename the type directly") + +var _ = x.foo //@renameerr("foo", "quux", "rename the type directly") +-- @fooToBar/p.go -- +@@ -3 +3 @@ +-type foo int //@rename("foo", "bar", fooToBar),preparerename("oo","foo","foo") ++type bar int //@rename("foo", "bar", fooToBar),preparerename("oo","foo","foo") +@@ -5 +5 @@ +-var x struct{ foo } //@renameerr("foo", "baz", "rename the type directly") ++var x struct{ bar } //@renameerr("foo", "baz", "rename the type directly") +@@ -7 +7 @@ +-var _ = x.foo //@renameerr("foo", "quux", "rename the type directly") ++var _ = x.bar //@renameerr("foo", "quux", "rename the type directly") diff --git a/gopls/internal/test/marker/testdata/rename/issue60752.txt b/gopls/internal/test/marker/testdata/rename/issue60752.txt new file mode 100644 index 00000000000..eec24b8e9de --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/issue60752.txt @@ -0,0 +1,57 @@ + +This test renames a receiver, type parameter, parameter or result var +whose name matches a package-level decl. Prior to go1.22, this used to +cause a spurious shadowing error because of an edge case in the +behavior of types.Scope for function parameters and results. + +This is a regression test for issue #60752, a bug in the type checker. 
+ +-- flags -- +-min_go=go1.22 + +-- go.mod -- +module example.com +go 1.18 + +-- a/type.go -- +package a + +type t int + +-- a/recv.go -- +package a + +func (v t) _() {} //@ rename("v", "t", recv) + +-- a/param.go -- +package a + +func _(v t) {} //@ rename("v", "t", param) + +-- a/result.go -- +package a + +func _() (v t) { return } //@ rename("v", "t", result) + +-- a/typeparam.go -- +package a + +func _[v t]() {} //@ renameerr("v", "t", re"would shadow (.|\n)*type.go:3:6") + +-- b/b.go -- +package b + +import _ "example.com/a" + +-- @param/a/param.go -- +@@ -3 +3 @@ +-func _(v t) {} //@ rename("v", "t", param) ++func _(t t) {} //@ rename("v", "t", param) +-- @recv/a/recv.go -- +@@ -3 +3 @@ +-func (v t) _() {} //@ rename("v", "t", recv) ++func (t t) _() {} //@ rename("v", "t", recv) +-- @result/a/result.go -- +@@ -3 +3 @@ +-func _() (v t) { return } //@ rename("v", "t", result) ++func _() (t t) { return } //@ rename("v", "t", result) diff --git a/gopls/internal/regtest/marker/testdata/rename/issue60789.txt b/gopls/internal/test/marker/testdata/rename/issue60789.txt similarity index 76% rename from gopls/internal/regtest/marker/testdata/rename/issue60789.txt rename to gopls/internal/test/marker/testdata/rename/issue60789.txt index 40173320c74..d5a0b9bb5ae 100644 --- a/gopls/internal/regtest/marker/testdata/rename/issue60789.txt +++ b/gopls/internal/test/marker/testdata/rename/issue60789.txt @@ -27,10 +27,9 @@ package b import _ "example.com/a" -- @fToG/a/a.go -- -package a - -type unexported int -func (unexported) G() {} //@rename("F", "G", fToG) - -var _ = unexported(0).G - +@@ -4 +4 @@ +-func (unexported) F() {} //@rename("F", "G", fToG) ++func (unexported) G() {} //@rename("F", "G", fToG) +@@ -6 +6 @@ +-var _ = unexported(0).F ++var _ = unexported(0).G diff --git a/gopls/internal/test/marker/testdata/rename/issue61294.txt b/gopls/internal/test/marker/testdata/rename/issue61294.txt new file mode 100644 index 00000000000..f376cf1d29a --- /dev/null +++ 
b/gopls/internal/test/marker/testdata/rename/issue61294.txt @@ -0,0 +1,26 @@ + +This test renames a parameter var whose name is the same as a +package-level var, which revealed a bug in isLocal. + +This is a regression test for issue #61294. + +-- go.mod -- +module example.com +go 1.18 + +-- a/a.go -- +package a + +func One() + +func Two(One int) //@rename("One", "Three", OneToThree) + +-- b/b.go -- +package b + +import _ "example.com/a" + +-- @OneToThree/a/a.go -- +@@ -5 +5 @@ +-func Two(One int) //@rename("One", "Three", OneToThree) ++func Two(Three int) //@rename("One", "Three", OneToThree) diff --git a/gopls/internal/test/marker/testdata/rename/issue61640.txt b/gopls/internal/test/marker/testdata/rename/issue61640.txt new file mode 100644 index 00000000000..d195399bee4 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/issue61640.txt @@ -0,0 +1,33 @@ +This test verifies that gopls can rename instantiated fields. + +-- a.go -- +package a + +// This file is adapted from the example in the issue. + +type builder[S ~[]int] struct { + elements S //@rename("elements", "elements2", OneToTwo) +} + +type BuilderImpl[S ~[]int] struct{ builder[S] } + +func NewBuilderImpl[S ~[]int](name string) *BuilderImpl[S] { + impl := &BuilderImpl[S]{ + builder[S]{ + elements: S{}, + }, + } + + _ = impl.elements + return impl +} +-- @OneToTwo/a.go -- +@@ -6 +6 @@ +- elements S //@rename("elements", "elements2", OneToTwo) ++ elements2 S //@rename("elements", "elements2", OneToTwo) +@@ -14 +14 @@ +- elements: S{}, ++ elements2: S{}, +@@ -18 +18 @@ +- _ = impl.elements ++ _ = impl.elements2 diff --git a/gopls/internal/test/marker/testdata/rename/issue61813.txt b/gopls/internal/test/marker/testdata/rename/issue61813.txt new file mode 100644 index 00000000000..9d3779bb427 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/issue61813.txt @@ -0,0 +1,14 @@ +This test exercises the panic reported in golang/go#61813. 
+ +-- p.go -- +package p + +type P struct{} + +func (P) M() {} //@rename("M", "N", MToN) + +var x = []*P{{}} +-- @MToN/p.go -- +@@ -5 +5 @@ +-func (P) M() {} //@rename("M", "N", MToN) ++func (P) N() {} //@rename("M", "N", MToN) diff --git a/gopls/internal/test/marker/testdata/rename/methods.txt b/gopls/internal/test/marker/testdata/rename/methods.txt new file mode 100644 index 00000000000..5f5c5688479 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/methods.txt @@ -0,0 +1,57 @@ +This test exercises renaming of interface methods. + +The golden is currently wrong due to https://github.com/golang/go/issues/58506: +the reference to B.F in package b should be renamed too. + +-- go.mod -- +module example.com +go 1.12 + +-- a/a.go -- +package a + +type A int + +func (A) F() {} //@renameerr("F", "G", errAfToG) + +-- b/b.go -- +package b + +import "example.com/a" +import "example.com/c" + +type B interface { F() } //@rename("F", "G", BfToG) + +var _ B = a.A(0) +var _ B = c.C(0) + +-- c/c.go -- +package c + +type C int + +func (C) F() {} //@renameerr("F", "G", errCfToG) + +-- d/d.go -- +package d + +import "example.com/b" + +var _ = b.B.F + +-- @errAfToG -- +a/a.go:5:10: renaming this method "F" to "G" +b/b.go:6:6: would make example.com/a.A no longer assignable to interface B +b/b.go:6:20: (rename example.com/b.B.F if you intend to change both types) +-- @BfToG/b/b.go -- +@@ -6 +6 @@ +-type B interface { F() } //@rename("F", "G", BfToG) ++type B interface { G() } //@rename("F", "G", BfToG) +-- @BfToG/d/d.go -- +@@ -5 +5 @@ +-var _ = b.B.F ++var _ = b.B.G +-- @errCfToG -- +c/c.go:5:10: renaming this method "F" to "G" +b/b.go:6:6: would make example.com/c.C no longer assignable to interface B +b/b.go:6:20: (rename example.com/b.B.F if you intend to change both types) diff --git a/gopls/internal/regtest/marker/testdata/rename/prepare.txt b/gopls/internal/test/marker/testdata/rename/prepare.txt similarity index 100% rename from 
gopls/internal/regtest/marker/testdata/rename/prepare.txt rename to gopls/internal/test/marker/testdata/rename/prepare.txt diff --git a/gopls/internal/test/marker/testdata/rename/random.txt b/gopls/internal/test/marker/testdata/rename/random.txt new file mode 100644 index 00000000000..5c58b3db626 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/random.txt @@ -0,0 +1,238 @@ +This test ports some "random" rename tests from the old marker tests. + +-- flags -- +-ignore_extra_diags + +-- go.mod -- +module golang.org/lsptests/rename + +go 1.18 +-- a/a.go -- +package a + +import ( + lg "log" + "fmt" //@rename("fmt", "fmty", fmtTofmty) + f2 "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) +) + +func Random() int { + y := 6 + 7 + return y +} + +func Random2(y int) int { //@rename("y", "z", yToz) + return y +} + +type Pos struct { + x, y int +} + +func (p *Pos) Sum() int { + return p.x + p.y //@rename("x", "myX", xTomyX) +} + +func _() { + var p Pos //@rename("p", "pos", pTopos) + _ = p.Sum() //@rename("Sum", "GetSum", SumToGetSum) +} + +func sw() { + var x interface{} + + switch y := x.(type) { //@rename("y", "y0", yToy0) + case int: + fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) + case string: + lg.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) + default: + f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) + } +} +-- @SumToGetSum/a/a.go -- +@@ -22 +22 @@ +-func (p *Pos) Sum() int { ++func (p *Pos) GetSum() int { +@@ -28 +28 @@ +- _ = p.Sum() //@rename("Sum", "GetSum", SumToGetSum) ++ _ = p.GetSum() //@rename("Sum", "GetSum", SumToGetSum) +-- @f2Tof2name/a/a.go -- +@@ -6 +6 @@ +- f2 "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) ++ f2name "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) +@@ -40 +40 @@ +- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) ++ 
f2name.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-- @f2Tofmt2/a/a.go -- +@@ -6 +6 @@ +- f2 "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) ++ fmt2 "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) +@@ -40 +40 @@ +- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) ++ fmt2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-- @fmtTof2y/a/a.go -- +@@ -6 +6 @@ +- f2 "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) ++ f2y "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) +@@ -40 +40 @@ +- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) ++ f2y.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-- @fmtTofmty/a/a.go -- +@@ -5 +5 @@ +- "fmt" //@rename("fmt", "fmty", fmtTofmty) ++ fmty "fmt" //@rename("fmt", "fmty", fmtTofmty) +@@ -36 +36 @@ +- fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) ++ fmty.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +-- @fmtToformat/a/a.go -- +@@ -5 +5 @@ +- "fmt" //@rename("fmt", "fmty", fmtTofmty) ++ format "fmt" //@rename("fmt", "fmty", fmtTofmty) +@@ -36 +36 @@ +- fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) ++ format.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +-- @lgTolog/a/a.go -- +@@ -4 +4 @@ +- lg "log" ++ "log" +@@ -38 +38 @@ +- lg.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) ++ log.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +-- @pTopos/a/a.go -- +@@ -27,2 +27,2 @@ +- var p Pos //@rename("p", "pos", pTopos) +- _ = p.Sum() //@rename("Sum", "GetSum", SumToGetSum) ++ var pos Pos //@rename("p", "pos", pTopos) ++ _ = pos.Sum() //@rename("Sum", "GetSum", SumToGetSum) +-- @xTomyX/a/a.go -- +@@ -19 
+19 @@ +- x, y int ++ myX, y int +@@ -23 +23 @@ +- return p.x + p.y //@rename("x", "myX", xTomyX) ++ return p.myX + p.y //@rename("x", "myX", xTomyX) +-- @yToy0/a/a.go -- +@@ -34 +34 @@ +- switch y := x.(type) { //@rename("y", "y0", yToy0) ++ switch y0 := x.(type) { //@rename("y", "y0", yToy0) +@@ -36 +36 @@ +- fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) ++ fmt.Printf("%d", y0) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +@@ -38 +38 @@ +- lg.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) ++ lg.Printf("%s", y0) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +@@ -40 +40 @@ +- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) ++ f2.Printf("%v", y0) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-- @yToy1/a/a.go -- +@@ -34 +34 @@ +- switch y := x.(type) { //@rename("y", "y0", yToy0) ++ switch y1 := x.(type) { //@rename("y", "y0", yToy0) +@@ -36 +36 @@ +- fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) ++ fmt.Printf("%d", y1) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +@@ -38 +38 @@ +- lg.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) ++ lg.Printf("%s", y1) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +@@ -40 +40 @@ +- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) ++ f2.Printf("%v", y1) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-- @yToy2/a/a.go -- +@@ -34 +34 @@ +- switch y := x.(type) { //@rename("y", "y0", yToy0) ++ switch y2 := x.(type) { //@rename("y", "y0", yToy0) +@@ -36 +36 @@ +- fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) ++ fmt.Printf("%d", y2) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +@@ -38 +38 @@ +- lg.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) ++ lg.Printf("%s", y2) //@rename("y", 
"y2", yToy2),rename("lg", "log", lgTolog) +@@ -40 +40 @@ +- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) ++ f2.Printf("%v", y2) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-- @yToy3/a/a.go -- +@@ -34 +34 @@ +- switch y := x.(type) { //@rename("y", "y0", yToy0) ++ switch y3 := x.(type) { //@rename("y", "y0", yToy0) +@@ -36 +36 @@ +- fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) ++ fmt.Printf("%d", y3) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +@@ -38 +38 @@ +- lg.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) ++ lg.Printf("%s", y3) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +@@ -40 +40 @@ +- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) ++ f2.Printf("%v", y3) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-- @yToz/a/a.go -- +@@ -14,2 +14,2 @@ +-func Random2(y int) int { //@rename("y", "z", yToz) +- return y ++func Random2(z int) int { //@rename("y", "z", yToz) ++ return z +-- b/b.go -- +package b + +var c int //@renameerr("int", "uint", re"cannot be renamed") + +func _() { + a := 1 //@rename("a", "error", aToerror) + a = 2 + _ = a +} + +var ( + // Hello there. + // Foo does the thing. + Foo int //@rename("Foo", "Bob", FooToBob) +) + +/* +Hello description +*/ +func Hello() {} //@rename("Hello", "Goodbye", HelloToGoodbye) + +-- c/c.go -- +package c + +import "golang.org/lsptests/rename/b" + +func _() { + b.Hello() //@rename("Hello", "Goodbye", HelloToGoodbye) +} + +-- c/c2.go -- +package c + +//go:embed Static/* +var Static embed.FS //@rename("Static", "static", StaticTostatic) + +-- @FooToBob/b/b.go -- +@@ -13,2 +13,2 @@ +- // Foo does the thing. +- Foo int //@rename("Foo", "Bob", FooToBob) ++ // Bob does the thing. 
++ Bob int //@rename("Foo", "Bob", FooToBob) +-- @HelloToGoodbye/b/b.go -- +@@ -18 +18 @@ +-Hello description ++Goodbye description +@@ -20 +20 @@ +-func Hello() {} //@rename("Hello", "Goodbye", HelloToGoodbye) ++func Goodbye() {} //@rename("Hello", "Goodbye", HelloToGoodbye) +-- @aToerror/b/b.go -- +@@ -6,3 +6,3 @@ +- a := 1 //@rename("a", "error", aToerror) +- a = 2 +- _ = a ++ error := 1 //@rename("a", "error", aToerror) ++ error = 2 ++ _ = error +-- @HelloToGoodbye/c/c.go -- +@@ -6 +6 @@ +- b.Hello() //@rename("Hello", "Goodbye", HelloToGoodbye) ++ b.Goodbye() //@rename("Hello", "Goodbye", HelloToGoodbye) +-- @StaticTostatic/c/c2.go -- +@@ -4 +4 @@ +-var Static embed.FS //@rename("Static", "static", StaticTostatic) ++var static embed.FS //@rename("Static", "static", StaticTostatic) diff --git a/gopls/internal/test/marker/testdata/rename/shadow.txt b/gopls/internal/test/marker/testdata/rename/shadow.txt new file mode 100644 index 00000000000..8f6239e7dbb --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/shadow.txt @@ -0,0 +1,36 @@ + +-- shadow.go -- +package shadow + +func _() { + a := true + b, c, _ := A(), B(), D() //@renameerr("A", "a", re"shadowed"),rename("B", "b", BTob),renameerr("b", "c", re"conflict"),rename("D", "d", DTod) + d := false + _, _, _, _ = a, b, c, d +} + +func A() int { + return 0 +} + +func B() int { + return 0 +} + +func D() int { + return 0 +} +-- @BTob/shadow.go -- +@@ -5 +5 @@ +- b, c, _ := A(), B(), D() //@renameerr("A", "a", re"shadowed"),rename("B", "b", BTob),renameerr("b", "c", re"conflict"),rename("D", "d", DTod) ++ b, c, _ := A(), b(), D() //@renameerr("A", "a", re"shadowed"),rename("B", "b", BTob),renameerr("b", "c", re"conflict"),rename("D", "d", DTod) +@@ -14 +14 @@ +-func B() int { ++func b() int { +-- @DTod/shadow.go -- +@@ -5 +5 @@ +- b, c, _ := A(), B(), D() //@renameerr("A", "a", re"shadowed"),rename("B", "b", BTob),renameerr("b", "c", re"conflict"),rename("D", "d", DTod) ++ b, c, _ := A(), B(), d() 
//@renameerr("A", "a", re"shadowed"),rename("B", "b", BTob),renameerr("b", "c", re"conflict"),rename("D", "d", DTod) +@@ -18 +18 @@ +-func D() int { ++func d() int { diff --git a/gopls/internal/test/marker/testdata/rename/testy.txt b/gopls/internal/test/marker/testdata/rename/testy.txt new file mode 100644 index 00000000000..e7f75038a06 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/testy.txt @@ -0,0 +1,41 @@ + +-- flags -- +-ignore_extra_diags + +-- testy.go -- +package testy + +type tt int //@rename("tt", "testyType", ttTotestyType) + +func a() { + foo := 42 //@rename("foo", "bar", fooTobar) +} +-- testy_test.go -- +package testy + +import "testing" + +func TestSomething(t *testing.T) { + var x int //@rename("x", "testyX", xTotestyX) + a() //@rename("a", "b", aTob) +} +-- @aTob/testy.go -- +@@ -5 +5 @@ +-func a() { ++func b() { +-- @aTob/testy_test.go -- +@@ -7 +7 @@ +- a() //@rename("a", "b", aTob) ++ b() //@rename("a", "b", aTob) +-- @fooTobar/testy.go -- +@@ -6 +6 @@ +- foo := 42 //@rename("foo", "bar", fooTobar) ++ bar := 42 //@rename("foo", "bar", fooTobar) +-- @ttTotestyType/testy.go -- +@@ -3 +3 @@ +-type tt int //@rename("tt", "testyType", ttTotestyType) ++type testyType int //@rename("tt", "testyType", ttTotestyType) +-- @xTotestyX/testy_test.go -- +@@ -6 +6 @@ +- var x int //@rename("x", "testyX", xTotestyX) ++ var testyX int //@rename("x", "testyX", xTotestyX) diff --git a/gopls/internal/test/marker/testdata/rename/typeswitch.txt b/gopls/internal/test/marker/testdata/rename/typeswitch.txt new file mode 100644 index 00000000000..ec550021745 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/typeswitch.txt @@ -0,0 +1,24 @@ +This test covers the special case of renaming a type switch var. 
+ +-- p.go -- +package p + +func _(x interface{}) { + switch y := x.(type) { //@rename("y", "z", yToZ) + case string: + print(y) //@rename("y", "z", yToZ) + default: + print(y) //@rename("y", "z", yToZ) + } +} + +-- @yToZ/p.go -- +@@ -4 +4 @@ +- switch y := x.(type) { //@rename("y", "z", yToZ) ++ switch z := x.(type) { //@rename("y", "z", yToZ) +@@ -6 +6 @@ +- print(y) //@rename("y", "z", yToZ) ++ print(z) //@rename("y", "z", yToZ) +@@ -8 +8 @@ +- print(y) //@rename("y", "z", yToZ) ++ print(z) //@rename("y", "z", yToZ) diff --git a/gopls/internal/regtest/marker/testdata/rename/unexported.txt b/gopls/internal/test/marker/testdata/rename/unexported.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/rename/unexported.txt rename to gopls/internal/test/marker/testdata/rename/unexported.txt diff --git a/gopls/internal/test/marker/testdata/selectionrange/selectionrange.txt b/gopls/internal/test/marker/testdata/selectionrange/selectionrange.txt new file mode 100644 index 00000000000..d186ae2da52 --- /dev/null +++ b/gopls/internal/test/marker/testdata/selectionrange/selectionrange.txt @@ -0,0 +1,42 @@ +This test checks selection range functionality. 
+ +-- foo.go -- +package foo + +import "time" + +func Bar(x, y int, t time.Time) int { + zs := []int{1, 2, 3} //@selectionrange("1", a) + + for _, z := range zs { + x = x + z + y + zs[1] //@selectionrange("1", b) + } + + return x + y //@selectionrange("+", c) +} +-- @a -- +Ranges 0: + 5:13-5:14 "1" + 5:7-5:21 "[]int{1, 2, 3}" + 5:1-5:21 "zs := []int{1, 2, 3}" + 4:36-12:1 "{\\n\tzs := []int{...range(\"+\", c)\\n}" + 4:0-12:1 "func Bar(x, y i...range(\"+\", c)\\n}" + 0:0-12:1 "package foo\\n\\nim...range(\"+\", c)\\n}" +-- @b -- +Ranges 0: + 8:21-8:22 "1" + 8:18-8:23 "zs[1]" + 8:6-8:23 "x + z + y + zs[1]" + 8:2-8:23 "x = x + z + y + zs[1]" + 7:22-9:2 "{\\n\t\tx = x + z +...ange(\"1\", b)\\n\t}" + 7:1-9:2 "for _, z := ran...ange(\"1\", b)\\n\t}" + 4:36-12:1 "{\\n\tzs := []int{...range(\"+\", c)\\n}" + 4:0-12:1 "func Bar(x, y i...range(\"+\", c)\\n}" + 0:0-12:1 "package foo\\n\\nim...range(\"+\", c)\\n}" +-- @c -- +Ranges 0: + 11:8-11:13 "x + y" + 11:1-11:13 "return x + y" + 4:36-12:1 "{\\n\tzs := []int{...range(\"+\", c)\\n}" + 4:0-12:1 "func Bar(x, y i...range(\"+\", c)\\n}" + 0:0-12:1 "package foo\\n\\nim...range(\"+\", c)\\n}" diff --git a/gopls/internal/regtest/marker/testdata/signature/generic.txt b/gopls/internal/test/marker/testdata/signature/generic.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/signature/generic.txt rename to gopls/internal/test/marker/testdata/signature/generic.txt diff --git a/gopls/internal/test/marker/testdata/signature/issue63804.txt b/gopls/internal/test/marker/testdata/signature/issue63804.txt new file mode 100644 index 00000000000..b65183391ef --- /dev/null +++ b/gopls/internal/test/marker/testdata/signature/issue63804.txt @@ -0,0 +1,13 @@ +Regresson test for #63804: conversion to built-in type caused panic. + +the server's Signature method never returns an actual error, +so the best we can assert is that there is no result. 
+ +-- go.mod -- +module example.com +go 1.18 + +-- a/a.go -- +package a + +var _ = int(123) //@signature("123", "", 0) diff --git a/gopls/internal/regtest/marker/testdata/signature/signature.txt b/gopls/internal/test/marker/testdata/signature/signature.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/signature/signature.txt rename to gopls/internal/test/marker/testdata/signature/signature.txt diff --git a/gopls/internal/test/marker/testdata/stubmethods/basic.txt b/gopls/internal/test/marker/testdata/stubmethods/basic.txt new file mode 100644 index 00000000000..e4cfb6d05a0 --- /dev/null +++ b/gopls/internal/test/marker/testdata/stubmethods/basic.txt @@ -0,0 +1,20 @@ +This test exercises basic 'stub methods' functionality. +See basic_resolve.txt for the same test with resolve support. + +-- go.mod -- +module example.com +go 1.12 + +-- a/a.go -- +package a + +type C int + +var _ error = C(0) //@suggestedfix(re"C.0.", re"missing method Error", stub) +-- @stub/a/a.go -- +@@ -5 +5,5 @@ ++// Error implements error. ++func (c C) Error() string { ++ panic("unimplemented") ++} ++ diff --git a/gopls/internal/test/marker/testdata/stubmethods/basic_resolve.txt b/gopls/internal/test/marker/testdata/stubmethods/basic_resolve.txt new file mode 100644 index 00000000000..183b7d526eb --- /dev/null +++ b/gopls/internal/test/marker/testdata/stubmethods/basic_resolve.txt @@ -0,0 +1,31 @@ +This test exercises basic 'stub methods' functionality, with resolve support. +See basic.txt for the same test without resolve support. + +-- capabilities.json -- +{ + "textDocument": { + "codeAction": { + "dataSupport": true, + "resolveSupport": { + "properties": ["edit"] + } + } + } +} +-- go.mod -- +module example.com +go 1.12 + +-- a/a.go -- +package a + +type C int + +var _ error = C(0) //@suggestedfix(re"C.0.", re"missing method Error", stub) +-- @stub/a/a.go -- +@@ -5 +5,5 @@ ++// Error implements error. 
++func (c C) Error() string { ++ panic("unimplemented") ++} ++ diff --git a/gopls/internal/regtest/marker/testdata/stubmethods/issue61693.txt b/gopls/internal/test/marker/testdata/stubmethods/issue61693.txt similarity index 82% rename from gopls/internal/regtest/marker/testdata/stubmethods/issue61693.txt rename to gopls/internal/test/marker/testdata/stubmethods/issue61693.txt index f767b656b42..387b494bc72 100644 --- a/gopls/internal/regtest/marker/testdata/stubmethods/issue61693.txt +++ b/gopls/internal/test/marker/testdata/stubmethods/issue61693.txt @@ -18,13 +18,9 @@ func _() { F(x, C(0)) //@suggestedfix(re"C.0.", re"missing method Error", stub) } -- @stub/main.go -- ---- before -+++ after -@@ -3 +3,6 @@ --type C int -+type C int -+ +@@ -5 +5,5 @@ +// Error implements error. -+func (C) Error() string { ++func (c C) Error() string { + panic("unimplemented") +} ++ diff --git a/gopls/internal/regtest/marker/testdata/stubmethods/issue61830.txt b/gopls/internal/test/marker/testdata/stubmethods/issue61830.txt similarity index 76% rename from gopls/internal/regtest/marker/testdata/stubmethods/issue61830.txt rename to gopls/internal/test/marker/testdata/stubmethods/issue61830.txt index 3e6fab1bb00..bf5bcc5ca2e 100644 --- a/gopls/internal/regtest/marker/testdata/stubmethods/issue61830.txt +++ b/gopls/internal/test/marker/testdata/stubmethods/issue61830.txt @@ -16,13 +16,9 @@ type A struct{} var _ I = &A{} //@suggestedfix(re"&A..", re"missing method M", stub) -- @stub/p.go -- ---- before -+++ after -@@ -11 +11,6 @@ --type A struct{} -+type A struct{} -+ +@@ -13 +13,5 @@ +// M implements I. 
-+func (*A) M(io.Reader, B) { ++func (a *A) M(io.Reader, B) { + panic("unimplemented") +} ++ diff --git a/gopls/internal/test/marker/testdata/stubmethods/issue64078.txt b/gopls/internal/test/marker/testdata/stubmethods/issue64078.txt new file mode 100644 index 00000000000..50db6f27cfd --- /dev/null +++ b/gopls/internal/test/marker/testdata/stubmethods/issue64078.txt @@ -0,0 +1,36 @@ +This test verifies that the named receiver is generated. + +-- p.go -- +package p + +type A struct{} + +func (aa *A) M1() { + panic("unimplemented") +} + +type I interface { + M1() + M2(aa string) + M3(bb string) + M4() (aa string) +} + +var _ I = &A{} //@suggestedfix(re"&A..", re"missing method M", stub) +-- @stub/p.go -- +@@ -5 +5,15 @@ ++// M2 implements I. ++func (*A) M2(aa string) { ++ panic("unimplemented") ++} ++ ++// M3 implements I. ++func (aa *A) M3(bb string) { ++ panic("unimplemented") ++} ++ ++// M4 implements I. ++func (*A) M4() (aa string) { ++ panic("unimplemented") ++} ++ diff --git a/gopls/internal/test/marker/testdata/stubmethods/issue64114.txt b/gopls/internal/test/marker/testdata/stubmethods/issue64114.txt new file mode 100644 index 00000000000..35f6db728bb --- /dev/null +++ b/gopls/internal/test/marker/testdata/stubmethods/issue64114.txt @@ -0,0 +1,37 @@ +This test verifies that the embedded field has a method with the same name. + +-- issue64114.go -- +package stub + +// Regression test for issue #64114: code action "implement" is not listed. + +var _ WriteTest = (*WriteStruct)(nil) //@suggestedfix("(", re"does not implement", issue64114) + +type WriterTwoStruct struct{} + +// Write implements io.ReadWriter. +func (t *WriterTwoStruct) RRRR(str string) error { + panic("unimplemented") +} + +type WriteTest interface { + RRRR() + WWWW() +} + +type WriteStruct struct { + WriterTwoStruct +} +-- @issue64114/issue64114.go -- +@@ -22 +22,11 @@ ++ ++// RRRR implements WriteTest. 
++// Subtle: this method shadows the method (WriterTwoStruct).RRRR of WriteStruct.WriterTwoStruct. ++func (w *WriteStruct) RRRR() { ++ panic("unimplemented") ++} ++ ++// WWWW implements WriteTest. ++func (w *WriteStruct) WWWW() { ++ panic("unimplemented") ++} diff --git a/gopls/internal/test/marker/testdata/suggestedfix/embeddirective.txt b/gopls/internal/test/marker/testdata/suggestedfix/embeddirective.txt new file mode 100644 index 00000000000..821eb10ef20 --- /dev/null +++ b/gopls/internal/test/marker/testdata/suggestedfix/embeddirective.txt @@ -0,0 +1,22 @@ +This test checks the quick fix to add a missing "embed" import. + +-- embed.txt -- +text +-- fix_import.go -- +package embeddirective + +import ( + "io" + "os" +) + +//go:embed embed.txt //@suggestedfix("//go:embed", re`must import "embed"`, fix_import) +var t string + +func unused() { + _ = os.Stdin + _ = io.EOF +} +-- @fix_import/fix_import.go -- +@@ -4 +4 @@ ++ _ "embed" diff --git a/gopls/internal/test/marker/testdata/suggestedfix/issue65024.txt b/gopls/internal/test/marker/testdata/suggestedfix/issue65024.txt new file mode 100644 index 00000000000..afdfce9f1cc --- /dev/null +++ b/gopls/internal/test/marker/testdata/suggestedfix/issue65024.txt @@ -0,0 +1,78 @@ +Regression example.com for #65024, "incorrect package qualification when +stubbing method in v2 module". + +The second test (a-a) ensures that we don't use path-based heuristics +to guess the PkgName of an import. + +-- a/v2/go.mod -- +module example.com/a/v2 +go 1.18 + +-- a/v2/a.go -- +package a + +type I interface { F() T } + +type T struct {} + +-- a/v2/b/b.go -- +package b + +import "example.com/a/v2" + +type B struct{} + +var _ a.I = &B{} //@ suggestedfix("&B{}", re"does not implement", out) + +// This line makes the diff tidier. + +-- @out/a/v2/b/b.go -- +@@ -7 +7,5 @@ ++// F implements a.I. ++func (b *B) F() a.T { ++ panic("unimplemented") ++} ++ +@@ -10 +15 @@ +- +-- a-a/v2/go.mod -- +// This module has a hyphenated name--how posh. 
+// It won't do to use it as an identifier. +// The correct name is the one in the package decl, +// which in this case is not what the path heuristic would guess. +module example.com/a-a/v2 +go 1.18 + +-- a-a/v2/a.go -- +package a +type I interface { F() T } +type T struct {} + +-- a-a/v2/b/b.go -- +package b + +// Note: no existing import of a. + +type B struct{} + +var _ I = &B{} //@ suggestedfix("&B{}", re"does not implement", out2) + +// This line makes the diff tidier. + +-- a-a/v2/b/import-a-I.go -- +package b +import "example.com/a-a/v2" +type I = a.I + +-- @out2/a-a/v2/b/b.go -- +@@ -3 +3,2 @@ ++import a "example.com/a-a/v2" ++ +@@ -7 +9,5 @@ ++// F implements a.I. ++func (b *B) F() a.T { ++ panic("unimplemented") ++} ++ +@@ -10 +17 @@ +- diff --git a/gopls/internal/test/marker/testdata/suggestedfix/missingfunction.txt b/gopls/internal/test/marker/testdata/suggestedfix/missingfunction.txt new file mode 100644 index 00000000000..b19095a06f3 --- /dev/null +++ b/gopls/internal/test/marker/testdata/suggestedfix/missingfunction.txt @@ -0,0 +1,127 @@ +This test checks the quick fix for undefined functions. 
+ +-- channels.go -- +package missingfunction + +func channels(s string) { + undefinedChannels(c()) //@suggestedfix("undefinedChannels", re"(undeclared|undefined)", channels) +} + +func c() (<-chan string, chan string) { + return make(<-chan string), make(chan string) +} +-- @channels/channels.go -- +@@ -7 +7,4 @@ ++func undefinedChannels(ch1 <-chan string, ch2 chan string) { ++ panic("unimplemented") ++} ++ +-- consecutive.go -- +package missingfunction + +func consecutiveParams() { + var s string + undefinedConsecutiveParams(s, s) //@suggestedfix("undefinedConsecutiveParams", re"(undeclared|undefined)", consecutive) +} +-- @consecutive/consecutive.go -- +@@ -7 +7,4 @@ ++ ++func undefinedConsecutiveParams(s1, s2 string) { ++ panic("unimplemented") ++} +-- error.go -- +package missingfunction + +func errorParam() { + var err error + undefinedErrorParam(err) //@suggestedfix("undefinedErrorParam", re"(undeclared|undefined)", error) +} +-- @error/error.go -- +@@ -7 +7,4 @@ ++ ++func undefinedErrorParam(err error) { ++ panic("unimplemented") ++} +-- literals.go -- +package missingfunction + +type T struct{} + +func literals() { + undefinedLiterals("hey compiler", T{}, &T{}) //@suggestedfix("undefinedLiterals", re"(undeclared|undefined)", literals) +} +-- @literals/literals.go -- +@@ -8 +8,4 @@ ++ ++func undefinedLiterals(s string, t1 T, t2 *T) { ++ panic("unimplemented") ++} +-- operation.go -- +package missingfunction + +import "time" + +func operation() { + undefinedOperation(10 * time.Second) //@suggestedfix("undefinedOperation", re"(undeclared|undefined)", operation) +} +-- @operation/operation.go -- +@@ -8 +8,4 @@ ++ ++func undefinedOperation(duration time.Duration) { ++ panic("unimplemented") ++} +-- selector.go -- +package missingfunction + +func selector() { + m := map[int]bool{} + undefinedSelector(m[1]) //@suggestedfix("undefinedSelector", re"(undeclared|undefined)", selector) +} +-- @selector/selector.go -- +@@ -7 +7,4 @@ ++ ++func undefinedSelector(b bool) 
{ ++ panic("unimplemented") ++} +-- slice.go -- +package missingfunction + +func slice() { + undefinedSlice([]int{1, 2}) //@suggestedfix("undefinedSlice", re"(undeclared|undefined)", slice) +} +-- @slice/slice.go -- +@@ -6 +6,4 @@ ++ ++func undefinedSlice(i []int) { ++ panic("unimplemented") ++} +-- tuple.go -- +package missingfunction + +func tuple() { + undefinedTuple(b()) //@suggestedfix("undefinedTuple", re"(undeclared|undefined)", tuple) +} + +func b() (string, error) { + return "", nil +} +-- @tuple/tuple.go -- +@@ -7 +7,4 @@ ++func undefinedTuple(s string, err error) { ++ panic("unimplemented") ++} ++ +-- unique_params.go -- +package missingfunction + +func uniqueArguments() { + var s string + var i int + undefinedUniqueArguments(s, i, s) //@suggestedfix("undefinedUniqueArguments", re"(undeclared|undefined)", unique) +} +-- @unique/unique_params.go -- +@@ -8 +8,4 @@ ++ ++func undefinedUniqueArguments(s1 string, i int, s2 string) { ++ panic("unimplemented") ++} diff --git a/gopls/internal/test/marker/testdata/suggestedfix/noresultvalues.txt b/gopls/internal/test/marker/testdata/suggestedfix/noresultvalues.txt new file mode 100644 index 00000000000..5847cea15b7 --- /dev/null +++ b/gopls/internal/test/marker/testdata/suggestedfix/noresultvalues.txt @@ -0,0 +1,18 @@ +This test checks the quick fix for removing extra return values. + +Note: gopls should really discard unnecessary return statements. 
+ +-- noresultvalues.go -- +package typeerrors + +func x() { return nil } //@suggestedfix("nil", re"too many return", x) + +func y() { return nil, "hello" } //@suggestedfix("nil", re"too many return", y) +-- @x/noresultvalues.go -- +@@ -3 +3 @@ +-func x() { return nil } //@suggestedfix("nil", re"too many return", x) ++func x() { return } //@suggestedfix("nil", re"too many return", x) +-- @y/noresultvalues.go -- +@@ -5 +5 @@ +-func y() { return nil, "hello" } //@suggestedfix("nil", re"too many return", y) ++func y() { return } //@suggestedfix("nil", re"too many return", y) diff --git a/gopls/internal/regtest/marker/testdata/suggestedfix/self_assignment.txt b/gopls/internal/test/marker/testdata/suggestedfix/self_assignment.txt similarity index 94% rename from gopls/internal/regtest/marker/testdata/suggestedfix/self_assignment.txt rename to gopls/internal/test/marker/testdata/suggestedfix/self_assignment.txt index 1003ef21ffa..9f3c7ca5618 100644 --- a/gopls/internal/regtest/marker/testdata/suggestedfix/self_assignment.txt +++ b/gopls/internal/test/marker/testdata/suggestedfix/self_assignment.txt @@ -14,8 +14,6 @@ func goodbye() { } -- @fix/a.go -- ---- before -+++ after @@ -9 +9 @@ - s = s //@suggestedfix("s = s", re"self-assignment", fix) + //@suggestedfix("s = s", re"self-assignment", fix) diff --git a/gopls/internal/test/marker/testdata/suggestedfix/stub.txt b/gopls/internal/test/marker/testdata/suggestedfix/stub.txt new file mode 100644 index 00000000000..e31494ae461 --- /dev/null +++ b/gopls/internal/test/marker/testdata/suggestedfix/stub.txt @@ -0,0 +1,365 @@ +This test checks the 'implement interface' quick fix. 
+ +-- go.mod -- +module golang.org/lsptests/stub + +go 1.18 + +-- other/other.go -- +package other + +import ( + "bytes" + renamed_context "context" +) + +type Interface interface { + Get(renamed_context.Context) *bytes.Buffer +} + +-- add_selector.go -- +package stub + +import "io" + +// This file tests that if an interface +// method references a type from its own package +// then our implementation must add the import/package selector +// in the concrete method if the concrete type is outside of the interface +// package +var _ io.ReaderFrom = &readerFrom{} //@suggestedfix("&readerFrom", re"cannot use", readerFrom) + +type readerFrom struct{} +-- @readerFrom/add_selector.go -- +@@ -13 +13,5 @@ ++ ++// ReadFrom implements io.ReaderFrom. ++func (*readerFrom) ReadFrom(r io.Reader) (n int64, err error) { ++ panic("unimplemented") ++} +-- assign.go -- +package stub + +import "io" + +func _() { + var br io.ByteWriter + br = &byteWriter{} //@suggestedfix("&", re"does not implement", assign) + _ = br +} + +type byteWriter struct{} +-- @assign/assign.go -- +@@ -12 +12,5 @@ ++ ++// WriteByte implements io.ByteWriter. ++func (b *byteWriter) WriteByte(c byte) error { ++ panic("unimplemented") ++} +-- assign_multivars.go -- +package stub + +import "io" + +func _() { + var br io.ByteWriter + var i int + i, br = 1, &multiByteWriter{} //@suggestedfix("&", re"does not implement", assign_multivars) + _, _ = i, br +} + +type multiByteWriter struct{} +-- @assign_multivars/assign_multivars.go -- +@@ -13 +13,5 @@ ++ ++// WriteByte implements io.ByteWriter. ++func (m *multiByteWriter) WriteByte(c byte) error { ++ panic("unimplemented") ++} +-- call_expr.go -- +package stub + +func main() { + check(&callExpr{}) //@suggestedfix("&", re"does not implement", call_expr) +} + +func check(err error) { + if err != nil { + panic(err) + } +} + +type callExpr struct{} +-- @call_expr/call_expr.go -- +@@ -14 +14,5 @@ ++ ++// Error implements error. 
++func (c *callExpr) Error() string { ++ panic("unimplemented") ++} +-- embedded.go -- +package stub + +import ( + "io" + "sort" +) + +var _ embeddedInterface = (*embeddedConcrete)(nil) //@suggestedfix("(", re"does not implement", embedded) + +type embeddedConcrete struct{} + +type embeddedInterface interface { + sort.Interface + io.Reader +} +-- @embedded/embedded.go -- +@@ -12 +12,20 @@ ++// Len implements embeddedInterface. ++func (e *embeddedConcrete) Len() int { ++ panic("unimplemented") ++} ++ ++// Less implements embeddedInterface. ++func (e *embeddedConcrete) Less(i int, j int) bool { ++ panic("unimplemented") ++} ++ ++// Read implements embeddedInterface. ++func (e *embeddedConcrete) Read(p []byte) (n int, err error) { ++ panic("unimplemented") ++} ++ ++// Swap implements embeddedInterface. ++func (e *embeddedConcrete) Swap(i int, j int) { ++ panic("unimplemented") ++} ++ +-- err.go -- +package stub + +func _() { + var br error = &customErr{} //@suggestedfix("&", re"does not implement", err) + _ = br +} + +type customErr struct{} +-- @err/err.go -- +@@ -9 +9,5 @@ ++ ++// Error implements error. ++func (c *customErr) Error() string { ++ panic("unimplemented") ++} +-- function_return.go -- +package stub + +import ( + "io" +) + +func newCloser() io.Closer { + return closer{} //@suggestedfix("c", re"does not implement", function_return) +} + +type closer struct{} +-- @function_return/function_return.go -- +@@ -12 +12,5 @@ ++ ++// Close implements io.Closer. ++func (c closer) Close() error { ++ panic("unimplemented") ++} +-- generic_receiver.go -- +package stub + +import "io" + +// This file tests that that the stub method generator accounts for concrete +// types that have type parameters defined. 
+var _ io.ReaderFrom = &genReader[string, int]{} //@suggestedfix("&genReader", re"does not implement", generic_receiver) + +type genReader[T, Y any] struct { + T T + Y Y +} +-- @generic_receiver/generic_receiver.go -- +@@ -13 +13,5 @@ ++ ++// ReadFrom implements io.ReaderFrom. ++func (g *genReader[T, Y]) ReadFrom(r io.Reader) (n int64, err error) { ++ panic("unimplemented") ++} +-- ignored_imports.go -- +package stub + +import ( + "compress/zlib" + . "io" + _ "io" +) + +// This file tests that dot-imports and underscore imports +// are properly ignored and that a new import is added to +// reference method types + +var ( + _ Reader + _ zlib.Resetter = (*ignoredResetter)(nil) //@suggestedfix("(", re"does not implement", ignored_imports) +) + +type ignoredResetter struct{} +-- @ignored_imports/ignored_imports.go -- +@@ -19 +19,5 @@ ++ ++// Reset implements zlib.Resetter. ++func (i *ignoredResetter) Reset(r Reader, dict []byte) error { ++ panic("unimplemented") ++} +-- issue2606.go -- +package stub + +type I interface{ error } + +type C int + +var _ I = C(0) //@suggestedfix("C", re"does not implement", issue2606) +-- @issue2606/issue2606.go -- +@@ -7 +7,5 @@ ++// Error implements I. ++func (c C) Error() string { ++ panic("unimplemented") ++} ++ +-- multi_var.go -- +package stub + +import "io" + +// This test ensures that a variable declaration that +// has multiple values on the same line can still be +// analyzed correctly to target the interface implementation +// diagnostic. +var one, two, three io.Reader = nil, &multiVar{}, nil //@suggestedfix("&", re"does not implement", multi_var) + +type multiVar struct{} +-- @multi_var/multi_var.go -- +@@ -12 +12,5 @@ ++ ++// Read implements io.Reader. 
++func (m *multiVar) Read(p []byte) (n int, err error) { ++ panic("unimplemented") ++} +-- pointer.go -- +package stub + +import "io" + +func getReaderFrom() io.ReaderFrom { + return &pointerImpl{} //@suggestedfix("&", re"does not implement", pointer) +} + +type pointerImpl struct{} +-- @pointer/pointer.go -- +@@ -10 +10,5 @@ ++ ++// ReadFrom implements io.ReaderFrom. ++func (p *pointerImpl) ReadFrom(r io.Reader) (n int64, err error) { ++ panic("unimplemented") ++} +-- renamed_import.go -- +package stub + +import ( + "compress/zlib" + myio "io" +) + +var _ zlib.Resetter = &myIO{} //@suggestedfix("&", re"does not implement", renamed_import) +var _ myio.Reader + +type myIO struct{} +-- @renamed_import/renamed_import.go -- +@@ -12 +12,5 @@ ++ ++// Reset implements zlib.Resetter. ++func (m *myIO) Reset(r myio.Reader, dict []byte) error { ++ panic("unimplemented") ++} +-- renamed_import_iface.go -- +package stub + +import ( + "golang.org/lsptests/stub/other" +) + +// This file tests that if an interface +// method references an import from its own package +// that the concrete type does not yet import, and that import happens +// to be renamed, then we prefer the renaming of the interface. +var _ other.Interface = &otherInterfaceImpl{} //@suggestedfix("&otherInterfaceImpl", re"does not implement", renamed_import_iface) + +type otherInterfaceImpl struct{} +-- @renamed_import_iface/renamed_import_iface.go -- +@@ -4 +4,2 @@ ++ "bytes" ++ "context" +@@ -14 +16,5 @@ ++ ++// Get implements other.Interface. ++func (o *otherInterfaceImpl) Get(context.Context) *bytes.Buffer { ++ panic("unimplemented") ++} +-- stdlib.go -- +package stub + +import ( + "io" +) + +var _ io.Writer = writer{} //@suggestedfix("w", re"does not implement", stdlib) + +type writer struct{} +-- @stdlib/stdlib.go -- +@@ -10 +10,5 @@ ++ ++// Write implements io.Writer. 
++func (w writer) Write(p []byte) (n int, err error) { ++ panic("unimplemented") ++} +-- typedecl_group.go -- +package stub + +// Regression test for Issue #56825: file corrupted by insertion of +// methods after TypeSpec in a parenthesized TypeDecl. + +import "io" + +func newReadCloser() io.ReadCloser { + return rdcloser{} //@suggestedfix("rd", re"does not implement", typedecl_group) +} + +type ( + A int + rdcloser struct{} + B int +) + +func _() { + // Local types can't be stubbed as there's nowhere to put the methods. + // The suggestedfix assertion can't express this yet. TODO(adonovan): support it. + type local struct{} + var _ io.ReadCloser = local{} //@diag("local", re"does not implement") +} +-- @typedecl_group/typedecl_group.go -- +@@ -18 +18,10 @@ ++// Close implements io.ReadCloser. ++func (r rdcloser) Close() error { ++ panic("unimplemented") ++} ++ ++// Read implements io.ReadCloser. ++func (r rdcloser) Read(p []byte) (n int, err error) { ++ panic("unimplemented") ++} ++ diff --git a/gopls/internal/regtest/marker/testdata/suggestedfix/undeclared.txt b/gopls/internal/test/marker/testdata/suggestedfix/undeclared.txt similarity index 78% rename from gopls/internal/regtest/marker/testdata/suggestedfix/undeclared.txt rename to gopls/internal/test/marker/testdata/suggestedfix/undeclared.txt index e2c15675b98..897e9b14952 100644 --- a/gopls/internal/regtest/marker/testdata/suggestedfix/undeclared.txt +++ b/gopls/internal/test/marker/testdata/suggestedfix/undeclared.txt @@ -14,11 +14,7 @@ func a() { } -- @a/a.go -- ---- before -+++ after -@@ -3 +3,2 @@ --func a() { -+func a() { +@@ -4 +4 @@ + y := -- b.go -- package p @@ -30,11 +26,7 @@ func b() { } -- @b/b.go -- ---- before -+++ after -@@ -3 +3,2 @@ --func b() { -+func b() { +@@ -4 +4 @@ + n := -- c.go -- package p @@ -46,9 +38,5 @@ func c() { } -- @c/c.go -- ---- before -+++ after -@@ -3 +3,2 @@ --func c() { -+func c() { +@@ -4 +4 @@ + i := diff --git 
a/gopls/internal/test/marker/testdata/suggestedfix/undeclaredfunc.txt b/gopls/internal/test/marker/testdata/suggestedfix/undeclaredfunc.txt new file mode 100644 index 00000000000..d54dcae073f --- /dev/null +++ b/gopls/internal/test/marker/testdata/suggestedfix/undeclaredfunc.txt @@ -0,0 +1,19 @@ +This test checks the quick fix for "undeclared: f" that declares the +missing function. See #47558. + +TODO(adonovan): infer the result variables from the context (int, in this case). + +-- a.go -- +package a + +func _() int { return f(1, "") } //@suggestedfix(re"f.1", re"unde(fined|clared name): f", x) + +-- @x/a.go -- +@@ -3 +3 @@ +-func _() int { return f(1, "") } //@suggestedfix(re"f.1", re"unde(fined|clared name): f", x) ++func _() int { return f(1, "") } +@@ -5 +5,4 @@ ++func f(i int, s string) { ++ panic("unimplemented") ++} //@suggestedfix(re"f.1", re"unde(fined|clared name): f", x) ++ diff --git a/gopls/internal/regtest/marker/testdata/suggestedfix/unusedrequire.txt b/gopls/internal/test/marker/testdata/suggestedfix/unusedrequire.txt similarity index 95% rename from gopls/internal/regtest/marker/testdata/suggestedfix/unusedrequire.txt rename to gopls/internal/test/marker/testdata/suggestedfix/unusedrequire.txt index c9f6eee5c3a..8ec46e9ea68 100644 --- a/gopls/internal/regtest/marker/testdata/suggestedfix/unusedrequire.txt +++ b/gopls/internal/test/marker/testdata/suggestedfix/unusedrequire.txt @@ -16,8 +16,6 @@ go 1.14 require example.com v1.0.0 //@suggestedfix("require", re"not used", a) -- @a/a/go.mod -- ---- before -+++ after @@ -4,3 +4 @@ - -require example.com v1.0.0 //@suggestedfix("require", re"not used", a) diff --git a/gopls/internal/regtest/marker/testdata/suggestedfix/unusedrequire_gowork.txt b/gopls/internal/test/marker/testdata/suggestedfix/unusedrequire_gowork.txt similarity index 92% rename from gopls/internal/regtest/marker/testdata/suggestedfix/unusedrequire_gowork.txt rename to 
gopls/internal/test/marker/testdata/suggestedfix/unusedrequire_gowork.txt index 35ed16c8d9d..73b0eb9607f 100644 --- a/gopls/internal/regtest/marker/testdata/suggestedfix/unusedrequire_gowork.txt +++ b/gopls/internal/test/marker/testdata/suggestedfix/unusedrequire_gowork.txt @@ -4,9 +4,6 @@ go.mod files, when a go.work file is used. Note that unlike unusedrequire.txt, we need not write go.sum files when a go.work file is used. --- flags -- --min_go=go1.18 - -- proxy/example.com@v1.0.0/x.go -- package pkg const X = 1 @@ -26,8 +23,6 @@ go 1.14 require example.com v1.0.0 //@suggestedfix("require", re"not used", a) -- @a/a/go.mod -- ---- before -+++ after @@ -4,3 +4 @@ - -require example.com v1.0.0 //@suggestedfix("require", re"not used", a) @@ -44,8 +39,6 @@ go 1.14 require example.com v1.0.0 //@suggestedfix("require", re"not used", b) -- @b/b/go.mod -- ---- before -+++ after @@ -4,3 +4 @@ - -require example.com v1.0.0 //@suggestedfix("require", re"not used", b) diff --git a/gopls/internal/regtest/marker/testdata/symbol/basic.txt b/gopls/internal/test/marker/testdata/symbol/basic.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/symbol/basic.txt rename to gopls/internal/test/marker/testdata/symbol/basic.txt diff --git a/gopls/internal/regtest/marker/testdata/symbol/generic.txt b/gopls/internal/test/marker/testdata/symbol/generic.txt similarity index 85% rename from gopls/internal/regtest/marker/testdata/symbol/generic.txt rename to gopls/internal/test/marker/testdata/symbol/generic.txt index 07f2ed9ad46..1254851ad14 100644 --- a/gopls/internal/regtest/marker/testdata/symbol/generic.txt +++ b/gopls/internal/test/marker/testdata/symbol/generic.txt @@ -1,14 +1,8 @@ Basic tests of textDocument/documentSymbols with generics. 
--- flags -- --min_go=go1.18 - -- symbol.go -- //@symbol(want) -//go:build go1.18 -// +build go1.18 - package main type T[P any] struct { diff --git a/gopls/internal/test/marker/testdata/token/comment.txt b/gopls/internal/test/marker/testdata/token/comment.txt new file mode 100644 index 00000000000..082e95491dd --- /dev/null +++ b/gopls/internal/test/marker/testdata/token/comment.txt @@ -0,0 +1,55 @@ +This test checks the semantic tokens in comments (golang/go#64648). + +There will be doc links in the comments to reference other objects. Parse these +links and output tokens according to the referenced object types, so that the +editor can highlight them. This will help in checking the doc link errors and +reading comments in the code. + +-- settings.json -- +{ + "semanticTokens": true +} + +-- a.go -- +package p + +import "strconv" + +const A = 1 +var B = 2 + +type Foo int + + +// [F] accept a [Foo], and print it. //@token("F", "function", ""),token("Foo", "type", "") +func F(v Foo) { + println(v) + +} + +/* + [F1] print [A] and [B] //@token("F1", "function", ""),token("A", "variable", ""),token("B", "variable", "") +*/ +func F1() { + // print [A] and [B]. 
//@token("A", "variable", ""),token("B", "variable", "") + println(A, B) +} + +// [F2] use [strconv.Atoi] convert s, then print it //@token("F2", "function", ""),token("strconv", "namespace", ""),token("Atoi", "function", "") +func F2(s string) { + a, _ := strconv.Atoi("42") + b, _ := strconv.Atoi("42") + println(a, b) // this is a tail comment in F2 //hover(F2, "F2", F2) +} +-- b.go -- +package p + +// [F3] accept [*Foo] //@token("F3", "function", ""),token("Foo", "type", "") +func F3(v *Foo) { + println(*v) +} + +// [F4] equal [strconv.Atoi] //@token("F4", "function", ""),token("strconv", "namespace", ""),token("Atoi", "function", "") +func F4(s string) (int, error) { + return 0, nil +} diff --git a/gopls/internal/test/marker/testdata/token/range.txt b/gopls/internal/test/marker/testdata/token/range.txt new file mode 100644 index 00000000000..2f98c043d8e --- /dev/null +++ b/gopls/internal/test/marker/testdata/token/range.txt @@ -0,0 +1,29 @@ +This test checks the output of textDocument/semanticTokens/range. + +TODO: add more assertions. + +-- settings.json -- +{ + "semanticTokens": true +} + +-- a.go -- +package p //@token("package", "keyword", "") + +const C = 42 //@token("C", "variable", "definition readonly") + +func F() { //@token("F", "function", "definition") + x := 2 + 3//@token("x", "variable", "definition"),token("2", "number", ""),token("+", "operator", "") + _ = x //@token("x", "variable", "") + _ = F //@token("F", "function", "") +} + +func _() { + // A goto's label cannot be found by ascending the syntax tree. 
+ goto loop //@ token("goto", "keyword", ""), token("loop", "label", "") + +loop: //@token("loop", "label", "definition") + for { + continue loop //@ token("continue", "keyword", ""), token("loop", "label", "") + } +} diff --git a/gopls/internal/regtest/marker/testdata/typedef/typedef.txt b/gopls/internal/test/marker/testdata/typedef/typedef.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/typedef/typedef.txt rename to gopls/internal/test/marker/testdata/typedef/typedef.txt diff --git a/gopls/internal/regtest/marker/testdata/workspacesymbol/allscope.txt b/gopls/internal/test/marker/testdata/workspacesymbol/allscope.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/workspacesymbol/allscope.txt rename to gopls/internal/test/marker/testdata/workspacesymbol/allscope.txt diff --git a/gopls/internal/regtest/marker/testdata/workspacesymbol/caseinsensitive.txt b/gopls/internal/test/marker/testdata/workspacesymbol/caseinsensitive.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/workspacesymbol/caseinsensitive.txt rename to gopls/internal/test/marker/testdata/workspacesymbol/caseinsensitive.txt diff --git a/gopls/internal/regtest/marker/testdata/workspacesymbol/casesensitive.txt b/gopls/internal/test/marker/testdata/workspacesymbol/casesensitive.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/workspacesymbol/casesensitive.txt rename to gopls/internal/test/marker/testdata/workspacesymbol/casesensitive.txt diff --git a/gopls/internal/regtest/marker/testdata/workspacesymbol/issue44806.txt b/gopls/internal/test/marker/testdata/workspacesymbol/issue44806.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/workspacesymbol/issue44806.txt rename to gopls/internal/test/marker/testdata/workspacesymbol/issue44806.txt diff --git a/gopls/internal/regtest/marker/testdata/workspacesymbol/workspacesymbol.txt 
b/gopls/internal/test/marker/testdata/workspacesymbol/workspacesymbol.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/workspacesymbol/workspacesymbol.txt rename to gopls/internal/test/marker/testdata/workspacesymbol/workspacesymbol.txt diff --git a/gopls/internal/regtest/marker/testdata/workspacesymbol/wsscope.txt b/gopls/internal/test/marker/testdata/workspacesymbol/wsscope.txt similarity index 100% rename from gopls/internal/regtest/marker/testdata/workspacesymbol/wsscope.txt rename to gopls/internal/test/marker/testdata/workspacesymbol/wsscope.txt diff --git a/gopls/internal/test/marker/testdata/zeroconfig/adhoc.txt b/gopls/internal/test/marker/testdata/zeroconfig/adhoc.txt new file mode 100644 index 00000000000..ccef3b6fe6b --- /dev/null +++ b/gopls/internal/test/marker/testdata/zeroconfig/adhoc.txt @@ -0,0 +1,49 @@ +This test checks that gopls works with multiple ad-hoc packages, which lack +a go.mod file. + +We should be able to import standard library packages, get diagnostics, and +reference symbols defined in the same directory. 
+ +-- main.go -- +package main + +import "fmt" + +func main() { + fmt.Println(mainMsg) //@def("mainMsg", mainMsg) + fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +} +-- main2.go -- +package main + +const mainMsg = "main" //@loc(mainMsg, "mainMsg") + +-- a/a.go -- +package a + +import "fmt" + +func _() { + fmt.Println(aMsg) //@def("aMsg", aMsg) + fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +} + +-- a/a2.go -- +package a + +const aMsg = "a" //@loc(aMsg, "aMsg") + +-- b/b.go -- +package b + +import "fmt" + +func _() { + fmt.Println(bMsg) //@def("bMsg", bMsg) + fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +} + +-- b/b2.go -- +package b + +const bMsg = "b" //@loc(bMsg, "bMsg") diff --git a/gopls/internal/test/marker/testdata/zeroconfig/dynamicports.txt b/gopls/internal/test/marker/testdata/zeroconfig/dynamicports.txt new file mode 100644 index 00000000000..6dcdfe4cd7a --- /dev/null +++ b/gopls/internal/test/marker/testdata/zeroconfig/dynamicports.txt @@ -0,0 +1,118 @@ +This test checks that the zero-config algorithm selects Views to cover first +class ports. + +In this test, package a imports b, and b imports c. Package a contains files +constrained by go:build directives, package b contains files constrained by the +GOOS matching their file name, and package c is unconstrained. Various +assertions check that diagnostics and navigation work as expected. + +-- go.mod -- +module golang.org/lsptests + +-- a/a.go -- +package a + +import "golang.org/lsptests/b" + +var _ = b.F //@loc(F, "F") + +-- a/linux64.go -- +//go:build (linux && amd64) + +package a + +import "golang.org/lsptests/b" + +var _ int = 1<<32 -1 // OK on 64 bit platforms. Compare linux32.go below. 
+ +var ( + _ = b.LinuxOnly //@def("LinuxOnly", LinuxOnly) + _ = b.DarwinOnly //@diag("DarwinOnly", re"(undefined|declared)") + _ = b.WindowsOnly //@diag("WindowsOnly", re"(undefined|declared)") +) + +-- a/linux32.go -- +//go:build (linux && 386) + +package a + +import "golang.org/lsptests/b" + +var _ int = 1<<32 -1 //@diag("1<<32", re"overflows") + +var ( + _ = b.LinuxOnly //@def("LinuxOnly", LinuxOnly) + _ = b.DarwinOnly //@diag("DarwinOnly", re"(undefined|declared)") + _ = b.WindowsOnly //@diag("WindowsOnly", re"(undefined|declared)") +) + +-- a/darwin64.go -- +//go:build (darwin && amd64) + +package a + +import "golang.org/lsptests/b" + +var ( + _ = b.LinuxOnly //@diag("LinuxOnly", re"(undefined|declared)") + _ = b.DarwinOnly //@def("DarwinOnly", DarwinOnly) + _ = b.WindowsOnly //@diag("WindowsOnly", re"(undefined|declared)") +) + +-- a/windows64.go -- +//go:build (windows && amd64) + +package a + +import "golang.org/lsptests/b" + +var ( + _ = b.LinuxOnly //@diag("LinuxOnly", re"(undefined|declared)") + _ = b.DarwinOnly //@diag("DarwinOnly", re"(undefined|declared)") + _ = b.WindowsOnly //@def("WindowsOnly", WindowsOnly) +) + +-- b/b_other.go -- +//go:build !linux && !darwin && !windows +package b + +func F() {} + +-- b/b_linux.go -- +package b + +import "golang.org/lsptests/c" + +func F() { //@refs("F", "F", F) + x := c.Common //@diag("x", re"not used"),def("Common", Common) +} + +const LinuxOnly = "darwin" //@loc(LinuxOnly, "LinuxOnly") + +-- b/b_darwin.go -- +package b + +import "golang.org/lsptests/c" + +func F() { //@refs("F", "F", F) + x := c.Common //@diag("x", re"not used"),def("Common", Common) +} + +const DarwinOnly = "darwin" //@loc(DarwinOnly, "DarwinOnly") + +-- b/b_windows.go -- +package b + +import "golang.org/lsptests/c" + +func F() { //@refs("F", "F", F) + x := c.Common //@diag("x", re"not used"),def("Common", Common) +} + +const WindowsOnly = "windows" //@loc(WindowsOnly, "WindowsOnly") + +-- c/c.go -- +package c + +const Common = 0 
//@loc(Common, "Common") + diff --git a/gopls/internal/test/marker/testdata/zeroconfig/nested.txt b/gopls/internal/test/marker/testdata/zeroconfig/nested.txt new file mode 100644 index 00000000000..2df74062407 --- /dev/null +++ b/gopls/internal/test/marker/testdata/zeroconfig/nested.txt @@ -0,0 +1,61 @@ +This test checks that gopls works with nested modules, including multiple +nested modules. + +-- main.go -- +package main + +import "fmt" + +func main() { + fmt.Println(mainMsg) //@def("mainMsg", mainMsg) + fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +} +-- main2.go -- +package main + +const mainMsg = "main" //@loc(mainMsg, "mainMsg") + +-- mod1/go.mod -- +module golang.org/lsptests/mod1 + +go 1.20 + +-- mod1/a/a.go -- +package a + +import ( + "fmt" + "golang.org/lsptests/mod1/b" +) + +func _() { + fmt.Println(b.Msg) //@def("Msg", Msg) + fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +} + +-- mod1/b/b.go -- +package b + +const Msg = "1" //@loc(Msg, "Msg") + +-- mod2/go.mod -- +module golang.org/lsptests/mod2 + +require golang.org/lsptests/mod1 v0.0.1 + +replace golang.org/lsptests/mod1 => ../mod1 + +go 1.20 + +-- mod2/c/c.go -- +package c + +import ( + "fmt" + "golang.org/lsptests/mod1/b" +) + +func _() { + fmt.Println(b.Msg) //@def("Msg", Msg) + fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +} diff --git a/gopls/internal/test/marker/testdata/zeroconfig/nonworkspacemodule.txt b/gopls/internal/test/marker/testdata/zeroconfig/nonworkspacemodule.txt new file mode 100644 index 00000000000..747635e6bb1 --- /dev/null +++ b/gopls/internal/test/marker/testdata/zeroconfig/nonworkspacemodule.txt @@ -0,0 +1,79 @@ +This test checks that gopls works with modules that aren't included in the +workspace file. 
+ +-- go.work -- +go 1.20 + +use ( + ./a + ./b +) + +-- a/go.mod -- +module golang.org/lsptests/a + +go 1.18 + +-- a/a.go -- +package a + +import ( + "fmt" + "golang.org/lsptests/a/lib" +) + +func _() { + fmt.Println(lib.Msg) //@def("Msg", aMsg) + fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +} + +-- a/lib/lib.go -- +package lib + +const Msg = "hi" //@loc(aMsg, "Msg") + +-- b/go.mod -- +module golang.org/lsptests/b + +go 1.18 + +-- b/b.go -- +package b + +import ( + "fmt" + "golang.org/lsptests/b/lib" +) + +func main() { + fmt.Println(lib.Msg) //@def("Msg", bMsg) + fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +} + +-- b/lib/lib.go -- +package lib + +const Msg = "hi" //@loc(bMsg, "Msg") + +-- c/go.mod -- +module golang.org/lsptests/c + +go 1.18 + +-- c/c.go -- +package c + +import ( + "fmt" + "golang.org/lsptests/c/lib" +) + +func main() { + fmt.Println(lib.Msg) //@def("Msg", cMsg) + fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +} + +-- c/lib/lib.go -- +package lib + +const Msg = "hi" //@loc(cMsg, "Msg") diff --git a/gopls/internal/util/README.md b/gopls/internal/util/README.md new file mode 100644 index 00000000000..6be2ad51efa --- /dev/null +++ b/gopls/internal/util/README.md @@ -0,0 +1,7 @@ +# util + +This directory is not a Go package. + +Its subdirectories are for utility packages, defined as implementation +helpers (not core machinery) that are used in different ways across +the gopls codebase. 
\ No newline at end of file diff --git a/gopls/internal/astutil/purge.go b/gopls/internal/util/astutil/purge.go similarity index 97% rename from gopls/internal/astutil/purge.go rename to gopls/internal/util/astutil/purge.go index cec428842a2..95117c568ba 100644 --- a/gopls/internal/astutil/purge.go +++ b/gopls/internal/util/astutil/purge.go @@ -10,7 +10,7 @@ import ( "go/scanner" "go/token" - "golang.org/x/tools/gopls/internal/lsp/safetoken" + "golang.org/x/tools/gopls/internal/util/safetoken" ) // PurgeFuncBodies returns a copy of src in which the contents of each diff --git a/gopls/internal/astutil/purge_test.go b/gopls/internal/util/astutil/purge_test.go similarity index 97% rename from gopls/internal/astutil/purge_test.go rename to gopls/internal/util/astutil/purge_test.go index 97c04072f75..c67f9039adc 100644 --- a/gopls/internal/astutil/purge_test.go +++ b/gopls/internal/util/astutil/purge_test.go @@ -13,7 +13,7 @@ import ( "testing" "golang.org/x/tools/go/packages" - "golang.org/x/tools/gopls/internal/astutil" + "golang.org/x/tools/gopls/internal/util/astutil" "golang.org/x/tools/internal/testenv" ) diff --git a/gopls/internal/util/astutil/util.go b/gopls/internal/util/astutil/util.go new file mode 100644 index 00000000000..4fc313535b9 --- /dev/null +++ b/gopls/internal/util/astutil/util.go @@ -0,0 +1,69 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +import ( + "go/ast" + "go/token" + + "golang.org/x/tools/internal/typeparams" +) + +// UnpackRecv unpacks a receiver type expression, reporting whether it is a +// pointer recever, along with the type name identifier and any receiver type +// parameter identifiers. +// +// Copied (with modifications) from go/types. 
+func UnpackRecv(rtyp ast.Expr) (ptr bool, rname *ast.Ident, tparams []*ast.Ident) { +L: // unpack receiver type + // This accepts invalid receivers such as ***T and does not + // work for other invalid receivers, but we don't care. The + // validity of receiver expressions is checked elsewhere. + for { + switch t := rtyp.(type) { + case *ast.ParenExpr: + rtyp = t.X + case *ast.StarExpr: + ptr = true + rtyp = t.X + default: + break L + } + } + + // unpack type parameters, if any + switch rtyp.(type) { + case *ast.IndexExpr, *ast.IndexListExpr: + var indices []ast.Expr + rtyp, _, indices, _ = typeparams.UnpackIndexExpr(rtyp) + for _, arg := range indices { + var par *ast.Ident + switch arg := arg.(type) { + case *ast.Ident: + par = arg + default: + // ignore errors + } + if par == nil { + par = &ast.Ident{NamePos: arg.Pos(), Name: "_"} + } + tparams = append(tparams, par) + } + } + + // unpack receiver name + if name, _ := rtyp.(*ast.Ident); name != nil { + rname = name + } + + return +} + +// NodeContains returns true if a node encloses a given position pos. +// +// Precondition: n must not be nil. 
+func NodeContains(n ast.Node, pos token.Pos) bool { + return n.Pos() <= pos && pos <= n.End() +} diff --git a/gopls/internal/lsp/browser/README.md b/gopls/internal/util/browser/README.md similarity index 100% rename from gopls/internal/lsp/browser/README.md rename to gopls/internal/util/browser/README.md diff --git a/gopls/internal/lsp/browser/browser.go b/gopls/internal/util/browser/browser.go similarity index 98% rename from gopls/internal/lsp/browser/browser.go rename to gopls/internal/util/browser/browser.go index 0ac4f20f0b2..6867c85d232 100644 --- a/gopls/internal/lsp/browser/browser.go +++ b/gopls/internal/util/browser/browser.go @@ -6,8 +6,8 @@ package browser import ( - exec "golang.org/x/sys/execabs" "os" + "os/exec" "runtime" "time" ) diff --git a/gopls/internal/bug/bug.go b/gopls/internal/util/bug/bug.go similarity index 97% rename from gopls/internal/bug/bug.go rename to gopls/internal/util/bug/bug.go index 7c290b0cd27..dcd242d4856 100644 --- a/gopls/internal/bug/bug.go +++ b/gopls/internal/util/bug/bug.go @@ -25,6 +25,9 @@ import ( // PanicOnBugs controls whether to panic when bugs are reported. // // It may be set to true during testing. +// +// TODO(adonovan): should we make the default true, and +// suppress it only in the product (gopls/main.go)? 
var PanicOnBugs = false var ( diff --git a/gopls/internal/bug/bug_test.go b/gopls/internal/util/bug/bug_test.go similarity index 100% rename from gopls/internal/bug/bug_test.go rename to gopls/internal/util/bug/bug_test.go diff --git a/internal/constraints/constraint.go b/gopls/internal/util/constraints/constraint.go similarity index 100% rename from internal/constraints/constraint.go rename to gopls/internal/util/constraints/constraint.go diff --git a/gopls/internal/lsp/frob/frob.go b/gopls/internal/util/frob/frob.go similarity index 90% rename from gopls/internal/lsp/frob/frob.go rename to gopls/internal/util/frob/frob.go index 7d037328424..cd385a9d692 100644 --- a/gopls/internal/lsp/frob/frob.go +++ b/gopls/internal/util/frob/frob.go @@ -93,7 +93,7 @@ func frobFor(t reflect.Type) *frob { case reflect.Array, reflect.Slice, - reflect.Ptr: // TODO(adonovan): after go1.18, use Pointer + reflect.Pointer: fr.addElem(fr.t.Elem()) case reflect.Map: @@ -167,15 +167,15 @@ func (fr *frob) encode(out *writer, v reflect.Value) { case reflect.Uint64: out.uint64(v.Uint()) case reflect.Uintptr: - out.uint64(uint64(v.Uint())) + out.uint64(v.Uint()) case reflect.Float32: out.uint32(math.Float32bits(float32(v.Float()))) case reflect.Float64: out.uint64(math.Float64bits(v.Float())) case reflect.Complex64: z := complex64(v.Complex()) - out.uint32(uint32(math.Float32bits(real(z)))) - out.uint32(uint32(math.Float32bits(imag(z)))) + out.uint32(math.Float32bits(real(z))) + out.uint32(math.Float32bits(imag(z))) case reflect.Complex128: z := v.Complex() out.uint64(math.Float64bits(real(z))) @@ -214,7 +214,7 @@ func (fr *frob) encode(out *writer, v reflect.Value) { } } - case reflect.Ptr: // TODO(adonovan): after go1.18, use Pointer + case reflect.Pointer: if v.IsNil() { out.uint8(0) } else { @@ -341,7 +341,7 @@ func (fr *frob) decode(in *reader, addr reflect.Value) { } } - case reflect.Ptr: // TODO(adonovan): after go1.18, use Pointer + case reflect.Pointer: isNil := in.uint8() == 0 if 
!isNil { ptr := reflect.New(fr.elems[0].t) @@ -402,38 +402,7 @@ func (r *reader) bytes(n int) []byte { type writer struct{ data []byte } func (w *writer) uint8(v uint8) { w.data = append(w.data, v) } -func (w *writer) uint16(v uint16) { w.data = appendUint16(w.data, v) } -func (w *writer) uint32(v uint32) { w.data = appendUint32(w.data, v) } -func (w *writer) uint64(v uint64) { w.data = appendUint64(w.data, v) } +func (w *writer) uint16(v uint16) { w.data = le.AppendUint16(w.data, v) } +func (w *writer) uint32(v uint32) { w.data = le.AppendUint32(w.data, v) } +func (w *writer) uint64(v uint64) { w.data = le.AppendUint64(w.data, v) } func (w *writer) bytes(v []byte) { w.data = append(w.data, v...) } - -// TODO(adonovan): delete these as in go1.19 they are methods on LittleEndian: - -func appendUint16(b []byte, v uint16) []byte { - return append(b, - byte(v), - byte(v>>8), - ) -} - -func appendUint32(b []byte, v uint32) []byte { - return append(b, - byte(v), - byte(v>>8), - byte(v>>16), - byte(v>>24), - ) -} - -func appendUint64(b []byte, v uint64) []byte { - return append(b, - byte(v), - byte(v>>8), - byte(v>>16), - byte(v>>24), - byte(v>>32), - byte(v>>40), - byte(v>>48), - byte(v>>56), - ) -} diff --git a/gopls/internal/lsp/frob/frob_test.go b/gopls/internal/util/frob/frob_test.go similarity index 98% rename from gopls/internal/lsp/frob/frob_test.go rename to gopls/internal/util/frob/frob_test.go index 6a0f6e729db..5765c9642ef 100644 --- a/gopls/internal/lsp/frob/frob_test.go +++ b/gopls/internal/util/frob/frob_test.go @@ -9,7 +9,7 @@ import ( "reflect" "testing" - "golang.org/x/tools/gopls/internal/lsp/frob" + "golang.org/x/tools/gopls/internal/util/frob" ) func TestBasics(t *testing.T) { diff --git a/gopls/internal/util/goversion/goversion.go b/gopls/internal/util/goversion/goversion.go new file mode 100644 index 00000000000..5b849b22b85 --- /dev/null +++ b/gopls/internal/util/goversion/goversion.go @@ -0,0 +1,93 @@ +// Copyright 2023 The Go Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package goversions defines gopls's policy for which versions of Go it supports. +package goversion + +import ( + "fmt" + "strings" +) + +// Support holds information about end-of-life Go version support. +// +// Exposed for testing. +type Support struct { + // GoVersion is the Go version to which these settings relate. + GoVersion int + + // DeprecatedVersion is the first version of gopls that no longer supports + // this Go version. + // + // If unset, the version is already deprecated. + DeprecatedVersion string + + // InstallGoplsVersion is the latest gopls version that supports this Go + // version without warnings. + InstallGoplsVersion string +} + +// Supported maps Go versions to the gopls version in which support will +// be deprecated, and the final gopls version supporting them without warnings. +// Keep this in sync with gopls/README.md. +// +// Must be sorted in ascending order of Go version. +// +// Exposed (and mutable) for testing. +var Supported = []Support{ + {12, "", "v0.7.5"}, + {15, "", "v0.9.5"}, + {16, "", "v0.11.0"}, + {17, "", "v0.11.0"}, + {18, "v0.16.0", "v0.14.2"}, +} + +// OldestSupported is the last X in Go 1.X that this version of gopls +// supports without warnings. +// +// Exported for testing. +func OldestSupported() int { + return Supported[len(Supported)-1].GoVersion + 1 +} + +// Message returns the message to display if the user has the given Go +// version, if any. The goVersion variable is the X in Go 1.X. If +// fromBuild is set, the Go version is the version used to build +// gopls. Otherwise, it is the go command version. +// +// The second component of the result indicates whether the message is +// an error, not a mere warning. +// +// If goVersion is invalid (< 0), it returns "", false. 
+func Message(goVersion int, fromBuild bool) (string, bool) { + if goVersion < 0 { + return "", false + } + + for _, v := range Supported { + if goVersion <= v.GoVersion { + var msgBuilder strings.Builder + + isError := true + if fromBuild { + fmt.Fprintf(&msgBuilder, "Gopls was built with Go version 1.%d", goVersion) + } else { + fmt.Fprintf(&msgBuilder, "Found Go version 1.%d", goVersion) + } + if v.DeprecatedVersion != "" { + // not deprecated yet, just a warning + fmt.Fprintf(&msgBuilder, ", which will be unsupported by gopls %s. ", v.DeprecatedVersion) + isError = false // warning + } else { + fmt.Fprint(&msgBuilder, ", which is not supported by this version of gopls. ") + } + fmt.Fprintf(&msgBuilder, "Please upgrade to Go 1.%d or later and reinstall gopls. ", OldestSupported()) + fmt.Fprintf(&msgBuilder, "If you can't upgrade and want this message to go away, please install gopls %s. ", v.InstallGoplsVersion) + fmt.Fprint(&msgBuilder, "See https://go.dev/s/gopls-support-policy for more details.") + + return msgBuilder.String(), isError + } + } + return "", false +} diff --git a/gopls/internal/util/goversion/goversion_test.go b/gopls/internal/util/goversion/goversion_test.go new file mode 100644 index 00000000000..f48ef5008c8 --- /dev/null +++ b/gopls/internal/util/goversion/goversion_test.go @@ -0,0 +1,76 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package goversion_test + +import ( + "fmt" + "strings" + "testing" + + "golang.org/x/tools/gopls/internal/util/goversion" +) + +func TestMessage(t *testing.T) { + // Note(rfindley): this test is a change detector, as it must be updated + // whenever we deprecate a version. + // + // However, I chose to leave it as is since it gives us confidence in error + // messages served for Go versions that we no longer support (and therefore + // no longer run in CI). 
+ type test struct { + goVersion int + fromBuild bool + wantContains []string // string fragments that we expect to see + wantIsError bool // an error, not a mere warning + } + + deprecated := func(goVersion int, lastVersion string) test { + return test{ + goVersion: goVersion, + fromBuild: false, + wantContains: []string{ + fmt.Sprintf("Found Go version 1.%d", goVersion), + "not supported", + fmt.Sprintf("upgrade to Go 1.%d", goversion.OldestSupported()), + fmt.Sprintf("install gopls %s", lastVersion), + }, + wantIsError: true, + } + } + + tests := []struct { + goVersion int + fromBuild bool + wantContains []string // string fragments that we expect to see + wantIsError bool // an error, not a mere warning + }{ + {-1, false, nil, false}, + deprecated(12, "v0.7.5"), + deprecated(13, "v0.9.5"), + deprecated(15, "v0.9.5"), + deprecated(16, "v0.11.0"), + deprecated(17, "v0.11.0"), + {18, false, []string{"Found Go version 1.18", "unsupported by gopls v0.16.0", "upgrade to Go 1.19", "install gopls v0.14.2"}, false}, + {18, true, []string{"Gopls was built with Go version 1.18", "unsupported by gopls v0.16.0", "upgrade to Go 1.19", "install gopls v0.14.2"}, false}, + } + + for _, test := range tests { + gotMsg, gotIsError := goversion.Message(test.goVersion, test.fromBuild) + + if len(test.wantContains) == 0 && gotMsg != "" { + t.Errorf("versionMessage(%d) = %q, want \"\"", test.goVersion, gotMsg) + } + + for _, want := range test.wantContains { + if !strings.Contains(gotMsg, want) { + t.Errorf("versionMessage(%d) = %q, want containing %q", test.goVersion, gotMsg, want) + } + } + + if gotIsError != test.wantIsError { + t.Errorf("versionMessage(%d) isError = %v, want %v", test.goVersion, gotIsError, test.wantIsError) + } + } +} diff --git a/gopls/internal/util/immutable/immutable.go b/gopls/internal/util/immutable/immutable.go new file mode 100644 index 00000000000..a88133fe92f --- /dev/null +++ b/gopls/internal/util/immutable/immutable.go @@ -0,0 +1,43 @@ +// Copyright 
2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The immutable package defines immutable wrappers around common data +// structures. These are used for additional type safety inside gopls. +// +// See the "persistent" package for copy-on-write data structures. +package immutable + +// Map is an immutable wrapper around an ordinary Go map. +type Map[K comparable, V any] struct { + m map[K]V +} + +// MapOf wraps the given Go map. +// +// The caller must not subsequently mutate the map. +func MapOf[K comparable, V any](m map[K]V) Map[K, V] { + return Map[K, V]{m} +} + +// Value returns the mapped value for k. +// It is equivalent to the commaok form of an ordinary go map, and returns +// (zero, false) if the key is not present. +func (m Map[K, V]) Value(k K) (V, bool) { + v, ok := m.m[k] + return v, ok +} + +// Len returns the number of entries in the Map. +func (m Map[K, V]) Len() int { + return len(m.m) +} + +// Range calls f for each mapped (key, value) pair. +// There is no way to break out of the loop. +// TODO: generalize when Go iterators (#61405) land. +func (m Map[K, V]) Range(f func(k K, v V)) { + for k, v := range m.m { + f(k, v) + } +} diff --git a/gopls/internal/lsp/lru/lru.go b/gopls/internal/util/lru/lru.go similarity index 100% rename from gopls/internal/lsp/lru/lru.go rename to gopls/internal/util/lru/lru.go diff --git a/gopls/internal/lsp/lru/lru_fuzz_test.go b/gopls/internal/util/lru/lru_fuzz_test.go similarity index 91% rename from gopls/internal/lsp/lru/lru_fuzz_test.go rename to gopls/internal/util/lru/lru_fuzz_test.go index c5afceeaf3b..b82776b25ba 100644 --- a/gopls/internal/lsp/lru/lru_fuzz_test.go +++ b/gopls/internal/util/lru/lru_fuzz_test.go @@ -2,15 +2,12 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build go1.18 -// +build go1.18 - package lru_test import ( "testing" - "golang.org/x/tools/gopls/internal/lsp/lru" + "golang.org/x/tools/gopls/internal/util/lru" ) // Simple fuzzing test for consistency. diff --git a/gopls/internal/lsp/lru/lru_test.go b/gopls/internal/util/lru/lru_test.go similarity index 98% rename from gopls/internal/lsp/lru/lru_test.go rename to gopls/internal/util/lru/lru_test.go index a9e6407a7c6..9ffe346257d 100644 --- a/gopls/internal/lsp/lru/lru_test.go +++ b/gopls/internal/util/lru/lru_test.go @@ -14,7 +14,7 @@ import ( "testing" "golang.org/x/sync/errgroup" - "golang.org/x/tools/gopls/internal/lsp/lru" + "golang.org/x/tools/gopls/internal/util/lru" ) func TestCache(t *testing.T) { diff --git a/gopls/internal/util/maps/maps.go b/gopls/internal/util/maps/maps.go new file mode 100644 index 00000000000..0a4ac7cfbe5 --- /dev/null +++ b/gopls/internal/util/maps/maps.go @@ -0,0 +1,38 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package maps + +// Group returns a new non-nil map containing the elements of s grouped by the +// keys returned from the key func. +func Group[K comparable, V any](s []V, key func(V) K) map[K][]V { + m := make(map[K][]V) + for _, v := range s { + k := key(v) + m[k] = append(m[k], v) + } + return m +} + +// Keys returns the keys of the map M. +func Keys[M ~map[K]V, K comparable, V any](m M) []K { + r := make([]K, 0, len(m)) + for k := range m { + r = append(r, k) + } + return r +} + +// SameKeys reports whether x and y have equal sets of keys. 
+func SameKeys[K comparable, V1, V2 any](x map[K]V1, y map[K]V2) bool { + if len(x) != len(y) { + return false + } + for k := range x { + if _, ok := y[k]; !ok { + return false + } + } + return true +} diff --git a/gopls/internal/util/pathutil/util.go b/gopls/internal/util/pathutil/util.go new file mode 100644 index 00000000000..e19863e202a --- /dev/null +++ b/gopls/internal/util/pathutil/util.go @@ -0,0 +1,49 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package pathutil + +import ( + "path/filepath" + "strings" +) + +// InDir checks whether path is in the file tree rooted at dir. +// It checks only the lexical form of the file names. +// It does not consider symbolic links. +// +// Copied from go/src/cmd/go/internal/search/search.go. +func InDir(dir, path string) bool { + pv := strings.ToUpper(filepath.VolumeName(path)) + dv := strings.ToUpper(filepath.VolumeName(dir)) + path = path[len(pv):] + dir = dir[len(dv):] + switch { + default: + return false + case pv != dv: + return false + case len(path) == len(dir): + if path == dir { + return true + } + return false + case dir == "": + return path != "" + case len(path) > len(dir): + if dir[len(dir)-1] == filepath.Separator { + if path[:len(dir)] == dir { + return path[len(dir):] != "" + } + return false + } + if path[len(dir)] == filepath.Separator && path[:len(dir)] == dir { + if len(path) == len(dir)+1 { + return true + } + return path[len(dir)+1:] != "" + } + return false + } +} diff --git a/internal/persistent/map.go b/gopls/internal/util/persistent/map.go similarity index 95% rename from internal/persistent/map.go rename to gopls/internal/util/persistent/map.go index 64cd500c65a..b0e49f27d42 100644 --- a/internal/persistent/map.go +++ b/gopls/internal/util/persistent/map.go @@ -13,7 +13,7 @@ import ( "strings" "sync/atomic" - "golang.org/x/tools/internal/constraints" + 
"golang.org/x/tools/gopls/internal/util/constraints" ) // Implementation details: @@ -146,6 +146,15 @@ func (pm *Map[K, V]) Clear() { pm.root = nil } +// Keys returns all keys present in the map. +func (pm *Map[K, V]) Keys() []K { + var keys []K + pm.root.forEach(func(k, _ any) { + keys = append(keys, k.(K)) + }) + return keys +} + // Range calls f sequentially in ascending key order for all entries in the map. func (pm *Map[K, V]) Range(f func(key K, value V)) { pm.root.forEach(func(k, v any) { @@ -271,17 +280,20 @@ func split(n *mapNode, key any, less func(any, any) bool, requireMid bool) (left } // Delete deletes the value for a key. -func (pm *Map[K, V]) Delete(key K) { +// +// The result reports whether the key was present in the map. +func (pm *Map[K, V]) Delete(key K) bool { root := pm.root left, mid, right := split(root, key, pm.less, true) if mid == nil { - return + return false } pm.root = merge(left, right) left.decref() mid.decref() right.decref() root.decref() + return true } // merge two trees while preserving the weight invariant. 
diff --git a/internal/persistent/map_test.go b/gopls/internal/util/persistent/map_test.go similarity index 98% rename from internal/persistent/map_test.go rename to gopls/internal/util/persistent/map_test.go index c73e5662d90..effa1c1da85 100644 --- a/internal/persistent/map_test.go +++ b/gopls/internal/util/persistent/map_test.go @@ -312,7 +312,10 @@ func (vm *validatedMap) set(t *testing.T, key, value int) { func (vm *validatedMap) remove(t *testing.T, key int) { vm.clock++ - vm.impl.Delete(key) + deleted := vm.impl.Delete(key) + if _, ok := vm.expected[key]; ok != deleted { + t.Fatalf("Delete(%d) = %t, want %t", key, deleted, ok) + } delete(vm.expected, key) vm.validate(t) diff --git a/internal/persistent/set.go b/gopls/internal/util/persistent/set.go similarity index 96% rename from internal/persistent/set.go rename to gopls/internal/util/persistent/set.go index 348de5a71d2..2d5f4edac96 100644 --- a/internal/persistent/set.go +++ b/gopls/internal/util/persistent/set.go @@ -4,7 +4,7 @@ package persistent -import "golang.org/x/tools/internal/constraints" +import "golang.org/x/tools/gopls/internal/util/constraints" // Set is a collection of elements of type K. 
// diff --git a/internal/persistent/set_test.go b/gopls/internal/util/persistent/set_test.go similarity index 95% rename from internal/persistent/set_test.go rename to gopls/internal/util/persistent/set_test.go index 59025140bce..31911b451b3 100644 --- a/internal/persistent/set_test.go +++ b/gopls/internal/util/persistent/set_test.go @@ -9,8 +9,8 @@ import ( "strings" "testing" - "golang.org/x/tools/internal/constraints" - "golang.org/x/tools/internal/persistent" + "golang.org/x/tools/gopls/internal/util/constraints" + "golang.org/x/tools/gopls/internal/util/persistent" ) func TestSet(t *testing.T) { diff --git a/gopls/internal/lsp/safetoken/safetoken.go b/gopls/internal/util/safetoken/safetoken.go similarity index 100% rename from gopls/internal/lsp/safetoken/safetoken.go rename to gopls/internal/util/safetoken/safetoken.go diff --git a/gopls/internal/lsp/safetoken/safetoken_test.go b/gopls/internal/util/safetoken/safetoken_test.go similarity index 96% rename from gopls/internal/lsp/safetoken/safetoken_test.go rename to gopls/internal/util/safetoken/safetoken_test.go index 83a50fbec10..4cdce7a97b9 100644 --- a/gopls/internal/lsp/safetoken/safetoken_test.go +++ b/gopls/internal/util/safetoken/safetoken_test.go @@ -13,7 +13,7 @@ import ( "testing" "golang.org/x/tools/go/packages" - "golang.org/x/tools/gopls/internal/lsp/safetoken" + "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/testenv" ) @@ -72,7 +72,6 @@ func TestWorkaroundIssue57490(t *testing.T) { // suggests alternatives. 
func TestGoplsSourceDoesNotCallTokenFileMethods(t *testing.T) { testenv.NeedsGoPackages(t) - testenv.NeedsGo1Point(t, 18) testenv.NeedsLocalXTools(t) cfg := &packages.Config{ @@ -117,7 +116,7 @@ func TestGoplsSourceDoesNotCallTokenFileMethods(t *testing.T) { for _, pkg := range pkgs { switch pkg.PkgPath { - case "go/token", "golang.org/x/tools/gopls/internal/lsp/safetoken": + case "go/token", "golang.org/x/tools/gopls/internal/util/safetoken": continue // allow calls within these packages } diff --git a/gopls/internal/util/slices/slices.go b/gopls/internal/util/slices/slices.go new file mode 100644 index 00000000000..8df79870945 --- /dev/null +++ b/gopls/internal/util/slices/slices.go @@ -0,0 +1,116 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package slices + +// Clone returns a copy of the slice. +// The elements are copied using assignment, so this is a shallow clone. +// TODO(rfindley): use go1.21 slices.Clone. +func Clone[S ~[]E, E any](s S) S { + // The s[:0:0] preserves nil in case it matters. + return append(s[:0:0], s...) +} + +// Contains reports whether x is present in slice. +// TODO(adonovan): use go1.21 slices.Contains. +func Contains[S ~[]E, E comparable](slice S, x E) bool { + for _, elem := range slice { + if elem == x { + return true + } + } + return false +} + +// IndexFunc returns the first index i satisfying f(s[i]), +// or -1 if none do. +// TODO(adonovan): use go1.21 slices.IndexFunc. +func IndexFunc[S ~[]E, E any](s S, f func(E) bool) int { + for i := range s { + if f(s[i]) { + return i + } + } + return -1 +} + +// ContainsFunc reports whether at least one +// element e of s satisfies f(e). +// TODO(adonovan): use go1.21 slices.ContainsFunc. +func ContainsFunc[S ~[]E, E any](s S, f func(E) bool) bool { + return IndexFunc(s, f) >= 0 +} + +// Concat returns a new slice concatenating the passed in slices. 
+// TODO(rfindley): use go1.22 slices.Concat. +func Concat[S ~[]E, E any](slices ...S) S { + size := 0 + for _, s := range slices { + size += len(s) + if size < 0 { + panic("len out of range") + } + } + newslice := Grow[S](nil, size) + for _, s := range slices { + newslice = append(newslice, s...) + } + return newslice +} + +// Grow increases the slice's capacity, if necessary, to guarantee space for +// another n elements. After Grow(n), at least n elements can be appended +// to the slice without another allocation. If n is negative or too large to +// allocate the memory, Grow panics. +// TODO(rfindley): use go1.21 slices.Grow. +func Grow[S ~[]E, E any](s S, n int) S { + if n < 0 { + panic("cannot be negative") + } + if n -= cap(s) - len(s); n > 0 { + s = append(s[:cap(s)], make([]E, n)...)[:len(s)] + } + return s +} + +// DeleteFunc removes any elements from s for which del returns true, +// returning the modified slice. +// DeleteFunc zeroes the elements between the new length and the original length. +// TODO(adonovan): use go1.21 slices.DeleteFunc. +func DeleteFunc[S ~[]E, E any](s S, del func(E) bool) S { + i := IndexFunc(s, del) + if i == -1 { + return s + } + // Don't start copying elements until we find one to delete. + for j := i + 1; j < len(s); j++ { + if v := s[j]; !del(v) { + s[i] = v + i++ + } + } + clear(s[i:]) // zero/nil out the obsolete elements, for GC + return s[:i] +} + +func clear[T any](slice []T) { + for i := range slice { + slice[i] = *new(T) + } +} + +// Remove removes all values equal to elem from slice. 
+// +// The closest equivalent in the standard slices package is: +// +// DeleteFunc(func(x T) bool { return x == elem }) +func Remove[T comparable](slice []T, elem T) []T { + out := slice[:0] + for _, v := range slice { + if v != elem { + out = append(out, v) + } + } + return out +} diff --git a/gopls/internal/util/typesutil/typesutil.go b/gopls/internal/util/typesutil/typesutil.go new file mode 100644 index 00000000000..3597b4b4bbc --- /dev/null +++ b/gopls/internal/util/typesutil/typesutil.go @@ -0,0 +1,49 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typesutil + +import ( + "go/ast" + "go/types" +) + +// ImportedPkgName returns the PkgName object declared by an ImportSpec. +// TODO(adonovan): use go1.22's Info.PkgNameOf. +func ImportedPkgName(info *types.Info, imp *ast.ImportSpec) (*types.PkgName, bool) { + var obj types.Object + if imp.Name != nil { + obj = info.Defs[imp.Name] + } else { + obj = info.Implicits[imp] + } + pkgname, ok := obj.(*types.PkgName) + return pkgname, ok +} + +// FileQualifier returns a [types.Qualifier] function that qualifies +// imported symbols appropriately based on the import environment of a +// given file. +func FileQualifier(f *ast.File, pkg *types.Package, info *types.Info) types.Qualifier { + // Construct mapping of import paths to their defined or implicit names. + imports := make(map[*types.Package]string) + for _, imp := range f.Imports { + if pkgname, ok := ImportedPkgName(info, imp); ok { + imports[pkgname.Imported()] = pkgname.Name() + } + } + // Define qualifier to replace full package paths with names of the imports. + return func(p *types.Package) string { + if p == pkg { + return "" + } + if name, ok := imports[p]; ok { + if name == "." 
{ + return "" + } + return name + } + return p.Name() + } +} diff --git a/gopls/internal/version/version.go b/gopls/internal/version/version.go new file mode 100644 index 00000000000..96f18190aff --- /dev/null +++ b/gopls/internal/version/version.go @@ -0,0 +1,29 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package version manages the gopls version. +// +// The VersionOverride variable may be used to set the gopls version at link +// time. +package version + +import "runtime/debug" + +var VersionOverride = "" + +// Version returns the gopls version. +// +// By default, this is read from runtime/debug.ReadBuildInfo, but may be +// overridden by the [VersionOverride] variable. +func Version() string { + if VersionOverride != "" { + return VersionOverride + } + if info, ok := debug.ReadBuildInfo(); ok { + if info.Main.Version != "" { + return info.Main.Version + } + } + return "(unknown)" +} diff --git a/gopls/internal/vulncheck/scan/command.go b/gopls/internal/vulncheck/scan/command.go index 89d24e08b71..4ef005010c9 100644 --- a/gopls/internal/vulncheck/scan/command.go +++ b/gopls/internal/vulncheck/scan/command.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build go1.18 -// +build go1.18 - package scan import ( @@ -15,26 +12,16 @@ import ( "os" "os/exec" "sort" - "strings" - "sync" "time" - "golang.org/x/mod/semver" "golang.org/x/sync/errgroup" - "golang.org/x/tools/go/packages" - "golang.org/x/tools/gopls/internal/lsp/source" + "golang.org/x/tools/gopls/internal/cache" "golang.org/x/tools/gopls/internal/vulncheck" "golang.org/x/tools/gopls/internal/vulncheck/govulncheck" "golang.org/x/tools/gopls/internal/vulncheck/osv" - isem "golang.org/x/tools/gopls/internal/vulncheck/semver" "golang.org/x/vuln/scan" ) -// GoVersionForVulnTest is an internal environment variable used in gopls -// testing to examine govulncheck behavior with a go version different -// than what `go version` returns in the system. -const GoVersionForVulnTest = "_GOPLS_TEST_VULNCHECK_GOVERSION" - // Main implements gopls vulncheck. func Main(ctx context.Context, args ...string) error { // wrapping govulncheck. @@ -48,7 +35,10 @@ func Main(ctx context.Context, args ...string) error { // RunGovulncheck implements the codelens "Run Govulncheck" // that runs 'gopls vulncheck' and converts the output to gopls's internal data // used for diagnostics and hover message construction. -func RunGovulncheck(ctx context.Context, pattern string, snapshot source.Snapshot, dir string, log io.Writer) (*vulncheck.Result, error) { +// +// TODO(rfindley): this should accept a *View (which exposes) Options, rather +// than a snapshot. 
+func RunGovulncheck(ctx context.Context, pattern string, snapshot *cache.Snapshot, dir string, log io.Writer) (*vulncheck.Result, error) { vulncheckargs := []string{ "vulncheck", "--", "-json", @@ -58,7 +48,7 @@ func RunGovulncheck(ctx context.Context, pattern string, snapshot source.Snapsho if dir != "" { vulncheckargs = append(vulncheckargs, "-C", dir) } - if db := getEnv(snapshot, "GOVULNDB"); db != "" { + if db := cache.GetEnv(snapshot, "GOVULNDB"); db != "" { vulncheckargs = append(vulncheckargs, "-db", db) } vulncheckargs = append(vulncheckargs, pattern) @@ -78,7 +68,7 @@ func RunGovulncheck(ctx context.Context, pattern string, snapshot source.Snapsho cmd := exec.CommandContext(ctx, os.Args[0], vulncheckargs...) cmd.Env = getEnvSlices(snapshot) - if goversion := getEnv(snapshot, GoVersionForVulnTest); goversion != "" { + if goversion := cache.GetEnv(snapshot, cache.GoVersionForVulnTest); goversion != "" { // Let govulncheck API use a different Go version using the (undocumented) hook // in https://go.googlesource.com/vuln/+/v1.0.1/internal/scan/run.go#76 cmd.Env = append(cmd.Env, "GOVERSION="+goversion) @@ -123,7 +113,6 @@ func RunGovulncheck(ctx context.Context, pattern string, snapshot source.Snapsho type govulncheckHandler struct { logger io.Writer // forward progress reports to logger. - err error osvs map[string]*osv.Entry findings []*govulncheck.Finding @@ -171,306 +160,6 @@ func (h *govulncheckHandler) Progress(progress *govulncheck.Progress) error { return nil } -func getEnv(snapshot source.Snapshot, key string) string { - val, ok := snapshot.Options().Env[key] - if ok { - return val - } - return os.Getenv(key) -} - -func getEnvSlices(snapshot source.Snapshot) []string { +func getEnvSlices(snapshot *cache.Snapshot) []string { return append(os.Environ(), snapshot.Options().EnvSlice()...) } - -// semverToGoTag returns the Go standard library repository tag corresponding -// to semver, a version string without the initial "v". 
-// Go tags differ from standard semantic versions in a few ways, -// such as beginning with "go" instead of "v". -func semverToGoTag(v string) string { - if strings.HasPrefix(v, "v0.0.0") { - return "master" - } - // Special case: v1.0.0 => go1. - if v == "v1.0.0" { - return "go1" - } - if !semver.IsValid(v) { - return fmt.Sprintf("<!%s:invalid semver>", v) - } - goVersion := semver.Canonical(v) - prerelease := semver.Prerelease(goVersion) - versionWithoutPrerelease := strings.TrimSuffix(goVersion, prerelease) - patch := strings.TrimPrefix(versionWithoutPrerelease, semver.MajorMinor(goVersion)+".") - if patch == "0" { - versionWithoutPrerelease = strings.TrimSuffix(versionWithoutPrerelease, ".0") - } - goVersion = fmt.Sprintf("go%s", strings.TrimPrefix(versionWithoutPrerelease, "v")) - if prerelease != "" { - // Go prereleases look like "beta1" instead of "beta.1". - // "beta1" is bad for sorting (since beta10 comes before beta9), so - // require the dot form. - i := finalDigitsIndex(prerelease) - if i >= 1 { - if prerelease[i-1] != '.' { - return fmt.Sprintf("<!%s:final digits in a prerelease must follow a period>", v) - } - // Remove the dot. - prerelease = prerelease[:i-1] + prerelease[i:] - } - goVersion += strings.TrimPrefix(prerelease, "-") - } - return goVersion -} - -// finalDigitsIndex returns the index of the first digit in the sequence of digits ending s. -// If s doesn't end in digits, it returns -1. -func finalDigitsIndex(s string) int { - // Assume ASCII (since the semver package does anyway). - var i int - for i = len(s) - 1; i >= 0; i-- { - if s[i] < '0' || s[i] > '9' { - break - } - } - if i == len(s)-1 { - return -1 - } - return i + 1 -} - -// VulnerablePackages queries the vulndb and reports which vulnerabilities -// apply to this snapshot. The result contains a set of packages, -// grouped by vuln ID and by module. This implements the "import-based" -// vulnerability report on go.mod files. 
-func VulnerablePackages(ctx context.Context, snapshot source.Snapshot) (*vulncheck.Result, error) { - // TODO(hyangah): can we let 'govulncheck' take a package list - // used in the workspace and implement this function? - - // We want to report the intersection of vulnerable packages in the vulndb - // and packages transitively imported by this module ('go list -deps all'). - // We use snapshot.AllMetadata to retrieve the list of packages - // as an approximation. - // - // TODO(hyangah): snapshot.AllMetadata is a superset of - // `go list all` - e.g. when the workspace has multiple main modules - // (multiple go.mod files), that can include packages that are not - // used by this module. Vulncheck behavior with go.work is not well - // defined. Figure out the meaning, and if we decide to present - // the result as if each module is analyzed independently, make - // gopls track a separate build list for each module and use that - // information instead of snapshot.AllMetadata. - metadata, err := snapshot.AllMetadata(ctx) - if err != nil { - return nil, err - } - - // TODO(hyangah): handle vulnerabilities in the standard library. - - // Group packages by modules since vuln db is keyed by module. - metadataByModule := map[source.PackagePath][]*source.Metadata{} - for _, md := range metadata { - modulePath := source.PackagePath(osv.GoStdModulePath) - if mi := md.Module; mi != nil { - modulePath = source.PackagePath(mi.Path) - } - metadataByModule[modulePath] = append(metadataByModule[modulePath], md) - } - - var ( - mu sync.Mutex - // Keys are osv.Entry.ID - osvs = map[string]*osv.Entry{} - findings []*govulncheck.Finding - ) - - goVersion := snapshot.Options().Env[GoVersionForVulnTest] - if goVersion == "" { - goVersion = snapshot.View().GoVersionString() - } - - stdlibModule := &packages.Module{ - Path: osv.GoStdModulePath, - Version: goVersion, - } - - // GOVULNDB may point the test db URI. 
- db := getEnv(snapshot, "GOVULNDB") - - var group errgroup.Group - group.SetLimit(10) // limit govulncheck api runs - for _, mds := range metadataByModule { - mds := mds - group.Go(func() error { - effectiveModule := stdlibModule - if m := mds[0].Module; m != nil { - effectiveModule = m - } - for effectiveModule.Replace != nil { - effectiveModule = effectiveModule.Replace - } - ver := effectiveModule.Version - if ver == "" || !isem.Valid(ver) { - // skip invalid version strings. the underlying scan api is strict. - return nil - } - - // TODO(hyangah): batch these requests and add in-memory cache for efficiency. - vulns, err := osvsByModule(ctx, db, effectiveModule.Path+"@"+ver) - if err != nil { - return err - } - if len(vulns) == 0 { // No known vulnerability. - return nil - } - - // set of packages in this module known to gopls. - // This will be lazily initialized when we need it. - var knownPkgs map[source.PackagePath]bool - - // Report vulnerabilities that affect packages of this module. - for _, entry := range vulns { - var vulnerablePkgs []*govulncheck.Finding - fixed := fixedVersion(effectiveModule.Path, entry.Affected) - - for _, a := range entry.Affected { - if a.Module.Ecosystem != osv.GoEcosystem || a.Module.Path != effectiveModule.Path { - continue - } - for _, imp := range a.EcosystemSpecific.Packages { - if knownPkgs == nil { - knownPkgs = toPackagePathSet(mds) - } - if knownPkgs[source.PackagePath(imp.Path)] { - vulnerablePkgs = append(vulnerablePkgs, &govulncheck.Finding{ - OSV: entry.ID, - FixedVersion: fixed, - Trace: []*govulncheck.Frame{ - { - Module: effectiveModule.Path, - Version: effectiveModule.Version, - Package: imp.Path, - }, - }, - }) - } - } - } - if len(vulnerablePkgs) == 0 { - continue - } - mu.Lock() - osvs[entry.ID] = entry - findings = append(findings, vulnerablePkgs...) - mu.Unlock() - } - return nil - }) - } - if err := group.Wait(); err != nil { - return nil, err - } - - // Sort so the results are deterministic. 
- sort.Slice(findings, func(i, j int) bool { - x, y := findings[i], findings[j] - if x.OSV != y.OSV { - return x.OSV < y.OSV - } - return x.Trace[0].Package < y.Trace[0].Package - }) - ret := &vulncheck.Result{ - Entries: osvs, - Findings: findings, - Mode: vulncheck.ModeImports, - } - return ret, nil -} - -// toPackagePathSet transforms the metadata to a set of package paths. -func toPackagePathSet(mds []*source.Metadata) map[source.PackagePath]bool { - pkgPaths := make(map[source.PackagePath]bool, len(mds)) - for _, md := range mds { - pkgPaths[md.PkgPath] = true - } - return pkgPaths -} - -func fixedVersion(modulePath string, affected []osv.Affected) string { - fixed := LatestFixed(modulePath, affected) - if fixed != "" { - fixed = versionString(modulePath, fixed) - } - return fixed -} - -// versionString prepends a version string prefix (`v` or `go` -// depending on the modulePath) to the given semver-style version string. -func versionString(modulePath, version string) string { - if version == "" { - return "" - } - v := "v" + version - // These are internal Go module paths used by the vuln DB - // when listing vulns in standard library and the go command. - if modulePath == "stdlib" || modulePath == "toolchain" { - return semverToGoTag(v) - } - return v -} - -// osvsByModule runs a govulncheck database query. -func osvsByModule(ctx context.Context, db, moduleVersion string) ([]*osv.Entry, error) { - var args []string - args = append(args, "-mode=query", "-json") - if db != "" { - args = append(args, "-db="+db) - } - args = append(args, moduleVersion) - - ir, iw := io.Pipe() - handler := &osvReader{} - - var g errgroup.Group - g.Go(func() error { - defer iw.Close() // scan API doesn't close cmd.Stderr/cmd.Stdout. - cmd := scan.Command(ctx, args...) - cmd.Stdout = iw - // TODO(hakim): Do we need to set cmd.Env = getEnvSlices(), - // or is the process environment good enough? 
- if err := cmd.Start(); err != nil { - return err - } - return cmd.Wait() - }) - g.Go(func() error { - return govulncheck.HandleJSON(ir, handler) - }) - - if err := g.Wait(); err != nil { - return nil, err - } - return handler.entry, nil -} - -// osvReader implements govulncheck.Handler. -type osvReader struct { - entry []*osv.Entry -} - -func (h *osvReader) OSV(entry *osv.Entry) error { - h.entry = append(h.entry, entry) - return nil -} - -func (h *osvReader) Config(config *govulncheck.Config) error { - return nil -} - -func (h *osvReader) Finding(finding *govulncheck.Finding) error { - return nil -} - -func (h *osvReader) Progress(progress *govulncheck.Progress) error { - return nil -} diff --git a/gopls/internal/vulncheck/scan/util.go b/gopls/internal/vulncheck/scan/util.go deleted file mode 100644 index 2ea75a5183a..00000000000 --- a/gopls/internal/vulncheck/scan/util.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.18 -// +build go1.18 - -package scan - -import ( - "golang.org/x/mod/semver" - "golang.org/x/tools/gopls/internal/vulncheck/osv" - isem "golang.org/x/tools/gopls/internal/vulncheck/semver" -) - -// LatestFixed returns the latest fixed version in the list of affected ranges, -// or the empty string if there are no fixed versions. 
-func LatestFixed(modulePath string, as []osv.Affected) string { - v := "" - for _, a := range as { - if a.Module.Path != modulePath { - continue - } - for _, r := range a.Ranges { - if r.Type == osv.RangeTypeSemver { - for _, e := range r.Events { - if e.Fixed != "" && (v == "" || - semver.Compare(isem.CanonicalizeSemverPrefix(e.Fixed), isem.CanonicalizeSemverPrefix(v)) > 0) { - v = e.Fixed - } - } - } - } - } - return v -} diff --git a/gopls/internal/vulncheck/semver/semver.go b/gopls/internal/vulncheck/semver/semver.go index 5cd1ee864d3..ade710d0573 100644 --- a/gopls/internal/vulncheck/semver/semver.go +++ b/gopls/internal/vulncheck/semver/semver.go @@ -2,15 +2,11 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - // Package semver provides shared utilities for manipulating // Go semantic versions. package semver import ( - "regexp" "strings" "golang.org/x/mod/semver" @@ -47,13 +43,3 @@ func CanonicalizeSemverPrefix(s string) string { func Valid(v string) bool { return semver.IsValid(CanonicalizeSemverPrefix(v)) } - -var ( - // Regexp for matching go tags. The groups are: - // 1 the major.minor version - // 2 the patch version, or empty if none - // 3 the entire prerelease, if present - // 4 the prerelease type ("beta" or "rc") - // 5 the prerelease number - tagRegexp = regexp.MustCompile(`^go(\d+\.\d+)(\.\d+|)((beta|rc|-pre)(\d+))?$`) -) diff --git a/gopls/internal/vulncheck/semver/semver_test.go b/gopls/internal/vulncheck/semver/semver_test.go index 6daead6855b..8a462287fa4 100644 --- a/gopls/internal/vulncheck/semver/semver_test.go +++ b/gopls/internal/vulncheck/semver/semver_test.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build go1.18 -// +build go1.18 - package semver import ( diff --git a/gopls/internal/vulncheck/vulntest/db.go b/gopls/internal/vulncheck/vulntest/db.go index bda6d898a50..e661b83bc71 100644 --- a/gopls/internal/vulncheck/vulntest/db.go +++ b/gopls/internal/vulncheck/vulntest/db.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - // Package vulntest provides helpers for vulncheck functionality testing. package vulntest @@ -19,7 +16,7 @@ import ( "strings" "time" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/vulncheck/osv" "golang.org/x/tools/txtar" ) @@ -62,7 +59,7 @@ type DB struct { // URI returns the file URI that can be used for VULNDB environment // variable. func (db *DB) URI() string { - u := span.URIFromPath(filepath.Join(db.disk, "ID")) + u := protocol.URIFromPath(filepath.Join(db.disk, "ID")) return string(u) } diff --git a/gopls/internal/vulncheck/vulntest/db_test.go b/gopls/internal/vulncheck/vulntest/db_test.go index d68ba08b1eb..3c3407105ac 100644 --- a/gopls/internal/vulncheck/vulntest/db_test.go +++ b/gopls/internal/vulncheck/vulntest/db_test.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - package vulntest import ( @@ -17,7 +14,7 @@ import ( "time" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/vulncheck/osv" ) @@ -37,7 +34,7 @@ func TestNewDatabase(t *testing.T) { t.Fatal(err) } defer db.Clean() - dbpath := span.URIFromURI(db.URI()).Filename() + dbpath := protocol.DocumentURI(db.URI()).Path() // The generated JSON file will be in DB/GO-2022-0001.json. 
got := readOSVEntry(t, filepath.Join(dbpath, "GO-2020-0001.json")) diff --git a/gopls/internal/vulncheck/vulntest/report.go b/gopls/internal/vulncheck/vulntest/report.go index cbfd0aeb8ff..b67986cf8c2 100644 --- a/gopls/internal/vulncheck/vulntest/report.go +++ b/gopls/internal/vulncheck/vulntest/report.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - package vulntest import ( diff --git a/gopls/internal/vulncheck/vulntest/report_test.go b/gopls/internal/vulncheck/vulntest/report_test.go index 31f62aba838..b88633c2f1c 100644 --- a/gopls/internal/vulncheck/vulntest/report_test.go +++ b/gopls/internal/vulncheck/vulntest/report_test.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - package vulntest import ( diff --git a/gopls/internal/vulncheck/vulntest/stdlib.go b/gopls/internal/vulncheck/vulntest/stdlib.go index 9bf4d4ef0d4..57194f71688 100644 --- a/gopls/internal/vulncheck/vulntest/stdlib.go +++ b/gopls/internal/vulncheck/vulntest/stdlib.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - package vulntest import ( diff --git a/gopls/internal/vulncheck/vulntest/stdlib_test.go b/gopls/internal/vulncheck/vulntest/stdlib_test.go index 8f893f3ec42..7b212976350 100644 --- a/gopls/internal/vulncheck/vulntest/stdlib_test.go +++ b/gopls/internal/vulncheck/vulntest/stdlib_test.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build go1.18 -// +build go1.18 - package vulntest import "testing" diff --git a/gopls/internal/work/completion.go b/gopls/internal/work/completion.go new file mode 100644 index 00000000000..194721ef36d --- /dev/null +++ b/gopls/internal/work/completion.go @@ -0,0 +1,161 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package work + +import ( + "context" + "errors" + "fmt" + "io/fs" + "os" + "path/filepath" + "sort" + "strings" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" +) + +func Completion(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.CompletionList, error) { + ctx, done := event.Start(ctx, "work.Completion") + defer done() + + // Get the position of the cursor. + pw, err := snapshot.ParseWork(ctx, fh) + if err != nil { + return nil, fmt.Errorf("getting go.work file handle: %w", err) + } + cursor, err := pw.Mapper.PositionOffset(position) + if err != nil { + return nil, fmt.Errorf("computing cursor offset: %w", err) + } + + // Find the use statement the user is in. + use, pathStart, _ := usePath(pw, cursor) + if use == nil { + return &protocol.CompletionList{}, nil + } + completingFrom := use.Path[:cursor-pathStart] + + // We're going to find the completions of the user input + // (completingFrom) by doing a walk on the innermost directory + // of the given path, and comparing the found paths to make sure + // that they match the component of the path after the + // innermost directory. + // + // We'll maintain two paths when doing this: pathPrefixSlash + // is essentially the path the user typed in, and pathPrefixAbs + // is the path made absolute from the go.work directory. 
+ + pathPrefixSlash := completingFrom + pathPrefixAbs := filepath.FromSlash(pathPrefixSlash) + if !filepath.IsAbs(pathPrefixAbs) { + pathPrefixAbs = filepath.Join(filepath.Dir(pw.URI.Path()), pathPrefixAbs) + } + + // pathPrefixDir is the directory that will be walked to find matches. + // If pathPrefixSlash is not explicitly a directory boundary (is either equivalent to "." or + // ends in a separator) we need to examine its parent directory to find sibling files that + // match. + depthBound := 5 + pathPrefixDir, pathPrefixBase := pathPrefixAbs, "" + pathPrefixSlashDir := pathPrefixSlash + if filepath.Clean(pathPrefixSlash) != "." && !strings.HasSuffix(pathPrefixSlash, "/") { + depthBound++ + pathPrefixDir, pathPrefixBase = filepath.Split(pathPrefixAbs) + pathPrefixSlashDir = dirNonClean(pathPrefixSlash) + } + + var completions []string + // Stop traversing deeper once we've hit 10k files to try to stay generally under 100ms. + const numSeenBound = 10000 + var numSeen int + stopWalking := errors.New("hit numSeenBound") + err = filepath.WalkDir(pathPrefixDir, func(wpath string, entry fs.DirEntry, err error) error { + if err != nil { + // golang/go#64225: an error reading a dir is expected, as the user may + // be typing out a use directive for a directory that doesn't exist. + return nil + } + if numSeen > numSeenBound { + // Stop traversing if we hit bound. + return stopWalking + } + numSeen++ + + // rel is the path relative to pathPrefixDir. + // Make sure that it has pathPrefixBase as a prefix + // otherwise it won't match the beginning of the + // base component of the path the user typed in. + rel := strings.TrimPrefix(wpath[len(pathPrefixDir):], string(filepath.Separator)) + if entry.IsDir() && wpath != pathPrefixDir && !strings.HasPrefix(rel, pathPrefixBase) { + return filepath.SkipDir + } + + // Check for a match (a module directory). 
+ if filepath.Base(rel) == "go.mod" { + relDir := strings.TrimSuffix(dirNonClean(rel), string(os.PathSeparator)) + completionPath := join(pathPrefixSlashDir, filepath.ToSlash(relDir)) + + if !strings.HasPrefix(completionPath, completingFrom) { + return nil + } + if strings.HasSuffix(completionPath, "/") { + // Don't suggest paths that end in "/". This happens + // when the input is a path that ends in "/" and + // the completion is empty. + return nil + } + completion := completionPath[len(completingFrom):] + if completingFrom == "" && !strings.HasPrefix(completion, "./") { + // Bias towards "./" prefixes. + completion = join(".", completion) + } + + completions = append(completions, completion) + } + + if depth := strings.Count(rel, string(filepath.Separator)); depth >= depthBound { + return filepath.SkipDir + } + return nil + }) + if err != nil && !errors.Is(err, stopWalking) { + return nil, fmt.Errorf("walking to find completions: %w", err) + } + + sort.Strings(completions) + + items := []protocol.CompletionItem{} // must be a slice + for _, c := range completions { + items = append(items, protocol.CompletionItem{ + Label: c, + InsertText: c, + }) + } + return &protocol.CompletionList{Items: items}, nil +} + +// dirNonClean is filepath.Dir, without the Clean at the end. +func dirNonClean(path string) string { + vol := filepath.VolumeName(path) + i := len(path) - 1 + for i >= len(vol) && !os.IsPathSeparator(path[i]) { + i-- + } + return path[len(vol) : i+1] +} + +func join(a, b string) string { + if a == "" { + return b + } + if b == "" { + return a + } + return strings.TrimSuffix(a, "/") + "/" + b +} diff --git a/gopls/internal/work/diagnostics.go b/gopls/internal/work/diagnostics.go new file mode 100644 index 00000000000..f1acd4d27c7 --- /dev/null +++ b/gopls/internal/work/diagnostics.go @@ -0,0 +1,92 @@ +// Copyright 2022 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package work + +import ( + "context" + "fmt" + "os" + "path/filepath" + + "golang.org/x/mod/modfile" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" +) + +func Diagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { + ctx, done := event.Start(ctx, "work.Diagnostics", snapshot.Labels()...) + defer done() + + reports := map[protocol.DocumentURI][]*cache.Diagnostic{} + uri := snapshot.View().GoWork() + if uri == "" { + return nil, nil + } + fh, err := snapshot.ReadFile(ctx, uri) + if err != nil { + return nil, err + } + reports[fh.URI()] = []*cache.Diagnostic{} + diagnostics, err := diagnoseOne(ctx, snapshot, fh) + if err != nil { + return nil, err + } + for _, d := range diagnostics { + fh, err := snapshot.ReadFile(ctx, d.URI) + if err != nil { + return nil, err + } + reports[fh.URI()] = append(reports[fh.URI()], d) + } + + return reports, nil +} + +func diagnoseOne(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]*cache.Diagnostic, error) { + pw, err := snapshot.ParseWork(ctx, fh) + if err != nil { + if pw == nil || len(pw.ParseErrors) == 0 { + return nil, err + } + return pw.ParseErrors, nil + } + + // Add diagnostic if a directory does not contain a module. 
+ var diagnostics []*cache.Diagnostic + for _, use := range pw.File.Use { + rng, err := pw.Mapper.OffsetRange(use.Syntax.Start.Byte, use.Syntax.End.Byte) + if err != nil { + return nil, err + } + + modfh, err := snapshot.ReadFile(ctx, modFileURI(pw, use)) + if err != nil { + return nil, err + } + if _, err := modfh.Content(); err != nil && os.IsNotExist(err) { + diagnostics = append(diagnostics, &cache.Diagnostic{ + URI: fh.URI(), + Range: rng, + Severity: protocol.SeverityError, + Source: cache.WorkFileError, + Message: fmt.Sprintf("directory %v does not contain a module", use.Path), + }) + } + } + return diagnostics, nil +} + +func modFileURI(pw *cache.ParsedWorkFile, use *modfile.Use) protocol.DocumentURI { + workdir := filepath.Dir(pw.URI.Path()) + + modroot := filepath.FromSlash(use.Path) + if !filepath.IsAbs(modroot) { + modroot = filepath.Join(workdir, modroot) + } + + return protocol.URIFromPath(filepath.Join(modroot, "go.mod")) +} diff --git a/gopls/internal/work/format.go b/gopls/internal/work/format.go new file mode 100644 index 00000000000..162bc8c0004 --- /dev/null +++ b/gopls/internal/work/format.go @@ -0,0 +1,30 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package work + +import ( + "context" + + "golang.org/x/mod/modfile" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/event" +) + +func Format(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.TextEdit, error) { + ctx, done := event.Start(ctx, "work.Format") + defer done() + + pw, err := snapshot.ParseWork(ctx, fh) + if err != nil { + return nil, err + } + formatted := modfile.Format(pw.File.Syntax) + // Calculate the edits to be made due to the change. 
+ diffs := diff.Bytes(pw.Mapper.Content, formatted) + return protocol.EditsFromDiffEdits(pw.Mapper, diffs) +} diff --git a/gopls/internal/work/hover.go b/gopls/internal/work/hover.go new file mode 100644 index 00000000000..c59c14789be --- /dev/null +++ b/gopls/internal/work/hover.go @@ -0,0 +1,93 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package work + +import ( + "bytes" + "context" + "fmt" + + "golang.org/x/mod/modfile" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" +) + +func Hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.Hover, error) { + // We only provide hover information for the view's go.work file. + if fh.URI() != snapshot.View().GoWork() { + return nil, nil + } + + ctx, done := event.Start(ctx, "work.Hover") + defer done() + + // Get the position of the cursor. + pw, err := snapshot.ParseWork(ctx, fh) + if err != nil { + return nil, fmt.Errorf("getting go.work file handle: %w", err) + } + offset, err := pw.Mapper.PositionOffset(position) + if err != nil { + return nil, fmt.Errorf("computing cursor offset: %w", err) + } + + // Confirm that the cursor is inside a use statement, and then find + // the position of the use statement's directory path. + use, pathStart, pathEnd := usePath(pw, offset) + + // The cursor position is not on a use statement. + if use == nil { + return nil, nil + } + + // Get the mod file denoted by the use. 
+ modfh, err := snapshot.ReadFile(ctx, modFileURI(pw, use)) + if err != nil { + return nil, fmt.Errorf("getting modfile handle: %w", err) + } + pm, err := snapshot.ParseMod(ctx, modfh) + if err != nil { + return nil, fmt.Errorf("getting modfile handle: %w", err) + } + if pm.File.Module == nil { + return nil, fmt.Errorf("modfile has no module declaration") + } + mod := pm.File.Module.Mod + + // Get the range to highlight for the hover. + rng, err := pw.Mapper.OffsetRange(pathStart, pathEnd) + if err != nil { + return nil, err + } + options := snapshot.Options() + return &protocol.Hover{ + Contents: protocol.MarkupContent{ + Kind: options.PreferredContentFormat, + Value: mod.Path, + }, + Range: rng, + }, nil +} + +func usePath(pw *cache.ParsedWorkFile, offset int) (use *modfile.Use, pathStart, pathEnd int) { + for _, u := range pw.File.Use { + path := []byte(u.Path) + s, e := u.Syntax.Start.Byte, u.Syntax.End.Byte + i := bytes.Index(pw.Mapper.Content[s:e], path) + if i == -1 { + // This should not happen. + continue + } + // Shift the start position to the location of the + // module directory within the use statement. 
+ pathStart, pathEnd = s+i, s+i+len(path) + if pathStart <= offset && offset <= pathEnd { + return u, pathStart, pathEnd + } + } + return nil, 0, 0 +} diff --git a/gopls/main.go b/gopls/main.go index 32507f25e7f..9217b278b1a 100644 --- a/gopls/main.go +++ b/gopls/main.go @@ -17,14 +17,19 @@ import ( "context" "os" + "golang.org/x/telemetry" + "golang.org/x/tools/gopls/internal/cmd" "golang.org/x/tools/gopls/internal/hooks" - "golang.org/x/tools/gopls/internal/lsp/cmd" - "golang.org/x/tools/gopls/internal/telemetry" + versionpkg "golang.org/x/tools/gopls/internal/version" "golang.org/x/tools/internal/tool" ) +var version = "" // if set by the linker, overrides the gopls version + func main() { - telemetry.Start() + versionpkg.VersionOverride = version + + telemetry.Start(telemetry.Config{ReportCrashes: true}) ctx := context.Background() - tool.Main(ctx, cmd.New("gopls", "", nil, hooks.Options), os.Args[1:]) + tool.Main(ctx, cmd.New(hooks.Options), os.Args[1:]) } diff --git a/gopls/release/release.go b/gopls/release/release.go index b2e0b3ca847..26ce5f7870a 100644 --- a/gopls/release/release.go +++ b/gopls/release/release.go @@ -14,18 +14,14 @@ package main import ( "flag" "fmt" - "go/types" "log" "os" + "os/exec" "path/filepath" - "strconv" "strings" - exec "golang.org/x/sys/execabs" - "golang.org/x/mod/modfile" "golang.org/x/mod/semver" - "golang.org/x/tools/go/packages" ) var versionFlag = flag.String("version", "", "version to tag") @@ -53,10 +49,6 @@ func main() { if filepath.Base(wd) != "gopls" { log.Fatalf("must run from the gopls module") } - // Confirm that they have updated the hardcoded version. - if err := validateHardcodedVersion(*versionFlag); err != nil { - log.Fatal(err) - } // Confirm that the versions in the go.mod file are correct. 
if err := validateGoModFile(wd); err != nil { log.Fatal(err) @@ -65,47 +57,6 @@ func main() { os.Exit(0) } -// validateHardcodedVersion reports whether the version hardcoded in the gopls -// binary is equivalent to the version being published. It reports an error if -// not. -func validateHardcodedVersion(version string) error { - const debugPkg = "golang.org/x/tools/gopls/internal/lsp/debug" - pkgs, err := packages.Load(&packages.Config{ - Mode: packages.NeedName | packages.NeedFiles | - packages.NeedCompiledGoFiles | packages.NeedImports | - packages.NeedTypes | packages.NeedTypesSizes, - }, debugPkg) - if err != nil { - return err - } - if len(pkgs) != 1 { - return fmt.Errorf("expected 1 package, got %v", len(pkgs)) - } - pkg := pkgs[0] - if len(pkg.Errors) > 0 { - return fmt.Errorf("failed to load %q: first error: %w", debugPkg, pkg.Errors[0]) - } - obj := pkg.Types.Scope().Lookup("Version") - c, ok := obj.(*types.Const) - if !ok { - return fmt.Errorf("no constant named Version") - } - hardcodedVersion, err := strconv.Unquote(c.Val().ExactString()) - if err != nil { - return err - } - if semver.Prerelease(hardcodedVersion) != "" { - return fmt.Errorf("unexpected pre-release for hardcoded version: %s", hardcodedVersion) - } - // Don't worry about pre-release tags and expect that there is no build - // suffix. - version = strings.TrimSuffix(version, semver.Prerelease(version)) - if hardcodedVersion != version { - return fmt.Errorf("expected version to be %s, got %s", *versionFlag, hardcodedVersion) - } - return nil -} - func validateGoModFile(goplsDir string) error { filename := filepath.Join(goplsDir, "go.mod") data, err := os.ReadFile(filename) diff --git a/gopls/test/debug/debug_test.go b/gopls/test/debug/debug_test.go deleted file mode 100644 index dfe8a3e6edf..00000000000 --- a/gopls/test/debug/debug_test.go +++ /dev/null @@ -1,153 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package debug_test - -// Provide 'static type checking' of the templates. This guards against changes in various -// gopls datastructures causing template execution to fail. The checking is done by -// the github.com/jba/templatecheck package. Before that is run, the test checks that -// its list of templates and their arguments corresponds to the arguments in -// calls to render(). The test assumes that all uses of templates are done through render(). - -import ( - "go/ast" - "html/template" - "os" - "runtime" - "sort" - "strings" - "testing" - - "github.com/jba/templatecheck" - "golang.org/x/tools/go/packages" - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/debug" - "golang.org/x/tools/internal/testenv" -) - -var templates = map[string]struct { - tmpl *template.Template - data interface{} // a value of the needed type -}{ - "MainTmpl": {debug.MainTmpl, &debug.Instance{}}, - "DebugTmpl": {debug.DebugTmpl, nil}, - "RPCTmpl": {debug.RPCTmpl, &debug.Rpcs{}}, - "TraceTmpl": {debug.TraceTmpl, debug.TraceResults{}}, - "CacheTmpl": {debug.CacheTmpl, &cache.Cache{}}, - "SessionTmpl": {debug.SessionTmpl, &cache.Session{}}, - "ViewTmpl": {debug.ViewTmpl, &cache.View{}}, - "ClientTmpl": {debug.ClientTmpl, &debug.Client{}}, - "ServerTmpl": {debug.ServerTmpl, &debug.Server{}}, - "FileTmpl": {debug.FileTmpl, &cache.Overlay{}}, - "InfoTmpl": {debug.InfoTmpl, "something"}, - "MemoryTmpl": {debug.MemoryTmpl, runtime.MemStats{}}, - "AnalysisTmpl": {debug.AnalysisTmpl, new(debug.State).Analysis()}, -} - -func TestTemplates(t *testing.T) { - testenv.NeedsGoPackages(t) - testenv.NeedsLocalXTools(t) - - cfg := &packages.Config{ - Mode: packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo, - } - cfg.Env = os.Environ() - cfg.Env = append(cfg.Env, - "GOPACKAGESDRIVER=off", - "GOWORK=off", // necessary for -mod=mod below - 
"GOFLAGS=-mod=mod", - ) - - pkgs, err := packages.Load(cfg, "golang.org/x/tools/gopls/internal/lsp/debug") - if err != nil { - t.Fatal(err) - } - if len(pkgs) != 1 { - t.Fatalf("expected a single package, but got %d", len(pkgs)) - } - p := pkgs[0] - if len(p.Errors) != 0 { - t.Fatalf("compiler error, e.g. %v", p.Errors[0]) - } - // find the calls to render in serve.go - tree := treeOf(p, "serve.go") - if tree == nil { - t.Fatalf("found no syntax tree for %s", "serve.go") - } - renders := callsOf(p, tree, "render") - if len(renders) == 0 { - t.Fatalf("found no calls to render") - } - var found = make(map[string]bool) - for _, r := range renders { - if len(r.Args) != 2 { - // template, func - t.Fatalf("got %d args, expected 2", len(r.Args)) - } - t0, ok := p.TypesInfo.Types[r.Args[0]] - if !ok || !t0.IsValue() || t0.Type.String() != "*html/template.Template" { - t.Fatalf("no type info for template") - } - if id, ok := r.Args[0].(*ast.Ident); !ok { - t.Errorf("expected *ast.Ident, got %T", r.Args[0]) - } else { - found[id.Name] = true - } - } - // make sure found and templates have the same templates - for k := range found { - if _, ok := templates[k]; !ok { - t.Errorf("code has template %s, but test does not", k) - } - } - for k := range templates { - if _, ok := found[k]; !ok { - t.Errorf("test has template %s, code does not", k) - } - } - // now check all the known templates, in alphabetic order, for determinacy - keys := []string{} - for k := range templates { - keys = append(keys, k) - } - sort.Strings(keys) - for _, k := range keys { - v := templates[k] - // the FuncMap is an annoyance; should not be necessary - if err := templatecheck.CheckHTML(v.tmpl, v.data); err != nil { - t.Errorf("%s: %v", k, err) - continue - } - t.Logf("%s ok", k) - } -} - -func callsOf(p *packages.Package, tree *ast.File, name string) []*ast.CallExpr { - var ans []*ast.CallExpr - f := func(n ast.Node) bool { - x, ok := n.(*ast.CallExpr) - if !ok { - return true - } - if y, ok := 
x.Fun.(*ast.Ident); ok { - if y.Name == name { - ans = append(ans, x) - } - } - return true - } - ast.Inspect(tree, f) - return ans -} - -func treeOf(p *packages.Package, fname string) *ast.File { - for _, tree := range p.Syntax { - loc := tree.Package - pos := p.Fset.PositionFor(loc, false) - if strings.HasSuffix(pos.Filename, fname) { - return tree - } - } - return nil -} diff --git a/internal/aliases/aliases.go b/internal/aliases/aliases.go new file mode 100644 index 00000000000..f89112c8ee5 --- /dev/null +++ b/internal/aliases/aliases.go @@ -0,0 +1,28 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package aliases + +import ( + "go/token" + "go/types" +) + +// Package aliases defines backward compatible shims +// for the types.Alias type representation added in 1.22. +// This defines placeholders for x/tools until 1.26. + +// NewAlias creates a new TypeName in Package pkg that +// is an alias for the type rhs. +// +// When GoVersion>=1.22 and GODEBUG=gotypesalias=1, +// the Type() of the return value is a *types.Alias. +func NewAlias(pos token.Pos, pkg *types.Package, name string, rhs types.Type) *types.TypeName { + if enabled() { + tname := types.NewTypeName(pos, pkg, name, nil) + newAlias(tname, rhs) + return tname + } + return types.NewTypeName(pos, pkg, name, rhs) +} diff --git a/internal/aliases/aliases_go121.go b/internal/aliases/aliases_go121.go new file mode 100644 index 00000000000..1872b56ff8f --- /dev/null +++ b/internal/aliases/aliases_go121.go @@ -0,0 +1,30 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !go1.22 +// +build !go1.22 + +package aliases + +import ( + "go/types" +) + +// Alias is a placeholder for a go/types.Alias for <=1.21. +// It will never be created by go/types. 
+type Alias struct{} + +func (*Alias) String() string { panic("unreachable") } + +func (*Alias) Underlying() types.Type { panic("unreachable") } + +func (*Alias) Obj() *types.TypeName { panic("unreachable") } + +// Unalias returns the type t for go <=1.21. +func Unalias(t types.Type) types.Type { return t } + +// Always false for go <=1.21. Ignores GODEBUG. +func enabled() bool { return false } + +func newAlias(name *types.TypeName, rhs types.Type) *Alias { panic("unreachable") } diff --git a/internal/aliases/aliases_go122.go b/internal/aliases/aliases_go122.go new file mode 100644 index 00000000000..8b92116284d --- /dev/null +++ b/internal/aliases/aliases_go122.go @@ -0,0 +1,72 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.22 +// +build go1.22 + +package aliases + +import ( + "go/ast" + "go/parser" + "go/token" + "go/types" + "os" + "strings" + "sync" +) + +// Alias is an alias of types.Alias. +type Alias = types.Alias + +// Unalias is a wrapper of types.Unalias. +func Unalias(t types.Type) types.Type { return types.Unalias(t) } + +// newAlias is an internal alias around types.NewAlias. +// Direct usage is discouraged at the moment. +// Try to use NewAlias instead. +func newAlias(tname *types.TypeName, rhs types.Type) *Alias { + a := types.NewAlias(tname, rhs) + // TODO(go.dev/issue/65455): Remove kludgy workaround to set a.actual as a side-effect. + Unalias(a) + return a +} + +// enabled returns true when types.Aliases are enabled. +func enabled() bool { + // Use the gotypesalias value in GODEBUG if set. + godebug := os.Getenv("GODEBUG") + value := -1 // last set value. 
+ for _, f := range strings.Split(godebug, ",") { + switch f { + case "gotypesalias=1": + value = 1 + case "gotypesalias=0": + value = 0 + } + } + switch value { + case 0: + return false + case 1: + return true + default: + return aliasesDefault() + } +} + +// aliasesDefault reports if aliases are enabled by default. +func aliasesDefault() bool { + // Dynamically check if Aliases will be produced from go/types. + aliasesDefaultOnce.Do(func() { + fset := token.NewFileSet() + f, _ := parser.ParseFile(fset, "a.go", "package p; type A = int", 0) + pkg, _ := new(types.Config).Check("p", fset, []*ast.File{f}, nil) + _, gotypesaliasDefault = pkg.Scope().Lookup("A").Type().(*types.Alias) + }) + return gotypesaliasDefault +} + +var gotypesaliasDefault bool +var aliasesDefaultOnce sync.Once diff --git a/internal/aliases/aliases_test.go b/internal/aliases/aliases_test.go new file mode 100644 index 00000000000..d2d3464e19a --- /dev/null +++ b/internal/aliases/aliases_test.go @@ -0,0 +1,72 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package aliases_test + +import ( + "go/ast" + "go/parser" + "go/token" + "go/types" + "testing" + + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/testenv" +) + +// Assert that Obj exists on Alias. +var _ func(*aliases.Alias) *types.TypeName = (*aliases.Alias).Obj + +// TestNewAlias tests that alias.NewAlias creates an alias of a type +// whose underlying and Unaliased type is *Named. +// When gotypesalias=1 and GoVersion >= 1.22, the type will +// be an *aliases.Alias. 
+func TestNewAlias(t *testing.T) { + const source = ` + package P + + type Named int + ` + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "hello.go", source, 0) + if err != nil { + t.Fatal(err) + } + + var conf types.Config + pkg, err := conf.Check("P", fset, []*ast.File{f}, nil) + if err != nil { + t.Fatal(err) + } + + expr := `*Named` + tv, err := types.Eval(fset, pkg, 0, expr) + if err != nil { + t.Fatalf("Eval(%s) failed: %v", expr, err) + } + + for _, godebug := range []string{"", "gotypesalias=1"} { + t.Run(godebug, func(t *testing.T) { + t.Setenv("GODEBUG", godebug) + + A := aliases.NewAlias(token.NoPos, pkg, "A", tv.Type) + if got, want := A.Name(), "A"; got != want { + t.Errorf("Expected A.Name()==%q. got %q", want, got) + } + + if got, want := A.Type().Underlying(), tv.Type; got != want { + t.Errorf("Expected A.Type().Underlying()==%q. got %q", want, got) + } + if got, want := aliases.Unalias(A.Type()), tv.Type; got != want { + t.Errorf("Expected Unalias(A)==%q. got %q", want, got) + } + + if testenv.Go1Point() >= 22 && godebug == "gotypesalias=1" { + if _, ok := A.Type().(*aliases.Alias); !ok { + t.Errorf("Expected A.Type() to be a types.Alias(). got %q", A.Type()) + } + } + }) + } +} diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index 2b291680479..eb830888aad 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -13,6 +13,8 @@ import ( "go/token" "go/types" "strconv" + + "golang.org/x/tools/internal/aliases" ) func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos { @@ -28,21 +30,24 @@ func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos } func ZeroValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { - under := typ - if n, ok := typ.(*types.Named); ok { + // TODO(adonovan): think about generics, and also generic aliases. 
+ under := aliases.Unalias(typ) + // Don't call Underlying unconditionally: although it removes + // Named and Alias, it also removes TypeParam. + if n, ok := under.(*types.Named); ok { under = n.Underlying() } - switch u := under.(type) { + switch under := under.(type) { case *types.Basic: switch { - case u.Info()&types.IsNumeric != 0: + case under.Info()&types.IsNumeric != 0: return &ast.BasicLit{Kind: token.INT, Value: "0"} - case u.Info()&types.IsBoolean != 0: + case under.Info()&types.IsBoolean != 0: return &ast.Ident{Name: "false"} - case u.Info()&types.IsString != 0: + case under.Info()&types.IsString != 0: return &ast.BasicLit{Kind: token.STRING, Value: `""`} default: - panic(fmt.Sprintf("unknown basic type %v", u)) + panic(fmt.Sprintf("unknown basic type %v", under)) } case *types.Chan, *types.Interface, *types.Map, *types.Pointer, *types.Signature, *types.Slice, *types.Array: return ast.NewIdent("nil") @@ -151,6 +156,10 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { }, }) } + if t.Variadic() { + last := params[len(params)-1] + last.Type = &ast.Ellipsis{Elt: last.Type.(*ast.ArrayType).Elt} + } var returns []*ast.Field for i := 0; i < t.Results().Len(); i++ { r := TypeExpr(f, pkg, t.Results().At(i).Type()) @@ -169,7 +178,7 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { List: returns, }, } - case *types.Named: + case interface{ Obj() *types.TypeName }: // *types.{Alias,Named,TypeParam} if t.Obj().Pkg() == nil { return ast.NewIdent(t.Obj().Name()) } diff --git a/go/analysis/passes/internal/analysisutil/extractdoc.go b/internal/analysisinternal/extractdoc.go similarity index 99% rename from go/analysis/passes/internal/analysisutil/extractdoc.go rename to internal/analysisinternal/extractdoc.go index 0e175ca06f8..39507723d3d 100644 --- a/go/analysis/passes/internal/analysisutil/extractdoc.go +++ b/internal/analysisinternal/extractdoc.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a 
BSD-style // license that can be found in the LICENSE file. -package analysisutil +package analysisinternal import ( "fmt" diff --git a/go/analysis/passes/internal/analysisutil/extractdoc_test.go b/internal/analysisinternal/extractdoc_test.go similarity index 92% rename from go/analysis/passes/internal/analysisutil/extractdoc_test.go rename to internal/analysisinternal/extractdoc_test.go index 8fbf80e2655..8c99b11ede3 100644 --- a/go/analysis/passes/internal/analysisutil/extractdoc_test.go +++ b/internal/analysisinternal/extractdoc_test.go @@ -2,12 +2,12 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package analysisutil_test +package analysisinternal_test import ( "testing" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/internal/analysisinternal" ) func TestExtractDoc(t *testing.T) { @@ -68,7 +68,7 @@ var x = syntax error {multi, "nocolon", "error: 'Analyzer nocolon' heading not followed by 'nocolon: summary...' line"}, } { - got, err := analysisutil.ExtractDoc(test.content, test.name) + got, err := analysisinternal.ExtractDoc(test.content, test.name) if err != nil { got = "error: " + err.Error() } diff --git a/internal/apidiff/apidiff.go b/internal/apidiff/apidiff.go index 873ee85fbc4..087e112e599 100644 --- a/internal/apidiff/apidiff.go +++ b/internal/apidiff/apidiff.go @@ -19,6 +19,8 @@ import ( "go/constant" "go/token" "go/types" + + "golang.org/x/tools/internal/aliases" ) // Changes reports on the differences between the APIs of the old and new packages. @@ -206,7 +208,7 @@ func (d *differ) typeChanged(obj types.Object, part string, old, new types.Type) // Since these can change without affecting compatibility, we don't want users to // be distracted by them, so we remove them. 
func removeNamesFromSignature(t types.Type) types.Type { - sig, ok := t.(*types.Signature) + sig, ok := aliases.Unalias(t).(*types.Signature) if !ok { return t } diff --git a/internal/apidiff/compatibility.go b/internal/apidiff/compatibility.go index 2e327485b52..64cad5337be 100644 --- a/internal/apidiff/compatibility.go +++ b/internal/apidiff/compatibility.go @@ -8,9 +8,14 @@ import ( "fmt" "go/types" "reflect" + + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/typesinternal" ) func (d *differ) checkCompatible(otn *types.TypeName, old, new types.Type) { + old = aliases.Unalias(old) + new = aliases.Unalias(new) switch old := old.(type) { case *types.Interface: if new, ok := new.(*types.Interface); ok { @@ -268,7 +273,7 @@ func (d *differ) checkCompatibleDefined(otn *types.TypeName, old *types.Named, n return } // Interface method sets are checked in checkCompatibleInterface. - if _, ok := old.Underlying().(*types.Interface); ok { + if types.IsInterface(old) { return } @@ -287,7 +292,7 @@ func (d *differ) checkMethodSet(otn *types.TypeName, oldt, newt types.Type, addc oldMethodSet := exportedMethods(oldt) newMethodSet := exportedMethods(newt) msname := otn.Name() - if _, ok := oldt.(*types.Pointer); ok { + if _, ok := aliases.Unalias(oldt).(*types.Pointer); ok { msname = "*" + msname } for name, oldMethod := range oldMethodSet { @@ -301,7 +306,8 @@ func (d *differ) checkMethodSet(otn *types.TypeName, oldt, newt types.Type, addc // T and one for the embedded type U. We want both messages to appear, // but the messageSet dedup logic will allow only one message for a given // object. So use the part string to distinguish them. 
- if receiverNamedType(oldMethod).Obj() != otn { + recv := oldMethod.Type().(*types.Signature).Recv() + if _, named := typesinternal.ReceiverNamed(recv); named.Obj() != otn { part = fmt.Sprintf(", method set of %s", msname) } d.incompatible(oldMethod, part, "removed") @@ -332,11 +338,11 @@ func (d *differ) checkMethodSet(otn *types.TypeName, oldt, newt types.Type, addc } // exportedMethods collects all the exported methods of type's method set. -func exportedMethods(t types.Type) map[string]types.Object { - m := map[string]types.Object{} +func exportedMethods(t types.Type) map[string]*types.Func { + m := make(map[string]*types.Func) ms := types.NewMethodSet(t) for i := 0; i < ms.Len(); i++ { - obj := ms.At(i).Obj() + obj := ms.At(i).Obj().(*types.Func) if obj.Exported() { m[obj.Name()] = obj } @@ -344,22 +350,7 @@ func exportedMethods(t types.Type) map[string]types.Object { return m } -func receiverType(method types.Object) types.Type { - return method.Type().(*types.Signature).Recv().Type() -} - -func receiverNamedType(method types.Object) *types.Named { - switch t := receiverType(method).(type) { - case *types.Pointer: - return t.Elem().(*types.Named) - case *types.Named: - return t - default: - panic("unreachable") - } -} - -func hasPointerReceiver(method types.Object) bool { - _, ok := receiverType(method).(*types.Pointer) - return ok +func hasPointerReceiver(method *types.Func) bool { + isptr, _ := typesinternal.ReceiverNamed(method.Type().(*types.Signature).Recv()) + return isptr } diff --git a/internal/apidiff/correspondence.go b/internal/apidiff/correspondence.go index 0d7b4c5a5f1..dd2f5178173 100644 --- a/internal/apidiff/correspondence.go +++ b/internal/apidiff/correspondence.go @@ -7,6 +7,8 @@ package apidiff import ( "go/types" "sort" + + "golang.org/x/tools/internal/aliases" ) // Two types are correspond if they are identical except for defined types, @@ -31,6 +33,8 @@ func (d *differ) correspond(old, new types.Type) bool { // Compare this to the 
implementation of go/types.Identical. func (d *differ) corr(old, new types.Type, p *ifacePair) bool { // Structure copied from types.Identical. + old = aliases.Unalias(old) + new = aliases.Unalias(new) switch old := old.(type) { case *types.Basic: return types.Identical(old, new) diff --git a/internal/cmd/deadcode/doc.go b/internal/cmd/deadcode/doc.go deleted file mode 100644 index 8d28eb31288..00000000000 --- a/internal/cmd/deadcode/doc.go +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -The deadcode command reports unreachable functions in Go programs. - -Usage: deadcode [flags] package... - -The deadcode command loads a Go program from source then uses Rapid -Type Analysis (RTA) to build a call graph of all the functions -reachable from the program's main function. Any functions that are not -reachable are reported as dead code, grouped by package. - -Packages are expressed in the notation of 'go list' (or other -underlying build system if you are using an alternative -golang.org/x/go/packages driver). Only executable (main) packages are -considered starting points for the analysis. - -The -test flag causes it to analyze test executables too. Tests -sometimes make use of functions that would otherwise appear to be dead -code, and public API functions reported as dead with -test indicate -possible gaps in your test coverage. Bear in mind that an Example test -function without an "Output:" comment is merely documentation: -it is dead code, and does not contribute coverage. - -The -filter flag restricts results to packages that match the provided -regular expression; its default value is the module name of the first -package. Use -filter= to display all results. - -Example: show all dead code within the gopls module: - - $ deadcode -test golang.org/x/tools/gopls/... 
- -The analysis can soundly analyze dynamic calls though func values, -interface methods, and reflection. However, it does not currently -understand the aliasing created by //go:linkname directives, so it -will fail to recognize that calls to a linkname-annotated function -with no body in fact dispatch to the function named in the annotation. -This may result in the latter function being spuriously reported as dead. - -By default, the tool does not report dead functions in generated files, -as determined by the special comment described in -https://go.dev/s/generatedcode. Use the -generated flag to include them. - -In any case, just because a function is reported as dead does not mean -it is unconditionally safe to delete it. For example, a dead function -may be referenced (by another dead function), and a dead method may be -required to satisfy an interface (that is never called). -Some judgement is required. - -The analysis is valid only for a single GOOS/GOARCH/-tags configuration, -so a function reported as dead may be live in a different configuration. -Consider running the tool once for each configuration of interest. -Consider using a line-oriented output format (see below) to make it -easier to compute the intersection of results across all runs. - -# Output - -The command supports three output formats. - -With no flags, the command prints dead functions grouped by package. - -With the -json flag, the command prints an array of Package -objects, as defined by the JSON schema (see below). - -With the -f=template flag, the command executes the specified template -on each Package record. So, this template produces a result similar to the -default format: - - -f='{{println .Path}}{{range .Funcs}}{{printf "\t%s\n" .RelName}}{{end}}{{println}}' - -And this template shows only the list of source positions of dead functions: - - -f='{{range .Funcs}}{{println .Posn}}{{end}}' - -# Why is a function not dead? 
- -The -whylive=function flag explain why the named function is not dead -by showing an arbitrary shortest path to it from one of the main functions. -(To enumerate the functions in a program, or for more sophisticated -call graph queries, use golang.org/x/tools/cmd/callgraph.) - -Fully static call paths are preferred over paths involving dynamic -edges, even if longer. Paths starting from a non-test package are -preferred over those from tests. Paths from main functions are -preferred over paths from init functions. - -The result is a list of Edge objects (see JSON schema below). -Again, the -json and -f=template flags may be used to control -the formatting of the list of Edge objects. -The default format shows, for each edge in the path, whether the call -is static or dynamic, and its source line number. For example: - - $ deadcode -whylive="(*bytes.Buffer).String" -test ./internal/cmd/deadcode/... - golang.org/x/tools/internal/cmd/deadcode.main - static@L0321 --> (*golang.org/x/tools/go/ssa.Function).RelString - static@L0428 --> (*golang.org/x/tools/go/ssa.Function).relMethod - static@L0452 --> golang.org/x/tools/go/ssa.relType - static@L0047 --> go/types.TypeString - static@L0051 --> (*bytes.Buffer).String - -# JSON schema - - type Package struct { - Path string // import path of package - Funcs []Function // list of dead functions within it - } - - type Function struct { - Name string // name (with package qualifier) - RelName string // name (sans package qualifier) - Posn Position // file/line/column of function declaration - Generated bool // function is declared in a generated .go file - } - - type Edge struct { - Initial string // initial entrypoint (main or init); first edge only - Kind string // = static | dynamic - Posn Position // file/line/column of call site - Callee string // target of the call - } - - type Position struct { - File string // name of file - Line, Col int // line and byte index, both 1-based - } - -THIS TOOL IS EXPERIMENTAL and its 
interface may change. -At some point it may be published at cmd/deadcode. -In the meantime, please give us feedback at github.com/golang/go/issues. -*/ -package main diff --git a/internal/cmd/deadcode/testdata/basic.txtar b/internal/cmd/deadcode/testdata/basic.txtar deleted file mode 100644 index b0b380a0ecf..00000000000 --- a/internal/cmd/deadcode/testdata/basic.txtar +++ /dev/null @@ -1,32 +0,0 @@ -# Test of basic functionality. - - deadcode -filter= example.com - - want "(T).Goodbye" -!want "(T).Hello" - want "unreferenced" - - want "Scanf" - want "Printf" -!want "Println" - --- go.mod -- -module example.com -go 1.18 - --- main.go -- -package main - -import "fmt" - -type T int - -func main() { - var x T - x.Hello() -} - -func (T) Hello() { fmt.Println("hello") } -func (T) Goodbye() { fmt.Println("goodbye") } - -func unreferenced() {} \ No newline at end of file diff --git a/internal/cmd/deadcode/testdata/generated.txtar b/internal/cmd/deadcode/testdata/generated.txtar deleted file mode 100644 index 4a50a6eb543..00000000000 --- a/internal/cmd/deadcode/testdata/generated.txtar +++ /dev/null @@ -1,28 +0,0 @@ -# Test of -generated flag output. - - deadcode example.com -!want "main" - want "Dead1" -!want "Dead2" - - deadcode -generated example.com -!want "main" - want "Dead1" - want "Dead2" - --- go.mod -- -module example.com -go 1.18 - --- main.go -- -package main - -func main() {} -func Dead1() {} - --- gen.go -- -// Code generated by hand. DO NOT EDIT. - -package main - -func Dead2() {} \ No newline at end of file diff --git a/internal/cmd/deadcode/testdata/jsonflag.txtar b/internal/cmd/deadcode/testdata/jsonflag.txtar deleted file mode 100644 index f0f3ab21bd0..00000000000 --- a/internal/cmd/deadcode/testdata/jsonflag.txtar +++ /dev/null @@ -1,21 +0,0 @@ -# Very minimal test of -json flag. 
- -deadcode -json example.com/p - - want `"Path": "example.com/p",` - want `"Name": "example.com/p.Dead",` - want `"RelName": "Dead",` - want `"Generated": false` - want `"Line": 5,` - want `"Col": 6` - --- go.mod -- -module example.com -go 1.18 - --- p/p.go -- -package main - -func main() {} - -func Dead() {} diff --git a/internal/cmd/deadcode/testdata/lineflag.txtar b/internal/cmd/deadcode/testdata/lineflag.txtar deleted file mode 100644 index 51940ad3274..00000000000 --- a/internal/cmd/deadcode/testdata/lineflag.txtar +++ /dev/null @@ -1,32 +0,0 @@ -# Test of line-oriented output. - - deadcode "-f={{range .Funcs}}{{println .Name}}{{end}}" -filter= example.com - - want "(example.com.T).Goodbye" -!want "(example.com.T).Hello" - want "example.com.unreferenced" - - want "fmt.Scanf" - want "fmt.Printf" -!want "fmt.Println" - --- go.mod -- -module example.com -go 1.18 - --- main.go -- -package main - -import "fmt" - -type T int - -func main() { - var x T - x.Hello() -} - -func (T) Hello() { fmt.Println("hello") } -func (T) Goodbye() { fmt.Println("goodbye") } - -func unreferenced() {} \ No newline at end of file diff --git a/internal/cmd/deadcode/testdata/whylive.txtar b/internal/cmd/deadcode/testdata/whylive.txtar deleted file mode 100644 index 9e7b0e6e4af..00000000000 --- a/internal/cmd/deadcode/testdata/whylive.txtar +++ /dev/null @@ -1,56 +0,0 @@ -# Test of -whylive flag. - -# The -whylive argument must be live. - -!deadcode -whylive=example.com.d example.com - want "function example.com.d is dead code" - -# A fully static path is preferred, even if longer. - - deadcode -whylive=example.com.c example.com - want " example.com.main" - want " static@L0004 --> example.com.a" - want " static@L0009 --> example.com.b" - want " static@L0012 --> example.com.c" - -# Dynamic edges are followed if necessary. -# (Note that main is preferred over init.) 
- - deadcode -whylive=example.com.f example.com - want " example.com.main" - want "dynamic@L0006 --> example.com.e" - want " static@L0017 --> example.com.f" - -# Degenerate case where target is itself a root. - -!deadcode -whylive=example.com.main example.com - want "example.com.main is a root" - --- go.mod -- -module example.com -go 1.18 - --- main.go -- -package main - -func main() { - a() - println(c, e) // c, e are address-taken - (func ())(nil)() // potential dynamic call to c, e -} -func a() { - b() -} -func b() { - c() -} -func c() -func d() -func e() { - f() -} -func f() - -func init() { - (func ())(nil)() // potential dynamic call to c, e -} \ No newline at end of file diff --git a/internal/compat/appendf.go b/internal/compat/appendf.go deleted file mode 100644 index 069d5171704..00000000000 --- a/internal/compat/appendf.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.19 - -package compat - -import "fmt" - -func Appendf(b []byte, format string, a ...interface{}) []byte { - return fmt.Appendf(b, format, a...) -} diff --git a/internal/compat/appendf_118.go b/internal/compat/appendf_118.go deleted file mode 100644 index 29af353cdaf..00000000000 --- a/internal/compat/appendf_118.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.19 - -package compat - -import "fmt" - -func Appendf(b []byte, format string, a ...interface{}) []byte { - return append(b, fmt.Sprintf(format, a...)...) -} diff --git a/internal/compat/doc.go b/internal/compat/doc.go deleted file mode 100644 index 59c667a37a2..00000000000 --- a/internal/compat/doc.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// The compat package implements API shims for backward compatibility at older -// Go versions. -package compat diff --git a/internal/diff/difftest/difftest_test.go b/internal/diff/difftest/difftest_test.go index c64a0fa0c9f..dcd92d7dfeb 100644 --- a/internal/diff/difftest/difftest_test.go +++ b/internal/diff/difftest/difftest_test.go @@ -64,11 +64,17 @@ func getDiffOutput(a, b string) (string, error) { } cmd := exec.Command("diff", "-u", fileA.Name(), fileB.Name()) cmd.Env = append(cmd.Env, "LANG=en_US.UTF-8") - out, err := cmd.CombinedOutput() + out, err := cmd.Output() if err != nil { - if _, ok := err.(*exec.ExitError); !ok { - return "", fmt.Errorf("failed to run diff -u %v %v: %v\n%v", fileA.Name(), fileB.Name(), err, string(out)) + exit, ok := err.(*exec.ExitError) + if !ok { + return "", fmt.Errorf("can't exec %s: %v", cmd, err) } + if len(out) == 0 { + // Nonzero exit with no output: terminated by signal? + return "", fmt.Errorf("%s failed: %v; stderr:\n%s", cmd, err, exit.Stderr) + } + // nonzero exit + output => files differ } diff := string(out) if len(diff) <= 0 { diff --git a/internal/diff/lcs/git.sh b/internal/diff/lcs/git.sh index 6856f843958..b25ba4aac74 100644 --- a/internal/diff/lcs/git.sh +++ b/internal/diff/lcs/git.sh @@ -14,9 +14,9 @@ set -eu # WARNING: This script will install the latest version of $file # The largest real source file in the x/tools repo. 
-# file=internal/lsp/source/completion/completion.go -# file=internal/lsp/source/diagnostics.go -file=internal/lsp/protocol/tsprotocol.go +# file=internal/golang/completion/completion.go +# file=internal/golang/diagnostics.go +file=internal/protocol/tsprotocol.go tmp=$(mktemp -d) git log $file | diff --git a/internal/diff/myers/diff.go b/internal/diff/myers/diff.go index c0f6cce504b..e11ed08047e 100644 --- a/internal/diff/myers/diff.go +++ b/internal/diff/myers/diff.go @@ -15,6 +15,15 @@ import ( // https://blog.jcoglan.com/2017/02/17/the-myers-diff-algorithm-part-3/ // https://www.codeproject.com/Articles/42279/%2FArticles%2F42279%2FInvestigating-Myers-diff-algorithm-Part-1-of-2 +// ComputeEdits returns the diffs of two strings using a simple +// line-based implementation, like [diff.Strings]. +// +// Deprecated: this implementation is moribund. However, when diffs +// appear in marker test expectations, they are the particular diffs +// produced by this implementation. The marker test framework +// asserts diff(orig, got)==wantDiff, but ideally it would compute +// got==apply(orig, wantDiff) so that the notation of the diff +// is immaterial. func ComputeEdits(before, after string) []diff.Edit { beforeLines := splitLines(before) ops := operations(beforeLines, splitLines(after)) diff --git a/internal/diff/ndiff.go b/internal/diff/ndiff.go index 050b08ded46..fbef4d730c5 100644 --- a/internal/diff/ndiff.go +++ b/internal/diff/ndiff.go @@ -18,7 +18,7 @@ func Strings(before, after string) []Edit { return nil // common case } - if stringIsASCII(before) && stringIsASCII(after) { + if isASCII(before) && isASCII(after) { // TODO(adonovan): opt: specialize diffASCII for strings. 
return diffASCII([]byte(before), []byte(after)) } @@ -32,7 +32,7 @@ func Bytes(before, after []byte) []Edit { return nil // common case } - if bytesIsASCII(before) && bytesIsASCII(after) { + if isASCII(before) && isASCII(after) { return diffASCII(before, after) } return diffRunes(runes(before), runes(after)) @@ -88,18 +88,8 @@ func runesLen(runes []rune) (len int) { return len } -// stringIsASCII reports whether s contains only ASCII. -// TODO(adonovan): combine when x/tools allows generics. -func stringIsASCII(s string) bool { - for i := 0; i < len(s); i++ { - if s[i] >= utf8.RuneSelf { - return false - } - } - return true -} - -func bytesIsASCII(s []byte) bool { +// isASCII reports whether s contains only ASCII. +func isASCII[S string | []byte](s S) bool { for i := 0; i < len(s); i++ { if s[i] >= utf8.RuneSelf { return false diff --git a/internal/event/keys/util.go b/internal/event/keys/util.go new file mode 100644 index 00000000000..c0e8e731c90 --- /dev/null +++ b/internal/event/keys/util.go @@ -0,0 +1,21 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package keys + +import ( + "sort" + "strings" +) + +// Join returns a canonical join of the keys in S: +// a sorted comma-separated string list. +func Join[S ~[]T, T ~string](s S) string { + strs := make([]string, 0, len(s)) + for _, v := range s { + strs = append(strs, string(v)) + } + sort.Strings(strs) + return strings.Join(strs, ",") +} diff --git a/internal/event/keys/util_test.go b/internal/event/keys/util_test.go new file mode 100644 index 00000000000..c3e285e3ba5 --- /dev/null +++ b/internal/event/keys/util_test.go @@ -0,0 +1,29 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package keys + +import "testing" + +func TestJoin(t *testing.T) { + type T string + type S []T + + tests := []struct { + data S + want string + }{ + {S{"a", "b", "c"}, "a,b,c"}, + {S{"b", "a", "c"}, "a,b,c"}, + {S{"c", "a", "b"}, "a,b,c"}, + {nil, ""}, + {S{}, ""}, + } + + for _, test := range tests { + if got := Join(test.data); got != test.want { + t.Errorf("Join(%v) = %q, want %q", test.data, got, test.want) + } + } +} diff --git a/internal/facts/facts_test.go b/internal/facts/facts_test.go index 4f1e8d60d55..daebea2ff59 100644 --- a/internal/facts/facts_test.go +++ b/internal/facts/facts_test.go @@ -18,9 +18,9 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/packages" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/facts" "golang.org/x/tools/internal/testenv" - "golang.org/x/tools/internal/typeparams" ) type myFact struct { @@ -250,9 +250,6 @@ func TestEncodeDecode(t *testing.T) { test := tests[i] t.Run(test.name, func(t *testing.T) { t.Parallel() - if test.typeparams && !typeparams.Enabled { - t.Skip("type parameters are not enabled") - } testEncodeDecode(t, test.files, test.plookups) }) } @@ -364,7 +361,7 @@ func find(p *types.Package, expr string) types.Object { if err != nil { return nil } - if n, ok := tv.Type.(*types.Named); ok { + if n, ok := aliases.Unalias(tv.Type).(*types.Named); ok { return n.Obj() } return nil @@ -444,9 +441,6 @@ func TestFactFilter(t *testing.T) { // happen when Analyzers have RunDespiteErrors set to true. So this // needs to robust, e.g. no infinite loops. 
func TestMalformed(t *testing.T) { - if !typeparams.Enabled { - t.Skip("type parameters are not enabled") - } var findPkg func(*types.Package, string) *types.Package findPkg = func(p *types.Package, name string) *types.Package { if p.Name() == name { diff --git a/internal/facts/imports.go b/internal/facts/imports.go index f64695ea520..9f706cd954f 100644 --- a/internal/facts/imports.go +++ b/internal/facts/imports.go @@ -7,7 +7,7 @@ package facts import ( "go/types" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/aliases" ) // importMap computes the import map for a package by traversing the @@ -47,6 +47,8 @@ func importMap(imports []*types.Package) map[string]*types.Package { addType = func(T types.Type) { switch T := T.(type) { + case *aliases.Alias: + addType(aliases.Unalias(T)) case *types.Basic: // nop case *types.Named: @@ -55,7 +57,7 @@ func importMap(imports []*types.Package) map[string]*types.Package { // infinite expansions: // type N[T any] struct { F *N[N[T]] } // importMap() is called on such types when Analyzer.RunDespiteErrors is true. 
- T = typeparams.NamedTypeOrigin(T) + T = T.Origin() if !typs[T] { typs[T] = true addObj(T.Obj()) @@ -63,12 +65,12 @@ func importMap(imports []*types.Package) map[string]*types.Package { for i := 0; i < T.NumMethods(); i++ { addObj(T.Method(i)) } - if tparams := typeparams.ForNamed(T); tparams != nil { + if tparams := T.TypeParams(); tparams != nil { for i := 0; i < tparams.Len(); i++ { addType(tparams.At(i)) } } - if targs := typeparams.NamedTypeArgs(T); targs != nil { + if targs := T.TypeArgs(); targs != nil { for i := 0; i < targs.Len(); i++ { addType(targs.At(i)) } @@ -88,7 +90,7 @@ func importMap(imports []*types.Package) map[string]*types.Package { case *types.Signature: addType(T.Params()) addType(T.Results()) - if tparams := typeparams.ForSignature(T); tparams != nil { + if tparams := T.TypeParams(); tparams != nil { for i := 0; i < tparams.Len(); i++ { addType(tparams.At(i)) } @@ -108,11 +110,11 @@ func importMap(imports []*types.Package) map[string]*types.Package { for i := 0; i < T.NumEmbeddeds(); i++ { addType(T.EmbeddedType(i)) // walk Embedded for implicits } - case *typeparams.Union: + case *types.Union: for i := 0; i < T.Len(); i++ { addType(T.Term(i).Type()) } - case *typeparams.TypeParam: + case *types.TypeParam: if !typs[T] { typs[T] = true addObj(T.Obj()) diff --git a/internal/gcimporter/bexport_test.go b/internal/gcimporter/bexport_test.go index 978c46e1932..1a2c8e8dd0a 100644 --- a/internal/gcimporter/bexport_test.go +++ b/internal/gcimporter/bexport_test.go @@ -18,8 +18,8 @@ import ( "strings" "testing" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/gcimporter" - "golang.org/x/tools/internal/typeparams" ) var isRace = false @@ -31,6 +31,8 @@ func fileLine(fset *token.FileSet, obj types.Object) string { } func equalType(x, y types.Type) error { + x = aliases.Unalias(x) + y = aliases.Unalias(y) if reflect.TypeOf(x) != reflect.TypeOf(y) { return fmt.Errorf("unequal kinds: %T vs %T", x, y) } @@ -143,10 +145,10 @@ func 
equalType(x, y types.Type) error { // return fmt.Errorf("receiver: %s", err) // } } - if err := equalTypeParams(typeparams.ForSignature(x), typeparams.ForSignature(y)); err != nil { + if err := equalTypeParams(x.TypeParams(), y.TypeParams()); err != nil { return fmt.Errorf("type params: %s", err) } - if err := equalTypeParams(typeparams.RecvTypeParams(x), typeparams.RecvTypeParams(y)); err != nil { + if err := equalTypeParams(x.RecvTypeParams(), y.RecvTypeParams()); err != nil { return fmt.Errorf("recv type params: %s", err) } case *types.Slice: @@ -184,8 +186,8 @@ func equalType(x, y types.Type) error { return fmt.Errorf("tuple element %d: %s", i, err) } } - case *typeparams.TypeParam: - y := y.(*typeparams.TypeParam) + case *types.TypeParam: + y := y.(*types.TypeParam) if x.String() != y.String() { return fmt.Errorf("unequal named types: %s vs %s", x, y) } @@ -209,15 +211,15 @@ func equalType(x, y types.Type) error { // cmpNamed compares two named types x and y, returning an error for any // discrepancies. It does not compare their underlying types. func cmpNamed(x, y *types.Named) error { - xOrig := typeparams.NamedTypeOrigin(x) - yOrig := typeparams.NamedTypeOrigin(y) + xOrig := x.Origin() + yOrig := y.Origin() if xOrig.String() != yOrig.String() { return fmt.Errorf("unequal named types: %s vs %s", x, y) } - if err := equalTypeParams(typeparams.ForNamed(x), typeparams.ForNamed(y)); err != nil { + if err := equalTypeParams(x.TypeParams(), y.TypeParams()); err != nil { return fmt.Errorf("type parameters: %s", err) } - if err := equalTypeArgs(typeparams.NamedTypeArgs(x), typeparams.NamedTypeArgs(y)); err != nil { + if err := equalTypeArgs(x.TypeArgs(), y.TypeArgs()); err != nil { return fmt.Errorf("type arguments: %s", err) } if x.NumMethods() != y.NumMethods() { @@ -252,7 +254,7 @@ func cmpNamed(x, y *types.Named) error { // makeExplicit returns an explicit version of typ, if typ is an implicit // interface. Otherwise it returns typ unmodified. 
func makeExplicit(typ types.Type) types.Type { - if iface, _ := typ.(*types.Interface); iface != nil && typeparams.IsImplicit(iface) { + if iface, _ := typ.(*types.Interface); iface != nil && iface.IsImplicit() { var methods []*types.Func for i := 0; i < iface.NumExplicitMethods(); i++ { methods = append(methods, iface.Method(i)) @@ -266,7 +268,7 @@ func makeExplicit(typ types.Type) types.Type { return typ } -func equalTypeArgs(x, y *typeparams.TypeList) error { +func equalTypeArgs(x, y *types.TypeList) error { if x.Len() != y.Len() { return fmt.Errorf("unequal lengths: %d vs %d", x.Len(), y.Len()) } @@ -278,7 +280,7 @@ func equalTypeArgs(x, y *typeparams.TypeList) error { return nil } -func equalTypeParams(x, y *typeparams.TypeParamList) error { +func equalTypeParams(x, y *types.TypeParamList) error { if x.Len() != y.Len() { return fmt.Errorf("unequal lengths: %d vs %d", x.Len(), y.Len()) } diff --git a/internal/gcimporter/gcimporter.go b/internal/gcimporter/gcimporter.go index 2d078ccb19c..39df91124a4 100644 --- a/internal/gcimporter/gcimporter.go +++ b/internal/gcimporter/gcimporter.go @@ -259,13 +259,6 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func return } -func deref(typ types.Type) types.Type { - if p, _ := typ.(*types.Pointer); p != nil { - return p.Elem() - } - return typ -} - type byPath []*types.Package func (a byPath) Len() int { return len(a) } diff --git a/internal/gcimporter/gcimporter_test.go b/internal/gcimporter/gcimporter_test.go index 3af088b23d8..95cc36c4d96 100644 --- a/internal/gcimporter/gcimporter_test.go +++ b/internal/gcimporter/gcimporter_test.go @@ -11,7 +11,6 @@ import ( "bytes" "fmt" "go/ast" - "go/build" "go/constant" goimporter "go/importer" goparser "go/parser" @@ -28,6 +27,7 @@ import ( "testing" "time" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/goroot" "golang.org/x/tools/internal/testenv" ) @@ -84,8 +84,7 @@ func compilePkg(t *testing.T, dirname, filename, outdirname 
string, packagefiles importreldir := strings.ReplaceAll(outdirname, string(os.PathSeparator), "/") cmd := exec.Command("go", "tool", "compile", "-p", pkg, "-D", importreldir, "-importcfg", importcfgfile, "-o", outname, filename) cmd.Dir = dirname - out, err := cmd.CombinedOutput() - if err != nil { + if out, err := cmd.CombinedOutput(); err != nil { t.Logf("%s", out) t.Fatalf("go tool compile %s failed: %s", filename, err) } @@ -165,8 +164,7 @@ func TestImportTypeparamTests(t *testing.T) { t.Skipf("in short mode, skipping test that requires export data for all of std") } - testenv.NeedsGo1Point(t, 18) // requires generics - testenv.NeedsGoBuild(t) // to find stdlib export data in the build cache + testenv.NeedsGoBuild(t) // to find stdlib export data in the build cache // This package only handles gc export data. if runtime.Compiler != "gc" { @@ -406,18 +404,6 @@ var importedObjectTests = []struct { {"go/types.Type", "type Type interface{String() string; Underlying() Type}"}, } -// TODO(rsc): Delete this init func after x/tools no longer needs to test successfully with Go 1.17. -func init() { - if build.Default.ReleaseTags[len(build.Default.ReleaseTags)-1] <= "go1.17" { - for i := range importedObjectTests { - if importedObjectTests[i].name == "context.Context" { - // Expand any to interface{}. - importedObjectTests[i].want = "type Context interface{Deadline() (deadline time.Time, ok bool); Done() <-chan struct{}; Err() error; Value(key interface{}) interface{}}" - } - } - } -} - func TestImportedTypes(t *testing.T) { // This package only handles gc export data. 
needsCompiler(t, "gc") @@ -439,7 +425,7 @@ func TestImportedTypes(t *testing.T) { t.Errorf("%s: got %q; want %q", test.name, got, test.want) } - if named, _ := obj.Type().(*types.Named); named != nil { + if named, _ := aliases.Unalias(obj.Type()).(*types.Named); named != nil { verifyInterfaceMethodRecvs(t, named, 0) } } @@ -522,7 +508,7 @@ func verifyInterfaceMethodRecvs(t *testing.T, named *types.Named, level int) { // check embedded interfaces (if they are named, too) for i := 0; i < iface.NumEmbeddeds(); i++ { // embedding of interfaces cannot have cycles; recursion will terminate - if etype, _ := iface.EmbeddedType(i).(*types.Named); etype != nil { + if etype, _ := aliases.Unalias(iface.EmbeddedType(i)).(*types.Named); etype != nil { verifyInterfaceMethodRecvs(t, etype, level+1) } } @@ -542,7 +528,7 @@ func TestIssue5815(t *testing.T) { t.Errorf("no pkg for %s", obj) } if tname, _ := obj.(*types.TypeName); tname != nil { - named := tname.Type().(*types.Named) + named := aliases.Unalias(tname.Type()).(*types.Named) for i := 0; i < named.NumMethods(); i++ { m := named.Method(i) if m.Pkg() == nil { @@ -642,7 +628,7 @@ func TestIssue13898(t *testing.T) { // look for go/types.Object type obj := lookupObj(t, goTypesPkg.Scope(), "Object") - typ, ok := obj.Type().(*types.Named) + typ, ok := aliases.Unalias(obj.Type()).(*types.Named) if !ok { t.Fatalf("go/types.Object type is %v; wanted named type", typ) } @@ -739,8 +725,6 @@ func TestIssue25301(t *testing.T) { } func TestIssue51836(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // requires generics - // This package only handles gc export data. 
needsCompiler(t, "gc") @@ -770,8 +754,6 @@ func TestIssue51836(t *testing.T) { } func TestIssue61561(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // requires generics - const src = `package p type I[P any] interface { @@ -836,8 +818,6 @@ type K = StillBad[string] } func TestIssue57015(t *testing.T) { - testenv.NeedsGo1Point(t, 18) // requires generics - // This package only handles gc export data. needsCompiler(t, "gc") diff --git a/internal/gcimporter/iexport.go b/internal/gcimporter/iexport.go index 6103dd7102b..683bd7395a6 100644 --- a/internal/gcimporter/iexport.go +++ b/internal/gcimporter/iexport.go @@ -21,10 +21,11 @@ import ( "sort" "strconv" "strings" + "unsafe" "golang.org/x/tools/go/types/objectpath" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/tokeninternal" - "golang.org/x/tools/internal/typeparams" ) // IExportShallow encodes "shallow" export data for the specified package. @@ -464,7 +465,7 @@ func (p *iexporter) doDecl(obj types.Object) { switch obj := obj.(type) { case *types.Var: - w.tag('V') + w.tag(varTag) w.pos(obj.Pos()) w.typ(obj.Type(), obj.Pkg()) @@ -481,10 +482,10 @@ func (p *iexporter) doDecl(obj types.Object) { } // Function. - if typeparams.ForSignature(sig).Len() == 0 { - w.tag('F') + if sig.TypeParams().Len() == 0 { + w.tag(funcTag) } else { - w.tag('G') + w.tag(genericFuncTag) } w.pos(obj.Pos()) // The tparam list of the function type is the declaration of the type @@ -494,27 +495,27 @@ func (p *iexporter) doDecl(obj types.Object) { // // While importing the type parameters, tparamList computes and records // their export name, so that it can be later used when writing the index. 
- if tparams := typeparams.ForSignature(sig); tparams.Len() > 0 { + if tparams := sig.TypeParams(); tparams.Len() > 0 { w.tparamList(obj.Name(), tparams, obj.Pkg()) } w.signature(sig) case *types.Const: - w.tag('C') + w.tag(constTag) w.pos(obj.Pos()) w.value(obj.Type(), obj.Val()) case *types.TypeName: t := obj.Type() - if tparam, ok := t.(*typeparams.TypeParam); ok { - w.tag('P') + if tparam, ok := aliases.Unalias(t).(*types.TypeParam); ok { + w.tag(typeParamTag) w.pos(obj.Pos()) constraint := tparam.Constraint() if p.version >= iexportVersionGo1_18 { implicit := false - if iface, _ := constraint.(*types.Interface); iface != nil { - implicit = typeparams.IsImplicit(iface) + if iface, _ := aliases.Unalias(constraint).(*types.Interface); iface != nil { + implicit = iface.IsImplicit() } w.bool(implicit) } @@ -523,8 +524,13 @@ func (p *iexporter) doDecl(obj types.Object) { } if obj.IsAlias() { - w.tag('A') + w.tag(aliasTag) w.pos(obj.Pos()) + if alias, ok := t.(*aliases.Alias); ok { + // Preserve materialized aliases, + // even of non-exported types. + t = aliasRHS(alias) + } w.typ(t, obj.Pkg()) break } @@ -535,20 +541,20 @@ func (p *iexporter) doDecl(obj types.Object) { panic(internalErrorf("%s is not a defined type", t)) } - if typeparams.ForNamed(named).Len() == 0 { - w.tag('T') + if named.TypeParams().Len() == 0 { + w.tag(typeTag) } else { - w.tag('U') + w.tag(genericTypeTag) } w.pos(obj.Pos()) - if typeparams.ForNamed(named).Len() > 0 { + if named.TypeParams().Len() > 0 { // While importing the type parameters, tparamList computes and records // their export name, so that it can be later used when writing the index. 
- w.tparamList(obj.Name(), typeparams.ForNamed(named), obj.Pkg()) + w.tparamList(obj.Name(), named.TypeParams(), obj.Pkg()) } - underlying := obj.Type().Underlying() + underlying := named.Underlying() w.typ(underlying, obj.Pkg()) if types.IsInterface(t) { @@ -565,7 +571,7 @@ func (p *iexporter) doDecl(obj types.Object) { // Receiver type parameters are type arguments of the receiver type, so // their name must be qualified before exporting recv. - if rparams := typeparams.RecvTypeParams(sig); rparams.Len() > 0 { + if rparams := sig.RecvTypeParams(); rparams.Len() > 0 { prefix := obj.Name() + "." + m.Name() for i := 0; i < rparams.Len(); i++ { rparam := rparams.At(i) @@ -739,20 +745,25 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { }() } switch t := t.(type) { + case *aliases.Alias: + // TODO(adonovan): support parameterized aliases, following *types.Named. + w.startType(aliasType) + w.qualifiedType(t.Obj()) + case *types.Named: - if targs := typeparams.NamedTypeArgs(t); targs.Len() > 0 { + if targs := t.TypeArgs(); targs.Len() > 0 { w.startType(instanceType) // TODO(rfindley): investigate if this position is correct, and if it // matters. 
w.pos(t.Obj().Pos()) w.typeList(targs, pkg) - w.typ(typeparams.NamedTypeOrigin(t), pkg) + w.typ(t.Origin(), pkg) return } w.startType(definedType) w.qualifiedType(t.Obj()) - case *typeparams.TypeParam: + case *types.TypeParam: w.startType(typeParamType) w.qualifiedType(t.Obj()) @@ -844,7 +855,7 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { for i := 0; i < n; i++ { ft := t.EmbeddedType(i) tPkg := pkg - if named, _ := ft.(*types.Named); named != nil { + if named, _ := aliases.Unalias(ft).(*types.Named); named != nil { w.pos(named.Obj().Pos()) } else { w.pos(token.NoPos) @@ -868,7 +879,7 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { w.signature(sig) } - case *typeparams.Union: + case *types.Union: w.startType(unionType) nt := t.Len() w.uint64(uint64(nt)) @@ -948,14 +959,14 @@ func (w *exportWriter) signature(sig *types.Signature) { } } -func (w *exportWriter) typeList(ts *typeparams.TypeList, pkg *types.Package) { +func (w *exportWriter) typeList(ts *types.TypeList, pkg *types.Package) { w.uint64(uint64(ts.Len())) for i := 0; i < ts.Len(); i++ { w.typ(ts.At(i), pkg) } } -func (w *exportWriter) tparamList(prefix string, list *typeparams.TypeParamList, pkg *types.Package) { +func (w *exportWriter) tparamList(prefix string, list *types.TypeParamList, pkg *types.Package) { ll := uint64(list.Len()) w.uint64(ll) for i := 0; i < list.Len(); i++ { @@ -973,7 +984,7 @@ const blankMarker = "$" // differs from its actual object name: it is prefixed with a qualifier, and // blank type parameter names are disambiguated by their index in the type // parameter list. 
-func tparamExportName(prefix string, tparam *typeparams.TypeParam) string { +func tparamExportName(prefix string, tparam *types.TypeParam) string { assert(prefix != "") name := tparam.Obj().Name() if name == "_" { @@ -1320,3 +1331,19 @@ func (e internalError) Error() string { return "gcimporter: " + string(e) } func internalErrorf(format string, args ...interface{}) error { return internalError(fmt.Sprintf(format, args...)) } + +// aliasRHS removes exactly one Alias constructor. +func aliasRHS(alias *aliases.Alias) types.Type { + // TODO(adonovan): if proposal #66559 is accepted, this will + // become Alias.RHS(alias). In the meantime, we must punch + // through the drywall. + type go123Alias struct { + _ *types.TypeName + _ *types.TypeParamList + RHS types.Type + _ types.Type + } + var raw *go123Alias + *(**aliases.Alias)(unsafe.Pointer(&raw)) = alias + return raw.RHS +} diff --git a/internal/gcimporter/iexport_go118_test.go b/internal/gcimporter/iexport_go118_test.go index 134c231f8c1..c748fb36165 100644 --- a/internal/gcimporter/iexport_go118_test.go +++ b/internal/gcimporter/iexport_go118_test.go @@ -2,11 +2,10 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - package gcimporter_test +// This file defines test of generics features introduce in go1.18. 
+ import ( "bytes" "fmt" diff --git a/internal/gcimporter/iexport_test.go b/internal/gcimporter/iexport_test.go index 4ee79dac9d0..0da2599d531 100644 --- a/internal/gcimporter/iexport_test.go +++ b/internal/gcimporter/iexport_test.go @@ -32,6 +32,7 @@ import ( "golang.org/x/tools/go/buildutil" "golang.org/x/tools/go/gcexportdata" "golang.org/x/tools/go/loader" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/gcimporter" "golang.org/x/tools/internal/testenv" "golang.org/x/tools/internal/typeparams/genericfeatures" @@ -145,6 +146,7 @@ type UnknownType undefined t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want) } + // TODO(adonovan): opt: parallelize this slow loop. for _, pkg := range sorted { if exportdata, err := iexport(conf.Fset, version, pkg); err != nil { t.Error(err) @@ -333,14 +335,15 @@ func cmpObj(x, y types.Object) error { if xalias, yalias := x.IsAlias(), y.(*types.TypeName).IsAlias(); xalias != yalias { return fmt.Errorf("mismatching IsAlias(): %s vs %s", x, y) } + // equalType does not recurse into the underlying types of named types, so // we must pass the underlying type explicitly here. However, in doing this // we may skip checking the features of the named types themselves, in // situations where the type name is not referenced by the underlying or // any other top-level declarations. Therefore, we must explicitly compare // named types here, before passing their underlying types into equalType. 
- xn, _ := xt.(*types.Named) - yn, _ := yt.(*types.Named) + xn, _ := aliases.Unalias(xt).(*types.Named) + yn, _ := aliases.Unalias(yt).(*types.Named) if (xn == nil) != (yn == nil) { return fmt.Errorf("mismatching types: %T vs %T", xt, yt) } diff --git a/internal/gcimporter/iimport.go b/internal/gcimporter/iimport.go index 8e64cf644fc..2732121b5ef 100644 --- a/internal/gcimporter/iimport.go +++ b/internal/gcimporter/iimport.go @@ -22,7 +22,8 @@ import ( "strings" "golang.org/x/tools/go/types/objectpath" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/typesinternal" ) type intReader struct { @@ -79,6 +80,20 @@ const ( typeParamType instanceType unionType + aliasType +) + +// Object tags +const ( + varTag = 'V' + funcTag = 'F' + genericFuncTag = 'G' + constTag = 'C' + aliasTag = 'A' + genericAliasTag = 'B' + typeParamTag = 'P' + typeTag = 'T' + genericTypeTag = 'U' ) // IImportData imports a package from the serialized package data @@ -225,6 +240,7 @@ func iimportCommon(fset *token.FileSet, getPackages GetPackagesFunc, data []byte // Gather the relevant packages from the manifest. items := make([]GetPackagesItem, r.uint64()) + uniquePkgPaths := make(map[string]bool) for i := range items { pkgPathOff := r.uint64() pkgPath := p.stringAt(pkgPathOff) @@ -249,6 +265,12 @@ func iimportCommon(fset *token.FileSet, getPackages GetPackagesFunc, data []byte } items[i].nameIndex = nameIndex + + uniquePkgPaths[pkgPath] = true + } + // Debugging #63822; hypothesis: there are duplicate PkgPaths. + if len(uniquePkgPaths) != len(items) { + reportf("found duplicate PkgPaths while reading export data manifest: %v", items) } // Request packages all at once from the client, @@ -316,12 +338,12 @@ func iimportCommon(fset *token.FileSet, getPackages GetPackagesFunc, data []byte } // SetConstraint can't be called if the constraint type is not yet complete. 
- // When type params are created in the 'P' case of (*importReader).obj(), + // When type params are created in the typeParamTag case of (*importReader).obj(), // the associated constraint type may not be complete due to recursion. // Therefore, we defer calling SetConstraint there, and call it here instead // after all types are complete. for _, d := range p.later { - typeparams.SetTypeParamConstraint(d.t, d.constraint) + d.t.SetConstraint(d.constraint) } for _, typ := range p.interfaceList { @@ -339,7 +361,7 @@ func iimportCommon(fset *token.FileSet, getPackages GetPackagesFunc, data []byte } type setConstraintArgs struct { - t *typeparams.TypeParam + t *types.TypeParam constraint types.Type } @@ -516,7 +538,7 @@ func canReuse(def *types.Named, rhs types.Type) bool { if def == nil { return true } - iface, _ := rhs.(*types.Interface) + iface, _ := aliases.Unalias(rhs).(*types.Interface) if iface == nil { return true } @@ -538,25 +560,29 @@ func (r *importReader) obj(name string) { pos := r.pos() switch tag { - case 'A': + case aliasTag: typ := r.typ() - - r.declare(types.NewTypeName(pos, r.currPkg, name, typ)) - - case 'C': + // TODO(adonovan): support generic aliases: + // if tag == genericAliasTag { + // tparams := r.tparamList() + // alias.SetTypeParams(tparams) + // } + r.declare(aliases.NewAlias(pos, r.currPkg, name, typ)) + + case constTag: typ, val := r.value() r.declare(types.NewConst(pos, r.currPkg, name, typ, val)) - case 'F', 'G': - var tparams []*typeparams.TypeParam - if tag == 'G' { + case funcTag, genericFuncTag: + var tparams []*types.TypeParam + if tag == genericFuncTag { tparams = r.tparamList() } sig := r.signature(nil, nil, tparams) r.declare(types.NewFunc(pos, r.currPkg, name, sig)) - case 'T', 'U': + case typeTag, genericTypeTag: // Types can be recursive. We need to setup a stub // declaration before recursing. 
obj := types.NewTypeName(pos, r.currPkg, name, nil) @@ -564,9 +590,9 @@ func (r *importReader) obj(name string) { // Declare obj before calling r.tparamList, so the new type name is recognized // if used in the constraint of one of its own typeparams (see #48280). r.declare(obj) - if tag == 'U' { + if tag == genericTypeTag { tparams := r.tparamList() - typeparams.SetForNamed(named, tparams) + named.SetTypeParams(tparams) } underlying := r.p.typAt(r.uint64(), named).Underlying() @@ -581,14 +607,13 @@ func (r *importReader) obj(name string) { // If the receiver has any targs, set those as the // rparams of the method (since those are the // typeparams being used in the method sig/body). - base := baseType(recv.Type()) - assert(base != nil) - targs := typeparams.NamedTypeArgs(base) - var rparams []*typeparams.TypeParam + _, recvNamed := typesinternal.ReceiverNamed(recv) + targs := recvNamed.TypeArgs() + var rparams []*types.TypeParam if targs.Len() > 0 { - rparams = make([]*typeparams.TypeParam, targs.Len()) + rparams = make([]*types.TypeParam, targs.Len()) for i := range rparams { - rparams[i] = targs.At(i).(*typeparams.TypeParam) + rparams[i] = aliases.Unalias(targs.At(i)).(*types.TypeParam) } } msig := r.signature(recv, rparams, nil) @@ -597,7 +622,7 @@ func (r *importReader) obj(name string) { } } - case 'P': + case typeParamTag: // We need to "declare" a typeparam in order to have a name that // can be referenced recursively (if needed) in the type param's // bound. @@ -606,7 +631,7 @@ func (r *importReader) obj(name string) { } name0 := tparamName(name) tn := types.NewTypeName(pos, r.currPkg, name0, nil) - t := typeparams.NewTypeParam(tn, nil) + t := types.NewTypeParam(tn, nil) // To handle recursive references to the typeparam within its // bound, save the partial type in tparamIndex before reading the bounds. 
@@ -618,11 +643,11 @@ func (r *importReader) obj(name string) { } constraint := r.typ() if implicit { - iface, _ := constraint.(*types.Interface) + iface, _ := aliases.Unalias(constraint).(*types.Interface) if iface == nil { errorf("non-interface constraint marked implicit") } - typeparams.MarkImplicit(iface) + iface.MarkImplicit() } // The constraint type may not be complete, if we // are in the middle of a type recursion involving type @@ -630,7 +655,7 @@ func (r *importReader) obj(name string) { // completely set up all types in ImportData. r.p.later = append(r.p.later, setConstraintArgs{t: t, constraint: constraint}) - case 'V': + case varTag: typ := r.typ() r.declare(types.NewVar(pos, r.currPkg, name, typ)) @@ -825,7 +850,7 @@ func (r *importReader) typ() types.Type { } func isInterface(t types.Type) bool { - _, ok := t.(*types.Interface) + _, ok := aliases.Unalias(t).(*types.Interface) return ok } @@ -847,7 +872,7 @@ func (r *importReader) doType(base *types.Named) (res types.Type) { errorf("unexpected kind tag in %q: %v", r.p.ipath, k) return nil - case definedType: + case aliasType, definedType: pkg, name := r.qualifiedIdent() r.p.doDecl(pkg, name) return pkg.Scope().Lookup(name).(*types.TypeName).Type() @@ -966,7 +991,7 @@ func (r *importReader) doType(base *types.Named) (res types.Type) { // The imported instantiated type doesn't include any methods, so // we must always use the methods of the base (orig) type. // TODO provide a non-nil *Environment - t, _ := typeparams.Instantiate(nil, baseType, targs, false) + t, _ := types.Instantiate(nil, baseType, targs, false) // Workaround for golang/go#61561. See the doc for instanceList for details. 
r.p.instanceList = append(r.p.instanceList, t) @@ -976,11 +1001,11 @@ func (r *importReader) doType(base *types.Named) (res types.Type) { if r.p.version < iexportVersionGenerics { errorf("unexpected instantiation type") } - terms := make([]*typeparams.Term, r.uint64()) + terms := make([]*types.Term, r.uint64()) for i := range terms { - terms[i] = typeparams.NewTerm(r.bool(), r.typ()) + terms[i] = types.NewTerm(r.bool(), r.typ()) } - return typeparams.NewUnion(terms) + return types.NewUnion(terms) } } @@ -1008,23 +1033,23 @@ func (r *importReader) objectPathObject() types.Object { return obj } -func (r *importReader) signature(recv *types.Var, rparams []*typeparams.TypeParam, tparams []*typeparams.TypeParam) *types.Signature { +func (r *importReader) signature(recv *types.Var, rparams []*types.TypeParam, tparams []*types.TypeParam) *types.Signature { params := r.paramList() results := r.paramList() variadic := params.Len() > 0 && r.bool() - return typeparams.NewSignatureType(recv, rparams, tparams, params, results, variadic) + return types.NewSignatureType(recv, rparams, tparams, params, results, variadic) } -func (r *importReader) tparamList() []*typeparams.TypeParam { +func (r *importReader) tparamList() []*types.TypeParam { n := r.uint64() if n == 0 { return nil } - xs := make([]*typeparams.TypeParam, n) + xs := make([]*types.TypeParam, n) for i := range xs { // Note: the standard library importer is tolerant of nil types here, // though would panic in SetTypeParams. 
- xs[i] = r.typ().(*typeparams.TypeParam) + xs[i] = aliases.Unalias(r.typ()).(*types.TypeParam) } return xs } @@ -1071,13 +1096,3 @@ func (r *importReader) byte() byte { } return x } - -func baseType(typ types.Type) *types.Named { - // pointer receivers are never types.Named types - if p, _ := typ.(*types.Pointer); p != nil { - typ = p.Elem() - } - // receiver base types are always (possibly generic) types.Named types - n, _ := typ.(*types.Named) - return n -} diff --git a/internal/gcimporter/main.go b/internal/gcimporter/main.go new file mode 100644 index 00000000000..4a4ddd2843a --- /dev/null +++ b/internal/gcimporter/main.go @@ -0,0 +1,117 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build ignore + +// The gcimporter command reads the compiler's export data for the +// named packages and prints the decoded type information. +// +// It is provided for debugging export data problems. +package main + +import ( + "bytes" + "flag" + "fmt" + "go/token" + "go/types" + "log" + "os" + "sort" + + "golang.org/x/tools/go/gcexportdata" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/gcimporter" +) + +func main() { + flag.Parse() + cfg := &packages.Config{ + Fset: token.NewFileSet(), + // Don't request NeedTypes: we want to be certain that + // we loaded the types ourselves, from export data. + Mode: packages.NeedName | packages.NeedExportFile, + } + pkgs, err := packages.Load(cfg, flag.Args()...) + if err != nil { + log.Fatal(err) + } + if packages.PrintErrors(pkgs) > 0 { + os.Exit(1) + } + + for _, pkg := range pkgs { + // Read types from compiler's unified export data file. + // This Package may included non-exported functions if they + // are called by inlinable exported functions. 
+ var tpkg1 *types.Package + { + export, err := os.ReadFile(pkg.ExportFile) + if err != nil { + log.Fatalf("can't read %q export data: %v", pkg.PkgPath, err) + } + r, err := gcexportdata.NewReader(bytes.NewReader(export)) + if err != nil { + log.Fatalf("reading export data %s: %v", pkg.ExportFile, err) + } + tpkg1, err = gcexportdata.Read(r, cfg.Fset, make(map[string]*types.Package), pkg.PkgPath) + if err != nil { + log.Fatalf("decoding export data: %v", err) + } + } + fmt.Println("# Read from compiler's unified export data:") + printPackage(tpkg1) + + // Now reexport as indexed (deep) export data, and reimport. + // The Package will contain only exported symbols. + var tpkg2 *types.Package + { + var out bytes.Buffer + if err := gcimporter.IExportData(&out, cfg.Fset, tpkg1); err != nil { + log.Fatal(err) + } + var err error + _, tpkg2, err = gcimporter.IImportData(cfg.Fset, make(map[string]*types.Package), out.Bytes(), tpkg1.Path()) + if err != nil { + log.Fatal(err) + } + } + fmt.Println("# After round-tripping through indexed export data:") + printPackage(tpkg2) + } +} + +func printPackage(pkg *types.Package) { + fmt.Printf("package %s %q\n", pkg.Name(), pkg.Path()) + + if !pkg.Complete() { + fmt.Printf("\thas incomplete exported type info\n") + } + + // imports + var lines []string + for _, imp := range pkg.Imports() { + lines = append(lines, fmt.Sprintf("\timport %q", imp.Path())) + } + sort.Strings(lines) + for _, line := range lines { + fmt.Println(line) + } + + // types of package members + qual := types.RelativeTo(pkg) + scope := pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + fmt.Printf("\t%s\n", types.ObjectString(obj, qual)) + if _, ok := obj.(*types.TypeName); ok { + for _, meth := range typeutil.IntuitiveMethodSet(obj.Type(), nil) { + fmt.Printf("\t%s\n", types.SelectionString(meth, qual)) + } + } + } + + fmt.Println() +} diff --git a/internal/gcimporter/support_go117.go b/internal/gcimporter/support_go117.go deleted 
file mode 100644 index d892273efb6..00000000000 --- a/internal/gcimporter/support_go117.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.18 -// +build !go1.18 - -package gcimporter - -import "go/types" - -const iexportVersion = iexportVersionGo1_11 - -func additionalPredeclared() []types.Type { - return nil -} diff --git a/internal/gcimporter/support_go118.go b/internal/gcimporter/support_go118.go index edbe6ea7041..0cd3b91b65a 100644 --- a/internal/gcimporter/support_go118.go +++ b/internal/gcimporter/support_go118.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.18 -// +build go1.18 - package gcimporter import "go/types" diff --git a/internal/gcimporter/unified_no.go b/internal/gcimporter/unified_no.go index 286bf445483..38b624cadab 100644 --- a/internal/gcimporter/unified_no.go +++ b/internal/gcimporter/unified_no.go @@ -2,8 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build !(go1.18 && goexperiment.unified) -// +build !go1.18 !goexperiment.unified +//go:build !goexperiment.unified +// +build !goexperiment.unified package gcimporter diff --git a/internal/gcimporter/unified_yes.go b/internal/gcimporter/unified_yes.go index b5d69ffbe68..b5118d0b3a5 100644 --- a/internal/gcimporter/unified_yes.go +++ b/internal/gcimporter/unified_yes.go @@ -2,8 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build go1.18 && goexperiment.unified -// +build go1.18,goexperiment.unified +//go:build goexperiment.unified +// +build goexperiment.unified package gcimporter diff --git a/internal/gcimporter/ureader_no.go b/internal/gcimporter/ureader_no.go deleted file mode 100644 index 8eb20729c2a..00000000000 --- a/internal/gcimporter/ureader_no.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.18 -// +build !go1.18 - -package gcimporter - -import ( - "fmt" - "go/token" - "go/types" -) - -func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { - err = fmt.Errorf("go/tools compiled with a Go version earlier than 1.18 cannot read unified IR export data") - return -} diff --git a/internal/gcimporter/ureader_yes.go b/internal/gcimporter/ureader_yes.go index b977435f626..b3be452ae8a 100644 --- a/internal/gcimporter/ureader_yes.go +++ b/internal/gcimporter/ureader_yes.go @@ -4,9 +4,6 @@ // Derived from go/internal/gcimporter/ureader.go -//go:build go1.18 -// +build go1.18 - package gcimporter import ( @@ -16,6 +13,7 @@ import ( "sort" "strings" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/pkgbits" ) @@ -526,7 +524,7 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) { case pkgbits.ObjAlias: pos := r.pos() typ := r.typ() - declare(types.NewTypeName(pos, objPkg, objName, typ)) + declare(aliases.NewAlias(pos, objPkg, objName, typ)) case pkgbits.ObjConst: pos := r.pos() @@ -553,7 +551,7 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) { // If the underlying type is an interface, we need to // duplicate its methods so we can replace the receiver // parameter's type (#49906). 
- if iface, ok := underlying.(*types.Interface); ok && iface.NumExplicitMethods() != 0 { + if iface, ok := aliases.Unalias(underlying).(*types.Interface); ok && iface.NumExplicitMethods() != 0 { methods := make([]*types.Func, iface.NumExplicitMethods()) for i := range methods { fn := iface.ExplicitMethod(i) diff --git a/internal/gocommand/invoke.go b/internal/gocommand/invoke.go index c27b91f8c7e..f7de3c8283b 100644 --- a/internal/gocommand/invoke.go +++ b/internal/gocommand/invoke.go @@ -13,6 +13,7 @@ import ( "io" "log" "os" + "os/exec" "reflect" "regexp" "runtime" @@ -21,8 +22,6 @@ import ( "sync" "time" - exec "golang.org/x/sys/execabs" - "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/event/keys" "golang.org/x/tools/internal/event/label" @@ -159,12 +158,15 @@ type Invocation struct { BuildFlags []string // If ModFlag is set, the go command is invoked with -mod=ModFlag. + // TODO(rfindley): remove, in favor of Args. ModFlag string // If ModFile is set, the go command is invoked with -modfile=ModFile. + // TODO(rfindley): remove, in favor of Args. ModFile string // If Overlay is set, the go command is invoked with -overlay=Overlay. + // TODO(rfindley): remove, in favor of Args. Overlay string // If CleanEnv is set, the invocation will run only with the environment diff --git a/internal/gopathwalk/walk.go b/internal/gopathwalk/walk.go index f79dd8cc3f5..8361515519f 100644 --- a/internal/gopathwalk/walk.go +++ b/internal/gopathwalk/walk.go @@ -9,11 +9,13 @@ package gopathwalk import ( "bufio" "bytes" + "io" "io/fs" - "log" "os" "path/filepath" + "runtime" "strings" + "sync" "time" ) @@ -21,8 +23,13 @@ import ( type Options struct { // If Logf is non-nil, debug logging is enabled through this function. Logf func(format string, args ...interface{}) + // Search module caches. Also disables legacy goimports ignore rules. ModulesEnabled bool + + // Maximum number of concurrent calls to user-provided callbacks, + // or 0 for GOMAXPROCS. 
+ Concurrency int } // RootType indicates the type of a Root. @@ -43,22 +50,28 @@ type Root struct { Type RootType } -// Walk walks Go source directories ($GOROOT, $GOPATH, etc) to find packages. -// For each package found, add will be called (concurrently) with the absolute +// Walk concurrently walks Go source directories ($GOROOT, $GOPATH, etc) to find packages. +// +// For each package found, add will be called with the absolute // paths of the containing source directory and the package directory. -// add will be called concurrently. +// +// Unlike filepath.WalkDir, Walk follows symbolic links +// (while guarding against cycles). func Walk(roots []Root, add func(root Root, dir string), opts Options) { WalkSkip(roots, add, func(Root, string) bool { return false }, opts) } -// WalkSkip walks Go source directories ($GOROOT, $GOPATH, etc) to find packages. -// For each package found, add will be called (concurrently) with the absolute +// WalkSkip concurrently walks Go source directories ($GOROOT, $GOPATH, etc) to +// find packages. +// +// For each package found, add will be called with the absolute // paths of the containing source directory and the package directory. -// For each directory that will be scanned, skip will be called (concurrently) +// For each directory that will be scanned, skip will be called // with the absolute paths of the containing source directory and the directory. // If skip returns false on a directory it will be processed. -// add will be called concurrently. -// skip will be called concurrently. +// +// Unlike filepath.WalkDir, WalkSkip follows symbolic links +// (while guarding against cycles). func WalkSkip(roots []Root, add func(root Root, dir string), skip func(root Root, dir string) bool, opts Options) { for _, root := range roots { walkDir(root, add, skip, opts) @@ -67,45 +80,51 @@ func WalkSkip(roots []Root, add func(root Root, dir string), skip func(root Root // walkDir creates a walker and starts fastwalk with this walker. 
func walkDir(root Root, add func(Root, string), skip func(root Root, dir string) bool, opts Options) { + if opts.Logf == nil { + opts.Logf = func(format string, args ...interface{}) {} + } if _, err := os.Stat(root.Path); os.IsNotExist(err) { - if opts.Logf != nil { - opts.Logf("skipping nonexistent directory: %v", root.Path) - } + opts.Logf("skipping nonexistent directory: %v", root.Path) return } start := time.Now() - if opts.Logf != nil { - opts.Logf("scanning %s", root.Path) + opts.Logf("scanning %s", root.Path) + + concurrency := opts.Concurrency + if concurrency == 0 { + // The walk be either CPU-bound or I/O-bound, depending on what the + // caller-supplied add function does and the details of the user's platform + // and machine. Rather than trying to fine-tune the concurrency level for a + // specific environment, we default to GOMAXPROCS: it is likely to be a good + // choice for a CPU-bound add function, and if it is instead I/O-bound, then + // dealing with I/O saturation is arguably the job of the kernel and/or + // runtime. (Oversaturating I/O seems unlikely to harm performance as badly + // as failing to saturate would.) + concurrency = runtime.GOMAXPROCS(0) } - w := &walker{ - root: root, - add: add, - skip: skip, - opts: opts, - added: make(map[string]bool), + root: root, + add: add, + skip: skip, + opts: opts, + sem: make(chan struct{}, concurrency), } w.init() - // Add a trailing path separator to cause filepath.WalkDir to traverse symlinks. + w.sem <- struct{}{} path := root.Path - if len(path) == 0 { - path = "." + string(filepath.Separator) - } else if !os.IsPathSeparator(path[len(path)-1]) { - path = path + string(filepath.Separator) + if path == "" { + path = "." 
} - - if err := filepath.WalkDir(path, w.walk); err != nil { - logf := opts.Logf - if logf == nil { - logf = log.Printf - } - logf("scanning directory %v: %v", root.Path, err) + if fi, err := os.Lstat(path); err == nil { + w.walk(path, nil, fs.FileInfoToDirEntry(fi)) + } else { + w.opts.Logf("scanning directory %v: %v", root.Path, err) } + <-w.sem + w.walking.Wait() - if opts.Logf != nil { - opts.Logf("scanned %s in %v", root.Path, time.Since(start)) - } + opts.Logf("scanned %s in %v", root.Path, time.Since(start)) } // walker is the callback for fastwalk.Walk. @@ -115,9 +134,18 @@ type walker struct { skip func(Root, string) bool // The callback that will be invoked for every dir. dir is skipped if it returns true. opts Options // Options passed to Walk by the user. + walking sync.WaitGroup + sem chan struct{} // Channel of semaphore tokens; send to acquire, receive to release. ignoredDirs []string - added map[string]bool + added sync.Map // map[string]bool +} + +// A symlinkList is a linked list of os.FileInfos for parent directories +// reached via symlinks. 
+type symlinkList struct { + info os.FileInfo + prev *symlinkList } // init initializes the walker based on its Options @@ -134,9 +162,7 @@ func (w *walker) init() { for _, p := range ignoredPaths { full := filepath.Join(w.root.Path, p) w.ignoredDirs = append(w.ignoredDirs, full) - if w.opts.Logf != nil { - w.opts.Logf("Directory added to ignore list: %s", full) - } + w.opts.Logf("Directory added to ignore list: %s", full) } } @@ -146,12 +172,10 @@ func (w *walker) init() { func (w *walker) getIgnoredDirs(path string) []string { file := filepath.Join(path, ".goimportsignore") slurp, err := os.ReadFile(file) - if w.opts.Logf != nil { - if err != nil { - w.opts.Logf("%v", err) - } else { - w.opts.Logf("Read %s", file) - } + if err != nil { + w.opts.Logf("%v", err) + } else { + w.opts.Logf("Read %s", file) } if err != nil { return nil @@ -184,138 +208,130 @@ func (w *walker) shouldSkipDir(dir string) bool { } // walk walks through the given path. -func (w *walker) walk(path string, d fs.DirEntry, err error) error { - typ := d.Type() - if typ.IsRegular() { +// +// Errors are logged if w.opts.Logf is non-nil, but otherwise ignored. +func (w *walker) walk(path string, pathSymlinks *symlinkList, d fs.DirEntry) { + if d.Type()&os.ModeSymlink != 0 { + // Walk the symlink's target rather than the symlink itself. + // + // (Note that os.Stat, unlike the lower-lever os.Readlink, + // follows arbitrarily many layers of symlinks, so it will eventually + // reach either a non-symlink or a nonexistent target.) + // + // TODO(bcmills): 'go list all' itself ignores symlinks within GOROOT/src + // and GOPATH/src. Do we really need to traverse them here? If so, why? + + fi, err := os.Stat(path) + if err != nil { + w.opts.Logf("%v", err) + return + } + + // Avoid walking symlink cycles: if we have already followed a symlink to + // this directory as a parent of itself, don't follow it again. 
+ // + // This doesn't catch the first time through a cycle, but it also minimizes + // the number of extra stat calls we make if we *don't* encounter a cycle. + // Since we don't actually expect to encounter symlink cycles in practice, + // this seems like the right tradeoff. + for parent := pathSymlinks; parent != nil; parent = parent.prev { + if os.SameFile(fi, parent.info) { + return + } + } + + pathSymlinks = &symlinkList{ + info: fi, + prev: pathSymlinks, + } + d = fs.FileInfoToDirEntry(fi) + } + + if d.Type().IsRegular() { if !strings.HasSuffix(path, ".go") { - return nil + return } dir := filepath.Dir(path) if dir == w.root.Path && (w.root.Type == RootGOROOT || w.root.Type == RootGOPATH) { // Doesn't make sense to have regular files // directly in your $GOPATH/src or $GOROOT/src. - return nil + // + // TODO(bcmills): there are many levels of directory within + // RootModuleCache where this also wouldn't make sense, + // Can we generalize this to any directory without a corresponding + // import path? + return } - if !w.added[dir] { + if _, dup := w.added.LoadOrStore(dir, true); !dup { w.add(w.root, dir) - w.added[dir] = true } - return nil } - if typ == os.ModeDir { - base := filepath.Base(path) - if base == "" || base[0] == '.' || base[0] == '_' || - base == "testdata" || - (w.root.Type == RootGOROOT && w.opts.ModulesEnabled && base == "vendor") || - (!w.opts.ModulesEnabled && base == "node_modules") { - return filepath.SkipDir - } - if w.shouldSkipDir(path) { - return filepath.SkipDir - } - return nil - } - if typ == os.ModeSymlink && err == nil { - // TODO(bcmills): 'go list all' itself ignores symlinks within GOROOT/src - // and GOPATH/src. Do we really need to traverse them here? If so, why? - - if os.IsPathSeparator(path[len(path)-1]) { - // The OS was supposed to resolve a directory symlink but didn't. - // - // On macOS this may be caused by a known libc/kernel bug; - // see https://go.dev/issue/59586. 
- // - // On Windows before Go 1.21, this may be caused by a bug in - // os.Lstat (fixed in https://go.dev/cl/463177). - // - // In either case, we can work around the bug by walking this level - // explicitly: first the symlink target itself, then its contents. - - fi, err := os.Stat(path) - if err != nil || !fi.IsDir() { - return nil - } - err = w.walk(path, fs.FileInfoToDirEntry(fi), nil) - if err == filepath.SkipDir { - return nil - } else if err != nil { - return err - } - - ents, _ := os.ReadDir(path) // ignore error if unreadable - for _, d := range ents { - nextPath := filepath.Join(path, d.Name()) - var err error - if d.IsDir() { - err = filepath.WalkDir(nextPath, w.walk) - } else { - err = w.walk(nextPath, d, nil) - if err == filepath.SkipDir { - break - } - } - if err != nil { - return err - } - } - return nil - } - base := filepath.Base(path) - if strings.HasPrefix(base, ".#") { - // Emacs noise. - return nil - } - if w.shouldTraverse(path) { - // Add a trailing separator to traverse the symlink. - nextPath := path + string(filepath.Separator) - return filepath.WalkDir(nextPath, w.walk) - } + if !d.IsDir() { + return } - return nil -} -// shouldTraverse reports whether the symlink fi, found in dir, -// should be followed. It makes sure symlinks were never visited -// before to avoid symlink loops. -func (w *walker) shouldTraverse(path string) bool { - if w.shouldSkipDir(path) { - return false + base := filepath.Base(path) + if base == "" || base[0] == '.' || base[0] == '_' || + base == "testdata" || + (w.root.Type == RootGOROOT && w.opts.ModulesEnabled && base == "vendor") || + (!w.opts.ModulesEnabled && base == "node_modules") || + w.shouldSkipDir(path) { + return } - ts, err := os.Stat(path) + // Read the directory and walk its entries. 
+ + f, err := os.Open(path) if err != nil { - logf := w.opts.Logf - if logf == nil { - logf = log.Printf - } - logf("%v", err) - return false - } - if !ts.IsDir() { - return false + w.opts.Logf("%v", err) + return } + defer f.Close() - // Check for symlink loops by statting each directory component - // and seeing if any are the same file as ts. for { - parent := filepath.Dir(path) - if parent == path { - // Made it to the root without seeing a cycle. - // Use this symlink. - return true - } - parentInfo, err := os.Stat(parent) + // We impose an arbitrary limit on the number of ReadDir results per + // directory to limit the amount of memory consumed for stale or upcoming + // directory entries. The limit trades off CPU (number of syscalls to read + // the whole directory) against RAM (reachable directory entries other than + // the one currently being processed). + // + // Since we process the directories recursively, we will end up maintaining + // a slice of entries for each level of the directory tree. + // (Compare https://go.dev/issue/36197.) + ents, err := f.ReadDir(1024) if err != nil { - return false + if err != io.EOF { + w.opts.Logf("%v", err) + } + break } - if os.SameFile(ts, parentInfo) { - // Cycle. Don't traverse. - return false + + for _, d := range ents { + nextPath := filepath.Join(path, d.Name()) + if d.IsDir() { + select { + case w.sem <- struct{}{}: + // Got a new semaphore token, so we can traverse the directory concurrently. + d := d + w.walking.Add(1) + go func() { + defer func() { + <-w.sem + w.walking.Done() + }() + w.walk(nextPath, pathSymlinks, d) + }() + continue + + default: + // No tokens available, so traverse serially. 
+ } + } + + w.walk(nextPath, pathSymlinks, d) } - path = parent } - } diff --git a/internal/gopathwalk/walk_test.go b/internal/gopathwalk/walk_test.go index e46196b720b..8028f818588 100644 --- a/internal/gopathwalk/walk_test.go +++ b/internal/gopathwalk/walk_test.go @@ -5,90 +5,120 @@ package gopathwalk import ( - "log" "os" "path/filepath" "reflect" "runtime" + "sort" "strings" "sync" "testing" ) -func TestShouldTraverse(t *testing.T) { - switch runtime.GOOS { - case "windows", "plan9": - t.Skipf("skipping symlink-requiring test on %s", runtime.GOOS) - } +func TestSymlinkTraversal(t *testing.T) { + t.Parallel() - dir, err := os.MkdirTemp("", "goimports-") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(dir) + gopath := t.TempDir() - // Note: mapToDir prepends "src" to each element, since - // mapToDir was made for creating GOPATHs. - if err := mapToDir(dir, map[string]string{ - "foo/foo2/file.txt": "", - "foo/foo2/link-to-src": "LINK:../../", - "foo/foo2/link-to-src-foo": "LINK:../../foo", - "foo/foo2/link-to-dot": "LINK:.", - "bar/bar2/file.txt": "", - "bar/bar2/link-to-src-foo": "LINK:../../foo", - - "a/b/c": "LINK:../../a/d", - "a/d/e": "LINK:../../a/b", + if err := mapToDir(gopath, map[string]string{ + "a/b/c": "LINK:../../a/d", + "a/b/pkg/pkg.go": "package pkg", + "a/d/e": "LINK:../../a/b", + "a/d/pkg/pkg.go": "package pkg", + "a/f/loop": "LINK:../f", + "a/f/pkg/pkg.go": "package pkg", + "a/g/pkg/pkg.go": "LINK:../../f/pkg/pkg.go", + "a/self": "LINK:.", }); err != nil { + switch runtime.GOOS { + case "windows", "plan9": + t.Skipf("skipping symlink-requiring test on %s", runtime.GOOS) + } t.Fatal(err) } + + pkgc := make(chan []string, 1) + pkgc <- nil + add := func(root Root, dir string) { + rel, err := filepath.Rel(filepath.Join(root.Path, "src"), dir) + if err != nil { + t.Error(err) + } + pkgc <- append(<-pkgc, filepath.ToSlash(rel)) + } + + Walk([]Root{{Path: gopath, Type: RootGOPATH}}, add, Options{Logf: t.Logf}) + + pkgs := <-pkgc + 
sort.Strings(pkgs) + t.Logf("Found packages:\n\t%s", strings.Join(pkgs, "\n\t")) + + got := make(map[string]bool, len(pkgs)) + for _, pkg := range pkgs { + got[pkg] = true + } tests := []struct { - dir string - file string + path string want bool + why string }{ { - dir: "src/foo/foo2", - file: "link-to-src-foo", - want: false, // loop + path: "a/b/pkg", + want: true, + why: "found via regular directories", + }, + { + path: "a/b/c/pkg", + want: true, + why: "found via non-cyclic dir link", + }, + { + path: "a/b/c/e/pkg", + want: true, + why: "found via two non-cyclic dir links", }, { - dir: "src/foo/foo2", - file: "link-to-src", - want: false, // loop + path: "a/d/e/c/pkg", + want: true, + why: "found via two non-cyclic dir links", }, { - dir: "src/foo/foo2", - file: "link-to-dot", - want: false, // loop + path: "a/f/loop/pkg", + want: true, + why: "found via a single parent-dir link", }, { - dir: "src/bar/bar2", - file: "link-to-src-foo", - want: true, // not a loop + path: "a/f/loop/loop/pkg", + want: false, + why: "would follow loop symlink twice", }, { - dir: "src/a/b/c", - file: "e", - want: false, // loop: "e" is the same as "b". + path: "a/self/b/pkg", + want: true, + why: "follows self-link once", + }, + { + path: "a/self/self/b/pkg", + want: false, + why: "would follow self-link twice", }, } - for i, tt := range tests { - var w walker - got := w.shouldTraverse(filepath.Join(dir, tt.dir, tt.file)) - if got != tt.want { - t.Errorf("%d. shouldTraverse(%q, %q) = %v; want %v", i, tt.dir, tt.file, got, tt.want) + for _, tc := range tests { + if got[tc.path] != tc.want { + if tc.want { + t.Errorf("MISSING: %s (%s)", tc.path, tc.why) + } else { + t.Errorf("UNEXPECTED: %s (%s)", tc.path, tc.why) + } } } } // TestSkip tests that various goimports rules are followed in non-modules mode. 
func TestSkip(t *testing.T) { - dir, err := os.MkdirTemp("", "goimports-") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(dir) + t.Parallel() + + dir := t.TempDir() if err := mapToDir(dir, map[string]string{ "ignoreme/f.go": "package ignoreme", // ignored by .goimportsignore @@ -111,7 +141,10 @@ func TestSkip(t *testing.T) { found = append(found, dir[len(root.Path)+1:]) }, func(root Root, dir string) bool { return false - }, Options{ModulesEnabled: false, Logf: log.Printf}) + }, Options{ + ModulesEnabled: false, + Logf: t.Logf, + }) if want := []string{"shouldfind"}; !reflect.DeepEqual(found, want) { t.Errorf("expected to find only %v, got %v", want, found) } @@ -119,11 +152,9 @@ func TestSkip(t *testing.T) { // TestSkipFunction tests that scan successfully skips directories from user callback. func TestSkipFunction(t *testing.T) { - dir, err := os.MkdirTemp("", "goimports-") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(dir) + t.Parallel() + + dir := t.TempDir() if err := mapToDir(dir, map[string]string{ "ignoreme/f.go": "package ignoreme", // ignored by skip @@ -143,13 +174,53 @@ func TestSkipFunction(t *testing.T) { }, func(root Root, dir string) bool { return strings.HasSuffix(dir, "ignoreme") }, - Options{ModulesEnabled: false}) + Options{ + ModulesEnabled: false, + Logf: t.Logf, + }) if want := []string{"shouldfind"}; !reflect.DeepEqual(found, want) { t.Errorf("expected to find only %v, got %v", want, found) } } +// TestWalkSymlinkConcurrentDeletion is a regression test for the panic reported +// in https://go.dev/issue/58054#issuecomment-1791513726. 
+func TestWalkSymlinkConcurrentDeletion(t *testing.T) { + t.Parallel() + + src := t.TempDir() + + m := map[string]string{ + "dir/readme.txt": "dir is not a go package", + "dirlink": "LINK:dir", + } + if err := mapToDir(src, m); err != nil { + switch runtime.GOOS { + case "windows", "plan9": + t.Skipf("skipping symlink-requiring test on %s", runtime.GOOS) + } + t.Fatal(err) + } + + done := make(chan struct{}) + go func() { + if err := os.RemoveAll(src); err != nil { + t.Log(err) + } + close(done) + }() + defer func() { + <-done + }() + + add := func(root Root, dir string) { + t.Errorf("unexpected call to add(%q, %q)", root.Path, dir) + } + Walk([]Root{{Path: src, Type: RootGOPATH}}, add, Options{Logf: t.Logf}) +} + func mapToDir(destDir string, files map[string]string) error { + var symlinkPaths []string for path, contents := range files { file := filepath.Join(destDir, "src", path) if err := os.MkdirAll(filepath.Dir(file), 0755); err != nil { @@ -157,7 +228,9 @@ func mapToDir(destDir string, files map[string]string) error { } var err error if strings.HasPrefix(contents, "LINK:") { - err = os.Symlink(strings.TrimPrefix(contents, "LINK:"), file) + // To work around https://go.dev/issue/39183, wait to create symlinks + // until we have created all non-symlink paths. 
+ symlinkPaths = append(symlinkPaths, path) } else { err = os.WriteFile(file, []byte(contents), 0644) } @@ -165,5 +238,15 @@ func mapToDir(destDir string, files map[string]string) error { return err } } + + for _, path := range symlinkPaths { + file := filepath.Join(destDir, "src", path) + target := filepath.FromSlash(strings.TrimPrefix(files[path], "LINK:")) + err := os.Symlink(target, file) + if err != nil { + return err + } + } + return nil } diff --git a/internal/imports/fix.go b/internal/imports/fix.go index 01e8ba5fa2d..55980327616 100644 --- a/internal/imports/fix.go +++ b/internal/imports/fix.go @@ -13,6 +13,7 @@ import ( "go/build" "go/parser" "go/token" + "go/types" "io/fs" "io/ioutil" "os" @@ -30,6 +31,7 @@ import ( "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/gopathwalk" + "golang.org/x/tools/internal/stdlib" ) // importToGroup is a list of functions which map from an import path to @@ -254,7 +256,7 @@ type pass struct { otherFiles []*ast.File // sibling files. // Intermediate state, generated by load. - existingImports map[string]*ImportInfo + existingImports map[string][]*ImportInfo allRefs references missingRefs references @@ -319,7 +321,7 @@ func (p *pass) importIdentifier(imp *ImportInfo) string { func (p *pass) load() ([]*ImportFix, bool) { p.knownPackages = map[string]*packageInfo{} p.missingRefs = references{} - p.existingImports = map[string]*ImportInfo{} + p.existingImports = map[string][]*ImportInfo{} // Load basic information about the file in question. p.allRefs = collectReferences(p.f) @@ -350,7 +352,7 @@ func (p *pass) load() ([]*ImportFix, bool) { } } for _, imp := range imports { - p.existingImports[p.importIdentifier(imp)] = imp + p.existingImports[p.importIdentifier(imp)] = append(p.existingImports[p.importIdentifier(imp)], imp) } // Find missing references. @@ -389,31 +391,33 @@ func (p *pass) fix() ([]*ImportFix, bool) { // Found everything, or giving up. 
Add the new imports and remove any unused. var fixes []*ImportFix - for _, imp := range p.existingImports { - // We deliberately ignore globals here, because we can't be sure - // they're in the same package. People do things like put multiple - // main packages in the same directory, and we don't want to - // remove imports if they happen to have the same name as a var in - // a different package. - if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok { - fixes = append(fixes, &ImportFix{ - StmtInfo: *imp, - IdentName: p.importIdentifier(imp), - FixType: DeleteImport, - }) - continue - } + for _, identifierImports := range p.existingImports { + for _, imp := range identifierImports { + // We deliberately ignore globals here, because we can't be sure + // they're in the same package. People do things like put multiple + // main packages in the same directory, and we don't want to + // remove imports if they happen to have the same name as a var in + // a different package. + if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok { + fixes = append(fixes, &ImportFix{ + StmtInfo: *imp, + IdentName: p.importIdentifier(imp), + FixType: DeleteImport, + }) + continue + } - // An existing import may need to update its import name to be correct. - if name := p.importSpecName(imp); name != imp.Name { - fixes = append(fixes, &ImportFix{ - StmtInfo: ImportInfo{ - Name: name, - ImportPath: imp.ImportPath, - }, - IdentName: p.importIdentifier(imp), - FixType: SetImportName, - }) + // An existing import may need to update its import name to be correct. + if name := p.importSpecName(imp); name != imp.Name { + fixes = append(fixes, &ImportFix{ + StmtInfo: ImportInfo{ + Name: name, + ImportPath: imp.ImportPath, + }, + IdentName: p.importIdentifier(imp), + FixType: SetImportName, + }) + } } } // Collecting fixes involved map iteration, so sort for stability. 
See @@ -508,9 +512,9 @@ func (p *pass) assumeSiblingImportsValid() { } for left, rights := range refs { if imp, ok := importsByName[left]; ok { - if m, ok := stdlib[imp.ImportPath]; ok { + if m, ok := stdlib.PackageSymbols[imp.ImportPath]; ok { // We have the stdlib in memory; no need to guess. - rights = copyExports(m) + rights = symbolNameSet(m) } p.addCandidate(imp, &packageInfo{ // no name; we already know it. @@ -638,7 +642,7 @@ func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filena dupCheck := map[string]struct{}{} // Start off with the standard library. - for importPath, exports := range stdlib { + for importPath, symbols := range stdlib.PackageSymbols { p := &pkg{ dir: filepath.Join(goenv["GOROOT"], "src", importPath), importPathShort: importPath, @@ -647,6 +651,13 @@ func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filena } dupCheck[importPath] = struct{}{} if notSelf(p) && wrappedCallback.dirFound(p) && wrappedCallback.packageNameLoaded(p) { + var exports []stdlib.Symbol + for _, sym := range symbols { + switch sym.Kind { + case stdlib.Func, stdlib.Type, stdlib.Var, stdlib.Const: + exports = append(exports, sym) + } + } wrappedCallback.exportsLoaded(p, exports) } } @@ -667,7 +678,7 @@ func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filena dupCheck[pkg.importPathShort] = struct{}{} return notSelf(pkg) && wrappedCallback.packageNameLoaded(pkg) }, - exportsLoaded: func(pkg *pkg, exports []string) { + exportsLoaded: func(pkg *pkg, exports []stdlib.Symbol) { // If we're an x_test, load the package under test's test variant. 
if strings.HasSuffix(filePkg, "_test") && pkg.dir == filepath.Dir(filename) { var err error @@ -698,20 +709,21 @@ func ScoreImportPaths(ctx context.Context, env *ProcessEnv, paths []string) (map return result, nil } -func PrimeCache(ctx context.Context, env *ProcessEnv) error { +func PrimeCache(ctx context.Context, resolver Resolver) error { // Fully scan the disk for directories, but don't actually read any Go files. callback := &scanCallback{ - rootFound: func(gopathwalk.Root) bool { - return true + rootFound: func(root gopathwalk.Root) bool { + // See getCandidatePkgs: walking GOROOT is apparently expensive and + // unnecessary. + return root.Type != gopathwalk.RootGOROOT }, dirFound: func(pkg *pkg) bool { return false }, - packageNameLoaded: func(pkg *pkg) bool { - return false - }, + // packageNameLoaded and exportsLoaded must never be called. } - return getCandidatePkgs(ctx, callback, "", "", env) + + return resolver.scan(ctx, callback) } func candidateImportName(pkg *pkg) string { @@ -791,7 +803,7 @@ func GetImportPaths(ctx context.Context, wrapped func(ImportFix), searchPrefix, // A PackageExport is a package and its exports. type PackageExport struct { Fix *ImportFix - Exports []string + Exports []stdlib.Symbol } // GetPackageExports returns all known packages with name pkg and their exports. 
@@ -806,8 +818,8 @@ func GetPackageExports(ctx context.Context, wrapped func(PackageExport), searchP packageNameLoaded: func(pkg *pkg) bool { return pkg.packageName == searchPkg }, - exportsLoaded: func(pkg *pkg, exports []string) { - sort.Strings(exports) + exportsLoaded: func(pkg *pkg, exports []stdlib.Symbol) { + sortSymbols(exports) wrapped(PackageExport{ Fix: &ImportFix{ StmtInfo: ImportInfo{ @@ -825,16 +837,45 @@ func GetPackageExports(ctx context.Context, wrapped func(PackageExport), searchP return getCandidatePkgs(ctx, callback, filename, filePkg, env) } -var requiredGoEnvVars = []string{"GO111MODULE", "GOFLAGS", "GOINSECURE", "GOMOD", "GOMODCACHE", "GONOPROXY", "GONOSUMDB", "GOPATH", "GOPROXY", "GOROOT", "GOSUMDB", "GOWORK"} +// TODO(rfindley): we should depend on GOOS and GOARCH, to provide accurate +// imports when doing cross-platform development. +var requiredGoEnvVars = []string{ + "GO111MODULE", + "GOFLAGS", + "GOINSECURE", + "GOMOD", + "GOMODCACHE", + "GONOPROXY", + "GONOSUMDB", + "GOPATH", + "GOPROXY", + "GOROOT", + "GOSUMDB", + "GOWORK", +} // ProcessEnv contains environment variables and settings that affect the use of // the go command, the go/build package, etc. +// +// ...a ProcessEnv *also* overwrites its Env along with derived state in the +// form of the resolver. And because it is lazily initialized, an env may just +// be broken and unusable, but there is no way for the caller to detect that: +// all queries will just fail. +// +// TODO(rfindley): refactor this package so that this type (perhaps renamed to +// just Env or Config) is an immutable configuration struct, to be exchanged +// for an initialized object via a constructor that returns an error. Perhaps +// the signature should be `func NewResolver(*Env) (*Resolver, error)`, where +// resolver is a concrete type used for resolving imports. 
Via this +// refactoring, we can avoid the need to call ProcessEnv.init and +// ProcessEnv.GoEnv everywhere, and implicitly fix all the places where this +// these are misused. Also, we'd delegate the caller the decision of how to +// handle a broken environment. type ProcessEnv struct { GocmdRunner *gocommand.Runner BuildFlags []string ModFlag string - ModFile string // SkipPathInScan returns true if the path should be skipped from scans of // the RootCurrentModule root type. The function argument is a clean, @@ -844,7 +885,7 @@ type ProcessEnv struct { // Env overrides the OS environment, and can be used to specify // GOPROXY, GO111MODULE, etc. PATH cannot be set here, because // exec.Command will not honor it. - // Specifying all of RequiredGoEnvVars avoids a call to `go env`. + // Specifying all of requiredGoEnvVars avoids a call to `go env`. Env map[string]string WorkingDir string @@ -852,9 +893,17 @@ type ProcessEnv struct { // If Logf is non-nil, debug logging is enabled through this function. Logf func(format string, args ...interface{}) - initialized bool + // If set, ModCache holds a shared cache of directory info to use across + // multiple ProcessEnvs. + ModCache *DirInfoCache + + initialized bool // see TODO above - resolver Resolver + // resolver and resolverErr are lazily evaluated (see GetResolver). + // This is unclean, but see the big TODO in the docstring for ProcessEnv + // above: for now, we can't be sure that the ProcessEnv is fully initialized. 
+ resolver Resolver + resolverErr error } func (e *ProcessEnv) goEnv() (map[string]string, error) { @@ -934,20 +983,33 @@ func (e *ProcessEnv) env() []string { } func (e *ProcessEnv) GetResolver() (Resolver, error) { - if e.resolver != nil { - return e.resolver, nil - } if err := e.init(); err != nil { return nil, err } - if len(e.Env["GOMOD"]) == 0 && len(e.Env["GOWORK"]) == 0 { - e.resolver = newGopathResolver(e) - return e.resolver, nil + + if e.resolver == nil && e.resolverErr == nil { + // TODO(rfindley): we should only use a gopathResolver here if the working + // directory is actually *in* GOPATH. (I seem to recall an open gopls issue + // for this behavior, but I can't find it). + // + // For gopls, we can optionally explicitly choose a resolver type, since we + // already know the view type. + if len(e.Env["GOMOD"]) == 0 && len(e.Env["GOWORK"]) == 0 { + e.resolver = newGopathResolver(e) + } else if r, err := newModuleResolver(e, e.ModCache); err != nil { + e.resolverErr = err + } else { + e.resolver = Resolver(r) + } } - e.resolver = newModuleResolver(e) - return e.resolver, nil + + return e.resolver, e.resolverErr } +// buildContext returns the build.Context to use for matching files. +// +// TODO(rfindley): support dynamic GOOS, GOARCH here, when doing cross-platform +// development. 
func (e *ProcessEnv) buildContext() (*build.Context, error) { ctx := build.Default goenv, err := e.goEnv() @@ -1002,7 +1064,7 @@ func addStdlibCandidates(pass *pass, refs references) error { if path.Base(pkg) == pass.f.Name.Name && filepath.Join(goenv["GOROOT"], "src", pkg) == pass.srcDir { return } - exports := copyExports(stdlib[pkg]) + exports := symbolNameSet(stdlib.PackageSymbols[pkg]) pass.addCandidate( &ImportInfo{ImportPath: pkg}, &packageInfo{name: path.Base(pkg), exports: exports}) @@ -1014,7 +1076,7 @@ func addStdlibCandidates(pass *pass, refs references) error { add("math/rand") continue } - for importPath := range stdlib { + for importPath := range stdlib.PackageSymbols { if path.Base(importPath) == left { add(importPath) } @@ -1027,15 +1089,23 @@ func addStdlibCandidates(pass *pass, refs references) error { type Resolver interface { // loadPackageNames loads the package names in importPaths. loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) + // scan works with callback to search for packages. See scanCallback for details. scan(ctx context.Context, callback *scanCallback) error + // loadExports returns the set of exported symbols in the package at dir. // loadExports may be called concurrently. - loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) + loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []stdlib.Symbol, error) + // scoreImportPath returns the relevance for an import path. scoreImportPath(ctx context.Context, path string) float64 - ClearForNewScan() + // ClearForNewScan returns a new Resolver based on the receiver that has + // cleared its internal caches of directory contents. + // + // The new resolver should be primed and then set via + // [ProcessEnv.UpdateResolver]. + ClearForNewScan() Resolver } // A scanCallback controls a call to scan and receives its results. 
@@ -1054,7 +1124,7 @@ type scanCallback struct { // If it returns true, the package's exports will be loaded. packageNameLoaded func(pkg *pkg) bool // exportsLoaded is called when a package's exports have been loaded. - exportsLoaded func(pkg *pkg, exports []string) + exportsLoaded func(pkg *pkg, exports []stdlib.Symbol) } func addExternalCandidates(ctx context.Context, pass *pass, refs references, filename string) error { @@ -1118,7 +1188,7 @@ func addExternalCandidates(ctx context.Context, pass *pass, refs references, fil go func(pkgName string, symbols map[string]bool) { defer wg.Done() - found, err := findImport(ctx, pass, found[pkgName], pkgName, symbols, filename) + found, err := findImport(ctx, pass, found[pkgName], pkgName, symbols) if err != nil { firstErrOnce.Do(func() { @@ -1149,6 +1219,17 @@ func addExternalCandidates(ctx context.Context, pass *pass, refs references, fil }() for result := range results { + // Don't offer completions that would shadow predeclared + // names, such as github.com/coreos/etcd/error. + if types.Universe.Lookup(result.pkg.name) != nil { // predeclared + // Ideally we would skip this candidate only + // if the predeclared name is actually + // referenced by the file, but that's a lot + // trickier to compute and would still create + // an import that is likely to surprise the + // user before long. + continue + } pass.addCandidate(result.imp, result.pkg) } return firstErr @@ -1191,31 +1272,22 @@ func ImportPathToAssumedName(importPath string) string { type gopathResolver struct { env *ProcessEnv walked bool - cache *dirInfoCache + cache *DirInfoCache scanSema chan struct{} // scanSema prevents concurrent scans. 
} func newGopathResolver(env *ProcessEnv) *gopathResolver { r := &gopathResolver{ - env: env, - cache: &dirInfoCache{ - dirs: map[string]*directoryPackageInfo{}, - listeners: map[*int]cacheListener{}, - }, + env: env, + cache: NewDirInfoCache(), scanSema: make(chan struct{}, 1), } r.scanSema <- struct{}{} return r } -func (r *gopathResolver) ClearForNewScan() { - <-r.scanSema - r.cache = &dirInfoCache{ - dirs: map[string]*directoryPackageInfo{}, - listeners: map[*int]cacheListener{}, - } - r.walked = false - r.scanSema <- struct{}{} +func (r *gopathResolver) ClearForNewScan() Resolver { + return newGopathResolver(r.env) } func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { @@ -1233,7 +1305,7 @@ func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) ( // importPathToName finds out the actual package name, as declared in its .go files. func importPathToName(bctx *build.Context, importPath, srcDir string) string { // Fast path for standard library without going to disk. - if _, ok := stdlib[importPath]; ok { + if stdlib.HasPackage(importPath) { return path.Base(importPath) // stdlib packages always match their paths. 
} @@ -1431,7 +1503,7 @@ func (r *gopathResolver) scan(ctx context.Context, callback *scanCallback) error } func (r *gopathResolver) scoreImportPath(ctx context.Context, path string) float64 { - if _, ok := stdlib[path]; ok { + if stdlib.HasPackage(path) { return MaxRelevance } return MaxRelevance - 1 @@ -1448,7 +1520,7 @@ func filterRoots(roots []gopathwalk.Root, include func(gopathwalk.Root) bool) [] return result } -func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) { +func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []stdlib.Symbol, error) { if info, ok := r.cache.Load(pkg.dir); ok && !includeTest { return r.cache.CacheExports(ctx, r.env, info) } @@ -1468,7 +1540,7 @@ func VendorlessPath(ipath string) string { return ipath } -func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, includeTest bool) (string, []string, error) { +func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, includeTest bool) (string, []stdlib.Symbol, error) { // Look for non-test, buildable .go files which could provide exports. 
all, err := os.ReadDir(dir) if err != nil { @@ -1492,7 +1564,7 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, incl } var pkgName string - var exports []string + var exports []stdlib.Symbol fset := token.NewFileSet() for _, fi := range files { select { @@ -1519,24 +1591,44 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, incl continue } pkgName = f.Name.Name - for name := range f.Scope.Objects { + for name, obj := range f.Scope.Objects { if ast.IsExported(name) { - exports = append(exports, name) + var kind stdlib.Kind + switch obj.Kind { + case ast.Con: + kind = stdlib.Const + case ast.Typ: + kind = stdlib.Type + case ast.Var: + kind = stdlib.Var + case ast.Fun: + kind = stdlib.Func + } + exports = append(exports, stdlib.Symbol{ + Name: name, + Kind: kind, + Version: 0, // unknown; be permissive + }) } } } + sortSymbols(exports) if env.Logf != nil { - sortedExports := append([]string(nil), exports...) - sort.Strings(sortedExports) - env.Logf("loaded exports in dir %v (package %v): %v", dir, pkgName, strings.Join(sortedExports, ", ")) + env.Logf("loaded exports in dir %v (package %v): %v", dir, pkgName, exports) } return pkgName, exports, nil } +func sortSymbols(syms []stdlib.Symbol) { + sort.Slice(syms, func(i, j int) bool { + return syms[i].Name < syms[j].Name + }) +} + // findImport searches for a package with the given symbols. // If no package is found, findImport returns ("", false, nil) -func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgName string, symbols map[string]bool, filename string) (*pkg, error) { +func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgName string, symbols map[string]bool) (*pkg, error) { // Sort the candidates by their import package length, // assuming that shorter package names are better than long // ones. 
Note that this sorts by the de-vendored name, so @@ -1600,7 +1692,7 @@ func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgNa exportsMap := make(map[string]bool, len(exports)) for _, sym := range exports { - exportsMap[sym] = true + exportsMap[sym.Name] = true } // If it doesn't have the right @@ -1758,10 +1850,13 @@ func (fn visitFn) Visit(node ast.Node) ast.Visitor { return fn(node) } -func copyExports(pkg []string) map[string]bool { - m := make(map[string]bool, len(pkg)) - for _, v := range pkg { - m[v] = true +func symbolNameSet(symbols []stdlib.Symbol) map[string]bool { + names := make(map[string]bool) + for _, sym := range symbols { + switch sym.Kind { + case stdlib.Const, stdlib.Var, stdlib.Type, stdlib.Func: + names[sym.Name] = true + } } - return m + return names } diff --git a/internal/imports/fix_test.go b/internal/imports/fix_test.go index 7096ff25c56..08d90a5ca7b 100644 --- a/internal/imports/fix_test.go +++ b/internal/imports/fix_test.go @@ -20,6 +20,7 @@ import ( "golang.org/x/tools/go/packages/packagestest" "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/stdlib" ) var testDebug = flag.Bool("debug", false, "enable debug output") @@ -1181,6 +1182,19 @@ var _, _ = rand.Read, rand.NewZipf import "math/rand" var _, _ = rand.Read, rand.NewZipf +`, + }, + { + name: "unused_duplicate_imports_remove", + in: `package main + +import ( + "errors" + + "github.com/pkg/errors" +) +`, + out: `package main `, }, } @@ -1371,6 +1385,41 @@ var ( }.processTest(t, "golang.org/fake", "myotherpackage/toformat.go", nil, nil, want) } +// Test support for packages in GOPATH whose files are symlinks. 
+func TestImportSymlinkFiles(t *testing.T) { + const input = `package p + +var ( + _ = fmt.Print + _ = mypkg.Foo +) +` + const want = `package p + +import ( + "fmt" + + "golang.org/fake/x/y/mypkg" +) + +var ( + _ = fmt.Print + _ = mypkg.Foo +) +` + + testConfig{ + module: packagestest.Module{ + Name: "golang.org/fake", + Files: fm{ + "target/f.go": "package mypkg\nvar Foo = 123\n", + "x/y/mypkg/f.go": packagestest.Symlink("../../../target/f.go"), + "myotherpackage/toformat.go": input, + }, + }, + }.processTest(t, "golang.org/fake", "myotherpackage/toformat.go", nil, nil, want) +} + func TestImportSymlinksWithIgnore(t *testing.T) { const input = `package p @@ -1398,7 +1447,8 @@ var ( "x/y/mypkg": packagestest.Symlink("../../target"), // valid symlink "x/y/apkg": packagestest.Symlink(".."), // symlink loop "myotherpkg/toformat.go": input, - "../../.goimportsignore": "golang.org/fake/x/y/mypkg\n", + "../../.goimportsignore": "golang.org/fake/x/y/mypkg\n" + + "golang.org/fake/x/y/apkg\n", }, }, }.processTest(t, "golang.org/fake", "myotherpkg/toformat.go", nil, nil, want) @@ -1577,9 +1627,9 @@ import "bytes" var _ = bytes.Buffer ` // Force a scan of the stdlib. 
- savedStdlib := stdlib - defer func() { stdlib = savedStdlib }() - stdlib = map[string][]string{} + savedStdlib := stdlib.PackageSymbols + defer func() { stdlib.PackageSymbols = savedStdlib }() + stdlib.PackageSymbols = nil testConfig{ module: packagestest.Module{ @@ -2805,8 +2855,8 @@ func TestGetPackageCompletions(t *testing.T) { defer mu.Unlock() for _, csym := range c.Exports { for _, w := range want { - if c.Fix.StmtInfo.ImportPath == w.path && csym == w.symbol { - got = append(got, res{c.Fix.Relevance, c.Fix.IdentName, c.Fix.StmtInfo.ImportPath, csym}) + if c.Fix.StmtInfo.ImportPath == w.path && csym.Name == w.symbol { + got = append(got, res{c.Fix.Relevance, c.Fix.IdentName, c.Fix.StmtInfo.ImportPath, csym.Name}) } } } diff --git a/internal/imports/imports.go b/internal/imports/imports.go index 58e637b90f2..f83465520a4 100644 --- a/internal/imports/imports.go +++ b/internal/imports/imports.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:generate go run mkstdlib.go - // Package imports implements a Go pretty-printer (like package "go/format") // that also adds or removes import statements as necessary. package imports @@ -109,7 +107,7 @@ func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, e } // formatFile formats the file syntax tree. -// It may mutate the token.FileSet. +// It may mutate the token.FileSet and the ast.File. // // If an adjust function is provided, it is called after formatting // with the original source (formatFile's src parameter) and the @@ -236,7 +234,7 @@ func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast src = src[:len(src)-len("}\n")] // Gofmt has also indented the function body one level. // Remove that indent. 
- src = bytes.Replace(src, []byte("\n\t"), []byte("\n"), -1) + src = bytes.ReplaceAll(src, []byte("\n\t"), []byte("\n")) return matchSpace(orig, src) } return file, adjust, nil diff --git a/internal/imports/mkstdlib.go b/internal/imports/mkstdlib.go deleted file mode 100644 index 3896872c234..00000000000 --- a/internal/imports/mkstdlib.go +++ /dev/null @@ -1,144 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build ignore -// +build ignore - -// mkstdlib generates the zstdlib.go file, containing the Go standard -// library API symbols. It's baked into the binary to avoid scanning -// GOPATH in the common case. -package main - -import ( - "bufio" - "bytes" - "fmt" - "go/format" - "go/token" - "io" - "log" - "os" - "path/filepath" - "regexp" - "runtime" - "sort" - "strings" - - "golang.org/x/tools/go/packages" -) - -func mustOpen(name string) io.Reader { - f, err := os.Open(name) - if err != nil { - log.Fatal(err) - } - return f -} - -func api(base string) string { - return filepath.Join(runtime.GOROOT(), "api", base) -} - -var sym = regexp.MustCompile(`^pkg (\S+).*?, (?:var|func|type|const) ([A-Z]\w*)`) - -func main() { - var buf bytes.Buffer - outf := func(format string, args ...interface{}) { - fmt.Fprintf(&buf, format, args...) - } - outf(`// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -`) - outf("// Code generated by mkstdlib.go. DO NOT EDIT.\n\n") - outf("package imports\n") - outf("var stdlib = map[string][]string{\n") - f := readAPI() - sc := bufio.NewScanner(f) - - // The APIs of the syscall/js and unsafe packages need to be computed explicitly, - // because they're not included in the GOROOT/api/go1.*.txt files at this time. 
- pkgs := map[string]map[string]bool{ - "syscall/js": syms("syscall/js", "GOOS=js", "GOARCH=wasm"), - "unsafe": syms("unsafe"), - } - paths := []string{"syscall/js", "unsafe"} - - for sc.Scan() { - l := sc.Text() - if m := sym.FindStringSubmatch(l); m != nil { - path, sym := m[1], m[2] - - if _, ok := pkgs[path]; !ok { - pkgs[path] = map[string]bool{} - paths = append(paths, path) - } - pkgs[path][sym] = true - } - } - if err := sc.Err(); err != nil { - log.Fatal(err) - } - sort.Strings(paths) - for _, path := range paths { - outf("\t%q: {\n", path) - pkg := pkgs[path] - var syms []string - for sym := range pkg { - syms = append(syms, sym) - } - sort.Strings(syms) - for _, sym := range syms { - outf("\t\t%q,\n", sym) - } - outf("},\n") - } - outf("}\n") - fmtbuf, err := format.Source(buf.Bytes()) - if err != nil { - log.Fatal(err) - } - err = os.WriteFile("zstdlib.go", fmtbuf, 0666) - if err != nil { - log.Fatal(err) - } -} - -// readAPI opens an io.Reader that reads all stdlib API content. -func readAPI() io.Reader { - entries, err := os.ReadDir(filepath.Join(runtime.GOROOT(), "api")) - if err != nil { - log.Fatal(err) - } - var readers []io.Reader - for _, entry := range entries { - name := entry.Name() - if strings.HasPrefix(name, "go") && strings.HasSuffix(name, ".txt") { - readers = append(readers, mustOpen(api(name))) - } - } - return io.MultiReader(readers...) -} - -// syms computes the exported symbols in the specified package. -func syms(pkg string, extraEnv ...string) map[string]bool { - var env []string - if len(extraEnv) != 0 { - env = append(os.Environ(), extraEnv...) 
- } - pkgs, err := packages.Load(&packages.Config{Mode: packages.NeedTypes, Env: env}, pkg) - if err != nil { - log.Fatalln(err) - } else if len(pkgs) != 1 { - log.Fatalf("got %d packages, want one package %q", len(pkgs), pkg) - } - syms := make(map[string]bool) - for _, name := range pkgs[0].Types.Scope().Names() { - if token.IsExported(name) { - syms[name] = true - } - } - return syms -} diff --git a/internal/imports/mod.go b/internal/imports/mod.go index 5f4d435d3cc..21ef938978e 100644 --- a/internal/imports/mod.go +++ b/internal/imports/mod.go @@ -21,51 +21,91 @@ import ( "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/gopathwalk" + "golang.org/x/tools/internal/stdlib" ) -// ModuleResolver implements resolver for modules using the go command as little -// as feasible. +// Notes(rfindley): ModuleResolver appears to be heavily optimized for scanning +// as fast as possible, which is desirable for a call to goimports from the +// command line, but it doesn't work as well for gopls, where it suffers from +// slow startup (golang/go#44863) and intermittent hanging (golang/go#59216), +// both caused by populating the cache, albeit in slightly different ways. +// +// A high level list of TODOs: +// - Optimize the scan itself, as there is some redundancy statting and +// reading go.mod files. +// - Invert the relationship between ProcessEnv and Resolver (see the +// docstring of ProcessEnv). +// - Make it easier to use an external resolver implementation. +// +// Smaller TODOs are annotated in the code below. + +// ModuleResolver implements the Resolver interface for a workspace using +// modules. +// +// A goal of the ModuleResolver is to invoke the Go command as little as +// possible. To this end, it runs the Go command only for listing module +// information (i.e. `go list -m -e -json ...`). 
Package scanning, the process +// of loading package information for the modules, is implemented internally +// via the scan method. +// +// It has two types of state: the state derived from the go command, which +// is populated by init, and the state derived from scans, which is populated +// via scan. A root is considered scanned if it has been walked to discover +// directories. However, if the scan did not require additional information +// from the directory (such as package name or exports), the directory +// information itself may be partially populated. It will be lazily filled in +// as needed by scans, using the scanCallback. type ModuleResolver struct { - env *ProcessEnv - moduleCacheDir string - dummyVendorMod *gocommand.ModuleJSON // If vendoring is enabled, the pseudo-module that represents the /vendor directory. - roots []gopathwalk.Root - scanSema chan struct{} // scanSema prevents concurrent scans and guards scannedRoots. - scannedRoots map[gopathwalk.Root]bool - - initialized bool - mains []*gocommand.ModuleJSON - mainByDir map[string]*gocommand.ModuleJSON - modsByModPath []*gocommand.ModuleJSON // All modules, ordered by # of path components in module Path... - modsByDir []*gocommand.ModuleJSON // ...or number of path components in their Dir. - - // moduleCacheCache stores information about the module cache. 
- moduleCacheCache *dirInfoCache - otherCache *dirInfoCache + env *ProcessEnv + + // Module state, populated during construction + dummyVendorMod *gocommand.ModuleJSON // if vendoring is enabled, a pseudo-module to represent the /vendor directory + moduleCacheDir string // GOMODCACHE, inferred from GOPATH if unset + roots []gopathwalk.Root // roots to scan, in approximate order of importance + mains []*gocommand.ModuleJSON // main modules + mainByDir map[string]*gocommand.ModuleJSON // module information by dir, to join with roots + modsByModPath []*gocommand.ModuleJSON // all modules, ordered by # of path components in their module path + modsByDir []*gocommand.ModuleJSON // ...or by the number of path components in their Dir. + + // Scanning state, populated by scan + + // scanSema prevents concurrent scans, and guards scannedRoots and the cache + // fields below (though the caches themselves are concurrency safe). + // Receive to acquire, send to release. + scanSema chan struct{} + scannedRoots map[gopathwalk.Root]bool // if true, root has been walked + + // Caches of directory info, populated by scans and scan callbacks + // + // moduleCacheCache stores cached information about roots in the module + // cache, which are immutable and therefore do not need to be invalidated. + // + // otherCache stores information about all other roots (even GOROOT), which + // may change. + moduleCacheCache *DirInfoCache + otherCache *DirInfoCache } -func newModuleResolver(e *ProcessEnv) *ModuleResolver { +// newModuleResolver returns a new module-aware goimports resolver. +// +// Note: use caution when modifying this constructor: changes must also be +// reflected in ModuleResolver.ClearForNewScan. 
+func newModuleResolver(e *ProcessEnv, moduleCacheCache *DirInfoCache) (*ModuleResolver, error) { r := &ModuleResolver{ env: e, scanSema: make(chan struct{}, 1), } - r.scanSema <- struct{}{} - return r -} - -func (r *ModuleResolver) init() error { - if r.initialized { - return nil - } + r.scanSema <- struct{}{} // release goenv, err := r.env.goEnv() if err != nil { - return err + return nil, err } + + // TODO(rfindley): can we refactor to share logic with r.env.invokeGo? inv := gocommand.Invocation{ BuildFlags: r.env.BuildFlags, ModFlag: r.env.ModFlag, - ModFile: r.env.ModFile, Env: r.env.env(), Logf: r.env.Logf, WorkingDir: r.env.WorkingDir, @@ -77,9 +117,12 @@ func (r *ModuleResolver) init() error { // Module vendor directories are ignored in workspace mode: // https://go.googlesource.com/proposal/+/master/design/45713-workspace.md if len(r.env.Env["GOWORK"]) == 0 { + // TODO(rfindley): VendorEnabled runs the go command to get GOFLAGS, but + // they should be available from the ProcessEnv. Can we avoid the redundant + // invocation? vendorEnabled, mainModVendor, err = gocommand.VendorEnabled(context.TODO(), inv, r.env.GocmdRunner) if err != nil { - return err + return nil, err } } @@ -100,19 +143,14 @@ func (r *ModuleResolver) init() error { // GO111MODULE=on. Other errors are fatal. 
if err != nil { if errMsg := err.Error(); !strings.Contains(errMsg, "working directory is not part of a module") && !strings.Contains(errMsg, "go.mod file not found") { - return err + return nil, err } } } - if gmc := r.env.Env["GOMODCACHE"]; gmc != "" { - r.moduleCacheDir = gmc - } else { - gopaths := filepath.SplitList(goenv["GOPATH"]) - if len(gopaths) == 0 { - return fmt.Errorf("empty GOPATH") - } - r.moduleCacheDir = filepath.Join(gopaths[0], "/pkg/mod") + r.moduleCacheDir = gomodcacheForEnv(goenv) + if r.moduleCacheDir == "" { + return nil, fmt.Errorf("cannot resolve GOMODCACHE") } sort.Slice(r.modsByModPath, func(i, j int) bool { @@ -141,7 +179,11 @@ func (r *ModuleResolver) init() error { } else { addDep := func(mod *gocommand.ModuleJSON) { if mod.Replace == nil { - // This is redundant with the cache, but we'll skip it cheaply enough. + // This is redundant with the cache, but we'll skip it cheaply enough + // when we encounter it in the module cache scan. + // + // Including it at a lower index in r.roots than the module cache dir + // helps prioritize matches from within existing dependencies. r.roots = append(r.roots, gopathwalk.Root{Path: mod.Dir, Type: gopathwalk.RootModuleCache}) } else { r.roots = append(r.roots, gopathwalk.Root{Path: mod.Dir, Type: gopathwalk.RootOther}) @@ -158,24 +200,40 @@ func (r *ModuleResolver) init() error { addDep(mod) } } + // If provided, share the moduleCacheCache. + // + // TODO(rfindley): The module cache is immutable. However, the loaded + // exports do depend on GOOS and GOARCH. Fortunately, the + // ProcessEnv.buildContext does not adjust these from build.DefaultContext + // (even though it should). So for now, this is OK to share, but we need to + // add logic for handling GOOS/GOARCH. 
+ r.moduleCacheCache = moduleCacheCache r.roots = append(r.roots, gopathwalk.Root{Path: r.moduleCacheDir, Type: gopathwalk.RootModuleCache}) } r.scannedRoots = map[gopathwalk.Root]bool{} if r.moduleCacheCache == nil { - r.moduleCacheCache = &dirInfoCache{ - dirs: map[string]*directoryPackageInfo{}, - listeners: map[*int]cacheListener{}, - } + r.moduleCacheCache = NewDirInfoCache() } - if r.otherCache == nil { - r.otherCache = &dirInfoCache{ - dirs: map[string]*directoryPackageInfo{}, - listeners: map[*int]cacheListener{}, - } - } - r.initialized = true - return nil + r.otherCache = NewDirInfoCache() + return r, nil +} + +// gomodcacheForEnv returns the GOMODCACHE value to use based on the given env +// map, which must have GOMODCACHE and GOPATH populated. +// +// TODO(rfindley): this is defensive refactoring. +// 1. Is this even relevant anymore? Can't we just read GOMODCACHE. +// 2. Use this to separate module cache scanning from other scanning. +func gomodcacheForEnv(goenv map[string]string) string { + if gmc := goenv["GOMODCACHE"]; gmc != "" { + return gmc + } + gopaths := filepath.SplitList(goenv["GOPATH"]) + if len(gopaths) == 0 { + return "" + } + return filepath.Join(gopaths[0], "/pkg/mod") } func (r *ModuleResolver) initAllMods() error { @@ -206,30 +264,86 @@ func (r *ModuleResolver) initAllMods() error { return nil } -func (r *ModuleResolver) ClearForNewScan() { - <-r.scanSema - r.scannedRoots = map[gopathwalk.Root]bool{} - r.otherCache = &dirInfoCache{ - dirs: map[string]*directoryPackageInfo{}, - listeners: map[*int]cacheListener{}, +// ClearForNewScan invalidates the last scan. +// +// It preserves the set of roots, but forgets about the set of directories. +// Though it forgets the set of module cache directories, it remembers their +// contents, since they are assumed to be immutable. 
+func (r *ModuleResolver) ClearForNewScan() Resolver { + <-r.scanSema // acquire r, to guard scannedRoots + r2 := &ModuleResolver{ + env: r.env, + dummyVendorMod: r.dummyVendorMod, + moduleCacheDir: r.moduleCacheDir, + roots: r.roots, + mains: r.mains, + mainByDir: r.mainByDir, + modsByModPath: r.modsByModPath, + + scanSema: make(chan struct{}, 1), + scannedRoots: make(map[gopathwalk.Root]bool), + otherCache: NewDirInfoCache(), + moduleCacheCache: r.moduleCacheCache, } - r.scanSema <- struct{}{} + r2.scanSema <- struct{}{} // r2 must start released + // Invalidate root scans. We don't need to invalidate module cache roots, + // because they are immutable. + // (We don't support a use case where GOMODCACHE is cleaned in the middle of + // e.g. a gopls session: the user must restart gopls to get accurate + // imports.) + // + // Scanning for new directories in GOMODCACHE should be handled elsewhere, + // via a call to ScanModuleCache. + for _, root := range r.roots { + if root.Type == gopathwalk.RootModuleCache && r.scannedRoots[root] { + r2.scannedRoots[root] = true + } + } + r.scanSema <- struct{}{} // release r + return r2 } -func (r *ModuleResolver) ClearForNewMod() { - <-r.scanSema - *r = ModuleResolver{ - env: r.env, - moduleCacheCache: r.moduleCacheCache, - otherCache: r.otherCache, - scanSema: r.scanSema, +// ClearModuleInfo invalidates resolver state that depends on go.mod file +// contents (essentially, the output of go list -m -json ...). +// +// Notably, it does not forget directory contents, which are reset +// asynchronously via ClearForNewScan. +// +// If the ProcessEnv is a GOPATH environment, ClearModuleInfo is a no op. +// +// TODO(rfindley): move this to a new env.go, consolidating ProcessEnv methods. 
+func (e *ProcessEnv) ClearModuleInfo() { + if r, ok := e.resolver.(*ModuleResolver); ok { + resolver, err := newModuleResolver(e, e.ModCache) + if err != nil { + e.resolver = nil + e.resolverErr = err + return + } + + <-r.scanSema // acquire (guards caches) + resolver.moduleCacheCache = r.moduleCacheCache + resolver.otherCache = r.otherCache + r.scanSema <- struct{}{} // release + + e.UpdateResolver(resolver) } - r.init() - r.scanSema <- struct{}{} } -// findPackage returns the module and directory that contains the package at -// the given import path, or returns nil, "" if no module is in scope. +// UpdateResolver sets the resolver for the ProcessEnv to use in imports +// operations. Only for use with the result of [Resolver.ClearForNewScan]. +// +// TODO(rfindley): this awkward API is a result of the (arguably) inverted +// relationship between configuration and state described in the doc comment +// for [ProcessEnv]. +func (e *ProcessEnv) UpdateResolver(r Resolver) { + e.resolver = r + e.resolverErr = nil +} + +// findPackage returns the module and directory from within the main modules +// and their dependencies that contains the package at the given import path, +// or returns nil, "" if no module is in scope. func (r *ModuleResolver) findPackage(importPath string) (*gocommand.ModuleJSON, string) { // This can't find packages in the stdlib, but that's harmless for all // the existing code paths. @@ -295,10 +409,6 @@ func (r *ModuleResolver) cacheStore(info directoryPackageInfo) { } } -func (r *ModuleResolver) cacheKeys() []string { - return append(r.moduleCacheCache.Keys(), r.otherCache.Keys()...) -} - // cachePackageName caches the package name for a dir already in the cache. 
func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (string, error) { if info.rootType == gopathwalk.RootModuleCache { @@ -307,7 +417,7 @@ func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (string, er return r.otherCache.CachePackageName(info) } -func (r *ModuleResolver) cacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) { +func (r *ModuleResolver) cacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []stdlib.Symbol, error) { if info.rootType == gopathwalk.RootModuleCache { return r.moduleCacheCache.CacheExports(ctx, env, info) } @@ -367,15 +477,15 @@ func (r *ModuleResolver) dirIsNestedModule(dir string, mod *gocommand.ModuleJSON return modDir != mod.Dir } -func (r *ModuleResolver) modInfo(dir string) (modDir string, modName string) { - readModName := func(modFile string) string { - modBytes, err := os.ReadFile(modFile) - if err != nil { - return "" - } - return modulePath(modBytes) +func readModName(modFile string) string { + modBytes, err := os.ReadFile(modFile) + if err != nil { + return "" } + return modulePath(modBytes) +} +func (r *ModuleResolver) modInfo(dir string) (modDir, modName string) { if r.dirInModuleCache(dir) { if matches := modCacheRegexp.FindStringSubmatch(dir); len(matches) == 3 { index := strings.Index(dir, matches[1]+"@"+matches[2]) @@ -409,11 +519,9 @@ func (r *ModuleResolver) dirInModuleCache(dir string) bool { } func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { - if err := r.init(); err != nil { - return nil, err - } names := map[string]string{} for _, path := range importPaths { + // TODO(rfindley): shouldn't this use the dirInfoCache? 
_, packageDir := r.findPackage(path) if packageDir == "" { continue @@ -431,10 +539,6 @@ func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error ctx, done := event.Start(ctx, "imports.ModuleResolver.scan") defer done() - if err := r.init(); err != nil { - return err - } - processDir := func(info directoryPackageInfo) { // Skip this directory if we were not able to get the package information successfully. if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil { @@ -444,18 +548,18 @@ func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error if err != nil { return } - if !callback.dirFound(pkg) { return } + pkg.packageName, err = r.cachePackageName(info) if err != nil { return } - if !callback.packageNameLoaded(pkg) { return } + _, exports, err := r.loadExports(ctx, pkg, false) if err != nil { return @@ -494,7 +598,6 @@ func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error return packageScanned } - // Add anything new to the cache, and process it if we're still listening. add := func(root gopathwalk.Root, dir string) { r.cacheStore(r.scanDirForPackage(root, dir)) } @@ -509,9 +612,9 @@ func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error select { case <-ctx.Done(): return - case <-r.scanSema: + case <-r.scanSema: // acquire } - defer func() { r.scanSema <- struct{}{} }() + defer func() { r.scanSema <- struct{}{} }() // release // We have the lock on r.scannedRoots, and no other scans can run. 
for _, root := range roots { if ctx.Err() != nil { @@ -534,7 +637,7 @@ func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error } func (r *ModuleResolver) scoreImportPath(ctx context.Context, path string) float64 { - if _, ok := stdlib[path]; ok { + if stdlib.HasPackage(path) { return MaxRelevance } mod, _ := r.findPackage(path) @@ -612,10 +715,7 @@ func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) { return res, nil } -func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) { - if err := r.init(); err != nil { - return "", nil, err - } +func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []stdlib.Symbol, error) { if info, ok := r.cacheLoad(pkg.dir); ok && !includeTest { return r.cacheExports(ctx, r.env, info) } diff --git a/internal/imports/mod_cache.go b/internal/imports/mod_cache.go index 45690abbb4f..b1192696b28 100644 --- a/internal/imports/mod_cache.go +++ b/internal/imports/mod_cache.go @@ -7,9 +7,14 @@ package imports import ( "context" "fmt" + "path" + "path/filepath" + "strings" "sync" + "golang.org/x/mod/module" "golang.org/x/tools/internal/gopathwalk" + "golang.org/x/tools/internal/stdlib" ) // To find packages to import, the resolver needs to know about all of @@ -39,6 +44,8 @@ const ( exportsLoaded ) +// directoryPackageInfo holds (possibly incomplete) information about packages +// contained in a given directory. type directoryPackageInfo struct { // status indicates the extent to which this struct has been filled in. status directoryPackageStatus @@ -63,8 +70,11 @@ type directoryPackageInfo struct { packageName string // the package name, as declared in the source. // Set when status >= exportsLoaded. - - exports []string + // TODO(rfindley): it's hard to see this, but exports depend implicitly on + // the default build context GOOS and GOARCH. 
+ // + // We can make this explicit, and key exports by GOOS, GOARCH. + exports []stdlib.Symbol } // reachedStatus returns true when info has a status at least target and any error associated with @@ -79,7 +89,7 @@ func (info *directoryPackageInfo) reachedStatus(target directoryPackageStatus) ( return true, nil } -// dirInfoCache is a concurrency safe map for storing information about +// DirInfoCache is a concurrency-safe map for storing information about // directories that may contain packages. // // The information in this cache is built incrementally. Entries are initialized in scan. @@ -92,21 +102,26 @@ func (info *directoryPackageInfo) reachedStatus(target directoryPackageStatus) ( // The information in the cache is not expected to change for the cache's // lifetime, so there is no protection against competing writes. Users should // take care not to hold the cache across changes to the underlying files. -// -// TODO(suzmue): consider other concurrency strategies and data structures (RWLocks, sync.Map, etc) -type dirInfoCache struct { +type DirInfoCache struct { mu sync.Mutex // dirs stores information about packages in directories, keyed by absolute path. dirs map[string]*directoryPackageInfo listeners map[*int]cacheListener } +func NewDirInfoCache() *DirInfoCache { + return &DirInfoCache{ + dirs: make(map[string]*directoryPackageInfo), + listeners: make(map[*int]cacheListener), + } +} + type cacheListener func(directoryPackageInfo) // ScanAndListen calls listener on all the items in the cache, and on anything // newly added. The returned stop function waits for all in-flight callbacks to // finish and blocks new ones. 
-func (d *dirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener) func() { +func (d *DirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener) func() { ctx, cancel := context.WithCancel(ctx) // Flushing out all the callbacks is tricky without knowing how many there @@ -162,8 +177,10 @@ func (d *dirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener } // Store stores the package info for dir. -func (d *dirInfoCache) Store(dir string, info directoryPackageInfo) { +func (d *DirInfoCache) Store(dir string, info directoryPackageInfo) { d.mu.Lock() + // TODO(rfindley, golang/go#59216): should we overwrite an existing entry? + // That seems incorrect as the cache should be idempotent. _, old := d.dirs[dir] d.dirs[dir] = &info var listeners []cacheListener @@ -180,7 +197,7 @@ func (d *dirInfoCache) Store(dir string, info directoryPackageInfo) { } // Load returns a copy of the directoryPackageInfo for absolute directory dir. -func (d *dirInfoCache) Load(dir string) (directoryPackageInfo, bool) { +func (d *DirInfoCache) Load(dir string) (directoryPackageInfo, bool) { d.mu.Lock() defer d.mu.Unlock() info, ok := d.dirs[dir] @@ -191,7 +208,7 @@ func (d *dirInfoCache) Load(dir string) (directoryPackageInfo, bool) { } // Keys returns the keys currently present in d. 
-func (d *dirInfoCache) Keys() (keys []string) { +func (d *DirInfoCache) Keys() (keys []string) { d.mu.Lock() defer d.mu.Unlock() for key := range d.dirs { @@ -200,7 +217,7 @@ func (d *dirInfoCache) Keys() (keys []string) { return keys } -func (d *dirInfoCache) CachePackageName(info directoryPackageInfo) (string, error) { +func (d *DirInfoCache) CachePackageName(info directoryPackageInfo) (string, error) { if loaded, err := info.reachedStatus(nameLoaded); loaded { return info.packageName, err } @@ -213,7 +230,7 @@ func (d *dirInfoCache) CachePackageName(info directoryPackageInfo) (string, erro return info.packageName, info.err } -func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) { +func (d *DirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []stdlib.Symbol, error) { if reached, _ := info.reachedStatus(exportsLoaded); reached { return info.packageName, info.exports, info.err } @@ -234,3 +251,81 @@ func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info d d.Store(info.dir, info) return info.packageName, info.exports, info.err } + +// ScanModuleCache walks the given directory, which must be a GOMODCACHE value, +// for directory package information, storing the results in cache. +func ScanModuleCache(dir string, cache *DirInfoCache, logf func(string, ...any)) { + // Note(rfindley): it's hard to see, but this function attempts to implement + // just the side effects on cache of calling PrimeCache with a ProcessEnv + // that has the given dir as its GOMODCACHE. + // + // Teasing out the control flow, we see that we can avoid any handling of + // vendor/ and can infer module info entirely from the path, simplifying the + // logic here. 
+ + root := gopathwalk.Root{ + Path: filepath.Clean(dir), + Type: gopathwalk.RootModuleCache, + } + + directoryInfo := func(root gopathwalk.Root, dir string) directoryPackageInfo { + // This is a copy of ModuleResolver.scanDirForPackage, trimmed down to + // logic that applies to a module cache directory. + + subdir := "" + if dir != root.Path { + subdir = dir[len(root.Path)+len("/"):] + } + + matches := modCacheRegexp.FindStringSubmatch(subdir) + if len(matches) == 0 { + return directoryPackageInfo{ + status: directoryScanned, + err: fmt.Errorf("invalid module cache path: %v", subdir), + } + } + modPath, err := module.UnescapePath(filepath.ToSlash(matches[1])) + if err != nil { + if logf != nil { + logf("decoding module cache path %q: %v", subdir, err) + } + return directoryPackageInfo{ + status: directoryScanned, + err: fmt.Errorf("decoding module cache path %q: %v", subdir, err), + } + } + importPath := path.Join(modPath, filepath.ToSlash(matches[3])) + index := strings.Index(dir, matches[1]+"@"+matches[2]) + modDir := filepath.Join(dir[:index], matches[1]+"@"+matches[2]) + modName := readModName(filepath.Join(modDir, "go.mod")) + return directoryPackageInfo{ + status: directoryScanned, + dir: dir, + rootType: root.Type, + nonCanonicalImportPath: importPath, + moduleDir: modDir, + moduleName: modName, + } + } + + add := func(root gopathwalk.Root, dir string) { + info := directoryInfo(root, dir) + cache.Store(info.dir, info) + } + + skip := func(_ gopathwalk.Root, dir string) bool { + // Skip directories that have already been scanned. + // + // Note that gopathwalk only adds "package" directories, which must contain + // a .go file, and all such package directories in the module cache are + // immutable. So if we can load a dir, it can be skipped. 
+ info, ok := cache.Load(dir) + if !ok { + return false + } + packageScanned, _ := info.reachedStatus(directoryScanned) + return packageScanned + } + + gopathwalk.WalkSkip([]gopathwalk.Root{root}, add, skip, gopathwalk.Options{Logf: logf, ModulesEnabled: true}) +} diff --git a/internal/imports/mod_cache_test.go b/internal/imports/mod_cache_test.go index 39c691e5330..3af85fb7f56 100644 --- a/internal/imports/mod_cache_test.go +++ b/internal/imports/mod_cache_test.go @@ -6,9 +6,12 @@ package imports import ( "fmt" + "os/exec" "reflect" "sort" + "strings" "testing" + "time" ) func TestDirectoryPackageInfoReachedStatus(t *testing.T) { @@ -58,9 +61,7 @@ func TestDirectoryPackageInfoReachedStatus(t *testing.T) { } func TestModCacheInfo(t *testing.T) { - m := &dirInfoCache{ - dirs: make(map[string]*directoryPackageInfo), - } + m := NewDirInfoCache() dirInfo := []struct { dir string @@ -124,3 +125,20 @@ func TestModCacheInfo(t *testing.T) { } } } + +func BenchmarkScanModuleCache(b *testing.B) { + output, err := exec.Command("go", "env", "GOMODCACHE").Output() + if err != nil { + b.Fatal(err) + } + gomodcache := strings.TrimSpace(string(output)) + cache := NewDirInfoCache() + start := time.Now() + ScanModuleCache(gomodcache, cache, nil) + b.Logf("initial scan took %v", time.Since(start)) + b.ResetTimer() + + for i := 0; i < b.N; i++ { + ScanModuleCache(gomodcache, cache, nil) + } +} diff --git a/internal/imports/mod_test.go b/internal/imports/mod_test.go index 26dac639062..e65104cbf2e 100644 --- a/internal/imports/mod_test.go +++ b/internal/imports/mod_test.go @@ -17,6 +17,7 @@ import ( "strings" "sync" "testing" + "time" "golang.org/x/mod/module" "golang.org/x/tools/internal/gocommand" @@ -93,7 +94,7 @@ package z mt.assertFound("y", "y") - scan, err := scanToSlice(mt.resolver, nil) + scan, err := scanToSlice(mt.env.resolver, nil) if err != nil { t.Fatal(err) } @@ -212,7 +213,7 @@ import _ "rsc.io/quote" } // Uninitialize the go.mod dependent cached information and make 
sure it still finds the package. - mt.resolver.ClearForNewMod() + mt.env.ClearModuleInfo() mt.assertScanFinds("rsc.io/quote", "quote") } @@ -241,8 +242,9 @@ import _ "rsc.io/sampler" } // Clear out the resolver's cache, since we've changed the environment. - mt.resolver = newModuleResolver(mt.env) mt.env.Env["GOFLAGS"] = "-mod=vendor" + mt.env.ClearModuleInfo() + mt.env.UpdateResolver(mt.env.resolver.ClearForNewScan()) mt.assertModuleFoundInDir("rsc.io/sampler", "sampler", `/vendor/`) } @@ -269,6 +271,10 @@ import _ "rsc.io/sampler" if testenv.Go1Point() >= 14 { wantDir = `/vendor/` } + + // Clear out the resolver's module info, since we've changed the environment. + // (the presence of a /vendor directory affects `go list -m`). + mt.env.ClearModuleInfo() mt.assertModuleFoundInDir("rsc.io/sampler", "sampler", wantDir) } @@ -553,8 +559,6 @@ package v // Tests that go.work files are respected. func TestModWorkspace(t *testing.T) { - testenv.NeedsGo1Point(t, 18) - mt := setup(t, nil, ` -- go.work -- go 1.18 @@ -589,8 +593,6 @@ package b // respected and that a wildcard replace in go.work overrides a versioned replace // in go.mod. func TestModWorkspaceReplace(t *testing.T) { - testenv.NeedsGo1Point(t, 18) - mt := setup(t, nil, ` -- go.work -- use m @@ -648,8 +650,6 @@ func G() { // Tests a case where conflicting replaces are overridden by a replace // in the go.work file. func TestModWorkspaceReplaceOverride(t *testing.T) { - testenv.NeedsGo1Point(t, 18) - mt := setup(t, nil, `-- go.work -- use m use n @@ -713,8 +713,6 @@ func G() { // workspaces with module pruning. This is based on the // cmd/go mod_prune_all script test. 
func TestModWorkspacePrune(t *testing.T) { - testenv.NeedsGo1Point(t, 18) - mt := setup(t, nil, ` -- go.work -- go 1.18 @@ -900,7 +898,7 @@ package x func (t *modTest) assertFound(importPath, pkgName string) (string, *pkg) { t.Helper() - names, err := t.resolver.loadPackageNames([]string{importPath}, t.env.WorkingDir) + names, err := t.env.resolver.loadPackageNames([]string{importPath}, t.env.WorkingDir) if err != nil { t.Errorf("loading package name for %v: %v", importPath, err) } @@ -909,13 +907,13 @@ func (t *modTest) assertFound(importPath, pkgName string) (string, *pkg) { } pkg := t.assertScanFinds(importPath, pkgName) - _, foundDir := t.resolver.findPackage(importPath) + _, foundDir := t.env.resolver.(*ModuleResolver).findPackage(importPath) return foundDir, pkg } func (t *modTest) assertScanFinds(importPath, pkgName string) *pkg { t.Helper() - scan, err := scanToSlice(t.resolver, nil) + scan, err := scanToSlice(t.env.resolver, nil) if err != nil { t.Errorf("scan failed: %v", err) } @@ -983,10 +981,9 @@ var proxyDir string type modTest struct { *testing.T - env *ProcessEnv - gopath string - resolver *ModuleResolver - cleanup func() + env *ProcessEnv + gopath string + cleanup func() } // setup builds a test environment from a txtar and supporting modules @@ -1046,16 +1043,20 @@ func setup(t *testing.T, extraEnv map[string]string, main, wd string) *modTest { } } - resolver, err := env.GetResolver() - if err != nil { + // Ensure the resolver is set for tests that (unsafely) access env.resolver + // directly. + // + // TODO(rfindley): fix this after addressing the TODO in the ProcessEnv + // docstring. 
+ if _, err := env.GetResolver(); err != nil { t.Fatal(err) } + return &modTest{ - T: t, - gopath: env.Env["GOPATH"], - env: env, - resolver: resolver.(*ModuleResolver), - cleanup: func() { removeDir(dir) }, + T: t, + gopath: env.Env["GOPATH"], + env: env, + cleanup: func() { removeDir(dir) }, } } @@ -1098,7 +1099,7 @@ func writeProxyModule(base, arPath string) error { arName := filepath.Base(arPath) i := strings.LastIndex(arName, "_v") ver := strings.TrimSuffix(arName[i+1:], ".txt") - modDir := strings.Replace(arName[:i], "_", "/", -1) + modDir := strings.ReplaceAll(arName[:i], "_", "/") modPath, err := module.UnescapePath(modDir) if err != nil { return err @@ -1183,7 +1184,7 @@ import _ "rsc.io/quote" want := filepath.Join(mt.gopath, "pkg/mod", "rsc.io/quote@v1.5.2") found := mt.assertScanFinds("rsc.io/quote", "quote") - modDir, _ := mt.resolver.modInfo(found.dir) + modDir, _ := mt.env.resolver.(*ModuleResolver).modInfo(found.dir) if modDir != want { t.Errorf("expected: %s, got: %s", want, modDir) } @@ -1288,20 +1289,37 @@ import ( } } -func BenchmarkScanModCache(b *testing.B) { +func BenchmarkModuleResolver_RescanModCache(b *testing.B) { env := &ProcessEnv{ GocmdRunner: &gocommand.Runner{}, - Logf: b.Logf, + // Uncomment for verbose logging (too verbose to enable by default). 
+ // Logf: b.Logf, } exclude := []gopathwalk.RootType{gopathwalk.RootGOROOT} resolver, err := env.GetResolver() if err != nil { b.Fatal(err) } + start := time.Now() scanToSlice(resolver, exclude) + b.Logf("warming the mod cache took %v", time.Since(start)) b.ResetTimer() for i := 0; i < b.N; i++ { scanToSlice(resolver, exclude) - resolver.(*ModuleResolver).ClearForNewScan() + resolver = resolver.ClearForNewScan() + } +} + +func BenchmarkModuleResolver_InitialScan(b *testing.B) { + for i := 0; i < b.N; i++ { + env := &ProcessEnv{ + GocmdRunner: &gocommand.Runner{}, + } + exclude := []gopathwalk.RootType{gopathwalk.RootGOROOT} + resolver, err := env.GetResolver() + if err != nil { + b.Fatal(err) + } + scanToSlice(resolver, exclude) } } diff --git a/internal/imports/sortimports.go b/internal/imports/sortimports.go index 1a0a7ebd9e4..da8194fd965 100644 --- a/internal/imports/sortimports.go +++ b/internal/imports/sortimports.go @@ -18,7 +18,7 @@ import ( // sortImports sorts runs of consecutive import lines in import blocks in f. // It also removes duplicate imports when it is possible to do so without data loss. // -// It may mutate the token.File. +// It may mutate the token.File and the ast.File. func sortImports(localPrefix string, tokFile *token.File, f *ast.File) { for i, d := range f.Decls { d, ok := d.(*ast.GenDecl) diff --git a/internal/imports/zstdlib.go b/internal/imports/zstdlib.go deleted file mode 100644 index 9f992c2bec8..00000000000 --- a/internal/imports/zstdlib.go +++ /dev/null @@ -1,11345 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by mkstdlib.go. DO NOT EDIT. 
- -package imports - -var stdlib = map[string][]string{ - "archive/tar": { - "ErrFieldTooLong", - "ErrHeader", - "ErrInsecurePath", - "ErrWriteAfterClose", - "ErrWriteTooLong", - "FileInfoHeader", - "Format", - "FormatGNU", - "FormatPAX", - "FormatUSTAR", - "FormatUnknown", - "Header", - "NewReader", - "NewWriter", - "Reader", - "TypeBlock", - "TypeChar", - "TypeCont", - "TypeDir", - "TypeFifo", - "TypeGNULongLink", - "TypeGNULongName", - "TypeGNUSparse", - "TypeLink", - "TypeReg", - "TypeRegA", - "TypeSymlink", - "TypeXGlobalHeader", - "TypeXHeader", - "Writer", - }, - "archive/zip": { - "Compressor", - "Decompressor", - "Deflate", - "ErrAlgorithm", - "ErrChecksum", - "ErrFormat", - "ErrInsecurePath", - "File", - "FileHeader", - "FileInfoHeader", - "NewReader", - "NewWriter", - "OpenReader", - "ReadCloser", - "Reader", - "RegisterCompressor", - "RegisterDecompressor", - "Store", - "Writer", - }, - "bufio": { - "ErrAdvanceTooFar", - "ErrBadReadCount", - "ErrBufferFull", - "ErrFinalToken", - "ErrInvalidUnreadByte", - "ErrInvalidUnreadRune", - "ErrNegativeAdvance", - "ErrNegativeCount", - "ErrTooLong", - "MaxScanTokenSize", - "NewReadWriter", - "NewReader", - "NewReaderSize", - "NewScanner", - "NewWriter", - "NewWriterSize", - "ReadWriter", - "Reader", - "ScanBytes", - "ScanLines", - "ScanRunes", - "ScanWords", - "Scanner", - "SplitFunc", - "Writer", - }, - "bytes": { - "Buffer", - "Clone", - "Compare", - "Contains", - "ContainsAny", - "ContainsFunc", - "ContainsRune", - "Count", - "Cut", - "CutPrefix", - "CutSuffix", - "Equal", - "EqualFold", - "ErrTooLarge", - "Fields", - "FieldsFunc", - "HasPrefix", - "HasSuffix", - "Index", - "IndexAny", - "IndexByte", - "IndexFunc", - "IndexRune", - "Join", - "LastIndex", - "LastIndexAny", - "LastIndexByte", - "LastIndexFunc", - "Map", - "MinRead", - "NewBuffer", - "NewBufferString", - "NewReader", - "Reader", - "Repeat", - "Replace", - "ReplaceAll", - "Runes", - "Split", - "SplitAfter", - "SplitAfterN", - "SplitN", - "Title", - 
"ToLower", - "ToLowerSpecial", - "ToTitle", - "ToTitleSpecial", - "ToUpper", - "ToUpperSpecial", - "ToValidUTF8", - "Trim", - "TrimFunc", - "TrimLeft", - "TrimLeftFunc", - "TrimPrefix", - "TrimRight", - "TrimRightFunc", - "TrimSpace", - "TrimSuffix", - }, - "cmp": { - "Compare", - "Less", - "Ordered", - }, - "compress/bzip2": { - "NewReader", - "StructuralError", - }, - "compress/flate": { - "BestCompression", - "BestSpeed", - "CorruptInputError", - "DefaultCompression", - "HuffmanOnly", - "InternalError", - "NewReader", - "NewReaderDict", - "NewWriter", - "NewWriterDict", - "NoCompression", - "ReadError", - "Reader", - "Resetter", - "WriteError", - "Writer", - }, - "compress/gzip": { - "BestCompression", - "BestSpeed", - "DefaultCompression", - "ErrChecksum", - "ErrHeader", - "Header", - "HuffmanOnly", - "NewReader", - "NewWriter", - "NewWriterLevel", - "NoCompression", - "Reader", - "Writer", - }, - "compress/lzw": { - "LSB", - "MSB", - "NewReader", - "NewWriter", - "Order", - "Reader", - "Writer", - }, - "compress/zlib": { - "BestCompression", - "BestSpeed", - "DefaultCompression", - "ErrChecksum", - "ErrDictionary", - "ErrHeader", - "HuffmanOnly", - "NewReader", - "NewReaderDict", - "NewWriter", - "NewWriterLevel", - "NewWriterLevelDict", - "NoCompression", - "Resetter", - "Writer", - }, - "container/heap": { - "Fix", - "Init", - "Interface", - "Pop", - "Push", - "Remove", - }, - "container/list": { - "Element", - "List", - "New", - }, - "container/ring": { - "New", - "Ring", - }, - "context": { - "AfterFunc", - "Background", - "CancelCauseFunc", - "CancelFunc", - "Canceled", - "Cause", - "Context", - "DeadlineExceeded", - "TODO", - "WithCancel", - "WithCancelCause", - "WithDeadline", - "WithDeadlineCause", - "WithTimeout", - "WithTimeoutCause", - "WithValue", - "WithoutCancel", - }, - "crypto": { - "BLAKE2b_256", - "BLAKE2b_384", - "BLAKE2b_512", - "BLAKE2s_256", - "Decrypter", - "DecrypterOpts", - "Hash", - "MD4", - "MD5", - "MD5SHA1", - "PrivateKey", - 
"PublicKey", - "RIPEMD160", - "RegisterHash", - "SHA1", - "SHA224", - "SHA256", - "SHA384", - "SHA3_224", - "SHA3_256", - "SHA3_384", - "SHA3_512", - "SHA512", - "SHA512_224", - "SHA512_256", - "Signer", - "SignerOpts", - }, - "crypto/aes": { - "BlockSize", - "KeySizeError", - "NewCipher", - }, - "crypto/cipher": { - "AEAD", - "Block", - "BlockMode", - "NewCBCDecrypter", - "NewCBCEncrypter", - "NewCFBDecrypter", - "NewCFBEncrypter", - "NewCTR", - "NewGCM", - "NewGCMWithNonceSize", - "NewGCMWithTagSize", - "NewOFB", - "Stream", - "StreamReader", - "StreamWriter", - }, - "crypto/des": { - "BlockSize", - "KeySizeError", - "NewCipher", - "NewTripleDESCipher", - }, - "crypto/dsa": { - "ErrInvalidPublicKey", - "GenerateKey", - "GenerateParameters", - "L1024N160", - "L2048N224", - "L2048N256", - "L3072N256", - "ParameterSizes", - "Parameters", - "PrivateKey", - "PublicKey", - "Sign", - "Verify", - }, - "crypto/ecdh": { - "Curve", - "P256", - "P384", - "P521", - "PrivateKey", - "PublicKey", - "X25519", - }, - "crypto/ecdsa": { - "GenerateKey", - "PrivateKey", - "PublicKey", - "Sign", - "SignASN1", - "Verify", - "VerifyASN1", - }, - "crypto/ed25519": { - "GenerateKey", - "NewKeyFromSeed", - "Options", - "PrivateKey", - "PrivateKeySize", - "PublicKey", - "PublicKeySize", - "SeedSize", - "Sign", - "SignatureSize", - "Verify", - "VerifyWithOptions", - }, - "crypto/elliptic": { - "Curve", - "CurveParams", - "GenerateKey", - "Marshal", - "MarshalCompressed", - "P224", - "P256", - "P384", - "P521", - "Unmarshal", - "UnmarshalCompressed", - }, - "crypto/hmac": { - "Equal", - "New", - }, - "crypto/md5": { - "BlockSize", - "New", - "Size", - "Sum", - }, - "crypto/rand": { - "Int", - "Prime", - "Read", - "Reader", - }, - "crypto/rc4": { - "Cipher", - "KeySizeError", - "NewCipher", - }, - "crypto/rsa": { - "CRTValue", - "DecryptOAEP", - "DecryptPKCS1v15", - "DecryptPKCS1v15SessionKey", - "EncryptOAEP", - "EncryptPKCS1v15", - "ErrDecryption", - "ErrMessageTooLong", - "ErrVerification", 
- "GenerateKey", - "GenerateMultiPrimeKey", - "OAEPOptions", - "PKCS1v15DecryptOptions", - "PSSOptions", - "PSSSaltLengthAuto", - "PSSSaltLengthEqualsHash", - "PrecomputedValues", - "PrivateKey", - "PublicKey", - "SignPKCS1v15", - "SignPSS", - "VerifyPKCS1v15", - "VerifyPSS", - }, - "crypto/sha1": { - "BlockSize", - "New", - "Size", - "Sum", - }, - "crypto/sha256": { - "BlockSize", - "New", - "New224", - "Size", - "Size224", - "Sum224", - "Sum256", - }, - "crypto/sha512": { - "BlockSize", - "New", - "New384", - "New512_224", - "New512_256", - "Size", - "Size224", - "Size256", - "Size384", - "Sum384", - "Sum512", - "Sum512_224", - "Sum512_256", - }, - "crypto/subtle": { - "ConstantTimeByteEq", - "ConstantTimeCompare", - "ConstantTimeCopy", - "ConstantTimeEq", - "ConstantTimeLessOrEq", - "ConstantTimeSelect", - "XORBytes", - }, - "crypto/tls": { - "AlertError", - "Certificate", - "CertificateRequestInfo", - "CertificateVerificationError", - "CipherSuite", - "CipherSuiteName", - "CipherSuites", - "Client", - "ClientAuthType", - "ClientHelloInfo", - "ClientSessionCache", - "ClientSessionState", - "Config", - "Conn", - "ConnectionState", - "CurveID", - "CurveP256", - "CurveP384", - "CurveP521", - "Dial", - "DialWithDialer", - "Dialer", - "ECDSAWithP256AndSHA256", - "ECDSAWithP384AndSHA384", - "ECDSAWithP521AndSHA512", - "ECDSAWithSHA1", - "Ed25519", - "InsecureCipherSuites", - "Listen", - "LoadX509KeyPair", - "NewLRUClientSessionCache", - "NewListener", - "NewResumptionState", - "NoClientCert", - "PKCS1WithSHA1", - "PKCS1WithSHA256", - "PKCS1WithSHA384", - "PKCS1WithSHA512", - "PSSWithSHA256", - "PSSWithSHA384", - "PSSWithSHA512", - "ParseSessionState", - "QUICClient", - "QUICConfig", - "QUICConn", - "QUICEncryptionLevel", - "QUICEncryptionLevelApplication", - "QUICEncryptionLevelEarly", - "QUICEncryptionLevelHandshake", - "QUICEncryptionLevelInitial", - "QUICEvent", - "QUICEventKind", - "QUICHandshakeDone", - "QUICNoEvent", - "QUICRejectedEarlyData", - "QUICServer", - 
"QUICSessionTicketOptions", - "QUICSetReadSecret", - "QUICSetWriteSecret", - "QUICTransportParameters", - "QUICTransportParametersRequired", - "QUICWriteData", - "RecordHeaderError", - "RenegotiateFreelyAsClient", - "RenegotiateNever", - "RenegotiateOnceAsClient", - "RenegotiationSupport", - "RequestClientCert", - "RequireAndVerifyClientCert", - "RequireAnyClientCert", - "Server", - "SessionState", - "SignatureScheme", - "TLS_AES_128_GCM_SHA256", - "TLS_AES_256_GCM_SHA384", - "TLS_CHACHA20_POLY1305_SHA256", - "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", - "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", - "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", - "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", - "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", - "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", - "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", - "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA", - "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA", - "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", - "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", - "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", - "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", - "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", - "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", - "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", - "TLS_ECDHE_RSA_WITH_RC4_128_SHA", - "TLS_FALLBACK_SCSV", - "TLS_RSA_WITH_3DES_EDE_CBC_SHA", - "TLS_RSA_WITH_AES_128_CBC_SHA", - "TLS_RSA_WITH_AES_128_CBC_SHA256", - "TLS_RSA_WITH_AES_128_GCM_SHA256", - "TLS_RSA_WITH_AES_256_CBC_SHA", - "TLS_RSA_WITH_AES_256_GCM_SHA384", - "TLS_RSA_WITH_RC4_128_SHA", - "VerifyClientCertIfGiven", - "VersionName", - "VersionSSL30", - "VersionTLS10", - "VersionTLS11", - "VersionTLS12", - "VersionTLS13", - "X25519", - "X509KeyPair", - }, - "crypto/x509": { - "CANotAuthorizedForExtKeyUsage", - "CANotAuthorizedForThisName", - "CertPool", - "Certificate", - "CertificateInvalidError", - "CertificateRequest", - "ConstraintViolationError", - "CreateCertificate", - "CreateCertificateRequest", - "CreateRevocationList", - "DSA", - "DSAWithSHA1", - 
"DSAWithSHA256", - "DecryptPEMBlock", - "ECDSA", - "ECDSAWithSHA1", - "ECDSAWithSHA256", - "ECDSAWithSHA384", - "ECDSAWithSHA512", - "Ed25519", - "EncryptPEMBlock", - "ErrUnsupportedAlgorithm", - "Expired", - "ExtKeyUsage", - "ExtKeyUsageAny", - "ExtKeyUsageClientAuth", - "ExtKeyUsageCodeSigning", - "ExtKeyUsageEmailProtection", - "ExtKeyUsageIPSECEndSystem", - "ExtKeyUsageIPSECTunnel", - "ExtKeyUsageIPSECUser", - "ExtKeyUsageMicrosoftCommercialCodeSigning", - "ExtKeyUsageMicrosoftKernelCodeSigning", - "ExtKeyUsageMicrosoftServerGatedCrypto", - "ExtKeyUsageNetscapeServerGatedCrypto", - "ExtKeyUsageOCSPSigning", - "ExtKeyUsageServerAuth", - "ExtKeyUsageTimeStamping", - "HostnameError", - "IncompatibleUsage", - "IncorrectPasswordError", - "InsecureAlgorithmError", - "InvalidReason", - "IsEncryptedPEMBlock", - "KeyUsage", - "KeyUsageCRLSign", - "KeyUsageCertSign", - "KeyUsageContentCommitment", - "KeyUsageDataEncipherment", - "KeyUsageDecipherOnly", - "KeyUsageDigitalSignature", - "KeyUsageEncipherOnly", - "KeyUsageKeyAgreement", - "KeyUsageKeyEncipherment", - "MD2WithRSA", - "MD5WithRSA", - "MarshalECPrivateKey", - "MarshalPKCS1PrivateKey", - "MarshalPKCS1PublicKey", - "MarshalPKCS8PrivateKey", - "MarshalPKIXPublicKey", - "NameConstraintsWithoutSANs", - "NameMismatch", - "NewCertPool", - "NotAuthorizedToSign", - "PEMCipher", - "PEMCipher3DES", - "PEMCipherAES128", - "PEMCipherAES192", - "PEMCipherAES256", - "PEMCipherDES", - "ParseCRL", - "ParseCertificate", - "ParseCertificateRequest", - "ParseCertificates", - "ParseDERCRL", - "ParseECPrivateKey", - "ParsePKCS1PrivateKey", - "ParsePKCS1PublicKey", - "ParsePKCS8PrivateKey", - "ParsePKIXPublicKey", - "ParseRevocationList", - "PublicKeyAlgorithm", - "PureEd25519", - "RSA", - "RevocationList", - "RevocationListEntry", - "SHA1WithRSA", - "SHA256WithRSA", - "SHA256WithRSAPSS", - "SHA384WithRSA", - "SHA384WithRSAPSS", - "SHA512WithRSA", - "SHA512WithRSAPSS", - "SetFallbackRoots", - "SignatureAlgorithm", - "SystemCertPool", 
- "SystemRootsError", - "TooManyConstraints", - "TooManyIntermediates", - "UnconstrainedName", - "UnhandledCriticalExtension", - "UnknownAuthorityError", - "UnknownPublicKeyAlgorithm", - "UnknownSignatureAlgorithm", - "VerifyOptions", - }, - "crypto/x509/pkix": { - "AlgorithmIdentifier", - "AttributeTypeAndValue", - "AttributeTypeAndValueSET", - "CertificateList", - "Extension", - "Name", - "RDNSequence", - "RelativeDistinguishedNameSET", - "RevokedCertificate", - "TBSCertificateList", - }, - "database/sql": { - "ColumnType", - "Conn", - "DB", - "DBStats", - "Drivers", - "ErrConnDone", - "ErrNoRows", - "ErrTxDone", - "IsolationLevel", - "LevelDefault", - "LevelLinearizable", - "LevelReadCommitted", - "LevelReadUncommitted", - "LevelRepeatableRead", - "LevelSerializable", - "LevelSnapshot", - "LevelWriteCommitted", - "Named", - "NamedArg", - "NullBool", - "NullByte", - "NullFloat64", - "NullInt16", - "NullInt32", - "NullInt64", - "NullString", - "NullTime", - "Open", - "OpenDB", - "Out", - "RawBytes", - "Register", - "Result", - "Row", - "Rows", - "Scanner", - "Stmt", - "Tx", - "TxOptions", - }, - "database/sql/driver": { - "Bool", - "ColumnConverter", - "Conn", - "ConnBeginTx", - "ConnPrepareContext", - "Connector", - "DefaultParameterConverter", - "Driver", - "DriverContext", - "ErrBadConn", - "ErrRemoveArgument", - "ErrSkip", - "Execer", - "ExecerContext", - "Int32", - "IsScanValue", - "IsValue", - "IsolationLevel", - "NamedValue", - "NamedValueChecker", - "NotNull", - "Null", - "Pinger", - "Queryer", - "QueryerContext", - "Result", - "ResultNoRows", - "Rows", - "RowsAffected", - "RowsColumnTypeDatabaseTypeName", - "RowsColumnTypeLength", - "RowsColumnTypeNullable", - "RowsColumnTypePrecisionScale", - "RowsColumnTypeScanType", - "RowsNextResultSet", - "SessionResetter", - "Stmt", - "StmtExecContext", - "StmtQueryContext", - "String", - "Tx", - "TxOptions", - "Validator", - "Value", - "ValueConverter", - "Valuer", - }, - "debug/buildinfo": { - "BuildInfo", - 
"Read", - "ReadFile", - }, - "debug/dwarf": { - "AddrType", - "ArrayType", - "Attr", - "AttrAbstractOrigin", - "AttrAccessibility", - "AttrAddrBase", - "AttrAddrClass", - "AttrAlignment", - "AttrAllocated", - "AttrArtificial", - "AttrAssociated", - "AttrBaseTypes", - "AttrBinaryScale", - "AttrBitOffset", - "AttrBitSize", - "AttrByteSize", - "AttrCallAllCalls", - "AttrCallAllSourceCalls", - "AttrCallAllTailCalls", - "AttrCallColumn", - "AttrCallDataLocation", - "AttrCallDataValue", - "AttrCallFile", - "AttrCallLine", - "AttrCallOrigin", - "AttrCallPC", - "AttrCallParameter", - "AttrCallReturnPC", - "AttrCallTailCall", - "AttrCallTarget", - "AttrCallTargetClobbered", - "AttrCallValue", - "AttrCalling", - "AttrCommonRef", - "AttrCompDir", - "AttrConstExpr", - "AttrConstValue", - "AttrContainingType", - "AttrCount", - "AttrDataBitOffset", - "AttrDataLocation", - "AttrDataMemberLoc", - "AttrDecimalScale", - "AttrDecimalSign", - "AttrDeclColumn", - "AttrDeclFile", - "AttrDeclLine", - "AttrDeclaration", - "AttrDefaultValue", - "AttrDefaulted", - "AttrDeleted", - "AttrDescription", - "AttrDigitCount", - "AttrDiscr", - "AttrDiscrList", - "AttrDiscrValue", - "AttrDwoName", - "AttrElemental", - "AttrEncoding", - "AttrEndianity", - "AttrEntrypc", - "AttrEnumClass", - "AttrExplicit", - "AttrExportSymbols", - "AttrExtension", - "AttrExternal", - "AttrFrameBase", - "AttrFriend", - "AttrHighpc", - "AttrIdentifierCase", - "AttrImport", - "AttrInline", - "AttrIsOptional", - "AttrLanguage", - "AttrLinkageName", - "AttrLocation", - "AttrLoclistsBase", - "AttrLowerBound", - "AttrLowpc", - "AttrMacroInfo", - "AttrMacros", - "AttrMainSubprogram", - "AttrMutable", - "AttrName", - "AttrNamelistItem", - "AttrNoreturn", - "AttrObjectPointer", - "AttrOrdering", - "AttrPictureString", - "AttrPriority", - "AttrProducer", - "AttrPrototyped", - "AttrPure", - "AttrRanges", - "AttrRank", - "AttrRecursive", - "AttrReference", - "AttrReturnAddr", - "AttrRnglistsBase", - "AttrRvalueReference", - 
"AttrSegment", - "AttrSibling", - "AttrSignature", - "AttrSmall", - "AttrSpecification", - "AttrStartScope", - "AttrStaticLink", - "AttrStmtList", - "AttrStrOffsetsBase", - "AttrStride", - "AttrStrideSize", - "AttrStringLength", - "AttrStringLengthBitSize", - "AttrStringLengthByteSize", - "AttrThreadsScaled", - "AttrTrampoline", - "AttrType", - "AttrUpperBound", - "AttrUseLocation", - "AttrUseUTF8", - "AttrVarParam", - "AttrVirtuality", - "AttrVisibility", - "AttrVtableElemLoc", - "BasicType", - "BoolType", - "CharType", - "Class", - "ClassAddrPtr", - "ClassAddress", - "ClassBlock", - "ClassConstant", - "ClassExprLoc", - "ClassFlag", - "ClassLinePtr", - "ClassLocList", - "ClassLocListPtr", - "ClassMacPtr", - "ClassRangeListPtr", - "ClassReference", - "ClassReferenceAlt", - "ClassReferenceSig", - "ClassRngList", - "ClassRngListsPtr", - "ClassStrOffsetsPtr", - "ClassString", - "ClassStringAlt", - "ClassUnknown", - "CommonType", - "ComplexType", - "Data", - "DecodeError", - "DotDotDotType", - "Entry", - "EnumType", - "EnumValue", - "ErrUnknownPC", - "Field", - "FloatType", - "FuncType", - "IntType", - "LineEntry", - "LineFile", - "LineReader", - "LineReaderPos", - "New", - "Offset", - "PtrType", - "QualType", - "Reader", - "StructField", - "StructType", - "Tag", - "TagAccessDeclaration", - "TagArrayType", - "TagAtomicType", - "TagBaseType", - "TagCallSite", - "TagCallSiteParameter", - "TagCatchDwarfBlock", - "TagClassType", - "TagCoarrayType", - "TagCommonDwarfBlock", - "TagCommonInclusion", - "TagCompileUnit", - "TagCondition", - "TagConstType", - "TagConstant", - "TagDwarfProcedure", - "TagDynamicType", - "TagEntryPoint", - "TagEnumerationType", - "TagEnumerator", - "TagFileType", - "TagFormalParameter", - "TagFriend", - "TagGenericSubrange", - "TagImmutableType", - "TagImportedDeclaration", - "TagImportedModule", - "TagImportedUnit", - "TagInheritance", - "TagInlinedSubroutine", - "TagInterfaceType", - "TagLabel", - "TagLexDwarfBlock", - "TagMember", - "TagModule", 
- "TagMutableType", - "TagNamelist", - "TagNamelistItem", - "TagNamespace", - "TagPackedType", - "TagPartialUnit", - "TagPointerType", - "TagPtrToMemberType", - "TagReferenceType", - "TagRestrictType", - "TagRvalueReferenceType", - "TagSetType", - "TagSharedType", - "TagSkeletonUnit", - "TagStringType", - "TagStructType", - "TagSubprogram", - "TagSubrangeType", - "TagSubroutineType", - "TagTemplateAlias", - "TagTemplateTypeParameter", - "TagTemplateValueParameter", - "TagThrownType", - "TagTryDwarfBlock", - "TagTypeUnit", - "TagTypedef", - "TagUnionType", - "TagUnspecifiedParameters", - "TagUnspecifiedType", - "TagVariable", - "TagVariant", - "TagVariantPart", - "TagVolatileType", - "TagWithStmt", - "Type", - "TypedefType", - "UcharType", - "UintType", - "UnspecifiedType", - "UnsupportedType", - "VoidType", - }, - "debug/elf": { - "ARM_MAGIC_TRAMP_NUMBER", - "COMPRESS_HIOS", - "COMPRESS_HIPROC", - "COMPRESS_LOOS", - "COMPRESS_LOPROC", - "COMPRESS_ZLIB", - "COMPRESS_ZSTD", - "Chdr32", - "Chdr64", - "Class", - "CompressionType", - "DF_1_CONFALT", - "DF_1_DIRECT", - "DF_1_DISPRELDNE", - "DF_1_DISPRELPND", - "DF_1_EDITED", - "DF_1_ENDFILTEE", - "DF_1_GLOBAL", - "DF_1_GLOBAUDIT", - "DF_1_GROUP", - "DF_1_IGNMULDEF", - "DF_1_INITFIRST", - "DF_1_INTERPOSE", - "DF_1_KMOD", - "DF_1_LOADFLTR", - "DF_1_NOCOMMON", - "DF_1_NODEFLIB", - "DF_1_NODELETE", - "DF_1_NODIRECT", - "DF_1_NODUMP", - "DF_1_NOHDR", - "DF_1_NOKSYMS", - "DF_1_NOOPEN", - "DF_1_NORELOC", - "DF_1_NOW", - "DF_1_ORIGIN", - "DF_1_PIE", - "DF_1_SINGLETON", - "DF_1_STUB", - "DF_1_SYMINTPOSE", - "DF_1_TRANS", - "DF_1_WEAKFILTER", - "DF_BIND_NOW", - "DF_ORIGIN", - "DF_STATIC_TLS", - "DF_SYMBOLIC", - "DF_TEXTREL", - "DT_ADDRRNGHI", - "DT_ADDRRNGLO", - "DT_AUDIT", - "DT_AUXILIARY", - "DT_BIND_NOW", - "DT_CHECKSUM", - "DT_CONFIG", - "DT_DEBUG", - "DT_DEPAUDIT", - "DT_ENCODING", - "DT_FEATURE", - "DT_FILTER", - "DT_FINI", - "DT_FINI_ARRAY", - "DT_FINI_ARRAYSZ", - "DT_FLAGS", - "DT_FLAGS_1", - "DT_GNU_CONFLICT", - 
"DT_GNU_CONFLICTSZ", - "DT_GNU_HASH", - "DT_GNU_LIBLIST", - "DT_GNU_LIBLISTSZ", - "DT_GNU_PRELINKED", - "DT_HASH", - "DT_HIOS", - "DT_HIPROC", - "DT_INIT", - "DT_INIT_ARRAY", - "DT_INIT_ARRAYSZ", - "DT_JMPREL", - "DT_LOOS", - "DT_LOPROC", - "DT_MIPS_AUX_DYNAMIC", - "DT_MIPS_BASE_ADDRESS", - "DT_MIPS_COMPACT_SIZE", - "DT_MIPS_CONFLICT", - "DT_MIPS_CONFLICTNO", - "DT_MIPS_CXX_FLAGS", - "DT_MIPS_DELTA_CLASS", - "DT_MIPS_DELTA_CLASSSYM", - "DT_MIPS_DELTA_CLASSSYM_NO", - "DT_MIPS_DELTA_CLASS_NO", - "DT_MIPS_DELTA_INSTANCE", - "DT_MIPS_DELTA_INSTANCE_NO", - "DT_MIPS_DELTA_RELOC", - "DT_MIPS_DELTA_RELOC_NO", - "DT_MIPS_DELTA_SYM", - "DT_MIPS_DELTA_SYM_NO", - "DT_MIPS_DYNSTR_ALIGN", - "DT_MIPS_FLAGS", - "DT_MIPS_GOTSYM", - "DT_MIPS_GP_VALUE", - "DT_MIPS_HIDDEN_GOTIDX", - "DT_MIPS_HIPAGENO", - "DT_MIPS_ICHECKSUM", - "DT_MIPS_INTERFACE", - "DT_MIPS_INTERFACE_SIZE", - "DT_MIPS_IVERSION", - "DT_MIPS_LIBLIST", - "DT_MIPS_LIBLISTNO", - "DT_MIPS_LOCALPAGE_GOTIDX", - "DT_MIPS_LOCAL_GOTIDX", - "DT_MIPS_LOCAL_GOTNO", - "DT_MIPS_MSYM", - "DT_MIPS_OPTIONS", - "DT_MIPS_PERF_SUFFIX", - "DT_MIPS_PIXIE_INIT", - "DT_MIPS_PLTGOT", - "DT_MIPS_PROTECTED_GOTIDX", - "DT_MIPS_RLD_MAP", - "DT_MIPS_RLD_MAP_REL", - "DT_MIPS_RLD_TEXT_RESOLVE_ADDR", - "DT_MIPS_RLD_VERSION", - "DT_MIPS_RWPLT", - "DT_MIPS_SYMBOL_LIB", - "DT_MIPS_SYMTABNO", - "DT_MIPS_TIME_STAMP", - "DT_MIPS_UNREFEXTNO", - "DT_MOVEENT", - "DT_MOVESZ", - "DT_MOVETAB", - "DT_NEEDED", - "DT_NULL", - "DT_PLTGOT", - "DT_PLTPAD", - "DT_PLTPADSZ", - "DT_PLTREL", - "DT_PLTRELSZ", - "DT_POSFLAG_1", - "DT_PPC64_GLINK", - "DT_PPC64_OPD", - "DT_PPC64_OPDSZ", - "DT_PPC64_OPT", - "DT_PPC_GOT", - "DT_PPC_OPT", - "DT_PREINIT_ARRAY", - "DT_PREINIT_ARRAYSZ", - "DT_REL", - "DT_RELA", - "DT_RELACOUNT", - "DT_RELAENT", - "DT_RELASZ", - "DT_RELCOUNT", - "DT_RELENT", - "DT_RELSZ", - "DT_RPATH", - "DT_RUNPATH", - "DT_SONAME", - "DT_SPARC_REGISTER", - "DT_STRSZ", - "DT_STRTAB", - "DT_SYMBOLIC", - "DT_SYMENT", - "DT_SYMINENT", - "DT_SYMINFO", - "DT_SYMINSZ", - 
"DT_SYMTAB", - "DT_SYMTAB_SHNDX", - "DT_TEXTREL", - "DT_TLSDESC_GOT", - "DT_TLSDESC_PLT", - "DT_USED", - "DT_VALRNGHI", - "DT_VALRNGLO", - "DT_VERDEF", - "DT_VERDEFNUM", - "DT_VERNEED", - "DT_VERNEEDNUM", - "DT_VERSYM", - "Data", - "Dyn32", - "Dyn64", - "DynFlag", - "DynFlag1", - "DynTag", - "EI_ABIVERSION", - "EI_CLASS", - "EI_DATA", - "EI_NIDENT", - "EI_OSABI", - "EI_PAD", - "EI_VERSION", - "ELFCLASS32", - "ELFCLASS64", - "ELFCLASSNONE", - "ELFDATA2LSB", - "ELFDATA2MSB", - "ELFDATANONE", - "ELFMAG", - "ELFOSABI_86OPEN", - "ELFOSABI_AIX", - "ELFOSABI_ARM", - "ELFOSABI_AROS", - "ELFOSABI_CLOUDABI", - "ELFOSABI_FENIXOS", - "ELFOSABI_FREEBSD", - "ELFOSABI_HPUX", - "ELFOSABI_HURD", - "ELFOSABI_IRIX", - "ELFOSABI_LINUX", - "ELFOSABI_MODESTO", - "ELFOSABI_NETBSD", - "ELFOSABI_NONE", - "ELFOSABI_NSK", - "ELFOSABI_OPENBSD", - "ELFOSABI_OPENVMS", - "ELFOSABI_SOLARIS", - "ELFOSABI_STANDALONE", - "ELFOSABI_TRU64", - "EM_386", - "EM_486", - "EM_56800EX", - "EM_68HC05", - "EM_68HC08", - "EM_68HC11", - "EM_68HC12", - "EM_68HC16", - "EM_68K", - "EM_78KOR", - "EM_8051", - "EM_860", - "EM_88K", - "EM_960", - "EM_AARCH64", - "EM_ALPHA", - "EM_ALPHA_STD", - "EM_ALTERA_NIOS2", - "EM_AMDGPU", - "EM_ARC", - "EM_ARCA", - "EM_ARC_COMPACT", - "EM_ARC_COMPACT2", - "EM_ARM", - "EM_AVR", - "EM_AVR32", - "EM_BA1", - "EM_BA2", - "EM_BLACKFIN", - "EM_BPF", - "EM_C166", - "EM_CDP", - "EM_CE", - "EM_CLOUDSHIELD", - "EM_COGE", - "EM_COLDFIRE", - "EM_COOL", - "EM_COREA_1ST", - "EM_COREA_2ND", - "EM_CR", - "EM_CR16", - "EM_CRAYNV2", - "EM_CRIS", - "EM_CRX", - "EM_CSR_KALIMBA", - "EM_CUDA", - "EM_CYPRESS_M8C", - "EM_D10V", - "EM_D30V", - "EM_DSP24", - "EM_DSPIC30F", - "EM_DXP", - "EM_ECOG1", - "EM_ECOG16", - "EM_ECOG1X", - "EM_ECOG2", - "EM_ETPU", - "EM_EXCESS", - "EM_F2MC16", - "EM_FIREPATH", - "EM_FR20", - "EM_FR30", - "EM_FT32", - "EM_FX66", - "EM_H8S", - "EM_H8_300", - "EM_H8_300H", - "EM_H8_500", - "EM_HUANY", - "EM_IA_64", - "EM_INTEL205", - "EM_INTEL206", - "EM_INTEL207", - "EM_INTEL208", - 
"EM_INTEL209", - "EM_IP2K", - "EM_JAVELIN", - "EM_K10M", - "EM_KM32", - "EM_KMX16", - "EM_KMX32", - "EM_KMX8", - "EM_KVARC", - "EM_L10M", - "EM_LANAI", - "EM_LATTICEMICO32", - "EM_LOONGARCH", - "EM_M16C", - "EM_M32", - "EM_M32C", - "EM_M32R", - "EM_MANIK", - "EM_MAX", - "EM_MAXQ30", - "EM_MCHP_PIC", - "EM_MCST_ELBRUS", - "EM_ME16", - "EM_METAG", - "EM_MICROBLAZE", - "EM_MIPS", - "EM_MIPS_RS3_LE", - "EM_MIPS_RS4_BE", - "EM_MIPS_X", - "EM_MMA", - "EM_MMDSP_PLUS", - "EM_MMIX", - "EM_MN10200", - "EM_MN10300", - "EM_MOXIE", - "EM_MSP430", - "EM_NCPU", - "EM_NDR1", - "EM_NDS32", - "EM_NONE", - "EM_NORC", - "EM_NS32K", - "EM_OPEN8", - "EM_OPENRISC", - "EM_PARISC", - "EM_PCP", - "EM_PDP10", - "EM_PDP11", - "EM_PDSP", - "EM_PJ", - "EM_PPC", - "EM_PPC64", - "EM_PRISM", - "EM_QDSP6", - "EM_R32C", - "EM_RCE", - "EM_RH32", - "EM_RISCV", - "EM_RL78", - "EM_RS08", - "EM_RX", - "EM_S370", - "EM_S390", - "EM_SCORE7", - "EM_SEP", - "EM_SE_C17", - "EM_SE_C33", - "EM_SH", - "EM_SHARC", - "EM_SLE9X", - "EM_SNP1K", - "EM_SPARC", - "EM_SPARC32PLUS", - "EM_SPARCV9", - "EM_ST100", - "EM_ST19", - "EM_ST200", - "EM_ST7", - "EM_ST9PLUS", - "EM_STARCORE", - "EM_STM8", - "EM_STXP7X", - "EM_SVX", - "EM_TILE64", - "EM_TILEGX", - "EM_TILEPRO", - "EM_TINYJ", - "EM_TI_ARP32", - "EM_TI_C2000", - "EM_TI_C5500", - "EM_TI_C6000", - "EM_TI_PRU", - "EM_TMM_GPP", - "EM_TPC", - "EM_TRICORE", - "EM_TRIMEDIA", - "EM_TSK3000", - "EM_UNICORE", - "EM_V800", - "EM_V850", - "EM_VAX", - "EM_VIDEOCORE", - "EM_VIDEOCORE3", - "EM_VIDEOCORE5", - "EM_VISIUM", - "EM_VPP500", - "EM_X86_64", - "EM_XCORE", - "EM_XGATE", - "EM_XIMO16", - "EM_XTENSA", - "EM_Z80", - "EM_ZSP", - "ET_CORE", - "ET_DYN", - "ET_EXEC", - "ET_HIOS", - "ET_HIPROC", - "ET_LOOS", - "ET_LOPROC", - "ET_NONE", - "ET_REL", - "EV_CURRENT", - "EV_NONE", - "ErrNoSymbols", - "File", - "FileHeader", - "FormatError", - "Header32", - "Header64", - "ImportedSymbol", - "Machine", - "NT_FPREGSET", - "NT_PRPSINFO", - "NT_PRSTATUS", - "NType", - "NewFile", - "OSABI", - 
"Open", - "PF_MASKOS", - "PF_MASKPROC", - "PF_R", - "PF_W", - "PF_X", - "PT_AARCH64_ARCHEXT", - "PT_AARCH64_UNWIND", - "PT_ARM_ARCHEXT", - "PT_ARM_EXIDX", - "PT_DYNAMIC", - "PT_GNU_EH_FRAME", - "PT_GNU_MBIND_HI", - "PT_GNU_MBIND_LO", - "PT_GNU_PROPERTY", - "PT_GNU_RELRO", - "PT_GNU_STACK", - "PT_HIOS", - "PT_HIPROC", - "PT_INTERP", - "PT_LOAD", - "PT_LOOS", - "PT_LOPROC", - "PT_MIPS_ABIFLAGS", - "PT_MIPS_OPTIONS", - "PT_MIPS_REGINFO", - "PT_MIPS_RTPROC", - "PT_NOTE", - "PT_NULL", - "PT_OPENBSD_BOOTDATA", - "PT_OPENBSD_RANDOMIZE", - "PT_OPENBSD_WXNEEDED", - "PT_PAX_FLAGS", - "PT_PHDR", - "PT_S390_PGSTE", - "PT_SHLIB", - "PT_SUNWSTACK", - "PT_SUNW_EH_FRAME", - "PT_TLS", - "Prog", - "Prog32", - "Prog64", - "ProgFlag", - "ProgHeader", - "ProgType", - "R_386", - "R_386_16", - "R_386_32", - "R_386_32PLT", - "R_386_8", - "R_386_COPY", - "R_386_GLOB_DAT", - "R_386_GOT32", - "R_386_GOT32X", - "R_386_GOTOFF", - "R_386_GOTPC", - "R_386_IRELATIVE", - "R_386_JMP_SLOT", - "R_386_NONE", - "R_386_PC16", - "R_386_PC32", - "R_386_PC8", - "R_386_PLT32", - "R_386_RELATIVE", - "R_386_SIZE32", - "R_386_TLS_DESC", - "R_386_TLS_DESC_CALL", - "R_386_TLS_DTPMOD32", - "R_386_TLS_DTPOFF32", - "R_386_TLS_GD", - "R_386_TLS_GD_32", - "R_386_TLS_GD_CALL", - "R_386_TLS_GD_POP", - "R_386_TLS_GD_PUSH", - "R_386_TLS_GOTDESC", - "R_386_TLS_GOTIE", - "R_386_TLS_IE", - "R_386_TLS_IE_32", - "R_386_TLS_LDM", - "R_386_TLS_LDM_32", - "R_386_TLS_LDM_CALL", - "R_386_TLS_LDM_POP", - "R_386_TLS_LDM_PUSH", - "R_386_TLS_LDO_32", - "R_386_TLS_LE", - "R_386_TLS_LE_32", - "R_386_TLS_TPOFF", - "R_386_TLS_TPOFF32", - "R_390", - "R_390_12", - "R_390_16", - "R_390_20", - "R_390_32", - "R_390_64", - "R_390_8", - "R_390_COPY", - "R_390_GLOB_DAT", - "R_390_GOT12", - "R_390_GOT16", - "R_390_GOT20", - "R_390_GOT32", - "R_390_GOT64", - "R_390_GOTENT", - "R_390_GOTOFF", - "R_390_GOTOFF16", - "R_390_GOTOFF64", - "R_390_GOTPC", - "R_390_GOTPCDBL", - "R_390_GOTPLT12", - "R_390_GOTPLT16", - "R_390_GOTPLT20", - "R_390_GOTPLT32", - 
"R_390_GOTPLT64", - "R_390_GOTPLTENT", - "R_390_GOTPLTOFF16", - "R_390_GOTPLTOFF32", - "R_390_GOTPLTOFF64", - "R_390_JMP_SLOT", - "R_390_NONE", - "R_390_PC16", - "R_390_PC16DBL", - "R_390_PC32", - "R_390_PC32DBL", - "R_390_PC64", - "R_390_PLT16DBL", - "R_390_PLT32", - "R_390_PLT32DBL", - "R_390_PLT64", - "R_390_RELATIVE", - "R_390_TLS_DTPMOD", - "R_390_TLS_DTPOFF", - "R_390_TLS_GD32", - "R_390_TLS_GD64", - "R_390_TLS_GDCALL", - "R_390_TLS_GOTIE12", - "R_390_TLS_GOTIE20", - "R_390_TLS_GOTIE32", - "R_390_TLS_GOTIE64", - "R_390_TLS_IE32", - "R_390_TLS_IE64", - "R_390_TLS_IEENT", - "R_390_TLS_LDCALL", - "R_390_TLS_LDM32", - "R_390_TLS_LDM64", - "R_390_TLS_LDO32", - "R_390_TLS_LDO64", - "R_390_TLS_LE32", - "R_390_TLS_LE64", - "R_390_TLS_LOAD", - "R_390_TLS_TPOFF", - "R_AARCH64", - "R_AARCH64_ABS16", - "R_AARCH64_ABS32", - "R_AARCH64_ABS64", - "R_AARCH64_ADD_ABS_LO12_NC", - "R_AARCH64_ADR_GOT_PAGE", - "R_AARCH64_ADR_PREL_LO21", - "R_AARCH64_ADR_PREL_PG_HI21", - "R_AARCH64_ADR_PREL_PG_HI21_NC", - "R_AARCH64_CALL26", - "R_AARCH64_CONDBR19", - "R_AARCH64_COPY", - "R_AARCH64_GLOB_DAT", - "R_AARCH64_GOT_LD_PREL19", - "R_AARCH64_IRELATIVE", - "R_AARCH64_JUMP26", - "R_AARCH64_JUMP_SLOT", - "R_AARCH64_LD64_GOTOFF_LO15", - "R_AARCH64_LD64_GOTPAGE_LO15", - "R_AARCH64_LD64_GOT_LO12_NC", - "R_AARCH64_LDST128_ABS_LO12_NC", - "R_AARCH64_LDST16_ABS_LO12_NC", - "R_AARCH64_LDST32_ABS_LO12_NC", - "R_AARCH64_LDST64_ABS_LO12_NC", - "R_AARCH64_LDST8_ABS_LO12_NC", - "R_AARCH64_LD_PREL_LO19", - "R_AARCH64_MOVW_SABS_G0", - "R_AARCH64_MOVW_SABS_G1", - "R_AARCH64_MOVW_SABS_G2", - "R_AARCH64_MOVW_UABS_G0", - "R_AARCH64_MOVW_UABS_G0_NC", - "R_AARCH64_MOVW_UABS_G1", - "R_AARCH64_MOVW_UABS_G1_NC", - "R_AARCH64_MOVW_UABS_G2", - "R_AARCH64_MOVW_UABS_G2_NC", - "R_AARCH64_MOVW_UABS_G3", - "R_AARCH64_NONE", - "R_AARCH64_NULL", - "R_AARCH64_P32_ABS16", - "R_AARCH64_P32_ABS32", - "R_AARCH64_P32_ADD_ABS_LO12_NC", - "R_AARCH64_P32_ADR_GOT_PAGE", - "R_AARCH64_P32_ADR_PREL_LO21", - 
"R_AARCH64_P32_ADR_PREL_PG_HI21", - "R_AARCH64_P32_CALL26", - "R_AARCH64_P32_CONDBR19", - "R_AARCH64_P32_COPY", - "R_AARCH64_P32_GLOB_DAT", - "R_AARCH64_P32_GOT_LD_PREL19", - "R_AARCH64_P32_IRELATIVE", - "R_AARCH64_P32_JUMP26", - "R_AARCH64_P32_JUMP_SLOT", - "R_AARCH64_P32_LD32_GOT_LO12_NC", - "R_AARCH64_P32_LDST128_ABS_LO12_NC", - "R_AARCH64_P32_LDST16_ABS_LO12_NC", - "R_AARCH64_P32_LDST32_ABS_LO12_NC", - "R_AARCH64_P32_LDST64_ABS_LO12_NC", - "R_AARCH64_P32_LDST8_ABS_LO12_NC", - "R_AARCH64_P32_LD_PREL_LO19", - "R_AARCH64_P32_MOVW_SABS_G0", - "R_AARCH64_P32_MOVW_UABS_G0", - "R_AARCH64_P32_MOVW_UABS_G0_NC", - "R_AARCH64_P32_MOVW_UABS_G1", - "R_AARCH64_P32_PREL16", - "R_AARCH64_P32_PREL32", - "R_AARCH64_P32_RELATIVE", - "R_AARCH64_P32_TLSDESC", - "R_AARCH64_P32_TLSDESC_ADD_LO12_NC", - "R_AARCH64_P32_TLSDESC_ADR_PAGE21", - "R_AARCH64_P32_TLSDESC_ADR_PREL21", - "R_AARCH64_P32_TLSDESC_CALL", - "R_AARCH64_P32_TLSDESC_LD32_LO12_NC", - "R_AARCH64_P32_TLSDESC_LD_PREL19", - "R_AARCH64_P32_TLSGD_ADD_LO12_NC", - "R_AARCH64_P32_TLSGD_ADR_PAGE21", - "R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21", - "R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC", - "R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19", - "R_AARCH64_P32_TLSLE_ADD_TPREL_HI12", - "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12", - "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC", - "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0", - "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC", - "R_AARCH64_P32_TLSLE_MOVW_TPREL_G1", - "R_AARCH64_P32_TLS_DTPMOD", - "R_AARCH64_P32_TLS_DTPREL", - "R_AARCH64_P32_TLS_TPREL", - "R_AARCH64_P32_TSTBR14", - "R_AARCH64_PREL16", - "R_AARCH64_PREL32", - "R_AARCH64_PREL64", - "R_AARCH64_RELATIVE", - "R_AARCH64_TLSDESC", - "R_AARCH64_TLSDESC_ADD", - "R_AARCH64_TLSDESC_ADD_LO12_NC", - "R_AARCH64_TLSDESC_ADR_PAGE21", - "R_AARCH64_TLSDESC_ADR_PREL21", - "R_AARCH64_TLSDESC_CALL", - "R_AARCH64_TLSDESC_LD64_LO12_NC", - "R_AARCH64_TLSDESC_LDR", - "R_AARCH64_TLSDESC_LD_PREL19", - "R_AARCH64_TLSDESC_OFF_G0_NC", - "R_AARCH64_TLSDESC_OFF_G1", - 
"R_AARCH64_TLSGD_ADD_LO12_NC", - "R_AARCH64_TLSGD_ADR_PAGE21", - "R_AARCH64_TLSGD_ADR_PREL21", - "R_AARCH64_TLSGD_MOVW_G0_NC", - "R_AARCH64_TLSGD_MOVW_G1", - "R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21", - "R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC", - "R_AARCH64_TLSIE_LD_GOTTPREL_PREL19", - "R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC", - "R_AARCH64_TLSIE_MOVW_GOTTPREL_G1", - "R_AARCH64_TLSLD_ADR_PAGE21", - "R_AARCH64_TLSLD_ADR_PREL21", - "R_AARCH64_TLSLD_LDST128_DTPREL_LO12", - "R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC", - "R_AARCH64_TLSLE_ADD_TPREL_HI12", - "R_AARCH64_TLSLE_ADD_TPREL_LO12", - "R_AARCH64_TLSLE_ADD_TPREL_LO12_NC", - "R_AARCH64_TLSLE_LDST128_TPREL_LO12", - "R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC", - "R_AARCH64_TLSLE_MOVW_TPREL_G0", - "R_AARCH64_TLSLE_MOVW_TPREL_G0_NC", - "R_AARCH64_TLSLE_MOVW_TPREL_G1", - "R_AARCH64_TLSLE_MOVW_TPREL_G1_NC", - "R_AARCH64_TLSLE_MOVW_TPREL_G2", - "R_AARCH64_TLS_DTPMOD64", - "R_AARCH64_TLS_DTPREL64", - "R_AARCH64_TLS_TPREL64", - "R_AARCH64_TSTBR14", - "R_ALPHA", - "R_ALPHA_BRADDR", - "R_ALPHA_COPY", - "R_ALPHA_GLOB_DAT", - "R_ALPHA_GPDISP", - "R_ALPHA_GPREL32", - "R_ALPHA_GPRELHIGH", - "R_ALPHA_GPRELLOW", - "R_ALPHA_GPVALUE", - "R_ALPHA_HINT", - "R_ALPHA_IMMED_BR_HI32", - "R_ALPHA_IMMED_GP_16", - "R_ALPHA_IMMED_GP_HI32", - "R_ALPHA_IMMED_LO32", - "R_ALPHA_IMMED_SCN_HI32", - "R_ALPHA_JMP_SLOT", - "R_ALPHA_LITERAL", - "R_ALPHA_LITUSE", - "R_ALPHA_NONE", - "R_ALPHA_OP_PRSHIFT", - "R_ALPHA_OP_PSUB", - "R_ALPHA_OP_PUSH", - "R_ALPHA_OP_STORE", - "R_ALPHA_REFLONG", - "R_ALPHA_REFQUAD", - "R_ALPHA_RELATIVE", - "R_ALPHA_SREL16", - "R_ALPHA_SREL32", - "R_ALPHA_SREL64", - "R_ARM", - "R_ARM_ABS12", - "R_ARM_ABS16", - "R_ARM_ABS32", - "R_ARM_ABS32_NOI", - "R_ARM_ABS8", - "R_ARM_ALU_PCREL_15_8", - "R_ARM_ALU_PCREL_23_15", - "R_ARM_ALU_PCREL_7_0", - "R_ARM_ALU_PC_G0", - "R_ARM_ALU_PC_G0_NC", - "R_ARM_ALU_PC_G1", - "R_ARM_ALU_PC_G1_NC", - "R_ARM_ALU_PC_G2", - "R_ARM_ALU_SBREL_19_12_NC", - "R_ARM_ALU_SBREL_27_20_CK", - "R_ARM_ALU_SB_G0", - 
"R_ARM_ALU_SB_G0_NC", - "R_ARM_ALU_SB_G1", - "R_ARM_ALU_SB_G1_NC", - "R_ARM_ALU_SB_G2", - "R_ARM_AMP_VCALL9", - "R_ARM_BASE_ABS", - "R_ARM_CALL", - "R_ARM_COPY", - "R_ARM_GLOB_DAT", - "R_ARM_GNU_VTENTRY", - "R_ARM_GNU_VTINHERIT", - "R_ARM_GOT32", - "R_ARM_GOTOFF", - "R_ARM_GOTOFF12", - "R_ARM_GOTPC", - "R_ARM_GOTRELAX", - "R_ARM_GOT_ABS", - "R_ARM_GOT_BREL12", - "R_ARM_GOT_PREL", - "R_ARM_IRELATIVE", - "R_ARM_JUMP24", - "R_ARM_JUMP_SLOT", - "R_ARM_LDC_PC_G0", - "R_ARM_LDC_PC_G1", - "R_ARM_LDC_PC_G2", - "R_ARM_LDC_SB_G0", - "R_ARM_LDC_SB_G1", - "R_ARM_LDC_SB_G2", - "R_ARM_LDRS_PC_G0", - "R_ARM_LDRS_PC_G1", - "R_ARM_LDRS_PC_G2", - "R_ARM_LDRS_SB_G0", - "R_ARM_LDRS_SB_G1", - "R_ARM_LDRS_SB_G2", - "R_ARM_LDR_PC_G1", - "R_ARM_LDR_PC_G2", - "R_ARM_LDR_SBREL_11_10_NC", - "R_ARM_LDR_SB_G0", - "R_ARM_LDR_SB_G1", - "R_ARM_LDR_SB_G2", - "R_ARM_ME_TOO", - "R_ARM_MOVT_ABS", - "R_ARM_MOVT_BREL", - "R_ARM_MOVT_PREL", - "R_ARM_MOVW_ABS_NC", - "R_ARM_MOVW_BREL", - "R_ARM_MOVW_BREL_NC", - "R_ARM_MOVW_PREL_NC", - "R_ARM_NONE", - "R_ARM_PC13", - "R_ARM_PC24", - "R_ARM_PLT32", - "R_ARM_PLT32_ABS", - "R_ARM_PREL31", - "R_ARM_PRIVATE_0", - "R_ARM_PRIVATE_1", - "R_ARM_PRIVATE_10", - "R_ARM_PRIVATE_11", - "R_ARM_PRIVATE_12", - "R_ARM_PRIVATE_13", - "R_ARM_PRIVATE_14", - "R_ARM_PRIVATE_15", - "R_ARM_PRIVATE_2", - "R_ARM_PRIVATE_3", - "R_ARM_PRIVATE_4", - "R_ARM_PRIVATE_5", - "R_ARM_PRIVATE_6", - "R_ARM_PRIVATE_7", - "R_ARM_PRIVATE_8", - "R_ARM_PRIVATE_9", - "R_ARM_RABS32", - "R_ARM_RBASE", - "R_ARM_REL32", - "R_ARM_REL32_NOI", - "R_ARM_RELATIVE", - "R_ARM_RPC24", - "R_ARM_RREL32", - "R_ARM_RSBREL32", - "R_ARM_RXPC25", - "R_ARM_SBREL31", - "R_ARM_SBREL32", - "R_ARM_SWI24", - "R_ARM_TARGET1", - "R_ARM_TARGET2", - "R_ARM_THM_ABS5", - "R_ARM_THM_ALU_ABS_G0_NC", - "R_ARM_THM_ALU_ABS_G1_NC", - "R_ARM_THM_ALU_ABS_G2_NC", - "R_ARM_THM_ALU_ABS_G3", - "R_ARM_THM_ALU_PREL_11_0", - "R_ARM_THM_GOT_BREL12", - "R_ARM_THM_JUMP11", - "R_ARM_THM_JUMP19", - "R_ARM_THM_JUMP24", - "R_ARM_THM_JUMP6", - 
"R_ARM_THM_JUMP8", - "R_ARM_THM_MOVT_ABS", - "R_ARM_THM_MOVT_BREL", - "R_ARM_THM_MOVT_PREL", - "R_ARM_THM_MOVW_ABS_NC", - "R_ARM_THM_MOVW_BREL", - "R_ARM_THM_MOVW_BREL_NC", - "R_ARM_THM_MOVW_PREL_NC", - "R_ARM_THM_PC12", - "R_ARM_THM_PC22", - "R_ARM_THM_PC8", - "R_ARM_THM_RPC22", - "R_ARM_THM_SWI8", - "R_ARM_THM_TLS_CALL", - "R_ARM_THM_TLS_DESCSEQ16", - "R_ARM_THM_TLS_DESCSEQ32", - "R_ARM_THM_XPC22", - "R_ARM_TLS_CALL", - "R_ARM_TLS_DESCSEQ", - "R_ARM_TLS_DTPMOD32", - "R_ARM_TLS_DTPOFF32", - "R_ARM_TLS_GD32", - "R_ARM_TLS_GOTDESC", - "R_ARM_TLS_IE12GP", - "R_ARM_TLS_IE32", - "R_ARM_TLS_LDM32", - "R_ARM_TLS_LDO12", - "R_ARM_TLS_LDO32", - "R_ARM_TLS_LE12", - "R_ARM_TLS_LE32", - "R_ARM_TLS_TPOFF32", - "R_ARM_V4BX", - "R_ARM_XPC25", - "R_INFO", - "R_INFO32", - "R_LARCH", - "R_LARCH_32", - "R_LARCH_32_PCREL", - "R_LARCH_64", - "R_LARCH_ABS64_HI12", - "R_LARCH_ABS64_LO20", - "R_LARCH_ABS_HI20", - "R_LARCH_ABS_LO12", - "R_LARCH_ADD16", - "R_LARCH_ADD24", - "R_LARCH_ADD32", - "R_LARCH_ADD64", - "R_LARCH_ADD8", - "R_LARCH_B16", - "R_LARCH_B21", - "R_LARCH_B26", - "R_LARCH_COPY", - "R_LARCH_GNU_VTENTRY", - "R_LARCH_GNU_VTINHERIT", - "R_LARCH_GOT64_HI12", - "R_LARCH_GOT64_LO20", - "R_LARCH_GOT64_PC_HI12", - "R_LARCH_GOT64_PC_LO20", - "R_LARCH_GOT_HI20", - "R_LARCH_GOT_LO12", - "R_LARCH_GOT_PC_HI20", - "R_LARCH_GOT_PC_LO12", - "R_LARCH_IRELATIVE", - "R_LARCH_JUMP_SLOT", - "R_LARCH_MARK_LA", - "R_LARCH_MARK_PCREL", - "R_LARCH_NONE", - "R_LARCH_PCALA64_HI12", - "R_LARCH_PCALA64_LO20", - "R_LARCH_PCALA_HI20", - "R_LARCH_PCALA_LO12", - "R_LARCH_RELATIVE", - "R_LARCH_RELAX", - "R_LARCH_SOP_ADD", - "R_LARCH_SOP_AND", - "R_LARCH_SOP_ASSERT", - "R_LARCH_SOP_IF_ELSE", - "R_LARCH_SOP_NOT", - "R_LARCH_SOP_POP_32_S_0_10_10_16_S2", - "R_LARCH_SOP_POP_32_S_0_5_10_16_S2", - "R_LARCH_SOP_POP_32_S_10_12", - "R_LARCH_SOP_POP_32_S_10_16", - "R_LARCH_SOP_POP_32_S_10_16_S2", - "R_LARCH_SOP_POP_32_S_10_5", - "R_LARCH_SOP_POP_32_S_5_20", - "R_LARCH_SOP_POP_32_U", - "R_LARCH_SOP_POP_32_U_10_12", - 
"R_LARCH_SOP_PUSH_ABSOLUTE", - "R_LARCH_SOP_PUSH_DUP", - "R_LARCH_SOP_PUSH_GPREL", - "R_LARCH_SOP_PUSH_PCREL", - "R_LARCH_SOP_PUSH_PLT_PCREL", - "R_LARCH_SOP_PUSH_TLS_GD", - "R_LARCH_SOP_PUSH_TLS_GOT", - "R_LARCH_SOP_PUSH_TLS_TPREL", - "R_LARCH_SOP_SL", - "R_LARCH_SOP_SR", - "R_LARCH_SOP_SUB", - "R_LARCH_SUB16", - "R_LARCH_SUB24", - "R_LARCH_SUB32", - "R_LARCH_SUB64", - "R_LARCH_SUB8", - "R_LARCH_TLS_DTPMOD32", - "R_LARCH_TLS_DTPMOD64", - "R_LARCH_TLS_DTPREL32", - "R_LARCH_TLS_DTPREL64", - "R_LARCH_TLS_GD_HI20", - "R_LARCH_TLS_GD_PC_HI20", - "R_LARCH_TLS_IE64_HI12", - "R_LARCH_TLS_IE64_LO20", - "R_LARCH_TLS_IE64_PC_HI12", - "R_LARCH_TLS_IE64_PC_LO20", - "R_LARCH_TLS_IE_HI20", - "R_LARCH_TLS_IE_LO12", - "R_LARCH_TLS_IE_PC_HI20", - "R_LARCH_TLS_IE_PC_LO12", - "R_LARCH_TLS_LD_HI20", - "R_LARCH_TLS_LD_PC_HI20", - "R_LARCH_TLS_LE64_HI12", - "R_LARCH_TLS_LE64_LO20", - "R_LARCH_TLS_LE_HI20", - "R_LARCH_TLS_LE_LO12", - "R_LARCH_TLS_TPREL32", - "R_LARCH_TLS_TPREL64", - "R_MIPS", - "R_MIPS_16", - "R_MIPS_26", - "R_MIPS_32", - "R_MIPS_64", - "R_MIPS_ADD_IMMEDIATE", - "R_MIPS_CALL16", - "R_MIPS_CALL_HI16", - "R_MIPS_CALL_LO16", - "R_MIPS_DELETE", - "R_MIPS_GOT16", - "R_MIPS_GOT_DISP", - "R_MIPS_GOT_HI16", - "R_MIPS_GOT_LO16", - "R_MIPS_GOT_OFST", - "R_MIPS_GOT_PAGE", - "R_MIPS_GPREL16", - "R_MIPS_GPREL32", - "R_MIPS_HI16", - "R_MIPS_HIGHER", - "R_MIPS_HIGHEST", - "R_MIPS_INSERT_A", - "R_MIPS_INSERT_B", - "R_MIPS_JALR", - "R_MIPS_LITERAL", - "R_MIPS_LO16", - "R_MIPS_NONE", - "R_MIPS_PC16", - "R_MIPS_PJUMP", - "R_MIPS_REL16", - "R_MIPS_REL32", - "R_MIPS_RELGOT", - "R_MIPS_SCN_DISP", - "R_MIPS_SHIFT5", - "R_MIPS_SHIFT6", - "R_MIPS_SUB", - "R_MIPS_TLS_DTPMOD32", - "R_MIPS_TLS_DTPMOD64", - "R_MIPS_TLS_DTPREL32", - "R_MIPS_TLS_DTPREL64", - "R_MIPS_TLS_DTPREL_HI16", - "R_MIPS_TLS_DTPREL_LO16", - "R_MIPS_TLS_GD", - "R_MIPS_TLS_GOTTPREL", - "R_MIPS_TLS_LDM", - "R_MIPS_TLS_TPREL32", - "R_MIPS_TLS_TPREL64", - "R_MIPS_TLS_TPREL_HI16", - "R_MIPS_TLS_TPREL_LO16", - "R_PPC", - "R_PPC64", - 
"R_PPC64_ADDR14", - "R_PPC64_ADDR14_BRNTAKEN", - "R_PPC64_ADDR14_BRTAKEN", - "R_PPC64_ADDR16", - "R_PPC64_ADDR16_DS", - "R_PPC64_ADDR16_HA", - "R_PPC64_ADDR16_HI", - "R_PPC64_ADDR16_HIGH", - "R_PPC64_ADDR16_HIGHA", - "R_PPC64_ADDR16_HIGHER", - "R_PPC64_ADDR16_HIGHER34", - "R_PPC64_ADDR16_HIGHERA", - "R_PPC64_ADDR16_HIGHERA34", - "R_PPC64_ADDR16_HIGHEST", - "R_PPC64_ADDR16_HIGHEST34", - "R_PPC64_ADDR16_HIGHESTA", - "R_PPC64_ADDR16_HIGHESTA34", - "R_PPC64_ADDR16_LO", - "R_PPC64_ADDR16_LO_DS", - "R_PPC64_ADDR24", - "R_PPC64_ADDR32", - "R_PPC64_ADDR64", - "R_PPC64_ADDR64_LOCAL", - "R_PPC64_COPY", - "R_PPC64_D28", - "R_PPC64_D34", - "R_PPC64_D34_HA30", - "R_PPC64_D34_HI30", - "R_PPC64_D34_LO", - "R_PPC64_DTPMOD64", - "R_PPC64_DTPREL16", - "R_PPC64_DTPREL16_DS", - "R_PPC64_DTPREL16_HA", - "R_PPC64_DTPREL16_HI", - "R_PPC64_DTPREL16_HIGH", - "R_PPC64_DTPREL16_HIGHA", - "R_PPC64_DTPREL16_HIGHER", - "R_PPC64_DTPREL16_HIGHERA", - "R_PPC64_DTPREL16_HIGHEST", - "R_PPC64_DTPREL16_HIGHESTA", - "R_PPC64_DTPREL16_LO", - "R_PPC64_DTPREL16_LO_DS", - "R_PPC64_DTPREL34", - "R_PPC64_DTPREL64", - "R_PPC64_ENTRY", - "R_PPC64_GLOB_DAT", - "R_PPC64_GNU_VTENTRY", - "R_PPC64_GNU_VTINHERIT", - "R_PPC64_GOT16", - "R_PPC64_GOT16_DS", - "R_PPC64_GOT16_HA", - "R_PPC64_GOT16_HI", - "R_PPC64_GOT16_LO", - "R_PPC64_GOT16_LO_DS", - "R_PPC64_GOT_DTPREL16_DS", - "R_PPC64_GOT_DTPREL16_HA", - "R_PPC64_GOT_DTPREL16_HI", - "R_PPC64_GOT_DTPREL16_LO_DS", - "R_PPC64_GOT_DTPREL_PCREL34", - "R_PPC64_GOT_PCREL34", - "R_PPC64_GOT_TLSGD16", - "R_PPC64_GOT_TLSGD16_HA", - "R_PPC64_GOT_TLSGD16_HI", - "R_PPC64_GOT_TLSGD16_LO", - "R_PPC64_GOT_TLSGD_PCREL34", - "R_PPC64_GOT_TLSLD16", - "R_PPC64_GOT_TLSLD16_HA", - "R_PPC64_GOT_TLSLD16_HI", - "R_PPC64_GOT_TLSLD16_LO", - "R_PPC64_GOT_TLSLD_PCREL34", - "R_PPC64_GOT_TPREL16_DS", - "R_PPC64_GOT_TPREL16_HA", - "R_PPC64_GOT_TPREL16_HI", - "R_PPC64_GOT_TPREL16_LO_DS", - "R_PPC64_GOT_TPREL_PCREL34", - "R_PPC64_IRELATIVE", - "R_PPC64_JMP_IREL", - "R_PPC64_JMP_SLOT", - 
"R_PPC64_NONE", - "R_PPC64_PCREL28", - "R_PPC64_PCREL34", - "R_PPC64_PCREL_OPT", - "R_PPC64_PLT16_HA", - "R_PPC64_PLT16_HI", - "R_PPC64_PLT16_LO", - "R_PPC64_PLT16_LO_DS", - "R_PPC64_PLT32", - "R_PPC64_PLT64", - "R_PPC64_PLTCALL", - "R_PPC64_PLTCALL_NOTOC", - "R_PPC64_PLTGOT16", - "R_PPC64_PLTGOT16_DS", - "R_PPC64_PLTGOT16_HA", - "R_PPC64_PLTGOT16_HI", - "R_PPC64_PLTGOT16_LO", - "R_PPC64_PLTGOT_LO_DS", - "R_PPC64_PLTREL32", - "R_PPC64_PLTREL64", - "R_PPC64_PLTSEQ", - "R_PPC64_PLTSEQ_NOTOC", - "R_PPC64_PLT_PCREL34", - "R_PPC64_PLT_PCREL34_NOTOC", - "R_PPC64_REL14", - "R_PPC64_REL14_BRNTAKEN", - "R_PPC64_REL14_BRTAKEN", - "R_PPC64_REL16", - "R_PPC64_REL16DX_HA", - "R_PPC64_REL16_HA", - "R_PPC64_REL16_HI", - "R_PPC64_REL16_HIGH", - "R_PPC64_REL16_HIGHA", - "R_PPC64_REL16_HIGHER", - "R_PPC64_REL16_HIGHER34", - "R_PPC64_REL16_HIGHERA", - "R_PPC64_REL16_HIGHERA34", - "R_PPC64_REL16_HIGHEST", - "R_PPC64_REL16_HIGHEST34", - "R_PPC64_REL16_HIGHESTA", - "R_PPC64_REL16_HIGHESTA34", - "R_PPC64_REL16_LO", - "R_PPC64_REL24", - "R_PPC64_REL24_NOTOC", - "R_PPC64_REL24_P9NOTOC", - "R_PPC64_REL30", - "R_PPC64_REL32", - "R_PPC64_REL64", - "R_PPC64_RELATIVE", - "R_PPC64_SECTOFF", - "R_PPC64_SECTOFF_DS", - "R_PPC64_SECTOFF_HA", - "R_PPC64_SECTOFF_HI", - "R_PPC64_SECTOFF_LO", - "R_PPC64_SECTOFF_LO_DS", - "R_PPC64_TLS", - "R_PPC64_TLSGD", - "R_PPC64_TLSLD", - "R_PPC64_TOC", - "R_PPC64_TOC16", - "R_PPC64_TOC16_DS", - "R_PPC64_TOC16_HA", - "R_PPC64_TOC16_HI", - "R_PPC64_TOC16_LO", - "R_PPC64_TOC16_LO_DS", - "R_PPC64_TOCSAVE", - "R_PPC64_TPREL16", - "R_PPC64_TPREL16_DS", - "R_PPC64_TPREL16_HA", - "R_PPC64_TPREL16_HI", - "R_PPC64_TPREL16_HIGH", - "R_PPC64_TPREL16_HIGHA", - "R_PPC64_TPREL16_HIGHER", - "R_PPC64_TPREL16_HIGHERA", - "R_PPC64_TPREL16_HIGHEST", - "R_PPC64_TPREL16_HIGHESTA", - "R_PPC64_TPREL16_LO", - "R_PPC64_TPREL16_LO_DS", - "R_PPC64_TPREL34", - "R_PPC64_TPREL64", - "R_PPC64_UADDR16", - "R_PPC64_UADDR32", - "R_PPC64_UADDR64", - "R_PPC_ADDR14", - "R_PPC_ADDR14_BRNTAKEN", - 
"R_PPC_ADDR14_BRTAKEN", - "R_PPC_ADDR16", - "R_PPC_ADDR16_HA", - "R_PPC_ADDR16_HI", - "R_PPC_ADDR16_LO", - "R_PPC_ADDR24", - "R_PPC_ADDR32", - "R_PPC_COPY", - "R_PPC_DTPMOD32", - "R_PPC_DTPREL16", - "R_PPC_DTPREL16_HA", - "R_PPC_DTPREL16_HI", - "R_PPC_DTPREL16_LO", - "R_PPC_DTPREL32", - "R_PPC_EMB_BIT_FLD", - "R_PPC_EMB_MRKREF", - "R_PPC_EMB_NADDR16", - "R_PPC_EMB_NADDR16_HA", - "R_PPC_EMB_NADDR16_HI", - "R_PPC_EMB_NADDR16_LO", - "R_PPC_EMB_NADDR32", - "R_PPC_EMB_RELSDA", - "R_PPC_EMB_RELSEC16", - "R_PPC_EMB_RELST_HA", - "R_PPC_EMB_RELST_HI", - "R_PPC_EMB_RELST_LO", - "R_PPC_EMB_SDA21", - "R_PPC_EMB_SDA2I16", - "R_PPC_EMB_SDA2REL", - "R_PPC_EMB_SDAI16", - "R_PPC_GLOB_DAT", - "R_PPC_GOT16", - "R_PPC_GOT16_HA", - "R_PPC_GOT16_HI", - "R_PPC_GOT16_LO", - "R_PPC_GOT_TLSGD16", - "R_PPC_GOT_TLSGD16_HA", - "R_PPC_GOT_TLSGD16_HI", - "R_PPC_GOT_TLSGD16_LO", - "R_PPC_GOT_TLSLD16", - "R_PPC_GOT_TLSLD16_HA", - "R_PPC_GOT_TLSLD16_HI", - "R_PPC_GOT_TLSLD16_LO", - "R_PPC_GOT_TPREL16", - "R_PPC_GOT_TPREL16_HA", - "R_PPC_GOT_TPREL16_HI", - "R_PPC_GOT_TPREL16_LO", - "R_PPC_JMP_SLOT", - "R_PPC_LOCAL24PC", - "R_PPC_NONE", - "R_PPC_PLT16_HA", - "R_PPC_PLT16_HI", - "R_PPC_PLT16_LO", - "R_PPC_PLT32", - "R_PPC_PLTREL24", - "R_PPC_PLTREL32", - "R_PPC_REL14", - "R_PPC_REL14_BRNTAKEN", - "R_PPC_REL14_BRTAKEN", - "R_PPC_REL24", - "R_PPC_REL32", - "R_PPC_RELATIVE", - "R_PPC_SDAREL16", - "R_PPC_SECTOFF", - "R_PPC_SECTOFF_HA", - "R_PPC_SECTOFF_HI", - "R_PPC_SECTOFF_LO", - "R_PPC_TLS", - "R_PPC_TPREL16", - "R_PPC_TPREL16_HA", - "R_PPC_TPREL16_HI", - "R_PPC_TPREL16_LO", - "R_PPC_TPREL32", - "R_PPC_UADDR16", - "R_PPC_UADDR32", - "R_RISCV", - "R_RISCV_32", - "R_RISCV_32_PCREL", - "R_RISCV_64", - "R_RISCV_ADD16", - "R_RISCV_ADD32", - "R_RISCV_ADD64", - "R_RISCV_ADD8", - "R_RISCV_ALIGN", - "R_RISCV_BRANCH", - "R_RISCV_CALL", - "R_RISCV_CALL_PLT", - "R_RISCV_COPY", - "R_RISCV_GNU_VTENTRY", - "R_RISCV_GNU_VTINHERIT", - "R_RISCV_GOT_HI20", - "R_RISCV_GPREL_I", - "R_RISCV_GPREL_S", - "R_RISCV_HI20", - 
"R_RISCV_JAL", - "R_RISCV_JUMP_SLOT", - "R_RISCV_LO12_I", - "R_RISCV_LO12_S", - "R_RISCV_NONE", - "R_RISCV_PCREL_HI20", - "R_RISCV_PCREL_LO12_I", - "R_RISCV_PCREL_LO12_S", - "R_RISCV_RELATIVE", - "R_RISCV_RELAX", - "R_RISCV_RVC_BRANCH", - "R_RISCV_RVC_JUMP", - "R_RISCV_RVC_LUI", - "R_RISCV_SET16", - "R_RISCV_SET32", - "R_RISCV_SET6", - "R_RISCV_SET8", - "R_RISCV_SUB16", - "R_RISCV_SUB32", - "R_RISCV_SUB6", - "R_RISCV_SUB64", - "R_RISCV_SUB8", - "R_RISCV_TLS_DTPMOD32", - "R_RISCV_TLS_DTPMOD64", - "R_RISCV_TLS_DTPREL32", - "R_RISCV_TLS_DTPREL64", - "R_RISCV_TLS_GD_HI20", - "R_RISCV_TLS_GOT_HI20", - "R_RISCV_TLS_TPREL32", - "R_RISCV_TLS_TPREL64", - "R_RISCV_TPREL_ADD", - "R_RISCV_TPREL_HI20", - "R_RISCV_TPREL_I", - "R_RISCV_TPREL_LO12_I", - "R_RISCV_TPREL_LO12_S", - "R_RISCV_TPREL_S", - "R_SPARC", - "R_SPARC_10", - "R_SPARC_11", - "R_SPARC_13", - "R_SPARC_16", - "R_SPARC_22", - "R_SPARC_32", - "R_SPARC_5", - "R_SPARC_6", - "R_SPARC_64", - "R_SPARC_7", - "R_SPARC_8", - "R_SPARC_COPY", - "R_SPARC_DISP16", - "R_SPARC_DISP32", - "R_SPARC_DISP64", - "R_SPARC_DISP8", - "R_SPARC_GLOB_DAT", - "R_SPARC_GLOB_JMP", - "R_SPARC_GOT10", - "R_SPARC_GOT13", - "R_SPARC_GOT22", - "R_SPARC_H44", - "R_SPARC_HH22", - "R_SPARC_HI22", - "R_SPARC_HIPLT22", - "R_SPARC_HIX22", - "R_SPARC_HM10", - "R_SPARC_JMP_SLOT", - "R_SPARC_L44", - "R_SPARC_LM22", - "R_SPARC_LO10", - "R_SPARC_LOPLT10", - "R_SPARC_LOX10", - "R_SPARC_M44", - "R_SPARC_NONE", - "R_SPARC_OLO10", - "R_SPARC_PC10", - "R_SPARC_PC22", - "R_SPARC_PCPLT10", - "R_SPARC_PCPLT22", - "R_SPARC_PCPLT32", - "R_SPARC_PC_HH22", - "R_SPARC_PC_HM10", - "R_SPARC_PC_LM22", - "R_SPARC_PLT32", - "R_SPARC_PLT64", - "R_SPARC_REGISTER", - "R_SPARC_RELATIVE", - "R_SPARC_UA16", - "R_SPARC_UA32", - "R_SPARC_UA64", - "R_SPARC_WDISP16", - "R_SPARC_WDISP19", - "R_SPARC_WDISP22", - "R_SPARC_WDISP30", - "R_SPARC_WPLT30", - "R_SYM32", - "R_SYM64", - "R_TYPE32", - "R_TYPE64", - "R_X86_64", - "R_X86_64_16", - "R_X86_64_32", - "R_X86_64_32S", - "R_X86_64_64", - 
"R_X86_64_8", - "R_X86_64_COPY", - "R_X86_64_DTPMOD64", - "R_X86_64_DTPOFF32", - "R_X86_64_DTPOFF64", - "R_X86_64_GLOB_DAT", - "R_X86_64_GOT32", - "R_X86_64_GOT64", - "R_X86_64_GOTOFF64", - "R_X86_64_GOTPC32", - "R_X86_64_GOTPC32_TLSDESC", - "R_X86_64_GOTPC64", - "R_X86_64_GOTPCREL", - "R_X86_64_GOTPCREL64", - "R_X86_64_GOTPCRELX", - "R_X86_64_GOTPLT64", - "R_X86_64_GOTTPOFF", - "R_X86_64_IRELATIVE", - "R_X86_64_JMP_SLOT", - "R_X86_64_NONE", - "R_X86_64_PC16", - "R_X86_64_PC32", - "R_X86_64_PC32_BND", - "R_X86_64_PC64", - "R_X86_64_PC8", - "R_X86_64_PLT32", - "R_X86_64_PLT32_BND", - "R_X86_64_PLTOFF64", - "R_X86_64_RELATIVE", - "R_X86_64_RELATIVE64", - "R_X86_64_REX_GOTPCRELX", - "R_X86_64_SIZE32", - "R_X86_64_SIZE64", - "R_X86_64_TLSDESC", - "R_X86_64_TLSDESC_CALL", - "R_X86_64_TLSGD", - "R_X86_64_TLSLD", - "R_X86_64_TPOFF32", - "R_X86_64_TPOFF64", - "Rel32", - "Rel64", - "Rela32", - "Rela64", - "SHF_ALLOC", - "SHF_COMPRESSED", - "SHF_EXECINSTR", - "SHF_GROUP", - "SHF_INFO_LINK", - "SHF_LINK_ORDER", - "SHF_MASKOS", - "SHF_MASKPROC", - "SHF_MERGE", - "SHF_OS_NONCONFORMING", - "SHF_STRINGS", - "SHF_TLS", - "SHF_WRITE", - "SHN_ABS", - "SHN_COMMON", - "SHN_HIOS", - "SHN_HIPROC", - "SHN_HIRESERVE", - "SHN_LOOS", - "SHN_LOPROC", - "SHN_LORESERVE", - "SHN_UNDEF", - "SHN_XINDEX", - "SHT_DYNAMIC", - "SHT_DYNSYM", - "SHT_FINI_ARRAY", - "SHT_GNU_ATTRIBUTES", - "SHT_GNU_HASH", - "SHT_GNU_LIBLIST", - "SHT_GNU_VERDEF", - "SHT_GNU_VERNEED", - "SHT_GNU_VERSYM", - "SHT_GROUP", - "SHT_HASH", - "SHT_HIOS", - "SHT_HIPROC", - "SHT_HIUSER", - "SHT_INIT_ARRAY", - "SHT_LOOS", - "SHT_LOPROC", - "SHT_LOUSER", - "SHT_MIPS_ABIFLAGS", - "SHT_NOBITS", - "SHT_NOTE", - "SHT_NULL", - "SHT_PREINIT_ARRAY", - "SHT_PROGBITS", - "SHT_REL", - "SHT_RELA", - "SHT_SHLIB", - "SHT_STRTAB", - "SHT_SYMTAB", - "SHT_SYMTAB_SHNDX", - "STB_GLOBAL", - "STB_HIOS", - "STB_HIPROC", - "STB_LOCAL", - "STB_LOOS", - "STB_LOPROC", - "STB_WEAK", - "STT_COMMON", - "STT_FILE", - "STT_FUNC", - "STT_HIOS", - "STT_HIPROC", - 
"STT_LOOS", - "STT_LOPROC", - "STT_NOTYPE", - "STT_OBJECT", - "STT_SECTION", - "STT_TLS", - "STV_DEFAULT", - "STV_HIDDEN", - "STV_INTERNAL", - "STV_PROTECTED", - "ST_BIND", - "ST_INFO", - "ST_TYPE", - "ST_VISIBILITY", - "Section", - "Section32", - "Section64", - "SectionFlag", - "SectionHeader", - "SectionIndex", - "SectionType", - "Sym32", - "Sym32Size", - "Sym64", - "Sym64Size", - "SymBind", - "SymType", - "SymVis", - "Symbol", - "Type", - "Version", - }, - "debug/gosym": { - "DecodingError", - "Func", - "LineTable", - "NewLineTable", - "NewTable", - "Obj", - "Sym", - "Table", - "UnknownFileError", - "UnknownLineError", - }, - "debug/macho": { - "ARM64_RELOC_ADDEND", - "ARM64_RELOC_BRANCH26", - "ARM64_RELOC_GOT_LOAD_PAGE21", - "ARM64_RELOC_GOT_LOAD_PAGEOFF12", - "ARM64_RELOC_PAGE21", - "ARM64_RELOC_PAGEOFF12", - "ARM64_RELOC_POINTER_TO_GOT", - "ARM64_RELOC_SUBTRACTOR", - "ARM64_RELOC_TLVP_LOAD_PAGE21", - "ARM64_RELOC_TLVP_LOAD_PAGEOFF12", - "ARM64_RELOC_UNSIGNED", - "ARM_RELOC_BR24", - "ARM_RELOC_HALF", - "ARM_RELOC_HALF_SECTDIFF", - "ARM_RELOC_LOCAL_SECTDIFF", - "ARM_RELOC_PAIR", - "ARM_RELOC_PB_LA_PTR", - "ARM_RELOC_SECTDIFF", - "ARM_RELOC_VANILLA", - "ARM_THUMB_32BIT_BRANCH", - "ARM_THUMB_RELOC_BR22", - "Cpu", - "Cpu386", - "CpuAmd64", - "CpuArm", - "CpuArm64", - "CpuPpc", - "CpuPpc64", - "Dylib", - "DylibCmd", - "Dysymtab", - "DysymtabCmd", - "ErrNotFat", - "FatArch", - "FatArchHeader", - "FatFile", - "File", - "FileHeader", - "FlagAllModsBound", - "FlagAllowStackExecution", - "FlagAppExtensionSafe", - "FlagBindAtLoad", - "FlagBindsToWeak", - "FlagCanonical", - "FlagDeadStrippableDylib", - "FlagDyldLink", - "FlagForceFlat", - "FlagHasTLVDescriptors", - "FlagIncrLink", - "FlagLazyInit", - "FlagNoFixPrebinding", - "FlagNoHeapExecution", - "FlagNoMultiDefs", - "FlagNoReexportedDylibs", - "FlagNoUndefs", - "FlagPIE", - "FlagPrebindable", - "FlagPrebound", - "FlagRootSafe", - "FlagSetuidSafe", - "FlagSplitSegs", - "FlagSubsectionsViaSymbols", - "FlagTwoLevel", - 
"FlagWeakDefines", - "FormatError", - "GENERIC_RELOC_LOCAL_SECTDIFF", - "GENERIC_RELOC_PAIR", - "GENERIC_RELOC_PB_LA_PTR", - "GENERIC_RELOC_SECTDIFF", - "GENERIC_RELOC_TLV", - "GENERIC_RELOC_VANILLA", - "Load", - "LoadBytes", - "LoadCmd", - "LoadCmdDylib", - "LoadCmdDylinker", - "LoadCmdDysymtab", - "LoadCmdRpath", - "LoadCmdSegment", - "LoadCmdSegment64", - "LoadCmdSymtab", - "LoadCmdThread", - "LoadCmdUnixThread", - "Magic32", - "Magic64", - "MagicFat", - "NewFatFile", - "NewFile", - "Nlist32", - "Nlist64", - "Open", - "OpenFat", - "Regs386", - "RegsAMD64", - "Reloc", - "RelocTypeARM", - "RelocTypeARM64", - "RelocTypeGeneric", - "RelocTypeX86_64", - "Rpath", - "RpathCmd", - "Section", - "Section32", - "Section64", - "SectionHeader", - "Segment", - "Segment32", - "Segment64", - "SegmentHeader", - "Symbol", - "Symtab", - "SymtabCmd", - "Thread", - "Type", - "TypeBundle", - "TypeDylib", - "TypeExec", - "TypeObj", - "X86_64_RELOC_BRANCH", - "X86_64_RELOC_GOT", - "X86_64_RELOC_GOT_LOAD", - "X86_64_RELOC_SIGNED", - "X86_64_RELOC_SIGNED_1", - "X86_64_RELOC_SIGNED_2", - "X86_64_RELOC_SIGNED_4", - "X86_64_RELOC_SUBTRACTOR", - "X86_64_RELOC_TLV", - "X86_64_RELOC_UNSIGNED", - }, - "debug/pe": { - "COFFSymbol", - "COFFSymbolAuxFormat5", - "COFFSymbolSize", - "DataDirectory", - "File", - "FileHeader", - "FormatError", - "IMAGE_COMDAT_SELECT_ANY", - "IMAGE_COMDAT_SELECT_ASSOCIATIVE", - "IMAGE_COMDAT_SELECT_EXACT_MATCH", - "IMAGE_COMDAT_SELECT_LARGEST", - "IMAGE_COMDAT_SELECT_NODUPLICATES", - "IMAGE_COMDAT_SELECT_SAME_SIZE", - "IMAGE_DIRECTORY_ENTRY_ARCHITECTURE", - "IMAGE_DIRECTORY_ENTRY_BASERELOC", - "IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT", - "IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR", - "IMAGE_DIRECTORY_ENTRY_DEBUG", - "IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT", - "IMAGE_DIRECTORY_ENTRY_EXCEPTION", - "IMAGE_DIRECTORY_ENTRY_EXPORT", - "IMAGE_DIRECTORY_ENTRY_GLOBALPTR", - "IMAGE_DIRECTORY_ENTRY_IAT", - "IMAGE_DIRECTORY_ENTRY_IMPORT", - "IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG", - 
"IMAGE_DIRECTORY_ENTRY_RESOURCE", - "IMAGE_DIRECTORY_ENTRY_SECURITY", - "IMAGE_DIRECTORY_ENTRY_TLS", - "IMAGE_DLLCHARACTERISTICS_APPCONTAINER", - "IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE", - "IMAGE_DLLCHARACTERISTICS_FORCE_INTEGRITY", - "IMAGE_DLLCHARACTERISTICS_GUARD_CF", - "IMAGE_DLLCHARACTERISTICS_HIGH_ENTROPY_VA", - "IMAGE_DLLCHARACTERISTICS_NO_BIND", - "IMAGE_DLLCHARACTERISTICS_NO_ISOLATION", - "IMAGE_DLLCHARACTERISTICS_NO_SEH", - "IMAGE_DLLCHARACTERISTICS_NX_COMPAT", - "IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE", - "IMAGE_DLLCHARACTERISTICS_WDM_DRIVER", - "IMAGE_FILE_32BIT_MACHINE", - "IMAGE_FILE_AGGRESIVE_WS_TRIM", - "IMAGE_FILE_BYTES_REVERSED_HI", - "IMAGE_FILE_BYTES_REVERSED_LO", - "IMAGE_FILE_DEBUG_STRIPPED", - "IMAGE_FILE_DLL", - "IMAGE_FILE_EXECUTABLE_IMAGE", - "IMAGE_FILE_LARGE_ADDRESS_AWARE", - "IMAGE_FILE_LINE_NUMS_STRIPPED", - "IMAGE_FILE_LOCAL_SYMS_STRIPPED", - "IMAGE_FILE_MACHINE_AM33", - "IMAGE_FILE_MACHINE_AMD64", - "IMAGE_FILE_MACHINE_ARM", - "IMAGE_FILE_MACHINE_ARM64", - "IMAGE_FILE_MACHINE_ARMNT", - "IMAGE_FILE_MACHINE_EBC", - "IMAGE_FILE_MACHINE_I386", - "IMAGE_FILE_MACHINE_IA64", - "IMAGE_FILE_MACHINE_LOONGARCH32", - "IMAGE_FILE_MACHINE_LOONGARCH64", - "IMAGE_FILE_MACHINE_M32R", - "IMAGE_FILE_MACHINE_MIPS16", - "IMAGE_FILE_MACHINE_MIPSFPU", - "IMAGE_FILE_MACHINE_MIPSFPU16", - "IMAGE_FILE_MACHINE_POWERPC", - "IMAGE_FILE_MACHINE_POWERPCFP", - "IMAGE_FILE_MACHINE_R4000", - "IMAGE_FILE_MACHINE_RISCV128", - "IMAGE_FILE_MACHINE_RISCV32", - "IMAGE_FILE_MACHINE_RISCV64", - "IMAGE_FILE_MACHINE_SH3", - "IMAGE_FILE_MACHINE_SH3DSP", - "IMAGE_FILE_MACHINE_SH4", - "IMAGE_FILE_MACHINE_SH5", - "IMAGE_FILE_MACHINE_THUMB", - "IMAGE_FILE_MACHINE_UNKNOWN", - "IMAGE_FILE_MACHINE_WCEMIPSV2", - "IMAGE_FILE_NET_RUN_FROM_SWAP", - "IMAGE_FILE_RELOCS_STRIPPED", - "IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP", - "IMAGE_FILE_SYSTEM", - "IMAGE_FILE_UP_SYSTEM_ONLY", - "IMAGE_SCN_CNT_CODE", - "IMAGE_SCN_CNT_INITIALIZED_DATA", - "IMAGE_SCN_CNT_UNINITIALIZED_DATA", - 
"IMAGE_SCN_LNK_COMDAT", - "IMAGE_SCN_MEM_DISCARDABLE", - "IMAGE_SCN_MEM_EXECUTE", - "IMAGE_SCN_MEM_READ", - "IMAGE_SCN_MEM_WRITE", - "IMAGE_SUBSYSTEM_EFI_APPLICATION", - "IMAGE_SUBSYSTEM_EFI_BOOT_SERVICE_DRIVER", - "IMAGE_SUBSYSTEM_EFI_ROM", - "IMAGE_SUBSYSTEM_EFI_RUNTIME_DRIVER", - "IMAGE_SUBSYSTEM_NATIVE", - "IMAGE_SUBSYSTEM_NATIVE_WINDOWS", - "IMAGE_SUBSYSTEM_OS2_CUI", - "IMAGE_SUBSYSTEM_POSIX_CUI", - "IMAGE_SUBSYSTEM_UNKNOWN", - "IMAGE_SUBSYSTEM_WINDOWS_BOOT_APPLICATION", - "IMAGE_SUBSYSTEM_WINDOWS_CE_GUI", - "IMAGE_SUBSYSTEM_WINDOWS_CUI", - "IMAGE_SUBSYSTEM_WINDOWS_GUI", - "IMAGE_SUBSYSTEM_XBOX", - "ImportDirectory", - "NewFile", - "Open", - "OptionalHeader32", - "OptionalHeader64", - "Reloc", - "Section", - "SectionHeader", - "SectionHeader32", - "StringTable", - "Symbol", - }, - "debug/plan9obj": { - "ErrNoSymbols", - "File", - "FileHeader", - "Magic386", - "Magic64", - "MagicAMD64", - "MagicARM", - "NewFile", - "Open", - "Section", - "SectionHeader", - "Sym", - }, - "embed": { - "FS", - }, - "encoding": { - "BinaryMarshaler", - "BinaryUnmarshaler", - "TextMarshaler", - "TextUnmarshaler", - }, - "encoding/ascii85": { - "CorruptInputError", - "Decode", - "Encode", - "MaxEncodedLen", - "NewDecoder", - "NewEncoder", - }, - "encoding/asn1": { - "BitString", - "ClassApplication", - "ClassContextSpecific", - "ClassPrivate", - "ClassUniversal", - "Enumerated", - "Flag", - "Marshal", - "MarshalWithParams", - "NullBytes", - "NullRawValue", - "ObjectIdentifier", - "RawContent", - "RawValue", - "StructuralError", - "SyntaxError", - "TagBMPString", - "TagBitString", - "TagBoolean", - "TagEnum", - "TagGeneralString", - "TagGeneralizedTime", - "TagIA5String", - "TagInteger", - "TagNull", - "TagNumericString", - "TagOID", - "TagOctetString", - "TagPrintableString", - "TagSequence", - "TagSet", - "TagT61String", - "TagUTCTime", - "TagUTF8String", - "Unmarshal", - "UnmarshalWithParams", - }, - "encoding/base32": { - "CorruptInputError", - "Encoding", - "HexEncoding", - 
"NewDecoder", - "NewEncoder", - "NewEncoding", - "NoPadding", - "StdEncoding", - "StdPadding", - }, - "encoding/base64": { - "CorruptInputError", - "Encoding", - "NewDecoder", - "NewEncoder", - "NewEncoding", - "NoPadding", - "RawStdEncoding", - "RawURLEncoding", - "StdEncoding", - "StdPadding", - "URLEncoding", - }, - "encoding/binary": { - "AppendByteOrder", - "AppendUvarint", - "AppendVarint", - "BigEndian", - "ByteOrder", - "LittleEndian", - "MaxVarintLen16", - "MaxVarintLen32", - "MaxVarintLen64", - "NativeEndian", - "PutUvarint", - "PutVarint", - "Read", - "ReadUvarint", - "ReadVarint", - "Size", - "Uvarint", - "Varint", - "Write", - }, - "encoding/csv": { - "ErrBareQuote", - "ErrFieldCount", - "ErrQuote", - "ErrTrailingComma", - "NewReader", - "NewWriter", - "ParseError", - "Reader", - "Writer", - }, - "encoding/gob": { - "CommonType", - "Decoder", - "Encoder", - "GobDecoder", - "GobEncoder", - "NewDecoder", - "NewEncoder", - "Register", - "RegisterName", - }, - "encoding/hex": { - "Decode", - "DecodeString", - "DecodedLen", - "Dump", - "Dumper", - "Encode", - "EncodeToString", - "EncodedLen", - "ErrLength", - "InvalidByteError", - "NewDecoder", - "NewEncoder", - }, - "encoding/json": { - "Compact", - "Decoder", - "Delim", - "Encoder", - "HTMLEscape", - "Indent", - "InvalidUTF8Error", - "InvalidUnmarshalError", - "Marshal", - "MarshalIndent", - "Marshaler", - "MarshalerError", - "NewDecoder", - "NewEncoder", - "Number", - "RawMessage", - "SyntaxError", - "Token", - "Unmarshal", - "UnmarshalFieldError", - "UnmarshalTypeError", - "Unmarshaler", - "UnsupportedTypeError", - "UnsupportedValueError", - "Valid", - }, - "encoding/pem": { - "Block", - "Decode", - "Encode", - "EncodeToMemory", - }, - "encoding/xml": { - "Attr", - "CharData", - "Comment", - "CopyToken", - "Decoder", - "Directive", - "Encoder", - "EndElement", - "Escape", - "EscapeText", - "HTMLAutoClose", - "HTMLEntity", - "Header", - "Marshal", - "MarshalIndent", - "Marshaler", - "MarshalerAttr", - 
"Name", - "NewDecoder", - "NewEncoder", - "NewTokenDecoder", - "ProcInst", - "StartElement", - "SyntaxError", - "TagPathError", - "Token", - "TokenReader", - "Unmarshal", - "UnmarshalError", - "Unmarshaler", - "UnmarshalerAttr", - "UnsupportedTypeError", - }, - "errors": { - "As", - "ErrUnsupported", - "Is", - "Join", - "New", - "Unwrap", - }, - "expvar": { - "Do", - "Float", - "Func", - "Get", - "Handler", - "Int", - "KeyValue", - "Map", - "NewFloat", - "NewInt", - "NewMap", - "NewString", - "Publish", - "String", - "Var", - }, - "flag": { - "Arg", - "Args", - "Bool", - "BoolFunc", - "BoolVar", - "CommandLine", - "ContinueOnError", - "Duration", - "DurationVar", - "ErrHelp", - "ErrorHandling", - "ExitOnError", - "Flag", - "FlagSet", - "Float64", - "Float64Var", - "Func", - "Getter", - "Int", - "Int64", - "Int64Var", - "IntVar", - "Lookup", - "NArg", - "NFlag", - "NewFlagSet", - "PanicOnError", - "Parse", - "Parsed", - "PrintDefaults", - "Set", - "String", - "StringVar", - "TextVar", - "Uint", - "Uint64", - "Uint64Var", - "UintVar", - "UnquoteUsage", - "Usage", - "Value", - "Var", - "Visit", - "VisitAll", - }, - "fmt": { - "Append", - "Appendf", - "Appendln", - "Errorf", - "FormatString", - "Formatter", - "Fprint", - "Fprintf", - "Fprintln", - "Fscan", - "Fscanf", - "Fscanln", - "GoStringer", - "Print", - "Printf", - "Println", - "Scan", - "ScanState", - "Scanf", - "Scanln", - "Scanner", - "Sprint", - "Sprintf", - "Sprintln", - "Sscan", - "Sscanf", - "Sscanln", - "State", - "Stringer", - }, - "go/ast": { - "ArrayType", - "AssignStmt", - "Bad", - "BadDecl", - "BadExpr", - "BadStmt", - "BasicLit", - "BinaryExpr", - "BlockStmt", - "BranchStmt", - "CallExpr", - "CaseClause", - "ChanDir", - "ChanType", - "CommClause", - "Comment", - "CommentGroup", - "CommentMap", - "CompositeLit", - "Con", - "Decl", - "DeclStmt", - "DeferStmt", - "Ellipsis", - "EmptyStmt", - "Expr", - "ExprStmt", - "Field", - "FieldFilter", - "FieldList", - "File", - "FileExports", - "Filter", - 
"FilterDecl", - "FilterFile", - "FilterFuncDuplicates", - "FilterImportDuplicates", - "FilterPackage", - "FilterUnassociatedComments", - "ForStmt", - "Fprint", - "Fun", - "FuncDecl", - "FuncLit", - "FuncType", - "GenDecl", - "GoStmt", - "Ident", - "IfStmt", - "ImportSpec", - "Importer", - "IncDecStmt", - "IndexExpr", - "IndexListExpr", - "Inspect", - "InterfaceType", - "IsExported", - "IsGenerated", - "KeyValueExpr", - "LabeledStmt", - "Lbl", - "MapType", - "MergeMode", - "MergePackageFiles", - "NewCommentMap", - "NewIdent", - "NewObj", - "NewPackage", - "NewScope", - "Node", - "NotNilFilter", - "ObjKind", - "Object", - "Package", - "PackageExports", - "ParenExpr", - "Pkg", - "Print", - "RECV", - "RangeStmt", - "ReturnStmt", - "SEND", - "Scope", - "SelectStmt", - "SelectorExpr", - "SendStmt", - "SliceExpr", - "SortImports", - "Spec", - "StarExpr", - "Stmt", - "StructType", - "SwitchStmt", - "Typ", - "TypeAssertExpr", - "TypeSpec", - "TypeSwitchStmt", - "UnaryExpr", - "ValueSpec", - "Var", - "Visitor", - "Walk", - }, - "go/build": { - "AllowBinary", - "ArchChar", - "Context", - "Default", - "Directive", - "FindOnly", - "IgnoreVendor", - "Import", - "ImportComment", - "ImportDir", - "ImportMode", - "IsLocalImport", - "MultiplePackageError", - "NoGoError", - "Package", - "ToolDir", - }, - "go/build/constraint": { - "AndExpr", - "Expr", - "GoVersion", - "IsGoBuild", - "IsPlusBuild", - "NotExpr", - "OrExpr", - "Parse", - "PlusBuildLines", - "SyntaxError", - "TagExpr", - }, - "go/constant": { - "BinaryOp", - "BitLen", - "Bool", - "BoolVal", - "Bytes", - "Compare", - "Complex", - "Denom", - "Float", - "Float32Val", - "Float64Val", - "Imag", - "Int", - "Int64Val", - "Kind", - "Make", - "MakeBool", - "MakeFloat64", - "MakeFromBytes", - "MakeFromLiteral", - "MakeImag", - "MakeInt64", - "MakeString", - "MakeUint64", - "MakeUnknown", - "Num", - "Real", - "Shift", - "Sign", - "String", - "StringVal", - "ToComplex", - "ToFloat", - "ToInt", - "Uint64Val", - "UnaryOp", - 
"Unknown", - "Val", - "Value", - }, - "go/doc": { - "AllDecls", - "AllMethods", - "Example", - "Examples", - "Filter", - "Func", - "IllegalPrefixes", - "IsPredeclared", - "Mode", - "New", - "NewFromFiles", - "Note", - "Package", - "PreserveAST", - "Synopsis", - "ToHTML", - "ToText", - "Type", - "Value", - }, - "go/doc/comment": { - "Block", - "Code", - "DefaultLookupPackage", - "Doc", - "DocLink", - "Heading", - "Italic", - "Link", - "LinkDef", - "List", - "ListItem", - "Paragraph", - "Parser", - "Plain", - "Printer", - "Text", - }, - "go/format": { - "Node", - "Source", - }, - "go/importer": { - "Default", - "For", - "ForCompiler", - "Lookup", - }, - "go/parser": { - "AllErrors", - "DeclarationErrors", - "ImportsOnly", - "Mode", - "PackageClauseOnly", - "ParseComments", - "ParseDir", - "ParseExpr", - "ParseExprFrom", - "ParseFile", - "SkipObjectResolution", - "SpuriousErrors", - "Trace", - }, - "go/printer": { - "CommentedNode", - "Config", - "Fprint", - "Mode", - "RawFormat", - "SourcePos", - "TabIndent", - "UseSpaces", - }, - "go/scanner": { - "Error", - "ErrorHandler", - "ErrorList", - "Mode", - "PrintError", - "ScanComments", - "Scanner", - }, - "go/token": { - "ADD", - "ADD_ASSIGN", - "AND", - "AND_ASSIGN", - "AND_NOT", - "AND_NOT_ASSIGN", - "ARROW", - "ASSIGN", - "BREAK", - "CASE", - "CHAN", - "CHAR", - "COLON", - "COMMA", - "COMMENT", - "CONST", - "CONTINUE", - "DEC", - "DEFAULT", - "DEFER", - "DEFINE", - "ELLIPSIS", - "ELSE", - "EOF", - "EQL", - "FALLTHROUGH", - "FLOAT", - "FOR", - "FUNC", - "File", - "FileSet", - "GEQ", - "GO", - "GOTO", - "GTR", - "HighestPrec", - "IDENT", - "IF", - "ILLEGAL", - "IMAG", - "IMPORT", - "INC", - "INT", - "INTERFACE", - "IsExported", - "IsIdentifier", - "IsKeyword", - "LAND", - "LBRACE", - "LBRACK", - "LEQ", - "LOR", - "LPAREN", - "LSS", - "Lookup", - "LowestPrec", - "MAP", - "MUL", - "MUL_ASSIGN", - "NEQ", - "NOT", - "NewFileSet", - "NoPos", - "OR", - "OR_ASSIGN", - "PACKAGE", - "PERIOD", - "Pos", - "Position", - "QUO", - 
"QUO_ASSIGN", - "RANGE", - "RBRACE", - "RBRACK", - "REM", - "REM_ASSIGN", - "RETURN", - "RPAREN", - "SELECT", - "SEMICOLON", - "SHL", - "SHL_ASSIGN", - "SHR", - "SHR_ASSIGN", - "STRING", - "STRUCT", - "SUB", - "SUB_ASSIGN", - "SWITCH", - "TILDE", - "TYPE", - "Token", - "UnaryPrec", - "VAR", - "XOR", - "XOR_ASSIGN", - }, - "go/types": { - "ArgumentError", - "Array", - "AssertableTo", - "AssignableTo", - "Basic", - "BasicInfo", - "BasicKind", - "Bool", - "Builtin", - "Byte", - "Chan", - "ChanDir", - "CheckExpr", - "Checker", - "Comparable", - "Complex128", - "Complex64", - "Config", - "Const", - "Context", - "ConvertibleTo", - "DefPredeclaredTestFuncs", - "Default", - "Error", - "Eval", - "ExprString", - "FieldVal", - "Float32", - "Float64", - "Func", - "Id", - "Identical", - "IdenticalIgnoreTags", - "Implements", - "ImportMode", - "Importer", - "ImporterFrom", - "Info", - "Initializer", - "Instance", - "Instantiate", - "Int", - "Int16", - "Int32", - "Int64", - "Int8", - "Interface", - "Invalid", - "IsBoolean", - "IsComplex", - "IsConstType", - "IsFloat", - "IsInteger", - "IsInterface", - "IsNumeric", - "IsOrdered", - "IsString", - "IsUnsigned", - "IsUntyped", - "Label", - "LookupFieldOrMethod", - "Map", - "MethodExpr", - "MethodSet", - "MethodVal", - "MissingMethod", - "Named", - "NewArray", - "NewChan", - "NewChecker", - "NewConst", - "NewContext", - "NewField", - "NewFunc", - "NewInterface", - "NewInterfaceType", - "NewLabel", - "NewMap", - "NewMethodSet", - "NewNamed", - "NewPackage", - "NewParam", - "NewPkgName", - "NewPointer", - "NewScope", - "NewSignature", - "NewSignatureType", - "NewSlice", - "NewStruct", - "NewTerm", - "NewTuple", - "NewTypeName", - "NewTypeParam", - "NewUnion", - "NewVar", - "Nil", - "Object", - "ObjectString", - "Package", - "PkgName", - "Pointer", - "Qualifier", - "RecvOnly", - "RelativeTo", - "Rune", - "Satisfies", - "Scope", - "Selection", - "SelectionKind", - "SelectionString", - "SendOnly", - "SendRecv", - "Signature", - "Sizes", - 
"SizesFor", - "Slice", - "StdSizes", - "String", - "Struct", - "Term", - "Tuple", - "Typ", - "Type", - "TypeAndValue", - "TypeList", - "TypeName", - "TypeParam", - "TypeParamList", - "TypeString", - "Uint", - "Uint16", - "Uint32", - "Uint64", - "Uint8", - "Uintptr", - "Union", - "Universe", - "Unsafe", - "UnsafePointer", - "UntypedBool", - "UntypedComplex", - "UntypedFloat", - "UntypedInt", - "UntypedNil", - "UntypedRune", - "UntypedString", - "Var", - "WriteExpr", - "WriteSignature", - "WriteType", - }, - "hash": { - "Hash", - "Hash32", - "Hash64", - }, - "hash/adler32": { - "Checksum", - "New", - "Size", - }, - "hash/crc32": { - "Castagnoli", - "Checksum", - "ChecksumIEEE", - "IEEE", - "IEEETable", - "Koopman", - "MakeTable", - "New", - "NewIEEE", - "Size", - "Table", - "Update", - }, - "hash/crc64": { - "Checksum", - "ECMA", - "ISO", - "MakeTable", - "New", - "Size", - "Table", - "Update", - }, - "hash/fnv": { - "New128", - "New128a", - "New32", - "New32a", - "New64", - "New64a", - }, - "hash/maphash": { - "Bytes", - "Hash", - "MakeSeed", - "Seed", - "String", - }, - "html": { - "EscapeString", - "UnescapeString", - }, - "html/template": { - "CSS", - "ErrAmbigContext", - "ErrBadHTML", - "ErrBranchEnd", - "ErrEndContext", - "ErrJSTemplate", - "ErrNoSuchTemplate", - "ErrOutputContext", - "ErrPartialCharset", - "ErrPartialEscape", - "ErrPredefinedEscaper", - "ErrRangeLoopReentry", - "ErrSlashAmbig", - "Error", - "ErrorCode", - "FuncMap", - "HTML", - "HTMLAttr", - "HTMLEscape", - "HTMLEscapeString", - "HTMLEscaper", - "IsTrue", - "JS", - "JSEscape", - "JSEscapeString", - "JSEscaper", - "JSStr", - "Must", - "New", - "OK", - "ParseFS", - "ParseFiles", - "ParseGlob", - "Srcset", - "Template", - "URL", - "URLQueryEscaper", - }, - "image": { - "Alpha", - "Alpha16", - "Black", - "CMYK", - "Config", - "Decode", - "DecodeConfig", - "ErrFormat", - "Gray", - "Gray16", - "Image", - "NRGBA", - "NRGBA64", - "NYCbCrA", - "NewAlpha", - "NewAlpha16", - "NewCMYK", - "NewGray", - 
"NewGray16", - "NewNRGBA", - "NewNRGBA64", - "NewNYCbCrA", - "NewPaletted", - "NewRGBA", - "NewRGBA64", - "NewUniform", - "NewYCbCr", - "Opaque", - "Paletted", - "PalettedImage", - "Point", - "Pt", - "RGBA", - "RGBA64", - "RGBA64Image", - "Rect", - "Rectangle", - "RegisterFormat", - "Transparent", - "Uniform", - "White", - "YCbCr", - "YCbCrSubsampleRatio", - "YCbCrSubsampleRatio410", - "YCbCrSubsampleRatio411", - "YCbCrSubsampleRatio420", - "YCbCrSubsampleRatio422", - "YCbCrSubsampleRatio440", - "YCbCrSubsampleRatio444", - "ZP", - "ZR", - }, - "image/color": { - "Alpha", - "Alpha16", - "Alpha16Model", - "AlphaModel", - "Black", - "CMYK", - "CMYKModel", - "CMYKToRGB", - "Color", - "Gray", - "Gray16", - "Gray16Model", - "GrayModel", - "Model", - "ModelFunc", - "NRGBA", - "NRGBA64", - "NRGBA64Model", - "NRGBAModel", - "NYCbCrA", - "NYCbCrAModel", - "Opaque", - "Palette", - "RGBA", - "RGBA64", - "RGBA64Model", - "RGBAModel", - "RGBToCMYK", - "RGBToYCbCr", - "Transparent", - "White", - "YCbCr", - "YCbCrModel", - "YCbCrToRGB", - }, - "image/color/palette": { - "Plan9", - "WebSafe", - }, - "image/draw": { - "Draw", - "DrawMask", - "Drawer", - "FloydSteinberg", - "Image", - "Op", - "Over", - "Quantizer", - "RGBA64Image", - "Src", - }, - "image/gif": { - "Decode", - "DecodeAll", - "DecodeConfig", - "DisposalBackground", - "DisposalNone", - "DisposalPrevious", - "Encode", - "EncodeAll", - "GIF", - "Options", - }, - "image/jpeg": { - "Decode", - "DecodeConfig", - "DefaultQuality", - "Encode", - "FormatError", - "Options", - "Reader", - "UnsupportedError", - }, - "image/png": { - "BestCompression", - "BestSpeed", - "CompressionLevel", - "Decode", - "DecodeConfig", - "DefaultCompression", - "Encode", - "Encoder", - "EncoderBuffer", - "EncoderBufferPool", - "FormatError", - "NoCompression", - "UnsupportedError", - }, - "index/suffixarray": { - "Index", - "New", - }, - "io": { - "ByteReader", - "ByteScanner", - "ByteWriter", - "Closer", - "Copy", - "CopyBuffer", - "CopyN", - 
"Discard", - "EOF", - "ErrClosedPipe", - "ErrNoProgress", - "ErrShortBuffer", - "ErrShortWrite", - "ErrUnexpectedEOF", - "LimitReader", - "LimitedReader", - "MultiReader", - "MultiWriter", - "NewOffsetWriter", - "NewSectionReader", - "NopCloser", - "OffsetWriter", - "Pipe", - "PipeReader", - "PipeWriter", - "ReadAll", - "ReadAtLeast", - "ReadCloser", - "ReadFull", - "ReadSeekCloser", - "ReadSeeker", - "ReadWriteCloser", - "ReadWriteSeeker", - "ReadWriter", - "Reader", - "ReaderAt", - "ReaderFrom", - "RuneReader", - "RuneScanner", - "SectionReader", - "SeekCurrent", - "SeekEnd", - "SeekStart", - "Seeker", - "StringWriter", - "TeeReader", - "WriteCloser", - "WriteSeeker", - "WriteString", - "Writer", - "WriterAt", - "WriterTo", - }, - "io/fs": { - "DirEntry", - "ErrClosed", - "ErrExist", - "ErrInvalid", - "ErrNotExist", - "ErrPermission", - "FS", - "File", - "FileInfo", - "FileInfoToDirEntry", - "FileMode", - "FormatDirEntry", - "FormatFileInfo", - "Glob", - "GlobFS", - "ModeAppend", - "ModeCharDevice", - "ModeDevice", - "ModeDir", - "ModeExclusive", - "ModeIrregular", - "ModeNamedPipe", - "ModePerm", - "ModeSetgid", - "ModeSetuid", - "ModeSocket", - "ModeSticky", - "ModeSymlink", - "ModeTemporary", - "ModeType", - "PathError", - "ReadDir", - "ReadDirFS", - "ReadDirFile", - "ReadFile", - "ReadFileFS", - "SkipAll", - "SkipDir", - "Stat", - "StatFS", - "Sub", - "SubFS", - "ValidPath", - "WalkDir", - "WalkDirFunc", - }, - "io/ioutil": { - "Discard", - "NopCloser", - "ReadAll", - "ReadDir", - "ReadFile", - "TempDir", - "TempFile", - "WriteFile", - }, - "log": { - "Default", - "Fatal", - "Fatalf", - "Fatalln", - "Flags", - "LUTC", - "Ldate", - "Llongfile", - "Lmicroseconds", - "Lmsgprefix", - "Logger", - "Lshortfile", - "LstdFlags", - "Ltime", - "New", - "Output", - "Panic", - "Panicf", - "Panicln", - "Prefix", - "Print", - "Printf", - "Println", - "SetFlags", - "SetOutput", - "SetPrefix", - "Writer", - }, - "log/slog": { - "Any", - "AnyValue", - "Attr", - "Bool", - 
"BoolValue", - "Debug", - "DebugContext", - "Default", - "Duration", - "DurationValue", - "Error", - "ErrorContext", - "Float64", - "Float64Value", - "Group", - "GroupValue", - "Handler", - "HandlerOptions", - "Info", - "InfoContext", - "Int", - "Int64", - "Int64Value", - "IntValue", - "JSONHandler", - "Kind", - "KindAny", - "KindBool", - "KindDuration", - "KindFloat64", - "KindGroup", - "KindInt64", - "KindLogValuer", - "KindString", - "KindTime", - "KindUint64", - "Level", - "LevelDebug", - "LevelError", - "LevelInfo", - "LevelKey", - "LevelVar", - "LevelWarn", - "Leveler", - "Log", - "LogAttrs", - "LogValuer", - "Logger", - "MessageKey", - "New", - "NewJSONHandler", - "NewLogLogger", - "NewRecord", - "NewTextHandler", - "Record", - "SetDefault", - "Source", - "SourceKey", - "String", - "StringValue", - "TextHandler", - "Time", - "TimeKey", - "TimeValue", - "Uint64", - "Uint64Value", - "Value", - "Warn", - "WarnContext", - "With", - }, - "log/syslog": { - "Dial", - "LOG_ALERT", - "LOG_AUTH", - "LOG_AUTHPRIV", - "LOG_CRIT", - "LOG_CRON", - "LOG_DAEMON", - "LOG_DEBUG", - "LOG_EMERG", - "LOG_ERR", - "LOG_FTP", - "LOG_INFO", - "LOG_KERN", - "LOG_LOCAL0", - "LOG_LOCAL1", - "LOG_LOCAL2", - "LOG_LOCAL3", - "LOG_LOCAL4", - "LOG_LOCAL5", - "LOG_LOCAL6", - "LOG_LOCAL7", - "LOG_LPR", - "LOG_MAIL", - "LOG_NEWS", - "LOG_NOTICE", - "LOG_SYSLOG", - "LOG_USER", - "LOG_UUCP", - "LOG_WARNING", - "New", - "NewLogger", - "Priority", - "Writer", - }, - "maps": { - "Clone", - "Copy", - "DeleteFunc", - "Equal", - "EqualFunc", - }, - "math": { - "Abs", - "Acos", - "Acosh", - "Asin", - "Asinh", - "Atan", - "Atan2", - "Atanh", - "Cbrt", - "Ceil", - "Copysign", - "Cos", - "Cosh", - "Dim", - "E", - "Erf", - "Erfc", - "Erfcinv", - "Erfinv", - "Exp", - "Exp2", - "Expm1", - "FMA", - "Float32bits", - "Float32frombits", - "Float64bits", - "Float64frombits", - "Floor", - "Frexp", - "Gamma", - "Hypot", - "Ilogb", - "Inf", - "IsInf", - "IsNaN", - "J0", - "J1", - "Jn", - "Ldexp", - "Lgamma", - 
"Ln10", - "Ln2", - "Log", - "Log10", - "Log10E", - "Log1p", - "Log2", - "Log2E", - "Logb", - "Max", - "MaxFloat32", - "MaxFloat64", - "MaxInt", - "MaxInt16", - "MaxInt32", - "MaxInt64", - "MaxInt8", - "MaxUint", - "MaxUint16", - "MaxUint32", - "MaxUint64", - "MaxUint8", - "Min", - "MinInt", - "MinInt16", - "MinInt32", - "MinInt64", - "MinInt8", - "Mod", - "Modf", - "NaN", - "Nextafter", - "Nextafter32", - "Phi", - "Pi", - "Pow", - "Pow10", - "Remainder", - "Round", - "RoundToEven", - "Signbit", - "Sin", - "Sincos", - "Sinh", - "SmallestNonzeroFloat32", - "SmallestNonzeroFloat64", - "Sqrt", - "Sqrt2", - "SqrtE", - "SqrtPhi", - "SqrtPi", - "Tan", - "Tanh", - "Trunc", - "Y0", - "Y1", - "Yn", - }, - "math/big": { - "Above", - "Accuracy", - "AwayFromZero", - "Below", - "ErrNaN", - "Exact", - "Float", - "Int", - "Jacobi", - "MaxBase", - "MaxExp", - "MaxPrec", - "MinExp", - "NewFloat", - "NewInt", - "NewRat", - "ParseFloat", - "Rat", - "RoundingMode", - "ToNearestAway", - "ToNearestEven", - "ToNegativeInf", - "ToPositiveInf", - "ToZero", - "Word", - }, - "math/bits": { - "Add", - "Add32", - "Add64", - "Div", - "Div32", - "Div64", - "LeadingZeros", - "LeadingZeros16", - "LeadingZeros32", - "LeadingZeros64", - "LeadingZeros8", - "Len", - "Len16", - "Len32", - "Len64", - "Len8", - "Mul", - "Mul32", - "Mul64", - "OnesCount", - "OnesCount16", - "OnesCount32", - "OnesCount64", - "OnesCount8", - "Rem", - "Rem32", - "Rem64", - "Reverse", - "Reverse16", - "Reverse32", - "Reverse64", - "Reverse8", - "ReverseBytes", - "ReverseBytes16", - "ReverseBytes32", - "ReverseBytes64", - "RotateLeft", - "RotateLeft16", - "RotateLeft32", - "RotateLeft64", - "RotateLeft8", - "Sub", - "Sub32", - "Sub64", - "TrailingZeros", - "TrailingZeros16", - "TrailingZeros32", - "TrailingZeros64", - "TrailingZeros8", - "UintSize", - }, - "math/cmplx": { - "Abs", - "Acos", - "Acosh", - "Asin", - "Asinh", - "Atan", - "Atanh", - "Conj", - "Cos", - "Cosh", - "Cot", - "Exp", - "Inf", - "IsInf", - "IsNaN", - "Log", 
- "Log10", - "NaN", - "Phase", - "Polar", - "Pow", - "Rect", - "Sin", - "Sinh", - "Sqrt", - "Tan", - "Tanh", - }, - "math/rand": { - "ExpFloat64", - "Float32", - "Float64", - "Int", - "Int31", - "Int31n", - "Int63", - "Int63n", - "Intn", - "New", - "NewSource", - "NewZipf", - "NormFloat64", - "Perm", - "Rand", - "Read", - "Seed", - "Shuffle", - "Source", - "Source64", - "Uint32", - "Uint64", - "Zipf", - }, - "mime": { - "AddExtensionType", - "BEncoding", - "ErrInvalidMediaParameter", - "ExtensionsByType", - "FormatMediaType", - "ParseMediaType", - "QEncoding", - "TypeByExtension", - "WordDecoder", - "WordEncoder", - }, - "mime/multipart": { - "ErrMessageTooLarge", - "File", - "FileHeader", - "Form", - "NewReader", - "NewWriter", - "Part", - "Reader", - "Writer", - }, - "mime/quotedprintable": { - "NewReader", - "NewWriter", - "Reader", - "Writer", - }, - "net": { - "Addr", - "AddrError", - "Buffers", - "CIDRMask", - "Conn", - "DNSConfigError", - "DNSError", - "DefaultResolver", - "Dial", - "DialIP", - "DialTCP", - "DialTimeout", - "DialUDP", - "DialUnix", - "Dialer", - "ErrClosed", - "ErrWriteToConnected", - "Error", - "FileConn", - "FileListener", - "FilePacketConn", - "FlagBroadcast", - "FlagLoopback", - "FlagMulticast", - "FlagPointToPoint", - "FlagRunning", - "FlagUp", - "Flags", - "HardwareAddr", - "IP", - "IPAddr", - "IPConn", - "IPMask", - "IPNet", - "IPv4", - "IPv4Mask", - "IPv4allrouter", - "IPv4allsys", - "IPv4bcast", - "IPv4len", - "IPv4zero", - "IPv6interfacelocalallnodes", - "IPv6len", - "IPv6linklocalallnodes", - "IPv6linklocalallrouters", - "IPv6loopback", - "IPv6unspecified", - "IPv6zero", - "Interface", - "InterfaceAddrs", - "InterfaceByIndex", - "InterfaceByName", - "Interfaces", - "InvalidAddrError", - "JoinHostPort", - "Listen", - "ListenConfig", - "ListenIP", - "ListenMulticastUDP", - "ListenPacket", - "ListenTCP", - "ListenUDP", - "ListenUnix", - "ListenUnixgram", - "Listener", - "LookupAddr", - "LookupCNAME", - "LookupHost", - "LookupIP", - 
"LookupMX", - "LookupNS", - "LookupPort", - "LookupSRV", - "LookupTXT", - "MX", - "NS", - "OpError", - "PacketConn", - "ParseCIDR", - "ParseError", - "ParseIP", - "ParseMAC", - "Pipe", - "ResolveIPAddr", - "ResolveTCPAddr", - "ResolveUDPAddr", - "ResolveUnixAddr", - "Resolver", - "SRV", - "SplitHostPort", - "TCPAddr", - "TCPAddrFromAddrPort", - "TCPConn", - "TCPListener", - "UDPAddr", - "UDPAddrFromAddrPort", - "UDPConn", - "UnixAddr", - "UnixConn", - "UnixListener", - "UnknownNetworkError", - }, - "net/http": { - "AllowQuerySemicolons", - "CanonicalHeaderKey", - "Client", - "CloseNotifier", - "ConnState", - "Cookie", - "CookieJar", - "DefaultClient", - "DefaultMaxHeaderBytes", - "DefaultMaxIdleConnsPerHost", - "DefaultServeMux", - "DefaultTransport", - "DetectContentType", - "Dir", - "ErrAbortHandler", - "ErrBodyNotAllowed", - "ErrBodyReadAfterClose", - "ErrContentLength", - "ErrHandlerTimeout", - "ErrHeaderTooLong", - "ErrHijacked", - "ErrLineTooLong", - "ErrMissingBoundary", - "ErrMissingContentLength", - "ErrMissingFile", - "ErrNoCookie", - "ErrNoLocation", - "ErrNotMultipart", - "ErrNotSupported", - "ErrSchemeMismatch", - "ErrServerClosed", - "ErrShortBody", - "ErrSkipAltProtocol", - "ErrUnexpectedTrailer", - "ErrUseLastResponse", - "ErrWriteAfterFlush", - "Error", - "FS", - "File", - "FileServer", - "FileSystem", - "Flusher", - "Get", - "Handle", - "HandleFunc", - "Handler", - "HandlerFunc", - "Head", - "Header", - "Hijacker", - "ListenAndServe", - "ListenAndServeTLS", - "LocalAddrContextKey", - "MaxBytesError", - "MaxBytesHandler", - "MaxBytesReader", - "MethodConnect", - "MethodDelete", - "MethodGet", - "MethodHead", - "MethodOptions", - "MethodPatch", - "MethodPost", - "MethodPut", - "MethodTrace", - "NewFileTransport", - "NewRequest", - "NewRequestWithContext", - "NewResponseController", - "NewServeMux", - "NoBody", - "NotFound", - "NotFoundHandler", - "ParseHTTPVersion", - "ParseTime", - "Post", - "PostForm", - "ProtocolError", - "ProxyFromEnvironment", 
- "ProxyURL", - "PushOptions", - "Pusher", - "ReadRequest", - "ReadResponse", - "Redirect", - "RedirectHandler", - "Request", - "Response", - "ResponseController", - "ResponseWriter", - "RoundTripper", - "SameSite", - "SameSiteDefaultMode", - "SameSiteLaxMode", - "SameSiteNoneMode", - "SameSiteStrictMode", - "Serve", - "ServeContent", - "ServeFile", - "ServeMux", - "ServeTLS", - "Server", - "ServerContextKey", - "SetCookie", - "StateActive", - "StateClosed", - "StateHijacked", - "StateIdle", - "StateNew", - "StatusAccepted", - "StatusAlreadyReported", - "StatusBadGateway", - "StatusBadRequest", - "StatusConflict", - "StatusContinue", - "StatusCreated", - "StatusEarlyHints", - "StatusExpectationFailed", - "StatusFailedDependency", - "StatusForbidden", - "StatusFound", - "StatusGatewayTimeout", - "StatusGone", - "StatusHTTPVersionNotSupported", - "StatusIMUsed", - "StatusInsufficientStorage", - "StatusInternalServerError", - "StatusLengthRequired", - "StatusLocked", - "StatusLoopDetected", - "StatusMethodNotAllowed", - "StatusMisdirectedRequest", - "StatusMovedPermanently", - "StatusMultiStatus", - "StatusMultipleChoices", - "StatusNetworkAuthenticationRequired", - "StatusNoContent", - "StatusNonAuthoritativeInfo", - "StatusNotAcceptable", - "StatusNotExtended", - "StatusNotFound", - "StatusNotImplemented", - "StatusNotModified", - "StatusOK", - "StatusPartialContent", - "StatusPaymentRequired", - "StatusPermanentRedirect", - "StatusPreconditionFailed", - "StatusPreconditionRequired", - "StatusProcessing", - "StatusProxyAuthRequired", - "StatusRequestEntityTooLarge", - "StatusRequestHeaderFieldsTooLarge", - "StatusRequestTimeout", - "StatusRequestURITooLong", - "StatusRequestedRangeNotSatisfiable", - "StatusResetContent", - "StatusSeeOther", - "StatusServiceUnavailable", - "StatusSwitchingProtocols", - "StatusTeapot", - "StatusTemporaryRedirect", - "StatusText", - "StatusTooEarly", - "StatusTooManyRequests", - "StatusUnauthorized", - 
"StatusUnavailableForLegalReasons", - "StatusUnprocessableEntity", - "StatusUnsupportedMediaType", - "StatusUpgradeRequired", - "StatusUseProxy", - "StatusVariantAlsoNegotiates", - "StripPrefix", - "TimeFormat", - "TimeoutHandler", - "TrailerPrefix", - "Transport", - }, - "net/http/cgi": { - "Handler", - "Request", - "RequestFromMap", - "Serve", - }, - "net/http/cookiejar": { - "Jar", - "New", - "Options", - "PublicSuffixList", - }, - "net/http/fcgi": { - "ErrConnClosed", - "ErrRequestAborted", - "ProcessEnv", - "Serve", - }, - "net/http/httptest": { - "DefaultRemoteAddr", - "NewRecorder", - "NewRequest", - "NewServer", - "NewTLSServer", - "NewUnstartedServer", - "ResponseRecorder", - "Server", - }, - "net/http/httptrace": { - "ClientTrace", - "ContextClientTrace", - "DNSDoneInfo", - "DNSStartInfo", - "GotConnInfo", - "WithClientTrace", - "WroteRequestInfo", - }, - "net/http/httputil": { - "BufferPool", - "ClientConn", - "DumpRequest", - "DumpRequestOut", - "DumpResponse", - "ErrClosed", - "ErrLineTooLong", - "ErrPersistEOF", - "ErrPipeline", - "NewChunkedReader", - "NewChunkedWriter", - "NewClientConn", - "NewProxyClientConn", - "NewServerConn", - "NewSingleHostReverseProxy", - "ProxyRequest", - "ReverseProxy", - "ServerConn", - }, - "net/http/pprof": { - "Cmdline", - "Handler", - "Index", - "Profile", - "Symbol", - "Trace", - }, - "net/mail": { - "Address", - "AddressParser", - "ErrHeaderNotPresent", - "Header", - "Message", - "ParseAddress", - "ParseAddressList", - "ParseDate", - "ReadMessage", - }, - "net/netip": { - "Addr", - "AddrFrom16", - "AddrFrom4", - "AddrFromSlice", - "AddrPort", - "AddrPortFrom", - "IPv4Unspecified", - "IPv6LinkLocalAllNodes", - "IPv6LinkLocalAllRouters", - "IPv6Loopback", - "IPv6Unspecified", - "MustParseAddr", - "MustParseAddrPort", - "MustParsePrefix", - "ParseAddr", - "ParseAddrPort", - "ParsePrefix", - "Prefix", - "PrefixFrom", - }, - "net/rpc": { - "Accept", - "Call", - "Client", - "ClientCodec", - "DefaultDebugPath", - 
"DefaultRPCPath", - "DefaultServer", - "Dial", - "DialHTTP", - "DialHTTPPath", - "ErrShutdown", - "HandleHTTP", - "NewClient", - "NewClientWithCodec", - "NewServer", - "Register", - "RegisterName", - "Request", - "Response", - "ServeCodec", - "ServeConn", - "ServeRequest", - "Server", - "ServerCodec", - "ServerError", - }, - "net/rpc/jsonrpc": { - "Dial", - "NewClient", - "NewClientCodec", - "NewServerCodec", - "ServeConn", - }, - "net/smtp": { - "Auth", - "CRAMMD5Auth", - "Client", - "Dial", - "NewClient", - "PlainAuth", - "SendMail", - "ServerInfo", - }, - "net/textproto": { - "CanonicalMIMEHeaderKey", - "Conn", - "Dial", - "Error", - "MIMEHeader", - "NewConn", - "NewReader", - "NewWriter", - "Pipeline", - "ProtocolError", - "Reader", - "TrimBytes", - "TrimString", - "Writer", - }, - "net/url": { - "Error", - "EscapeError", - "InvalidHostError", - "JoinPath", - "Parse", - "ParseQuery", - "ParseRequestURI", - "PathEscape", - "PathUnescape", - "QueryEscape", - "QueryUnescape", - "URL", - "User", - "UserPassword", - "Userinfo", - "Values", - }, - "os": { - "Args", - "Chdir", - "Chmod", - "Chown", - "Chtimes", - "Clearenv", - "Create", - "CreateTemp", - "DevNull", - "DirEntry", - "DirFS", - "Environ", - "ErrClosed", - "ErrDeadlineExceeded", - "ErrExist", - "ErrInvalid", - "ErrNoDeadline", - "ErrNotExist", - "ErrPermission", - "ErrProcessDone", - "Executable", - "Exit", - "Expand", - "ExpandEnv", - "File", - "FileInfo", - "FileMode", - "FindProcess", - "Getegid", - "Getenv", - "Geteuid", - "Getgid", - "Getgroups", - "Getpagesize", - "Getpid", - "Getppid", - "Getuid", - "Getwd", - "Hostname", - "Interrupt", - "IsExist", - "IsNotExist", - "IsPathSeparator", - "IsPermission", - "IsTimeout", - "Kill", - "Lchown", - "Link", - "LinkError", - "LookupEnv", - "Lstat", - "Mkdir", - "MkdirAll", - "MkdirTemp", - "ModeAppend", - "ModeCharDevice", - "ModeDevice", - "ModeDir", - "ModeExclusive", - "ModeIrregular", - "ModeNamedPipe", - "ModePerm", - "ModeSetgid", - "ModeSetuid", - 
"ModeSocket", - "ModeSticky", - "ModeSymlink", - "ModeTemporary", - "ModeType", - "NewFile", - "NewSyscallError", - "O_APPEND", - "O_CREATE", - "O_EXCL", - "O_RDONLY", - "O_RDWR", - "O_SYNC", - "O_TRUNC", - "O_WRONLY", - "Open", - "OpenFile", - "PathError", - "PathListSeparator", - "PathSeparator", - "Pipe", - "ProcAttr", - "Process", - "ProcessState", - "ReadDir", - "ReadFile", - "Readlink", - "Remove", - "RemoveAll", - "Rename", - "SEEK_CUR", - "SEEK_END", - "SEEK_SET", - "SameFile", - "Setenv", - "Signal", - "StartProcess", - "Stat", - "Stderr", - "Stdin", - "Stdout", - "Symlink", - "SyscallError", - "TempDir", - "Truncate", - "Unsetenv", - "UserCacheDir", - "UserConfigDir", - "UserHomeDir", - "WriteFile", - }, - "os/exec": { - "Cmd", - "Command", - "CommandContext", - "ErrDot", - "ErrNotFound", - "ErrWaitDelay", - "Error", - "ExitError", - "LookPath", - }, - "os/signal": { - "Ignore", - "Ignored", - "Notify", - "NotifyContext", - "Reset", - "Stop", - }, - "os/user": { - "Current", - "Group", - "Lookup", - "LookupGroup", - "LookupGroupId", - "LookupId", - "UnknownGroupError", - "UnknownGroupIdError", - "UnknownUserError", - "UnknownUserIdError", - "User", - }, - "path": { - "Base", - "Clean", - "Dir", - "ErrBadPattern", - "Ext", - "IsAbs", - "Join", - "Match", - "Split", - }, - "path/filepath": { - "Abs", - "Base", - "Clean", - "Dir", - "ErrBadPattern", - "EvalSymlinks", - "Ext", - "FromSlash", - "Glob", - "HasPrefix", - "IsAbs", - "IsLocal", - "Join", - "ListSeparator", - "Match", - "Rel", - "Separator", - "SkipAll", - "SkipDir", - "Split", - "SplitList", - "ToSlash", - "VolumeName", - "Walk", - "WalkDir", - "WalkFunc", - }, - "plugin": { - "Open", - "Plugin", - "Symbol", - }, - "reflect": { - "Append", - "AppendSlice", - "Array", - "ArrayOf", - "Bool", - "BothDir", - "Chan", - "ChanDir", - "ChanOf", - "Complex128", - "Complex64", - "Copy", - "DeepEqual", - "Float32", - "Float64", - "Func", - "FuncOf", - "Indirect", - "Int", - "Int16", - "Int32", - "Int64", - 
"Int8", - "Interface", - "Invalid", - "Kind", - "MakeChan", - "MakeFunc", - "MakeMap", - "MakeMapWithSize", - "MakeSlice", - "Map", - "MapIter", - "MapOf", - "Method", - "New", - "NewAt", - "Pointer", - "PointerTo", - "Ptr", - "PtrTo", - "RecvDir", - "Select", - "SelectCase", - "SelectDefault", - "SelectDir", - "SelectRecv", - "SelectSend", - "SendDir", - "Slice", - "SliceHeader", - "SliceOf", - "String", - "StringHeader", - "Struct", - "StructField", - "StructOf", - "StructTag", - "Swapper", - "Type", - "TypeOf", - "Uint", - "Uint16", - "Uint32", - "Uint64", - "Uint8", - "Uintptr", - "UnsafePointer", - "Value", - "ValueError", - "ValueOf", - "VisibleFields", - "Zero", - }, - "regexp": { - "Compile", - "CompilePOSIX", - "Match", - "MatchReader", - "MatchString", - "MustCompile", - "MustCompilePOSIX", - "QuoteMeta", - "Regexp", - }, - "regexp/syntax": { - "ClassNL", - "Compile", - "DotNL", - "EmptyBeginLine", - "EmptyBeginText", - "EmptyEndLine", - "EmptyEndText", - "EmptyNoWordBoundary", - "EmptyOp", - "EmptyOpContext", - "EmptyWordBoundary", - "ErrInternalError", - "ErrInvalidCharClass", - "ErrInvalidCharRange", - "ErrInvalidEscape", - "ErrInvalidNamedCapture", - "ErrInvalidPerlOp", - "ErrInvalidRepeatOp", - "ErrInvalidRepeatSize", - "ErrInvalidUTF8", - "ErrLarge", - "ErrMissingBracket", - "ErrMissingParen", - "ErrMissingRepeatArgument", - "ErrNestingDepth", - "ErrTrailingBackslash", - "ErrUnexpectedParen", - "Error", - "ErrorCode", - "Flags", - "FoldCase", - "Inst", - "InstAlt", - "InstAltMatch", - "InstCapture", - "InstEmptyWidth", - "InstFail", - "InstMatch", - "InstNop", - "InstOp", - "InstRune", - "InstRune1", - "InstRuneAny", - "InstRuneAnyNotNL", - "IsWordChar", - "Literal", - "MatchNL", - "NonGreedy", - "OneLine", - "Op", - "OpAlternate", - "OpAnyChar", - "OpAnyCharNotNL", - "OpBeginLine", - "OpBeginText", - "OpCapture", - "OpCharClass", - "OpConcat", - "OpEmptyMatch", - "OpEndLine", - "OpEndText", - "OpLiteral", - "OpNoMatch", - "OpNoWordBoundary", - 
"OpPlus", - "OpQuest", - "OpRepeat", - "OpStar", - "OpWordBoundary", - "POSIX", - "Parse", - "Perl", - "PerlX", - "Prog", - "Regexp", - "Simple", - "UnicodeGroups", - "WasDollar", - }, - "runtime": { - "BlockProfile", - "BlockProfileRecord", - "Breakpoint", - "CPUProfile", - "Caller", - "Callers", - "CallersFrames", - "Compiler", - "Error", - "Frame", - "Frames", - "Func", - "FuncForPC", - "GC", - "GOARCH", - "GOMAXPROCS", - "GOOS", - "GOROOT", - "Goexit", - "GoroutineProfile", - "Gosched", - "KeepAlive", - "LockOSThread", - "MemProfile", - "MemProfileRate", - "MemProfileRecord", - "MemStats", - "MutexProfile", - "NumCPU", - "NumCgoCall", - "NumGoroutine", - "PanicNilError", - "Pinner", - "ReadMemStats", - "ReadTrace", - "SetBlockProfileRate", - "SetCPUProfileRate", - "SetCgoTraceback", - "SetFinalizer", - "SetMutexProfileFraction", - "Stack", - "StackRecord", - "StartTrace", - "StopTrace", - "ThreadCreateProfile", - "TypeAssertionError", - "UnlockOSThread", - "Version", - }, - "runtime/cgo": { - "Handle", - "Incomplete", - "NewHandle", - }, - "runtime/coverage": { - "ClearCounters", - "WriteCounters", - "WriteCountersDir", - "WriteMeta", - "WriteMetaDir", - }, - "runtime/debug": { - "BuildInfo", - "BuildSetting", - "FreeOSMemory", - "GCStats", - "Module", - "ParseBuildInfo", - "PrintStack", - "ReadBuildInfo", - "ReadGCStats", - "SetGCPercent", - "SetMaxStack", - "SetMaxThreads", - "SetMemoryLimit", - "SetPanicOnFault", - "SetTraceback", - "Stack", - "WriteHeapDump", - }, - "runtime/metrics": { - "All", - "Description", - "Float64Histogram", - "KindBad", - "KindFloat64", - "KindFloat64Histogram", - "KindUint64", - "Read", - "Sample", - "Value", - "ValueKind", - }, - "runtime/pprof": { - "Do", - "ForLabels", - "Label", - "LabelSet", - "Labels", - "Lookup", - "NewProfile", - "Profile", - "Profiles", - "SetGoroutineLabels", - "StartCPUProfile", - "StopCPUProfile", - "WithLabels", - "WriteHeapProfile", - }, - "runtime/trace": { - "IsEnabled", - "Log", - "Logf", - 
"NewTask", - "Region", - "Start", - "StartRegion", - "Stop", - "Task", - "WithRegion", - }, - "slices": { - "BinarySearch", - "BinarySearchFunc", - "Clip", - "Clone", - "Compact", - "CompactFunc", - "Compare", - "CompareFunc", - "Contains", - "ContainsFunc", - "Delete", - "DeleteFunc", - "Equal", - "EqualFunc", - "Grow", - "Index", - "IndexFunc", - "Insert", - "IsSorted", - "IsSortedFunc", - "Max", - "MaxFunc", - "Min", - "MinFunc", - "Replace", - "Reverse", - "Sort", - "SortFunc", - "SortStableFunc", - }, - "sort": { - "Find", - "Float64Slice", - "Float64s", - "Float64sAreSorted", - "IntSlice", - "Interface", - "Ints", - "IntsAreSorted", - "IsSorted", - "Reverse", - "Search", - "SearchFloat64s", - "SearchInts", - "SearchStrings", - "Slice", - "SliceIsSorted", - "SliceStable", - "Sort", - "Stable", - "StringSlice", - "Strings", - "StringsAreSorted", - }, - "strconv": { - "AppendBool", - "AppendFloat", - "AppendInt", - "AppendQuote", - "AppendQuoteRune", - "AppendQuoteRuneToASCII", - "AppendQuoteRuneToGraphic", - "AppendQuoteToASCII", - "AppendQuoteToGraphic", - "AppendUint", - "Atoi", - "CanBackquote", - "ErrRange", - "ErrSyntax", - "FormatBool", - "FormatComplex", - "FormatFloat", - "FormatInt", - "FormatUint", - "IntSize", - "IsGraphic", - "IsPrint", - "Itoa", - "NumError", - "ParseBool", - "ParseComplex", - "ParseFloat", - "ParseInt", - "ParseUint", - "Quote", - "QuoteRune", - "QuoteRuneToASCII", - "QuoteRuneToGraphic", - "QuoteToASCII", - "QuoteToGraphic", - "QuotedPrefix", - "Unquote", - "UnquoteChar", - }, - "strings": { - "Builder", - "Clone", - "Compare", - "Contains", - "ContainsAny", - "ContainsFunc", - "ContainsRune", - "Count", - "Cut", - "CutPrefix", - "CutSuffix", - "EqualFold", - "Fields", - "FieldsFunc", - "HasPrefix", - "HasSuffix", - "Index", - "IndexAny", - "IndexByte", - "IndexFunc", - "IndexRune", - "Join", - "LastIndex", - "LastIndexAny", - "LastIndexByte", - "LastIndexFunc", - "Map", - "NewReader", - "NewReplacer", - "Reader", - "Repeat", - 
"Replace", - "ReplaceAll", - "Replacer", - "Split", - "SplitAfter", - "SplitAfterN", - "SplitN", - "Title", - "ToLower", - "ToLowerSpecial", - "ToTitle", - "ToTitleSpecial", - "ToUpper", - "ToUpperSpecial", - "ToValidUTF8", - "Trim", - "TrimFunc", - "TrimLeft", - "TrimLeftFunc", - "TrimPrefix", - "TrimRight", - "TrimRightFunc", - "TrimSpace", - "TrimSuffix", - }, - "sync": { - "Cond", - "Locker", - "Map", - "Mutex", - "NewCond", - "Once", - "OnceFunc", - "OnceValue", - "OnceValues", - "Pool", - "RWMutex", - "WaitGroup", - }, - "sync/atomic": { - "AddInt32", - "AddInt64", - "AddUint32", - "AddUint64", - "AddUintptr", - "Bool", - "CompareAndSwapInt32", - "CompareAndSwapInt64", - "CompareAndSwapPointer", - "CompareAndSwapUint32", - "CompareAndSwapUint64", - "CompareAndSwapUintptr", - "Int32", - "Int64", - "LoadInt32", - "LoadInt64", - "LoadPointer", - "LoadUint32", - "LoadUint64", - "LoadUintptr", - "Pointer", - "StoreInt32", - "StoreInt64", - "StorePointer", - "StoreUint32", - "StoreUint64", - "StoreUintptr", - "SwapInt32", - "SwapInt64", - "SwapPointer", - "SwapUint32", - "SwapUint64", - "SwapUintptr", - "Uint32", - "Uint64", - "Uintptr", - "Value", - }, - "syscall": { - "AF_ALG", - "AF_APPLETALK", - "AF_ARP", - "AF_ASH", - "AF_ATM", - "AF_ATMPVC", - "AF_ATMSVC", - "AF_AX25", - "AF_BLUETOOTH", - "AF_BRIDGE", - "AF_CAIF", - "AF_CAN", - "AF_CCITT", - "AF_CHAOS", - "AF_CNT", - "AF_COIP", - "AF_DATAKIT", - "AF_DECnet", - "AF_DLI", - "AF_E164", - "AF_ECMA", - "AF_ECONET", - "AF_ENCAP", - "AF_FILE", - "AF_HYLINK", - "AF_IEEE80211", - "AF_IEEE802154", - "AF_IMPLINK", - "AF_INET", - "AF_INET6", - "AF_INET6_SDP", - "AF_INET_SDP", - "AF_IPX", - "AF_IRDA", - "AF_ISDN", - "AF_ISO", - "AF_IUCV", - "AF_KEY", - "AF_LAT", - "AF_LINK", - "AF_LLC", - "AF_LOCAL", - "AF_MAX", - "AF_MPLS", - "AF_NATM", - "AF_NDRV", - "AF_NETBEUI", - "AF_NETBIOS", - "AF_NETGRAPH", - "AF_NETLINK", - "AF_NETROM", - "AF_NS", - "AF_OROUTE", - "AF_OSI", - "AF_PACKET", - "AF_PHONET", - "AF_PPP", - "AF_PPPOX", 
- "AF_PUP", - "AF_RDS", - "AF_RESERVED_36", - "AF_ROSE", - "AF_ROUTE", - "AF_RXRPC", - "AF_SCLUSTER", - "AF_SECURITY", - "AF_SIP", - "AF_SLOW", - "AF_SNA", - "AF_SYSTEM", - "AF_TIPC", - "AF_UNIX", - "AF_UNSPEC", - "AF_UTUN", - "AF_VENDOR00", - "AF_VENDOR01", - "AF_VENDOR02", - "AF_VENDOR03", - "AF_VENDOR04", - "AF_VENDOR05", - "AF_VENDOR06", - "AF_VENDOR07", - "AF_VENDOR08", - "AF_VENDOR09", - "AF_VENDOR10", - "AF_VENDOR11", - "AF_VENDOR12", - "AF_VENDOR13", - "AF_VENDOR14", - "AF_VENDOR15", - "AF_VENDOR16", - "AF_VENDOR17", - "AF_VENDOR18", - "AF_VENDOR19", - "AF_VENDOR20", - "AF_VENDOR21", - "AF_VENDOR22", - "AF_VENDOR23", - "AF_VENDOR24", - "AF_VENDOR25", - "AF_VENDOR26", - "AF_VENDOR27", - "AF_VENDOR28", - "AF_VENDOR29", - "AF_VENDOR30", - "AF_VENDOR31", - "AF_VENDOR32", - "AF_VENDOR33", - "AF_VENDOR34", - "AF_VENDOR35", - "AF_VENDOR36", - "AF_VENDOR37", - "AF_VENDOR38", - "AF_VENDOR39", - "AF_VENDOR40", - "AF_VENDOR41", - "AF_VENDOR42", - "AF_VENDOR43", - "AF_VENDOR44", - "AF_VENDOR45", - "AF_VENDOR46", - "AF_VENDOR47", - "AF_WANPIPE", - "AF_X25", - "AI_CANONNAME", - "AI_NUMERICHOST", - "AI_PASSIVE", - "APPLICATION_ERROR", - "ARPHRD_ADAPT", - "ARPHRD_APPLETLK", - "ARPHRD_ARCNET", - "ARPHRD_ASH", - "ARPHRD_ATM", - "ARPHRD_AX25", - "ARPHRD_BIF", - "ARPHRD_CHAOS", - "ARPHRD_CISCO", - "ARPHRD_CSLIP", - "ARPHRD_CSLIP6", - "ARPHRD_DDCMP", - "ARPHRD_DLCI", - "ARPHRD_ECONET", - "ARPHRD_EETHER", - "ARPHRD_ETHER", - "ARPHRD_EUI64", - "ARPHRD_FCAL", - "ARPHRD_FCFABRIC", - "ARPHRD_FCPL", - "ARPHRD_FCPP", - "ARPHRD_FDDI", - "ARPHRD_FRAD", - "ARPHRD_FRELAY", - "ARPHRD_HDLC", - "ARPHRD_HIPPI", - "ARPHRD_HWX25", - "ARPHRD_IEEE1394", - "ARPHRD_IEEE802", - "ARPHRD_IEEE80211", - "ARPHRD_IEEE80211_PRISM", - "ARPHRD_IEEE80211_RADIOTAP", - "ARPHRD_IEEE802154", - "ARPHRD_IEEE802154_PHY", - "ARPHRD_IEEE802_TR", - "ARPHRD_INFINIBAND", - "ARPHRD_IPDDP", - "ARPHRD_IPGRE", - "ARPHRD_IRDA", - "ARPHRD_LAPB", - "ARPHRD_LOCALTLK", - "ARPHRD_LOOPBACK", - "ARPHRD_METRICOM", - "ARPHRD_NETROM", 
- "ARPHRD_NONE", - "ARPHRD_PIMREG", - "ARPHRD_PPP", - "ARPHRD_PRONET", - "ARPHRD_RAWHDLC", - "ARPHRD_ROSE", - "ARPHRD_RSRVD", - "ARPHRD_SIT", - "ARPHRD_SKIP", - "ARPHRD_SLIP", - "ARPHRD_SLIP6", - "ARPHRD_STRIP", - "ARPHRD_TUNNEL", - "ARPHRD_TUNNEL6", - "ARPHRD_VOID", - "ARPHRD_X25", - "AUTHTYPE_CLIENT", - "AUTHTYPE_SERVER", - "Accept", - "Accept4", - "AcceptEx", - "Access", - "Acct", - "AddrinfoW", - "Adjtime", - "Adjtimex", - "AllThreadsSyscall", - "AllThreadsSyscall6", - "AttachLsf", - "B0", - "B1000000", - "B110", - "B115200", - "B1152000", - "B1200", - "B134", - "B14400", - "B150", - "B1500000", - "B1800", - "B19200", - "B200", - "B2000000", - "B230400", - "B2400", - "B2500000", - "B28800", - "B300", - "B3000000", - "B3500000", - "B38400", - "B4000000", - "B460800", - "B4800", - "B50", - "B500000", - "B57600", - "B576000", - "B600", - "B7200", - "B75", - "B76800", - "B921600", - "B9600", - "BASE_PROTOCOL", - "BIOCFEEDBACK", - "BIOCFLUSH", - "BIOCGBLEN", - "BIOCGDIRECTION", - "BIOCGDIRFILT", - "BIOCGDLT", - "BIOCGDLTLIST", - "BIOCGETBUFMODE", - "BIOCGETIF", - "BIOCGETZMAX", - "BIOCGFEEDBACK", - "BIOCGFILDROP", - "BIOCGHDRCMPLT", - "BIOCGRSIG", - "BIOCGRTIMEOUT", - "BIOCGSEESENT", - "BIOCGSTATS", - "BIOCGSTATSOLD", - "BIOCGTSTAMP", - "BIOCIMMEDIATE", - "BIOCLOCK", - "BIOCPROMISC", - "BIOCROTZBUF", - "BIOCSBLEN", - "BIOCSDIRECTION", - "BIOCSDIRFILT", - "BIOCSDLT", - "BIOCSETBUFMODE", - "BIOCSETF", - "BIOCSETFNR", - "BIOCSETIF", - "BIOCSETWF", - "BIOCSETZBUF", - "BIOCSFEEDBACK", - "BIOCSFILDROP", - "BIOCSHDRCMPLT", - "BIOCSRSIG", - "BIOCSRTIMEOUT", - "BIOCSSEESENT", - "BIOCSTCPF", - "BIOCSTSTAMP", - "BIOCSUDPF", - "BIOCVERSION", - "BPF_A", - "BPF_ABS", - "BPF_ADD", - "BPF_ALIGNMENT", - "BPF_ALIGNMENT32", - "BPF_ALU", - "BPF_AND", - "BPF_B", - "BPF_BUFMODE_BUFFER", - "BPF_BUFMODE_ZBUF", - "BPF_DFLTBUFSIZE", - "BPF_DIRECTION_IN", - "BPF_DIRECTION_OUT", - "BPF_DIV", - "BPF_H", - "BPF_IMM", - "BPF_IND", - "BPF_JA", - "BPF_JEQ", - "BPF_JGE", - "BPF_JGT", - "BPF_JMP", - 
"BPF_JSET", - "BPF_K", - "BPF_LD", - "BPF_LDX", - "BPF_LEN", - "BPF_LSH", - "BPF_MAJOR_VERSION", - "BPF_MAXBUFSIZE", - "BPF_MAXINSNS", - "BPF_MEM", - "BPF_MEMWORDS", - "BPF_MINBUFSIZE", - "BPF_MINOR_VERSION", - "BPF_MISC", - "BPF_MSH", - "BPF_MUL", - "BPF_NEG", - "BPF_OR", - "BPF_RELEASE", - "BPF_RET", - "BPF_RSH", - "BPF_ST", - "BPF_STX", - "BPF_SUB", - "BPF_TAX", - "BPF_TXA", - "BPF_T_BINTIME", - "BPF_T_BINTIME_FAST", - "BPF_T_BINTIME_MONOTONIC", - "BPF_T_BINTIME_MONOTONIC_FAST", - "BPF_T_FAST", - "BPF_T_FLAG_MASK", - "BPF_T_FORMAT_MASK", - "BPF_T_MICROTIME", - "BPF_T_MICROTIME_FAST", - "BPF_T_MICROTIME_MONOTONIC", - "BPF_T_MICROTIME_MONOTONIC_FAST", - "BPF_T_MONOTONIC", - "BPF_T_MONOTONIC_FAST", - "BPF_T_NANOTIME", - "BPF_T_NANOTIME_FAST", - "BPF_T_NANOTIME_MONOTONIC", - "BPF_T_NANOTIME_MONOTONIC_FAST", - "BPF_T_NONE", - "BPF_T_NORMAL", - "BPF_W", - "BPF_X", - "BRKINT", - "Bind", - "BindToDevice", - "BpfBuflen", - "BpfDatalink", - "BpfHdr", - "BpfHeadercmpl", - "BpfInsn", - "BpfInterface", - "BpfJump", - "BpfProgram", - "BpfStat", - "BpfStats", - "BpfStmt", - "BpfTimeout", - "BpfTimeval", - "BpfVersion", - "BpfZbuf", - "BpfZbufHeader", - "ByHandleFileInformation", - "BytePtrFromString", - "ByteSliceFromString", - "CCR0_FLUSH", - "CERT_CHAIN_POLICY_AUTHENTICODE", - "CERT_CHAIN_POLICY_AUTHENTICODE_TS", - "CERT_CHAIN_POLICY_BASE", - "CERT_CHAIN_POLICY_BASIC_CONSTRAINTS", - "CERT_CHAIN_POLICY_EV", - "CERT_CHAIN_POLICY_MICROSOFT_ROOT", - "CERT_CHAIN_POLICY_NT_AUTH", - "CERT_CHAIN_POLICY_SSL", - "CERT_E_CN_NO_MATCH", - "CERT_E_EXPIRED", - "CERT_E_PURPOSE", - "CERT_E_ROLE", - "CERT_E_UNTRUSTEDROOT", - "CERT_STORE_ADD_ALWAYS", - "CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG", - "CERT_STORE_PROV_MEMORY", - "CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT", - "CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT", - "CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT", - "CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT", - "CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT", - 
"CERT_TRUST_INVALID_BASIC_CONSTRAINTS", - "CERT_TRUST_INVALID_EXTENSION", - "CERT_TRUST_INVALID_NAME_CONSTRAINTS", - "CERT_TRUST_INVALID_POLICY_CONSTRAINTS", - "CERT_TRUST_IS_CYCLIC", - "CERT_TRUST_IS_EXPLICIT_DISTRUST", - "CERT_TRUST_IS_NOT_SIGNATURE_VALID", - "CERT_TRUST_IS_NOT_TIME_VALID", - "CERT_TRUST_IS_NOT_VALID_FOR_USAGE", - "CERT_TRUST_IS_OFFLINE_REVOCATION", - "CERT_TRUST_IS_REVOKED", - "CERT_TRUST_IS_UNTRUSTED_ROOT", - "CERT_TRUST_NO_ERROR", - "CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY", - "CERT_TRUST_REVOCATION_STATUS_UNKNOWN", - "CFLUSH", - "CLOCAL", - "CLONE_CHILD_CLEARTID", - "CLONE_CHILD_SETTID", - "CLONE_CLEAR_SIGHAND", - "CLONE_CSIGNAL", - "CLONE_DETACHED", - "CLONE_FILES", - "CLONE_FS", - "CLONE_INTO_CGROUP", - "CLONE_IO", - "CLONE_NEWCGROUP", - "CLONE_NEWIPC", - "CLONE_NEWNET", - "CLONE_NEWNS", - "CLONE_NEWPID", - "CLONE_NEWTIME", - "CLONE_NEWUSER", - "CLONE_NEWUTS", - "CLONE_PARENT", - "CLONE_PARENT_SETTID", - "CLONE_PID", - "CLONE_PIDFD", - "CLONE_PTRACE", - "CLONE_SETTLS", - "CLONE_SIGHAND", - "CLONE_SYSVSEM", - "CLONE_THREAD", - "CLONE_UNTRACED", - "CLONE_VFORK", - "CLONE_VM", - "CPUID_CFLUSH", - "CREAD", - "CREATE_ALWAYS", - "CREATE_NEW", - "CREATE_NEW_PROCESS_GROUP", - "CREATE_UNICODE_ENVIRONMENT", - "CRYPT_DEFAULT_CONTAINER_OPTIONAL", - "CRYPT_DELETEKEYSET", - "CRYPT_MACHINE_KEYSET", - "CRYPT_NEWKEYSET", - "CRYPT_SILENT", - "CRYPT_VERIFYCONTEXT", - "CS5", - "CS6", - "CS7", - "CS8", - "CSIZE", - "CSTART", - "CSTATUS", - "CSTOP", - "CSTOPB", - "CSUSP", - "CTL_MAXNAME", - "CTL_NET", - "CTL_QUERY", - "CTRL_BREAK_EVENT", - "CTRL_CLOSE_EVENT", - "CTRL_C_EVENT", - "CTRL_LOGOFF_EVENT", - "CTRL_SHUTDOWN_EVENT", - "CancelIo", - "CancelIoEx", - "CertAddCertificateContextToStore", - "CertChainContext", - "CertChainElement", - "CertChainPara", - "CertChainPolicyPara", - "CertChainPolicyStatus", - "CertCloseStore", - "CertContext", - "CertCreateCertificateContext", - "CertEnhKeyUsage", - "CertEnumCertificatesInStore", - "CertFreeCertificateChain", - 
"CertFreeCertificateContext", - "CertGetCertificateChain", - "CertInfo", - "CertOpenStore", - "CertOpenSystemStore", - "CertRevocationCrlInfo", - "CertRevocationInfo", - "CertSimpleChain", - "CertTrustListInfo", - "CertTrustStatus", - "CertUsageMatch", - "CertVerifyCertificateChainPolicy", - "Chdir", - "CheckBpfVersion", - "Chflags", - "Chmod", - "Chown", - "Chroot", - "Clearenv", - "Close", - "CloseHandle", - "CloseOnExec", - "Closesocket", - "CmsgLen", - "CmsgSpace", - "Cmsghdr", - "CommandLineToArgv", - "ComputerName", - "Conn", - "Connect", - "ConnectEx", - "ConvertSidToStringSid", - "ConvertStringSidToSid", - "CopySid", - "Creat", - "CreateDirectory", - "CreateFile", - "CreateFileMapping", - "CreateHardLink", - "CreateIoCompletionPort", - "CreatePipe", - "CreateProcess", - "CreateProcessAsUser", - "CreateSymbolicLink", - "CreateToolhelp32Snapshot", - "Credential", - "CryptAcquireContext", - "CryptGenRandom", - "CryptReleaseContext", - "DIOCBSFLUSH", - "DIOCOSFPFLUSH", - "DLL", - "DLLError", - "DLT_A429", - "DLT_A653_ICM", - "DLT_AIRONET_HEADER", - "DLT_AOS", - "DLT_APPLE_IP_OVER_IEEE1394", - "DLT_ARCNET", - "DLT_ARCNET_LINUX", - "DLT_ATM_CLIP", - "DLT_ATM_RFC1483", - "DLT_AURORA", - "DLT_AX25", - "DLT_AX25_KISS", - "DLT_BACNET_MS_TP", - "DLT_BLUETOOTH_HCI_H4", - "DLT_BLUETOOTH_HCI_H4_WITH_PHDR", - "DLT_CAN20B", - "DLT_CAN_SOCKETCAN", - "DLT_CHAOS", - "DLT_CHDLC", - "DLT_CISCO_IOS", - "DLT_C_HDLC", - "DLT_C_HDLC_WITH_DIR", - "DLT_DBUS", - "DLT_DECT", - "DLT_DOCSIS", - "DLT_DVB_CI", - "DLT_ECONET", - "DLT_EN10MB", - "DLT_EN3MB", - "DLT_ENC", - "DLT_ERF", - "DLT_ERF_ETH", - "DLT_ERF_POS", - "DLT_FC_2", - "DLT_FC_2_WITH_FRAME_DELIMS", - "DLT_FDDI", - "DLT_FLEXRAY", - "DLT_FRELAY", - "DLT_FRELAY_WITH_DIR", - "DLT_GCOM_SERIAL", - "DLT_GCOM_T1E1", - "DLT_GPF_F", - "DLT_GPF_T", - "DLT_GPRS_LLC", - "DLT_GSMTAP_ABIS", - "DLT_GSMTAP_UM", - "DLT_HDLC", - "DLT_HHDLC", - "DLT_HIPPI", - "DLT_IBM_SN", - "DLT_IBM_SP", - "DLT_IEEE802", - "DLT_IEEE802_11", - 
"DLT_IEEE802_11_RADIO", - "DLT_IEEE802_11_RADIO_AVS", - "DLT_IEEE802_15_4", - "DLT_IEEE802_15_4_LINUX", - "DLT_IEEE802_15_4_NOFCS", - "DLT_IEEE802_15_4_NONASK_PHY", - "DLT_IEEE802_16_MAC_CPS", - "DLT_IEEE802_16_MAC_CPS_RADIO", - "DLT_IPFILTER", - "DLT_IPMB", - "DLT_IPMB_LINUX", - "DLT_IPNET", - "DLT_IPOIB", - "DLT_IPV4", - "DLT_IPV6", - "DLT_IP_OVER_FC", - "DLT_JUNIPER_ATM1", - "DLT_JUNIPER_ATM2", - "DLT_JUNIPER_ATM_CEMIC", - "DLT_JUNIPER_CHDLC", - "DLT_JUNIPER_ES", - "DLT_JUNIPER_ETHER", - "DLT_JUNIPER_FIBRECHANNEL", - "DLT_JUNIPER_FRELAY", - "DLT_JUNIPER_GGSN", - "DLT_JUNIPER_ISM", - "DLT_JUNIPER_MFR", - "DLT_JUNIPER_MLFR", - "DLT_JUNIPER_MLPPP", - "DLT_JUNIPER_MONITOR", - "DLT_JUNIPER_PIC_PEER", - "DLT_JUNIPER_PPP", - "DLT_JUNIPER_PPPOE", - "DLT_JUNIPER_PPPOE_ATM", - "DLT_JUNIPER_SERVICES", - "DLT_JUNIPER_SRX_E2E", - "DLT_JUNIPER_ST", - "DLT_JUNIPER_VP", - "DLT_JUNIPER_VS", - "DLT_LAPB_WITH_DIR", - "DLT_LAPD", - "DLT_LIN", - "DLT_LINUX_EVDEV", - "DLT_LINUX_IRDA", - "DLT_LINUX_LAPD", - "DLT_LINUX_PPP_WITHDIRECTION", - "DLT_LINUX_SLL", - "DLT_LOOP", - "DLT_LTALK", - "DLT_MATCHING_MAX", - "DLT_MATCHING_MIN", - "DLT_MFR", - "DLT_MOST", - "DLT_MPEG_2_TS", - "DLT_MPLS", - "DLT_MTP2", - "DLT_MTP2_WITH_PHDR", - "DLT_MTP3", - "DLT_MUX27010", - "DLT_NETANALYZER", - "DLT_NETANALYZER_TRANSPARENT", - "DLT_NFC_LLCP", - "DLT_NFLOG", - "DLT_NG40", - "DLT_NULL", - "DLT_PCI_EXP", - "DLT_PFLOG", - "DLT_PFSYNC", - "DLT_PPI", - "DLT_PPP", - "DLT_PPP_BSDOS", - "DLT_PPP_ETHER", - "DLT_PPP_PPPD", - "DLT_PPP_SERIAL", - "DLT_PPP_WITH_DIR", - "DLT_PPP_WITH_DIRECTION", - "DLT_PRISM_HEADER", - "DLT_PRONET", - "DLT_RAIF1", - "DLT_RAW", - "DLT_RAWAF_MASK", - "DLT_RIO", - "DLT_SCCP", - "DLT_SITA", - "DLT_SLIP", - "DLT_SLIP_BSDOS", - "DLT_STANAG_5066_D_PDU", - "DLT_SUNATM", - "DLT_SYMANTEC_FIREWALL", - "DLT_TZSP", - "DLT_USB", - "DLT_USB_LINUX", - "DLT_USB_LINUX_MMAPPED", - "DLT_USER0", - "DLT_USER1", - "DLT_USER10", - "DLT_USER11", - "DLT_USER12", - "DLT_USER13", - "DLT_USER14", - 
"DLT_USER15", - "DLT_USER2", - "DLT_USER3", - "DLT_USER4", - "DLT_USER5", - "DLT_USER6", - "DLT_USER7", - "DLT_USER8", - "DLT_USER9", - "DLT_WIHART", - "DLT_X2E_SERIAL", - "DLT_X2E_XORAYA", - "DNSMXData", - "DNSPTRData", - "DNSRecord", - "DNSSRVData", - "DNSTXTData", - "DNS_INFO_NO_RECORDS", - "DNS_TYPE_A", - "DNS_TYPE_A6", - "DNS_TYPE_AAAA", - "DNS_TYPE_ADDRS", - "DNS_TYPE_AFSDB", - "DNS_TYPE_ALL", - "DNS_TYPE_ANY", - "DNS_TYPE_ATMA", - "DNS_TYPE_AXFR", - "DNS_TYPE_CERT", - "DNS_TYPE_CNAME", - "DNS_TYPE_DHCID", - "DNS_TYPE_DNAME", - "DNS_TYPE_DNSKEY", - "DNS_TYPE_DS", - "DNS_TYPE_EID", - "DNS_TYPE_GID", - "DNS_TYPE_GPOS", - "DNS_TYPE_HINFO", - "DNS_TYPE_ISDN", - "DNS_TYPE_IXFR", - "DNS_TYPE_KEY", - "DNS_TYPE_KX", - "DNS_TYPE_LOC", - "DNS_TYPE_MAILA", - "DNS_TYPE_MAILB", - "DNS_TYPE_MB", - "DNS_TYPE_MD", - "DNS_TYPE_MF", - "DNS_TYPE_MG", - "DNS_TYPE_MINFO", - "DNS_TYPE_MR", - "DNS_TYPE_MX", - "DNS_TYPE_NAPTR", - "DNS_TYPE_NBSTAT", - "DNS_TYPE_NIMLOC", - "DNS_TYPE_NS", - "DNS_TYPE_NSAP", - "DNS_TYPE_NSAPPTR", - "DNS_TYPE_NSEC", - "DNS_TYPE_NULL", - "DNS_TYPE_NXT", - "DNS_TYPE_OPT", - "DNS_TYPE_PTR", - "DNS_TYPE_PX", - "DNS_TYPE_RP", - "DNS_TYPE_RRSIG", - "DNS_TYPE_RT", - "DNS_TYPE_SIG", - "DNS_TYPE_SINK", - "DNS_TYPE_SOA", - "DNS_TYPE_SRV", - "DNS_TYPE_TEXT", - "DNS_TYPE_TKEY", - "DNS_TYPE_TSIG", - "DNS_TYPE_UID", - "DNS_TYPE_UINFO", - "DNS_TYPE_UNSPEC", - "DNS_TYPE_WINS", - "DNS_TYPE_WINSR", - "DNS_TYPE_WKS", - "DNS_TYPE_X25", - "DT_BLK", - "DT_CHR", - "DT_DIR", - "DT_FIFO", - "DT_LNK", - "DT_REG", - "DT_SOCK", - "DT_UNKNOWN", - "DT_WHT", - "DUPLICATE_CLOSE_SOURCE", - "DUPLICATE_SAME_ACCESS", - "DeleteFile", - "DetachLsf", - "DeviceIoControl", - "Dirent", - "DnsNameCompare", - "DnsQuery", - "DnsRecordListFree", - "DnsSectionAdditional", - "DnsSectionAnswer", - "DnsSectionAuthority", - "DnsSectionQuestion", - "Dup", - "Dup2", - "Dup3", - "DuplicateHandle", - "E2BIG", - "EACCES", - "EADDRINUSE", - "EADDRNOTAVAIL", - "EADV", - "EAFNOSUPPORT", - "EAGAIN", - "EALREADY", 
- "EAUTH", - "EBADARCH", - "EBADE", - "EBADEXEC", - "EBADF", - "EBADFD", - "EBADMACHO", - "EBADMSG", - "EBADR", - "EBADRPC", - "EBADRQC", - "EBADSLT", - "EBFONT", - "EBUSY", - "ECANCELED", - "ECAPMODE", - "ECHILD", - "ECHO", - "ECHOCTL", - "ECHOE", - "ECHOK", - "ECHOKE", - "ECHONL", - "ECHOPRT", - "ECHRNG", - "ECOMM", - "ECONNABORTED", - "ECONNREFUSED", - "ECONNRESET", - "EDEADLK", - "EDEADLOCK", - "EDESTADDRREQ", - "EDEVERR", - "EDOM", - "EDOOFUS", - "EDOTDOT", - "EDQUOT", - "EEXIST", - "EFAULT", - "EFBIG", - "EFER_LMA", - "EFER_LME", - "EFER_NXE", - "EFER_SCE", - "EFTYPE", - "EHOSTDOWN", - "EHOSTUNREACH", - "EHWPOISON", - "EIDRM", - "EILSEQ", - "EINPROGRESS", - "EINTR", - "EINVAL", - "EIO", - "EIPSEC", - "EISCONN", - "EISDIR", - "EISNAM", - "EKEYEXPIRED", - "EKEYREJECTED", - "EKEYREVOKED", - "EL2HLT", - "EL2NSYNC", - "EL3HLT", - "EL3RST", - "ELAST", - "ELF_NGREG", - "ELF_PRARGSZ", - "ELIBACC", - "ELIBBAD", - "ELIBEXEC", - "ELIBMAX", - "ELIBSCN", - "ELNRNG", - "ELOOP", - "EMEDIUMTYPE", - "EMFILE", - "EMLINK", - "EMSGSIZE", - "EMT_TAGOVF", - "EMULTIHOP", - "EMUL_ENABLED", - "EMUL_LINUX", - "EMUL_LINUX32", - "EMUL_MAXID", - "EMUL_NATIVE", - "ENAMETOOLONG", - "ENAVAIL", - "ENDRUNDISC", - "ENEEDAUTH", - "ENETDOWN", - "ENETRESET", - "ENETUNREACH", - "ENFILE", - "ENOANO", - "ENOATTR", - "ENOBUFS", - "ENOCSI", - "ENODATA", - "ENODEV", - "ENOENT", - "ENOEXEC", - "ENOKEY", - "ENOLCK", - "ENOLINK", - "ENOMEDIUM", - "ENOMEM", - "ENOMSG", - "ENONET", - "ENOPKG", - "ENOPOLICY", - "ENOPROTOOPT", - "ENOSPC", - "ENOSR", - "ENOSTR", - "ENOSYS", - "ENOTBLK", - "ENOTCAPABLE", - "ENOTCONN", - "ENOTDIR", - "ENOTEMPTY", - "ENOTNAM", - "ENOTRECOVERABLE", - "ENOTSOCK", - "ENOTSUP", - "ENOTTY", - "ENOTUNIQ", - "ENXIO", - "EN_SW_CTL_INF", - "EN_SW_CTL_PREC", - "EN_SW_CTL_ROUND", - "EN_SW_DATACHAIN", - "EN_SW_DENORM", - "EN_SW_INVOP", - "EN_SW_OVERFLOW", - "EN_SW_PRECLOSS", - "EN_SW_UNDERFLOW", - "EN_SW_ZERODIV", - "EOPNOTSUPP", - "EOVERFLOW", - "EOWNERDEAD", - "EPERM", - "EPFNOSUPPORT", - 
"EPIPE", - "EPOLLERR", - "EPOLLET", - "EPOLLHUP", - "EPOLLIN", - "EPOLLMSG", - "EPOLLONESHOT", - "EPOLLOUT", - "EPOLLPRI", - "EPOLLRDBAND", - "EPOLLRDHUP", - "EPOLLRDNORM", - "EPOLLWRBAND", - "EPOLLWRNORM", - "EPOLL_CLOEXEC", - "EPOLL_CTL_ADD", - "EPOLL_CTL_DEL", - "EPOLL_CTL_MOD", - "EPOLL_NONBLOCK", - "EPROCLIM", - "EPROCUNAVAIL", - "EPROGMISMATCH", - "EPROGUNAVAIL", - "EPROTO", - "EPROTONOSUPPORT", - "EPROTOTYPE", - "EPWROFF", - "EQFULL", - "ERANGE", - "EREMCHG", - "EREMOTE", - "EREMOTEIO", - "ERESTART", - "ERFKILL", - "EROFS", - "ERPCMISMATCH", - "ERROR_ACCESS_DENIED", - "ERROR_ALREADY_EXISTS", - "ERROR_BROKEN_PIPE", - "ERROR_BUFFER_OVERFLOW", - "ERROR_DIR_NOT_EMPTY", - "ERROR_ENVVAR_NOT_FOUND", - "ERROR_FILE_EXISTS", - "ERROR_FILE_NOT_FOUND", - "ERROR_HANDLE_EOF", - "ERROR_INSUFFICIENT_BUFFER", - "ERROR_IO_PENDING", - "ERROR_MOD_NOT_FOUND", - "ERROR_MORE_DATA", - "ERROR_NETNAME_DELETED", - "ERROR_NOT_FOUND", - "ERROR_NO_MORE_FILES", - "ERROR_OPERATION_ABORTED", - "ERROR_PATH_NOT_FOUND", - "ERROR_PRIVILEGE_NOT_HELD", - "ERROR_PROC_NOT_FOUND", - "ESHLIBVERS", - "ESHUTDOWN", - "ESOCKTNOSUPPORT", - "ESPIPE", - "ESRCH", - "ESRMNT", - "ESTALE", - "ESTRPIPE", - "ETHERCAP_JUMBO_MTU", - "ETHERCAP_VLAN_HWTAGGING", - "ETHERCAP_VLAN_MTU", - "ETHERMIN", - "ETHERMTU", - "ETHERMTU_JUMBO", - "ETHERTYPE_8023", - "ETHERTYPE_AARP", - "ETHERTYPE_ACCTON", - "ETHERTYPE_AEONIC", - "ETHERTYPE_ALPHA", - "ETHERTYPE_AMBER", - "ETHERTYPE_AMOEBA", - "ETHERTYPE_AOE", - "ETHERTYPE_APOLLO", - "ETHERTYPE_APOLLODOMAIN", - "ETHERTYPE_APPLETALK", - "ETHERTYPE_APPLITEK", - "ETHERTYPE_ARGONAUT", - "ETHERTYPE_ARP", - "ETHERTYPE_AT", - "ETHERTYPE_ATALK", - "ETHERTYPE_ATOMIC", - "ETHERTYPE_ATT", - "ETHERTYPE_ATTSTANFORD", - "ETHERTYPE_AUTOPHON", - "ETHERTYPE_AXIS", - "ETHERTYPE_BCLOOP", - "ETHERTYPE_BOFL", - "ETHERTYPE_CABLETRON", - "ETHERTYPE_CHAOS", - "ETHERTYPE_COMDESIGN", - "ETHERTYPE_COMPUGRAPHIC", - "ETHERTYPE_COUNTERPOINT", - "ETHERTYPE_CRONUS", - "ETHERTYPE_CRONUSVLN", - "ETHERTYPE_DCA", - 
"ETHERTYPE_DDE", - "ETHERTYPE_DEBNI", - "ETHERTYPE_DECAM", - "ETHERTYPE_DECCUST", - "ETHERTYPE_DECDIAG", - "ETHERTYPE_DECDNS", - "ETHERTYPE_DECDTS", - "ETHERTYPE_DECEXPER", - "ETHERTYPE_DECLAST", - "ETHERTYPE_DECLTM", - "ETHERTYPE_DECMUMPS", - "ETHERTYPE_DECNETBIOS", - "ETHERTYPE_DELTACON", - "ETHERTYPE_DIDDLE", - "ETHERTYPE_DLOG1", - "ETHERTYPE_DLOG2", - "ETHERTYPE_DN", - "ETHERTYPE_DOGFIGHT", - "ETHERTYPE_DSMD", - "ETHERTYPE_ECMA", - "ETHERTYPE_ENCRYPT", - "ETHERTYPE_ES", - "ETHERTYPE_EXCELAN", - "ETHERTYPE_EXPERDATA", - "ETHERTYPE_FLIP", - "ETHERTYPE_FLOWCONTROL", - "ETHERTYPE_FRARP", - "ETHERTYPE_GENDYN", - "ETHERTYPE_HAYES", - "ETHERTYPE_HIPPI_FP", - "ETHERTYPE_HITACHI", - "ETHERTYPE_HP", - "ETHERTYPE_IEEEPUP", - "ETHERTYPE_IEEEPUPAT", - "ETHERTYPE_IMLBL", - "ETHERTYPE_IMLBLDIAG", - "ETHERTYPE_IP", - "ETHERTYPE_IPAS", - "ETHERTYPE_IPV6", - "ETHERTYPE_IPX", - "ETHERTYPE_IPXNEW", - "ETHERTYPE_KALPANA", - "ETHERTYPE_LANBRIDGE", - "ETHERTYPE_LANPROBE", - "ETHERTYPE_LAT", - "ETHERTYPE_LBACK", - "ETHERTYPE_LITTLE", - "ETHERTYPE_LLDP", - "ETHERTYPE_LOGICRAFT", - "ETHERTYPE_LOOPBACK", - "ETHERTYPE_MATRA", - "ETHERTYPE_MAX", - "ETHERTYPE_MERIT", - "ETHERTYPE_MICP", - "ETHERTYPE_MOPDL", - "ETHERTYPE_MOPRC", - "ETHERTYPE_MOTOROLA", - "ETHERTYPE_MPLS", - "ETHERTYPE_MPLS_MCAST", - "ETHERTYPE_MUMPS", - "ETHERTYPE_NBPCC", - "ETHERTYPE_NBPCLAIM", - "ETHERTYPE_NBPCLREQ", - "ETHERTYPE_NBPCLRSP", - "ETHERTYPE_NBPCREQ", - "ETHERTYPE_NBPCRSP", - "ETHERTYPE_NBPDG", - "ETHERTYPE_NBPDGB", - "ETHERTYPE_NBPDLTE", - "ETHERTYPE_NBPRAR", - "ETHERTYPE_NBPRAS", - "ETHERTYPE_NBPRST", - "ETHERTYPE_NBPSCD", - "ETHERTYPE_NBPVCD", - "ETHERTYPE_NBS", - "ETHERTYPE_NCD", - "ETHERTYPE_NESTAR", - "ETHERTYPE_NETBEUI", - "ETHERTYPE_NOVELL", - "ETHERTYPE_NS", - "ETHERTYPE_NSAT", - "ETHERTYPE_NSCOMPAT", - "ETHERTYPE_NTRAILER", - "ETHERTYPE_OS9", - "ETHERTYPE_OS9NET", - "ETHERTYPE_PACER", - "ETHERTYPE_PAE", - "ETHERTYPE_PCS", - "ETHERTYPE_PLANNING", - "ETHERTYPE_PPP", - "ETHERTYPE_PPPOE", - 
"ETHERTYPE_PPPOEDISC", - "ETHERTYPE_PRIMENTS", - "ETHERTYPE_PUP", - "ETHERTYPE_PUPAT", - "ETHERTYPE_QINQ", - "ETHERTYPE_RACAL", - "ETHERTYPE_RATIONAL", - "ETHERTYPE_RAWFR", - "ETHERTYPE_RCL", - "ETHERTYPE_RDP", - "ETHERTYPE_RETIX", - "ETHERTYPE_REVARP", - "ETHERTYPE_SCA", - "ETHERTYPE_SECTRA", - "ETHERTYPE_SECUREDATA", - "ETHERTYPE_SGITW", - "ETHERTYPE_SG_BOUNCE", - "ETHERTYPE_SG_DIAG", - "ETHERTYPE_SG_NETGAMES", - "ETHERTYPE_SG_RESV", - "ETHERTYPE_SIMNET", - "ETHERTYPE_SLOW", - "ETHERTYPE_SLOWPROTOCOLS", - "ETHERTYPE_SNA", - "ETHERTYPE_SNMP", - "ETHERTYPE_SONIX", - "ETHERTYPE_SPIDER", - "ETHERTYPE_SPRITE", - "ETHERTYPE_STP", - "ETHERTYPE_TALARIS", - "ETHERTYPE_TALARISMC", - "ETHERTYPE_TCPCOMP", - "ETHERTYPE_TCPSM", - "ETHERTYPE_TEC", - "ETHERTYPE_TIGAN", - "ETHERTYPE_TRAIL", - "ETHERTYPE_TRANSETHER", - "ETHERTYPE_TYMSHARE", - "ETHERTYPE_UBBST", - "ETHERTYPE_UBDEBUG", - "ETHERTYPE_UBDIAGLOOP", - "ETHERTYPE_UBDL", - "ETHERTYPE_UBNIU", - "ETHERTYPE_UBNMC", - "ETHERTYPE_VALID", - "ETHERTYPE_VARIAN", - "ETHERTYPE_VAXELN", - "ETHERTYPE_VEECO", - "ETHERTYPE_VEXP", - "ETHERTYPE_VGLAB", - "ETHERTYPE_VINES", - "ETHERTYPE_VINESECHO", - "ETHERTYPE_VINESLOOP", - "ETHERTYPE_VITAL", - "ETHERTYPE_VLAN", - "ETHERTYPE_VLTLMAN", - "ETHERTYPE_VPROD", - "ETHERTYPE_VURESERVED", - "ETHERTYPE_WATERLOO", - "ETHERTYPE_WELLFLEET", - "ETHERTYPE_X25", - "ETHERTYPE_X75", - "ETHERTYPE_XNSSM", - "ETHERTYPE_XTP", - "ETHER_ADDR_LEN", - "ETHER_ALIGN", - "ETHER_CRC_LEN", - "ETHER_CRC_POLY_BE", - "ETHER_CRC_POLY_LE", - "ETHER_HDR_LEN", - "ETHER_MAX_DIX_LEN", - "ETHER_MAX_LEN", - "ETHER_MAX_LEN_JUMBO", - "ETHER_MIN_LEN", - "ETHER_PPPOE_ENCAP_LEN", - "ETHER_TYPE_LEN", - "ETHER_VLAN_ENCAP_LEN", - "ETH_P_1588", - "ETH_P_8021Q", - "ETH_P_802_2", - "ETH_P_802_3", - "ETH_P_AARP", - "ETH_P_ALL", - "ETH_P_AOE", - "ETH_P_ARCNET", - "ETH_P_ARP", - "ETH_P_ATALK", - "ETH_P_ATMFATE", - "ETH_P_ATMMPOA", - "ETH_P_AX25", - "ETH_P_BPQ", - "ETH_P_CAIF", - "ETH_P_CAN", - "ETH_P_CONTROL", - "ETH_P_CUST", - "ETH_P_DDCMP", 
- "ETH_P_DEC", - "ETH_P_DIAG", - "ETH_P_DNA_DL", - "ETH_P_DNA_RC", - "ETH_P_DNA_RT", - "ETH_P_DSA", - "ETH_P_ECONET", - "ETH_P_EDSA", - "ETH_P_FCOE", - "ETH_P_FIP", - "ETH_P_HDLC", - "ETH_P_IEEE802154", - "ETH_P_IEEEPUP", - "ETH_P_IEEEPUPAT", - "ETH_P_IP", - "ETH_P_IPV6", - "ETH_P_IPX", - "ETH_P_IRDA", - "ETH_P_LAT", - "ETH_P_LINK_CTL", - "ETH_P_LOCALTALK", - "ETH_P_LOOP", - "ETH_P_MOBITEX", - "ETH_P_MPLS_MC", - "ETH_P_MPLS_UC", - "ETH_P_PAE", - "ETH_P_PAUSE", - "ETH_P_PHONET", - "ETH_P_PPPTALK", - "ETH_P_PPP_DISC", - "ETH_P_PPP_MP", - "ETH_P_PPP_SES", - "ETH_P_PUP", - "ETH_P_PUPAT", - "ETH_P_RARP", - "ETH_P_SCA", - "ETH_P_SLOW", - "ETH_P_SNAP", - "ETH_P_TEB", - "ETH_P_TIPC", - "ETH_P_TRAILER", - "ETH_P_TR_802_2", - "ETH_P_WAN_PPP", - "ETH_P_WCCP", - "ETH_P_X25", - "ETIME", - "ETIMEDOUT", - "ETOOMANYREFS", - "ETXTBSY", - "EUCLEAN", - "EUNATCH", - "EUSERS", - "EVFILT_AIO", - "EVFILT_FS", - "EVFILT_LIO", - "EVFILT_MACHPORT", - "EVFILT_PROC", - "EVFILT_READ", - "EVFILT_SIGNAL", - "EVFILT_SYSCOUNT", - "EVFILT_THREADMARKER", - "EVFILT_TIMER", - "EVFILT_USER", - "EVFILT_VM", - "EVFILT_VNODE", - "EVFILT_WRITE", - "EV_ADD", - "EV_CLEAR", - "EV_DELETE", - "EV_DISABLE", - "EV_DISPATCH", - "EV_DROP", - "EV_ENABLE", - "EV_EOF", - "EV_ERROR", - "EV_FLAG0", - "EV_FLAG1", - "EV_ONESHOT", - "EV_OOBAND", - "EV_POLL", - "EV_RECEIPT", - "EV_SYSFLAGS", - "EWINDOWS", - "EWOULDBLOCK", - "EXDEV", - "EXFULL", - "EXTA", - "EXTB", - "EXTPROC", - "Environ", - "EpollCreate", - "EpollCreate1", - "EpollCtl", - "EpollEvent", - "EpollWait", - "Errno", - "EscapeArg", - "Exchangedata", - "Exec", - "Exit", - "ExitProcess", - "FD_CLOEXEC", - "FD_SETSIZE", - "FILE_ACTION_ADDED", - "FILE_ACTION_MODIFIED", - "FILE_ACTION_REMOVED", - "FILE_ACTION_RENAMED_NEW_NAME", - "FILE_ACTION_RENAMED_OLD_NAME", - "FILE_APPEND_DATA", - "FILE_ATTRIBUTE_ARCHIVE", - "FILE_ATTRIBUTE_DIRECTORY", - "FILE_ATTRIBUTE_HIDDEN", - "FILE_ATTRIBUTE_NORMAL", - "FILE_ATTRIBUTE_READONLY", - "FILE_ATTRIBUTE_REPARSE_POINT", - 
"FILE_ATTRIBUTE_SYSTEM", - "FILE_BEGIN", - "FILE_CURRENT", - "FILE_END", - "FILE_FLAG_BACKUP_SEMANTICS", - "FILE_FLAG_OPEN_REPARSE_POINT", - "FILE_FLAG_OVERLAPPED", - "FILE_LIST_DIRECTORY", - "FILE_MAP_COPY", - "FILE_MAP_EXECUTE", - "FILE_MAP_READ", - "FILE_MAP_WRITE", - "FILE_NOTIFY_CHANGE_ATTRIBUTES", - "FILE_NOTIFY_CHANGE_CREATION", - "FILE_NOTIFY_CHANGE_DIR_NAME", - "FILE_NOTIFY_CHANGE_FILE_NAME", - "FILE_NOTIFY_CHANGE_LAST_ACCESS", - "FILE_NOTIFY_CHANGE_LAST_WRITE", - "FILE_NOTIFY_CHANGE_SIZE", - "FILE_SHARE_DELETE", - "FILE_SHARE_READ", - "FILE_SHARE_WRITE", - "FILE_SKIP_COMPLETION_PORT_ON_SUCCESS", - "FILE_SKIP_SET_EVENT_ON_HANDLE", - "FILE_TYPE_CHAR", - "FILE_TYPE_DISK", - "FILE_TYPE_PIPE", - "FILE_TYPE_REMOTE", - "FILE_TYPE_UNKNOWN", - "FILE_WRITE_ATTRIBUTES", - "FLUSHO", - "FORMAT_MESSAGE_ALLOCATE_BUFFER", - "FORMAT_MESSAGE_ARGUMENT_ARRAY", - "FORMAT_MESSAGE_FROM_HMODULE", - "FORMAT_MESSAGE_FROM_STRING", - "FORMAT_MESSAGE_FROM_SYSTEM", - "FORMAT_MESSAGE_IGNORE_INSERTS", - "FORMAT_MESSAGE_MAX_WIDTH_MASK", - "FSCTL_GET_REPARSE_POINT", - "F_ADDFILESIGS", - "F_ADDSIGS", - "F_ALLOCATEALL", - "F_ALLOCATECONTIG", - "F_CANCEL", - "F_CHKCLEAN", - "F_CLOSEM", - "F_DUP2FD", - "F_DUP2FD_CLOEXEC", - "F_DUPFD", - "F_DUPFD_CLOEXEC", - "F_EXLCK", - "F_FINDSIGS", - "F_FLUSH_DATA", - "F_FREEZE_FS", - "F_FSCTL", - "F_FSDIRMASK", - "F_FSIN", - "F_FSINOUT", - "F_FSOUT", - "F_FSPRIV", - "F_FSVOID", - "F_FULLFSYNC", - "F_GETCODEDIR", - "F_GETFD", - "F_GETFL", - "F_GETLEASE", - "F_GETLK", - "F_GETLK64", - "F_GETLKPID", - "F_GETNOSIGPIPE", - "F_GETOWN", - "F_GETOWN_EX", - "F_GETPATH", - "F_GETPATH_MTMINFO", - "F_GETPIPE_SZ", - "F_GETPROTECTIONCLASS", - "F_GETPROTECTIONLEVEL", - "F_GETSIG", - "F_GLOBAL_NOCACHE", - "F_LOCK", - "F_LOG2PHYS", - "F_LOG2PHYS_EXT", - "F_MARKDEPENDENCY", - "F_MAXFD", - "F_NOCACHE", - "F_NODIRECT", - "F_NOTIFY", - "F_OGETLK", - "F_OK", - "F_OSETLK", - "F_OSETLKW", - "F_PARAM_MASK", - "F_PARAM_MAX", - "F_PATHPKG_CHECK", - "F_PEOFPOSMODE", - 
"F_PREALLOCATE", - "F_RDADVISE", - "F_RDAHEAD", - "F_RDLCK", - "F_READAHEAD", - "F_READBOOTSTRAP", - "F_SETBACKINGSTORE", - "F_SETFD", - "F_SETFL", - "F_SETLEASE", - "F_SETLK", - "F_SETLK64", - "F_SETLKW", - "F_SETLKW64", - "F_SETLKWTIMEOUT", - "F_SETLK_REMOTE", - "F_SETNOSIGPIPE", - "F_SETOWN", - "F_SETOWN_EX", - "F_SETPIPE_SZ", - "F_SETPROTECTIONCLASS", - "F_SETSIG", - "F_SETSIZE", - "F_SHLCK", - "F_SINGLE_WRITER", - "F_TEST", - "F_THAW_FS", - "F_TLOCK", - "F_TRANSCODEKEY", - "F_ULOCK", - "F_UNLCK", - "F_UNLCKSYS", - "F_VOLPOSMODE", - "F_WRITEBOOTSTRAP", - "F_WRLCK", - "Faccessat", - "Fallocate", - "Fbootstraptransfer_t", - "Fchdir", - "Fchflags", - "Fchmod", - "Fchmodat", - "Fchown", - "Fchownat", - "FcntlFlock", - "FdSet", - "Fdatasync", - "FileNotifyInformation", - "Filetime", - "FindClose", - "FindFirstFile", - "FindNextFile", - "Flock", - "Flock_t", - "FlushBpf", - "FlushFileBuffers", - "FlushViewOfFile", - "ForkExec", - "ForkLock", - "FormatMessage", - "Fpathconf", - "FreeAddrInfoW", - "FreeEnvironmentStrings", - "FreeLibrary", - "Fsid", - "Fstat", - "Fstatat", - "Fstatfs", - "Fstore_t", - "Fsync", - "Ftruncate", - "FullPath", - "Futimes", - "Futimesat", - "GENERIC_ALL", - "GENERIC_EXECUTE", - "GENERIC_READ", - "GENERIC_WRITE", - "GUID", - "GetAcceptExSockaddrs", - "GetAdaptersInfo", - "GetAddrInfoW", - "GetCommandLine", - "GetComputerName", - "GetConsoleMode", - "GetCurrentDirectory", - "GetCurrentProcess", - "GetEnvironmentStrings", - "GetEnvironmentVariable", - "GetExitCodeProcess", - "GetFileAttributes", - "GetFileAttributesEx", - "GetFileExInfoStandard", - "GetFileExMaxInfoLevel", - "GetFileInformationByHandle", - "GetFileType", - "GetFullPathName", - "GetHostByName", - "GetIfEntry", - "GetLastError", - "GetLengthSid", - "GetLongPathName", - "GetProcAddress", - "GetProcessTimes", - "GetProtoByName", - "GetQueuedCompletionStatus", - "GetServByName", - "GetShortPathName", - "GetStartupInfo", - "GetStdHandle", - "GetSystemTimeAsFileTime", - "GetTempPath", 
- "GetTimeZoneInformation", - "GetTokenInformation", - "GetUserNameEx", - "GetUserProfileDirectory", - "GetVersion", - "Getcwd", - "Getdents", - "Getdirentries", - "Getdtablesize", - "Getegid", - "Getenv", - "Geteuid", - "Getfsstat", - "Getgid", - "Getgroups", - "Getpagesize", - "Getpeername", - "Getpgid", - "Getpgrp", - "Getpid", - "Getppid", - "Getpriority", - "Getrlimit", - "Getrusage", - "Getsid", - "Getsockname", - "Getsockopt", - "GetsockoptByte", - "GetsockoptICMPv6Filter", - "GetsockoptIPMreq", - "GetsockoptIPMreqn", - "GetsockoptIPv6MTUInfo", - "GetsockoptIPv6Mreq", - "GetsockoptInet4Addr", - "GetsockoptInt", - "GetsockoptUcred", - "Gettid", - "Gettimeofday", - "Getuid", - "Getwd", - "Getxattr", - "HANDLE_FLAG_INHERIT", - "HKEY_CLASSES_ROOT", - "HKEY_CURRENT_CONFIG", - "HKEY_CURRENT_USER", - "HKEY_DYN_DATA", - "HKEY_LOCAL_MACHINE", - "HKEY_PERFORMANCE_DATA", - "HKEY_USERS", - "HUPCL", - "Handle", - "Hostent", - "ICANON", - "ICMP6_FILTER", - "ICMPV6_FILTER", - "ICMPv6Filter", - "ICRNL", - "IEXTEN", - "IFAN_ARRIVAL", - "IFAN_DEPARTURE", - "IFA_ADDRESS", - "IFA_ANYCAST", - "IFA_BROADCAST", - "IFA_CACHEINFO", - "IFA_F_DADFAILED", - "IFA_F_DEPRECATED", - "IFA_F_HOMEADDRESS", - "IFA_F_NODAD", - "IFA_F_OPTIMISTIC", - "IFA_F_PERMANENT", - "IFA_F_SECONDARY", - "IFA_F_TEMPORARY", - "IFA_F_TENTATIVE", - "IFA_LABEL", - "IFA_LOCAL", - "IFA_MAX", - "IFA_MULTICAST", - "IFA_ROUTE", - "IFA_UNSPEC", - "IFF_ALLMULTI", - "IFF_ALTPHYS", - "IFF_AUTOMEDIA", - "IFF_BROADCAST", - "IFF_CANTCHANGE", - "IFF_CANTCONFIG", - "IFF_DEBUG", - "IFF_DRV_OACTIVE", - "IFF_DRV_RUNNING", - "IFF_DYING", - "IFF_DYNAMIC", - "IFF_LINK0", - "IFF_LINK1", - "IFF_LINK2", - "IFF_LOOPBACK", - "IFF_MASTER", - "IFF_MONITOR", - "IFF_MULTICAST", - "IFF_NOARP", - "IFF_NOTRAILERS", - "IFF_NO_PI", - "IFF_OACTIVE", - "IFF_ONE_QUEUE", - "IFF_POINTOPOINT", - "IFF_POINTTOPOINT", - "IFF_PORTSEL", - "IFF_PPROMISC", - "IFF_PROMISC", - "IFF_RENAMING", - "IFF_RUNNING", - "IFF_SIMPLEX", - "IFF_SLAVE", - "IFF_SMART", - 
"IFF_STATICARP", - "IFF_TAP", - "IFF_TUN", - "IFF_TUN_EXCL", - "IFF_UP", - "IFF_VNET_HDR", - "IFLA_ADDRESS", - "IFLA_BROADCAST", - "IFLA_COST", - "IFLA_IFALIAS", - "IFLA_IFNAME", - "IFLA_LINK", - "IFLA_LINKINFO", - "IFLA_LINKMODE", - "IFLA_MAP", - "IFLA_MASTER", - "IFLA_MAX", - "IFLA_MTU", - "IFLA_NET_NS_PID", - "IFLA_OPERSTATE", - "IFLA_PRIORITY", - "IFLA_PROTINFO", - "IFLA_QDISC", - "IFLA_STATS", - "IFLA_TXQLEN", - "IFLA_UNSPEC", - "IFLA_WEIGHT", - "IFLA_WIRELESS", - "IFNAMSIZ", - "IFT_1822", - "IFT_A12MPPSWITCH", - "IFT_AAL2", - "IFT_AAL5", - "IFT_ADSL", - "IFT_AFLANE8023", - "IFT_AFLANE8025", - "IFT_ARAP", - "IFT_ARCNET", - "IFT_ARCNETPLUS", - "IFT_ASYNC", - "IFT_ATM", - "IFT_ATMDXI", - "IFT_ATMFUNI", - "IFT_ATMIMA", - "IFT_ATMLOGICAL", - "IFT_ATMRADIO", - "IFT_ATMSUBINTERFACE", - "IFT_ATMVCIENDPT", - "IFT_ATMVIRTUAL", - "IFT_BGPPOLICYACCOUNTING", - "IFT_BLUETOOTH", - "IFT_BRIDGE", - "IFT_BSC", - "IFT_CARP", - "IFT_CCTEMUL", - "IFT_CELLULAR", - "IFT_CEPT", - "IFT_CES", - "IFT_CHANNEL", - "IFT_CNR", - "IFT_COFFEE", - "IFT_COMPOSITELINK", - "IFT_DCN", - "IFT_DIGITALPOWERLINE", - "IFT_DIGITALWRAPPEROVERHEADCHANNEL", - "IFT_DLSW", - "IFT_DOCSCABLEDOWNSTREAM", - "IFT_DOCSCABLEMACLAYER", - "IFT_DOCSCABLEUPSTREAM", - "IFT_DOCSCABLEUPSTREAMCHANNEL", - "IFT_DS0", - "IFT_DS0BUNDLE", - "IFT_DS1FDL", - "IFT_DS3", - "IFT_DTM", - "IFT_DUMMY", - "IFT_DVBASILN", - "IFT_DVBASIOUT", - "IFT_DVBRCCDOWNSTREAM", - "IFT_DVBRCCMACLAYER", - "IFT_DVBRCCUPSTREAM", - "IFT_ECONET", - "IFT_ENC", - "IFT_EON", - "IFT_EPLRS", - "IFT_ESCON", - "IFT_ETHER", - "IFT_FAITH", - "IFT_FAST", - "IFT_FASTETHER", - "IFT_FASTETHERFX", - "IFT_FDDI", - "IFT_FIBRECHANNEL", - "IFT_FRAMERELAYINTERCONNECT", - "IFT_FRAMERELAYMPI", - "IFT_FRDLCIENDPT", - "IFT_FRELAY", - "IFT_FRELAYDCE", - "IFT_FRF16MFRBUNDLE", - "IFT_FRFORWARD", - "IFT_G703AT2MB", - "IFT_G703AT64K", - "IFT_GIF", - "IFT_GIGABITETHERNET", - "IFT_GR303IDT", - "IFT_GR303RDT", - "IFT_H323GATEKEEPER", - "IFT_H323PROXY", - "IFT_HDH1822", - "IFT_HDLC", - 
"IFT_HDSL2", - "IFT_HIPERLAN2", - "IFT_HIPPI", - "IFT_HIPPIINTERFACE", - "IFT_HOSTPAD", - "IFT_HSSI", - "IFT_HY", - "IFT_IBM370PARCHAN", - "IFT_IDSL", - "IFT_IEEE1394", - "IFT_IEEE80211", - "IFT_IEEE80212", - "IFT_IEEE8023ADLAG", - "IFT_IFGSN", - "IFT_IMT", - "IFT_INFINIBAND", - "IFT_INTERLEAVE", - "IFT_IP", - "IFT_IPFORWARD", - "IFT_IPOVERATM", - "IFT_IPOVERCDLC", - "IFT_IPOVERCLAW", - "IFT_IPSWITCH", - "IFT_IPXIP", - "IFT_ISDN", - "IFT_ISDNBASIC", - "IFT_ISDNPRIMARY", - "IFT_ISDNS", - "IFT_ISDNU", - "IFT_ISO88022LLC", - "IFT_ISO88023", - "IFT_ISO88024", - "IFT_ISO88025", - "IFT_ISO88025CRFPINT", - "IFT_ISO88025DTR", - "IFT_ISO88025FIBER", - "IFT_ISO88026", - "IFT_ISUP", - "IFT_L2VLAN", - "IFT_L3IPVLAN", - "IFT_L3IPXVLAN", - "IFT_LAPB", - "IFT_LAPD", - "IFT_LAPF", - "IFT_LINEGROUP", - "IFT_LOCALTALK", - "IFT_LOOP", - "IFT_MEDIAMAILOVERIP", - "IFT_MFSIGLINK", - "IFT_MIOX25", - "IFT_MODEM", - "IFT_MPC", - "IFT_MPLS", - "IFT_MPLSTUNNEL", - "IFT_MSDSL", - "IFT_MVL", - "IFT_MYRINET", - "IFT_NFAS", - "IFT_NSIP", - "IFT_OPTICALCHANNEL", - "IFT_OPTICALTRANSPORT", - "IFT_OTHER", - "IFT_P10", - "IFT_P80", - "IFT_PARA", - "IFT_PDP", - "IFT_PFLOG", - "IFT_PFLOW", - "IFT_PFSYNC", - "IFT_PLC", - "IFT_PON155", - "IFT_PON622", - "IFT_POS", - "IFT_PPP", - "IFT_PPPMULTILINKBUNDLE", - "IFT_PROPATM", - "IFT_PROPBWAP2MP", - "IFT_PROPCNLS", - "IFT_PROPDOCSWIRELESSDOWNSTREAM", - "IFT_PROPDOCSWIRELESSMACLAYER", - "IFT_PROPDOCSWIRELESSUPSTREAM", - "IFT_PROPMUX", - "IFT_PROPVIRTUAL", - "IFT_PROPWIRELESSP2P", - "IFT_PTPSERIAL", - "IFT_PVC", - "IFT_Q2931", - "IFT_QLLC", - "IFT_RADIOMAC", - "IFT_RADSL", - "IFT_REACHDSL", - "IFT_RFC1483", - "IFT_RS232", - "IFT_RSRB", - "IFT_SDLC", - "IFT_SDSL", - "IFT_SHDSL", - "IFT_SIP", - "IFT_SIPSIG", - "IFT_SIPTG", - "IFT_SLIP", - "IFT_SMDSDXI", - "IFT_SMDSICIP", - "IFT_SONET", - "IFT_SONETOVERHEADCHANNEL", - "IFT_SONETPATH", - "IFT_SONETVT", - "IFT_SRP", - "IFT_SS7SIGLINK", - "IFT_STACKTOSTACK", - "IFT_STARLAN", - "IFT_STF", - "IFT_T1", - "IFT_TDLC", - 
"IFT_TELINK", - "IFT_TERMPAD", - "IFT_TR008", - "IFT_TRANSPHDLC", - "IFT_TUNNEL", - "IFT_ULTRA", - "IFT_USB", - "IFT_V11", - "IFT_V35", - "IFT_V36", - "IFT_V37", - "IFT_VDSL", - "IFT_VIRTUALIPADDRESS", - "IFT_VIRTUALTG", - "IFT_VOICEDID", - "IFT_VOICEEM", - "IFT_VOICEEMFGD", - "IFT_VOICEENCAP", - "IFT_VOICEFGDEANA", - "IFT_VOICEFXO", - "IFT_VOICEFXS", - "IFT_VOICEOVERATM", - "IFT_VOICEOVERCABLE", - "IFT_VOICEOVERFRAMERELAY", - "IFT_VOICEOVERIP", - "IFT_X213", - "IFT_X25", - "IFT_X25DDN", - "IFT_X25HUNTGROUP", - "IFT_X25MLP", - "IFT_X25PLE", - "IFT_XETHER", - "IGNBRK", - "IGNCR", - "IGNORE", - "IGNPAR", - "IMAXBEL", - "INFINITE", - "INLCR", - "INPCK", - "INVALID_FILE_ATTRIBUTES", - "IN_ACCESS", - "IN_ALL_EVENTS", - "IN_ATTRIB", - "IN_CLASSA_HOST", - "IN_CLASSA_MAX", - "IN_CLASSA_NET", - "IN_CLASSA_NSHIFT", - "IN_CLASSB_HOST", - "IN_CLASSB_MAX", - "IN_CLASSB_NET", - "IN_CLASSB_NSHIFT", - "IN_CLASSC_HOST", - "IN_CLASSC_NET", - "IN_CLASSC_NSHIFT", - "IN_CLASSD_HOST", - "IN_CLASSD_NET", - "IN_CLASSD_NSHIFT", - "IN_CLOEXEC", - "IN_CLOSE", - "IN_CLOSE_NOWRITE", - "IN_CLOSE_WRITE", - "IN_CREATE", - "IN_DELETE", - "IN_DELETE_SELF", - "IN_DONT_FOLLOW", - "IN_EXCL_UNLINK", - "IN_IGNORED", - "IN_ISDIR", - "IN_LINKLOCALNETNUM", - "IN_LOOPBACKNET", - "IN_MASK_ADD", - "IN_MODIFY", - "IN_MOVE", - "IN_MOVED_FROM", - "IN_MOVED_TO", - "IN_MOVE_SELF", - "IN_NONBLOCK", - "IN_ONESHOT", - "IN_ONLYDIR", - "IN_OPEN", - "IN_Q_OVERFLOW", - "IN_RFC3021_HOST", - "IN_RFC3021_MASK", - "IN_RFC3021_NET", - "IN_RFC3021_NSHIFT", - "IN_UNMOUNT", - "IOC_IN", - "IOC_INOUT", - "IOC_OUT", - "IOC_VENDOR", - "IOC_WS2", - "IO_REPARSE_TAG_SYMLINK", - "IPMreq", - "IPMreqn", - "IPPROTO_3PC", - "IPPROTO_ADFS", - "IPPROTO_AH", - "IPPROTO_AHIP", - "IPPROTO_APES", - "IPPROTO_ARGUS", - "IPPROTO_AX25", - "IPPROTO_BHA", - "IPPROTO_BLT", - "IPPROTO_BRSATMON", - "IPPROTO_CARP", - "IPPROTO_CFTP", - "IPPROTO_CHAOS", - "IPPROTO_CMTP", - "IPPROTO_COMP", - "IPPROTO_CPHB", - "IPPROTO_CPNX", - "IPPROTO_DCCP", - "IPPROTO_DDP", 
- "IPPROTO_DGP", - "IPPROTO_DIVERT", - "IPPROTO_DIVERT_INIT", - "IPPROTO_DIVERT_RESP", - "IPPROTO_DONE", - "IPPROTO_DSTOPTS", - "IPPROTO_EGP", - "IPPROTO_EMCON", - "IPPROTO_ENCAP", - "IPPROTO_EON", - "IPPROTO_ESP", - "IPPROTO_ETHERIP", - "IPPROTO_FRAGMENT", - "IPPROTO_GGP", - "IPPROTO_GMTP", - "IPPROTO_GRE", - "IPPROTO_HELLO", - "IPPROTO_HMP", - "IPPROTO_HOPOPTS", - "IPPROTO_ICMP", - "IPPROTO_ICMPV6", - "IPPROTO_IDP", - "IPPROTO_IDPR", - "IPPROTO_IDRP", - "IPPROTO_IGMP", - "IPPROTO_IGP", - "IPPROTO_IGRP", - "IPPROTO_IL", - "IPPROTO_INLSP", - "IPPROTO_INP", - "IPPROTO_IP", - "IPPROTO_IPCOMP", - "IPPROTO_IPCV", - "IPPROTO_IPEIP", - "IPPROTO_IPIP", - "IPPROTO_IPPC", - "IPPROTO_IPV4", - "IPPROTO_IPV6", - "IPPROTO_IPV6_ICMP", - "IPPROTO_IRTP", - "IPPROTO_KRYPTOLAN", - "IPPROTO_LARP", - "IPPROTO_LEAF1", - "IPPROTO_LEAF2", - "IPPROTO_MAX", - "IPPROTO_MAXID", - "IPPROTO_MEAS", - "IPPROTO_MH", - "IPPROTO_MHRP", - "IPPROTO_MICP", - "IPPROTO_MOBILE", - "IPPROTO_MPLS", - "IPPROTO_MTP", - "IPPROTO_MUX", - "IPPROTO_ND", - "IPPROTO_NHRP", - "IPPROTO_NONE", - "IPPROTO_NSP", - "IPPROTO_NVPII", - "IPPROTO_OLD_DIVERT", - "IPPROTO_OSPFIGP", - "IPPROTO_PFSYNC", - "IPPROTO_PGM", - "IPPROTO_PIGP", - "IPPROTO_PIM", - "IPPROTO_PRM", - "IPPROTO_PUP", - "IPPROTO_PVP", - "IPPROTO_RAW", - "IPPROTO_RCCMON", - "IPPROTO_RDP", - "IPPROTO_ROUTING", - "IPPROTO_RSVP", - "IPPROTO_RVD", - "IPPROTO_SATEXPAK", - "IPPROTO_SATMON", - "IPPROTO_SCCSP", - "IPPROTO_SCTP", - "IPPROTO_SDRP", - "IPPROTO_SEND", - "IPPROTO_SEP", - "IPPROTO_SKIP", - "IPPROTO_SPACER", - "IPPROTO_SRPC", - "IPPROTO_ST", - "IPPROTO_SVMTP", - "IPPROTO_SWIPE", - "IPPROTO_TCF", - "IPPROTO_TCP", - "IPPROTO_TLSP", - "IPPROTO_TP", - "IPPROTO_TPXX", - "IPPROTO_TRUNK1", - "IPPROTO_TRUNK2", - "IPPROTO_TTP", - "IPPROTO_UDP", - "IPPROTO_UDPLITE", - "IPPROTO_VINES", - "IPPROTO_VISA", - "IPPROTO_VMTP", - "IPPROTO_VRRP", - "IPPROTO_WBEXPAK", - "IPPROTO_WBMON", - "IPPROTO_WSN", - "IPPROTO_XNET", - "IPPROTO_XTP", - "IPV6_2292DSTOPTS", - 
"IPV6_2292HOPLIMIT", - "IPV6_2292HOPOPTS", - "IPV6_2292NEXTHOP", - "IPV6_2292PKTINFO", - "IPV6_2292PKTOPTIONS", - "IPV6_2292RTHDR", - "IPV6_ADDRFORM", - "IPV6_ADD_MEMBERSHIP", - "IPV6_AUTHHDR", - "IPV6_AUTH_LEVEL", - "IPV6_AUTOFLOWLABEL", - "IPV6_BINDANY", - "IPV6_BINDV6ONLY", - "IPV6_BOUND_IF", - "IPV6_CHECKSUM", - "IPV6_DEFAULT_MULTICAST_HOPS", - "IPV6_DEFAULT_MULTICAST_LOOP", - "IPV6_DEFHLIM", - "IPV6_DONTFRAG", - "IPV6_DROP_MEMBERSHIP", - "IPV6_DSTOPTS", - "IPV6_ESP_NETWORK_LEVEL", - "IPV6_ESP_TRANS_LEVEL", - "IPV6_FAITH", - "IPV6_FLOWINFO_MASK", - "IPV6_FLOWLABEL_MASK", - "IPV6_FRAGTTL", - "IPV6_FW_ADD", - "IPV6_FW_DEL", - "IPV6_FW_FLUSH", - "IPV6_FW_GET", - "IPV6_FW_ZERO", - "IPV6_HLIMDEC", - "IPV6_HOPLIMIT", - "IPV6_HOPOPTS", - "IPV6_IPCOMP_LEVEL", - "IPV6_IPSEC_POLICY", - "IPV6_JOIN_ANYCAST", - "IPV6_JOIN_GROUP", - "IPV6_LEAVE_ANYCAST", - "IPV6_LEAVE_GROUP", - "IPV6_MAXHLIM", - "IPV6_MAXOPTHDR", - "IPV6_MAXPACKET", - "IPV6_MAX_GROUP_SRC_FILTER", - "IPV6_MAX_MEMBERSHIPS", - "IPV6_MAX_SOCK_SRC_FILTER", - "IPV6_MIN_MEMBERSHIPS", - "IPV6_MMTU", - "IPV6_MSFILTER", - "IPV6_MTU", - "IPV6_MTU_DISCOVER", - "IPV6_MULTICAST_HOPS", - "IPV6_MULTICAST_IF", - "IPV6_MULTICAST_LOOP", - "IPV6_NEXTHOP", - "IPV6_OPTIONS", - "IPV6_PATHMTU", - "IPV6_PIPEX", - "IPV6_PKTINFO", - "IPV6_PMTUDISC_DO", - "IPV6_PMTUDISC_DONT", - "IPV6_PMTUDISC_PROBE", - "IPV6_PMTUDISC_WANT", - "IPV6_PORTRANGE", - "IPV6_PORTRANGE_DEFAULT", - "IPV6_PORTRANGE_HIGH", - "IPV6_PORTRANGE_LOW", - "IPV6_PREFER_TEMPADDR", - "IPV6_RECVDSTOPTS", - "IPV6_RECVDSTPORT", - "IPV6_RECVERR", - "IPV6_RECVHOPLIMIT", - "IPV6_RECVHOPOPTS", - "IPV6_RECVPATHMTU", - "IPV6_RECVPKTINFO", - "IPV6_RECVRTHDR", - "IPV6_RECVTCLASS", - "IPV6_ROUTER_ALERT", - "IPV6_RTABLE", - "IPV6_RTHDR", - "IPV6_RTHDRDSTOPTS", - "IPV6_RTHDR_LOOSE", - "IPV6_RTHDR_STRICT", - "IPV6_RTHDR_TYPE_0", - "IPV6_RXDSTOPTS", - "IPV6_RXHOPOPTS", - "IPV6_SOCKOPT_RESERVED1", - "IPV6_TCLASS", - "IPV6_UNICAST_HOPS", - "IPV6_USE_MIN_MTU", - "IPV6_V6ONLY", - 
"IPV6_VERSION", - "IPV6_VERSION_MASK", - "IPV6_XFRM_POLICY", - "IP_ADD_MEMBERSHIP", - "IP_ADD_SOURCE_MEMBERSHIP", - "IP_AUTH_LEVEL", - "IP_BINDANY", - "IP_BLOCK_SOURCE", - "IP_BOUND_IF", - "IP_DEFAULT_MULTICAST_LOOP", - "IP_DEFAULT_MULTICAST_TTL", - "IP_DF", - "IP_DIVERTFL", - "IP_DONTFRAG", - "IP_DROP_MEMBERSHIP", - "IP_DROP_SOURCE_MEMBERSHIP", - "IP_DUMMYNET3", - "IP_DUMMYNET_CONFIGURE", - "IP_DUMMYNET_DEL", - "IP_DUMMYNET_FLUSH", - "IP_DUMMYNET_GET", - "IP_EF", - "IP_ERRORMTU", - "IP_ESP_NETWORK_LEVEL", - "IP_ESP_TRANS_LEVEL", - "IP_FAITH", - "IP_FREEBIND", - "IP_FW3", - "IP_FW_ADD", - "IP_FW_DEL", - "IP_FW_FLUSH", - "IP_FW_GET", - "IP_FW_NAT_CFG", - "IP_FW_NAT_DEL", - "IP_FW_NAT_GET_CONFIG", - "IP_FW_NAT_GET_LOG", - "IP_FW_RESETLOG", - "IP_FW_TABLE_ADD", - "IP_FW_TABLE_DEL", - "IP_FW_TABLE_FLUSH", - "IP_FW_TABLE_GETSIZE", - "IP_FW_TABLE_LIST", - "IP_FW_ZERO", - "IP_HDRINCL", - "IP_IPCOMP_LEVEL", - "IP_IPSECFLOWINFO", - "IP_IPSEC_LOCAL_AUTH", - "IP_IPSEC_LOCAL_CRED", - "IP_IPSEC_LOCAL_ID", - "IP_IPSEC_POLICY", - "IP_IPSEC_REMOTE_AUTH", - "IP_IPSEC_REMOTE_CRED", - "IP_IPSEC_REMOTE_ID", - "IP_MAXPACKET", - "IP_MAX_GROUP_SRC_FILTER", - "IP_MAX_MEMBERSHIPS", - "IP_MAX_SOCK_MUTE_FILTER", - "IP_MAX_SOCK_SRC_FILTER", - "IP_MAX_SOURCE_FILTER", - "IP_MF", - "IP_MINFRAGSIZE", - "IP_MINTTL", - "IP_MIN_MEMBERSHIPS", - "IP_MSFILTER", - "IP_MSS", - "IP_MTU", - "IP_MTU_DISCOVER", - "IP_MULTICAST_IF", - "IP_MULTICAST_IFINDEX", - "IP_MULTICAST_LOOP", - "IP_MULTICAST_TTL", - "IP_MULTICAST_VIF", - "IP_NAT__XXX", - "IP_OFFMASK", - "IP_OLD_FW_ADD", - "IP_OLD_FW_DEL", - "IP_OLD_FW_FLUSH", - "IP_OLD_FW_GET", - "IP_OLD_FW_RESETLOG", - "IP_OLD_FW_ZERO", - "IP_ONESBCAST", - "IP_OPTIONS", - "IP_ORIGDSTADDR", - "IP_PASSSEC", - "IP_PIPEX", - "IP_PKTINFO", - "IP_PKTOPTIONS", - "IP_PMTUDISC", - "IP_PMTUDISC_DO", - "IP_PMTUDISC_DONT", - "IP_PMTUDISC_PROBE", - "IP_PMTUDISC_WANT", - "IP_PORTRANGE", - "IP_PORTRANGE_DEFAULT", - "IP_PORTRANGE_HIGH", - "IP_PORTRANGE_LOW", - "IP_RECVDSTADDR", - 
"IP_RECVDSTPORT", - "IP_RECVERR", - "IP_RECVIF", - "IP_RECVOPTS", - "IP_RECVORIGDSTADDR", - "IP_RECVPKTINFO", - "IP_RECVRETOPTS", - "IP_RECVRTABLE", - "IP_RECVTOS", - "IP_RECVTTL", - "IP_RETOPTS", - "IP_RF", - "IP_ROUTER_ALERT", - "IP_RSVP_OFF", - "IP_RSVP_ON", - "IP_RSVP_VIF_OFF", - "IP_RSVP_VIF_ON", - "IP_RTABLE", - "IP_SENDSRCADDR", - "IP_STRIPHDR", - "IP_TOS", - "IP_TRAFFIC_MGT_BACKGROUND", - "IP_TRANSPARENT", - "IP_TTL", - "IP_UNBLOCK_SOURCE", - "IP_XFRM_POLICY", - "IPv6MTUInfo", - "IPv6Mreq", - "ISIG", - "ISTRIP", - "IUCLC", - "IUTF8", - "IXANY", - "IXOFF", - "IXON", - "IfAddrmsg", - "IfAnnounceMsghdr", - "IfData", - "IfInfomsg", - "IfMsghdr", - "IfaMsghdr", - "IfmaMsghdr", - "IfmaMsghdr2", - "ImplementsGetwd", - "Inet4Pktinfo", - "Inet6Pktinfo", - "InotifyAddWatch", - "InotifyEvent", - "InotifyInit", - "InotifyInit1", - "InotifyRmWatch", - "InterfaceAddrMessage", - "InterfaceAnnounceMessage", - "InterfaceInfo", - "InterfaceMessage", - "InterfaceMulticastAddrMessage", - "InvalidHandle", - "Ioperm", - "Iopl", - "Iovec", - "IpAdapterInfo", - "IpAddrString", - "IpAddressString", - "IpMaskString", - "Issetugid", - "KEY_ALL_ACCESS", - "KEY_CREATE_LINK", - "KEY_CREATE_SUB_KEY", - "KEY_ENUMERATE_SUB_KEYS", - "KEY_EXECUTE", - "KEY_NOTIFY", - "KEY_QUERY_VALUE", - "KEY_READ", - "KEY_SET_VALUE", - "KEY_WOW64_32KEY", - "KEY_WOW64_64KEY", - "KEY_WRITE", - "Kevent", - "Kevent_t", - "Kill", - "Klogctl", - "Kqueue", - "LANG_ENGLISH", - "LAYERED_PROTOCOL", - "LCNT_OVERLOAD_FLUSH", - "LINUX_REBOOT_CMD_CAD_OFF", - "LINUX_REBOOT_CMD_CAD_ON", - "LINUX_REBOOT_CMD_HALT", - "LINUX_REBOOT_CMD_KEXEC", - "LINUX_REBOOT_CMD_POWER_OFF", - "LINUX_REBOOT_CMD_RESTART", - "LINUX_REBOOT_CMD_RESTART2", - "LINUX_REBOOT_CMD_SW_SUSPEND", - "LINUX_REBOOT_MAGIC1", - "LINUX_REBOOT_MAGIC2", - "LOCK_EX", - "LOCK_NB", - "LOCK_SH", - "LOCK_UN", - "LazyDLL", - "LazyProc", - "Lchown", - "Linger", - "Link", - "Listen", - "Listxattr", - "LoadCancelIoEx", - "LoadConnectEx", - "LoadCreateSymbolicLink", - 
"LoadDLL", - "LoadGetAddrInfo", - "LoadLibrary", - "LoadSetFileCompletionNotificationModes", - "LocalFree", - "Log2phys_t", - "LookupAccountName", - "LookupAccountSid", - "LookupSID", - "LsfJump", - "LsfSocket", - "LsfStmt", - "Lstat", - "MADV_AUTOSYNC", - "MADV_CAN_REUSE", - "MADV_CORE", - "MADV_DOFORK", - "MADV_DONTFORK", - "MADV_DONTNEED", - "MADV_FREE", - "MADV_FREE_REUSABLE", - "MADV_FREE_REUSE", - "MADV_HUGEPAGE", - "MADV_HWPOISON", - "MADV_MERGEABLE", - "MADV_NOCORE", - "MADV_NOHUGEPAGE", - "MADV_NORMAL", - "MADV_NOSYNC", - "MADV_PROTECT", - "MADV_RANDOM", - "MADV_REMOVE", - "MADV_SEQUENTIAL", - "MADV_SPACEAVAIL", - "MADV_UNMERGEABLE", - "MADV_WILLNEED", - "MADV_ZERO_WIRED_PAGES", - "MAP_32BIT", - "MAP_ALIGNED_SUPER", - "MAP_ALIGNMENT_16MB", - "MAP_ALIGNMENT_1TB", - "MAP_ALIGNMENT_256TB", - "MAP_ALIGNMENT_4GB", - "MAP_ALIGNMENT_64KB", - "MAP_ALIGNMENT_64PB", - "MAP_ALIGNMENT_MASK", - "MAP_ALIGNMENT_SHIFT", - "MAP_ANON", - "MAP_ANONYMOUS", - "MAP_COPY", - "MAP_DENYWRITE", - "MAP_EXECUTABLE", - "MAP_FILE", - "MAP_FIXED", - "MAP_FLAGMASK", - "MAP_GROWSDOWN", - "MAP_HASSEMAPHORE", - "MAP_HUGETLB", - "MAP_INHERIT", - "MAP_INHERIT_COPY", - "MAP_INHERIT_DEFAULT", - "MAP_INHERIT_DONATE_COPY", - "MAP_INHERIT_NONE", - "MAP_INHERIT_SHARE", - "MAP_JIT", - "MAP_LOCKED", - "MAP_NOCACHE", - "MAP_NOCORE", - "MAP_NOEXTEND", - "MAP_NONBLOCK", - "MAP_NORESERVE", - "MAP_NOSYNC", - "MAP_POPULATE", - "MAP_PREFAULT_READ", - "MAP_PRIVATE", - "MAP_RENAME", - "MAP_RESERVED0080", - "MAP_RESERVED0100", - "MAP_SHARED", - "MAP_STACK", - "MAP_TRYFIXED", - "MAP_TYPE", - "MAP_WIRED", - "MAXIMUM_REPARSE_DATA_BUFFER_SIZE", - "MAXLEN_IFDESCR", - "MAXLEN_PHYSADDR", - "MAX_ADAPTER_ADDRESS_LENGTH", - "MAX_ADAPTER_DESCRIPTION_LENGTH", - "MAX_ADAPTER_NAME_LENGTH", - "MAX_COMPUTERNAME_LENGTH", - "MAX_INTERFACE_NAME_LEN", - "MAX_LONG_PATH", - "MAX_PATH", - "MAX_PROTOCOL_CHAIN", - "MCL_CURRENT", - "MCL_FUTURE", - "MNT_DETACH", - "MNT_EXPIRE", - "MNT_FORCE", - "MSG_BCAST", - "MSG_CMSG_CLOEXEC", - 
"MSG_COMPAT", - "MSG_CONFIRM", - "MSG_CONTROLMBUF", - "MSG_CTRUNC", - "MSG_DONTROUTE", - "MSG_DONTWAIT", - "MSG_EOF", - "MSG_EOR", - "MSG_ERRQUEUE", - "MSG_FASTOPEN", - "MSG_FIN", - "MSG_FLUSH", - "MSG_HAVEMORE", - "MSG_HOLD", - "MSG_IOVUSRSPACE", - "MSG_LENUSRSPACE", - "MSG_MCAST", - "MSG_MORE", - "MSG_NAMEMBUF", - "MSG_NBIO", - "MSG_NEEDSA", - "MSG_NOSIGNAL", - "MSG_NOTIFICATION", - "MSG_OOB", - "MSG_PEEK", - "MSG_PROXY", - "MSG_RCVMORE", - "MSG_RST", - "MSG_SEND", - "MSG_SYN", - "MSG_TRUNC", - "MSG_TRYHARD", - "MSG_USERFLAGS", - "MSG_WAITALL", - "MSG_WAITFORONE", - "MSG_WAITSTREAM", - "MS_ACTIVE", - "MS_ASYNC", - "MS_BIND", - "MS_DEACTIVATE", - "MS_DIRSYNC", - "MS_INVALIDATE", - "MS_I_VERSION", - "MS_KERNMOUNT", - "MS_KILLPAGES", - "MS_MANDLOCK", - "MS_MGC_MSK", - "MS_MGC_VAL", - "MS_MOVE", - "MS_NOATIME", - "MS_NODEV", - "MS_NODIRATIME", - "MS_NOEXEC", - "MS_NOSUID", - "MS_NOUSER", - "MS_POSIXACL", - "MS_PRIVATE", - "MS_RDONLY", - "MS_REC", - "MS_RELATIME", - "MS_REMOUNT", - "MS_RMT_MASK", - "MS_SHARED", - "MS_SILENT", - "MS_SLAVE", - "MS_STRICTATIME", - "MS_SYNC", - "MS_SYNCHRONOUS", - "MS_UNBINDABLE", - "Madvise", - "MapViewOfFile", - "MaxTokenInfoClass", - "Mclpool", - "MibIfRow", - "Mkdir", - "Mkdirat", - "Mkfifo", - "Mknod", - "Mknodat", - "Mlock", - "Mlockall", - "Mmap", - "Mount", - "MoveFile", - "Mprotect", - "Msghdr", - "Munlock", - "Munlockall", - "Munmap", - "MustLoadDLL", - "NAME_MAX", - "NETLINK_ADD_MEMBERSHIP", - "NETLINK_AUDIT", - "NETLINK_BROADCAST_ERROR", - "NETLINK_CONNECTOR", - "NETLINK_DNRTMSG", - "NETLINK_DROP_MEMBERSHIP", - "NETLINK_ECRYPTFS", - "NETLINK_FIB_LOOKUP", - "NETLINK_FIREWALL", - "NETLINK_GENERIC", - "NETLINK_INET_DIAG", - "NETLINK_IP6_FW", - "NETLINK_ISCSI", - "NETLINK_KOBJECT_UEVENT", - "NETLINK_NETFILTER", - "NETLINK_NFLOG", - "NETLINK_NO_ENOBUFS", - "NETLINK_PKTINFO", - "NETLINK_RDMA", - "NETLINK_ROUTE", - "NETLINK_SCSITRANSPORT", - "NETLINK_SELINUX", - "NETLINK_UNUSED", - "NETLINK_USERSOCK", - "NETLINK_XFRM", - 
"NET_RT_DUMP", - "NET_RT_DUMP2", - "NET_RT_FLAGS", - "NET_RT_IFLIST", - "NET_RT_IFLIST2", - "NET_RT_IFLISTL", - "NET_RT_IFMALIST", - "NET_RT_MAXID", - "NET_RT_OIFLIST", - "NET_RT_OOIFLIST", - "NET_RT_STAT", - "NET_RT_STATS", - "NET_RT_TABLE", - "NET_RT_TRASH", - "NLA_ALIGNTO", - "NLA_F_NESTED", - "NLA_F_NET_BYTEORDER", - "NLA_HDRLEN", - "NLMSG_ALIGNTO", - "NLMSG_DONE", - "NLMSG_ERROR", - "NLMSG_HDRLEN", - "NLMSG_MIN_TYPE", - "NLMSG_NOOP", - "NLMSG_OVERRUN", - "NLM_F_ACK", - "NLM_F_APPEND", - "NLM_F_ATOMIC", - "NLM_F_CREATE", - "NLM_F_DUMP", - "NLM_F_ECHO", - "NLM_F_EXCL", - "NLM_F_MATCH", - "NLM_F_MULTI", - "NLM_F_REPLACE", - "NLM_F_REQUEST", - "NLM_F_ROOT", - "NOFLSH", - "NOTE_ABSOLUTE", - "NOTE_ATTRIB", - "NOTE_BACKGROUND", - "NOTE_CHILD", - "NOTE_CRITICAL", - "NOTE_DELETE", - "NOTE_EOF", - "NOTE_EXEC", - "NOTE_EXIT", - "NOTE_EXITSTATUS", - "NOTE_EXIT_CSERROR", - "NOTE_EXIT_DECRYPTFAIL", - "NOTE_EXIT_DETAIL", - "NOTE_EXIT_DETAIL_MASK", - "NOTE_EXIT_MEMORY", - "NOTE_EXIT_REPARENTED", - "NOTE_EXTEND", - "NOTE_FFAND", - "NOTE_FFCOPY", - "NOTE_FFCTRLMASK", - "NOTE_FFLAGSMASK", - "NOTE_FFNOP", - "NOTE_FFOR", - "NOTE_FORK", - "NOTE_LEEWAY", - "NOTE_LINK", - "NOTE_LOWAT", - "NOTE_NONE", - "NOTE_NSECONDS", - "NOTE_PCTRLMASK", - "NOTE_PDATAMASK", - "NOTE_REAP", - "NOTE_RENAME", - "NOTE_RESOURCEEND", - "NOTE_REVOKE", - "NOTE_SECONDS", - "NOTE_SIGNAL", - "NOTE_TRACK", - "NOTE_TRACKERR", - "NOTE_TRIGGER", - "NOTE_TRUNCATE", - "NOTE_USECONDS", - "NOTE_VM_ERROR", - "NOTE_VM_PRESSURE", - "NOTE_VM_PRESSURE_SUDDEN_TERMINATE", - "NOTE_VM_PRESSURE_TERMINATE", - "NOTE_WRITE", - "NameCanonical", - "NameCanonicalEx", - "NameDisplay", - "NameDnsDomain", - "NameFullyQualifiedDN", - "NameSamCompatible", - "NameServicePrincipal", - "NameUniqueId", - "NameUnknown", - "NameUserPrincipal", - "Nanosleep", - "NetApiBufferFree", - "NetGetJoinInformation", - "NetSetupDomainName", - "NetSetupUnjoined", - "NetSetupUnknownStatus", - "NetSetupWorkgroupName", - "NetUserGetInfo", - "NetlinkMessage", - 
"NetlinkRIB", - "NetlinkRouteAttr", - "NetlinkRouteRequest", - "NewCallback", - "NewCallbackCDecl", - "NewLazyDLL", - "NlAttr", - "NlMsgerr", - "NlMsghdr", - "NsecToFiletime", - "NsecToTimespec", - "NsecToTimeval", - "Ntohs", - "OCRNL", - "OFDEL", - "OFILL", - "OFIOGETBMAP", - "OID_PKIX_KP_SERVER_AUTH", - "OID_SERVER_GATED_CRYPTO", - "OID_SGC_NETSCAPE", - "OLCUC", - "ONLCR", - "ONLRET", - "ONOCR", - "ONOEOT", - "OPEN_ALWAYS", - "OPEN_EXISTING", - "OPOST", - "O_ACCMODE", - "O_ALERT", - "O_ALT_IO", - "O_APPEND", - "O_ASYNC", - "O_CLOEXEC", - "O_CREAT", - "O_DIRECT", - "O_DIRECTORY", - "O_DP_GETRAWENCRYPTED", - "O_DSYNC", - "O_EVTONLY", - "O_EXCL", - "O_EXEC", - "O_EXLOCK", - "O_FSYNC", - "O_LARGEFILE", - "O_NDELAY", - "O_NOATIME", - "O_NOCTTY", - "O_NOFOLLOW", - "O_NONBLOCK", - "O_NOSIGPIPE", - "O_POPUP", - "O_RDONLY", - "O_RDWR", - "O_RSYNC", - "O_SHLOCK", - "O_SYMLINK", - "O_SYNC", - "O_TRUNC", - "O_TTY_INIT", - "O_WRONLY", - "Open", - "OpenCurrentProcessToken", - "OpenProcess", - "OpenProcessToken", - "Openat", - "Overlapped", - "PACKET_ADD_MEMBERSHIP", - "PACKET_BROADCAST", - "PACKET_DROP_MEMBERSHIP", - "PACKET_FASTROUTE", - "PACKET_HOST", - "PACKET_LOOPBACK", - "PACKET_MR_ALLMULTI", - "PACKET_MR_MULTICAST", - "PACKET_MR_PROMISC", - "PACKET_MULTICAST", - "PACKET_OTHERHOST", - "PACKET_OUTGOING", - "PACKET_RECV_OUTPUT", - "PACKET_RX_RING", - "PACKET_STATISTICS", - "PAGE_EXECUTE_READ", - "PAGE_EXECUTE_READWRITE", - "PAGE_EXECUTE_WRITECOPY", - "PAGE_READONLY", - "PAGE_READWRITE", - "PAGE_WRITECOPY", - "PARENB", - "PARMRK", - "PARODD", - "PENDIN", - "PFL_HIDDEN", - "PFL_MATCHES_PROTOCOL_ZERO", - "PFL_MULTIPLE_PROTO_ENTRIES", - "PFL_NETWORKDIRECT_PROVIDER", - "PFL_RECOMMENDED_PROTO_ENTRY", - "PF_FLUSH", - "PKCS_7_ASN_ENCODING", - "PMC5_PIPELINE_FLUSH", - "PRIO_PGRP", - "PRIO_PROCESS", - "PRIO_USER", - "PRI_IOFLUSH", - "PROCESS_QUERY_INFORMATION", - "PROCESS_TERMINATE", - "PROT_EXEC", - "PROT_GROWSDOWN", - "PROT_GROWSUP", - "PROT_NONE", - "PROT_READ", - "PROT_WRITE", - 
"PROV_DH_SCHANNEL", - "PROV_DSS", - "PROV_DSS_DH", - "PROV_EC_ECDSA_FULL", - "PROV_EC_ECDSA_SIG", - "PROV_EC_ECNRA_FULL", - "PROV_EC_ECNRA_SIG", - "PROV_FORTEZZA", - "PROV_INTEL_SEC", - "PROV_MS_EXCHANGE", - "PROV_REPLACE_OWF", - "PROV_RNG", - "PROV_RSA_AES", - "PROV_RSA_FULL", - "PROV_RSA_SCHANNEL", - "PROV_RSA_SIG", - "PROV_SPYRUS_LYNKS", - "PROV_SSL", - "PR_CAPBSET_DROP", - "PR_CAPBSET_READ", - "PR_CLEAR_SECCOMP_FILTER", - "PR_ENDIAN_BIG", - "PR_ENDIAN_LITTLE", - "PR_ENDIAN_PPC_LITTLE", - "PR_FPEMU_NOPRINT", - "PR_FPEMU_SIGFPE", - "PR_FP_EXC_ASYNC", - "PR_FP_EXC_DISABLED", - "PR_FP_EXC_DIV", - "PR_FP_EXC_INV", - "PR_FP_EXC_NONRECOV", - "PR_FP_EXC_OVF", - "PR_FP_EXC_PRECISE", - "PR_FP_EXC_RES", - "PR_FP_EXC_SW_ENABLE", - "PR_FP_EXC_UND", - "PR_GET_DUMPABLE", - "PR_GET_ENDIAN", - "PR_GET_FPEMU", - "PR_GET_FPEXC", - "PR_GET_KEEPCAPS", - "PR_GET_NAME", - "PR_GET_PDEATHSIG", - "PR_GET_SECCOMP", - "PR_GET_SECCOMP_FILTER", - "PR_GET_SECUREBITS", - "PR_GET_TIMERSLACK", - "PR_GET_TIMING", - "PR_GET_TSC", - "PR_GET_UNALIGN", - "PR_MCE_KILL", - "PR_MCE_KILL_CLEAR", - "PR_MCE_KILL_DEFAULT", - "PR_MCE_KILL_EARLY", - "PR_MCE_KILL_GET", - "PR_MCE_KILL_LATE", - "PR_MCE_KILL_SET", - "PR_SECCOMP_FILTER_EVENT", - "PR_SECCOMP_FILTER_SYSCALL", - "PR_SET_DUMPABLE", - "PR_SET_ENDIAN", - "PR_SET_FPEMU", - "PR_SET_FPEXC", - "PR_SET_KEEPCAPS", - "PR_SET_NAME", - "PR_SET_PDEATHSIG", - "PR_SET_PTRACER", - "PR_SET_SECCOMP", - "PR_SET_SECCOMP_FILTER", - "PR_SET_SECUREBITS", - "PR_SET_TIMERSLACK", - "PR_SET_TIMING", - "PR_SET_TSC", - "PR_SET_UNALIGN", - "PR_TASK_PERF_EVENTS_DISABLE", - "PR_TASK_PERF_EVENTS_ENABLE", - "PR_TIMING_STATISTICAL", - "PR_TIMING_TIMESTAMP", - "PR_TSC_ENABLE", - "PR_TSC_SIGSEGV", - "PR_UNALIGN_NOPRINT", - "PR_UNALIGN_SIGBUS", - "PTRACE_ARCH_PRCTL", - "PTRACE_ATTACH", - "PTRACE_CONT", - "PTRACE_DETACH", - "PTRACE_EVENT_CLONE", - "PTRACE_EVENT_EXEC", - "PTRACE_EVENT_EXIT", - "PTRACE_EVENT_FORK", - "PTRACE_EVENT_VFORK", - "PTRACE_EVENT_VFORK_DONE", - 
"PTRACE_GETCRUNCHREGS", - "PTRACE_GETEVENTMSG", - "PTRACE_GETFPREGS", - "PTRACE_GETFPXREGS", - "PTRACE_GETHBPREGS", - "PTRACE_GETREGS", - "PTRACE_GETREGSET", - "PTRACE_GETSIGINFO", - "PTRACE_GETVFPREGS", - "PTRACE_GETWMMXREGS", - "PTRACE_GET_THREAD_AREA", - "PTRACE_KILL", - "PTRACE_OLDSETOPTIONS", - "PTRACE_O_MASK", - "PTRACE_O_TRACECLONE", - "PTRACE_O_TRACEEXEC", - "PTRACE_O_TRACEEXIT", - "PTRACE_O_TRACEFORK", - "PTRACE_O_TRACESYSGOOD", - "PTRACE_O_TRACEVFORK", - "PTRACE_O_TRACEVFORKDONE", - "PTRACE_PEEKDATA", - "PTRACE_PEEKTEXT", - "PTRACE_PEEKUSR", - "PTRACE_POKEDATA", - "PTRACE_POKETEXT", - "PTRACE_POKEUSR", - "PTRACE_SETCRUNCHREGS", - "PTRACE_SETFPREGS", - "PTRACE_SETFPXREGS", - "PTRACE_SETHBPREGS", - "PTRACE_SETOPTIONS", - "PTRACE_SETREGS", - "PTRACE_SETREGSET", - "PTRACE_SETSIGINFO", - "PTRACE_SETVFPREGS", - "PTRACE_SETWMMXREGS", - "PTRACE_SET_SYSCALL", - "PTRACE_SET_THREAD_AREA", - "PTRACE_SINGLEBLOCK", - "PTRACE_SINGLESTEP", - "PTRACE_SYSCALL", - "PTRACE_SYSEMU", - "PTRACE_SYSEMU_SINGLESTEP", - "PTRACE_TRACEME", - "PT_ATTACH", - "PT_ATTACHEXC", - "PT_CONTINUE", - "PT_DATA_ADDR", - "PT_DENY_ATTACH", - "PT_DETACH", - "PT_FIRSTMACH", - "PT_FORCEQUOTA", - "PT_KILL", - "PT_MASK", - "PT_READ_D", - "PT_READ_I", - "PT_READ_U", - "PT_SIGEXC", - "PT_STEP", - "PT_TEXT_ADDR", - "PT_TEXT_END_ADDR", - "PT_THUPDATE", - "PT_TRACE_ME", - "PT_WRITE_D", - "PT_WRITE_I", - "PT_WRITE_U", - "ParseDirent", - "ParseNetlinkMessage", - "ParseNetlinkRouteAttr", - "ParseRoutingMessage", - "ParseRoutingSockaddr", - "ParseSocketControlMessage", - "ParseUnixCredentials", - "ParseUnixRights", - "PathMax", - "Pathconf", - "Pause", - "Pipe", - "Pipe2", - "PivotRoot", - "Pointer", - "PostQueuedCompletionStatus", - "Pread", - "Proc", - "ProcAttr", - "Process32First", - "Process32Next", - "ProcessEntry32", - "ProcessInformation", - "Protoent", - "PtraceAttach", - "PtraceCont", - "PtraceDetach", - "PtraceGetEventMsg", - "PtraceGetRegs", - "PtracePeekData", - "PtracePeekText", - 
"PtracePokeData", - "PtracePokeText", - "PtraceRegs", - "PtraceSetOptions", - "PtraceSetRegs", - "PtraceSingleStep", - "PtraceSyscall", - "Pwrite", - "REG_BINARY", - "REG_DWORD", - "REG_DWORD_BIG_ENDIAN", - "REG_DWORD_LITTLE_ENDIAN", - "REG_EXPAND_SZ", - "REG_FULL_RESOURCE_DESCRIPTOR", - "REG_LINK", - "REG_MULTI_SZ", - "REG_NONE", - "REG_QWORD", - "REG_QWORD_LITTLE_ENDIAN", - "REG_RESOURCE_LIST", - "REG_RESOURCE_REQUIREMENTS_LIST", - "REG_SZ", - "RLIMIT_AS", - "RLIMIT_CORE", - "RLIMIT_CPU", - "RLIMIT_CPU_USAGE_MONITOR", - "RLIMIT_DATA", - "RLIMIT_FSIZE", - "RLIMIT_NOFILE", - "RLIMIT_STACK", - "RLIM_INFINITY", - "RTAX_ADVMSS", - "RTAX_AUTHOR", - "RTAX_BRD", - "RTAX_CWND", - "RTAX_DST", - "RTAX_FEATURES", - "RTAX_FEATURE_ALLFRAG", - "RTAX_FEATURE_ECN", - "RTAX_FEATURE_SACK", - "RTAX_FEATURE_TIMESTAMP", - "RTAX_GATEWAY", - "RTAX_GENMASK", - "RTAX_HOPLIMIT", - "RTAX_IFA", - "RTAX_IFP", - "RTAX_INITCWND", - "RTAX_INITRWND", - "RTAX_LABEL", - "RTAX_LOCK", - "RTAX_MAX", - "RTAX_MTU", - "RTAX_NETMASK", - "RTAX_REORDERING", - "RTAX_RTO_MIN", - "RTAX_RTT", - "RTAX_RTTVAR", - "RTAX_SRC", - "RTAX_SRCMASK", - "RTAX_SSTHRESH", - "RTAX_TAG", - "RTAX_UNSPEC", - "RTAX_WINDOW", - "RTA_ALIGNTO", - "RTA_AUTHOR", - "RTA_BRD", - "RTA_CACHEINFO", - "RTA_DST", - "RTA_FLOW", - "RTA_GATEWAY", - "RTA_GENMASK", - "RTA_IFA", - "RTA_IFP", - "RTA_IIF", - "RTA_LABEL", - "RTA_MAX", - "RTA_METRICS", - "RTA_MULTIPATH", - "RTA_NETMASK", - "RTA_OIF", - "RTA_PREFSRC", - "RTA_PRIORITY", - "RTA_SRC", - "RTA_SRCMASK", - "RTA_TABLE", - "RTA_TAG", - "RTA_UNSPEC", - "RTCF_DIRECTSRC", - "RTCF_DOREDIRECT", - "RTCF_LOG", - "RTCF_MASQ", - "RTCF_NAT", - "RTCF_VALVE", - "RTF_ADDRCLASSMASK", - "RTF_ADDRCONF", - "RTF_ALLONLINK", - "RTF_ANNOUNCE", - "RTF_BLACKHOLE", - "RTF_BROADCAST", - "RTF_CACHE", - "RTF_CLONED", - "RTF_CLONING", - "RTF_CONDEMNED", - "RTF_DEFAULT", - "RTF_DELCLONE", - "RTF_DONE", - "RTF_DYNAMIC", - "RTF_FLOW", - "RTF_FMASK", - "RTF_GATEWAY", - "RTF_GWFLAG_COMPAT", - "RTF_HOST", - "RTF_IFREF", - 
"RTF_IFSCOPE", - "RTF_INTERFACE", - "RTF_IRTT", - "RTF_LINKRT", - "RTF_LLDATA", - "RTF_LLINFO", - "RTF_LOCAL", - "RTF_MASK", - "RTF_MODIFIED", - "RTF_MPATH", - "RTF_MPLS", - "RTF_MSS", - "RTF_MTU", - "RTF_MULTICAST", - "RTF_NAT", - "RTF_NOFORWARD", - "RTF_NONEXTHOP", - "RTF_NOPMTUDISC", - "RTF_PERMANENT_ARP", - "RTF_PINNED", - "RTF_POLICY", - "RTF_PRCLONING", - "RTF_PROTO1", - "RTF_PROTO2", - "RTF_PROTO3", - "RTF_PROXY", - "RTF_REINSTATE", - "RTF_REJECT", - "RTF_RNH_LOCKED", - "RTF_ROUTER", - "RTF_SOURCE", - "RTF_SRC", - "RTF_STATIC", - "RTF_STICKY", - "RTF_THROW", - "RTF_TUNNEL", - "RTF_UP", - "RTF_USETRAILERS", - "RTF_WASCLONED", - "RTF_WINDOW", - "RTF_XRESOLVE", - "RTM_ADD", - "RTM_BASE", - "RTM_CHANGE", - "RTM_CHGADDR", - "RTM_DELACTION", - "RTM_DELADDR", - "RTM_DELADDRLABEL", - "RTM_DELETE", - "RTM_DELLINK", - "RTM_DELMADDR", - "RTM_DELNEIGH", - "RTM_DELQDISC", - "RTM_DELROUTE", - "RTM_DELRULE", - "RTM_DELTCLASS", - "RTM_DELTFILTER", - "RTM_DESYNC", - "RTM_F_CLONED", - "RTM_F_EQUALIZE", - "RTM_F_NOTIFY", - "RTM_F_PREFIX", - "RTM_GET", - "RTM_GET2", - "RTM_GETACTION", - "RTM_GETADDR", - "RTM_GETADDRLABEL", - "RTM_GETANYCAST", - "RTM_GETDCB", - "RTM_GETLINK", - "RTM_GETMULTICAST", - "RTM_GETNEIGH", - "RTM_GETNEIGHTBL", - "RTM_GETQDISC", - "RTM_GETROUTE", - "RTM_GETRULE", - "RTM_GETTCLASS", - "RTM_GETTFILTER", - "RTM_IEEE80211", - "RTM_IFANNOUNCE", - "RTM_IFINFO", - "RTM_IFINFO2", - "RTM_LLINFO_UPD", - "RTM_LOCK", - "RTM_LOSING", - "RTM_MAX", - "RTM_MAXSIZE", - "RTM_MISS", - "RTM_NEWACTION", - "RTM_NEWADDR", - "RTM_NEWADDRLABEL", - "RTM_NEWLINK", - "RTM_NEWMADDR", - "RTM_NEWMADDR2", - "RTM_NEWNDUSEROPT", - "RTM_NEWNEIGH", - "RTM_NEWNEIGHTBL", - "RTM_NEWPREFIX", - "RTM_NEWQDISC", - "RTM_NEWROUTE", - "RTM_NEWRULE", - "RTM_NEWTCLASS", - "RTM_NEWTFILTER", - "RTM_NR_FAMILIES", - "RTM_NR_MSGTYPES", - "RTM_OIFINFO", - "RTM_OLDADD", - "RTM_OLDDEL", - "RTM_OOIFINFO", - "RTM_REDIRECT", - "RTM_RESOLVE", - "RTM_RTTUNIT", - "RTM_SETDCB", - "RTM_SETGATE", - "RTM_SETLINK", - 
"RTM_SETNEIGHTBL", - "RTM_VERSION", - "RTNH_ALIGNTO", - "RTNH_F_DEAD", - "RTNH_F_ONLINK", - "RTNH_F_PERVASIVE", - "RTNLGRP_IPV4_IFADDR", - "RTNLGRP_IPV4_MROUTE", - "RTNLGRP_IPV4_ROUTE", - "RTNLGRP_IPV4_RULE", - "RTNLGRP_IPV6_IFADDR", - "RTNLGRP_IPV6_IFINFO", - "RTNLGRP_IPV6_MROUTE", - "RTNLGRP_IPV6_PREFIX", - "RTNLGRP_IPV6_ROUTE", - "RTNLGRP_IPV6_RULE", - "RTNLGRP_LINK", - "RTNLGRP_ND_USEROPT", - "RTNLGRP_NEIGH", - "RTNLGRP_NONE", - "RTNLGRP_NOTIFY", - "RTNLGRP_TC", - "RTN_ANYCAST", - "RTN_BLACKHOLE", - "RTN_BROADCAST", - "RTN_LOCAL", - "RTN_MAX", - "RTN_MULTICAST", - "RTN_NAT", - "RTN_PROHIBIT", - "RTN_THROW", - "RTN_UNICAST", - "RTN_UNREACHABLE", - "RTN_UNSPEC", - "RTN_XRESOLVE", - "RTPROT_BIRD", - "RTPROT_BOOT", - "RTPROT_DHCP", - "RTPROT_DNROUTED", - "RTPROT_GATED", - "RTPROT_KERNEL", - "RTPROT_MRT", - "RTPROT_NTK", - "RTPROT_RA", - "RTPROT_REDIRECT", - "RTPROT_STATIC", - "RTPROT_UNSPEC", - "RTPROT_XORP", - "RTPROT_ZEBRA", - "RTV_EXPIRE", - "RTV_HOPCOUNT", - "RTV_MTU", - "RTV_RPIPE", - "RTV_RTT", - "RTV_RTTVAR", - "RTV_SPIPE", - "RTV_SSTHRESH", - "RTV_WEIGHT", - "RT_CACHING_CONTEXT", - "RT_CLASS_DEFAULT", - "RT_CLASS_LOCAL", - "RT_CLASS_MAIN", - "RT_CLASS_MAX", - "RT_CLASS_UNSPEC", - "RT_DEFAULT_FIB", - "RT_NORTREF", - "RT_SCOPE_HOST", - "RT_SCOPE_LINK", - "RT_SCOPE_NOWHERE", - "RT_SCOPE_SITE", - "RT_SCOPE_UNIVERSE", - "RT_TABLEID_MAX", - "RT_TABLE_COMPAT", - "RT_TABLE_DEFAULT", - "RT_TABLE_LOCAL", - "RT_TABLE_MAIN", - "RT_TABLE_MAX", - "RT_TABLE_UNSPEC", - "RUSAGE_CHILDREN", - "RUSAGE_SELF", - "RUSAGE_THREAD", - "Radvisory_t", - "RawConn", - "RawSockaddr", - "RawSockaddrAny", - "RawSockaddrDatalink", - "RawSockaddrInet4", - "RawSockaddrInet6", - "RawSockaddrLinklayer", - "RawSockaddrNetlink", - "RawSockaddrUnix", - "RawSyscall", - "RawSyscall6", - "Read", - "ReadConsole", - "ReadDirectoryChanges", - "ReadDirent", - "ReadFile", - "Readlink", - "Reboot", - "Recvfrom", - "Recvmsg", - "RegCloseKey", - "RegEnumKeyEx", - "RegOpenKeyEx", - "RegQueryInfoKey", - 
"RegQueryValueEx", - "RemoveDirectory", - "Removexattr", - "Rename", - "Renameat", - "Revoke", - "Rlimit", - "Rmdir", - "RouteMessage", - "RouteRIB", - "RoutingMessage", - "RtAttr", - "RtGenmsg", - "RtMetrics", - "RtMsg", - "RtMsghdr", - "RtNexthop", - "Rusage", - "SCM_BINTIME", - "SCM_CREDENTIALS", - "SCM_CREDS", - "SCM_RIGHTS", - "SCM_TIMESTAMP", - "SCM_TIMESTAMPING", - "SCM_TIMESTAMPNS", - "SCM_TIMESTAMP_MONOTONIC", - "SHUT_RD", - "SHUT_RDWR", - "SHUT_WR", - "SID", - "SIDAndAttributes", - "SIGABRT", - "SIGALRM", - "SIGBUS", - "SIGCHLD", - "SIGCLD", - "SIGCONT", - "SIGEMT", - "SIGFPE", - "SIGHUP", - "SIGILL", - "SIGINFO", - "SIGINT", - "SIGIO", - "SIGIOT", - "SIGKILL", - "SIGLIBRT", - "SIGLWP", - "SIGPIPE", - "SIGPOLL", - "SIGPROF", - "SIGPWR", - "SIGQUIT", - "SIGSEGV", - "SIGSTKFLT", - "SIGSTOP", - "SIGSYS", - "SIGTERM", - "SIGTHR", - "SIGTRAP", - "SIGTSTP", - "SIGTTIN", - "SIGTTOU", - "SIGUNUSED", - "SIGURG", - "SIGUSR1", - "SIGUSR2", - "SIGVTALRM", - "SIGWINCH", - "SIGXCPU", - "SIGXFSZ", - "SIOCADDDLCI", - "SIOCADDMULTI", - "SIOCADDRT", - "SIOCAIFADDR", - "SIOCAIFGROUP", - "SIOCALIFADDR", - "SIOCARPIPLL", - "SIOCATMARK", - "SIOCAUTOADDR", - "SIOCAUTONETMASK", - "SIOCBRDGADD", - "SIOCBRDGADDS", - "SIOCBRDGARL", - "SIOCBRDGDADDR", - "SIOCBRDGDEL", - "SIOCBRDGDELS", - "SIOCBRDGFLUSH", - "SIOCBRDGFRL", - "SIOCBRDGGCACHE", - "SIOCBRDGGFD", - "SIOCBRDGGHT", - "SIOCBRDGGIFFLGS", - "SIOCBRDGGMA", - "SIOCBRDGGPARAM", - "SIOCBRDGGPRI", - "SIOCBRDGGRL", - "SIOCBRDGGSIFS", - "SIOCBRDGGTO", - "SIOCBRDGIFS", - "SIOCBRDGRTS", - "SIOCBRDGSADDR", - "SIOCBRDGSCACHE", - "SIOCBRDGSFD", - "SIOCBRDGSHT", - "SIOCBRDGSIFCOST", - "SIOCBRDGSIFFLGS", - "SIOCBRDGSIFPRIO", - "SIOCBRDGSMA", - "SIOCBRDGSPRI", - "SIOCBRDGSPROTO", - "SIOCBRDGSTO", - "SIOCBRDGSTXHC", - "SIOCDARP", - "SIOCDELDLCI", - "SIOCDELMULTI", - "SIOCDELRT", - "SIOCDEVPRIVATE", - "SIOCDIFADDR", - "SIOCDIFGROUP", - "SIOCDIFPHYADDR", - "SIOCDLIFADDR", - "SIOCDRARP", - "SIOCGARP", - "SIOCGDRVSPEC", - "SIOCGETKALIVE", - 
"SIOCGETLABEL", - "SIOCGETPFLOW", - "SIOCGETPFSYNC", - "SIOCGETSGCNT", - "SIOCGETVIFCNT", - "SIOCGETVLAN", - "SIOCGHIWAT", - "SIOCGIFADDR", - "SIOCGIFADDRPREF", - "SIOCGIFALIAS", - "SIOCGIFALTMTU", - "SIOCGIFASYNCMAP", - "SIOCGIFBOND", - "SIOCGIFBR", - "SIOCGIFBRDADDR", - "SIOCGIFCAP", - "SIOCGIFCONF", - "SIOCGIFCOUNT", - "SIOCGIFDATA", - "SIOCGIFDESCR", - "SIOCGIFDEVMTU", - "SIOCGIFDLT", - "SIOCGIFDSTADDR", - "SIOCGIFENCAP", - "SIOCGIFFIB", - "SIOCGIFFLAGS", - "SIOCGIFGATTR", - "SIOCGIFGENERIC", - "SIOCGIFGMEMB", - "SIOCGIFGROUP", - "SIOCGIFHARDMTU", - "SIOCGIFHWADDR", - "SIOCGIFINDEX", - "SIOCGIFKPI", - "SIOCGIFMAC", - "SIOCGIFMAP", - "SIOCGIFMEDIA", - "SIOCGIFMEM", - "SIOCGIFMETRIC", - "SIOCGIFMTU", - "SIOCGIFNAME", - "SIOCGIFNETMASK", - "SIOCGIFPDSTADDR", - "SIOCGIFPFLAGS", - "SIOCGIFPHYS", - "SIOCGIFPRIORITY", - "SIOCGIFPSRCADDR", - "SIOCGIFRDOMAIN", - "SIOCGIFRTLABEL", - "SIOCGIFSLAVE", - "SIOCGIFSTATUS", - "SIOCGIFTIMESLOT", - "SIOCGIFTXQLEN", - "SIOCGIFVLAN", - "SIOCGIFWAKEFLAGS", - "SIOCGIFXFLAGS", - "SIOCGLIFADDR", - "SIOCGLIFPHYADDR", - "SIOCGLIFPHYRTABLE", - "SIOCGLIFPHYTTL", - "SIOCGLINKSTR", - "SIOCGLOWAT", - "SIOCGPGRP", - "SIOCGPRIVATE_0", - "SIOCGPRIVATE_1", - "SIOCGRARP", - "SIOCGSPPPPARAMS", - "SIOCGSTAMP", - "SIOCGSTAMPNS", - "SIOCGVH", - "SIOCGVNETID", - "SIOCIFCREATE", - "SIOCIFCREATE2", - "SIOCIFDESTROY", - "SIOCIFGCLONERS", - "SIOCINITIFADDR", - "SIOCPROTOPRIVATE", - "SIOCRSLVMULTI", - "SIOCRTMSG", - "SIOCSARP", - "SIOCSDRVSPEC", - "SIOCSETKALIVE", - "SIOCSETLABEL", - "SIOCSETPFLOW", - "SIOCSETPFSYNC", - "SIOCSETVLAN", - "SIOCSHIWAT", - "SIOCSIFADDR", - "SIOCSIFADDRPREF", - "SIOCSIFALTMTU", - "SIOCSIFASYNCMAP", - "SIOCSIFBOND", - "SIOCSIFBR", - "SIOCSIFBRDADDR", - "SIOCSIFCAP", - "SIOCSIFDESCR", - "SIOCSIFDSTADDR", - "SIOCSIFENCAP", - "SIOCSIFFIB", - "SIOCSIFFLAGS", - "SIOCSIFGATTR", - "SIOCSIFGENERIC", - "SIOCSIFHWADDR", - "SIOCSIFHWBROADCAST", - "SIOCSIFKPI", - "SIOCSIFLINK", - "SIOCSIFLLADDR", - "SIOCSIFMAC", - "SIOCSIFMAP", - 
"SIOCSIFMEDIA", - "SIOCSIFMEM", - "SIOCSIFMETRIC", - "SIOCSIFMTU", - "SIOCSIFNAME", - "SIOCSIFNETMASK", - "SIOCSIFPFLAGS", - "SIOCSIFPHYADDR", - "SIOCSIFPHYS", - "SIOCSIFPRIORITY", - "SIOCSIFRDOMAIN", - "SIOCSIFRTLABEL", - "SIOCSIFRVNET", - "SIOCSIFSLAVE", - "SIOCSIFTIMESLOT", - "SIOCSIFTXQLEN", - "SIOCSIFVLAN", - "SIOCSIFVNET", - "SIOCSIFXFLAGS", - "SIOCSLIFPHYADDR", - "SIOCSLIFPHYRTABLE", - "SIOCSLIFPHYTTL", - "SIOCSLINKSTR", - "SIOCSLOWAT", - "SIOCSPGRP", - "SIOCSRARP", - "SIOCSSPPPPARAMS", - "SIOCSVH", - "SIOCSVNETID", - "SIOCZIFDATA", - "SIO_GET_EXTENSION_FUNCTION_POINTER", - "SIO_GET_INTERFACE_LIST", - "SIO_KEEPALIVE_VALS", - "SIO_UDP_CONNRESET", - "SOCK_CLOEXEC", - "SOCK_DCCP", - "SOCK_DGRAM", - "SOCK_FLAGS_MASK", - "SOCK_MAXADDRLEN", - "SOCK_NONBLOCK", - "SOCK_NOSIGPIPE", - "SOCK_PACKET", - "SOCK_RAW", - "SOCK_RDM", - "SOCK_SEQPACKET", - "SOCK_STREAM", - "SOL_AAL", - "SOL_ATM", - "SOL_DECNET", - "SOL_ICMPV6", - "SOL_IP", - "SOL_IPV6", - "SOL_IRDA", - "SOL_PACKET", - "SOL_RAW", - "SOL_SOCKET", - "SOL_TCP", - "SOL_X25", - "SOMAXCONN", - "SO_ACCEPTCONN", - "SO_ACCEPTFILTER", - "SO_ATTACH_FILTER", - "SO_BINDANY", - "SO_BINDTODEVICE", - "SO_BINTIME", - "SO_BROADCAST", - "SO_BSDCOMPAT", - "SO_DEBUG", - "SO_DETACH_FILTER", - "SO_DOMAIN", - "SO_DONTROUTE", - "SO_DONTTRUNC", - "SO_ERROR", - "SO_KEEPALIVE", - "SO_LABEL", - "SO_LINGER", - "SO_LINGER_SEC", - "SO_LISTENINCQLEN", - "SO_LISTENQLEN", - "SO_LISTENQLIMIT", - "SO_MARK", - "SO_NETPROC", - "SO_NKE", - "SO_NOADDRERR", - "SO_NOHEADER", - "SO_NOSIGPIPE", - "SO_NOTIFYCONFLICT", - "SO_NO_CHECK", - "SO_NO_DDP", - "SO_NO_OFFLOAD", - "SO_NP_EXTENSIONS", - "SO_NREAD", - "SO_NUMRCVPKT", - "SO_NWRITE", - "SO_OOBINLINE", - "SO_OVERFLOWED", - "SO_PASSCRED", - "SO_PASSSEC", - "SO_PEERCRED", - "SO_PEERLABEL", - "SO_PEERNAME", - "SO_PEERSEC", - "SO_PRIORITY", - "SO_PROTOCOL", - "SO_PROTOTYPE", - "SO_RANDOMPORT", - "SO_RCVBUF", - "SO_RCVBUFFORCE", - "SO_RCVLOWAT", - "SO_RCVTIMEO", - "SO_RESTRICTIONS", - "SO_RESTRICT_DENYIN", - 
"SO_RESTRICT_DENYOUT", - "SO_RESTRICT_DENYSET", - "SO_REUSEADDR", - "SO_REUSEPORT", - "SO_REUSESHAREUID", - "SO_RTABLE", - "SO_RXQ_OVFL", - "SO_SECURITY_AUTHENTICATION", - "SO_SECURITY_ENCRYPTION_NETWORK", - "SO_SECURITY_ENCRYPTION_TRANSPORT", - "SO_SETFIB", - "SO_SNDBUF", - "SO_SNDBUFFORCE", - "SO_SNDLOWAT", - "SO_SNDTIMEO", - "SO_SPLICE", - "SO_TIMESTAMP", - "SO_TIMESTAMPING", - "SO_TIMESTAMPNS", - "SO_TIMESTAMP_MONOTONIC", - "SO_TYPE", - "SO_UPCALLCLOSEWAIT", - "SO_UPDATE_ACCEPT_CONTEXT", - "SO_UPDATE_CONNECT_CONTEXT", - "SO_USELOOPBACK", - "SO_USER_COOKIE", - "SO_VENDOR", - "SO_WANTMORE", - "SO_WANTOOBFLAG", - "SSLExtraCertChainPolicyPara", - "STANDARD_RIGHTS_ALL", - "STANDARD_RIGHTS_EXECUTE", - "STANDARD_RIGHTS_READ", - "STANDARD_RIGHTS_REQUIRED", - "STANDARD_RIGHTS_WRITE", - "STARTF_USESHOWWINDOW", - "STARTF_USESTDHANDLES", - "STD_ERROR_HANDLE", - "STD_INPUT_HANDLE", - "STD_OUTPUT_HANDLE", - "SUBLANG_ENGLISH_US", - "SW_FORCEMINIMIZE", - "SW_HIDE", - "SW_MAXIMIZE", - "SW_MINIMIZE", - "SW_NORMAL", - "SW_RESTORE", - "SW_SHOW", - "SW_SHOWDEFAULT", - "SW_SHOWMAXIMIZED", - "SW_SHOWMINIMIZED", - "SW_SHOWMINNOACTIVE", - "SW_SHOWNA", - "SW_SHOWNOACTIVATE", - "SW_SHOWNORMAL", - "SYMBOLIC_LINK_FLAG_DIRECTORY", - "SYNCHRONIZE", - "SYSCTL_VERSION", - "SYSCTL_VERS_0", - "SYSCTL_VERS_1", - "SYSCTL_VERS_MASK", - "SYS_ABORT2", - "SYS_ACCEPT", - "SYS_ACCEPT4", - "SYS_ACCEPT_NOCANCEL", - "SYS_ACCESS", - "SYS_ACCESS_EXTENDED", - "SYS_ACCT", - "SYS_ADD_KEY", - "SYS_ADD_PROFIL", - "SYS_ADJFREQ", - "SYS_ADJTIME", - "SYS_ADJTIMEX", - "SYS_AFS_SYSCALL", - "SYS_AIO_CANCEL", - "SYS_AIO_ERROR", - "SYS_AIO_FSYNC", - "SYS_AIO_MLOCK", - "SYS_AIO_READ", - "SYS_AIO_RETURN", - "SYS_AIO_SUSPEND", - "SYS_AIO_SUSPEND_NOCANCEL", - "SYS_AIO_WAITCOMPLETE", - "SYS_AIO_WRITE", - "SYS_ALARM", - "SYS_ARCH_PRCTL", - "SYS_ARM_FADVISE64_64", - "SYS_ARM_SYNC_FILE_RANGE", - "SYS_ATGETMSG", - "SYS_ATPGETREQ", - "SYS_ATPGETRSP", - "SYS_ATPSNDREQ", - "SYS_ATPSNDRSP", - "SYS_ATPUTMSG", - "SYS_ATSOCKET", - 
"SYS_AUDIT", - "SYS_AUDITCTL", - "SYS_AUDITON", - "SYS_AUDIT_SESSION_JOIN", - "SYS_AUDIT_SESSION_PORT", - "SYS_AUDIT_SESSION_SELF", - "SYS_BDFLUSH", - "SYS_BIND", - "SYS_BINDAT", - "SYS_BREAK", - "SYS_BRK", - "SYS_BSDTHREAD_CREATE", - "SYS_BSDTHREAD_REGISTER", - "SYS_BSDTHREAD_TERMINATE", - "SYS_CAPGET", - "SYS_CAPSET", - "SYS_CAP_ENTER", - "SYS_CAP_FCNTLS_GET", - "SYS_CAP_FCNTLS_LIMIT", - "SYS_CAP_GETMODE", - "SYS_CAP_GETRIGHTS", - "SYS_CAP_IOCTLS_GET", - "SYS_CAP_IOCTLS_LIMIT", - "SYS_CAP_NEW", - "SYS_CAP_RIGHTS_GET", - "SYS_CAP_RIGHTS_LIMIT", - "SYS_CHDIR", - "SYS_CHFLAGS", - "SYS_CHFLAGSAT", - "SYS_CHMOD", - "SYS_CHMOD_EXTENDED", - "SYS_CHOWN", - "SYS_CHOWN32", - "SYS_CHROOT", - "SYS_CHUD", - "SYS_CLOCK_ADJTIME", - "SYS_CLOCK_GETCPUCLOCKID2", - "SYS_CLOCK_GETRES", - "SYS_CLOCK_GETTIME", - "SYS_CLOCK_NANOSLEEP", - "SYS_CLOCK_SETTIME", - "SYS_CLONE", - "SYS_CLOSE", - "SYS_CLOSEFROM", - "SYS_CLOSE_NOCANCEL", - "SYS_CONNECT", - "SYS_CONNECTAT", - "SYS_CONNECT_NOCANCEL", - "SYS_COPYFILE", - "SYS_CPUSET", - "SYS_CPUSET_GETAFFINITY", - "SYS_CPUSET_GETID", - "SYS_CPUSET_SETAFFINITY", - "SYS_CPUSET_SETID", - "SYS_CREAT", - "SYS_CREATE_MODULE", - "SYS_CSOPS", - "SYS_CSOPS_AUDITTOKEN", - "SYS_DELETE", - "SYS_DELETE_MODULE", - "SYS_DUP", - "SYS_DUP2", - "SYS_DUP3", - "SYS_EACCESS", - "SYS_EPOLL_CREATE", - "SYS_EPOLL_CREATE1", - "SYS_EPOLL_CTL", - "SYS_EPOLL_CTL_OLD", - "SYS_EPOLL_PWAIT", - "SYS_EPOLL_WAIT", - "SYS_EPOLL_WAIT_OLD", - "SYS_EVENTFD", - "SYS_EVENTFD2", - "SYS_EXCHANGEDATA", - "SYS_EXECVE", - "SYS_EXIT", - "SYS_EXIT_GROUP", - "SYS_EXTATTRCTL", - "SYS_EXTATTR_DELETE_FD", - "SYS_EXTATTR_DELETE_FILE", - "SYS_EXTATTR_DELETE_LINK", - "SYS_EXTATTR_GET_FD", - "SYS_EXTATTR_GET_FILE", - "SYS_EXTATTR_GET_LINK", - "SYS_EXTATTR_LIST_FD", - "SYS_EXTATTR_LIST_FILE", - "SYS_EXTATTR_LIST_LINK", - "SYS_EXTATTR_SET_FD", - "SYS_EXTATTR_SET_FILE", - "SYS_EXTATTR_SET_LINK", - "SYS_FACCESSAT", - "SYS_FADVISE64", - "SYS_FADVISE64_64", - "SYS_FALLOCATE", - "SYS_FANOTIFY_INIT", - 
"SYS_FANOTIFY_MARK", - "SYS_FCHDIR", - "SYS_FCHFLAGS", - "SYS_FCHMOD", - "SYS_FCHMODAT", - "SYS_FCHMOD_EXTENDED", - "SYS_FCHOWN", - "SYS_FCHOWN32", - "SYS_FCHOWNAT", - "SYS_FCHROOT", - "SYS_FCNTL", - "SYS_FCNTL64", - "SYS_FCNTL_NOCANCEL", - "SYS_FDATASYNC", - "SYS_FEXECVE", - "SYS_FFCLOCK_GETCOUNTER", - "SYS_FFCLOCK_GETESTIMATE", - "SYS_FFCLOCK_SETESTIMATE", - "SYS_FFSCTL", - "SYS_FGETATTRLIST", - "SYS_FGETXATTR", - "SYS_FHOPEN", - "SYS_FHSTAT", - "SYS_FHSTATFS", - "SYS_FILEPORT_MAKEFD", - "SYS_FILEPORT_MAKEPORT", - "SYS_FKTRACE", - "SYS_FLISTXATTR", - "SYS_FLOCK", - "SYS_FORK", - "SYS_FPATHCONF", - "SYS_FREEBSD6_FTRUNCATE", - "SYS_FREEBSD6_LSEEK", - "SYS_FREEBSD6_MMAP", - "SYS_FREEBSD6_PREAD", - "SYS_FREEBSD6_PWRITE", - "SYS_FREEBSD6_TRUNCATE", - "SYS_FREMOVEXATTR", - "SYS_FSCTL", - "SYS_FSETATTRLIST", - "SYS_FSETXATTR", - "SYS_FSGETPATH", - "SYS_FSTAT", - "SYS_FSTAT64", - "SYS_FSTAT64_EXTENDED", - "SYS_FSTATAT", - "SYS_FSTATAT64", - "SYS_FSTATFS", - "SYS_FSTATFS64", - "SYS_FSTATV", - "SYS_FSTATVFS1", - "SYS_FSTAT_EXTENDED", - "SYS_FSYNC", - "SYS_FSYNC_NOCANCEL", - "SYS_FSYNC_RANGE", - "SYS_FTIME", - "SYS_FTRUNCATE", - "SYS_FTRUNCATE64", - "SYS_FUTEX", - "SYS_FUTIMENS", - "SYS_FUTIMES", - "SYS_FUTIMESAT", - "SYS_GETATTRLIST", - "SYS_GETAUDIT", - "SYS_GETAUDIT_ADDR", - "SYS_GETAUID", - "SYS_GETCONTEXT", - "SYS_GETCPU", - "SYS_GETCWD", - "SYS_GETDENTS", - "SYS_GETDENTS64", - "SYS_GETDIRENTRIES", - "SYS_GETDIRENTRIES64", - "SYS_GETDIRENTRIESATTR", - "SYS_GETDTABLECOUNT", - "SYS_GETDTABLESIZE", - "SYS_GETEGID", - "SYS_GETEGID32", - "SYS_GETEUID", - "SYS_GETEUID32", - "SYS_GETFH", - "SYS_GETFSSTAT", - "SYS_GETFSSTAT64", - "SYS_GETGID", - "SYS_GETGID32", - "SYS_GETGROUPS", - "SYS_GETGROUPS32", - "SYS_GETHOSTUUID", - "SYS_GETITIMER", - "SYS_GETLCID", - "SYS_GETLOGIN", - "SYS_GETLOGINCLASS", - "SYS_GETPEERNAME", - "SYS_GETPGID", - "SYS_GETPGRP", - "SYS_GETPID", - "SYS_GETPMSG", - "SYS_GETPPID", - "SYS_GETPRIORITY", - "SYS_GETRESGID", - "SYS_GETRESGID32", - 
"SYS_GETRESUID", - "SYS_GETRESUID32", - "SYS_GETRLIMIT", - "SYS_GETRTABLE", - "SYS_GETRUSAGE", - "SYS_GETSGROUPS", - "SYS_GETSID", - "SYS_GETSOCKNAME", - "SYS_GETSOCKOPT", - "SYS_GETTHRID", - "SYS_GETTID", - "SYS_GETTIMEOFDAY", - "SYS_GETUID", - "SYS_GETUID32", - "SYS_GETVFSSTAT", - "SYS_GETWGROUPS", - "SYS_GETXATTR", - "SYS_GET_KERNEL_SYMS", - "SYS_GET_MEMPOLICY", - "SYS_GET_ROBUST_LIST", - "SYS_GET_THREAD_AREA", - "SYS_GSSD_SYSCALL", - "SYS_GTTY", - "SYS_IDENTITYSVC", - "SYS_IDLE", - "SYS_INITGROUPS", - "SYS_INIT_MODULE", - "SYS_INOTIFY_ADD_WATCH", - "SYS_INOTIFY_INIT", - "SYS_INOTIFY_INIT1", - "SYS_INOTIFY_RM_WATCH", - "SYS_IOCTL", - "SYS_IOPERM", - "SYS_IOPL", - "SYS_IOPOLICYSYS", - "SYS_IOPRIO_GET", - "SYS_IOPRIO_SET", - "SYS_IO_CANCEL", - "SYS_IO_DESTROY", - "SYS_IO_GETEVENTS", - "SYS_IO_SETUP", - "SYS_IO_SUBMIT", - "SYS_IPC", - "SYS_ISSETUGID", - "SYS_JAIL", - "SYS_JAIL_ATTACH", - "SYS_JAIL_GET", - "SYS_JAIL_REMOVE", - "SYS_JAIL_SET", - "SYS_KAS_INFO", - "SYS_KDEBUG_TRACE", - "SYS_KENV", - "SYS_KEVENT", - "SYS_KEVENT64", - "SYS_KEXEC_LOAD", - "SYS_KEYCTL", - "SYS_KILL", - "SYS_KLDFIND", - "SYS_KLDFIRSTMOD", - "SYS_KLDLOAD", - "SYS_KLDNEXT", - "SYS_KLDSTAT", - "SYS_KLDSYM", - "SYS_KLDUNLOAD", - "SYS_KLDUNLOADF", - "SYS_KMQ_NOTIFY", - "SYS_KMQ_OPEN", - "SYS_KMQ_SETATTR", - "SYS_KMQ_TIMEDRECEIVE", - "SYS_KMQ_TIMEDSEND", - "SYS_KMQ_UNLINK", - "SYS_KQUEUE", - "SYS_KQUEUE1", - "SYS_KSEM_CLOSE", - "SYS_KSEM_DESTROY", - "SYS_KSEM_GETVALUE", - "SYS_KSEM_INIT", - "SYS_KSEM_OPEN", - "SYS_KSEM_POST", - "SYS_KSEM_TIMEDWAIT", - "SYS_KSEM_TRYWAIT", - "SYS_KSEM_UNLINK", - "SYS_KSEM_WAIT", - "SYS_KTIMER_CREATE", - "SYS_KTIMER_DELETE", - "SYS_KTIMER_GETOVERRUN", - "SYS_KTIMER_GETTIME", - "SYS_KTIMER_SETTIME", - "SYS_KTRACE", - "SYS_LCHFLAGS", - "SYS_LCHMOD", - "SYS_LCHOWN", - "SYS_LCHOWN32", - "SYS_LEDGER", - "SYS_LGETFH", - "SYS_LGETXATTR", - "SYS_LINK", - "SYS_LINKAT", - "SYS_LIO_LISTIO", - "SYS_LISTEN", - "SYS_LISTXATTR", - "SYS_LLISTXATTR", - "SYS_LOCK", - 
"SYS_LOOKUP_DCOOKIE", - "SYS_LPATHCONF", - "SYS_LREMOVEXATTR", - "SYS_LSEEK", - "SYS_LSETXATTR", - "SYS_LSTAT", - "SYS_LSTAT64", - "SYS_LSTAT64_EXTENDED", - "SYS_LSTATV", - "SYS_LSTAT_EXTENDED", - "SYS_LUTIMES", - "SYS_MAC_SYSCALL", - "SYS_MADVISE", - "SYS_MADVISE1", - "SYS_MAXSYSCALL", - "SYS_MBIND", - "SYS_MIGRATE_PAGES", - "SYS_MINCORE", - "SYS_MINHERIT", - "SYS_MKCOMPLEX", - "SYS_MKDIR", - "SYS_MKDIRAT", - "SYS_MKDIR_EXTENDED", - "SYS_MKFIFO", - "SYS_MKFIFOAT", - "SYS_MKFIFO_EXTENDED", - "SYS_MKNOD", - "SYS_MKNODAT", - "SYS_MLOCK", - "SYS_MLOCKALL", - "SYS_MMAP", - "SYS_MMAP2", - "SYS_MODCTL", - "SYS_MODFIND", - "SYS_MODFNEXT", - "SYS_MODIFY_LDT", - "SYS_MODNEXT", - "SYS_MODSTAT", - "SYS_MODWATCH", - "SYS_MOUNT", - "SYS_MOVE_PAGES", - "SYS_MPROTECT", - "SYS_MPX", - "SYS_MQUERY", - "SYS_MQ_GETSETATTR", - "SYS_MQ_NOTIFY", - "SYS_MQ_OPEN", - "SYS_MQ_TIMEDRECEIVE", - "SYS_MQ_TIMEDSEND", - "SYS_MQ_UNLINK", - "SYS_MREMAP", - "SYS_MSGCTL", - "SYS_MSGGET", - "SYS_MSGRCV", - "SYS_MSGRCV_NOCANCEL", - "SYS_MSGSND", - "SYS_MSGSND_NOCANCEL", - "SYS_MSGSYS", - "SYS_MSYNC", - "SYS_MSYNC_NOCANCEL", - "SYS_MUNLOCK", - "SYS_MUNLOCKALL", - "SYS_MUNMAP", - "SYS_NAME_TO_HANDLE_AT", - "SYS_NANOSLEEP", - "SYS_NEWFSTATAT", - "SYS_NFSCLNT", - "SYS_NFSSERVCTL", - "SYS_NFSSVC", - "SYS_NFSTAT", - "SYS_NICE", - "SYS_NLM_SYSCALL", - "SYS_NLSTAT", - "SYS_NMOUNT", - "SYS_NSTAT", - "SYS_NTP_ADJTIME", - "SYS_NTP_GETTIME", - "SYS_NUMA_GETAFFINITY", - "SYS_NUMA_SETAFFINITY", - "SYS_OABI_SYSCALL_BASE", - "SYS_OBREAK", - "SYS_OLDFSTAT", - "SYS_OLDLSTAT", - "SYS_OLDOLDUNAME", - "SYS_OLDSTAT", - "SYS_OLDUNAME", - "SYS_OPEN", - "SYS_OPENAT", - "SYS_OPENBSD_POLL", - "SYS_OPEN_BY_HANDLE_AT", - "SYS_OPEN_DPROTECTED_NP", - "SYS_OPEN_EXTENDED", - "SYS_OPEN_NOCANCEL", - "SYS_OVADVISE", - "SYS_PACCEPT", - "SYS_PATHCONF", - "SYS_PAUSE", - "SYS_PCICONFIG_IOBASE", - "SYS_PCICONFIG_READ", - "SYS_PCICONFIG_WRITE", - "SYS_PDFORK", - "SYS_PDGETPID", - "SYS_PDKILL", - "SYS_PERF_EVENT_OPEN", - "SYS_PERSONALITY", - 
"SYS_PID_HIBERNATE", - "SYS_PID_RESUME", - "SYS_PID_SHUTDOWN_SOCKETS", - "SYS_PID_SUSPEND", - "SYS_PIPE", - "SYS_PIPE2", - "SYS_PIVOT_ROOT", - "SYS_PMC_CONTROL", - "SYS_PMC_GET_INFO", - "SYS_POLL", - "SYS_POLLTS", - "SYS_POLL_NOCANCEL", - "SYS_POSIX_FADVISE", - "SYS_POSIX_FALLOCATE", - "SYS_POSIX_OPENPT", - "SYS_POSIX_SPAWN", - "SYS_PPOLL", - "SYS_PRCTL", - "SYS_PREAD", - "SYS_PREAD64", - "SYS_PREADV", - "SYS_PREAD_NOCANCEL", - "SYS_PRLIMIT64", - "SYS_PROCCTL", - "SYS_PROCESS_POLICY", - "SYS_PROCESS_VM_READV", - "SYS_PROCESS_VM_WRITEV", - "SYS_PROC_INFO", - "SYS_PROF", - "SYS_PROFIL", - "SYS_PSELECT", - "SYS_PSELECT6", - "SYS_PSET_ASSIGN", - "SYS_PSET_CREATE", - "SYS_PSET_DESTROY", - "SYS_PSYNCH_CVBROAD", - "SYS_PSYNCH_CVCLRPREPOST", - "SYS_PSYNCH_CVSIGNAL", - "SYS_PSYNCH_CVWAIT", - "SYS_PSYNCH_MUTEXDROP", - "SYS_PSYNCH_MUTEXWAIT", - "SYS_PSYNCH_RW_DOWNGRADE", - "SYS_PSYNCH_RW_LONGRDLOCK", - "SYS_PSYNCH_RW_RDLOCK", - "SYS_PSYNCH_RW_UNLOCK", - "SYS_PSYNCH_RW_UNLOCK2", - "SYS_PSYNCH_RW_UPGRADE", - "SYS_PSYNCH_RW_WRLOCK", - "SYS_PSYNCH_RW_YIELDWRLOCK", - "SYS_PTRACE", - "SYS_PUTPMSG", - "SYS_PWRITE", - "SYS_PWRITE64", - "SYS_PWRITEV", - "SYS_PWRITE_NOCANCEL", - "SYS_QUERY_MODULE", - "SYS_QUOTACTL", - "SYS_RASCTL", - "SYS_RCTL_ADD_RULE", - "SYS_RCTL_GET_LIMITS", - "SYS_RCTL_GET_RACCT", - "SYS_RCTL_GET_RULES", - "SYS_RCTL_REMOVE_RULE", - "SYS_READ", - "SYS_READAHEAD", - "SYS_READDIR", - "SYS_READLINK", - "SYS_READLINKAT", - "SYS_READV", - "SYS_READV_NOCANCEL", - "SYS_READ_NOCANCEL", - "SYS_REBOOT", - "SYS_RECV", - "SYS_RECVFROM", - "SYS_RECVFROM_NOCANCEL", - "SYS_RECVMMSG", - "SYS_RECVMSG", - "SYS_RECVMSG_NOCANCEL", - "SYS_REMAP_FILE_PAGES", - "SYS_REMOVEXATTR", - "SYS_RENAME", - "SYS_RENAMEAT", - "SYS_REQUEST_KEY", - "SYS_RESTART_SYSCALL", - "SYS_REVOKE", - "SYS_RFORK", - "SYS_RMDIR", - "SYS_RTPRIO", - "SYS_RTPRIO_THREAD", - "SYS_RT_SIGACTION", - "SYS_RT_SIGPENDING", - "SYS_RT_SIGPROCMASK", - "SYS_RT_SIGQUEUEINFO", - "SYS_RT_SIGRETURN", - "SYS_RT_SIGSUSPEND", - 
"SYS_RT_SIGTIMEDWAIT", - "SYS_RT_TGSIGQUEUEINFO", - "SYS_SBRK", - "SYS_SCHED_GETAFFINITY", - "SYS_SCHED_GETPARAM", - "SYS_SCHED_GETSCHEDULER", - "SYS_SCHED_GET_PRIORITY_MAX", - "SYS_SCHED_GET_PRIORITY_MIN", - "SYS_SCHED_RR_GET_INTERVAL", - "SYS_SCHED_SETAFFINITY", - "SYS_SCHED_SETPARAM", - "SYS_SCHED_SETSCHEDULER", - "SYS_SCHED_YIELD", - "SYS_SCTP_GENERIC_RECVMSG", - "SYS_SCTP_GENERIC_SENDMSG", - "SYS_SCTP_GENERIC_SENDMSG_IOV", - "SYS_SCTP_PEELOFF", - "SYS_SEARCHFS", - "SYS_SECURITY", - "SYS_SELECT", - "SYS_SELECT_NOCANCEL", - "SYS_SEMCONFIG", - "SYS_SEMCTL", - "SYS_SEMGET", - "SYS_SEMOP", - "SYS_SEMSYS", - "SYS_SEMTIMEDOP", - "SYS_SEM_CLOSE", - "SYS_SEM_DESTROY", - "SYS_SEM_GETVALUE", - "SYS_SEM_INIT", - "SYS_SEM_OPEN", - "SYS_SEM_POST", - "SYS_SEM_TRYWAIT", - "SYS_SEM_UNLINK", - "SYS_SEM_WAIT", - "SYS_SEM_WAIT_NOCANCEL", - "SYS_SEND", - "SYS_SENDFILE", - "SYS_SENDFILE64", - "SYS_SENDMMSG", - "SYS_SENDMSG", - "SYS_SENDMSG_NOCANCEL", - "SYS_SENDTO", - "SYS_SENDTO_NOCANCEL", - "SYS_SETATTRLIST", - "SYS_SETAUDIT", - "SYS_SETAUDIT_ADDR", - "SYS_SETAUID", - "SYS_SETCONTEXT", - "SYS_SETDOMAINNAME", - "SYS_SETEGID", - "SYS_SETEUID", - "SYS_SETFIB", - "SYS_SETFSGID", - "SYS_SETFSGID32", - "SYS_SETFSUID", - "SYS_SETFSUID32", - "SYS_SETGID", - "SYS_SETGID32", - "SYS_SETGROUPS", - "SYS_SETGROUPS32", - "SYS_SETHOSTNAME", - "SYS_SETITIMER", - "SYS_SETLCID", - "SYS_SETLOGIN", - "SYS_SETLOGINCLASS", - "SYS_SETNS", - "SYS_SETPGID", - "SYS_SETPRIORITY", - "SYS_SETPRIVEXEC", - "SYS_SETREGID", - "SYS_SETREGID32", - "SYS_SETRESGID", - "SYS_SETRESGID32", - "SYS_SETRESUID", - "SYS_SETRESUID32", - "SYS_SETREUID", - "SYS_SETREUID32", - "SYS_SETRLIMIT", - "SYS_SETRTABLE", - "SYS_SETSGROUPS", - "SYS_SETSID", - "SYS_SETSOCKOPT", - "SYS_SETTID", - "SYS_SETTID_WITH_PID", - "SYS_SETTIMEOFDAY", - "SYS_SETUID", - "SYS_SETUID32", - "SYS_SETWGROUPS", - "SYS_SETXATTR", - "SYS_SET_MEMPOLICY", - "SYS_SET_ROBUST_LIST", - "SYS_SET_THREAD_AREA", - "SYS_SET_TID_ADDRESS", - "SYS_SGETMASK", - 
"SYS_SHARED_REGION_CHECK_NP", - "SYS_SHARED_REGION_MAP_AND_SLIDE_NP", - "SYS_SHMAT", - "SYS_SHMCTL", - "SYS_SHMDT", - "SYS_SHMGET", - "SYS_SHMSYS", - "SYS_SHM_OPEN", - "SYS_SHM_UNLINK", - "SYS_SHUTDOWN", - "SYS_SIGACTION", - "SYS_SIGALTSTACK", - "SYS_SIGNAL", - "SYS_SIGNALFD", - "SYS_SIGNALFD4", - "SYS_SIGPENDING", - "SYS_SIGPROCMASK", - "SYS_SIGQUEUE", - "SYS_SIGQUEUEINFO", - "SYS_SIGRETURN", - "SYS_SIGSUSPEND", - "SYS_SIGSUSPEND_NOCANCEL", - "SYS_SIGTIMEDWAIT", - "SYS_SIGWAIT", - "SYS_SIGWAITINFO", - "SYS_SOCKET", - "SYS_SOCKETCALL", - "SYS_SOCKETPAIR", - "SYS_SPLICE", - "SYS_SSETMASK", - "SYS_SSTK", - "SYS_STACK_SNAPSHOT", - "SYS_STAT", - "SYS_STAT64", - "SYS_STAT64_EXTENDED", - "SYS_STATFS", - "SYS_STATFS64", - "SYS_STATV", - "SYS_STATVFS1", - "SYS_STAT_EXTENDED", - "SYS_STIME", - "SYS_STTY", - "SYS_SWAPCONTEXT", - "SYS_SWAPCTL", - "SYS_SWAPOFF", - "SYS_SWAPON", - "SYS_SYMLINK", - "SYS_SYMLINKAT", - "SYS_SYNC", - "SYS_SYNCFS", - "SYS_SYNC_FILE_RANGE", - "SYS_SYSARCH", - "SYS_SYSCALL", - "SYS_SYSCALL_BASE", - "SYS_SYSFS", - "SYS_SYSINFO", - "SYS_SYSLOG", - "SYS_TEE", - "SYS_TGKILL", - "SYS_THREAD_SELFID", - "SYS_THR_CREATE", - "SYS_THR_EXIT", - "SYS_THR_KILL", - "SYS_THR_KILL2", - "SYS_THR_NEW", - "SYS_THR_SELF", - "SYS_THR_SET_NAME", - "SYS_THR_SUSPEND", - "SYS_THR_WAKE", - "SYS_TIME", - "SYS_TIMERFD_CREATE", - "SYS_TIMERFD_GETTIME", - "SYS_TIMERFD_SETTIME", - "SYS_TIMER_CREATE", - "SYS_TIMER_DELETE", - "SYS_TIMER_GETOVERRUN", - "SYS_TIMER_GETTIME", - "SYS_TIMER_SETTIME", - "SYS_TIMES", - "SYS_TKILL", - "SYS_TRUNCATE", - "SYS_TRUNCATE64", - "SYS_TUXCALL", - "SYS_UGETRLIMIT", - "SYS_ULIMIT", - "SYS_UMASK", - "SYS_UMASK_EXTENDED", - "SYS_UMOUNT", - "SYS_UMOUNT2", - "SYS_UNAME", - "SYS_UNDELETE", - "SYS_UNLINK", - "SYS_UNLINKAT", - "SYS_UNMOUNT", - "SYS_UNSHARE", - "SYS_USELIB", - "SYS_USTAT", - "SYS_UTIME", - "SYS_UTIMENSAT", - "SYS_UTIMES", - "SYS_UTRACE", - "SYS_UUIDGEN", - "SYS_VADVISE", - "SYS_VFORK", - "SYS_VHANGUP", - "SYS_VM86", - "SYS_VM86OLD", - 
"SYS_VMSPLICE", - "SYS_VM_PRESSURE_MONITOR", - "SYS_VSERVER", - "SYS_WAIT4", - "SYS_WAIT4_NOCANCEL", - "SYS_WAIT6", - "SYS_WAITEVENT", - "SYS_WAITID", - "SYS_WAITID_NOCANCEL", - "SYS_WAITPID", - "SYS_WATCHEVENT", - "SYS_WORKQ_KERNRETURN", - "SYS_WORKQ_OPEN", - "SYS_WRITE", - "SYS_WRITEV", - "SYS_WRITEV_NOCANCEL", - "SYS_WRITE_NOCANCEL", - "SYS_YIELD", - "SYS__LLSEEK", - "SYS__LWP_CONTINUE", - "SYS__LWP_CREATE", - "SYS__LWP_CTL", - "SYS__LWP_DETACH", - "SYS__LWP_EXIT", - "SYS__LWP_GETNAME", - "SYS__LWP_GETPRIVATE", - "SYS__LWP_KILL", - "SYS__LWP_PARK", - "SYS__LWP_SELF", - "SYS__LWP_SETNAME", - "SYS__LWP_SETPRIVATE", - "SYS__LWP_SUSPEND", - "SYS__LWP_UNPARK", - "SYS__LWP_UNPARK_ALL", - "SYS__LWP_WAIT", - "SYS__LWP_WAKEUP", - "SYS__NEWSELECT", - "SYS__PSET_BIND", - "SYS__SCHED_GETAFFINITY", - "SYS__SCHED_GETPARAM", - "SYS__SCHED_SETAFFINITY", - "SYS__SCHED_SETPARAM", - "SYS__SYSCTL", - "SYS__UMTX_LOCK", - "SYS__UMTX_OP", - "SYS__UMTX_UNLOCK", - "SYS___ACL_ACLCHECK_FD", - "SYS___ACL_ACLCHECK_FILE", - "SYS___ACL_ACLCHECK_LINK", - "SYS___ACL_DELETE_FD", - "SYS___ACL_DELETE_FILE", - "SYS___ACL_DELETE_LINK", - "SYS___ACL_GET_FD", - "SYS___ACL_GET_FILE", - "SYS___ACL_GET_LINK", - "SYS___ACL_SET_FD", - "SYS___ACL_SET_FILE", - "SYS___ACL_SET_LINK", - "SYS___CAP_RIGHTS_GET", - "SYS___CLONE", - "SYS___DISABLE_THREADSIGNAL", - "SYS___GETCWD", - "SYS___GETLOGIN", - "SYS___GET_TCB", - "SYS___MAC_EXECVE", - "SYS___MAC_GETFSSTAT", - "SYS___MAC_GET_FD", - "SYS___MAC_GET_FILE", - "SYS___MAC_GET_LCID", - "SYS___MAC_GET_LCTX", - "SYS___MAC_GET_LINK", - "SYS___MAC_GET_MOUNT", - "SYS___MAC_GET_PID", - "SYS___MAC_GET_PROC", - "SYS___MAC_MOUNT", - "SYS___MAC_SET_FD", - "SYS___MAC_SET_FILE", - "SYS___MAC_SET_LCTX", - "SYS___MAC_SET_LINK", - "SYS___MAC_SET_PROC", - "SYS___MAC_SYSCALL", - "SYS___OLD_SEMWAIT_SIGNAL", - "SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL", - "SYS___POSIX_CHOWN", - "SYS___POSIX_FCHOWN", - "SYS___POSIX_LCHOWN", - "SYS___POSIX_RENAME", - "SYS___PTHREAD_CANCELED", - 
"SYS___PTHREAD_CHDIR", - "SYS___PTHREAD_FCHDIR", - "SYS___PTHREAD_KILL", - "SYS___PTHREAD_MARKCANCEL", - "SYS___PTHREAD_SIGMASK", - "SYS___QUOTACTL", - "SYS___SEMCTL", - "SYS___SEMWAIT_SIGNAL", - "SYS___SEMWAIT_SIGNAL_NOCANCEL", - "SYS___SETLOGIN", - "SYS___SETUGID", - "SYS___SET_TCB", - "SYS___SIGACTION_SIGTRAMP", - "SYS___SIGTIMEDWAIT", - "SYS___SIGWAIT", - "SYS___SIGWAIT_NOCANCEL", - "SYS___SYSCTL", - "SYS___TFORK", - "SYS___THREXIT", - "SYS___THRSIGDIVERT", - "SYS___THRSLEEP", - "SYS___THRWAKEUP", - "S_ARCH1", - "S_ARCH2", - "S_BLKSIZE", - "S_IEXEC", - "S_IFBLK", - "S_IFCHR", - "S_IFDIR", - "S_IFIFO", - "S_IFLNK", - "S_IFMT", - "S_IFREG", - "S_IFSOCK", - "S_IFWHT", - "S_IREAD", - "S_IRGRP", - "S_IROTH", - "S_IRUSR", - "S_IRWXG", - "S_IRWXO", - "S_IRWXU", - "S_ISGID", - "S_ISTXT", - "S_ISUID", - "S_ISVTX", - "S_IWGRP", - "S_IWOTH", - "S_IWRITE", - "S_IWUSR", - "S_IXGRP", - "S_IXOTH", - "S_IXUSR", - "S_LOGIN_SET", - "SecurityAttributes", - "Seek", - "Select", - "Sendfile", - "Sendmsg", - "SendmsgN", - "Sendto", - "Servent", - "SetBpf", - "SetBpfBuflen", - "SetBpfDatalink", - "SetBpfHeadercmpl", - "SetBpfImmediate", - "SetBpfInterface", - "SetBpfPromisc", - "SetBpfTimeout", - "SetCurrentDirectory", - "SetEndOfFile", - "SetEnvironmentVariable", - "SetFileAttributes", - "SetFileCompletionNotificationModes", - "SetFilePointer", - "SetFileTime", - "SetHandleInformation", - "SetKevent", - "SetLsfPromisc", - "SetNonblock", - "Setdomainname", - "Setegid", - "Setenv", - "Seteuid", - "Setfsgid", - "Setfsuid", - "Setgid", - "Setgroups", - "Sethostname", - "Setlogin", - "Setpgid", - "Setpriority", - "Setprivexec", - "Setregid", - "Setresgid", - "Setresuid", - "Setreuid", - "Setrlimit", - "Setsid", - "Setsockopt", - "SetsockoptByte", - "SetsockoptICMPv6Filter", - "SetsockoptIPMreq", - "SetsockoptIPMreqn", - "SetsockoptIPv6Mreq", - "SetsockoptInet4Addr", - "SetsockoptInt", - "SetsockoptLinger", - "SetsockoptString", - "SetsockoptTimeval", - "Settimeofday", - "Setuid", - 
"Setxattr", - "Shutdown", - "SidTypeAlias", - "SidTypeComputer", - "SidTypeDeletedAccount", - "SidTypeDomain", - "SidTypeGroup", - "SidTypeInvalid", - "SidTypeLabel", - "SidTypeUnknown", - "SidTypeUser", - "SidTypeWellKnownGroup", - "Signal", - "SizeofBpfHdr", - "SizeofBpfInsn", - "SizeofBpfProgram", - "SizeofBpfStat", - "SizeofBpfVersion", - "SizeofBpfZbuf", - "SizeofBpfZbufHeader", - "SizeofCmsghdr", - "SizeofICMPv6Filter", - "SizeofIPMreq", - "SizeofIPMreqn", - "SizeofIPv6MTUInfo", - "SizeofIPv6Mreq", - "SizeofIfAddrmsg", - "SizeofIfAnnounceMsghdr", - "SizeofIfData", - "SizeofIfInfomsg", - "SizeofIfMsghdr", - "SizeofIfaMsghdr", - "SizeofIfmaMsghdr", - "SizeofIfmaMsghdr2", - "SizeofInet4Pktinfo", - "SizeofInet6Pktinfo", - "SizeofInotifyEvent", - "SizeofLinger", - "SizeofMsghdr", - "SizeofNlAttr", - "SizeofNlMsgerr", - "SizeofNlMsghdr", - "SizeofRtAttr", - "SizeofRtGenmsg", - "SizeofRtMetrics", - "SizeofRtMsg", - "SizeofRtMsghdr", - "SizeofRtNexthop", - "SizeofSockFilter", - "SizeofSockFprog", - "SizeofSockaddrAny", - "SizeofSockaddrDatalink", - "SizeofSockaddrInet4", - "SizeofSockaddrInet6", - "SizeofSockaddrLinklayer", - "SizeofSockaddrNetlink", - "SizeofSockaddrUnix", - "SizeofTCPInfo", - "SizeofUcred", - "SlicePtrFromStrings", - "SockFilter", - "SockFprog", - "Sockaddr", - "SockaddrDatalink", - "SockaddrGen", - "SockaddrInet4", - "SockaddrInet6", - "SockaddrLinklayer", - "SockaddrNetlink", - "SockaddrUnix", - "Socket", - "SocketControlMessage", - "SocketDisableIPv6", - "Socketpair", - "Splice", - "StartProcess", - "StartupInfo", - "Stat", - "Stat_t", - "Statfs", - "Statfs_t", - "Stderr", - "Stdin", - "Stdout", - "StringBytePtr", - "StringByteSlice", - "StringSlicePtr", - "StringToSid", - "StringToUTF16", - "StringToUTF16Ptr", - "Symlink", - "Sync", - "SyncFileRange", - "SysProcAttr", - "SysProcIDMap", - "Syscall", - "Syscall12", - "Syscall15", - "Syscall18", - "Syscall6", - "Syscall9", - "SyscallN", - "Sysctl", - "SysctlUint32", - "Sysctlnode", - "Sysinfo", - 
"Sysinfo_t", - "Systemtime", - "TCGETS", - "TCIFLUSH", - "TCIOFLUSH", - "TCOFLUSH", - "TCPInfo", - "TCPKeepalive", - "TCP_CA_NAME_MAX", - "TCP_CONGCTL", - "TCP_CONGESTION", - "TCP_CONNECTIONTIMEOUT", - "TCP_CORK", - "TCP_DEFER_ACCEPT", - "TCP_ENABLE_ECN", - "TCP_INFO", - "TCP_KEEPALIVE", - "TCP_KEEPCNT", - "TCP_KEEPIDLE", - "TCP_KEEPINIT", - "TCP_KEEPINTVL", - "TCP_LINGER2", - "TCP_MAXBURST", - "TCP_MAXHLEN", - "TCP_MAXOLEN", - "TCP_MAXSEG", - "TCP_MAXWIN", - "TCP_MAX_SACK", - "TCP_MAX_WINSHIFT", - "TCP_MD5SIG", - "TCP_MD5SIG_MAXKEYLEN", - "TCP_MINMSS", - "TCP_MINMSSOVERLOAD", - "TCP_MSS", - "TCP_NODELAY", - "TCP_NOOPT", - "TCP_NOPUSH", - "TCP_NOTSENT_LOWAT", - "TCP_NSTATES", - "TCP_QUICKACK", - "TCP_RXT_CONNDROPTIME", - "TCP_RXT_FINDROP", - "TCP_SACK_ENABLE", - "TCP_SENDMOREACKS", - "TCP_SYNCNT", - "TCP_VENDOR", - "TCP_WINDOW_CLAMP", - "TCSAFLUSH", - "TCSETS", - "TF_DISCONNECT", - "TF_REUSE_SOCKET", - "TF_USE_DEFAULT_WORKER", - "TF_USE_KERNEL_APC", - "TF_USE_SYSTEM_THREAD", - "TF_WRITE_BEHIND", - "TH32CS_INHERIT", - "TH32CS_SNAPALL", - "TH32CS_SNAPHEAPLIST", - "TH32CS_SNAPMODULE", - "TH32CS_SNAPMODULE32", - "TH32CS_SNAPPROCESS", - "TH32CS_SNAPTHREAD", - "TIME_ZONE_ID_DAYLIGHT", - "TIME_ZONE_ID_STANDARD", - "TIME_ZONE_ID_UNKNOWN", - "TIOCCBRK", - "TIOCCDTR", - "TIOCCONS", - "TIOCDCDTIMESTAMP", - "TIOCDRAIN", - "TIOCDSIMICROCODE", - "TIOCEXCL", - "TIOCEXT", - "TIOCFLAG_CDTRCTS", - "TIOCFLAG_CLOCAL", - "TIOCFLAG_CRTSCTS", - "TIOCFLAG_MDMBUF", - "TIOCFLAG_PPS", - "TIOCFLAG_SOFTCAR", - "TIOCFLUSH", - "TIOCGDEV", - "TIOCGDRAINWAIT", - "TIOCGETA", - "TIOCGETD", - "TIOCGFLAGS", - "TIOCGICOUNT", - "TIOCGLCKTRMIOS", - "TIOCGLINED", - "TIOCGPGRP", - "TIOCGPTN", - "TIOCGQSIZE", - "TIOCGRANTPT", - "TIOCGRS485", - "TIOCGSERIAL", - "TIOCGSID", - "TIOCGSIZE", - "TIOCGSOFTCAR", - "TIOCGTSTAMP", - "TIOCGWINSZ", - "TIOCINQ", - "TIOCIXOFF", - "TIOCIXON", - "TIOCLINUX", - "TIOCMBIC", - "TIOCMBIS", - "TIOCMGDTRWAIT", - "TIOCMGET", - "TIOCMIWAIT", - "TIOCMODG", - "TIOCMODS", - 
"TIOCMSDTRWAIT", - "TIOCMSET", - "TIOCM_CAR", - "TIOCM_CD", - "TIOCM_CTS", - "TIOCM_DCD", - "TIOCM_DSR", - "TIOCM_DTR", - "TIOCM_LE", - "TIOCM_RI", - "TIOCM_RNG", - "TIOCM_RTS", - "TIOCM_SR", - "TIOCM_ST", - "TIOCNOTTY", - "TIOCNXCL", - "TIOCOUTQ", - "TIOCPKT", - "TIOCPKT_DATA", - "TIOCPKT_DOSTOP", - "TIOCPKT_FLUSHREAD", - "TIOCPKT_FLUSHWRITE", - "TIOCPKT_IOCTL", - "TIOCPKT_NOSTOP", - "TIOCPKT_START", - "TIOCPKT_STOP", - "TIOCPTMASTER", - "TIOCPTMGET", - "TIOCPTSNAME", - "TIOCPTYGNAME", - "TIOCPTYGRANT", - "TIOCPTYUNLK", - "TIOCRCVFRAME", - "TIOCREMOTE", - "TIOCSBRK", - "TIOCSCONS", - "TIOCSCTTY", - "TIOCSDRAINWAIT", - "TIOCSDTR", - "TIOCSERCONFIG", - "TIOCSERGETLSR", - "TIOCSERGETMULTI", - "TIOCSERGSTRUCT", - "TIOCSERGWILD", - "TIOCSERSETMULTI", - "TIOCSERSWILD", - "TIOCSER_TEMT", - "TIOCSETA", - "TIOCSETAF", - "TIOCSETAW", - "TIOCSETD", - "TIOCSFLAGS", - "TIOCSIG", - "TIOCSLCKTRMIOS", - "TIOCSLINED", - "TIOCSPGRP", - "TIOCSPTLCK", - "TIOCSQSIZE", - "TIOCSRS485", - "TIOCSSERIAL", - "TIOCSSIZE", - "TIOCSSOFTCAR", - "TIOCSTART", - "TIOCSTAT", - "TIOCSTI", - "TIOCSTOP", - "TIOCSTSTAMP", - "TIOCSWINSZ", - "TIOCTIMESTAMP", - "TIOCUCNTL", - "TIOCVHANGUP", - "TIOCXMTFRAME", - "TOKEN_ADJUST_DEFAULT", - "TOKEN_ADJUST_GROUPS", - "TOKEN_ADJUST_PRIVILEGES", - "TOKEN_ADJUST_SESSIONID", - "TOKEN_ALL_ACCESS", - "TOKEN_ASSIGN_PRIMARY", - "TOKEN_DUPLICATE", - "TOKEN_EXECUTE", - "TOKEN_IMPERSONATE", - "TOKEN_QUERY", - "TOKEN_QUERY_SOURCE", - "TOKEN_READ", - "TOKEN_WRITE", - "TOSTOP", - "TRUNCATE_EXISTING", - "TUNATTACHFILTER", - "TUNDETACHFILTER", - "TUNGETFEATURES", - "TUNGETIFF", - "TUNGETSNDBUF", - "TUNGETVNETHDRSZ", - "TUNSETDEBUG", - "TUNSETGROUP", - "TUNSETIFF", - "TUNSETLINK", - "TUNSETNOCSUM", - "TUNSETOFFLOAD", - "TUNSETOWNER", - "TUNSETPERSIST", - "TUNSETSNDBUF", - "TUNSETTXFILTER", - "TUNSETVNETHDRSZ", - "Tee", - "TerminateProcess", - "Termios", - "Tgkill", - "Time", - "Time_t", - "Times", - "Timespec", - "TimespecToNsec", - "Timeval", - "Timeval32", - "TimevalToNsec", - 
"Timex", - "Timezoneinformation", - "Tms", - "Token", - "TokenAccessInformation", - "TokenAuditPolicy", - "TokenDefaultDacl", - "TokenElevation", - "TokenElevationType", - "TokenGroups", - "TokenGroupsAndPrivileges", - "TokenHasRestrictions", - "TokenImpersonationLevel", - "TokenIntegrityLevel", - "TokenLinkedToken", - "TokenLogonSid", - "TokenMandatoryPolicy", - "TokenOrigin", - "TokenOwner", - "TokenPrimaryGroup", - "TokenPrivileges", - "TokenRestrictedSids", - "TokenSandBoxInert", - "TokenSessionId", - "TokenSessionReference", - "TokenSource", - "TokenStatistics", - "TokenType", - "TokenUIAccess", - "TokenUser", - "TokenVirtualizationAllowed", - "TokenVirtualizationEnabled", - "Tokenprimarygroup", - "Tokenuser", - "TranslateAccountName", - "TranslateName", - "TransmitFile", - "TransmitFileBuffers", - "Truncate", - "UNIX_PATH_MAX", - "USAGE_MATCH_TYPE_AND", - "USAGE_MATCH_TYPE_OR", - "UTF16FromString", - "UTF16PtrFromString", - "UTF16ToString", - "Ucred", - "Umask", - "Uname", - "Undelete", - "UnixCredentials", - "UnixRights", - "Unlink", - "Unlinkat", - "UnmapViewOfFile", - "Unmount", - "Unsetenv", - "Unshare", - "UserInfo10", - "Ustat", - "Ustat_t", - "Utimbuf", - "Utime", - "Utimes", - "UtimesNano", - "Utsname", - "VDISCARD", - "VDSUSP", - "VEOF", - "VEOL", - "VEOL2", - "VERASE", - "VERASE2", - "VINTR", - "VKILL", - "VLNEXT", - "VMIN", - "VQUIT", - "VREPRINT", - "VSTART", - "VSTATUS", - "VSTOP", - "VSUSP", - "VSWTC", - "VT0", - "VT1", - "VTDLY", - "VTIME", - "VWERASE", - "VirtualLock", - "VirtualUnlock", - "WAIT_ABANDONED", - "WAIT_FAILED", - "WAIT_OBJECT_0", - "WAIT_TIMEOUT", - "WALL", - "WALLSIG", - "WALTSIG", - "WCLONE", - "WCONTINUED", - "WCOREFLAG", - "WEXITED", - "WLINUXCLONE", - "WNOHANG", - "WNOTHREAD", - "WNOWAIT", - "WNOZOMBIE", - "WOPTSCHECKED", - "WORDSIZE", - "WSABuf", - "WSACleanup", - "WSADESCRIPTION_LEN", - "WSAData", - "WSAEACCES", - "WSAECONNABORTED", - "WSAECONNRESET", - "WSAEnumProtocols", - "WSAID_CONNECTEX", - "WSAIoctl", - 
"WSAPROTOCOL_LEN", - "WSAProtocolChain", - "WSAProtocolInfo", - "WSARecv", - "WSARecvFrom", - "WSASYS_STATUS_LEN", - "WSASend", - "WSASendTo", - "WSASendto", - "WSAStartup", - "WSTOPPED", - "WTRAPPED", - "WUNTRACED", - "Wait4", - "WaitForSingleObject", - "WaitStatus", - "Win32FileAttributeData", - "Win32finddata", - "Write", - "WriteConsole", - "WriteFile", - "X509_ASN_ENCODING", - "XCASE", - "XP1_CONNECTIONLESS", - "XP1_CONNECT_DATA", - "XP1_DISCONNECT_DATA", - "XP1_EXPEDITED_DATA", - "XP1_GRACEFUL_CLOSE", - "XP1_GUARANTEED_DELIVERY", - "XP1_GUARANTEED_ORDER", - "XP1_IFS_HANDLES", - "XP1_MESSAGE_ORIENTED", - "XP1_MULTIPOINT_CONTROL_PLANE", - "XP1_MULTIPOINT_DATA_PLANE", - "XP1_PARTIAL_MESSAGE", - "XP1_PSEUDO_STREAM", - "XP1_QOS_SUPPORTED", - "XP1_SAN_SUPPORT_SDP", - "XP1_SUPPORT_BROADCAST", - "XP1_SUPPORT_MULTIPOINT", - "XP1_UNI_RECV", - "XP1_UNI_SEND", - }, - "syscall/js": { - "CopyBytesToGo", - "CopyBytesToJS", - "Error", - "Func", - "FuncOf", - "Global", - "Null", - "Type", - "TypeBoolean", - "TypeFunction", - "TypeNull", - "TypeNumber", - "TypeObject", - "TypeString", - "TypeSymbol", - "TypeUndefined", - "Undefined", - "Value", - "ValueError", - "ValueOf", - }, - "testing": { - "AllocsPerRun", - "B", - "Benchmark", - "BenchmarkResult", - "Cover", - "CoverBlock", - "CoverMode", - "Coverage", - "F", - "Init", - "InternalBenchmark", - "InternalExample", - "InternalFuzzTarget", - "InternalTest", - "M", - "Main", - "MainStart", - "PB", - "RegisterCover", - "RunBenchmarks", - "RunExamples", - "RunTests", - "Short", - "T", - "TB", - "Testing", - "Verbose", - }, - "testing/fstest": { - "MapFS", - "MapFile", - "TestFS", - }, - "testing/iotest": { - "DataErrReader", - "ErrReader", - "ErrTimeout", - "HalfReader", - "NewReadLogger", - "NewWriteLogger", - "OneByteReader", - "TestReader", - "TimeoutReader", - "TruncateWriter", - }, - "testing/quick": { - "Check", - "CheckEqual", - "CheckEqualError", - "CheckError", - "Config", - "Generator", - "SetupError", - "Value", - }, 
- "testing/slogtest": { - "TestHandler", - }, - "text/scanner": { - "Char", - "Comment", - "EOF", - "Float", - "GoTokens", - "GoWhitespace", - "Ident", - "Int", - "Position", - "RawString", - "ScanChars", - "ScanComments", - "ScanFloats", - "ScanIdents", - "ScanInts", - "ScanRawStrings", - "ScanStrings", - "Scanner", - "SkipComments", - "String", - "TokenString", - }, - "text/tabwriter": { - "AlignRight", - "Debug", - "DiscardEmptyColumns", - "Escape", - "FilterHTML", - "NewWriter", - "StripEscape", - "TabIndent", - "Writer", - }, - "text/template": { - "ExecError", - "FuncMap", - "HTMLEscape", - "HTMLEscapeString", - "HTMLEscaper", - "IsTrue", - "JSEscape", - "JSEscapeString", - "JSEscaper", - "Must", - "New", - "ParseFS", - "ParseFiles", - "ParseGlob", - "Template", - "URLQueryEscaper", - }, - "text/template/parse": { - "ActionNode", - "BoolNode", - "BranchNode", - "BreakNode", - "ChainNode", - "CommandNode", - "CommentNode", - "ContinueNode", - "DotNode", - "FieldNode", - "IdentifierNode", - "IfNode", - "IsEmptyTree", - "ListNode", - "Mode", - "New", - "NewIdentifier", - "NilNode", - "Node", - "NodeAction", - "NodeBool", - "NodeBreak", - "NodeChain", - "NodeCommand", - "NodeComment", - "NodeContinue", - "NodeDot", - "NodeField", - "NodeIdentifier", - "NodeIf", - "NodeList", - "NodeNil", - "NodeNumber", - "NodePipe", - "NodeRange", - "NodeString", - "NodeTemplate", - "NodeText", - "NodeType", - "NodeVariable", - "NodeWith", - "NumberNode", - "Parse", - "ParseComments", - "PipeNode", - "Pos", - "RangeNode", - "SkipFuncCheck", - "StringNode", - "TemplateNode", - "TextNode", - "Tree", - "VariableNode", - "WithNode", - }, - "time": { - "ANSIC", - "After", - "AfterFunc", - "April", - "August", - "Date", - "DateOnly", - "DateTime", - "December", - "Duration", - "February", - "FixedZone", - "Friday", - "Hour", - "January", - "July", - "June", - "Kitchen", - "Layout", - "LoadLocation", - "LoadLocationFromTZData", - "Local", - "Location", - "March", - "May", - 
"Microsecond", - "Millisecond", - "Minute", - "Monday", - "Month", - "Nanosecond", - "NewTicker", - "NewTimer", - "November", - "Now", - "October", - "Parse", - "ParseDuration", - "ParseError", - "ParseInLocation", - "RFC1123", - "RFC1123Z", - "RFC3339", - "RFC3339Nano", - "RFC822", - "RFC822Z", - "RFC850", - "RubyDate", - "Saturday", - "Second", - "September", - "Since", - "Sleep", - "Stamp", - "StampMicro", - "StampMilli", - "StampNano", - "Sunday", - "Thursday", - "Tick", - "Ticker", - "Time", - "TimeOnly", - "Timer", - "Tuesday", - "UTC", - "Unix", - "UnixDate", - "UnixMicro", - "UnixMilli", - "Until", - "Wednesday", - "Weekday", - }, - "unicode": { - "ASCII_Hex_Digit", - "Adlam", - "Ahom", - "Anatolian_Hieroglyphs", - "Arabic", - "Armenian", - "Avestan", - "AzeriCase", - "Balinese", - "Bamum", - "Bassa_Vah", - "Batak", - "Bengali", - "Bhaiksuki", - "Bidi_Control", - "Bopomofo", - "Brahmi", - "Braille", - "Buginese", - "Buhid", - "C", - "Canadian_Aboriginal", - "Carian", - "CaseRange", - "CaseRanges", - "Categories", - "Caucasian_Albanian", - "Cc", - "Cf", - "Chakma", - "Cham", - "Cherokee", - "Chorasmian", - "Co", - "Common", - "Coptic", - "Cs", - "Cuneiform", - "Cypriot", - "Cypro_Minoan", - "Cyrillic", - "Dash", - "Deprecated", - "Deseret", - "Devanagari", - "Diacritic", - "Digit", - "Dives_Akuru", - "Dogra", - "Duployan", - "Egyptian_Hieroglyphs", - "Elbasan", - "Elymaic", - "Ethiopic", - "Extender", - "FoldCategory", - "FoldScript", - "Georgian", - "Glagolitic", - "Gothic", - "Grantha", - "GraphicRanges", - "Greek", - "Gujarati", - "Gunjala_Gondi", - "Gurmukhi", - "Han", - "Hangul", - "Hanifi_Rohingya", - "Hanunoo", - "Hatran", - "Hebrew", - "Hex_Digit", - "Hiragana", - "Hyphen", - "IDS_Binary_Operator", - "IDS_Trinary_Operator", - "Ideographic", - "Imperial_Aramaic", - "In", - "Inherited", - "Inscriptional_Pahlavi", - "Inscriptional_Parthian", - "Is", - "IsControl", - "IsDigit", - "IsGraphic", - "IsLetter", - "IsLower", - "IsMark", - "IsNumber", - 
"IsOneOf", - "IsPrint", - "IsPunct", - "IsSpace", - "IsSymbol", - "IsTitle", - "IsUpper", - "Javanese", - "Join_Control", - "Kaithi", - "Kannada", - "Katakana", - "Kawi", - "Kayah_Li", - "Kharoshthi", - "Khitan_Small_Script", - "Khmer", - "Khojki", - "Khudawadi", - "L", - "Lao", - "Latin", - "Lepcha", - "Letter", - "Limbu", - "Linear_A", - "Linear_B", - "Lisu", - "Ll", - "Lm", - "Lo", - "Logical_Order_Exception", - "Lower", - "LowerCase", - "Lt", - "Lu", - "Lycian", - "Lydian", - "M", - "Mahajani", - "Makasar", - "Malayalam", - "Mandaic", - "Manichaean", - "Marchen", - "Mark", - "Masaram_Gondi", - "MaxASCII", - "MaxCase", - "MaxLatin1", - "MaxRune", - "Mc", - "Me", - "Medefaidrin", - "Meetei_Mayek", - "Mende_Kikakui", - "Meroitic_Cursive", - "Meroitic_Hieroglyphs", - "Miao", - "Mn", - "Modi", - "Mongolian", - "Mro", - "Multani", - "Myanmar", - "N", - "Nabataean", - "Nag_Mundari", - "Nandinagari", - "Nd", - "New_Tai_Lue", - "Newa", - "Nko", - "Nl", - "No", - "Noncharacter_Code_Point", - "Number", - "Nushu", - "Nyiakeng_Puachue_Hmong", - "Ogham", - "Ol_Chiki", - "Old_Hungarian", - "Old_Italic", - "Old_North_Arabian", - "Old_Permic", - "Old_Persian", - "Old_Sogdian", - "Old_South_Arabian", - "Old_Turkic", - "Old_Uyghur", - "Oriya", - "Osage", - "Osmanya", - "Other", - "Other_Alphabetic", - "Other_Default_Ignorable_Code_Point", - "Other_Grapheme_Extend", - "Other_ID_Continue", - "Other_ID_Start", - "Other_Lowercase", - "Other_Math", - "Other_Uppercase", - "P", - "Pahawh_Hmong", - "Palmyrene", - "Pattern_Syntax", - "Pattern_White_Space", - "Pau_Cin_Hau", - "Pc", - "Pd", - "Pe", - "Pf", - "Phags_Pa", - "Phoenician", - "Pi", - "Po", - "Prepended_Concatenation_Mark", - "PrintRanges", - "Properties", - "Ps", - "Psalter_Pahlavi", - "Punct", - "Quotation_Mark", - "Radical", - "Range16", - "Range32", - "RangeTable", - "Regional_Indicator", - "Rejang", - "ReplacementChar", - "Runic", - "S", - "STerm", - "Samaritan", - "Saurashtra", - "Sc", - "Scripts", - "Sentence_Terminal", - 
"Sharada", - "Shavian", - "Siddham", - "SignWriting", - "SimpleFold", - "Sinhala", - "Sk", - "Sm", - "So", - "Soft_Dotted", - "Sogdian", - "Sora_Sompeng", - "Soyombo", - "Space", - "SpecialCase", - "Sundanese", - "Syloti_Nagri", - "Symbol", - "Syriac", - "Tagalog", - "Tagbanwa", - "Tai_Le", - "Tai_Tham", - "Tai_Viet", - "Takri", - "Tamil", - "Tangsa", - "Tangut", - "Telugu", - "Terminal_Punctuation", - "Thaana", - "Thai", - "Tibetan", - "Tifinagh", - "Tirhuta", - "Title", - "TitleCase", - "To", - "ToLower", - "ToTitle", - "ToUpper", - "Toto", - "TurkishCase", - "Ugaritic", - "Unified_Ideograph", - "Upper", - "UpperCase", - "UpperLower", - "Vai", - "Variation_Selector", - "Version", - "Vithkuqi", - "Wancho", - "Warang_Citi", - "White_Space", - "Yezidi", - "Yi", - "Z", - "Zanabazar_Square", - "Zl", - "Zp", - "Zs", - }, - "unicode/utf16": { - "AppendRune", - "Decode", - "DecodeRune", - "Encode", - "EncodeRune", - "IsSurrogate", - }, - "unicode/utf8": { - "AppendRune", - "DecodeLastRune", - "DecodeLastRuneInString", - "DecodeRune", - "DecodeRuneInString", - "EncodeRune", - "FullRune", - "FullRuneInString", - "MaxRune", - "RuneCount", - "RuneCountInString", - "RuneError", - "RuneLen", - "RuneSelf", - "RuneStart", - "UTFMax", - "Valid", - "ValidRune", - "ValidString", - }, - "unsafe": { - "Add", - "Alignof", - "Offsetof", - "Pointer", - "Sizeof", - "Slice", - "SliceData", - "String", - "StringData", - }, -} diff --git a/internal/jsonrpc2/messages.go b/internal/jsonrpc2/messages.go index 58d285d994e..721168fd4f2 100644 --- a/internal/jsonrpc2/messages.go +++ b/internal/jsonrpc2/messages.go @@ -27,7 +27,7 @@ type Request interface { Message // Method is a string containing the method name to invoke. Method() string - // Params is either a struct or an array with the parameters of the method. + // Params is an JSON value (object, array, null, or "") with the parameters of the method. 
Params() json.RawMessage // isJSONRPC2Request is used to make the set of request implementations closed. isJSONRPC2Request() @@ -46,7 +46,7 @@ type Notification struct { type Call struct { // Method is a string containing the method name to invoke. method string - // Params is either a struct or an array with the parameters of the method. + // Params is a JSON value (object, array, null, or "") with the parameters of the method. params json.RawMessage // id of this request, used to tie the Response back to the request. id ID @@ -157,17 +157,17 @@ func (r *Response) MarshalJSON() ([]byte, error) { return data, nil } -func toWireError(err error) *wireError { +func toWireError(err error) *WireError { if err == nil { // no error, the response is complete return nil } - if err, ok := err.(*wireError); ok { + if err, ok := err.(*WireError); ok { // already a wire error, just use it return err } - result := &wireError{Message: err.Error()} - var wrapped *wireError + result := &WireError{Message: err.Error()} + var wrapped *WireError if errors.As(err, &wrapped) { // if we wrapped a wire error, keep the code from the wrapped error // but the message from the outer error diff --git a/internal/jsonrpc2/wire.go b/internal/jsonrpc2/wire.go index ac39f1601f0..f2aa2d63e8c 100644 --- a/internal/jsonrpc2/wire.go +++ b/internal/jsonrpc2/wire.go @@ -57,7 +57,7 @@ type wireResponse struct { // Result is the response value, and is required on success. Result *json.RawMessage `json:"result,omitempty"` // Error is a structured error response if the call fails. - Error *wireError `json:"error,omitempty"` + Error *WireError `json:"error,omitempty"` // ID must be set and is the identifier of the Request this is a response to. 
ID *ID `json:"id,omitempty"` } @@ -70,11 +70,11 @@ type wireCombined struct { Method string `json:"method"` Params *json.RawMessage `json:"params,omitempty"` Result *json.RawMessage `json:"result,omitempty"` - Error *wireError `json:"error,omitempty"` + Error *WireError `json:"error,omitempty"` } -// wireError represents a structured error in a Response. -type wireError struct { +// WireError represents a structured error in a Response. +type WireError struct { // Code is an error code indicating the type of failure. Code int64 `json:"code"` // Message is a short description of the error. @@ -96,13 +96,13 @@ type ID struct { } func NewError(code int64, message string) error { - return &wireError{ + return &WireError{ Code: code, Message: message, } } -func (err *wireError) Error() string { +func (err *WireError) Error() string { return err.Message } diff --git a/internal/jsonrpc2_v2/messages.go b/internal/jsonrpc2_v2/messages.go index af145641d6a..f02b879c3f2 100644 --- a/internal/jsonrpc2_v2/messages.go +++ b/internal/jsonrpc2_v2/messages.go @@ -96,17 +96,17 @@ func (msg *Response) marshal(to *wireCombined) { to.Result = msg.Result } -func toWireError(err error) *wireError { +func toWireError(err error) *WireError { if err == nil { // no error, the response is complete return nil } - if err, ok := err.(*wireError); ok { + if err, ok := err.(*WireError); ok { // already a wire error, just use it return err } - result := &wireError{Message: err.Error()} - var wrapped *wireError + result := &WireError{Message: err.Error()} + var wrapped *WireError if errors.As(err, &wrapped) { // if we wrapped a wire error, keep the code from the wrapped error // but the message from the outer error diff --git a/internal/jsonrpc2_v2/wire.go b/internal/jsonrpc2_v2/wire.go index c8dc9ebf1bf..8f60fc62766 100644 --- a/internal/jsonrpc2_v2/wire.go +++ b/internal/jsonrpc2_v2/wire.go @@ -49,11 +49,11 @@ type wireCombined struct { Method string `json:"method,omitempty"` Params 
json.RawMessage `json:"params,omitempty"` Result json.RawMessage `json:"result,omitempty"` - Error *wireError `json:"error,omitempty"` + Error *WireError `json:"error,omitempty"` } -// wireError represents a structured error in a Response. -type wireError struct { +// WireError represents a structured error in a Response. +type WireError struct { // Code is an error code indicating the type of failure. Code int64 `json:"code"` // Message is a short description of the error. @@ -67,18 +67,18 @@ type wireError struct { // only be used to build errors for application specific codes as allowed by the // specification. func NewError(code int64, message string) error { - return &wireError{ + return &WireError{ Code: code, Message: message, } } -func (err *wireError) Error() string { +func (err *WireError) Error() string { return err.Message } -func (err *wireError) Is(other error) bool { - w, ok := other.(*wireError) +func (err *WireError) Is(other error) bool { + w, ok := other.(*WireError) if !ok { return false } diff --git a/internal/packagesinternal/packages.go b/internal/packagesinternal/packages.go index d9950b1f0be..44719de173b 100644 --- a/internal/packagesinternal/packages.go +++ b/internal/packagesinternal/packages.go @@ -5,10 +5,6 @@ // Package packagesinternal exposes internal-only fields from go/packages. 
package packagesinternal -import ( - "golang.org/x/tools/internal/gocommand" -) - var GetForTest = func(p interface{}) string { return "" } var GetDepsErrors = func(p interface{}) []*PackageError { return nil } @@ -18,10 +14,6 @@ type PackageError struct { Err string // the error itself } -var GetGoCmdRunner = func(config interface{}) *gocommand.Runner { return nil } - -var SetGoCmdRunner = func(config interface{}, runner *gocommand.Runner) {} - var TypecheckCgo int var DepsErrors int // must be set as a LoadMode to call GetDepsErrors var ForTest int // must be set as a LoadMode to call GetForTest diff --git a/internal/refactor/inline/analyzer/analyzer.go b/internal/refactor/inline/analyzer/analyzer.go index bb7d9d0d512..704ef6ff945 100644 --- a/internal/refactor/inline/analyzer/analyzer.go +++ b/internal/refactor/inline/analyzer/analyzer.go @@ -104,7 +104,7 @@ func run(pass *analysis.Pass) (interface{}, error) { if !ok { var fact inlineMeFact if pass.ImportObjectFact(fn, &fact) { - callee = fact.callee + callee = fact.Callee inlinable[fn] = callee } } @@ -157,9 +157,9 @@ func run(pass *analysis.Pass) (interface{}, error) { return nil, nil } -type inlineMeFact struct{ callee *inline.Callee } +type inlineMeFact struct{ Callee *inline.Callee } -func (f *inlineMeFact) String() string { return "inlineme " + f.callee.String() } +func (f *inlineMeFact) String() string { return "inlineme " + f.Callee.String() } func (*inlineMeFact) AFact() {} func discard(string, ...any) {} diff --git a/internal/refactor/inline/callee.go b/internal/refactor/inline/callee.go index c9a7ea0c8f2..e5620592300 100644 --- a/internal/refactor/inline/callee.go +++ b/internal/refactor/inline/callee.go @@ -149,7 +149,7 @@ func AnalyzeCallee(logf func(string, ...any), fset *token.FileSet, pkg *types.Pa case *ast.CompositeLit: // Check for struct literals that refer to unexported fields, // whether keyed or unkeyed. (Logic assumes well-typedness.) 
- litType := deref(info.TypeOf(n)) + litType := typeparams.Deref(info.TypeOf(n)) if s, ok := typeparams.CoreType(litType).(*types.Struct); ok { if n.Type != nil { visit(n.Type) @@ -496,14 +496,6 @@ func addShadows(shadows map[string]bool, info *types.Info, exclude string, stack return shadows } -// deref removes a pointer type constructor from the core type of t. -func deref(t types.Type) types.Type { - if ptr, ok := typeparams.CoreType(t).(*types.Pointer); ok { - return ptr.Elem() - } - return t -} - func isField(obj types.Object) bool { if v, ok := obj.(*types.Var); ok && v.IsField() { return true diff --git a/internal/refactor/inline/escape.go b/internal/refactor/inline/escape.go index 795ad4feab6..a3f5e555e9f 100644 --- a/internal/refactor/inline/escape.go +++ b/internal/refactor/inline/escape.go @@ -72,7 +72,7 @@ func escape(info *types.Info, root ast.Node, f func(v *types.Var, escapes bool)) if sel, ok := n.Fun.(*ast.SelectorExpr); ok { if seln, ok := info.Selections[sel]; ok && seln.Kind() == types.MethodVal && - isPointer(seln.Obj().Type().(*types.Signature).Recv().Type()) { + isPointer(seln.Obj().Type().Underlying().(*types.Signature).Recv().Type()) { tArg, indirect := effectiveReceiver(seln) if !indirect && !isPointer(tArg) { lvalue(sel.X, true) // &x.f diff --git a/internal/refactor/inline/falcon.go b/internal/refactor/inline/falcon.go index 9863e8dbcfb..de054342be3 100644 --- a/internal/refactor/inline/falcon.go +++ b/internal/refactor/inline/falcon.go @@ -17,6 +17,7 @@ import ( "strings" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -64,7 +65,7 @@ type falconType struct { // cannot be eliminated by substitution as its argument value is // negative. 
// -// - When inlining sub("", 2, 1), all three parameters cannot be be +// - When inlining sub("", 2, 1), all three parameters cannot be // simultaneously eliminated by substitution without violating i // <= len(s) and j <= len(s), but the parameters i and j could be // safely eliminated without s. @@ -422,7 +423,7 @@ func (st *falconState) expr(e ast.Expr) (res any) { // = types.TypeAndValue | as if e.Type != nil { _ = st.expr(e.Type) } - t := deref(typeparams.CoreType(deref(tv.Type))) + t := aliases.Unalias(typeparams.Deref(tv.Type)) var uniques []ast.Expr for _, elt := range e.Elts { if kv, ok := elt.(*ast.KeyValueExpr); ok { @@ -507,7 +508,7 @@ func (st *falconState) expr(e ast.Expr) (res any) { // = types.TypeAndValue | as if kX != nil { // string x = st.toExpr(kX) - } else if arr, ok := deref(st.info.TypeOf(e.X).Underlying()).(*types.Array); ok { + } else if arr, ok := typeparams.CoreType(typeparams.Deref(st.info.TypeOf(e.X))).(*types.Array); ok { // array, *array x = &ast.CompositeLit{ Type: &ast.ArrayType{ @@ -572,7 +573,7 @@ func (st *falconState) expr(e ast.Expr) (res any) { // = types.TypeAndValue | as if kX != nil { // string x = st.toExpr(kX) - } else if arr, ok := deref(st.info.TypeOf(e.X).Underlying()).(*types.Array); ok { + } else if arr, ok := typeparams.CoreType(typeparams.Deref(st.info.TypeOf(e.X))).(*types.Array); ok { // array, *array x = &ast.CompositeLit{ Type: &ast.ArrayType{ diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index 06f64013c79..c7ffbb215dd 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -176,7 +176,7 @@ func Inline(logf func(string, ...any), caller *Caller, callee *Callee) ([]byte, // function body. In essence the question is: which // is more likely to have comments? 
// Usually the callee body will be larger and more - // statement-heavy than the the arguments, but a + // statement-heavy than the arguments, but a // strategy may widen the scope of the replacement // (res.old) from CallExpr to, say, its enclosing // block, so the caller nodes dominate. @@ -485,7 +485,7 @@ func inline(logf func(string, ...any), caller *Caller, callee *gobCallee) (*resu // check not shadowed at caller. found := caller.lookup(obj.Name) // always finds something if found.Pos().IsValid() { - return nil, fmt.Errorf("cannot inline because built-in %q is shadowed in caller by a %s (line %d)", + return nil, fmt.Errorf("cannot inline, because the callee refers to built-in %q, which in the caller is shadowed by a %s (declared at line %d)", obj.Name, objectKind(found), caller.Fset.PositionFor(found.Pos(), false).Line) } @@ -505,8 +505,9 @@ func inline(logf func(string, ...any), caller *Caller, callee *gobCallee) (*resu // around the refactored signature. found := caller.lookup(obj.Name) if found != nil && !isPkgLevel(found) { - return nil, fmt.Errorf("cannot inline because %q is shadowed in caller by a %s (line %d)", - obj.Name, objectKind(found), + return nil, fmt.Errorf("cannot inline, because the callee refers to %s %q, which in the caller is shadowed by a %s (declared at line %d)", + obj.Kind, obj.Name, + objectKind(found), caller.Fset.PositionFor(found.Pos(), false).Line) } } else { @@ -1133,8 +1134,7 @@ func arguments(caller *Caller, calleeDecl *ast.FuncDecl, assign1 func(*types.Var // updating arg.{expr,typ}. 
indices := seln.Index() for _, index := range indices[:len(indices)-1] { - t := deref(arg.typ) - fld := typeparams.CoreType(t).(*types.Struct).Field(index) + fld := typeparams.CoreType(typeparams.Deref(arg.typ)).(*types.Struct).Field(index) if fld.Pkg() != caller.Types && !fld.Exported() { return nil, fmt.Errorf("in %s, implicit reference to unexported field .%s cannot be made explicit", debugFormatNode(caller.Fset, caller.Call.Fun), @@ -1153,7 +1153,7 @@ func arguments(caller *Caller, calleeDecl *ast.FuncDecl, assign1 func(*types.Var // Make * or & explicit. argIsPtr := isPointer(arg.typ) - paramIsPtr := isPointer(seln.Obj().Type().(*types.Signature).Recv().Type()) + paramIsPtr := isPointer(seln.Obj().Type().Underlying().(*types.Signature).Recv().Type()) if !argIsPtr && paramIsPtr { // &recv arg.expr = &ast.UnaryExpr{Op: token.AND, X: arg.expr} @@ -1161,7 +1161,7 @@ func arguments(caller *Caller, calleeDecl *ast.FuncDecl, assign1 func(*types.Var } else if argIsPtr && !paramIsPtr { // *recv arg.expr = &ast.StarExpr{X: arg.expr} - arg.typ = deref(arg.typ) + arg.typ = typeparams.Deref(arg.typ) arg.duplicable = false arg.pure = false } diff --git a/internal/refactor/inline/inline_test.go b/internal/refactor/inline/inline_test.go index 525be74ea60..c90e35c1872 100644 --- a/internal/refactor/inline/inline_test.go +++ b/internal/refactor/inline/inline_test.go @@ -1285,7 +1285,7 @@ func runTests(t *testing.T, tests []testcase) { // Want error? 
if rest := strings.TrimPrefix(test.want, "error: "); rest != test.want { if err == nil { - t.Fatalf("unexpected sucess: want error matching %q", rest) + t.Fatalf("unexpected success: want error matching %q", rest) } msg := err.Error() if ok, err := regexp.MatchString(rest, msg); err != nil { diff --git a/internal/refactor/inline/testdata/err-shadow-builtin.txtar b/internal/refactor/inline/testdata/err-shadow-builtin.txtar index 543d38fe540..34ea586ab3e 100644 --- a/internal/refactor/inline/testdata/err-shadow-builtin.txtar +++ b/internal/refactor/inline/testdata/err-shadow-builtin.txtar @@ -10,7 +10,7 @@ package a func _() { const nil = 1 - _ = f() //@ inline(re"f", re"nil.*shadowed.*by.*const .line 4") + _ = f() //@ inline(re"f", re"nil.*shadowed.*by.*const.*line 4") } func f() *int { return nil } @@ -20,7 +20,7 @@ package a func _() { type append int - g(nil) //@ inline(re"g", re"append.*shadowed.*by.*typename .line 4") + g(nil) //@ inline(re"g", re"append.*shadowed.*by.*typename.*line 4") } func g(x []int) { _ = append(x, x...) 
} @@ -30,7 +30,7 @@ package a func _() { type int uint8 - _ = h(0) //@ inline(re"h", re"int.*shadowed.*by.*typename .line 4") + _ = h(0) //@ inline(re"h", re"int.*shadowed.*by.*typename.*line 4") } func h(x int) int { return x + 1 } diff --git a/internal/refactor/inline/testdata/err-shadow-pkg.txtar b/internal/refactor/inline/testdata/err-shadow-pkg.txtar index 4338b8b31cd..792418dd453 100644 --- a/internal/refactor/inline/testdata/err-shadow-pkg.txtar +++ b/internal/refactor/inline/testdata/err-shadow-pkg.txtar @@ -15,7 +15,7 @@ package a func _() { f() //@ inline(re"f", result) const v = 1 - f() //@ inline(re"f", re"v.*shadowed.*by.*const .line 5") + f() //@ inline(re"f", re"v.*shadowed.*by.*const.*line 5") } func f() int { return v } @@ -28,7 +28,7 @@ package a func _() { _ = v //@ inline(re"f", result) const v = 1 - f() //@ inline(re"f", re"v.*shadowed.*by.*const .line 5") + f() //@ inline(re"f", re"v.*shadowed.*by.*const.*line 5") } func f() int { return v } diff --git a/internal/refactor/inline/util.go b/internal/refactor/inline/util.go index 267ef745e32..475cc7141bc 100644 --- a/internal/refactor/inline/util.go +++ b/internal/refactor/inline/util.go @@ -120,8 +120,10 @@ func convert(T, x ast.Expr) *ast.CallExpr { } } -// isPointer reports whether t is a pointer type. -func isPointer(t types.Type) bool { return t != deref(t) } +// isPointer reports whether t's core type is a pointer. +func isPointer(t types.Type) bool { + return is[*types.Pointer](typeparams.CoreType(t)) +} // indirectSelection is like seln.Indirect() without bug #8353. 
func indirectSelection(seln *types.Selection) bool { @@ -132,7 +134,7 @@ func indirectSelection(seln *types.Selection) bool { return true } - tParam := seln.Obj().Type().(*types.Signature).Recv().Type() + tParam := seln.Obj().Type().Underlying().(*types.Signature).Recv().Type() return isPointer(tArg) && !isPointer(tParam) // implicit * } @@ -150,9 +152,9 @@ func effectiveReceiver(seln *types.Selection) (types.Type, bool) { indices := seln.Index() indirect := false for _, index := range indices[:len(indices)-1] { - if tElem := deref(t); tElem != t { + if isPointer(t) { indirect = true - t = tElem + t = typeparams.MustDeref(t) } t = typeparams.CoreType(t).(*types.Struct).Field(index).Type() } diff --git a/internal/robustio/robustio_posix.go b/internal/robustio/robustio_posix.go index 8aa13d02786..cf74865d0b5 100644 --- a/internal/robustio/robustio_posix.go +++ b/internal/robustio/robustio_posix.go @@ -5,8 +5,6 @@ //go:build !windows && !plan9 // +build !windows,!plan9 -// TODO(adonovan): use 'unix' tag when go1.19 can be assumed. - package robustio import ( diff --git a/internal/robustio/robustio_test.go b/internal/robustio/robustio_test.go index 10244e21d69..030090db93a 100644 --- a/internal/robustio/robustio_test.go +++ b/internal/robustio/robustio_test.go @@ -14,6 +14,21 @@ import ( "golang.org/x/tools/internal/robustio" ) +func checkOSLink(t *testing.T, err error) { + if err == nil { + return + } + + t.Helper() + switch runtime.GOOS { + case "aix", "darwin", "dragonfly", "freebsd", "illumos", "linux", "netbsd", "openbsd", "solaris": + // Non-mobile OS known to always support os.Symlink and os.Link. + t.Fatal(err) + default: + t.Skipf("skipping due to error on %v: %v", runtime.GOOS, err) + } +} + func TestFileInfo(t *testing.T) { // A nonexistent file has no ID. nonexistent := filepath.Join(t.TempDir(), "nonexistent") @@ -51,11 +66,10 @@ func TestFileInfo(t *testing.T) { } // A symbolic link has the same ID as its target. 
- if runtime.GOOS != "plan9" { + t.Run("symlink", func(t *testing.T) { symlink := filepath.Join(t.TempDir(), "symlink") - if err := os.Symlink(real, symlink); err != nil { - t.Fatalf("can't create symbolic link: %v", err) - } + checkOSLink(t, os.Symlink(real, symlink)) + symlinkID, symlinkMtime, err := robustio.GetFileID(symlink) if err != nil { t.Fatalf("can't get ID of symbolic link: %v", err) @@ -66,14 +80,13 @@ func TestFileInfo(t *testing.T) { if !realMtime.Equal(symlinkMtime) { t.Errorf("realMtime %v != symlinkMtime %v", realMtime, symlinkMtime) } - } + }) // Two hard-linked files have the same ID. - if runtime.GOOS != "plan9" && runtime.GOOS != "android" { + t.Run("hardlink", func(t *testing.T) { hardlink := filepath.Join(t.TempDir(), "hardlink") - if err := os.Link(real, hardlink); err != nil { - t.Fatal(err) - } + checkOSLink(t, os.Link(real, hardlink)) + hardlinkID, hardlinkMtime, err := robustio.GetFileID(hardlink) if err != nil { t.Fatalf("can't get ID of hard link: %v", err) @@ -84,5 +97,5 @@ func TestFileInfo(t *testing.T) { if !realMtime.Equal(hardlinkMtime) { t.Errorf("realMtime %v != hardlinkMtime %v", realMtime, hardlinkMtime) } - } + }) } diff --git a/internal/stdlib/generate.go b/internal/stdlib/generate.go new file mode 100644 index 00000000000..ff2691c8e60 --- /dev/null +++ b/internal/stdlib/generate.go @@ -0,0 +1,204 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build ignore +// +build ignore + +// The generate command reads all the GOROOT/api/go1.*.txt files and +// generates a single combined manifest.go file containing the Go +// standard library API symbols along with versions. 
+package main + +import ( + "bytes" + "cmp" + "errors" + "fmt" + "go/format" + "go/types" + "io/fs" + "log" + "os" + "path/filepath" + "regexp" + "runtime" + "slices" + "strings" + + "golang.org/x/tools/go/packages" +) + +func main() { + // Read and parse the GOROOT/api manifests. + symRE := regexp.MustCompile(`^pkg (\S+).*?, (var|func|type|const|method \([^)]*\)) ([A-Z]\w*)(.*)`) + pkgs := make(map[string]map[string]symInfo) // package -> symbol -> info + for minor := 0; ; minor++ { + base := "go1.txt" + if minor > 0 { + base = fmt.Sprintf("go1.%d.txt", minor) + } + filename := filepath.Join(runtime.GOROOT(), "api", base) + data, err := os.ReadFile(filename) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + break // all caught up + } + log.Fatal(err) + } + + // parse + for linenum, line := range strings.Split(string(data), "\n") { + if line == "" || strings.HasPrefix(line, "#") { + continue + } + m := symRE.FindStringSubmatch(line) + if m == nil { + log.Fatalf("invalid input: %s:%d: %s", filename, linenum+1, line) + } + path, kind, sym, rest := m[1], m[2], m[3], m[4] + + if _, recv, ok := strings.Cut(kind, "method "); ok { + // e.g. "method (*Func) Pos() token.Pos" + kind = "method" + + recv := removeTypeParam(recv) // (*Foo[T]) -> (*Foo) + + sym = recv + "." + sym // (*T).m + + } else if _, field, ok := strings.Cut(rest, " struct, "); ok && kind == "type" { + // e.g. "type ParenExpr struct, Lparen token.Pos" + kind = "field" + name, typ, _ := strings.Cut(field, " ") + + // The api script uses the name + // "embedded" (ambiguously) for + // the name of an anonymous field. + if name == "embedded" { + // Strip "*pkg.T" down to "T". + typ = strings.TrimPrefix(typ, "*") + if _, after, ok := strings.Cut(typ, "."); ok { + typ = after + } + typ = removeTypeParam(typ) // embedded Foo[T] -> Foo + name = typ + } + + sym += "." 
+ name // T.f + } + + symbols, ok := pkgs[path] + if !ok { + symbols = make(map[string]symInfo) + pkgs[path] = symbols + } + + // Don't overwrite earlier entries: + // enums are redeclared in later versions + // as their encoding changes; + // deprecations count as updates too. + if _, ok := symbols[sym]; !ok { + symbols[sym] = symInfo{kind, minor} + } + } + } + + // The APIs of the syscall/js and unsafe packages need to be computed explicitly, + // because they're not included in the GOROOT/api/go1.*.txt files at this time. + pkgs["syscall/js"] = loadSymbols("syscall/js", "GOOS=js", "GOARCH=wasm") + pkgs["unsafe"] = exportedSymbols(types.Unsafe) // TODO(adonovan): set correct versions + + // Write the combined manifest. + var buf bytes.Buffer + buf.WriteString(`// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate.go. DO NOT EDIT. + +package stdlib + +var PackageSymbols = map[string][]Symbol{ +`) + + for _, path := range sortedKeys(pkgs) { + pkg := pkgs[path] + fmt.Fprintf(&buf, "\t%q: {\n", path) + for _, name := range sortedKeys(pkg) { + info := pkg[name] + fmt.Fprintf(&buf, "\t\t{%q, %s, %d},\n", + name, strings.Title(info.kind), info.minor) + } + fmt.Fprintln(&buf, "},") + } + fmt.Fprintln(&buf, "}") + fmtbuf, err := format.Source(buf.Bytes()) + if err != nil { + log.Fatal(err) + } + if err := os.WriteFile("manifest.go", fmtbuf, 0666); err != nil { + log.Fatal(err) + } +} + +type symInfo struct { + kind string // e.g. "func" + minor int // go1.%d +} + +// loadSymbols computes the exported symbols in the specified package +// by parsing and type-checking the current source. 
+func loadSymbols(pkg string, extraEnv ...string) map[string]symInfo { + pkgs, err := packages.Load(&packages.Config{ + Mode: packages.NeedTypes, + Env: append(os.Environ(), extraEnv...), + }, pkg) + if err != nil { + log.Fatalln(err) + } else if len(pkgs) != 1 { + log.Fatalf("got %d packages, want one package %q", len(pkgs), pkg) + } + return exportedSymbols(pkgs[0].Types) +} + +func exportedSymbols(pkg *types.Package) map[string]symInfo { + symbols := make(map[string]symInfo) + for _, name := range pkg.Scope().Names() { + if obj := pkg.Scope().Lookup(name); obj.Exported() { + var kind string + switch obj.(type) { + case *types.Func, *types.Builtin: + kind = "func" + case *types.Const: + kind = "const" + case *types.Var: + kind = "var" + case *types.TypeName: + kind = "type" + // TODO(adonovan): expand fields and methods of syscall/js.* + default: + log.Fatalf("unexpected object type: %v", obj) + } + symbols[name] = symInfo{kind: kind, minor: 0} // pretend go1.0 + } + } + return symbols +} + +func sortedKeys[M ~map[K]V, K cmp.Ordered, V any](m M) []K { + r := make([]K, 0, len(m)) + for k := range m { + r = append(r, k) + } + slices.Sort(r) + return r +} + +func removeTypeParam(s string) string { + i := strings.IndexByte(s, '[') + j := strings.LastIndexByte(s, ']') + if i > 0 && j > i { + s = s[:i] + s[j+len("["):] + } + return s +} diff --git a/internal/stdlib/manifest.go b/internal/stdlib/manifest.go new file mode 100644 index 00000000000..fd6892075ee --- /dev/null +++ b/internal/stdlib/manifest.go @@ -0,0 +1,17320 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate.go. DO NOT EDIT. 
+ +package stdlib + +var PackageSymbols = map[string][]Symbol{ + "archive/tar": { + {"(*Header).FileInfo", Method, 1}, + {"(*Reader).Next", Method, 0}, + {"(*Reader).Read", Method, 0}, + {"(*Writer).AddFS", Method, 22}, + {"(*Writer).Close", Method, 0}, + {"(*Writer).Flush", Method, 0}, + {"(*Writer).Write", Method, 0}, + {"(*Writer).WriteHeader", Method, 0}, + {"(Format).String", Method, 10}, + {"ErrFieldTooLong", Var, 0}, + {"ErrHeader", Var, 0}, + {"ErrInsecurePath", Var, 20}, + {"ErrWriteAfterClose", Var, 0}, + {"ErrWriteTooLong", Var, 0}, + {"FileInfoHeader", Func, 1}, + {"Format", Type, 10}, + {"FormatGNU", Const, 10}, + {"FormatPAX", Const, 10}, + {"FormatUSTAR", Const, 10}, + {"FormatUnknown", Const, 10}, + {"Header", Type, 0}, + {"Header.AccessTime", Field, 0}, + {"Header.ChangeTime", Field, 0}, + {"Header.Devmajor", Field, 0}, + {"Header.Devminor", Field, 0}, + {"Header.Format", Field, 10}, + {"Header.Gid", Field, 0}, + {"Header.Gname", Field, 0}, + {"Header.Linkname", Field, 0}, + {"Header.ModTime", Field, 0}, + {"Header.Mode", Field, 0}, + {"Header.Name", Field, 0}, + {"Header.PAXRecords", Field, 10}, + {"Header.Size", Field, 0}, + {"Header.Typeflag", Field, 0}, + {"Header.Uid", Field, 0}, + {"Header.Uname", Field, 0}, + {"Header.Xattrs", Field, 3}, + {"NewReader", Func, 0}, + {"NewWriter", Func, 0}, + {"Reader", Type, 0}, + {"TypeBlock", Const, 0}, + {"TypeChar", Const, 0}, + {"TypeCont", Const, 0}, + {"TypeDir", Const, 0}, + {"TypeFifo", Const, 0}, + {"TypeGNULongLink", Const, 1}, + {"TypeGNULongName", Const, 1}, + {"TypeGNUSparse", Const, 3}, + {"TypeLink", Const, 0}, + {"TypeReg", Const, 0}, + {"TypeRegA", Const, 0}, + {"TypeSymlink", Const, 0}, + {"TypeXGlobalHeader", Const, 0}, + {"TypeXHeader", Const, 0}, + {"Writer", Type, 0}, + }, + "archive/zip": { + {"(*File).DataOffset", Method, 2}, + {"(*File).FileInfo", Method, 0}, + {"(*File).ModTime", Method, 0}, + {"(*File).Mode", Method, 0}, + {"(*File).Open", Method, 0}, + {"(*File).OpenRaw", Method, 
17}, + {"(*File).SetModTime", Method, 0}, + {"(*File).SetMode", Method, 0}, + {"(*FileHeader).FileInfo", Method, 0}, + {"(*FileHeader).ModTime", Method, 0}, + {"(*FileHeader).Mode", Method, 0}, + {"(*FileHeader).SetModTime", Method, 0}, + {"(*FileHeader).SetMode", Method, 0}, + {"(*ReadCloser).Close", Method, 0}, + {"(*ReadCloser).Open", Method, 16}, + {"(*ReadCloser).RegisterDecompressor", Method, 6}, + {"(*Reader).Open", Method, 16}, + {"(*Reader).RegisterDecompressor", Method, 6}, + {"(*Writer).AddFS", Method, 22}, + {"(*Writer).Close", Method, 0}, + {"(*Writer).Copy", Method, 17}, + {"(*Writer).Create", Method, 0}, + {"(*Writer).CreateHeader", Method, 0}, + {"(*Writer).CreateRaw", Method, 17}, + {"(*Writer).Flush", Method, 4}, + {"(*Writer).RegisterCompressor", Method, 6}, + {"(*Writer).SetComment", Method, 10}, + {"(*Writer).SetOffset", Method, 5}, + {"Compressor", Type, 2}, + {"Decompressor", Type, 2}, + {"Deflate", Const, 0}, + {"ErrAlgorithm", Var, 0}, + {"ErrChecksum", Var, 0}, + {"ErrFormat", Var, 0}, + {"ErrInsecurePath", Var, 20}, + {"File", Type, 0}, + {"File.FileHeader", Field, 0}, + {"FileHeader", Type, 0}, + {"FileHeader.CRC32", Field, 0}, + {"FileHeader.Comment", Field, 0}, + {"FileHeader.CompressedSize", Field, 0}, + {"FileHeader.CompressedSize64", Field, 1}, + {"FileHeader.CreatorVersion", Field, 0}, + {"FileHeader.ExternalAttrs", Field, 0}, + {"FileHeader.Extra", Field, 0}, + {"FileHeader.Flags", Field, 0}, + {"FileHeader.Method", Field, 0}, + {"FileHeader.Modified", Field, 10}, + {"FileHeader.ModifiedDate", Field, 0}, + {"FileHeader.ModifiedTime", Field, 0}, + {"FileHeader.Name", Field, 0}, + {"FileHeader.NonUTF8", Field, 10}, + {"FileHeader.ReaderVersion", Field, 0}, + {"FileHeader.UncompressedSize", Field, 0}, + {"FileHeader.UncompressedSize64", Field, 1}, + {"FileInfoHeader", Func, 0}, + {"NewReader", Func, 0}, + {"NewWriter", Func, 0}, + {"OpenReader", Func, 0}, + {"ReadCloser", Type, 0}, + {"ReadCloser.Reader", Field, 0}, + {"Reader", 
Type, 0}, + {"Reader.Comment", Field, 0}, + {"Reader.File", Field, 0}, + {"RegisterCompressor", Func, 2}, + {"RegisterDecompressor", Func, 2}, + {"Store", Const, 0}, + {"Writer", Type, 0}, + }, + "bufio": { + {"(*Reader).Buffered", Method, 0}, + {"(*Reader).Discard", Method, 5}, + {"(*Reader).Peek", Method, 0}, + {"(*Reader).Read", Method, 0}, + {"(*Reader).ReadByte", Method, 0}, + {"(*Reader).ReadBytes", Method, 0}, + {"(*Reader).ReadLine", Method, 0}, + {"(*Reader).ReadRune", Method, 0}, + {"(*Reader).ReadSlice", Method, 0}, + {"(*Reader).ReadString", Method, 0}, + {"(*Reader).Reset", Method, 2}, + {"(*Reader).Size", Method, 10}, + {"(*Reader).UnreadByte", Method, 0}, + {"(*Reader).UnreadRune", Method, 0}, + {"(*Reader).WriteTo", Method, 1}, + {"(*Scanner).Buffer", Method, 6}, + {"(*Scanner).Bytes", Method, 1}, + {"(*Scanner).Err", Method, 1}, + {"(*Scanner).Scan", Method, 1}, + {"(*Scanner).Split", Method, 1}, + {"(*Scanner).Text", Method, 1}, + {"(*Writer).Available", Method, 0}, + {"(*Writer).AvailableBuffer", Method, 18}, + {"(*Writer).Buffered", Method, 0}, + {"(*Writer).Flush", Method, 0}, + {"(*Writer).ReadFrom", Method, 1}, + {"(*Writer).Reset", Method, 2}, + {"(*Writer).Size", Method, 10}, + {"(*Writer).Write", Method, 0}, + {"(*Writer).WriteByte", Method, 0}, + {"(*Writer).WriteRune", Method, 0}, + {"(*Writer).WriteString", Method, 0}, + {"(ReadWriter).Available", Method, 0}, + {"(ReadWriter).AvailableBuffer", Method, 18}, + {"(ReadWriter).Discard", Method, 5}, + {"(ReadWriter).Flush", Method, 0}, + {"(ReadWriter).Peek", Method, 0}, + {"(ReadWriter).Read", Method, 0}, + {"(ReadWriter).ReadByte", Method, 0}, + {"(ReadWriter).ReadBytes", Method, 0}, + {"(ReadWriter).ReadFrom", Method, 1}, + {"(ReadWriter).ReadLine", Method, 0}, + {"(ReadWriter).ReadRune", Method, 0}, + {"(ReadWriter).ReadSlice", Method, 0}, + {"(ReadWriter).ReadString", Method, 0}, + {"(ReadWriter).UnreadByte", Method, 0}, + {"(ReadWriter).UnreadRune", Method, 0}, + {"(ReadWriter).Write", 
Method, 0}, + {"(ReadWriter).WriteByte", Method, 0}, + {"(ReadWriter).WriteRune", Method, 0}, + {"(ReadWriter).WriteString", Method, 0}, + {"(ReadWriter).WriteTo", Method, 1}, + {"ErrAdvanceTooFar", Var, 1}, + {"ErrBadReadCount", Var, 15}, + {"ErrBufferFull", Var, 0}, + {"ErrFinalToken", Var, 6}, + {"ErrInvalidUnreadByte", Var, 0}, + {"ErrInvalidUnreadRune", Var, 0}, + {"ErrNegativeAdvance", Var, 1}, + {"ErrNegativeCount", Var, 0}, + {"ErrTooLong", Var, 1}, + {"MaxScanTokenSize", Const, 1}, + {"NewReadWriter", Func, 0}, + {"NewReader", Func, 0}, + {"NewReaderSize", Func, 0}, + {"NewScanner", Func, 1}, + {"NewWriter", Func, 0}, + {"NewWriterSize", Func, 0}, + {"ReadWriter", Type, 0}, + {"ReadWriter.Reader", Field, 0}, + {"ReadWriter.Writer", Field, 0}, + {"Reader", Type, 0}, + {"ScanBytes", Func, 1}, + {"ScanLines", Func, 1}, + {"ScanRunes", Func, 1}, + {"ScanWords", Func, 1}, + {"Scanner", Type, 1}, + {"SplitFunc", Type, 1}, + {"Writer", Type, 0}, + }, + "bytes": { + {"(*Buffer).Available", Method, 21}, + {"(*Buffer).AvailableBuffer", Method, 21}, + {"(*Buffer).Bytes", Method, 0}, + {"(*Buffer).Cap", Method, 5}, + {"(*Buffer).Grow", Method, 1}, + {"(*Buffer).Len", Method, 0}, + {"(*Buffer).Next", Method, 0}, + {"(*Buffer).Read", Method, 0}, + {"(*Buffer).ReadByte", Method, 0}, + {"(*Buffer).ReadBytes", Method, 0}, + {"(*Buffer).ReadFrom", Method, 0}, + {"(*Buffer).ReadRune", Method, 0}, + {"(*Buffer).ReadString", Method, 0}, + {"(*Buffer).Reset", Method, 0}, + {"(*Buffer).String", Method, 0}, + {"(*Buffer).Truncate", Method, 0}, + {"(*Buffer).UnreadByte", Method, 0}, + {"(*Buffer).UnreadRune", Method, 0}, + {"(*Buffer).Write", Method, 0}, + {"(*Buffer).WriteByte", Method, 0}, + {"(*Buffer).WriteRune", Method, 0}, + {"(*Buffer).WriteString", Method, 0}, + {"(*Buffer).WriteTo", Method, 0}, + {"(*Reader).Len", Method, 0}, + {"(*Reader).Read", Method, 0}, + {"(*Reader).ReadAt", Method, 0}, + {"(*Reader).ReadByte", Method, 0}, + {"(*Reader).ReadRune", Method, 0}, + 
{"(*Reader).Reset", Method, 7}, + {"(*Reader).Seek", Method, 0}, + {"(*Reader).Size", Method, 5}, + {"(*Reader).UnreadByte", Method, 0}, + {"(*Reader).UnreadRune", Method, 0}, + {"(*Reader).WriteTo", Method, 1}, + {"Buffer", Type, 0}, + {"Clone", Func, 20}, + {"Compare", Func, 0}, + {"Contains", Func, 0}, + {"ContainsAny", Func, 7}, + {"ContainsFunc", Func, 21}, + {"ContainsRune", Func, 7}, + {"Count", Func, 0}, + {"Cut", Func, 18}, + {"CutPrefix", Func, 20}, + {"CutSuffix", Func, 20}, + {"Equal", Func, 0}, + {"EqualFold", Func, 0}, + {"ErrTooLarge", Var, 0}, + {"Fields", Func, 0}, + {"FieldsFunc", Func, 0}, + {"HasPrefix", Func, 0}, + {"HasSuffix", Func, 0}, + {"Index", Func, 0}, + {"IndexAny", Func, 0}, + {"IndexByte", Func, 0}, + {"IndexFunc", Func, 0}, + {"IndexRune", Func, 0}, + {"Join", Func, 0}, + {"LastIndex", Func, 0}, + {"LastIndexAny", Func, 0}, + {"LastIndexByte", Func, 5}, + {"LastIndexFunc", Func, 0}, + {"Map", Func, 0}, + {"MinRead", Const, 0}, + {"NewBuffer", Func, 0}, + {"NewBufferString", Func, 0}, + {"NewReader", Func, 0}, + {"Reader", Type, 0}, + {"Repeat", Func, 0}, + {"Replace", Func, 0}, + {"ReplaceAll", Func, 12}, + {"Runes", Func, 0}, + {"Split", Func, 0}, + {"SplitAfter", Func, 0}, + {"SplitAfterN", Func, 0}, + {"SplitN", Func, 0}, + {"Title", Func, 0}, + {"ToLower", Func, 0}, + {"ToLowerSpecial", Func, 0}, + {"ToTitle", Func, 0}, + {"ToTitleSpecial", Func, 0}, + {"ToUpper", Func, 0}, + {"ToUpperSpecial", Func, 0}, + {"ToValidUTF8", Func, 13}, + {"Trim", Func, 0}, + {"TrimFunc", Func, 0}, + {"TrimLeft", Func, 0}, + {"TrimLeftFunc", Func, 0}, + {"TrimPrefix", Func, 1}, + {"TrimRight", Func, 0}, + {"TrimRightFunc", Func, 0}, + {"TrimSpace", Func, 0}, + {"TrimSuffix", Func, 1}, + }, + "cmp": { + {"Compare", Func, 21}, + {"Less", Func, 21}, + {"Or", Func, 22}, + {"Ordered", Type, 21}, + }, + "compress/bzip2": { + {"(StructuralError).Error", Method, 0}, + {"NewReader", Func, 0}, + {"StructuralError", Type, 0}, + }, + "compress/flate": { + 
{"(*ReadError).Error", Method, 0}, + {"(*WriteError).Error", Method, 0}, + {"(*Writer).Close", Method, 0}, + {"(*Writer).Flush", Method, 0}, + {"(*Writer).Reset", Method, 2}, + {"(*Writer).Write", Method, 0}, + {"(CorruptInputError).Error", Method, 0}, + {"(InternalError).Error", Method, 0}, + {"BestCompression", Const, 0}, + {"BestSpeed", Const, 0}, + {"CorruptInputError", Type, 0}, + {"DefaultCompression", Const, 0}, + {"HuffmanOnly", Const, 7}, + {"InternalError", Type, 0}, + {"NewReader", Func, 0}, + {"NewReaderDict", Func, 0}, + {"NewWriter", Func, 0}, + {"NewWriterDict", Func, 0}, + {"NoCompression", Const, 0}, + {"ReadError", Type, 0}, + {"ReadError.Err", Field, 0}, + {"ReadError.Offset", Field, 0}, + {"Reader", Type, 0}, + {"Resetter", Type, 4}, + {"WriteError", Type, 0}, + {"WriteError.Err", Field, 0}, + {"WriteError.Offset", Field, 0}, + {"Writer", Type, 0}, + }, + "compress/gzip": { + {"(*Reader).Close", Method, 0}, + {"(*Reader).Multistream", Method, 4}, + {"(*Reader).Read", Method, 0}, + {"(*Reader).Reset", Method, 3}, + {"(*Writer).Close", Method, 0}, + {"(*Writer).Flush", Method, 1}, + {"(*Writer).Reset", Method, 2}, + {"(*Writer).Write", Method, 0}, + {"BestCompression", Const, 0}, + {"BestSpeed", Const, 0}, + {"DefaultCompression", Const, 0}, + {"ErrChecksum", Var, 0}, + {"ErrHeader", Var, 0}, + {"Header", Type, 0}, + {"Header.Comment", Field, 0}, + {"Header.Extra", Field, 0}, + {"Header.ModTime", Field, 0}, + {"Header.Name", Field, 0}, + {"Header.OS", Field, 0}, + {"HuffmanOnly", Const, 8}, + {"NewReader", Func, 0}, + {"NewWriter", Func, 0}, + {"NewWriterLevel", Func, 0}, + {"NoCompression", Const, 0}, + {"Reader", Type, 0}, + {"Reader.Header", Field, 0}, + {"Writer", Type, 0}, + {"Writer.Header", Field, 0}, + }, + "compress/lzw": { + {"(*Reader).Close", Method, 17}, + {"(*Reader).Read", Method, 17}, + {"(*Reader).Reset", Method, 17}, + {"(*Writer).Close", Method, 17}, + {"(*Writer).Reset", Method, 17}, + {"(*Writer).Write", Method, 17}, + {"LSB", 
Const, 0}, + {"MSB", Const, 0}, + {"NewReader", Func, 0}, + {"NewWriter", Func, 0}, + {"Order", Type, 0}, + {"Reader", Type, 17}, + {"Writer", Type, 17}, + }, + "compress/zlib": { + {"(*Writer).Close", Method, 0}, + {"(*Writer).Flush", Method, 0}, + {"(*Writer).Reset", Method, 2}, + {"(*Writer).Write", Method, 0}, + {"BestCompression", Const, 0}, + {"BestSpeed", Const, 0}, + {"DefaultCompression", Const, 0}, + {"ErrChecksum", Var, 0}, + {"ErrDictionary", Var, 0}, + {"ErrHeader", Var, 0}, + {"HuffmanOnly", Const, 8}, + {"NewReader", Func, 0}, + {"NewReaderDict", Func, 0}, + {"NewWriter", Func, 0}, + {"NewWriterLevel", Func, 0}, + {"NewWriterLevelDict", Func, 0}, + {"NoCompression", Const, 0}, + {"Resetter", Type, 4}, + {"Writer", Type, 0}, + }, + "container/heap": { + {"Fix", Func, 2}, + {"Init", Func, 0}, + {"Interface", Type, 0}, + {"Pop", Func, 0}, + {"Push", Func, 0}, + {"Remove", Func, 0}, + }, + "container/list": { + {"(*Element).Next", Method, 0}, + {"(*Element).Prev", Method, 0}, + {"(*List).Back", Method, 0}, + {"(*List).Front", Method, 0}, + {"(*List).Init", Method, 0}, + {"(*List).InsertAfter", Method, 0}, + {"(*List).InsertBefore", Method, 0}, + {"(*List).Len", Method, 0}, + {"(*List).MoveAfter", Method, 2}, + {"(*List).MoveBefore", Method, 2}, + {"(*List).MoveToBack", Method, 0}, + {"(*List).MoveToFront", Method, 0}, + {"(*List).PushBack", Method, 0}, + {"(*List).PushBackList", Method, 0}, + {"(*List).PushFront", Method, 0}, + {"(*List).PushFrontList", Method, 0}, + {"(*List).Remove", Method, 0}, + {"Element", Type, 0}, + {"Element.Value", Field, 0}, + {"List", Type, 0}, + {"New", Func, 0}, + }, + "container/ring": { + {"(*Ring).Do", Method, 0}, + {"(*Ring).Len", Method, 0}, + {"(*Ring).Link", Method, 0}, + {"(*Ring).Move", Method, 0}, + {"(*Ring).Next", Method, 0}, + {"(*Ring).Prev", Method, 0}, + {"(*Ring).Unlink", Method, 0}, + {"New", Func, 0}, + {"Ring", Type, 0}, + {"Ring.Value", Field, 0}, + }, + "context": { + {"AfterFunc", Func, 21}, + 
{"Background", Func, 7}, + {"CancelCauseFunc", Type, 20}, + {"CancelFunc", Type, 7}, + {"Canceled", Var, 7}, + {"Cause", Func, 20}, + {"Context", Type, 7}, + {"DeadlineExceeded", Var, 7}, + {"TODO", Func, 7}, + {"WithCancel", Func, 7}, + {"WithCancelCause", Func, 20}, + {"WithDeadline", Func, 7}, + {"WithDeadlineCause", Func, 21}, + {"WithTimeout", Func, 7}, + {"WithTimeoutCause", Func, 21}, + {"WithValue", Func, 7}, + {"WithoutCancel", Func, 21}, + }, + "crypto": { + {"(Hash).Available", Method, 0}, + {"(Hash).HashFunc", Method, 4}, + {"(Hash).New", Method, 0}, + {"(Hash).Size", Method, 0}, + {"(Hash).String", Method, 15}, + {"BLAKE2b_256", Const, 9}, + {"BLAKE2b_384", Const, 9}, + {"BLAKE2b_512", Const, 9}, + {"BLAKE2s_256", Const, 9}, + {"Decrypter", Type, 5}, + {"DecrypterOpts", Type, 5}, + {"Hash", Type, 0}, + {"MD4", Const, 0}, + {"MD5", Const, 0}, + {"MD5SHA1", Const, 0}, + {"PrivateKey", Type, 0}, + {"PublicKey", Type, 2}, + {"RIPEMD160", Const, 0}, + {"RegisterHash", Func, 0}, + {"SHA1", Const, 0}, + {"SHA224", Const, 0}, + {"SHA256", Const, 0}, + {"SHA384", Const, 0}, + {"SHA3_224", Const, 4}, + {"SHA3_256", Const, 4}, + {"SHA3_384", Const, 4}, + {"SHA3_512", Const, 4}, + {"SHA512", Const, 0}, + {"SHA512_224", Const, 5}, + {"SHA512_256", Const, 5}, + {"Signer", Type, 4}, + {"SignerOpts", Type, 4}, + }, + "crypto/aes": { + {"(KeySizeError).Error", Method, 0}, + {"BlockSize", Const, 0}, + {"KeySizeError", Type, 0}, + {"NewCipher", Func, 0}, + }, + "crypto/cipher": { + {"(StreamReader).Read", Method, 0}, + {"(StreamWriter).Close", Method, 0}, + {"(StreamWriter).Write", Method, 0}, + {"AEAD", Type, 2}, + {"Block", Type, 0}, + {"BlockMode", Type, 0}, + {"NewCBCDecrypter", Func, 0}, + {"NewCBCEncrypter", Func, 0}, + {"NewCFBDecrypter", Func, 0}, + {"NewCFBEncrypter", Func, 0}, + {"NewCTR", Func, 0}, + {"NewGCM", Func, 2}, + {"NewGCMWithNonceSize", Func, 5}, + {"NewGCMWithTagSize", Func, 11}, + {"NewOFB", Func, 0}, + {"Stream", Type, 0}, + {"StreamReader", Type, 
0}, + {"StreamReader.R", Field, 0}, + {"StreamReader.S", Field, 0}, + {"StreamWriter", Type, 0}, + {"StreamWriter.Err", Field, 0}, + {"StreamWriter.S", Field, 0}, + {"StreamWriter.W", Field, 0}, + }, + "crypto/des": { + {"(KeySizeError).Error", Method, 0}, + {"BlockSize", Const, 0}, + {"KeySizeError", Type, 0}, + {"NewCipher", Func, 0}, + {"NewTripleDESCipher", Func, 0}, + }, + "crypto/dsa": { + {"ErrInvalidPublicKey", Var, 0}, + {"GenerateKey", Func, 0}, + {"GenerateParameters", Func, 0}, + {"L1024N160", Const, 0}, + {"L2048N224", Const, 0}, + {"L2048N256", Const, 0}, + {"L3072N256", Const, 0}, + {"ParameterSizes", Type, 0}, + {"Parameters", Type, 0}, + {"Parameters.G", Field, 0}, + {"Parameters.P", Field, 0}, + {"Parameters.Q", Field, 0}, + {"PrivateKey", Type, 0}, + {"PrivateKey.PublicKey", Field, 0}, + {"PrivateKey.X", Field, 0}, + {"PublicKey", Type, 0}, + {"PublicKey.Parameters", Field, 0}, + {"PublicKey.Y", Field, 0}, + {"Sign", Func, 0}, + {"Verify", Func, 0}, + }, + "crypto/ecdh": { + {"(*PrivateKey).Bytes", Method, 20}, + {"(*PrivateKey).Curve", Method, 20}, + {"(*PrivateKey).ECDH", Method, 20}, + {"(*PrivateKey).Equal", Method, 20}, + {"(*PrivateKey).Public", Method, 20}, + {"(*PrivateKey).PublicKey", Method, 20}, + {"(*PublicKey).Bytes", Method, 20}, + {"(*PublicKey).Curve", Method, 20}, + {"(*PublicKey).Equal", Method, 20}, + {"Curve", Type, 20}, + {"P256", Func, 20}, + {"P384", Func, 20}, + {"P521", Func, 20}, + {"PrivateKey", Type, 20}, + {"PublicKey", Type, 20}, + {"X25519", Func, 20}, + }, + "crypto/ecdsa": { + {"(*PrivateKey).ECDH", Method, 20}, + {"(*PrivateKey).Equal", Method, 15}, + {"(*PrivateKey).Public", Method, 4}, + {"(*PrivateKey).Sign", Method, 4}, + {"(*PublicKey).ECDH", Method, 20}, + {"(*PublicKey).Equal", Method, 15}, + {"(PrivateKey).Add", Method, 0}, + {"(PrivateKey).Double", Method, 0}, + {"(PrivateKey).IsOnCurve", Method, 0}, + {"(PrivateKey).Params", Method, 0}, + {"(PrivateKey).ScalarBaseMult", Method, 0}, + 
{"(PrivateKey).ScalarMult", Method, 0}, + {"(PublicKey).Add", Method, 0}, + {"(PublicKey).Double", Method, 0}, + {"(PublicKey).IsOnCurve", Method, 0}, + {"(PublicKey).Params", Method, 0}, + {"(PublicKey).ScalarBaseMult", Method, 0}, + {"(PublicKey).ScalarMult", Method, 0}, + {"GenerateKey", Func, 0}, + {"PrivateKey", Type, 0}, + {"PrivateKey.D", Field, 0}, + {"PrivateKey.PublicKey", Field, 0}, + {"PublicKey", Type, 0}, + {"PublicKey.Curve", Field, 0}, + {"PublicKey.X", Field, 0}, + {"PublicKey.Y", Field, 0}, + {"Sign", Func, 0}, + {"SignASN1", Func, 15}, + {"Verify", Func, 0}, + {"VerifyASN1", Func, 15}, + }, + "crypto/ed25519": { + {"(*Options).HashFunc", Method, 20}, + {"(PrivateKey).Equal", Method, 15}, + {"(PrivateKey).Public", Method, 13}, + {"(PrivateKey).Seed", Method, 13}, + {"(PrivateKey).Sign", Method, 13}, + {"(PublicKey).Equal", Method, 15}, + {"GenerateKey", Func, 13}, + {"NewKeyFromSeed", Func, 13}, + {"Options", Type, 20}, + {"Options.Context", Field, 20}, + {"Options.Hash", Field, 20}, + {"PrivateKey", Type, 13}, + {"PrivateKeySize", Const, 13}, + {"PublicKey", Type, 13}, + {"PublicKeySize", Const, 13}, + {"SeedSize", Const, 13}, + {"Sign", Func, 13}, + {"SignatureSize", Const, 13}, + {"Verify", Func, 13}, + {"VerifyWithOptions", Func, 20}, + }, + "crypto/elliptic": { + {"(*CurveParams).Add", Method, 0}, + {"(*CurveParams).Double", Method, 0}, + {"(*CurveParams).IsOnCurve", Method, 0}, + {"(*CurveParams).Params", Method, 0}, + {"(*CurveParams).ScalarBaseMult", Method, 0}, + {"(*CurveParams).ScalarMult", Method, 0}, + {"Curve", Type, 0}, + {"CurveParams", Type, 0}, + {"CurveParams.B", Field, 0}, + {"CurveParams.BitSize", Field, 0}, + {"CurveParams.Gx", Field, 0}, + {"CurveParams.Gy", Field, 0}, + {"CurveParams.N", Field, 0}, + {"CurveParams.Name", Field, 5}, + {"CurveParams.P", Field, 0}, + {"GenerateKey", Func, 0}, + {"Marshal", Func, 0}, + {"MarshalCompressed", Func, 15}, + {"P224", Func, 0}, + {"P256", Func, 0}, + {"P384", Func, 0}, + {"P521", 
Func, 0}, + {"Unmarshal", Func, 0}, + {"UnmarshalCompressed", Func, 15}, + }, + "crypto/hmac": { + {"Equal", Func, 1}, + {"New", Func, 0}, + }, + "crypto/md5": { + {"BlockSize", Const, 0}, + {"New", Func, 0}, + {"Size", Const, 0}, + {"Sum", Func, 2}, + }, + "crypto/rand": { + {"Int", Func, 0}, + {"Prime", Func, 0}, + {"Read", Func, 0}, + {"Reader", Var, 0}, + }, + "crypto/rc4": { + {"(*Cipher).Reset", Method, 0}, + {"(*Cipher).XORKeyStream", Method, 0}, + {"(KeySizeError).Error", Method, 0}, + {"Cipher", Type, 0}, + {"KeySizeError", Type, 0}, + {"NewCipher", Func, 0}, + }, + "crypto/rsa": { + {"(*PSSOptions).HashFunc", Method, 4}, + {"(*PrivateKey).Decrypt", Method, 5}, + {"(*PrivateKey).Equal", Method, 15}, + {"(*PrivateKey).Precompute", Method, 0}, + {"(*PrivateKey).Public", Method, 4}, + {"(*PrivateKey).Sign", Method, 4}, + {"(*PrivateKey).Size", Method, 11}, + {"(*PrivateKey).Validate", Method, 0}, + {"(*PublicKey).Equal", Method, 15}, + {"(*PublicKey).Size", Method, 11}, + {"CRTValue", Type, 0}, + {"CRTValue.Coeff", Field, 0}, + {"CRTValue.Exp", Field, 0}, + {"CRTValue.R", Field, 0}, + {"DecryptOAEP", Func, 0}, + {"DecryptPKCS1v15", Func, 0}, + {"DecryptPKCS1v15SessionKey", Func, 0}, + {"EncryptOAEP", Func, 0}, + {"EncryptPKCS1v15", Func, 0}, + {"ErrDecryption", Var, 0}, + {"ErrMessageTooLong", Var, 0}, + {"ErrVerification", Var, 0}, + {"GenerateKey", Func, 0}, + {"GenerateMultiPrimeKey", Func, 0}, + {"OAEPOptions", Type, 5}, + {"OAEPOptions.Hash", Field, 5}, + {"OAEPOptions.Label", Field, 5}, + {"OAEPOptions.MGFHash", Field, 20}, + {"PKCS1v15DecryptOptions", Type, 5}, + {"PKCS1v15DecryptOptions.SessionKeyLen", Field, 5}, + {"PSSOptions", Type, 2}, + {"PSSOptions.Hash", Field, 4}, + {"PSSOptions.SaltLength", Field, 2}, + {"PSSSaltLengthAuto", Const, 2}, + {"PSSSaltLengthEqualsHash", Const, 2}, + {"PrecomputedValues", Type, 0}, + {"PrecomputedValues.CRTValues", Field, 0}, + {"PrecomputedValues.Dp", Field, 0}, + {"PrecomputedValues.Dq", Field, 0}, + 
{"PrecomputedValues.Qinv", Field, 0}, + {"PrivateKey", Type, 0}, + {"PrivateKey.D", Field, 0}, + {"PrivateKey.Precomputed", Field, 0}, + {"PrivateKey.Primes", Field, 0}, + {"PrivateKey.PublicKey", Field, 0}, + {"PublicKey", Type, 0}, + {"PublicKey.E", Field, 0}, + {"PublicKey.N", Field, 0}, + {"SignPKCS1v15", Func, 0}, + {"SignPSS", Func, 2}, + {"VerifyPKCS1v15", Func, 0}, + {"VerifyPSS", Func, 2}, + }, + "crypto/sha1": { + {"BlockSize", Const, 0}, + {"New", Func, 0}, + {"Size", Const, 0}, + {"Sum", Func, 2}, + }, + "crypto/sha256": { + {"BlockSize", Const, 0}, + {"New", Func, 0}, + {"New224", Func, 0}, + {"Size", Const, 0}, + {"Size224", Const, 0}, + {"Sum224", Func, 2}, + {"Sum256", Func, 2}, + }, + "crypto/sha512": { + {"BlockSize", Const, 0}, + {"New", Func, 0}, + {"New384", Func, 0}, + {"New512_224", Func, 5}, + {"New512_256", Func, 5}, + {"Size", Const, 0}, + {"Size224", Const, 5}, + {"Size256", Const, 5}, + {"Size384", Const, 0}, + {"Sum384", Func, 2}, + {"Sum512", Func, 2}, + {"Sum512_224", Func, 5}, + {"Sum512_256", Func, 5}, + }, + "crypto/subtle": { + {"ConstantTimeByteEq", Func, 0}, + {"ConstantTimeCompare", Func, 0}, + {"ConstantTimeCopy", Func, 0}, + {"ConstantTimeEq", Func, 0}, + {"ConstantTimeLessOrEq", Func, 2}, + {"ConstantTimeSelect", Func, 0}, + {"XORBytes", Func, 20}, + }, + "crypto/tls": { + {"(*CertificateRequestInfo).Context", Method, 17}, + {"(*CertificateRequestInfo).SupportsCertificate", Method, 14}, + {"(*CertificateVerificationError).Error", Method, 20}, + {"(*CertificateVerificationError).Unwrap", Method, 20}, + {"(*ClientHelloInfo).Context", Method, 17}, + {"(*ClientHelloInfo).SupportsCertificate", Method, 14}, + {"(*ClientSessionState).ResumptionState", Method, 21}, + {"(*Config).BuildNameToCertificate", Method, 0}, + {"(*Config).Clone", Method, 8}, + {"(*Config).DecryptTicket", Method, 21}, + {"(*Config).EncryptTicket", Method, 21}, + {"(*Config).SetSessionTicketKeys", Method, 5}, + {"(*Conn).Close", Method, 0}, + 
{"(*Conn).CloseWrite", Method, 8}, + {"(*Conn).ConnectionState", Method, 0}, + {"(*Conn).Handshake", Method, 0}, + {"(*Conn).HandshakeContext", Method, 17}, + {"(*Conn).LocalAddr", Method, 0}, + {"(*Conn).NetConn", Method, 18}, + {"(*Conn).OCSPResponse", Method, 0}, + {"(*Conn).Read", Method, 0}, + {"(*Conn).RemoteAddr", Method, 0}, + {"(*Conn).SetDeadline", Method, 0}, + {"(*Conn).SetReadDeadline", Method, 0}, + {"(*Conn).SetWriteDeadline", Method, 0}, + {"(*Conn).VerifyHostname", Method, 0}, + {"(*Conn).Write", Method, 0}, + {"(*ConnectionState).ExportKeyingMaterial", Method, 11}, + {"(*Dialer).Dial", Method, 15}, + {"(*Dialer).DialContext", Method, 15}, + {"(*QUICConn).Close", Method, 21}, + {"(*QUICConn).ConnectionState", Method, 21}, + {"(*QUICConn).HandleData", Method, 21}, + {"(*QUICConn).NextEvent", Method, 21}, + {"(*QUICConn).SendSessionTicket", Method, 21}, + {"(*QUICConn).SetTransportParameters", Method, 21}, + {"(*QUICConn).Start", Method, 21}, + {"(*SessionState).Bytes", Method, 21}, + {"(AlertError).Error", Method, 21}, + {"(ClientAuthType).String", Method, 15}, + {"(CurveID).String", Method, 15}, + {"(QUICEncryptionLevel).String", Method, 21}, + {"(RecordHeaderError).Error", Method, 6}, + {"(SignatureScheme).String", Method, 15}, + {"AlertError", Type, 21}, + {"Certificate", Type, 0}, + {"Certificate.Certificate", Field, 0}, + {"Certificate.Leaf", Field, 0}, + {"Certificate.OCSPStaple", Field, 0}, + {"Certificate.PrivateKey", Field, 0}, + {"Certificate.SignedCertificateTimestamps", Field, 5}, + {"Certificate.SupportedSignatureAlgorithms", Field, 14}, + {"CertificateRequestInfo", Type, 8}, + {"CertificateRequestInfo.AcceptableCAs", Field, 8}, + {"CertificateRequestInfo.SignatureSchemes", Field, 8}, + {"CertificateRequestInfo.Version", Field, 14}, + {"CertificateVerificationError", Type, 20}, + {"CertificateVerificationError.Err", Field, 20}, + {"CertificateVerificationError.UnverifiedCertificates", Field, 20}, + {"CipherSuite", Type, 14}, + 
{"CipherSuite.ID", Field, 14}, + {"CipherSuite.Insecure", Field, 14}, + {"CipherSuite.Name", Field, 14}, + {"CipherSuite.SupportedVersions", Field, 14}, + {"CipherSuiteName", Func, 14}, + {"CipherSuites", Func, 14}, + {"Client", Func, 0}, + {"ClientAuthType", Type, 0}, + {"ClientHelloInfo", Type, 4}, + {"ClientHelloInfo.CipherSuites", Field, 4}, + {"ClientHelloInfo.Conn", Field, 8}, + {"ClientHelloInfo.ServerName", Field, 4}, + {"ClientHelloInfo.SignatureSchemes", Field, 8}, + {"ClientHelloInfo.SupportedCurves", Field, 4}, + {"ClientHelloInfo.SupportedPoints", Field, 4}, + {"ClientHelloInfo.SupportedProtos", Field, 8}, + {"ClientHelloInfo.SupportedVersions", Field, 8}, + {"ClientSessionCache", Type, 3}, + {"ClientSessionState", Type, 3}, + {"Config", Type, 0}, + {"Config.Certificates", Field, 0}, + {"Config.CipherSuites", Field, 0}, + {"Config.ClientAuth", Field, 0}, + {"Config.ClientCAs", Field, 0}, + {"Config.ClientSessionCache", Field, 3}, + {"Config.CurvePreferences", Field, 3}, + {"Config.DynamicRecordSizingDisabled", Field, 7}, + {"Config.GetCertificate", Field, 4}, + {"Config.GetClientCertificate", Field, 8}, + {"Config.GetConfigForClient", Field, 8}, + {"Config.InsecureSkipVerify", Field, 0}, + {"Config.KeyLogWriter", Field, 8}, + {"Config.MaxVersion", Field, 2}, + {"Config.MinVersion", Field, 2}, + {"Config.NameToCertificate", Field, 0}, + {"Config.NextProtos", Field, 0}, + {"Config.PreferServerCipherSuites", Field, 1}, + {"Config.Rand", Field, 0}, + {"Config.Renegotiation", Field, 7}, + {"Config.RootCAs", Field, 0}, + {"Config.ServerName", Field, 0}, + {"Config.SessionTicketKey", Field, 1}, + {"Config.SessionTicketsDisabled", Field, 1}, + {"Config.Time", Field, 0}, + {"Config.UnwrapSession", Field, 21}, + {"Config.VerifyConnection", Field, 15}, + {"Config.VerifyPeerCertificate", Field, 8}, + {"Config.WrapSession", Field, 21}, + {"Conn", Type, 0}, + {"ConnectionState", Type, 0}, + {"ConnectionState.CipherSuite", Field, 0}, + {"ConnectionState.DidResume", 
Field, 1}, + {"ConnectionState.HandshakeComplete", Field, 0}, + {"ConnectionState.NegotiatedProtocol", Field, 0}, + {"ConnectionState.NegotiatedProtocolIsMutual", Field, 0}, + {"ConnectionState.OCSPResponse", Field, 5}, + {"ConnectionState.PeerCertificates", Field, 0}, + {"ConnectionState.ServerName", Field, 0}, + {"ConnectionState.SignedCertificateTimestamps", Field, 5}, + {"ConnectionState.TLSUnique", Field, 4}, + {"ConnectionState.VerifiedChains", Field, 0}, + {"ConnectionState.Version", Field, 3}, + {"CurveID", Type, 3}, + {"CurveP256", Const, 3}, + {"CurveP384", Const, 3}, + {"CurveP521", Const, 3}, + {"Dial", Func, 0}, + {"DialWithDialer", Func, 3}, + {"Dialer", Type, 15}, + {"Dialer.Config", Field, 15}, + {"Dialer.NetDialer", Field, 15}, + {"ECDSAWithP256AndSHA256", Const, 8}, + {"ECDSAWithP384AndSHA384", Const, 8}, + {"ECDSAWithP521AndSHA512", Const, 8}, + {"ECDSAWithSHA1", Const, 10}, + {"Ed25519", Const, 13}, + {"InsecureCipherSuites", Func, 14}, + {"Listen", Func, 0}, + {"LoadX509KeyPair", Func, 0}, + {"NewLRUClientSessionCache", Func, 3}, + {"NewListener", Func, 0}, + {"NewResumptionState", Func, 21}, + {"NoClientCert", Const, 0}, + {"PKCS1WithSHA1", Const, 8}, + {"PKCS1WithSHA256", Const, 8}, + {"PKCS1WithSHA384", Const, 8}, + {"PKCS1WithSHA512", Const, 8}, + {"PSSWithSHA256", Const, 8}, + {"PSSWithSHA384", Const, 8}, + {"PSSWithSHA512", Const, 8}, + {"ParseSessionState", Func, 21}, + {"QUICClient", Func, 21}, + {"QUICConfig", Type, 21}, + {"QUICConfig.TLSConfig", Field, 21}, + {"QUICConn", Type, 21}, + {"QUICEncryptionLevel", Type, 21}, + {"QUICEncryptionLevelApplication", Const, 21}, + {"QUICEncryptionLevelEarly", Const, 21}, + {"QUICEncryptionLevelHandshake", Const, 21}, + {"QUICEncryptionLevelInitial", Const, 21}, + {"QUICEvent", Type, 21}, + {"QUICEvent.Data", Field, 21}, + {"QUICEvent.Kind", Field, 21}, + {"QUICEvent.Level", Field, 21}, + {"QUICEvent.Suite", Field, 21}, + {"QUICEventKind", Type, 21}, + {"QUICHandshakeDone", Const, 21}, + 
{"QUICNoEvent", Const, 21}, + {"QUICRejectedEarlyData", Const, 21}, + {"QUICServer", Func, 21}, + {"QUICSessionTicketOptions", Type, 21}, + {"QUICSessionTicketOptions.EarlyData", Field, 21}, + {"QUICSetReadSecret", Const, 21}, + {"QUICSetWriteSecret", Const, 21}, + {"QUICTransportParameters", Const, 21}, + {"QUICTransportParametersRequired", Const, 21}, + {"QUICWriteData", Const, 21}, + {"RecordHeaderError", Type, 6}, + {"RecordHeaderError.Conn", Field, 12}, + {"RecordHeaderError.Msg", Field, 6}, + {"RecordHeaderError.RecordHeader", Field, 6}, + {"RenegotiateFreelyAsClient", Const, 7}, + {"RenegotiateNever", Const, 7}, + {"RenegotiateOnceAsClient", Const, 7}, + {"RenegotiationSupport", Type, 7}, + {"RequestClientCert", Const, 0}, + {"RequireAndVerifyClientCert", Const, 0}, + {"RequireAnyClientCert", Const, 0}, + {"Server", Func, 0}, + {"SessionState", Type, 21}, + {"SessionState.EarlyData", Field, 21}, + {"SessionState.Extra", Field, 21}, + {"SignatureScheme", Type, 8}, + {"TLS_AES_128_GCM_SHA256", Const, 12}, + {"TLS_AES_256_GCM_SHA384", Const, 12}, + {"TLS_CHACHA20_POLY1305_SHA256", Const, 12}, + {"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", Const, 2}, + {"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", Const, 8}, + {"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", Const, 2}, + {"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", Const, 2}, + {"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", Const, 5}, + {"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", Const, 8}, + {"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", Const, 14}, + {"TLS_ECDHE_ECDSA_WITH_RC4_128_SHA", Const, 2}, + {"TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA", Const, 0}, + {"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", Const, 0}, + {"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", Const, 8}, + {"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", Const, 2}, + {"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", Const, 1}, + {"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", Const, 5}, + {"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", Const, 8}, + {"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", Const, 
14}, + {"TLS_ECDHE_RSA_WITH_RC4_128_SHA", Const, 0}, + {"TLS_FALLBACK_SCSV", Const, 4}, + {"TLS_RSA_WITH_3DES_EDE_CBC_SHA", Const, 0}, + {"TLS_RSA_WITH_AES_128_CBC_SHA", Const, 0}, + {"TLS_RSA_WITH_AES_128_CBC_SHA256", Const, 8}, + {"TLS_RSA_WITH_AES_128_GCM_SHA256", Const, 6}, + {"TLS_RSA_WITH_AES_256_CBC_SHA", Const, 1}, + {"TLS_RSA_WITH_AES_256_GCM_SHA384", Const, 6}, + {"TLS_RSA_WITH_RC4_128_SHA", Const, 0}, + {"VerifyClientCertIfGiven", Const, 0}, + {"VersionName", Func, 21}, + {"VersionSSL30", Const, 2}, + {"VersionTLS10", Const, 2}, + {"VersionTLS11", Const, 2}, + {"VersionTLS12", Const, 2}, + {"VersionTLS13", Const, 12}, + {"X25519", Const, 8}, + {"X509KeyPair", Func, 0}, + }, + "crypto/x509": { + {"(*CertPool).AddCert", Method, 0}, + {"(*CertPool).AddCertWithConstraint", Method, 22}, + {"(*CertPool).AppendCertsFromPEM", Method, 0}, + {"(*CertPool).Clone", Method, 19}, + {"(*CertPool).Equal", Method, 19}, + {"(*CertPool).Subjects", Method, 0}, + {"(*Certificate).CheckCRLSignature", Method, 0}, + {"(*Certificate).CheckSignature", Method, 0}, + {"(*Certificate).CheckSignatureFrom", Method, 0}, + {"(*Certificate).CreateCRL", Method, 0}, + {"(*Certificate).Equal", Method, 0}, + {"(*Certificate).Verify", Method, 0}, + {"(*Certificate).VerifyHostname", Method, 0}, + {"(*CertificateRequest).CheckSignature", Method, 5}, + {"(*RevocationList).CheckSignatureFrom", Method, 19}, + {"(CertificateInvalidError).Error", Method, 0}, + {"(ConstraintViolationError).Error", Method, 0}, + {"(HostnameError).Error", Method, 0}, + {"(InsecureAlgorithmError).Error", Method, 6}, + {"(OID).Equal", Method, 22}, + {"(OID).EqualASN1OID", Method, 22}, + {"(OID).String", Method, 22}, + {"(PublicKeyAlgorithm).String", Method, 10}, + {"(SignatureAlgorithm).String", Method, 6}, + {"(SystemRootsError).Error", Method, 1}, + {"(SystemRootsError).Unwrap", Method, 16}, + {"(UnhandledCriticalExtension).Error", Method, 0}, + {"(UnknownAuthorityError).Error", Method, 0}, + 
{"CANotAuthorizedForExtKeyUsage", Const, 10}, + {"CANotAuthorizedForThisName", Const, 0}, + {"CertPool", Type, 0}, + {"Certificate", Type, 0}, + {"Certificate.AuthorityKeyId", Field, 0}, + {"Certificate.BasicConstraintsValid", Field, 0}, + {"Certificate.CRLDistributionPoints", Field, 2}, + {"Certificate.DNSNames", Field, 0}, + {"Certificate.EmailAddresses", Field, 0}, + {"Certificate.ExcludedDNSDomains", Field, 9}, + {"Certificate.ExcludedEmailAddresses", Field, 10}, + {"Certificate.ExcludedIPRanges", Field, 10}, + {"Certificate.ExcludedURIDomains", Field, 10}, + {"Certificate.ExtKeyUsage", Field, 0}, + {"Certificate.Extensions", Field, 2}, + {"Certificate.ExtraExtensions", Field, 2}, + {"Certificate.IPAddresses", Field, 1}, + {"Certificate.IsCA", Field, 0}, + {"Certificate.Issuer", Field, 0}, + {"Certificate.IssuingCertificateURL", Field, 2}, + {"Certificate.KeyUsage", Field, 0}, + {"Certificate.MaxPathLen", Field, 0}, + {"Certificate.MaxPathLenZero", Field, 4}, + {"Certificate.NotAfter", Field, 0}, + {"Certificate.NotBefore", Field, 0}, + {"Certificate.OCSPServer", Field, 2}, + {"Certificate.PermittedDNSDomains", Field, 0}, + {"Certificate.PermittedDNSDomainsCritical", Field, 0}, + {"Certificate.PermittedEmailAddresses", Field, 10}, + {"Certificate.PermittedIPRanges", Field, 10}, + {"Certificate.PermittedURIDomains", Field, 10}, + {"Certificate.Policies", Field, 22}, + {"Certificate.PolicyIdentifiers", Field, 0}, + {"Certificate.PublicKey", Field, 0}, + {"Certificate.PublicKeyAlgorithm", Field, 0}, + {"Certificate.Raw", Field, 0}, + {"Certificate.RawIssuer", Field, 0}, + {"Certificate.RawSubject", Field, 0}, + {"Certificate.RawSubjectPublicKeyInfo", Field, 0}, + {"Certificate.RawTBSCertificate", Field, 0}, + {"Certificate.SerialNumber", Field, 0}, + {"Certificate.Signature", Field, 0}, + {"Certificate.SignatureAlgorithm", Field, 0}, + {"Certificate.Subject", Field, 0}, + {"Certificate.SubjectKeyId", Field, 0}, + {"Certificate.URIs", Field, 10}, + 
{"Certificate.UnhandledCriticalExtensions", Field, 5}, + {"Certificate.UnknownExtKeyUsage", Field, 0}, + {"Certificate.Version", Field, 0}, + {"CertificateInvalidError", Type, 0}, + {"CertificateInvalidError.Cert", Field, 0}, + {"CertificateInvalidError.Detail", Field, 10}, + {"CertificateInvalidError.Reason", Field, 0}, + {"CertificateRequest", Type, 3}, + {"CertificateRequest.Attributes", Field, 3}, + {"CertificateRequest.DNSNames", Field, 3}, + {"CertificateRequest.EmailAddresses", Field, 3}, + {"CertificateRequest.Extensions", Field, 3}, + {"CertificateRequest.ExtraExtensions", Field, 3}, + {"CertificateRequest.IPAddresses", Field, 3}, + {"CertificateRequest.PublicKey", Field, 3}, + {"CertificateRequest.PublicKeyAlgorithm", Field, 3}, + {"CertificateRequest.Raw", Field, 3}, + {"CertificateRequest.RawSubject", Field, 3}, + {"CertificateRequest.RawSubjectPublicKeyInfo", Field, 3}, + {"CertificateRequest.RawTBSCertificateRequest", Field, 3}, + {"CertificateRequest.Signature", Field, 3}, + {"CertificateRequest.SignatureAlgorithm", Field, 3}, + {"CertificateRequest.Subject", Field, 3}, + {"CertificateRequest.URIs", Field, 10}, + {"CertificateRequest.Version", Field, 3}, + {"ConstraintViolationError", Type, 0}, + {"CreateCertificate", Func, 0}, + {"CreateCertificateRequest", Func, 3}, + {"CreateRevocationList", Func, 15}, + {"DSA", Const, 0}, + {"DSAWithSHA1", Const, 0}, + {"DSAWithSHA256", Const, 0}, + {"DecryptPEMBlock", Func, 1}, + {"ECDSA", Const, 1}, + {"ECDSAWithSHA1", Const, 1}, + {"ECDSAWithSHA256", Const, 1}, + {"ECDSAWithSHA384", Const, 1}, + {"ECDSAWithSHA512", Const, 1}, + {"Ed25519", Const, 13}, + {"EncryptPEMBlock", Func, 1}, + {"ErrUnsupportedAlgorithm", Var, 0}, + {"Expired", Const, 0}, + {"ExtKeyUsage", Type, 0}, + {"ExtKeyUsageAny", Const, 0}, + {"ExtKeyUsageClientAuth", Const, 0}, + {"ExtKeyUsageCodeSigning", Const, 0}, + {"ExtKeyUsageEmailProtection", Const, 0}, + {"ExtKeyUsageIPSECEndSystem", Const, 1}, + {"ExtKeyUsageIPSECTunnel", Const, 1}, + 
{"ExtKeyUsageIPSECUser", Const, 1}, + {"ExtKeyUsageMicrosoftCommercialCodeSigning", Const, 10}, + {"ExtKeyUsageMicrosoftKernelCodeSigning", Const, 10}, + {"ExtKeyUsageMicrosoftServerGatedCrypto", Const, 1}, + {"ExtKeyUsageNetscapeServerGatedCrypto", Const, 1}, + {"ExtKeyUsageOCSPSigning", Const, 0}, + {"ExtKeyUsageServerAuth", Const, 0}, + {"ExtKeyUsageTimeStamping", Const, 0}, + {"HostnameError", Type, 0}, + {"HostnameError.Certificate", Field, 0}, + {"HostnameError.Host", Field, 0}, + {"IncompatibleUsage", Const, 1}, + {"IncorrectPasswordError", Var, 1}, + {"InsecureAlgorithmError", Type, 6}, + {"InvalidReason", Type, 0}, + {"IsEncryptedPEMBlock", Func, 1}, + {"KeyUsage", Type, 0}, + {"KeyUsageCRLSign", Const, 0}, + {"KeyUsageCertSign", Const, 0}, + {"KeyUsageContentCommitment", Const, 0}, + {"KeyUsageDataEncipherment", Const, 0}, + {"KeyUsageDecipherOnly", Const, 0}, + {"KeyUsageDigitalSignature", Const, 0}, + {"KeyUsageEncipherOnly", Const, 0}, + {"KeyUsageKeyAgreement", Const, 0}, + {"KeyUsageKeyEncipherment", Const, 0}, + {"MD2WithRSA", Const, 0}, + {"MD5WithRSA", Const, 0}, + {"MarshalECPrivateKey", Func, 2}, + {"MarshalPKCS1PrivateKey", Func, 0}, + {"MarshalPKCS1PublicKey", Func, 10}, + {"MarshalPKCS8PrivateKey", Func, 10}, + {"MarshalPKIXPublicKey", Func, 0}, + {"NameConstraintsWithoutSANs", Const, 10}, + {"NameMismatch", Const, 8}, + {"NewCertPool", Func, 0}, + {"NotAuthorizedToSign", Const, 0}, + {"OID", Type, 22}, + {"OIDFromInts", Func, 22}, + {"PEMCipher", Type, 1}, + {"PEMCipher3DES", Const, 1}, + {"PEMCipherAES128", Const, 1}, + {"PEMCipherAES192", Const, 1}, + {"PEMCipherAES256", Const, 1}, + {"PEMCipherDES", Const, 1}, + {"ParseCRL", Func, 0}, + {"ParseCertificate", Func, 0}, + {"ParseCertificateRequest", Func, 3}, + {"ParseCertificates", Func, 0}, + {"ParseDERCRL", Func, 0}, + {"ParseECPrivateKey", Func, 1}, + {"ParsePKCS1PrivateKey", Func, 0}, + {"ParsePKCS1PublicKey", Func, 10}, + {"ParsePKCS8PrivateKey", Func, 0}, + {"ParsePKIXPublicKey", 
Func, 0}, + {"ParseRevocationList", Func, 19}, + {"PublicKeyAlgorithm", Type, 0}, + {"PureEd25519", Const, 13}, + {"RSA", Const, 0}, + {"RevocationList", Type, 15}, + {"RevocationList.AuthorityKeyId", Field, 19}, + {"RevocationList.Extensions", Field, 19}, + {"RevocationList.ExtraExtensions", Field, 15}, + {"RevocationList.Issuer", Field, 19}, + {"RevocationList.NextUpdate", Field, 15}, + {"RevocationList.Number", Field, 15}, + {"RevocationList.Raw", Field, 19}, + {"RevocationList.RawIssuer", Field, 19}, + {"RevocationList.RawTBSRevocationList", Field, 19}, + {"RevocationList.RevokedCertificateEntries", Field, 21}, + {"RevocationList.RevokedCertificates", Field, 15}, + {"RevocationList.Signature", Field, 19}, + {"RevocationList.SignatureAlgorithm", Field, 15}, + {"RevocationList.ThisUpdate", Field, 15}, + {"RevocationListEntry", Type, 21}, + {"RevocationListEntry.Extensions", Field, 21}, + {"RevocationListEntry.ExtraExtensions", Field, 21}, + {"RevocationListEntry.Raw", Field, 21}, + {"RevocationListEntry.ReasonCode", Field, 21}, + {"RevocationListEntry.RevocationTime", Field, 21}, + {"RevocationListEntry.SerialNumber", Field, 21}, + {"SHA1WithRSA", Const, 0}, + {"SHA256WithRSA", Const, 0}, + {"SHA256WithRSAPSS", Const, 8}, + {"SHA384WithRSA", Const, 0}, + {"SHA384WithRSAPSS", Const, 8}, + {"SHA512WithRSA", Const, 0}, + {"SHA512WithRSAPSS", Const, 8}, + {"SetFallbackRoots", Func, 20}, + {"SignatureAlgorithm", Type, 0}, + {"SystemCertPool", Func, 7}, + {"SystemRootsError", Type, 1}, + {"SystemRootsError.Err", Field, 7}, + {"TooManyConstraints", Const, 10}, + {"TooManyIntermediates", Const, 0}, + {"UnconstrainedName", Const, 10}, + {"UnhandledCriticalExtension", Type, 0}, + {"UnknownAuthorityError", Type, 0}, + {"UnknownAuthorityError.Cert", Field, 8}, + {"UnknownPublicKeyAlgorithm", Const, 0}, + {"UnknownSignatureAlgorithm", Const, 0}, + {"VerifyOptions", Type, 0}, + {"VerifyOptions.CurrentTime", Field, 0}, + {"VerifyOptions.DNSName", Field, 0}, + 
{"VerifyOptions.Intermediates", Field, 0}, + {"VerifyOptions.KeyUsages", Field, 1}, + {"VerifyOptions.MaxConstraintComparisions", Field, 10}, + {"VerifyOptions.Roots", Field, 0}, + }, + "crypto/x509/pkix": { + {"(*CertificateList).HasExpired", Method, 0}, + {"(*Name).FillFromRDNSequence", Method, 0}, + {"(Name).String", Method, 10}, + {"(Name).ToRDNSequence", Method, 0}, + {"(RDNSequence).String", Method, 10}, + {"AlgorithmIdentifier", Type, 0}, + {"AlgorithmIdentifier.Algorithm", Field, 0}, + {"AlgorithmIdentifier.Parameters", Field, 0}, + {"AttributeTypeAndValue", Type, 0}, + {"AttributeTypeAndValue.Type", Field, 0}, + {"AttributeTypeAndValue.Value", Field, 0}, + {"AttributeTypeAndValueSET", Type, 3}, + {"AttributeTypeAndValueSET.Type", Field, 3}, + {"AttributeTypeAndValueSET.Value", Field, 3}, + {"CertificateList", Type, 0}, + {"CertificateList.SignatureAlgorithm", Field, 0}, + {"CertificateList.SignatureValue", Field, 0}, + {"CertificateList.TBSCertList", Field, 0}, + {"Extension", Type, 0}, + {"Extension.Critical", Field, 0}, + {"Extension.Id", Field, 0}, + {"Extension.Value", Field, 0}, + {"Name", Type, 0}, + {"Name.CommonName", Field, 0}, + {"Name.Country", Field, 0}, + {"Name.ExtraNames", Field, 5}, + {"Name.Locality", Field, 0}, + {"Name.Names", Field, 0}, + {"Name.Organization", Field, 0}, + {"Name.OrganizationalUnit", Field, 0}, + {"Name.PostalCode", Field, 0}, + {"Name.Province", Field, 0}, + {"Name.SerialNumber", Field, 0}, + {"Name.StreetAddress", Field, 0}, + {"RDNSequence", Type, 0}, + {"RelativeDistinguishedNameSET", Type, 0}, + {"RevokedCertificate", Type, 0}, + {"RevokedCertificate.Extensions", Field, 0}, + {"RevokedCertificate.RevocationTime", Field, 0}, + {"RevokedCertificate.SerialNumber", Field, 0}, + {"TBSCertificateList", Type, 0}, + {"TBSCertificateList.Extensions", Field, 0}, + {"TBSCertificateList.Issuer", Field, 0}, + {"TBSCertificateList.NextUpdate", Field, 0}, + {"TBSCertificateList.Raw", Field, 0}, + 
{"TBSCertificateList.RevokedCertificates", Field, 0}, + {"TBSCertificateList.Signature", Field, 0}, + {"TBSCertificateList.ThisUpdate", Field, 0}, + {"TBSCertificateList.Version", Field, 0}, + }, + "database/sql": { + {"(*ColumnType).DatabaseTypeName", Method, 8}, + {"(*ColumnType).DecimalSize", Method, 8}, + {"(*ColumnType).Length", Method, 8}, + {"(*ColumnType).Name", Method, 8}, + {"(*ColumnType).Nullable", Method, 8}, + {"(*ColumnType).ScanType", Method, 8}, + {"(*Conn).BeginTx", Method, 9}, + {"(*Conn).Close", Method, 9}, + {"(*Conn).ExecContext", Method, 9}, + {"(*Conn).PingContext", Method, 9}, + {"(*Conn).PrepareContext", Method, 9}, + {"(*Conn).QueryContext", Method, 9}, + {"(*Conn).QueryRowContext", Method, 9}, + {"(*Conn).Raw", Method, 13}, + {"(*DB).Begin", Method, 0}, + {"(*DB).BeginTx", Method, 8}, + {"(*DB).Close", Method, 0}, + {"(*DB).Conn", Method, 9}, + {"(*DB).Driver", Method, 0}, + {"(*DB).Exec", Method, 0}, + {"(*DB).ExecContext", Method, 8}, + {"(*DB).Ping", Method, 1}, + {"(*DB).PingContext", Method, 8}, + {"(*DB).Prepare", Method, 0}, + {"(*DB).PrepareContext", Method, 8}, + {"(*DB).Query", Method, 0}, + {"(*DB).QueryContext", Method, 8}, + {"(*DB).QueryRow", Method, 0}, + {"(*DB).QueryRowContext", Method, 8}, + {"(*DB).SetConnMaxIdleTime", Method, 15}, + {"(*DB).SetConnMaxLifetime", Method, 6}, + {"(*DB).SetMaxIdleConns", Method, 1}, + {"(*DB).SetMaxOpenConns", Method, 2}, + {"(*DB).Stats", Method, 5}, + {"(*Null).Scan", Method, 22}, + {"(*NullBool).Scan", Method, 0}, + {"(*NullByte).Scan", Method, 17}, + {"(*NullFloat64).Scan", Method, 0}, + {"(*NullInt16).Scan", Method, 17}, + {"(*NullInt32).Scan", Method, 13}, + {"(*NullInt64).Scan", Method, 0}, + {"(*NullString).Scan", Method, 0}, + {"(*NullTime).Scan", Method, 13}, + {"(*Row).Err", Method, 15}, + {"(*Row).Scan", Method, 0}, + {"(*Rows).Close", Method, 0}, + {"(*Rows).ColumnTypes", Method, 8}, + {"(*Rows).Columns", Method, 0}, + {"(*Rows).Err", Method, 0}, + {"(*Rows).Next", Method, 
0}, + {"(*Rows).NextResultSet", Method, 8}, + {"(*Rows).Scan", Method, 0}, + {"(*Stmt).Close", Method, 0}, + {"(*Stmt).Exec", Method, 0}, + {"(*Stmt).ExecContext", Method, 8}, + {"(*Stmt).Query", Method, 0}, + {"(*Stmt).QueryContext", Method, 8}, + {"(*Stmt).QueryRow", Method, 0}, + {"(*Stmt).QueryRowContext", Method, 8}, + {"(*Tx).Commit", Method, 0}, + {"(*Tx).Exec", Method, 0}, + {"(*Tx).ExecContext", Method, 8}, + {"(*Tx).Prepare", Method, 0}, + {"(*Tx).PrepareContext", Method, 8}, + {"(*Tx).Query", Method, 0}, + {"(*Tx).QueryContext", Method, 8}, + {"(*Tx).QueryRow", Method, 0}, + {"(*Tx).QueryRowContext", Method, 8}, + {"(*Tx).Rollback", Method, 0}, + {"(*Tx).Stmt", Method, 0}, + {"(*Tx).StmtContext", Method, 8}, + {"(IsolationLevel).String", Method, 11}, + {"(Null).Value", Method, 22}, + {"(NullBool).Value", Method, 0}, + {"(NullByte).Value", Method, 17}, + {"(NullFloat64).Value", Method, 0}, + {"(NullInt16).Value", Method, 17}, + {"(NullInt32).Value", Method, 13}, + {"(NullInt64).Value", Method, 0}, + {"(NullString).Value", Method, 0}, + {"(NullTime).Value", Method, 13}, + {"ColumnType", Type, 8}, + {"Conn", Type, 9}, + {"DB", Type, 0}, + {"DBStats", Type, 5}, + {"DBStats.Idle", Field, 11}, + {"DBStats.InUse", Field, 11}, + {"DBStats.MaxIdleClosed", Field, 11}, + {"DBStats.MaxIdleTimeClosed", Field, 15}, + {"DBStats.MaxLifetimeClosed", Field, 11}, + {"DBStats.MaxOpenConnections", Field, 11}, + {"DBStats.OpenConnections", Field, 5}, + {"DBStats.WaitCount", Field, 11}, + {"DBStats.WaitDuration", Field, 11}, + {"Drivers", Func, 4}, + {"ErrConnDone", Var, 9}, + {"ErrNoRows", Var, 0}, + {"ErrTxDone", Var, 0}, + {"IsolationLevel", Type, 8}, + {"LevelDefault", Const, 8}, + {"LevelLinearizable", Const, 8}, + {"LevelReadCommitted", Const, 8}, + {"LevelReadUncommitted", Const, 8}, + {"LevelRepeatableRead", Const, 8}, + {"LevelSerializable", Const, 8}, + {"LevelSnapshot", Const, 8}, + {"LevelWriteCommitted", Const, 8}, + {"Named", Func, 8}, + {"NamedArg", Type, 8}, + 
{"NamedArg.Name", Field, 8}, + {"NamedArg.Value", Field, 8}, + {"Null", Type, 22}, + {"Null.V", Field, 22}, + {"Null.Valid", Field, 22}, + {"NullBool", Type, 0}, + {"NullBool.Bool", Field, 0}, + {"NullBool.Valid", Field, 0}, + {"NullByte", Type, 17}, + {"NullByte.Byte", Field, 17}, + {"NullByte.Valid", Field, 17}, + {"NullFloat64", Type, 0}, + {"NullFloat64.Float64", Field, 0}, + {"NullFloat64.Valid", Field, 0}, + {"NullInt16", Type, 17}, + {"NullInt16.Int16", Field, 17}, + {"NullInt16.Valid", Field, 17}, + {"NullInt32", Type, 13}, + {"NullInt32.Int32", Field, 13}, + {"NullInt32.Valid", Field, 13}, + {"NullInt64", Type, 0}, + {"NullInt64.Int64", Field, 0}, + {"NullInt64.Valid", Field, 0}, + {"NullString", Type, 0}, + {"NullString.String", Field, 0}, + {"NullString.Valid", Field, 0}, + {"NullTime", Type, 13}, + {"NullTime.Time", Field, 13}, + {"NullTime.Valid", Field, 13}, + {"Open", Func, 0}, + {"OpenDB", Func, 10}, + {"Out", Type, 9}, + {"Out.Dest", Field, 9}, + {"Out.In", Field, 9}, + {"RawBytes", Type, 0}, + {"Register", Func, 0}, + {"Result", Type, 0}, + {"Row", Type, 0}, + {"Rows", Type, 0}, + {"Scanner", Type, 0}, + {"Stmt", Type, 0}, + {"Tx", Type, 0}, + {"TxOptions", Type, 8}, + {"TxOptions.Isolation", Field, 8}, + {"TxOptions.ReadOnly", Field, 8}, + }, + "database/sql/driver": { + {"(NotNull).ConvertValue", Method, 0}, + {"(Null).ConvertValue", Method, 0}, + {"(RowsAffected).LastInsertId", Method, 0}, + {"(RowsAffected).RowsAffected", Method, 0}, + {"Bool", Var, 0}, + {"ColumnConverter", Type, 0}, + {"Conn", Type, 0}, + {"ConnBeginTx", Type, 8}, + {"ConnPrepareContext", Type, 8}, + {"Connector", Type, 10}, + {"DefaultParameterConverter", Var, 0}, + {"Driver", Type, 0}, + {"DriverContext", Type, 10}, + {"ErrBadConn", Var, 0}, + {"ErrRemoveArgument", Var, 9}, + {"ErrSkip", Var, 0}, + {"Execer", Type, 0}, + {"ExecerContext", Type, 8}, + {"Int32", Var, 0}, + {"IsScanValue", Func, 0}, + {"IsValue", Func, 0}, + {"IsolationLevel", Type, 8}, + {"NamedValue", Type, 
8}, + {"NamedValue.Name", Field, 8}, + {"NamedValue.Ordinal", Field, 8}, + {"NamedValue.Value", Field, 8}, + {"NamedValueChecker", Type, 9}, + {"NotNull", Type, 0}, + {"NotNull.Converter", Field, 0}, + {"Null", Type, 0}, + {"Null.Converter", Field, 0}, + {"Pinger", Type, 8}, + {"Queryer", Type, 1}, + {"QueryerContext", Type, 8}, + {"Result", Type, 0}, + {"ResultNoRows", Var, 0}, + {"Rows", Type, 0}, + {"RowsAffected", Type, 0}, + {"RowsColumnTypeDatabaseTypeName", Type, 8}, + {"RowsColumnTypeLength", Type, 8}, + {"RowsColumnTypeNullable", Type, 8}, + {"RowsColumnTypePrecisionScale", Type, 8}, + {"RowsColumnTypeScanType", Type, 8}, + {"RowsNextResultSet", Type, 8}, + {"SessionResetter", Type, 10}, + {"Stmt", Type, 0}, + {"StmtExecContext", Type, 8}, + {"StmtQueryContext", Type, 8}, + {"String", Var, 0}, + {"Tx", Type, 0}, + {"TxOptions", Type, 8}, + {"TxOptions.Isolation", Field, 8}, + {"TxOptions.ReadOnly", Field, 8}, + {"Validator", Type, 15}, + {"Value", Type, 0}, + {"ValueConverter", Type, 0}, + {"Valuer", Type, 0}, + }, + "debug/buildinfo": { + {"BuildInfo", Type, 18}, + {"Read", Func, 18}, + {"ReadFile", Func, 18}, + }, + "debug/dwarf": { + {"(*AddrType).Basic", Method, 0}, + {"(*AddrType).Common", Method, 0}, + {"(*AddrType).Size", Method, 0}, + {"(*AddrType).String", Method, 0}, + {"(*ArrayType).Common", Method, 0}, + {"(*ArrayType).Size", Method, 0}, + {"(*ArrayType).String", Method, 0}, + {"(*BasicType).Basic", Method, 0}, + {"(*BasicType).Common", Method, 0}, + {"(*BasicType).Size", Method, 0}, + {"(*BasicType).String", Method, 0}, + {"(*BoolType).Basic", Method, 0}, + {"(*BoolType).Common", Method, 0}, + {"(*BoolType).Size", Method, 0}, + {"(*BoolType).String", Method, 0}, + {"(*CharType).Basic", Method, 0}, + {"(*CharType).Common", Method, 0}, + {"(*CharType).Size", Method, 0}, + {"(*CharType).String", Method, 0}, + {"(*CommonType).Common", Method, 0}, + {"(*CommonType).Size", Method, 0}, + {"(*ComplexType).Basic", Method, 0}, + 
{"(*ComplexType).Common", Method, 0}, + {"(*ComplexType).Size", Method, 0}, + {"(*ComplexType).String", Method, 0}, + {"(*Data).AddSection", Method, 14}, + {"(*Data).AddTypes", Method, 3}, + {"(*Data).LineReader", Method, 5}, + {"(*Data).Ranges", Method, 7}, + {"(*Data).Reader", Method, 0}, + {"(*Data).Type", Method, 0}, + {"(*DotDotDotType).Common", Method, 0}, + {"(*DotDotDotType).Size", Method, 0}, + {"(*DotDotDotType).String", Method, 0}, + {"(*Entry).AttrField", Method, 5}, + {"(*Entry).Val", Method, 0}, + {"(*EnumType).Common", Method, 0}, + {"(*EnumType).Size", Method, 0}, + {"(*EnumType).String", Method, 0}, + {"(*FloatType).Basic", Method, 0}, + {"(*FloatType).Common", Method, 0}, + {"(*FloatType).Size", Method, 0}, + {"(*FloatType).String", Method, 0}, + {"(*FuncType).Common", Method, 0}, + {"(*FuncType).Size", Method, 0}, + {"(*FuncType).String", Method, 0}, + {"(*IntType).Basic", Method, 0}, + {"(*IntType).Common", Method, 0}, + {"(*IntType).Size", Method, 0}, + {"(*IntType).String", Method, 0}, + {"(*LineReader).Files", Method, 14}, + {"(*LineReader).Next", Method, 5}, + {"(*LineReader).Reset", Method, 5}, + {"(*LineReader).Seek", Method, 5}, + {"(*LineReader).SeekPC", Method, 5}, + {"(*LineReader).Tell", Method, 5}, + {"(*PtrType).Common", Method, 0}, + {"(*PtrType).Size", Method, 0}, + {"(*PtrType).String", Method, 0}, + {"(*QualType).Common", Method, 0}, + {"(*QualType).Size", Method, 0}, + {"(*QualType).String", Method, 0}, + {"(*Reader).AddressSize", Method, 5}, + {"(*Reader).ByteOrder", Method, 14}, + {"(*Reader).Next", Method, 0}, + {"(*Reader).Seek", Method, 0}, + {"(*Reader).SeekPC", Method, 7}, + {"(*Reader).SkipChildren", Method, 0}, + {"(*StructType).Common", Method, 0}, + {"(*StructType).Defn", Method, 0}, + {"(*StructType).Size", Method, 0}, + {"(*StructType).String", Method, 0}, + {"(*TypedefType).Common", Method, 0}, + {"(*TypedefType).Size", Method, 0}, + {"(*TypedefType).String", Method, 0}, + {"(*UcharType).Basic", Method, 0}, + 
{"(*UcharType).Common", Method, 0}, + {"(*UcharType).Size", Method, 0}, + {"(*UcharType).String", Method, 0}, + {"(*UintType).Basic", Method, 0}, + {"(*UintType).Common", Method, 0}, + {"(*UintType).Size", Method, 0}, + {"(*UintType).String", Method, 0}, + {"(*UnspecifiedType).Basic", Method, 4}, + {"(*UnspecifiedType).Common", Method, 4}, + {"(*UnspecifiedType).Size", Method, 4}, + {"(*UnspecifiedType).String", Method, 4}, + {"(*UnsupportedType).Common", Method, 13}, + {"(*UnsupportedType).Size", Method, 13}, + {"(*UnsupportedType).String", Method, 13}, + {"(*VoidType).Common", Method, 0}, + {"(*VoidType).Size", Method, 0}, + {"(*VoidType).String", Method, 0}, + {"(Attr).GoString", Method, 0}, + {"(Attr).String", Method, 0}, + {"(Class).GoString", Method, 5}, + {"(Class).String", Method, 5}, + {"(DecodeError).Error", Method, 0}, + {"(Tag).GoString", Method, 0}, + {"(Tag).String", Method, 0}, + {"AddrType", Type, 0}, + {"AddrType.BasicType", Field, 0}, + {"ArrayType", Type, 0}, + {"ArrayType.CommonType", Field, 0}, + {"ArrayType.Count", Field, 0}, + {"ArrayType.StrideBitSize", Field, 0}, + {"ArrayType.Type", Field, 0}, + {"Attr", Type, 0}, + {"AttrAbstractOrigin", Const, 0}, + {"AttrAccessibility", Const, 0}, + {"AttrAddrBase", Const, 14}, + {"AttrAddrClass", Const, 0}, + {"AttrAlignment", Const, 14}, + {"AttrAllocated", Const, 0}, + {"AttrArtificial", Const, 0}, + {"AttrAssociated", Const, 0}, + {"AttrBaseTypes", Const, 0}, + {"AttrBinaryScale", Const, 14}, + {"AttrBitOffset", Const, 0}, + {"AttrBitSize", Const, 0}, + {"AttrByteSize", Const, 0}, + {"AttrCallAllCalls", Const, 14}, + {"AttrCallAllSourceCalls", Const, 14}, + {"AttrCallAllTailCalls", Const, 14}, + {"AttrCallColumn", Const, 0}, + {"AttrCallDataLocation", Const, 14}, + {"AttrCallDataValue", Const, 14}, + {"AttrCallFile", Const, 0}, + {"AttrCallLine", Const, 0}, + {"AttrCallOrigin", Const, 14}, + {"AttrCallPC", Const, 14}, + {"AttrCallParameter", Const, 14}, + {"AttrCallReturnPC", Const, 14}, + 
{"AttrCallTailCall", Const, 14}, + {"AttrCallTarget", Const, 14}, + {"AttrCallTargetClobbered", Const, 14}, + {"AttrCallValue", Const, 14}, + {"AttrCalling", Const, 0}, + {"AttrCommonRef", Const, 0}, + {"AttrCompDir", Const, 0}, + {"AttrConstExpr", Const, 14}, + {"AttrConstValue", Const, 0}, + {"AttrContainingType", Const, 0}, + {"AttrCount", Const, 0}, + {"AttrDataBitOffset", Const, 14}, + {"AttrDataLocation", Const, 0}, + {"AttrDataMemberLoc", Const, 0}, + {"AttrDecimalScale", Const, 14}, + {"AttrDecimalSign", Const, 14}, + {"AttrDeclColumn", Const, 0}, + {"AttrDeclFile", Const, 0}, + {"AttrDeclLine", Const, 0}, + {"AttrDeclaration", Const, 0}, + {"AttrDefaultValue", Const, 0}, + {"AttrDefaulted", Const, 14}, + {"AttrDeleted", Const, 14}, + {"AttrDescription", Const, 0}, + {"AttrDigitCount", Const, 14}, + {"AttrDiscr", Const, 0}, + {"AttrDiscrList", Const, 0}, + {"AttrDiscrValue", Const, 0}, + {"AttrDwoName", Const, 14}, + {"AttrElemental", Const, 14}, + {"AttrEncoding", Const, 0}, + {"AttrEndianity", Const, 14}, + {"AttrEntrypc", Const, 0}, + {"AttrEnumClass", Const, 14}, + {"AttrExplicit", Const, 14}, + {"AttrExportSymbols", Const, 14}, + {"AttrExtension", Const, 0}, + {"AttrExternal", Const, 0}, + {"AttrFrameBase", Const, 0}, + {"AttrFriend", Const, 0}, + {"AttrHighpc", Const, 0}, + {"AttrIdentifierCase", Const, 0}, + {"AttrImport", Const, 0}, + {"AttrInline", Const, 0}, + {"AttrIsOptional", Const, 0}, + {"AttrLanguage", Const, 0}, + {"AttrLinkageName", Const, 14}, + {"AttrLocation", Const, 0}, + {"AttrLoclistsBase", Const, 14}, + {"AttrLowerBound", Const, 0}, + {"AttrLowpc", Const, 0}, + {"AttrMacroInfo", Const, 0}, + {"AttrMacros", Const, 14}, + {"AttrMainSubprogram", Const, 14}, + {"AttrMutable", Const, 14}, + {"AttrName", Const, 0}, + {"AttrNamelistItem", Const, 0}, + {"AttrNoreturn", Const, 14}, + {"AttrObjectPointer", Const, 14}, + {"AttrOrdering", Const, 0}, + {"AttrPictureString", Const, 14}, + {"AttrPriority", Const, 0}, + {"AttrProducer", Const, 0}, 
+ {"AttrPrototyped", Const, 0}, + {"AttrPure", Const, 14}, + {"AttrRanges", Const, 0}, + {"AttrRank", Const, 14}, + {"AttrRecursive", Const, 14}, + {"AttrReference", Const, 14}, + {"AttrReturnAddr", Const, 0}, + {"AttrRnglistsBase", Const, 14}, + {"AttrRvalueReference", Const, 14}, + {"AttrSegment", Const, 0}, + {"AttrSibling", Const, 0}, + {"AttrSignature", Const, 14}, + {"AttrSmall", Const, 14}, + {"AttrSpecification", Const, 0}, + {"AttrStartScope", Const, 0}, + {"AttrStaticLink", Const, 0}, + {"AttrStmtList", Const, 0}, + {"AttrStrOffsetsBase", Const, 14}, + {"AttrStride", Const, 0}, + {"AttrStrideSize", Const, 0}, + {"AttrStringLength", Const, 0}, + {"AttrStringLengthBitSize", Const, 14}, + {"AttrStringLengthByteSize", Const, 14}, + {"AttrThreadsScaled", Const, 14}, + {"AttrTrampoline", Const, 0}, + {"AttrType", Const, 0}, + {"AttrUpperBound", Const, 0}, + {"AttrUseLocation", Const, 0}, + {"AttrUseUTF8", Const, 0}, + {"AttrVarParam", Const, 0}, + {"AttrVirtuality", Const, 0}, + {"AttrVisibility", Const, 0}, + {"AttrVtableElemLoc", Const, 0}, + {"BasicType", Type, 0}, + {"BasicType.BitOffset", Field, 0}, + {"BasicType.BitSize", Field, 0}, + {"BasicType.CommonType", Field, 0}, + {"BasicType.DataBitOffset", Field, 18}, + {"BoolType", Type, 0}, + {"BoolType.BasicType", Field, 0}, + {"CharType", Type, 0}, + {"CharType.BasicType", Field, 0}, + {"Class", Type, 5}, + {"ClassAddrPtr", Const, 14}, + {"ClassAddress", Const, 5}, + {"ClassBlock", Const, 5}, + {"ClassConstant", Const, 5}, + {"ClassExprLoc", Const, 5}, + {"ClassFlag", Const, 5}, + {"ClassLinePtr", Const, 5}, + {"ClassLocList", Const, 14}, + {"ClassLocListPtr", Const, 5}, + {"ClassMacPtr", Const, 5}, + {"ClassRangeListPtr", Const, 5}, + {"ClassReference", Const, 5}, + {"ClassReferenceAlt", Const, 5}, + {"ClassReferenceSig", Const, 5}, + {"ClassRngList", Const, 14}, + {"ClassRngListsPtr", Const, 14}, + {"ClassStrOffsetsPtr", Const, 14}, + {"ClassString", Const, 5}, + {"ClassStringAlt", Const, 5}, + 
{"ClassUnknown", Const, 6}, + {"CommonType", Type, 0}, + {"CommonType.ByteSize", Field, 0}, + {"CommonType.Name", Field, 0}, + {"ComplexType", Type, 0}, + {"ComplexType.BasicType", Field, 0}, + {"Data", Type, 0}, + {"DecodeError", Type, 0}, + {"DecodeError.Err", Field, 0}, + {"DecodeError.Name", Field, 0}, + {"DecodeError.Offset", Field, 0}, + {"DotDotDotType", Type, 0}, + {"DotDotDotType.CommonType", Field, 0}, + {"Entry", Type, 0}, + {"Entry.Children", Field, 0}, + {"Entry.Field", Field, 0}, + {"Entry.Offset", Field, 0}, + {"Entry.Tag", Field, 0}, + {"EnumType", Type, 0}, + {"EnumType.CommonType", Field, 0}, + {"EnumType.EnumName", Field, 0}, + {"EnumType.Val", Field, 0}, + {"EnumValue", Type, 0}, + {"EnumValue.Name", Field, 0}, + {"EnumValue.Val", Field, 0}, + {"ErrUnknownPC", Var, 5}, + {"Field", Type, 0}, + {"Field.Attr", Field, 0}, + {"Field.Class", Field, 5}, + {"Field.Val", Field, 0}, + {"FloatType", Type, 0}, + {"FloatType.BasicType", Field, 0}, + {"FuncType", Type, 0}, + {"FuncType.CommonType", Field, 0}, + {"FuncType.ParamType", Field, 0}, + {"FuncType.ReturnType", Field, 0}, + {"IntType", Type, 0}, + {"IntType.BasicType", Field, 0}, + {"LineEntry", Type, 5}, + {"LineEntry.Address", Field, 5}, + {"LineEntry.BasicBlock", Field, 5}, + {"LineEntry.Column", Field, 5}, + {"LineEntry.Discriminator", Field, 5}, + {"LineEntry.EndSequence", Field, 5}, + {"LineEntry.EpilogueBegin", Field, 5}, + {"LineEntry.File", Field, 5}, + {"LineEntry.ISA", Field, 5}, + {"LineEntry.IsStmt", Field, 5}, + {"LineEntry.Line", Field, 5}, + {"LineEntry.OpIndex", Field, 5}, + {"LineEntry.PrologueEnd", Field, 5}, + {"LineFile", Type, 5}, + {"LineFile.Length", Field, 5}, + {"LineFile.Mtime", Field, 5}, + {"LineFile.Name", Field, 5}, + {"LineReader", Type, 5}, + {"LineReaderPos", Type, 5}, + {"New", Func, 0}, + {"Offset", Type, 0}, + {"PtrType", Type, 0}, + {"PtrType.CommonType", Field, 0}, + {"PtrType.Type", Field, 0}, + {"QualType", Type, 0}, + {"QualType.CommonType", Field, 0}, + 
{"QualType.Qual", Field, 0}, + {"QualType.Type", Field, 0}, + {"Reader", Type, 0}, + {"StructField", Type, 0}, + {"StructField.BitOffset", Field, 0}, + {"StructField.BitSize", Field, 0}, + {"StructField.ByteOffset", Field, 0}, + {"StructField.ByteSize", Field, 0}, + {"StructField.DataBitOffset", Field, 18}, + {"StructField.Name", Field, 0}, + {"StructField.Type", Field, 0}, + {"StructType", Type, 0}, + {"StructType.CommonType", Field, 0}, + {"StructType.Field", Field, 0}, + {"StructType.Incomplete", Field, 0}, + {"StructType.Kind", Field, 0}, + {"StructType.StructName", Field, 0}, + {"Tag", Type, 0}, + {"TagAccessDeclaration", Const, 0}, + {"TagArrayType", Const, 0}, + {"TagAtomicType", Const, 14}, + {"TagBaseType", Const, 0}, + {"TagCallSite", Const, 14}, + {"TagCallSiteParameter", Const, 14}, + {"TagCatchDwarfBlock", Const, 0}, + {"TagClassType", Const, 0}, + {"TagCoarrayType", Const, 14}, + {"TagCommonDwarfBlock", Const, 0}, + {"TagCommonInclusion", Const, 0}, + {"TagCompileUnit", Const, 0}, + {"TagCondition", Const, 3}, + {"TagConstType", Const, 0}, + {"TagConstant", Const, 0}, + {"TagDwarfProcedure", Const, 0}, + {"TagDynamicType", Const, 14}, + {"TagEntryPoint", Const, 0}, + {"TagEnumerationType", Const, 0}, + {"TagEnumerator", Const, 0}, + {"TagFileType", Const, 0}, + {"TagFormalParameter", Const, 0}, + {"TagFriend", Const, 0}, + {"TagGenericSubrange", Const, 14}, + {"TagImmutableType", Const, 14}, + {"TagImportedDeclaration", Const, 0}, + {"TagImportedModule", Const, 0}, + {"TagImportedUnit", Const, 0}, + {"TagInheritance", Const, 0}, + {"TagInlinedSubroutine", Const, 0}, + {"TagInterfaceType", Const, 0}, + {"TagLabel", Const, 0}, + {"TagLexDwarfBlock", Const, 0}, + {"TagMember", Const, 0}, + {"TagModule", Const, 0}, + {"TagMutableType", Const, 0}, + {"TagNamelist", Const, 0}, + {"TagNamelistItem", Const, 0}, + {"TagNamespace", Const, 0}, + {"TagPackedType", Const, 0}, + {"TagPartialUnit", Const, 0}, + {"TagPointerType", Const, 0}, + {"TagPtrToMemberType", 
Const, 0}, + {"TagReferenceType", Const, 0}, + {"TagRestrictType", Const, 0}, + {"TagRvalueReferenceType", Const, 3}, + {"TagSetType", Const, 0}, + {"TagSharedType", Const, 3}, + {"TagSkeletonUnit", Const, 14}, + {"TagStringType", Const, 0}, + {"TagStructType", Const, 0}, + {"TagSubprogram", Const, 0}, + {"TagSubrangeType", Const, 0}, + {"TagSubroutineType", Const, 0}, + {"TagTemplateAlias", Const, 3}, + {"TagTemplateTypeParameter", Const, 0}, + {"TagTemplateValueParameter", Const, 0}, + {"TagThrownType", Const, 0}, + {"TagTryDwarfBlock", Const, 0}, + {"TagTypeUnit", Const, 3}, + {"TagTypedef", Const, 0}, + {"TagUnionType", Const, 0}, + {"TagUnspecifiedParameters", Const, 0}, + {"TagUnspecifiedType", Const, 0}, + {"TagVariable", Const, 0}, + {"TagVariant", Const, 0}, + {"TagVariantPart", Const, 0}, + {"TagVolatileType", Const, 0}, + {"TagWithStmt", Const, 0}, + {"Type", Type, 0}, + {"TypedefType", Type, 0}, + {"TypedefType.CommonType", Field, 0}, + {"TypedefType.Type", Field, 0}, + {"UcharType", Type, 0}, + {"UcharType.BasicType", Field, 0}, + {"UintType", Type, 0}, + {"UintType.BasicType", Field, 0}, + {"UnspecifiedType", Type, 4}, + {"UnspecifiedType.BasicType", Field, 4}, + {"UnsupportedType", Type, 13}, + {"UnsupportedType.CommonType", Field, 13}, + {"UnsupportedType.Tag", Field, 13}, + {"VoidType", Type, 0}, + {"VoidType.CommonType", Field, 0}, + }, + "debug/elf": { + {"(*File).Close", Method, 0}, + {"(*File).DWARF", Method, 0}, + {"(*File).DynString", Method, 1}, + {"(*File).DynValue", Method, 21}, + {"(*File).DynamicSymbols", Method, 4}, + {"(*File).ImportedLibraries", Method, 0}, + {"(*File).ImportedSymbols", Method, 0}, + {"(*File).Section", Method, 0}, + {"(*File).SectionByType", Method, 0}, + {"(*File).Symbols", Method, 0}, + {"(*FormatError).Error", Method, 0}, + {"(*Prog).Open", Method, 0}, + {"(*Section).Data", Method, 0}, + {"(*Section).Open", Method, 0}, + {"(Class).GoString", Method, 0}, + {"(Class).String", Method, 0}, + 
{"(CompressionType).GoString", Method, 6}, + {"(CompressionType).String", Method, 6}, + {"(Data).GoString", Method, 0}, + {"(Data).String", Method, 0}, + {"(DynFlag).GoString", Method, 0}, + {"(DynFlag).String", Method, 0}, + {"(DynFlag1).GoString", Method, 21}, + {"(DynFlag1).String", Method, 21}, + {"(DynTag).GoString", Method, 0}, + {"(DynTag).String", Method, 0}, + {"(Machine).GoString", Method, 0}, + {"(Machine).String", Method, 0}, + {"(NType).GoString", Method, 0}, + {"(NType).String", Method, 0}, + {"(OSABI).GoString", Method, 0}, + {"(OSABI).String", Method, 0}, + {"(Prog).ReadAt", Method, 0}, + {"(ProgFlag).GoString", Method, 0}, + {"(ProgFlag).String", Method, 0}, + {"(ProgType).GoString", Method, 0}, + {"(ProgType).String", Method, 0}, + {"(R_386).GoString", Method, 0}, + {"(R_386).String", Method, 0}, + {"(R_390).GoString", Method, 7}, + {"(R_390).String", Method, 7}, + {"(R_AARCH64).GoString", Method, 4}, + {"(R_AARCH64).String", Method, 4}, + {"(R_ALPHA).GoString", Method, 0}, + {"(R_ALPHA).String", Method, 0}, + {"(R_ARM).GoString", Method, 0}, + {"(R_ARM).String", Method, 0}, + {"(R_LARCH).GoString", Method, 19}, + {"(R_LARCH).String", Method, 19}, + {"(R_MIPS).GoString", Method, 6}, + {"(R_MIPS).String", Method, 6}, + {"(R_PPC).GoString", Method, 0}, + {"(R_PPC).String", Method, 0}, + {"(R_PPC64).GoString", Method, 5}, + {"(R_PPC64).String", Method, 5}, + {"(R_RISCV).GoString", Method, 11}, + {"(R_RISCV).String", Method, 11}, + {"(R_SPARC).GoString", Method, 0}, + {"(R_SPARC).String", Method, 0}, + {"(R_X86_64).GoString", Method, 0}, + {"(R_X86_64).String", Method, 0}, + {"(Section).ReadAt", Method, 0}, + {"(SectionFlag).GoString", Method, 0}, + {"(SectionFlag).String", Method, 0}, + {"(SectionIndex).GoString", Method, 0}, + {"(SectionIndex).String", Method, 0}, + {"(SectionType).GoString", Method, 0}, + {"(SectionType).String", Method, 0}, + {"(SymBind).GoString", Method, 0}, + {"(SymBind).String", Method, 0}, + {"(SymType).GoString", Method, 0}, 
+ {"(SymType).String", Method, 0}, + {"(SymVis).GoString", Method, 0}, + {"(SymVis).String", Method, 0}, + {"(Type).GoString", Method, 0}, + {"(Type).String", Method, 0}, + {"(Version).GoString", Method, 0}, + {"(Version).String", Method, 0}, + {"ARM_MAGIC_TRAMP_NUMBER", Const, 0}, + {"COMPRESS_HIOS", Const, 6}, + {"COMPRESS_HIPROC", Const, 6}, + {"COMPRESS_LOOS", Const, 6}, + {"COMPRESS_LOPROC", Const, 6}, + {"COMPRESS_ZLIB", Const, 6}, + {"COMPRESS_ZSTD", Const, 21}, + {"Chdr32", Type, 6}, + {"Chdr32.Addralign", Field, 6}, + {"Chdr32.Size", Field, 6}, + {"Chdr32.Type", Field, 6}, + {"Chdr64", Type, 6}, + {"Chdr64.Addralign", Field, 6}, + {"Chdr64.Size", Field, 6}, + {"Chdr64.Type", Field, 6}, + {"Class", Type, 0}, + {"CompressionType", Type, 6}, + {"DF_1_CONFALT", Const, 21}, + {"DF_1_DIRECT", Const, 21}, + {"DF_1_DISPRELDNE", Const, 21}, + {"DF_1_DISPRELPND", Const, 21}, + {"DF_1_EDITED", Const, 21}, + {"DF_1_ENDFILTEE", Const, 21}, + {"DF_1_GLOBAL", Const, 21}, + {"DF_1_GLOBAUDIT", Const, 21}, + {"DF_1_GROUP", Const, 21}, + {"DF_1_IGNMULDEF", Const, 21}, + {"DF_1_INITFIRST", Const, 21}, + {"DF_1_INTERPOSE", Const, 21}, + {"DF_1_KMOD", Const, 21}, + {"DF_1_LOADFLTR", Const, 21}, + {"DF_1_NOCOMMON", Const, 21}, + {"DF_1_NODEFLIB", Const, 21}, + {"DF_1_NODELETE", Const, 21}, + {"DF_1_NODIRECT", Const, 21}, + {"DF_1_NODUMP", Const, 21}, + {"DF_1_NOHDR", Const, 21}, + {"DF_1_NOKSYMS", Const, 21}, + {"DF_1_NOOPEN", Const, 21}, + {"DF_1_NORELOC", Const, 21}, + {"DF_1_NOW", Const, 21}, + {"DF_1_ORIGIN", Const, 21}, + {"DF_1_PIE", Const, 21}, + {"DF_1_SINGLETON", Const, 21}, + {"DF_1_STUB", Const, 21}, + {"DF_1_SYMINTPOSE", Const, 21}, + {"DF_1_TRANS", Const, 21}, + {"DF_1_WEAKFILTER", Const, 21}, + {"DF_BIND_NOW", Const, 0}, + {"DF_ORIGIN", Const, 0}, + {"DF_STATIC_TLS", Const, 0}, + {"DF_SYMBOLIC", Const, 0}, + {"DF_TEXTREL", Const, 0}, + {"DT_ADDRRNGHI", Const, 16}, + {"DT_ADDRRNGLO", Const, 16}, + {"DT_AUDIT", Const, 16}, + {"DT_AUXILIARY", Const, 16}, + 
{"DT_BIND_NOW", Const, 0}, + {"DT_CHECKSUM", Const, 16}, + {"DT_CONFIG", Const, 16}, + {"DT_DEBUG", Const, 0}, + {"DT_DEPAUDIT", Const, 16}, + {"DT_ENCODING", Const, 0}, + {"DT_FEATURE", Const, 16}, + {"DT_FILTER", Const, 16}, + {"DT_FINI", Const, 0}, + {"DT_FINI_ARRAY", Const, 0}, + {"DT_FINI_ARRAYSZ", Const, 0}, + {"DT_FLAGS", Const, 0}, + {"DT_FLAGS_1", Const, 16}, + {"DT_GNU_CONFLICT", Const, 16}, + {"DT_GNU_CONFLICTSZ", Const, 16}, + {"DT_GNU_HASH", Const, 16}, + {"DT_GNU_LIBLIST", Const, 16}, + {"DT_GNU_LIBLISTSZ", Const, 16}, + {"DT_GNU_PRELINKED", Const, 16}, + {"DT_HASH", Const, 0}, + {"DT_HIOS", Const, 0}, + {"DT_HIPROC", Const, 0}, + {"DT_INIT", Const, 0}, + {"DT_INIT_ARRAY", Const, 0}, + {"DT_INIT_ARRAYSZ", Const, 0}, + {"DT_JMPREL", Const, 0}, + {"DT_LOOS", Const, 0}, + {"DT_LOPROC", Const, 0}, + {"DT_MIPS_AUX_DYNAMIC", Const, 16}, + {"DT_MIPS_BASE_ADDRESS", Const, 16}, + {"DT_MIPS_COMPACT_SIZE", Const, 16}, + {"DT_MIPS_CONFLICT", Const, 16}, + {"DT_MIPS_CONFLICTNO", Const, 16}, + {"DT_MIPS_CXX_FLAGS", Const, 16}, + {"DT_MIPS_DELTA_CLASS", Const, 16}, + {"DT_MIPS_DELTA_CLASSSYM", Const, 16}, + {"DT_MIPS_DELTA_CLASSSYM_NO", Const, 16}, + {"DT_MIPS_DELTA_CLASS_NO", Const, 16}, + {"DT_MIPS_DELTA_INSTANCE", Const, 16}, + {"DT_MIPS_DELTA_INSTANCE_NO", Const, 16}, + {"DT_MIPS_DELTA_RELOC", Const, 16}, + {"DT_MIPS_DELTA_RELOC_NO", Const, 16}, + {"DT_MIPS_DELTA_SYM", Const, 16}, + {"DT_MIPS_DELTA_SYM_NO", Const, 16}, + {"DT_MIPS_DYNSTR_ALIGN", Const, 16}, + {"DT_MIPS_FLAGS", Const, 16}, + {"DT_MIPS_GOTSYM", Const, 16}, + {"DT_MIPS_GP_VALUE", Const, 16}, + {"DT_MIPS_HIDDEN_GOTIDX", Const, 16}, + {"DT_MIPS_HIPAGENO", Const, 16}, + {"DT_MIPS_ICHECKSUM", Const, 16}, + {"DT_MIPS_INTERFACE", Const, 16}, + {"DT_MIPS_INTERFACE_SIZE", Const, 16}, + {"DT_MIPS_IVERSION", Const, 16}, + {"DT_MIPS_LIBLIST", Const, 16}, + {"DT_MIPS_LIBLISTNO", Const, 16}, + {"DT_MIPS_LOCALPAGE_GOTIDX", Const, 16}, + {"DT_MIPS_LOCAL_GOTIDX", Const, 16}, + {"DT_MIPS_LOCAL_GOTNO", Const, 16}, + 
{"DT_MIPS_MSYM", Const, 16}, + {"DT_MIPS_OPTIONS", Const, 16}, + {"DT_MIPS_PERF_SUFFIX", Const, 16}, + {"DT_MIPS_PIXIE_INIT", Const, 16}, + {"DT_MIPS_PLTGOT", Const, 16}, + {"DT_MIPS_PROTECTED_GOTIDX", Const, 16}, + {"DT_MIPS_RLD_MAP", Const, 16}, + {"DT_MIPS_RLD_MAP_REL", Const, 16}, + {"DT_MIPS_RLD_TEXT_RESOLVE_ADDR", Const, 16}, + {"DT_MIPS_RLD_VERSION", Const, 16}, + {"DT_MIPS_RWPLT", Const, 16}, + {"DT_MIPS_SYMBOL_LIB", Const, 16}, + {"DT_MIPS_SYMTABNO", Const, 16}, + {"DT_MIPS_TIME_STAMP", Const, 16}, + {"DT_MIPS_UNREFEXTNO", Const, 16}, + {"DT_MOVEENT", Const, 16}, + {"DT_MOVESZ", Const, 16}, + {"DT_MOVETAB", Const, 16}, + {"DT_NEEDED", Const, 0}, + {"DT_NULL", Const, 0}, + {"DT_PLTGOT", Const, 0}, + {"DT_PLTPAD", Const, 16}, + {"DT_PLTPADSZ", Const, 16}, + {"DT_PLTREL", Const, 0}, + {"DT_PLTRELSZ", Const, 0}, + {"DT_POSFLAG_1", Const, 16}, + {"DT_PPC64_GLINK", Const, 16}, + {"DT_PPC64_OPD", Const, 16}, + {"DT_PPC64_OPDSZ", Const, 16}, + {"DT_PPC64_OPT", Const, 16}, + {"DT_PPC_GOT", Const, 16}, + {"DT_PPC_OPT", Const, 16}, + {"DT_PREINIT_ARRAY", Const, 0}, + {"DT_PREINIT_ARRAYSZ", Const, 0}, + {"DT_REL", Const, 0}, + {"DT_RELA", Const, 0}, + {"DT_RELACOUNT", Const, 16}, + {"DT_RELAENT", Const, 0}, + {"DT_RELASZ", Const, 0}, + {"DT_RELCOUNT", Const, 16}, + {"DT_RELENT", Const, 0}, + {"DT_RELSZ", Const, 0}, + {"DT_RPATH", Const, 0}, + {"DT_RUNPATH", Const, 0}, + {"DT_SONAME", Const, 0}, + {"DT_SPARC_REGISTER", Const, 16}, + {"DT_STRSZ", Const, 0}, + {"DT_STRTAB", Const, 0}, + {"DT_SYMBOLIC", Const, 0}, + {"DT_SYMENT", Const, 0}, + {"DT_SYMINENT", Const, 16}, + {"DT_SYMINFO", Const, 16}, + {"DT_SYMINSZ", Const, 16}, + {"DT_SYMTAB", Const, 0}, + {"DT_SYMTAB_SHNDX", Const, 16}, + {"DT_TEXTREL", Const, 0}, + {"DT_TLSDESC_GOT", Const, 16}, + {"DT_TLSDESC_PLT", Const, 16}, + {"DT_USED", Const, 16}, + {"DT_VALRNGHI", Const, 16}, + {"DT_VALRNGLO", Const, 16}, + {"DT_VERDEF", Const, 16}, + {"DT_VERDEFNUM", Const, 16}, + {"DT_VERNEED", Const, 0}, + {"DT_VERNEEDNUM", 
Const, 0}, + {"DT_VERSYM", Const, 0}, + {"Data", Type, 0}, + {"Dyn32", Type, 0}, + {"Dyn32.Tag", Field, 0}, + {"Dyn32.Val", Field, 0}, + {"Dyn64", Type, 0}, + {"Dyn64.Tag", Field, 0}, + {"Dyn64.Val", Field, 0}, + {"DynFlag", Type, 0}, + {"DynFlag1", Type, 21}, + {"DynTag", Type, 0}, + {"EI_ABIVERSION", Const, 0}, + {"EI_CLASS", Const, 0}, + {"EI_DATA", Const, 0}, + {"EI_NIDENT", Const, 0}, + {"EI_OSABI", Const, 0}, + {"EI_PAD", Const, 0}, + {"EI_VERSION", Const, 0}, + {"ELFCLASS32", Const, 0}, + {"ELFCLASS64", Const, 0}, + {"ELFCLASSNONE", Const, 0}, + {"ELFDATA2LSB", Const, 0}, + {"ELFDATA2MSB", Const, 0}, + {"ELFDATANONE", Const, 0}, + {"ELFMAG", Const, 0}, + {"ELFOSABI_86OPEN", Const, 0}, + {"ELFOSABI_AIX", Const, 0}, + {"ELFOSABI_ARM", Const, 0}, + {"ELFOSABI_AROS", Const, 11}, + {"ELFOSABI_CLOUDABI", Const, 11}, + {"ELFOSABI_FENIXOS", Const, 11}, + {"ELFOSABI_FREEBSD", Const, 0}, + {"ELFOSABI_HPUX", Const, 0}, + {"ELFOSABI_HURD", Const, 0}, + {"ELFOSABI_IRIX", Const, 0}, + {"ELFOSABI_LINUX", Const, 0}, + {"ELFOSABI_MODESTO", Const, 0}, + {"ELFOSABI_NETBSD", Const, 0}, + {"ELFOSABI_NONE", Const, 0}, + {"ELFOSABI_NSK", Const, 0}, + {"ELFOSABI_OPENBSD", Const, 0}, + {"ELFOSABI_OPENVMS", Const, 0}, + {"ELFOSABI_SOLARIS", Const, 0}, + {"ELFOSABI_STANDALONE", Const, 0}, + {"ELFOSABI_TRU64", Const, 0}, + {"EM_386", Const, 0}, + {"EM_486", Const, 0}, + {"EM_56800EX", Const, 11}, + {"EM_68HC05", Const, 11}, + {"EM_68HC08", Const, 11}, + {"EM_68HC11", Const, 11}, + {"EM_68HC12", Const, 0}, + {"EM_68HC16", Const, 11}, + {"EM_68K", Const, 0}, + {"EM_78KOR", Const, 11}, + {"EM_8051", Const, 11}, + {"EM_860", Const, 0}, + {"EM_88K", Const, 0}, + {"EM_960", Const, 0}, + {"EM_AARCH64", Const, 4}, + {"EM_ALPHA", Const, 0}, + {"EM_ALPHA_STD", Const, 0}, + {"EM_ALTERA_NIOS2", Const, 11}, + {"EM_AMDGPU", Const, 11}, + {"EM_ARC", Const, 0}, + {"EM_ARCA", Const, 11}, + {"EM_ARC_COMPACT", Const, 11}, + {"EM_ARC_COMPACT2", Const, 11}, + {"EM_ARM", Const, 0}, + {"EM_AVR", Const, 11}, 
+ {"EM_AVR32", Const, 11}, + {"EM_BA1", Const, 11}, + {"EM_BA2", Const, 11}, + {"EM_BLACKFIN", Const, 11}, + {"EM_BPF", Const, 11}, + {"EM_C166", Const, 11}, + {"EM_CDP", Const, 11}, + {"EM_CE", Const, 11}, + {"EM_CLOUDSHIELD", Const, 11}, + {"EM_COGE", Const, 11}, + {"EM_COLDFIRE", Const, 0}, + {"EM_COOL", Const, 11}, + {"EM_COREA_1ST", Const, 11}, + {"EM_COREA_2ND", Const, 11}, + {"EM_CR", Const, 11}, + {"EM_CR16", Const, 11}, + {"EM_CRAYNV2", Const, 11}, + {"EM_CRIS", Const, 11}, + {"EM_CRX", Const, 11}, + {"EM_CSR_KALIMBA", Const, 11}, + {"EM_CUDA", Const, 11}, + {"EM_CYPRESS_M8C", Const, 11}, + {"EM_D10V", Const, 11}, + {"EM_D30V", Const, 11}, + {"EM_DSP24", Const, 11}, + {"EM_DSPIC30F", Const, 11}, + {"EM_DXP", Const, 11}, + {"EM_ECOG1", Const, 11}, + {"EM_ECOG16", Const, 11}, + {"EM_ECOG1X", Const, 11}, + {"EM_ECOG2", Const, 11}, + {"EM_ETPU", Const, 11}, + {"EM_EXCESS", Const, 11}, + {"EM_F2MC16", Const, 11}, + {"EM_FIREPATH", Const, 11}, + {"EM_FR20", Const, 0}, + {"EM_FR30", Const, 11}, + {"EM_FT32", Const, 11}, + {"EM_FX66", Const, 11}, + {"EM_H8S", Const, 0}, + {"EM_H8_300", Const, 0}, + {"EM_H8_300H", Const, 0}, + {"EM_H8_500", Const, 0}, + {"EM_HUANY", Const, 11}, + {"EM_IA_64", Const, 0}, + {"EM_INTEL205", Const, 11}, + {"EM_INTEL206", Const, 11}, + {"EM_INTEL207", Const, 11}, + {"EM_INTEL208", Const, 11}, + {"EM_INTEL209", Const, 11}, + {"EM_IP2K", Const, 11}, + {"EM_JAVELIN", Const, 11}, + {"EM_K10M", Const, 11}, + {"EM_KM32", Const, 11}, + {"EM_KMX16", Const, 11}, + {"EM_KMX32", Const, 11}, + {"EM_KMX8", Const, 11}, + {"EM_KVARC", Const, 11}, + {"EM_L10M", Const, 11}, + {"EM_LANAI", Const, 11}, + {"EM_LATTICEMICO32", Const, 11}, + {"EM_LOONGARCH", Const, 19}, + {"EM_M16C", Const, 11}, + {"EM_M32", Const, 0}, + {"EM_M32C", Const, 11}, + {"EM_M32R", Const, 11}, + {"EM_MANIK", Const, 11}, + {"EM_MAX", Const, 11}, + {"EM_MAXQ30", Const, 11}, + {"EM_MCHP_PIC", Const, 11}, + {"EM_MCST_ELBRUS", Const, 11}, + {"EM_ME16", Const, 0}, + {"EM_METAG", Const, 
11}, + {"EM_MICROBLAZE", Const, 11}, + {"EM_MIPS", Const, 0}, + {"EM_MIPS_RS3_LE", Const, 0}, + {"EM_MIPS_RS4_BE", Const, 0}, + {"EM_MIPS_X", Const, 0}, + {"EM_MMA", Const, 0}, + {"EM_MMDSP_PLUS", Const, 11}, + {"EM_MMIX", Const, 11}, + {"EM_MN10200", Const, 11}, + {"EM_MN10300", Const, 11}, + {"EM_MOXIE", Const, 11}, + {"EM_MSP430", Const, 11}, + {"EM_NCPU", Const, 0}, + {"EM_NDR1", Const, 0}, + {"EM_NDS32", Const, 11}, + {"EM_NONE", Const, 0}, + {"EM_NORC", Const, 11}, + {"EM_NS32K", Const, 11}, + {"EM_OPEN8", Const, 11}, + {"EM_OPENRISC", Const, 11}, + {"EM_PARISC", Const, 0}, + {"EM_PCP", Const, 0}, + {"EM_PDP10", Const, 11}, + {"EM_PDP11", Const, 11}, + {"EM_PDSP", Const, 11}, + {"EM_PJ", Const, 11}, + {"EM_PPC", Const, 0}, + {"EM_PPC64", Const, 0}, + {"EM_PRISM", Const, 11}, + {"EM_QDSP6", Const, 11}, + {"EM_R32C", Const, 11}, + {"EM_RCE", Const, 0}, + {"EM_RH32", Const, 0}, + {"EM_RISCV", Const, 11}, + {"EM_RL78", Const, 11}, + {"EM_RS08", Const, 11}, + {"EM_RX", Const, 11}, + {"EM_S370", Const, 0}, + {"EM_S390", Const, 0}, + {"EM_SCORE7", Const, 11}, + {"EM_SEP", Const, 11}, + {"EM_SE_C17", Const, 11}, + {"EM_SE_C33", Const, 11}, + {"EM_SH", Const, 0}, + {"EM_SHARC", Const, 11}, + {"EM_SLE9X", Const, 11}, + {"EM_SNP1K", Const, 11}, + {"EM_SPARC", Const, 0}, + {"EM_SPARC32PLUS", Const, 0}, + {"EM_SPARCV9", Const, 0}, + {"EM_ST100", Const, 0}, + {"EM_ST19", Const, 11}, + {"EM_ST200", Const, 11}, + {"EM_ST7", Const, 11}, + {"EM_ST9PLUS", Const, 11}, + {"EM_STARCORE", Const, 0}, + {"EM_STM8", Const, 11}, + {"EM_STXP7X", Const, 11}, + {"EM_SVX", Const, 11}, + {"EM_TILE64", Const, 11}, + {"EM_TILEGX", Const, 11}, + {"EM_TILEPRO", Const, 11}, + {"EM_TINYJ", Const, 0}, + {"EM_TI_ARP32", Const, 11}, + {"EM_TI_C2000", Const, 11}, + {"EM_TI_C5500", Const, 11}, + {"EM_TI_C6000", Const, 11}, + {"EM_TI_PRU", Const, 11}, + {"EM_TMM_GPP", Const, 11}, + {"EM_TPC", Const, 11}, + {"EM_TRICORE", Const, 0}, + {"EM_TRIMEDIA", Const, 11}, + {"EM_TSK3000", Const, 11}, + 
{"EM_UNICORE", Const, 11}, + {"EM_V800", Const, 0}, + {"EM_V850", Const, 11}, + {"EM_VAX", Const, 11}, + {"EM_VIDEOCORE", Const, 11}, + {"EM_VIDEOCORE3", Const, 11}, + {"EM_VIDEOCORE5", Const, 11}, + {"EM_VISIUM", Const, 11}, + {"EM_VPP500", Const, 0}, + {"EM_X86_64", Const, 0}, + {"EM_XCORE", Const, 11}, + {"EM_XGATE", Const, 11}, + {"EM_XIMO16", Const, 11}, + {"EM_XTENSA", Const, 11}, + {"EM_Z80", Const, 11}, + {"EM_ZSP", Const, 11}, + {"ET_CORE", Const, 0}, + {"ET_DYN", Const, 0}, + {"ET_EXEC", Const, 0}, + {"ET_HIOS", Const, 0}, + {"ET_HIPROC", Const, 0}, + {"ET_LOOS", Const, 0}, + {"ET_LOPROC", Const, 0}, + {"ET_NONE", Const, 0}, + {"ET_REL", Const, 0}, + {"EV_CURRENT", Const, 0}, + {"EV_NONE", Const, 0}, + {"ErrNoSymbols", Var, 4}, + {"File", Type, 0}, + {"File.FileHeader", Field, 0}, + {"File.Progs", Field, 0}, + {"File.Sections", Field, 0}, + {"FileHeader", Type, 0}, + {"FileHeader.ABIVersion", Field, 0}, + {"FileHeader.ByteOrder", Field, 0}, + {"FileHeader.Class", Field, 0}, + {"FileHeader.Data", Field, 0}, + {"FileHeader.Entry", Field, 1}, + {"FileHeader.Machine", Field, 0}, + {"FileHeader.OSABI", Field, 0}, + {"FileHeader.Type", Field, 0}, + {"FileHeader.Version", Field, 0}, + {"FormatError", Type, 0}, + {"Header32", Type, 0}, + {"Header32.Ehsize", Field, 0}, + {"Header32.Entry", Field, 0}, + {"Header32.Flags", Field, 0}, + {"Header32.Ident", Field, 0}, + {"Header32.Machine", Field, 0}, + {"Header32.Phentsize", Field, 0}, + {"Header32.Phnum", Field, 0}, + {"Header32.Phoff", Field, 0}, + {"Header32.Shentsize", Field, 0}, + {"Header32.Shnum", Field, 0}, + {"Header32.Shoff", Field, 0}, + {"Header32.Shstrndx", Field, 0}, + {"Header32.Type", Field, 0}, + {"Header32.Version", Field, 0}, + {"Header64", Type, 0}, + {"Header64.Ehsize", Field, 0}, + {"Header64.Entry", Field, 0}, + {"Header64.Flags", Field, 0}, + {"Header64.Ident", Field, 0}, + {"Header64.Machine", Field, 0}, + {"Header64.Phentsize", Field, 0}, + {"Header64.Phnum", Field, 0}, + {"Header64.Phoff", 
Field, 0}, + {"Header64.Shentsize", Field, 0}, + {"Header64.Shnum", Field, 0}, + {"Header64.Shoff", Field, 0}, + {"Header64.Shstrndx", Field, 0}, + {"Header64.Type", Field, 0}, + {"Header64.Version", Field, 0}, + {"ImportedSymbol", Type, 0}, + {"ImportedSymbol.Library", Field, 0}, + {"ImportedSymbol.Name", Field, 0}, + {"ImportedSymbol.Version", Field, 0}, + {"Machine", Type, 0}, + {"NT_FPREGSET", Const, 0}, + {"NT_PRPSINFO", Const, 0}, + {"NT_PRSTATUS", Const, 0}, + {"NType", Type, 0}, + {"NewFile", Func, 0}, + {"OSABI", Type, 0}, + {"Open", Func, 0}, + {"PF_MASKOS", Const, 0}, + {"PF_MASKPROC", Const, 0}, + {"PF_R", Const, 0}, + {"PF_W", Const, 0}, + {"PF_X", Const, 0}, + {"PT_AARCH64_ARCHEXT", Const, 16}, + {"PT_AARCH64_UNWIND", Const, 16}, + {"PT_ARM_ARCHEXT", Const, 16}, + {"PT_ARM_EXIDX", Const, 16}, + {"PT_DYNAMIC", Const, 0}, + {"PT_GNU_EH_FRAME", Const, 16}, + {"PT_GNU_MBIND_HI", Const, 16}, + {"PT_GNU_MBIND_LO", Const, 16}, + {"PT_GNU_PROPERTY", Const, 16}, + {"PT_GNU_RELRO", Const, 16}, + {"PT_GNU_STACK", Const, 16}, + {"PT_HIOS", Const, 0}, + {"PT_HIPROC", Const, 0}, + {"PT_INTERP", Const, 0}, + {"PT_LOAD", Const, 0}, + {"PT_LOOS", Const, 0}, + {"PT_LOPROC", Const, 0}, + {"PT_MIPS_ABIFLAGS", Const, 16}, + {"PT_MIPS_OPTIONS", Const, 16}, + {"PT_MIPS_REGINFO", Const, 16}, + {"PT_MIPS_RTPROC", Const, 16}, + {"PT_NOTE", Const, 0}, + {"PT_NULL", Const, 0}, + {"PT_OPENBSD_BOOTDATA", Const, 16}, + {"PT_OPENBSD_RANDOMIZE", Const, 16}, + {"PT_OPENBSD_WXNEEDED", Const, 16}, + {"PT_PAX_FLAGS", Const, 16}, + {"PT_PHDR", Const, 0}, + {"PT_S390_PGSTE", Const, 16}, + {"PT_SHLIB", Const, 0}, + {"PT_SUNWSTACK", Const, 16}, + {"PT_SUNW_EH_FRAME", Const, 16}, + {"PT_TLS", Const, 0}, + {"Prog", Type, 0}, + {"Prog.ProgHeader", Field, 0}, + {"Prog.ReaderAt", Field, 0}, + {"Prog32", Type, 0}, + {"Prog32.Align", Field, 0}, + {"Prog32.Filesz", Field, 0}, + {"Prog32.Flags", Field, 0}, + {"Prog32.Memsz", Field, 0}, + {"Prog32.Off", Field, 0}, + {"Prog32.Paddr", Field, 0}, + 
{"Prog32.Type", Field, 0}, + {"Prog32.Vaddr", Field, 0}, + {"Prog64", Type, 0}, + {"Prog64.Align", Field, 0}, + {"Prog64.Filesz", Field, 0}, + {"Prog64.Flags", Field, 0}, + {"Prog64.Memsz", Field, 0}, + {"Prog64.Off", Field, 0}, + {"Prog64.Paddr", Field, 0}, + {"Prog64.Type", Field, 0}, + {"Prog64.Vaddr", Field, 0}, + {"ProgFlag", Type, 0}, + {"ProgHeader", Type, 0}, + {"ProgHeader.Align", Field, 0}, + {"ProgHeader.Filesz", Field, 0}, + {"ProgHeader.Flags", Field, 0}, + {"ProgHeader.Memsz", Field, 0}, + {"ProgHeader.Off", Field, 0}, + {"ProgHeader.Paddr", Field, 0}, + {"ProgHeader.Type", Field, 0}, + {"ProgHeader.Vaddr", Field, 0}, + {"ProgType", Type, 0}, + {"R_386", Type, 0}, + {"R_386_16", Const, 10}, + {"R_386_32", Const, 0}, + {"R_386_32PLT", Const, 10}, + {"R_386_8", Const, 10}, + {"R_386_COPY", Const, 0}, + {"R_386_GLOB_DAT", Const, 0}, + {"R_386_GOT32", Const, 0}, + {"R_386_GOT32X", Const, 10}, + {"R_386_GOTOFF", Const, 0}, + {"R_386_GOTPC", Const, 0}, + {"R_386_IRELATIVE", Const, 10}, + {"R_386_JMP_SLOT", Const, 0}, + {"R_386_NONE", Const, 0}, + {"R_386_PC16", Const, 10}, + {"R_386_PC32", Const, 0}, + {"R_386_PC8", Const, 10}, + {"R_386_PLT32", Const, 0}, + {"R_386_RELATIVE", Const, 0}, + {"R_386_SIZE32", Const, 10}, + {"R_386_TLS_DESC", Const, 10}, + {"R_386_TLS_DESC_CALL", Const, 10}, + {"R_386_TLS_DTPMOD32", Const, 0}, + {"R_386_TLS_DTPOFF32", Const, 0}, + {"R_386_TLS_GD", Const, 0}, + {"R_386_TLS_GD_32", Const, 0}, + {"R_386_TLS_GD_CALL", Const, 0}, + {"R_386_TLS_GD_POP", Const, 0}, + {"R_386_TLS_GD_PUSH", Const, 0}, + {"R_386_TLS_GOTDESC", Const, 10}, + {"R_386_TLS_GOTIE", Const, 0}, + {"R_386_TLS_IE", Const, 0}, + {"R_386_TLS_IE_32", Const, 0}, + {"R_386_TLS_LDM", Const, 0}, + {"R_386_TLS_LDM_32", Const, 0}, + {"R_386_TLS_LDM_CALL", Const, 0}, + {"R_386_TLS_LDM_POP", Const, 0}, + {"R_386_TLS_LDM_PUSH", Const, 0}, + {"R_386_TLS_LDO_32", Const, 0}, + {"R_386_TLS_LE", Const, 0}, + {"R_386_TLS_LE_32", Const, 0}, + {"R_386_TLS_TPOFF", Const, 0}, + 
{"R_386_TLS_TPOFF32", Const, 0}, + {"R_390", Type, 7}, + {"R_390_12", Const, 7}, + {"R_390_16", Const, 7}, + {"R_390_20", Const, 7}, + {"R_390_32", Const, 7}, + {"R_390_64", Const, 7}, + {"R_390_8", Const, 7}, + {"R_390_COPY", Const, 7}, + {"R_390_GLOB_DAT", Const, 7}, + {"R_390_GOT12", Const, 7}, + {"R_390_GOT16", Const, 7}, + {"R_390_GOT20", Const, 7}, + {"R_390_GOT32", Const, 7}, + {"R_390_GOT64", Const, 7}, + {"R_390_GOTENT", Const, 7}, + {"R_390_GOTOFF", Const, 7}, + {"R_390_GOTOFF16", Const, 7}, + {"R_390_GOTOFF64", Const, 7}, + {"R_390_GOTPC", Const, 7}, + {"R_390_GOTPCDBL", Const, 7}, + {"R_390_GOTPLT12", Const, 7}, + {"R_390_GOTPLT16", Const, 7}, + {"R_390_GOTPLT20", Const, 7}, + {"R_390_GOTPLT32", Const, 7}, + {"R_390_GOTPLT64", Const, 7}, + {"R_390_GOTPLTENT", Const, 7}, + {"R_390_GOTPLTOFF16", Const, 7}, + {"R_390_GOTPLTOFF32", Const, 7}, + {"R_390_GOTPLTOFF64", Const, 7}, + {"R_390_JMP_SLOT", Const, 7}, + {"R_390_NONE", Const, 7}, + {"R_390_PC16", Const, 7}, + {"R_390_PC16DBL", Const, 7}, + {"R_390_PC32", Const, 7}, + {"R_390_PC32DBL", Const, 7}, + {"R_390_PC64", Const, 7}, + {"R_390_PLT16DBL", Const, 7}, + {"R_390_PLT32", Const, 7}, + {"R_390_PLT32DBL", Const, 7}, + {"R_390_PLT64", Const, 7}, + {"R_390_RELATIVE", Const, 7}, + {"R_390_TLS_DTPMOD", Const, 7}, + {"R_390_TLS_DTPOFF", Const, 7}, + {"R_390_TLS_GD32", Const, 7}, + {"R_390_TLS_GD64", Const, 7}, + {"R_390_TLS_GDCALL", Const, 7}, + {"R_390_TLS_GOTIE12", Const, 7}, + {"R_390_TLS_GOTIE20", Const, 7}, + {"R_390_TLS_GOTIE32", Const, 7}, + {"R_390_TLS_GOTIE64", Const, 7}, + {"R_390_TLS_IE32", Const, 7}, + {"R_390_TLS_IE64", Const, 7}, + {"R_390_TLS_IEENT", Const, 7}, + {"R_390_TLS_LDCALL", Const, 7}, + {"R_390_TLS_LDM32", Const, 7}, + {"R_390_TLS_LDM64", Const, 7}, + {"R_390_TLS_LDO32", Const, 7}, + {"R_390_TLS_LDO64", Const, 7}, + {"R_390_TLS_LE32", Const, 7}, + {"R_390_TLS_LE64", Const, 7}, + {"R_390_TLS_LOAD", Const, 7}, + {"R_390_TLS_TPOFF", Const, 7}, + {"R_AARCH64", Type, 4}, + 
{"R_AARCH64_ABS16", Const, 4}, + {"R_AARCH64_ABS32", Const, 4}, + {"R_AARCH64_ABS64", Const, 4}, + {"R_AARCH64_ADD_ABS_LO12_NC", Const, 4}, + {"R_AARCH64_ADR_GOT_PAGE", Const, 4}, + {"R_AARCH64_ADR_PREL_LO21", Const, 4}, + {"R_AARCH64_ADR_PREL_PG_HI21", Const, 4}, + {"R_AARCH64_ADR_PREL_PG_HI21_NC", Const, 4}, + {"R_AARCH64_CALL26", Const, 4}, + {"R_AARCH64_CONDBR19", Const, 4}, + {"R_AARCH64_COPY", Const, 4}, + {"R_AARCH64_GLOB_DAT", Const, 4}, + {"R_AARCH64_GOT_LD_PREL19", Const, 4}, + {"R_AARCH64_IRELATIVE", Const, 4}, + {"R_AARCH64_JUMP26", Const, 4}, + {"R_AARCH64_JUMP_SLOT", Const, 4}, + {"R_AARCH64_LD64_GOTOFF_LO15", Const, 10}, + {"R_AARCH64_LD64_GOTPAGE_LO15", Const, 10}, + {"R_AARCH64_LD64_GOT_LO12_NC", Const, 4}, + {"R_AARCH64_LDST128_ABS_LO12_NC", Const, 4}, + {"R_AARCH64_LDST16_ABS_LO12_NC", Const, 4}, + {"R_AARCH64_LDST32_ABS_LO12_NC", Const, 4}, + {"R_AARCH64_LDST64_ABS_LO12_NC", Const, 4}, + {"R_AARCH64_LDST8_ABS_LO12_NC", Const, 4}, + {"R_AARCH64_LD_PREL_LO19", Const, 4}, + {"R_AARCH64_MOVW_SABS_G0", Const, 4}, + {"R_AARCH64_MOVW_SABS_G1", Const, 4}, + {"R_AARCH64_MOVW_SABS_G2", Const, 4}, + {"R_AARCH64_MOVW_UABS_G0", Const, 4}, + {"R_AARCH64_MOVW_UABS_G0_NC", Const, 4}, + {"R_AARCH64_MOVW_UABS_G1", Const, 4}, + {"R_AARCH64_MOVW_UABS_G1_NC", Const, 4}, + {"R_AARCH64_MOVW_UABS_G2", Const, 4}, + {"R_AARCH64_MOVW_UABS_G2_NC", Const, 4}, + {"R_AARCH64_MOVW_UABS_G3", Const, 4}, + {"R_AARCH64_NONE", Const, 4}, + {"R_AARCH64_NULL", Const, 4}, + {"R_AARCH64_P32_ABS16", Const, 4}, + {"R_AARCH64_P32_ABS32", Const, 4}, + {"R_AARCH64_P32_ADD_ABS_LO12_NC", Const, 4}, + {"R_AARCH64_P32_ADR_GOT_PAGE", Const, 4}, + {"R_AARCH64_P32_ADR_PREL_LO21", Const, 4}, + {"R_AARCH64_P32_ADR_PREL_PG_HI21", Const, 4}, + {"R_AARCH64_P32_CALL26", Const, 4}, + {"R_AARCH64_P32_CONDBR19", Const, 4}, + {"R_AARCH64_P32_COPY", Const, 4}, + {"R_AARCH64_P32_GLOB_DAT", Const, 4}, + {"R_AARCH64_P32_GOT_LD_PREL19", Const, 4}, + {"R_AARCH64_P32_IRELATIVE", Const, 4}, + 
{"R_AARCH64_P32_JUMP26", Const, 4}, + {"R_AARCH64_P32_JUMP_SLOT", Const, 4}, + {"R_AARCH64_P32_LD32_GOT_LO12_NC", Const, 4}, + {"R_AARCH64_P32_LDST128_ABS_LO12_NC", Const, 4}, + {"R_AARCH64_P32_LDST16_ABS_LO12_NC", Const, 4}, + {"R_AARCH64_P32_LDST32_ABS_LO12_NC", Const, 4}, + {"R_AARCH64_P32_LDST64_ABS_LO12_NC", Const, 4}, + {"R_AARCH64_P32_LDST8_ABS_LO12_NC", Const, 4}, + {"R_AARCH64_P32_LD_PREL_LO19", Const, 4}, + {"R_AARCH64_P32_MOVW_SABS_G0", Const, 4}, + {"R_AARCH64_P32_MOVW_UABS_G0", Const, 4}, + {"R_AARCH64_P32_MOVW_UABS_G0_NC", Const, 4}, + {"R_AARCH64_P32_MOVW_UABS_G1", Const, 4}, + {"R_AARCH64_P32_PREL16", Const, 4}, + {"R_AARCH64_P32_PREL32", Const, 4}, + {"R_AARCH64_P32_RELATIVE", Const, 4}, + {"R_AARCH64_P32_TLSDESC", Const, 4}, + {"R_AARCH64_P32_TLSDESC_ADD_LO12_NC", Const, 4}, + {"R_AARCH64_P32_TLSDESC_ADR_PAGE21", Const, 4}, + {"R_AARCH64_P32_TLSDESC_ADR_PREL21", Const, 4}, + {"R_AARCH64_P32_TLSDESC_CALL", Const, 4}, + {"R_AARCH64_P32_TLSDESC_LD32_LO12_NC", Const, 4}, + {"R_AARCH64_P32_TLSDESC_LD_PREL19", Const, 4}, + {"R_AARCH64_P32_TLSGD_ADD_LO12_NC", Const, 4}, + {"R_AARCH64_P32_TLSGD_ADR_PAGE21", Const, 4}, + {"R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21", Const, 4}, + {"R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC", Const, 4}, + {"R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19", Const, 4}, + {"R_AARCH64_P32_TLSLE_ADD_TPREL_HI12", Const, 4}, + {"R_AARCH64_P32_TLSLE_ADD_TPREL_LO12", Const, 4}, + {"R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC", Const, 4}, + {"R_AARCH64_P32_TLSLE_MOVW_TPREL_G0", Const, 4}, + {"R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC", Const, 4}, + {"R_AARCH64_P32_TLSLE_MOVW_TPREL_G1", Const, 4}, + {"R_AARCH64_P32_TLS_DTPMOD", Const, 4}, + {"R_AARCH64_P32_TLS_DTPREL", Const, 4}, + {"R_AARCH64_P32_TLS_TPREL", Const, 4}, + {"R_AARCH64_P32_TSTBR14", Const, 4}, + {"R_AARCH64_PREL16", Const, 4}, + {"R_AARCH64_PREL32", Const, 4}, + {"R_AARCH64_PREL64", Const, 4}, + {"R_AARCH64_RELATIVE", Const, 4}, + {"R_AARCH64_TLSDESC", Const, 4}, + {"R_AARCH64_TLSDESC_ADD", 
Const, 4}, + {"R_AARCH64_TLSDESC_ADD_LO12_NC", Const, 4}, + {"R_AARCH64_TLSDESC_ADR_PAGE21", Const, 4}, + {"R_AARCH64_TLSDESC_ADR_PREL21", Const, 4}, + {"R_AARCH64_TLSDESC_CALL", Const, 4}, + {"R_AARCH64_TLSDESC_LD64_LO12_NC", Const, 4}, + {"R_AARCH64_TLSDESC_LDR", Const, 4}, + {"R_AARCH64_TLSDESC_LD_PREL19", Const, 4}, + {"R_AARCH64_TLSDESC_OFF_G0_NC", Const, 4}, + {"R_AARCH64_TLSDESC_OFF_G1", Const, 4}, + {"R_AARCH64_TLSGD_ADD_LO12_NC", Const, 4}, + {"R_AARCH64_TLSGD_ADR_PAGE21", Const, 4}, + {"R_AARCH64_TLSGD_ADR_PREL21", Const, 10}, + {"R_AARCH64_TLSGD_MOVW_G0_NC", Const, 10}, + {"R_AARCH64_TLSGD_MOVW_G1", Const, 10}, + {"R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21", Const, 4}, + {"R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC", Const, 4}, + {"R_AARCH64_TLSIE_LD_GOTTPREL_PREL19", Const, 4}, + {"R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC", Const, 4}, + {"R_AARCH64_TLSIE_MOVW_GOTTPREL_G1", Const, 4}, + {"R_AARCH64_TLSLD_ADR_PAGE21", Const, 10}, + {"R_AARCH64_TLSLD_ADR_PREL21", Const, 10}, + {"R_AARCH64_TLSLD_LDST128_DTPREL_LO12", Const, 10}, + {"R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC", Const, 10}, + {"R_AARCH64_TLSLE_ADD_TPREL_HI12", Const, 4}, + {"R_AARCH64_TLSLE_ADD_TPREL_LO12", Const, 4}, + {"R_AARCH64_TLSLE_ADD_TPREL_LO12_NC", Const, 4}, + {"R_AARCH64_TLSLE_LDST128_TPREL_LO12", Const, 10}, + {"R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC", Const, 10}, + {"R_AARCH64_TLSLE_MOVW_TPREL_G0", Const, 4}, + {"R_AARCH64_TLSLE_MOVW_TPREL_G0_NC", Const, 4}, + {"R_AARCH64_TLSLE_MOVW_TPREL_G1", Const, 4}, + {"R_AARCH64_TLSLE_MOVW_TPREL_G1_NC", Const, 4}, + {"R_AARCH64_TLSLE_MOVW_TPREL_G2", Const, 4}, + {"R_AARCH64_TLS_DTPMOD64", Const, 4}, + {"R_AARCH64_TLS_DTPREL64", Const, 4}, + {"R_AARCH64_TLS_TPREL64", Const, 4}, + {"R_AARCH64_TSTBR14", Const, 4}, + {"R_ALPHA", Type, 0}, + {"R_ALPHA_BRADDR", Const, 0}, + {"R_ALPHA_COPY", Const, 0}, + {"R_ALPHA_GLOB_DAT", Const, 0}, + {"R_ALPHA_GPDISP", Const, 0}, + {"R_ALPHA_GPREL32", Const, 0}, + {"R_ALPHA_GPRELHIGH", Const, 0}, + {"R_ALPHA_GPRELLOW", Const, 0}, 
+ {"R_ALPHA_GPVALUE", Const, 0}, + {"R_ALPHA_HINT", Const, 0}, + {"R_ALPHA_IMMED_BR_HI32", Const, 0}, + {"R_ALPHA_IMMED_GP_16", Const, 0}, + {"R_ALPHA_IMMED_GP_HI32", Const, 0}, + {"R_ALPHA_IMMED_LO32", Const, 0}, + {"R_ALPHA_IMMED_SCN_HI32", Const, 0}, + {"R_ALPHA_JMP_SLOT", Const, 0}, + {"R_ALPHA_LITERAL", Const, 0}, + {"R_ALPHA_LITUSE", Const, 0}, + {"R_ALPHA_NONE", Const, 0}, + {"R_ALPHA_OP_PRSHIFT", Const, 0}, + {"R_ALPHA_OP_PSUB", Const, 0}, + {"R_ALPHA_OP_PUSH", Const, 0}, + {"R_ALPHA_OP_STORE", Const, 0}, + {"R_ALPHA_REFLONG", Const, 0}, + {"R_ALPHA_REFQUAD", Const, 0}, + {"R_ALPHA_RELATIVE", Const, 0}, + {"R_ALPHA_SREL16", Const, 0}, + {"R_ALPHA_SREL32", Const, 0}, + {"R_ALPHA_SREL64", Const, 0}, + {"R_ARM", Type, 0}, + {"R_ARM_ABS12", Const, 0}, + {"R_ARM_ABS16", Const, 0}, + {"R_ARM_ABS32", Const, 0}, + {"R_ARM_ABS32_NOI", Const, 10}, + {"R_ARM_ABS8", Const, 0}, + {"R_ARM_ALU_PCREL_15_8", Const, 10}, + {"R_ARM_ALU_PCREL_23_15", Const, 10}, + {"R_ARM_ALU_PCREL_7_0", Const, 10}, + {"R_ARM_ALU_PC_G0", Const, 10}, + {"R_ARM_ALU_PC_G0_NC", Const, 10}, + {"R_ARM_ALU_PC_G1", Const, 10}, + {"R_ARM_ALU_PC_G1_NC", Const, 10}, + {"R_ARM_ALU_PC_G2", Const, 10}, + {"R_ARM_ALU_SBREL_19_12_NC", Const, 10}, + {"R_ARM_ALU_SBREL_27_20_CK", Const, 10}, + {"R_ARM_ALU_SB_G0", Const, 10}, + {"R_ARM_ALU_SB_G0_NC", Const, 10}, + {"R_ARM_ALU_SB_G1", Const, 10}, + {"R_ARM_ALU_SB_G1_NC", Const, 10}, + {"R_ARM_ALU_SB_G2", Const, 10}, + {"R_ARM_AMP_VCALL9", Const, 0}, + {"R_ARM_BASE_ABS", Const, 10}, + {"R_ARM_CALL", Const, 10}, + {"R_ARM_COPY", Const, 0}, + {"R_ARM_GLOB_DAT", Const, 0}, + {"R_ARM_GNU_VTENTRY", Const, 0}, + {"R_ARM_GNU_VTINHERIT", Const, 0}, + {"R_ARM_GOT32", Const, 0}, + {"R_ARM_GOTOFF", Const, 0}, + {"R_ARM_GOTOFF12", Const, 10}, + {"R_ARM_GOTPC", Const, 0}, + {"R_ARM_GOTRELAX", Const, 10}, + {"R_ARM_GOT_ABS", Const, 10}, + {"R_ARM_GOT_BREL12", Const, 10}, + {"R_ARM_GOT_PREL", Const, 10}, + {"R_ARM_IRELATIVE", Const, 10}, + {"R_ARM_JUMP24", Const, 10}, + 
{"R_ARM_JUMP_SLOT", Const, 0}, + {"R_ARM_LDC_PC_G0", Const, 10}, + {"R_ARM_LDC_PC_G1", Const, 10}, + {"R_ARM_LDC_PC_G2", Const, 10}, + {"R_ARM_LDC_SB_G0", Const, 10}, + {"R_ARM_LDC_SB_G1", Const, 10}, + {"R_ARM_LDC_SB_G2", Const, 10}, + {"R_ARM_LDRS_PC_G0", Const, 10}, + {"R_ARM_LDRS_PC_G1", Const, 10}, + {"R_ARM_LDRS_PC_G2", Const, 10}, + {"R_ARM_LDRS_SB_G0", Const, 10}, + {"R_ARM_LDRS_SB_G1", Const, 10}, + {"R_ARM_LDRS_SB_G2", Const, 10}, + {"R_ARM_LDR_PC_G1", Const, 10}, + {"R_ARM_LDR_PC_G2", Const, 10}, + {"R_ARM_LDR_SBREL_11_10_NC", Const, 10}, + {"R_ARM_LDR_SB_G0", Const, 10}, + {"R_ARM_LDR_SB_G1", Const, 10}, + {"R_ARM_LDR_SB_G2", Const, 10}, + {"R_ARM_ME_TOO", Const, 10}, + {"R_ARM_MOVT_ABS", Const, 10}, + {"R_ARM_MOVT_BREL", Const, 10}, + {"R_ARM_MOVT_PREL", Const, 10}, + {"R_ARM_MOVW_ABS_NC", Const, 10}, + {"R_ARM_MOVW_BREL", Const, 10}, + {"R_ARM_MOVW_BREL_NC", Const, 10}, + {"R_ARM_MOVW_PREL_NC", Const, 10}, + {"R_ARM_NONE", Const, 0}, + {"R_ARM_PC13", Const, 0}, + {"R_ARM_PC24", Const, 0}, + {"R_ARM_PLT32", Const, 0}, + {"R_ARM_PLT32_ABS", Const, 10}, + {"R_ARM_PREL31", Const, 10}, + {"R_ARM_PRIVATE_0", Const, 10}, + {"R_ARM_PRIVATE_1", Const, 10}, + {"R_ARM_PRIVATE_10", Const, 10}, + {"R_ARM_PRIVATE_11", Const, 10}, + {"R_ARM_PRIVATE_12", Const, 10}, + {"R_ARM_PRIVATE_13", Const, 10}, + {"R_ARM_PRIVATE_14", Const, 10}, + {"R_ARM_PRIVATE_15", Const, 10}, + {"R_ARM_PRIVATE_2", Const, 10}, + {"R_ARM_PRIVATE_3", Const, 10}, + {"R_ARM_PRIVATE_4", Const, 10}, + {"R_ARM_PRIVATE_5", Const, 10}, + {"R_ARM_PRIVATE_6", Const, 10}, + {"R_ARM_PRIVATE_7", Const, 10}, + {"R_ARM_PRIVATE_8", Const, 10}, + {"R_ARM_PRIVATE_9", Const, 10}, + {"R_ARM_RABS32", Const, 0}, + {"R_ARM_RBASE", Const, 0}, + {"R_ARM_REL32", Const, 0}, + {"R_ARM_REL32_NOI", Const, 10}, + {"R_ARM_RELATIVE", Const, 0}, + {"R_ARM_RPC24", Const, 0}, + {"R_ARM_RREL32", Const, 0}, + {"R_ARM_RSBREL32", Const, 0}, + {"R_ARM_RXPC25", Const, 10}, + {"R_ARM_SBREL31", Const, 10}, + {"R_ARM_SBREL32", Const, 
0}, + {"R_ARM_SWI24", Const, 0}, + {"R_ARM_TARGET1", Const, 10}, + {"R_ARM_TARGET2", Const, 10}, + {"R_ARM_THM_ABS5", Const, 0}, + {"R_ARM_THM_ALU_ABS_G0_NC", Const, 10}, + {"R_ARM_THM_ALU_ABS_G1_NC", Const, 10}, + {"R_ARM_THM_ALU_ABS_G2_NC", Const, 10}, + {"R_ARM_THM_ALU_ABS_G3", Const, 10}, + {"R_ARM_THM_ALU_PREL_11_0", Const, 10}, + {"R_ARM_THM_GOT_BREL12", Const, 10}, + {"R_ARM_THM_JUMP11", Const, 10}, + {"R_ARM_THM_JUMP19", Const, 10}, + {"R_ARM_THM_JUMP24", Const, 10}, + {"R_ARM_THM_JUMP6", Const, 10}, + {"R_ARM_THM_JUMP8", Const, 10}, + {"R_ARM_THM_MOVT_ABS", Const, 10}, + {"R_ARM_THM_MOVT_BREL", Const, 10}, + {"R_ARM_THM_MOVT_PREL", Const, 10}, + {"R_ARM_THM_MOVW_ABS_NC", Const, 10}, + {"R_ARM_THM_MOVW_BREL", Const, 10}, + {"R_ARM_THM_MOVW_BREL_NC", Const, 10}, + {"R_ARM_THM_MOVW_PREL_NC", Const, 10}, + {"R_ARM_THM_PC12", Const, 10}, + {"R_ARM_THM_PC22", Const, 0}, + {"R_ARM_THM_PC8", Const, 0}, + {"R_ARM_THM_RPC22", Const, 0}, + {"R_ARM_THM_SWI8", Const, 0}, + {"R_ARM_THM_TLS_CALL", Const, 10}, + {"R_ARM_THM_TLS_DESCSEQ16", Const, 10}, + {"R_ARM_THM_TLS_DESCSEQ32", Const, 10}, + {"R_ARM_THM_XPC22", Const, 0}, + {"R_ARM_TLS_CALL", Const, 10}, + {"R_ARM_TLS_DESCSEQ", Const, 10}, + {"R_ARM_TLS_DTPMOD32", Const, 10}, + {"R_ARM_TLS_DTPOFF32", Const, 10}, + {"R_ARM_TLS_GD32", Const, 10}, + {"R_ARM_TLS_GOTDESC", Const, 10}, + {"R_ARM_TLS_IE12GP", Const, 10}, + {"R_ARM_TLS_IE32", Const, 10}, + {"R_ARM_TLS_LDM32", Const, 10}, + {"R_ARM_TLS_LDO12", Const, 10}, + {"R_ARM_TLS_LDO32", Const, 10}, + {"R_ARM_TLS_LE12", Const, 10}, + {"R_ARM_TLS_LE32", Const, 10}, + {"R_ARM_TLS_TPOFF32", Const, 10}, + {"R_ARM_V4BX", Const, 10}, + {"R_ARM_XPC25", Const, 0}, + {"R_INFO", Func, 0}, + {"R_INFO32", Func, 0}, + {"R_LARCH", Type, 19}, + {"R_LARCH_32", Const, 19}, + {"R_LARCH_32_PCREL", Const, 20}, + {"R_LARCH_64", Const, 19}, + {"R_LARCH_64_PCREL", Const, 22}, + {"R_LARCH_ABS64_HI12", Const, 20}, + {"R_LARCH_ABS64_LO20", Const, 20}, + {"R_LARCH_ABS_HI20", Const, 20}, + 
{"R_LARCH_ABS_LO12", Const, 20}, + {"R_LARCH_ADD16", Const, 19}, + {"R_LARCH_ADD24", Const, 19}, + {"R_LARCH_ADD32", Const, 19}, + {"R_LARCH_ADD6", Const, 22}, + {"R_LARCH_ADD64", Const, 19}, + {"R_LARCH_ADD8", Const, 19}, + {"R_LARCH_ADD_ULEB128", Const, 22}, + {"R_LARCH_ALIGN", Const, 22}, + {"R_LARCH_B16", Const, 20}, + {"R_LARCH_B21", Const, 20}, + {"R_LARCH_B26", Const, 20}, + {"R_LARCH_CFA", Const, 22}, + {"R_LARCH_COPY", Const, 19}, + {"R_LARCH_DELETE", Const, 22}, + {"R_LARCH_GNU_VTENTRY", Const, 20}, + {"R_LARCH_GNU_VTINHERIT", Const, 20}, + {"R_LARCH_GOT64_HI12", Const, 20}, + {"R_LARCH_GOT64_LO20", Const, 20}, + {"R_LARCH_GOT64_PC_HI12", Const, 20}, + {"R_LARCH_GOT64_PC_LO20", Const, 20}, + {"R_LARCH_GOT_HI20", Const, 20}, + {"R_LARCH_GOT_LO12", Const, 20}, + {"R_LARCH_GOT_PC_HI20", Const, 20}, + {"R_LARCH_GOT_PC_LO12", Const, 20}, + {"R_LARCH_IRELATIVE", Const, 19}, + {"R_LARCH_JUMP_SLOT", Const, 19}, + {"R_LARCH_MARK_LA", Const, 19}, + {"R_LARCH_MARK_PCREL", Const, 19}, + {"R_LARCH_NONE", Const, 19}, + {"R_LARCH_PCALA64_HI12", Const, 20}, + {"R_LARCH_PCALA64_LO20", Const, 20}, + {"R_LARCH_PCALA_HI20", Const, 20}, + {"R_LARCH_PCALA_LO12", Const, 20}, + {"R_LARCH_PCREL20_S2", Const, 22}, + {"R_LARCH_RELATIVE", Const, 19}, + {"R_LARCH_RELAX", Const, 20}, + {"R_LARCH_SOP_ADD", Const, 19}, + {"R_LARCH_SOP_AND", Const, 19}, + {"R_LARCH_SOP_ASSERT", Const, 19}, + {"R_LARCH_SOP_IF_ELSE", Const, 19}, + {"R_LARCH_SOP_NOT", Const, 19}, + {"R_LARCH_SOP_POP_32_S_0_10_10_16_S2", Const, 19}, + {"R_LARCH_SOP_POP_32_S_0_5_10_16_S2", Const, 19}, + {"R_LARCH_SOP_POP_32_S_10_12", Const, 19}, + {"R_LARCH_SOP_POP_32_S_10_16", Const, 19}, + {"R_LARCH_SOP_POP_32_S_10_16_S2", Const, 19}, + {"R_LARCH_SOP_POP_32_S_10_5", Const, 19}, + {"R_LARCH_SOP_POP_32_S_5_20", Const, 19}, + {"R_LARCH_SOP_POP_32_U", Const, 19}, + {"R_LARCH_SOP_POP_32_U_10_12", Const, 19}, + {"R_LARCH_SOP_PUSH_ABSOLUTE", Const, 19}, + {"R_LARCH_SOP_PUSH_DUP", Const, 19}, + {"R_LARCH_SOP_PUSH_GPREL", Const, 
19}, + {"R_LARCH_SOP_PUSH_PCREL", Const, 19}, + {"R_LARCH_SOP_PUSH_PLT_PCREL", Const, 19}, + {"R_LARCH_SOP_PUSH_TLS_GD", Const, 19}, + {"R_LARCH_SOP_PUSH_TLS_GOT", Const, 19}, + {"R_LARCH_SOP_PUSH_TLS_TPREL", Const, 19}, + {"R_LARCH_SOP_SL", Const, 19}, + {"R_LARCH_SOP_SR", Const, 19}, + {"R_LARCH_SOP_SUB", Const, 19}, + {"R_LARCH_SUB16", Const, 19}, + {"R_LARCH_SUB24", Const, 19}, + {"R_LARCH_SUB32", Const, 19}, + {"R_LARCH_SUB6", Const, 22}, + {"R_LARCH_SUB64", Const, 19}, + {"R_LARCH_SUB8", Const, 19}, + {"R_LARCH_SUB_ULEB128", Const, 22}, + {"R_LARCH_TLS_DTPMOD32", Const, 19}, + {"R_LARCH_TLS_DTPMOD64", Const, 19}, + {"R_LARCH_TLS_DTPREL32", Const, 19}, + {"R_LARCH_TLS_DTPREL64", Const, 19}, + {"R_LARCH_TLS_GD_HI20", Const, 20}, + {"R_LARCH_TLS_GD_PC_HI20", Const, 20}, + {"R_LARCH_TLS_IE64_HI12", Const, 20}, + {"R_LARCH_TLS_IE64_LO20", Const, 20}, + {"R_LARCH_TLS_IE64_PC_HI12", Const, 20}, + {"R_LARCH_TLS_IE64_PC_LO20", Const, 20}, + {"R_LARCH_TLS_IE_HI20", Const, 20}, + {"R_LARCH_TLS_IE_LO12", Const, 20}, + {"R_LARCH_TLS_IE_PC_HI20", Const, 20}, + {"R_LARCH_TLS_IE_PC_LO12", Const, 20}, + {"R_LARCH_TLS_LD_HI20", Const, 20}, + {"R_LARCH_TLS_LD_PC_HI20", Const, 20}, + {"R_LARCH_TLS_LE64_HI12", Const, 20}, + {"R_LARCH_TLS_LE64_LO20", Const, 20}, + {"R_LARCH_TLS_LE_HI20", Const, 20}, + {"R_LARCH_TLS_LE_LO12", Const, 20}, + {"R_LARCH_TLS_TPREL32", Const, 19}, + {"R_LARCH_TLS_TPREL64", Const, 19}, + {"R_MIPS", Type, 6}, + {"R_MIPS_16", Const, 6}, + {"R_MIPS_26", Const, 6}, + {"R_MIPS_32", Const, 6}, + {"R_MIPS_64", Const, 6}, + {"R_MIPS_ADD_IMMEDIATE", Const, 6}, + {"R_MIPS_CALL16", Const, 6}, + {"R_MIPS_CALL_HI16", Const, 6}, + {"R_MIPS_CALL_LO16", Const, 6}, + {"R_MIPS_DELETE", Const, 6}, + {"R_MIPS_GOT16", Const, 6}, + {"R_MIPS_GOT_DISP", Const, 6}, + {"R_MIPS_GOT_HI16", Const, 6}, + {"R_MIPS_GOT_LO16", Const, 6}, + {"R_MIPS_GOT_OFST", Const, 6}, + {"R_MIPS_GOT_PAGE", Const, 6}, + {"R_MIPS_GPREL16", Const, 6}, + {"R_MIPS_GPREL32", Const, 6}, + {"R_MIPS_HI16", 
Const, 6}, + {"R_MIPS_HIGHER", Const, 6}, + {"R_MIPS_HIGHEST", Const, 6}, + {"R_MIPS_INSERT_A", Const, 6}, + {"R_MIPS_INSERT_B", Const, 6}, + {"R_MIPS_JALR", Const, 6}, + {"R_MIPS_LITERAL", Const, 6}, + {"R_MIPS_LO16", Const, 6}, + {"R_MIPS_NONE", Const, 6}, + {"R_MIPS_PC16", Const, 6}, + {"R_MIPS_PC32", Const, 22}, + {"R_MIPS_PJUMP", Const, 6}, + {"R_MIPS_REL16", Const, 6}, + {"R_MIPS_REL32", Const, 6}, + {"R_MIPS_RELGOT", Const, 6}, + {"R_MIPS_SCN_DISP", Const, 6}, + {"R_MIPS_SHIFT5", Const, 6}, + {"R_MIPS_SHIFT6", Const, 6}, + {"R_MIPS_SUB", Const, 6}, + {"R_MIPS_TLS_DTPMOD32", Const, 6}, + {"R_MIPS_TLS_DTPMOD64", Const, 6}, + {"R_MIPS_TLS_DTPREL32", Const, 6}, + {"R_MIPS_TLS_DTPREL64", Const, 6}, + {"R_MIPS_TLS_DTPREL_HI16", Const, 6}, + {"R_MIPS_TLS_DTPREL_LO16", Const, 6}, + {"R_MIPS_TLS_GD", Const, 6}, + {"R_MIPS_TLS_GOTTPREL", Const, 6}, + {"R_MIPS_TLS_LDM", Const, 6}, + {"R_MIPS_TLS_TPREL32", Const, 6}, + {"R_MIPS_TLS_TPREL64", Const, 6}, + {"R_MIPS_TLS_TPREL_HI16", Const, 6}, + {"R_MIPS_TLS_TPREL_LO16", Const, 6}, + {"R_PPC", Type, 0}, + {"R_PPC64", Type, 5}, + {"R_PPC64_ADDR14", Const, 5}, + {"R_PPC64_ADDR14_BRNTAKEN", Const, 5}, + {"R_PPC64_ADDR14_BRTAKEN", Const, 5}, + {"R_PPC64_ADDR16", Const, 5}, + {"R_PPC64_ADDR16_DS", Const, 5}, + {"R_PPC64_ADDR16_HA", Const, 5}, + {"R_PPC64_ADDR16_HI", Const, 5}, + {"R_PPC64_ADDR16_HIGH", Const, 10}, + {"R_PPC64_ADDR16_HIGHA", Const, 10}, + {"R_PPC64_ADDR16_HIGHER", Const, 5}, + {"R_PPC64_ADDR16_HIGHER34", Const, 20}, + {"R_PPC64_ADDR16_HIGHERA", Const, 5}, + {"R_PPC64_ADDR16_HIGHERA34", Const, 20}, + {"R_PPC64_ADDR16_HIGHEST", Const, 5}, + {"R_PPC64_ADDR16_HIGHEST34", Const, 20}, + {"R_PPC64_ADDR16_HIGHESTA", Const, 5}, + {"R_PPC64_ADDR16_HIGHESTA34", Const, 20}, + {"R_PPC64_ADDR16_LO", Const, 5}, + {"R_PPC64_ADDR16_LO_DS", Const, 5}, + {"R_PPC64_ADDR24", Const, 5}, + {"R_PPC64_ADDR32", Const, 5}, + {"R_PPC64_ADDR64", Const, 5}, + {"R_PPC64_ADDR64_LOCAL", Const, 10}, + {"R_PPC64_COPY", Const, 20}, + 
{"R_PPC64_D28", Const, 20}, + {"R_PPC64_D34", Const, 20}, + {"R_PPC64_D34_HA30", Const, 20}, + {"R_PPC64_D34_HI30", Const, 20}, + {"R_PPC64_D34_LO", Const, 20}, + {"R_PPC64_DTPMOD64", Const, 5}, + {"R_PPC64_DTPREL16", Const, 5}, + {"R_PPC64_DTPREL16_DS", Const, 5}, + {"R_PPC64_DTPREL16_HA", Const, 5}, + {"R_PPC64_DTPREL16_HI", Const, 5}, + {"R_PPC64_DTPREL16_HIGH", Const, 10}, + {"R_PPC64_DTPREL16_HIGHA", Const, 10}, + {"R_PPC64_DTPREL16_HIGHER", Const, 5}, + {"R_PPC64_DTPREL16_HIGHERA", Const, 5}, + {"R_PPC64_DTPREL16_HIGHEST", Const, 5}, + {"R_PPC64_DTPREL16_HIGHESTA", Const, 5}, + {"R_PPC64_DTPREL16_LO", Const, 5}, + {"R_PPC64_DTPREL16_LO_DS", Const, 5}, + {"R_PPC64_DTPREL34", Const, 20}, + {"R_PPC64_DTPREL64", Const, 5}, + {"R_PPC64_ENTRY", Const, 10}, + {"R_PPC64_GLOB_DAT", Const, 20}, + {"R_PPC64_GNU_VTENTRY", Const, 20}, + {"R_PPC64_GNU_VTINHERIT", Const, 20}, + {"R_PPC64_GOT16", Const, 5}, + {"R_PPC64_GOT16_DS", Const, 5}, + {"R_PPC64_GOT16_HA", Const, 5}, + {"R_PPC64_GOT16_HI", Const, 5}, + {"R_PPC64_GOT16_LO", Const, 5}, + {"R_PPC64_GOT16_LO_DS", Const, 5}, + {"R_PPC64_GOT_DTPREL16_DS", Const, 5}, + {"R_PPC64_GOT_DTPREL16_HA", Const, 5}, + {"R_PPC64_GOT_DTPREL16_HI", Const, 5}, + {"R_PPC64_GOT_DTPREL16_LO_DS", Const, 5}, + {"R_PPC64_GOT_DTPREL_PCREL34", Const, 20}, + {"R_PPC64_GOT_PCREL34", Const, 20}, + {"R_PPC64_GOT_TLSGD16", Const, 5}, + {"R_PPC64_GOT_TLSGD16_HA", Const, 5}, + {"R_PPC64_GOT_TLSGD16_HI", Const, 5}, + {"R_PPC64_GOT_TLSGD16_LO", Const, 5}, + {"R_PPC64_GOT_TLSGD_PCREL34", Const, 20}, + {"R_PPC64_GOT_TLSLD16", Const, 5}, + {"R_PPC64_GOT_TLSLD16_HA", Const, 5}, + {"R_PPC64_GOT_TLSLD16_HI", Const, 5}, + {"R_PPC64_GOT_TLSLD16_LO", Const, 5}, + {"R_PPC64_GOT_TLSLD_PCREL34", Const, 20}, + {"R_PPC64_GOT_TPREL16_DS", Const, 5}, + {"R_PPC64_GOT_TPREL16_HA", Const, 5}, + {"R_PPC64_GOT_TPREL16_HI", Const, 5}, + {"R_PPC64_GOT_TPREL16_LO_DS", Const, 5}, + {"R_PPC64_GOT_TPREL_PCREL34", Const, 20}, + {"R_PPC64_IRELATIVE", Const, 10}, + 
{"R_PPC64_JMP_IREL", Const, 10}, + {"R_PPC64_JMP_SLOT", Const, 5}, + {"R_PPC64_NONE", Const, 5}, + {"R_PPC64_PCREL28", Const, 20}, + {"R_PPC64_PCREL34", Const, 20}, + {"R_PPC64_PCREL_OPT", Const, 20}, + {"R_PPC64_PLT16_HA", Const, 20}, + {"R_PPC64_PLT16_HI", Const, 20}, + {"R_PPC64_PLT16_LO", Const, 20}, + {"R_PPC64_PLT16_LO_DS", Const, 10}, + {"R_PPC64_PLT32", Const, 20}, + {"R_PPC64_PLT64", Const, 20}, + {"R_PPC64_PLTCALL", Const, 20}, + {"R_PPC64_PLTCALL_NOTOC", Const, 20}, + {"R_PPC64_PLTGOT16", Const, 10}, + {"R_PPC64_PLTGOT16_DS", Const, 10}, + {"R_PPC64_PLTGOT16_HA", Const, 10}, + {"R_PPC64_PLTGOT16_HI", Const, 10}, + {"R_PPC64_PLTGOT16_LO", Const, 10}, + {"R_PPC64_PLTGOT_LO_DS", Const, 10}, + {"R_PPC64_PLTREL32", Const, 20}, + {"R_PPC64_PLTREL64", Const, 20}, + {"R_PPC64_PLTSEQ", Const, 20}, + {"R_PPC64_PLTSEQ_NOTOC", Const, 20}, + {"R_PPC64_PLT_PCREL34", Const, 20}, + {"R_PPC64_PLT_PCREL34_NOTOC", Const, 20}, + {"R_PPC64_REL14", Const, 5}, + {"R_PPC64_REL14_BRNTAKEN", Const, 5}, + {"R_PPC64_REL14_BRTAKEN", Const, 5}, + {"R_PPC64_REL16", Const, 5}, + {"R_PPC64_REL16DX_HA", Const, 10}, + {"R_PPC64_REL16_HA", Const, 5}, + {"R_PPC64_REL16_HI", Const, 5}, + {"R_PPC64_REL16_HIGH", Const, 20}, + {"R_PPC64_REL16_HIGHA", Const, 20}, + {"R_PPC64_REL16_HIGHER", Const, 20}, + {"R_PPC64_REL16_HIGHER34", Const, 20}, + {"R_PPC64_REL16_HIGHERA", Const, 20}, + {"R_PPC64_REL16_HIGHERA34", Const, 20}, + {"R_PPC64_REL16_HIGHEST", Const, 20}, + {"R_PPC64_REL16_HIGHEST34", Const, 20}, + {"R_PPC64_REL16_HIGHESTA", Const, 20}, + {"R_PPC64_REL16_HIGHESTA34", Const, 20}, + {"R_PPC64_REL16_LO", Const, 5}, + {"R_PPC64_REL24", Const, 5}, + {"R_PPC64_REL24_NOTOC", Const, 10}, + {"R_PPC64_REL24_P9NOTOC", Const, 21}, + {"R_PPC64_REL30", Const, 20}, + {"R_PPC64_REL32", Const, 5}, + {"R_PPC64_REL64", Const, 5}, + {"R_PPC64_RELATIVE", Const, 18}, + {"R_PPC64_SECTOFF", Const, 20}, + {"R_PPC64_SECTOFF_DS", Const, 10}, + {"R_PPC64_SECTOFF_HA", Const, 20}, + {"R_PPC64_SECTOFF_HI", Const, 20}, + 
{"R_PPC64_SECTOFF_LO", Const, 20}, + {"R_PPC64_SECTOFF_LO_DS", Const, 10}, + {"R_PPC64_TLS", Const, 5}, + {"R_PPC64_TLSGD", Const, 5}, + {"R_PPC64_TLSLD", Const, 5}, + {"R_PPC64_TOC", Const, 5}, + {"R_PPC64_TOC16", Const, 5}, + {"R_PPC64_TOC16_DS", Const, 5}, + {"R_PPC64_TOC16_HA", Const, 5}, + {"R_PPC64_TOC16_HI", Const, 5}, + {"R_PPC64_TOC16_LO", Const, 5}, + {"R_PPC64_TOC16_LO_DS", Const, 5}, + {"R_PPC64_TOCSAVE", Const, 10}, + {"R_PPC64_TPREL16", Const, 5}, + {"R_PPC64_TPREL16_DS", Const, 5}, + {"R_PPC64_TPREL16_HA", Const, 5}, + {"R_PPC64_TPREL16_HI", Const, 5}, + {"R_PPC64_TPREL16_HIGH", Const, 10}, + {"R_PPC64_TPREL16_HIGHA", Const, 10}, + {"R_PPC64_TPREL16_HIGHER", Const, 5}, + {"R_PPC64_TPREL16_HIGHERA", Const, 5}, + {"R_PPC64_TPREL16_HIGHEST", Const, 5}, + {"R_PPC64_TPREL16_HIGHESTA", Const, 5}, + {"R_PPC64_TPREL16_LO", Const, 5}, + {"R_PPC64_TPREL16_LO_DS", Const, 5}, + {"R_PPC64_TPREL34", Const, 20}, + {"R_PPC64_TPREL64", Const, 5}, + {"R_PPC64_UADDR16", Const, 20}, + {"R_PPC64_UADDR32", Const, 20}, + {"R_PPC64_UADDR64", Const, 20}, + {"R_PPC_ADDR14", Const, 0}, + {"R_PPC_ADDR14_BRNTAKEN", Const, 0}, + {"R_PPC_ADDR14_BRTAKEN", Const, 0}, + {"R_PPC_ADDR16", Const, 0}, + {"R_PPC_ADDR16_HA", Const, 0}, + {"R_PPC_ADDR16_HI", Const, 0}, + {"R_PPC_ADDR16_LO", Const, 0}, + {"R_PPC_ADDR24", Const, 0}, + {"R_PPC_ADDR32", Const, 0}, + {"R_PPC_COPY", Const, 0}, + {"R_PPC_DTPMOD32", Const, 0}, + {"R_PPC_DTPREL16", Const, 0}, + {"R_PPC_DTPREL16_HA", Const, 0}, + {"R_PPC_DTPREL16_HI", Const, 0}, + {"R_PPC_DTPREL16_LO", Const, 0}, + {"R_PPC_DTPREL32", Const, 0}, + {"R_PPC_EMB_BIT_FLD", Const, 0}, + {"R_PPC_EMB_MRKREF", Const, 0}, + {"R_PPC_EMB_NADDR16", Const, 0}, + {"R_PPC_EMB_NADDR16_HA", Const, 0}, + {"R_PPC_EMB_NADDR16_HI", Const, 0}, + {"R_PPC_EMB_NADDR16_LO", Const, 0}, + {"R_PPC_EMB_NADDR32", Const, 0}, + {"R_PPC_EMB_RELSDA", Const, 0}, + {"R_PPC_EMB_RELSEC16", Const, 0}, + {"R_PPC_EMB_RELST_HA", Const, 0}, + {"R_PPC_EMB_RELST_HI", Const, 0}, + 
{"R_PPC_EMB_RELST_LO", Const, 0}, + {"R_PPC_EMB_SDA21", Const, 0}, + {"R_PPC_EMB_SDA2I16", Const, 0}, + {"R_PPC_EMB_SDA2REL", Const, 0}, + {"R_PPC_EMB_SDAI16", Const, 0}, + {"R_PPC_GLOB_DAT", Const, 0}, + {"R_PPC_GOT16", Const, 0}, + {"R_PPC_GOT16_HA", Const, 0}, + {"R_PPC_GOT16_HI", Const, 0}, + {"R_PPC_GOT16_LO", Const, 0}, + {"R_PPC_GOT_TLSGD16", Const, 0}, + {"R_PPC_GOT_TLSGD16_HA", Const, 0}, + {"R_PPC_GOT_TLSGD16_HI", Const, 0}, + {"R_PPC_GOT_TLSGD16_LO", Const, 0}, + {"R_PPC_GOT_TLSLD16", Const, 0}, + {"R_PPC_GOT_TLSLD16_HA", Const, 0}, + {"R_PPC_GOT_TLSLD16_HI", Const, 0}, + {"R_PPC_GOT_TLSLD16_LO", Const, 0}, + {"R_PPC_GOT_TPREL16", Const, 0}, + {"R_PPC_GOT_TPREL16_HA", Const, 0}, + {"R_PPC_GOT_TPREL16_HI", Const, 0}, + {"R_PPC_GOT_TPREL16_LO", Const, 0}, + {"R_PPC_JMP_SLOT", Const, 0}, + {"R_PPC_LOCAL24PC", Const, 0}, + {"R_PPC_NONE", Const, 0}, + {"R_PPC_PLT16_HA", Const, 0}, + {"R_PPC_PLT16_HI", Const, 0}, + {"R_PPC_PLT16_LO", Const, 0}, + {"R_PPC_PLT32", Const, 0}, + {"R_PPC_PLTREL24", Const, 0}, + {"R_PPC_PLTREL32", Const, 0}, + {"R_PPC_REL14", Const, 0}, + {"R_PPC_REL14_BRNTAKEN", Const, 0}, + {"R_PPC_REL14_BRTAKEN", Const, 0}, + {"R_PPC_REL24", Const, 0}, + {"R_PPC_REL32", Const, 0}, + {"R_PPC_RELATIVE", Const, 0}, + {"R_PPC_SDAREL16", Const, 0}, + {"R_PPC_SECTOFF", Const, 0}, + {"R_PPC_SECTOFF_HA", Const, 0}, + {"R_PPC_SECTOFF_HI", Const, 0}, + {"R_PPC_SECTOFF_LO", Const, 0}, + {"R_PPC_TLS", Const, 0}, + {"R_PPC_TPREL16", Const, 0}, + {"R_PPC_TPREL16_HA", Const, 0}, + {"R_PPC_TPREL16_HI", Const, 0}, + {"R_PPC_TPREL16_LO", Const, 0}, + {"R_PPC_TPREL32", Const, 0}, + {"R_PPC_UADDR16", Const, 0}, + {"R_PPC_UADDR32", Const, 0}, + {"R_RISCV", Type, 11}, + {"R_RISCV_32", Const, 11}, + {"R_RISCV_32_PCREL", Const, 12}, + {"R_RISCV_64", Const, 11}, + {"R_RISCV_ADD16", Const, 11}, + {"R_RISCV_ADD32", Const, 11}, + {"R_RISCV_ADD64", Const, 11}, + {"R_RISCV_ADD8", Const, 11}, + {"R_RISCV_ALIGN", Const, 11}, + {"R_RISCV_BRANCH", Const, 11}, + {"R_RISCV_CALL", 
Const, 11}, + {"R_RISCV_CALL_PLT", Const, 11}, + {"R_RISCV_COPY", Const, 11}, + {"R_RISCV_GNU_VTENTRY", Const, 11}, + {"R_RISCV_GNU_VTINHERIT", Const, 11}, + {"R_RISCV_GOT_HI20", Const, 11}, + {"R_RISCV_GPREL_I", Const, 11}, + {"R_RISCV_GPREL_S", Const, 11}, + {"R_RISCV_HI20", Const, 11}, + {"R_RISCV_JAL", Const, 11}, + {"R_RISCV_JUMP_SLOT", Const, 11}, + {"R_RISCV_LO12_I", Const, 11}, + {"R_RISCV_LO12_S", Const, 11}, + {"R_RISCV_NONE", Const, 11}, + {"R_RISCV_PCREL_HI20", Const, 11}, + {"R_RISCV_PCREL_LO12_I", Const, 11}, + {"R_RISCV_PCREL_LO12_S", Const, 11}, + {"R_RISCV_RELATIVE", Const, 11}, + {"R_RISCV_RELAX", Const, 11}, + {"R_RISCV_RVC_BRANCH", Const, 11}, + {"R_RISCV_RVC_JUMP", Const, 11}, + {"R_RISCV_RVC_LUI", Const, 11}, + {"R_RISCV_SET16", Const, 11}, + {"R_RISCV_SET32", Const, 11}, + {"R_RISCV_SET6", Const, 11}, + {"R_RISCV_SET8", Const, 11}, + {"R_RISCV_SUB16", Const, 11}, + {"R_RISCV_SUB32", Const, 11}, + {"R_RISCV_SUB6", Const, 11}, + {"R_RISCV_SUB64", Const, 11}, + {"R_RISCV_SUB8", Const, 11}, + {"R_RISCV_TLS_DTPMOD32", Const, 11}, + {"R_RISCV_TLS_DTPMOD64", Const, 11}, + {"R_RISCV_TLS_DTPREL32", Const, 11}, + {"R_RISCV_TLS_DTPREL64", Const, 11}, + {"R_RISCV_TLS_GD_HI20", Const, 11}, + {"R_RISCV_TLS_GOT_HI20", Const, 11}, + {"R_RISCV_TLS_TPREL32", Const, 11}, + {"R_RISCV_TLS_TPREL64", Const, 11}, + {"R_RISCV_TPREL_ADD", Const, 11}, + {"R_RISCV_TPREL_HI20", Const, 11}, + {"R_RISCV_TPREL_I", Const, 11}, + {"R_RISCV_TPREL_LO12_I", Const, 11}, + {"R_RISCV_TPREL_LO12_S", Const, 11}, + {"R_RISCV_TPREL_S", Const, 11}, + {"R_SPARC", Type, 0}, + {"R_SPARC_10", Const, 0}, + {"R_SPARC_11", Const, 0}, + {"R_SPARC_13", Const, 0}, + {"R_SPARC_16", Const, 0}, + {"R_SPARC_22", Const, 0}, + {"R_SPARC_32", Const, 0}, + {"R_SPARC_5", Const, 0}, + {"R_SPARC_6", Const, 0}, + {"R_SPARC_64", Const, 0}, + {"R_SPARC_7", Const, 0}, + {"R_SPARC_8", Const, 0}, + {"R_SPARC_COPY", Const, 0}, + {"R_SPARC_DISP16", Const, 0}, + {"R_SPARC_DISP32", Const, 0}, + {"R_SPARC_DISP64", 
Const, 0}, + {"R_SPARC_DISP8", Const, 0}, + {"R_SPARC_GLOB_DAT", Const, 0}, + {"R_SPARC_GLOB_JMP", Const, 0}, + {"R_SPARC_GOT10", Const, 0}, + {"R_SPARC_GOT13", Const, 0}, + {"R_SPARC_GOT22", Const, 0}, + {"R_SPARC_H44", Const, 0}, + {"R_SPARC_HH22", Const, 0}, + {"R_SPARC_HI22", Const, 0}, + {"R_SPARC_HIPLT22", Const, 0}, + {"R_SPARC_HIX22", Const, 0}, + {"R_SPARC_HM10", Const, 0}, + {"R_SPARC_JMP_SLOT", Const, 0}, + {"R_SPARC_L44", Const, 0}, + {"R_SPARC_LM22", Const, 0}, + {"R_SPARC_LO10", Const, 0}, + {"R_SPARC_LOPLT10", Const, 0}, + {"R_SPARC_LOX10", Const, 0}, + {"R_SPARC_M44", Const, 0}, + {"R_SPARC_NONE", Const, 0}, + {"R_SPARC_OLO10", Const, 0}, + {"R_SPARC_PC10", Const, 0}, + {"R_SPARC_PC22", Const, 0}, + {"R_SPARC_PCPLT10", Const, 0}, + {"R_SPARC_PCPLT22", Const, 0}, + {"R_SPARC_PCPLT32", Const, 0}, + {"R_SPARC_PC_HH22", Const, 0}, + {"R_SPARC_PC_HM10", Const, 0}, + {"R_SPARC_PC_LM22", Const, 0}, + {"R_SPARC_PLT32", Const, 0}, + {"R_SPARC_PLT64", Const, 0}, + {"R_SPARC_REGISTER", Const, 0}, + {"R_SPARC_RELATIVE", Const, 0}, + {"R_SPARC_UA16", Const, 0}, + {"R_SPARC_UA32", Const, 0}, + {"R_SPARC_UA64", Const, 0}, + {"R_SPARC_WDISP16", Const, 0}, + {"R_SPARC_WDISP19", Const, 0}, + {"R_SPARC_WDISP22", Const, 0}, + {"R_SPARC_WDISP30", Const, 0}, + {"R_SPARC_WPLT30", Const, 0}, + {"R_SYM32", Func, 0}, + {"R_SYM64", Func, 0}, + {"R_TYPE32", Func, 0}, + {"R_TYPE64", Func, 0}, + {"R_X86_64", Type, 0}, + {"R_X86_64_16", Const, 0}, + {"R_X86_64_32", Const, 0}, + {"R_X86_64_32S", Const, 0}, + {"R_X86_64_64", Const, 0}, + {"R_X86_64_8", Const, 0}, + {"R_X86_64_COPY", Const, 0}, + {"R_X86_64_DTPMOD64", Const, 0}, + {"R_X86_64_DTPOFF32", Const, 0}, + {"R_X86_64_DTPOFF64", Const, 0}, + {"R_X86_64_GLOB_DAT", Const, 0}, + {"R_X86_64_GOT32", Const, 0}, + {"R_X86_64_GOT64", Const, 10}, + {"R_X86_64_GOTOFF64", Const, 10}, + {"R_X86_64_GOTPC32", Const, 10}, + {"R_X86_64_GOTPC32_TLSDESC", Const, 10}, + {"R_X86_64_GOTPC64", Const, 10}, + {"R_X86_64_GOTPCREL", Const, 0}, + 
{"R_X86_64_GOTPCREL64", Const, 10}, + {"R_X86_64_GOTPCRELX", Const, 10}, + {"R_X86_64_GOTPLT64", Const, 10}, + {"R_X86_64_GOTTPOFF", Const, 0}, + {"R_X86_64_IRELATIVE", Const, 10}, + {"R_X86_64_JMP_SLOT", Const, 0}, + {"R_X86_64_NONE", Const, 0}, + {"R_X86_64_PC16", Const, 0}, + {"R_X86_64_PC32", Const, 0}, + {"R_X86_64_PC32_BND", Const, 10}, + {"R_X86_64_PC64", Const, 10}, + {"R_X86_64_PC8", Const, 0}, + {"R_X86_64_PLT32", Const, 0}, + {"R_X86_64_PLT32_BND", Const, 10}, + {"R_X86_64_PLTOFF64", Const, 10}, + {"R_X86_64_RELATIVE", Const, 0}, + {"R_X86_64_RELATIVE64", Const, 10}, + {"R_X86_64_REX_GOTPCRELX", Const, 10}, + {"R_X86_64_SIZE32", Const, 10}, + {"R_X86_64_SIZE64", Const, 10}, + {"R_X86_64_TLSDESC", Const, 10}, + {"R_X86_64_TLSDESC_CALL", Const, 10}, + {"R_X86_64_TLSGD", Const, 0}, + {"R_X86_64_TLSLD", Const, 0}, + {"R_X86_64_TPOFF32", Const, 0}, + {"R_X86_64_TPOFF64", Const, 0}, + {"Rel32", Type, 0}, + {"Rel32.Info", Field, 0}, + {"Rel32.Off", Field, 0}, + {"Rel64", Type, 0}, + {"Rel64.Info", Field, 0}, + {"Rel64.Off", Field, 0}, + {"Rela32", Type, 0}, + {"Rela32.Addend", Field, 0}, + {"Rela32.Info", Field, 0}, + {"Rela32.Off", Field, 0}, + {"Rela64", Type, 0}, + {"Rela64.Addend", Field, 0}, + {"Rela64.Info", Field, 0}, + {"Rela64.Off", Field, 0}, + {"SHF_ALLOC", Const, 0}, + {"SHF_COMPRESSED", Const, 6}, + {"SHF_EXECINSTR", Const, 0}, + {"SHF_GROUP", Const, 0}, + {"SHF_INFO_LINK", Const, 0}, + {"SHF_LINK_ORDER", Const, 0}, + {"SHF_MASKOS", Const, 0}, + {"SHF_MASKPROC", Const, 0}, + {"SHF_MERGE", Const, 0}, + {"SHF_OS_NONCONFORMING", Const, 0}, + {"SHF_STRINGS", Const, 0}, + {"SHF_TLS", Const, 0}, + {"SHF_WRITE", Const, 0}, + {"SHN_ABS", Const, 0}, + {"SHN_COMMON", Const, 0}, + {"SHN_HIOS", Const, 0}, + {"SHN_HIPROC", Const, 0}, + {"SHN_HIRESERVE", Const, 0}, + {"SHN_LOOS", Const, 0}, + {"SHN_LOPROC", Const, 0}, + {"SHN_LORESERVE", Const, 0}, + {"SHN_UNDEF", Const, 0}, + {"SHN_XINDEX", Const, 0}, + {"SHT_DYNAMIC", Const, 0}, + {"SHT_DYNSYM", Const, 0}, + 
{"SHT_FINI_ARRAY", Const, 0}, + {"SHT_GNU_ATTRIBUTES", Const, 0}, + {"SHT_GNU_HASH", Const, 0}, + {"SHT_GNU_LIBLIST", Const, 0}, + {"SHT_GNU_VERDEF", Const, 0}, + {"SHT_GNU_VERNEED", Const, 0}, + {"SHT_GNU_VERSYM", Const, 0}, + {"SHT_GROUP", Const, 0}, + {"SHT_HASH", Const, 0}, + {"SHT_HIOS", Const, 0}, + {"SHT_HIPROC", Const, 0}, + {"SHT_HIUSER", Const, 0}, + {"SHT_INIT_ARRAY", Const, 0}, + {"SHT_LOOS", Const, 0}, + {"SHT_LOPROC", Const, 0}, + {"SHT_LOUSER", Const, 0}, + {"SHT_MIPS_ABIFLAGS", Const, 17}, + {"SHT_NOBITS", Const, 0}, + {"SHT_NOTE", Const, 0}, + {"SHT_NULL", Const, 0}, + {"SHT_PREINIT_ARRAY", Const, 0}, + {"SHT_PROGBITS", Const, 0}, + {"SHT_REL", Const, 0}, + {"SHT_RELA", Const, 0}, + {"SHT_SHLIB", Const, 0}, + {"SHT_STRTAB", Const, 0}, + {"SHT_SYMTAB", Const, 0}, + {"SHT_SYMTAB_SHNDX", Const, 0}, + {"STB_GLOBAL", Const, 0}, + {"STB_HIOS", Const, 0}, + {"STB_HIPROC", Const, 0}, + {"STB_LOCAL", Const, 0}, + {"STB_LOOS", Const, 0}, + {"STB_LOPROC", Const, 0}, + {"STB_WEAK", Const, 0}, + {"STT_COMMON", Const, 0}, + {"STT_FILE", Const, 0}, + {"STT_FUNC", Const, 0}, + {"STT_HIOS", Const, 0}, + {"STT_HIPROC", Const, 0}, + {"STT_LOOS", Const, 0}, + {"STT_LOPROC", Const, 0}, + {"STT_NOTYPE", Const, 0}, + {"STT_OBJECT", Const, 0}, + {"STT_SECTION", Const, 0}, + {"STT_TLS", Const, 0}, + {"STV_DEFAULT", Const, 0}, + {"STV_HIDDEN", Const, 0}, + {"STV_INTERNAL", Const, 0}, + {"STV_PROTECTED", Const, 0}, + {"ST_BIND", Func, 0}, + {"ST_INFO", Func, 0}, + {"ST_TYPE", Func, 0}, + {"ST_VISIBILITY", Func, 0}, + {"Section", Type, 0}, + {"Section.ReaderAt", Field, 0}, + {"Section.SectionHeader", Field, 0}, + {"Section32", Type, 0}, + {"Section32.Addr", Field, 0}, + {"Section32.Addralign", Field, 0}, + {"Section32.Entsize", Field, 0}, + {"Section32.Flags", Field, 0}, + {"Section32.Info", Field, 0}, + {"Section32.Link", Field, 0}, + {"Section32.Name", Field, 0}, + {"Section32.Off", Field, 0}, + {"Section32.Size", Field, 0}, + {"Section32.Type", Field, 0}, + {"Section64", 
Type, 0}, + {"Section64.Addr", Field, 0}, + {"Section64.Addralign", Field, 0}, + {"Section64.Entsize", Field, 0}, + {"Section64.Flags", Field, 0}, + {"Section64.Info", Field, 0}, + {"Section64.Link", Field, 0}, + {"Section64.Name", Field, 0}, + {"Section64.Off", Field, 0}, + {"Section64.Size", Field, 0}, + {"Section64.Type", Field, 0}, + {"SectionFlag", Type, 0}, + {"SectionHeader", Type, 0}, + {"SectionHeader.Addr", Field, 0}, + {"SectionHeader.Addralign", Field, 0}, + {"SectionHeader.Entsize", Field, 0}, + {"SectionHeader.FileSize", Field, 6}, + {"SectionHeader.Flags", Field, 0}, + {"SectionHeader.Info", Field, 0}, + {"SectionHeader.Link", Field, 0}, + {"SectionHeader.Name", Field, 0}, + {"SectionHeader.Offset", Field, 0}, + {"SectionHeader.Size", Field, 0}, + {"SectionHeader.Type", Field, 0}, + {"SectionIndex", Type, 0}, + {"SectionType", Type, 0}, + {"Sym32", Type, 0}, + {"Sym32.Info", Field, 0}, + {"Sym32.Name", Field, 0}, + {"Sym32.Other", Field, 0}, + {"Sym32.Shndx", Field, 0}, + {"Sym32.Size", Field, 0}, + {"Sym32.Value", Field, 0}, + {"Sym32Size", Const, 0}, + {"Sym64", Type, 0}, + {"Sym64.Info", Field, 0}, + {"Sym64.Name", Field, 0}, + {"Sym64.Other", Field, 0}, + {"Sym64.Shndx", Field, 0}, + {"Sym64.Size", Field, 0}, + {"Sym64.Value", Field, 0}, + {"Sym64Size", Const, 0}, + {"SymBind", Type, 0}, + {"SymType", Type, 0}, + {"SymVis", Type, 0}, + {"Symbol", Type, 0}, + {"Symbol.Info", Field, 0}, + {"Symbol.Library", Field, 13}, + {"Symbol.Name", Field, 0}, + {"Symbol.Other", Field, 0}, + {"Symbol.Section", Field, 0}, + {"Symbol.Size", Field, 0}, + {"Symbol.Value", Field, 0}, + {"Symbol.Version", Field, 13}, + {"Type", Type, 0}, + {"Version", Type, 0}, + }, + "debug/gosym": { + {"(*DecodingError).Error", Method, 0}, + {"(*LineTable).LineToPC", Method, 0}, + {"(*LineTable).PCToLine", Method, 0}, + {"(*Sym).BaseName", Method, 0}, + {"(*Sym).PackageName", Method, 0}, + {"(*Sym).ReceiverName", Method, 0}, + {"(*Sym).Static", Method, 0}, + {"(*Table).LineToPC", 
Method, 0}, + {"(*Table).LookupFunc", Method, 0}, + {"(*Table).LookupSym", Method, 0}, + {"(*Table).PCToFunc", Method, 0}, + {"(*Table).PCToLine", Method, 0}, + {"(*Table).SymByAddr", Method, 0}, + {"(*UnknownLineError).Error", Method, 0}, + {"(Func).BaseName", Method, 0}, + {"(Func).PackageName", Method, 0}, + {"(Func).ReceiverName", Method, 0}, + {"(Func).Static", Method, 0}, + {"(UnknownFileError).Error", Method, 0}, + {"DecodingError", Type, 0}, + {"Func", Type, 0}, + {"Func.End", Field, 0}, + {"Func.Entry", Field, 0}, + {"Func.FrameSize", Field, 0}, + {"Func.LineTable", Field, 0}, + {"Func.Locals", Field, 0}, + {"Func.Obj", Field, 0}, + {"Func.Params", Field, 0}, + {"Func.Sym", Field, 0}, + {"LineTable", Type, 0}, + {"LineTable.Data", Field, 0}, + {"LineTable.Line", Field, 0}, + {"LineTable.PC", Field, 0}, + {"NewLineTable", Func, 0}, + {"NewTable", Func, 0}, + {"Obj", Type, 0}, + {"Obj.Funcs", Field, 0}, + {"Obj.Paths", Field, 0}, + {"Sym", Type, 0}, + {"Sym.Func", Field, 0}, + {"Sym.GoType", Field, 0}, + {"Sym.Name", Field, 0}, + {"Sym.Type", Field, 0}, + {"Sym.Value", Field, 0}, + {"Table", Type, 0}, + {"Table.Files", Field, 0}, + {"Table.Funcs", Field, 0}, + {"Table.Objs", Field, 0}, + {"Table.Syms", Field, 0}, + {"UnknownFileError", Type, 0}, + {"UnknownLineError", Type, 0}, + {"UnknownLineError.File", Field, 0}, + {"UnknownLineError.Line", Field, 0}, + }, + "debug/macho": { + {"(*FatFile).Close", Method, 3}, + {"(*File).Close", Method, 0}, + {"(*File).DWARF", Method, 0}, + {"(*File).ImportedLibraries", Method, 0}, + {"(*File).ImportedSymbols", Method, 0}, + {"(*File).Section", Method, 0}, + {"(*File).Segment", Method, 0}, + {"(*FormatError).Error", Method, 0}, + {"(*Section).Data", Method, 0}, + {"(*Section).Open", Method, 0}, + {"(*Segment).Data", Method, 0}, + {"(*Segment).Open", Method, 0}, + {"(Cpu).GoString", Method, 0}, + {"(Cpu).String", Method, 0}, + {"(Dylib).Raw", Method, 0}, + {"(Dysymtab).Raw", Method, 0}, + {"(FatArch).Close", Method, 3}, + 
{"(FatArch).DWARF", Method, 3}, + {"(FatArch).ImportedLibraries", Method, 3}, + {"(FatArch).ImportedSymbols", Method, 3}, + {"(FatArch).Section", Method, 3}, + {"(FatArch).Segment", Method, 3}, + {"(LoadBytes).Raw", Method, 0}, + {"(LoadCmd).GoString", Method, 0}, + {"(LoadCmd).String", Method, 0}, + {"(RelocTypeARM).GoString", Method, 10}, + {"(RelocTypeARM).String", Method, 10}, + {"(RelocTypeARM64).GoString", Method, 10}, + {"(RelocTypeARM64).String", Method, 10}, + {"(RelocTypeGeneric).GoString", Method, 10}, + {"(RelocTypeGeneric).String", Method, 10}, + {"(RelocTypeX86_64).GoString", Method, 10}, + {"(RelocTypeX86_64).String", Method, 10}, + {"(Rpath).Raw", Method, 10}, + {"(Section).ReadAt", Method, 0}, + {"(Segment).Raw", Method, 0}, + {"(Segment).ReadAt", Method, 0}, + {"(Symtab).Raw", Method, 0}, + {"(Type).GoString", Method, 10}, + {"(Type).String", Method, 10}, + {"ARM64_RELOC_ADDEND", Const, 10}, + {"ARM64_RELOC_BRANCH26", Const, 10}, + {"ARM64_RELOC_GOT_LOAD_PAGE21", Const, 10}, + {"ARM64_RELOC_GOT_LOAD_PAGEOFF12", Const, 10}, + {"ARM64_RELOC_PAGE21", Const, 10}, + {"ARM64_RELOC_PAGEOFF12", Const, 10}, + {"ARM64_RELOC_POINTER_TO_GOT", Const, 10}, + {"ARM64_RELOC_SUBTRACTOR", Const, 10}, + {"ARM64_RELOC_TLVP_LOAD_PAGE21", Const, 10}, + {"ARM64_RELOC_TLVP_LOAD_PAGEOFF12", Const, 10}, + {"ARM64_RELOC_UNSIGNED", Const, 10}, + {"ARM_RELOC_BR24", Const, 10}, + {"ARM_RELOC_HALF", Const, 10}, + {"ARM_RELOC_HALF_SECTDIFF", Const, 10}, + {"ARM_RELOC_LOCAL_SECTDIFF", Const, 10}, + {"ARM_RELOC_PAIR", Const, 10}, + {"ARM_RELOC_PB_LA_PTR", Const, 10}, + {"ARM_RELOC_SECTDIFF", Const, 10}, + {"ARM_RELOC_VANILLA", Const, 10}, + {"ARM_THUMB_32BIT_BRANCH", Const, 10}, + {"ARM_THUMB_RELOC_BR22", Const, 10}, + {"Cpu", Type, 0}, + {"Cpu386", Const, 0}, + {"CpuAmd64", Const, 0}, + {"CpuArm", Const, 3}, + {"CpuArm64", Const, 11}, + {"CpuPpc", Const, 3}, + {"CpuPpc64", Const, 3}, + {"Dylib", Type, 0}, + {"Dylib.CompatVersion", Field, 0}, + {"Dylib.CurrentVersion", Field, 0}, 
+ {"Dylib.LoadBytes", Field, 0}, + {"Dylib.Name", Field, 0}, + {"Dylib.Time", Field, 0}, + {"DylibCmd", Type, 0}, + {"DylibCmd.Cmd", Field, 0}, + {"DylibCmd.CompatVersion", Field, 0}, + {"DylibCmd.CurrentVersion", Field, 0}, + {"DylibCmd.Len", Field, 0}, + {"DylibCmd.Name", Field, 0}, + {"DylibCmd.Time", Field, 0}, + {"Dysymtab", Type, 0}, + {"Dysymtab.DysymtabCmd", Field, 0}, + {"Dysymtab.IndirectSyms", Field, 0}, + {"Dysymtab.LoadBytes", Field, 0}, + {"DysymtabCmd", Type, 0}, + {"DysymtabCmd.Cmd", Field, 0}, + {"DysymtabCmd.Extrefsymoff", Field, 0}, + {"DysymtabCmd.Extreloff", Field, 0}, + {"DysymtabCmd.Iextdefsym", Field, 0}, + {"DysymtabCmd.Ilocalsym", Field, 0}, + {"DysymtabCmd.Indirectsymoff", Field, 0}, + {"DysymtabCmd.Iundefsym", Field, 0}, + {"DysymtabCmd.Len", Field, 0}, + {"DysymtabCmd.Locreloff", Field, 0}, + {"DysymtabCmd.Modtaboff", Field, 0}, + {"DysymtabCmd.Nextdefsym", Field, 0}, + {"DysymtabCmd.Nextrefsyms", Field, 0}, + {"DysymtabCmd.Nextrel", Field, 0}, + {"DysymtabCmd.Nindirectsyms", Field, 0}, + {"DysymtabCmd.Nlocalsym", Field, 0}, + {"DysymtabCmd.Nlocrel", Field, 0}, + {"DysymtabCmd.Nmodtab", Field, 0}, + {"DysymtabCmd.Ntoc", Field, 0}, + {"DysymtabCmd.Nundefsym", Field, 0}, + {"DysymtabCmd.Tocoffset", Field, 0}, + {"ErrNotFat", Var, 3}, + {"FatArch", Type, 3}, + {"FatArch.FatArchHeader", Field, 3}, + {"FatArch.File", Field, 3}, + {"FatArchHeader", Type, 3}, + {"FatArchHeader.Align", Field, 3}, + {"FatArchHeader.Cpu", Field, 3}, + {"FatArchHeader.Offset", Field, 3}, + {"FatArchHeader.Size", Field, 3}, + {"FatArchHeader.SubCpu", Field, 3}, + {"FatFile", Type, 3}, + {"FatFile.Arches", Field, 3}, + {"FatFile.Magic", Field, 3}, + {"File", Type, 0}, + {"File.ByteOrder", Field, 0}, + {"File.Dysymtab", Field, 0}, + {"File.FileHeader", Field, 0}, + {"File.Loads", Field, 0}, + {"File.Sections", Field, 0}, + {"File.Symtab", Field, 0}, + {"FileHeader", Type, 0}, + {"FileHeader.Cmdsz", Field, 0}, + {"FileHeader.Cpu", Field, 0}, + {"FileHeader.Flags", 
Field, 0}, + {"FileHeader.Magic", Field, 0}, + {"FileHeader.Ncmd", Field, 0}, + {"FileHeader.SubCpu", Field, 0}, + {"FileHeader.Type", Field, 0}, + {"FlagAllModsBound", Const, 10}, + {"FlagAllowStackExecution", Const, 10}, + {"FlagAppExtensionSafe", Const, 10}, + {"FlagBindAtLoad", Const, 10}, + {"FlagBindsToWeak", Const, 10}, + {"FlagCanonical", Const, 10}, + {"FlagDeadStrippableDylib", Const, 10}, + {"FlagDyldLink", Const, 10}, + {"FlagForceFlat", Const, 10}, + {"FlagHasTLVDescriptors", Const, 10}, + {"FlagIncrLink", Const, 10}, + {"FlagLazyInit", Const, 10}, + {"FlagNoFixPrebinding", Const, 10}, + {"FlagNoHeapExecution", Const, 10}, + {"FlagNoMultiDefs", Const, 10}, + {"FlagNoReexportedDylibs", Const, 10}, + {"FlagNoUndefs", Const, 10}, + {"FlagPIE", Const, 10}, + {"FlagPrebindable", Const, 10}, + {"FlagPrebound", Const, 10}, + {"FlagRootSafe", Const, 10}, + {"FlagSetuidSafe", Const, 10}, + {"FlagSplitSegs", Const, 10}, + {"FlagSubsectionsViaSymbols", Const, 10}, + {"FlagTwoLevel", Const, 10}, + {"FlagWeakDefines", Const, 10}, + {"FormatError", Type, 0}, + {"GENERIC_RELOC_LOCAL_SECTDIFF", Const, 10}, + {"GENERIC_RELOC_PAIR", Const, 10}, + {"GENERIC_RELOC_PB_LA_PTR", Const, 10}, + {"GENERIC_RELOC_SECTDIFF", Const, 10}, + {"GENERIC_RELOC_TLV", Const, 10}, + {"GENERIC_RELOC_VANILLA", Const, 10}, + {"Load", Type, 0}, + {"LoadBytes", Type, 0}, + {"LoadCmd", Type, 0}, + {"LoadCmdDylib", Const, 0}, + {"LoadCmdDylinker", Const, 0}, + {"LoadCmdDysymtab", Const, 0}, + {"LoadCmdRpath", Const, 10}, + {"LoadCmdSegment", Const, 0}, + {"LoadCmdSegment64", Const, 0}, + {"LoadCmdSymtab", Const, 0}, + {"LoadCmdThread", Const, 0}, + {"LoadCmdUnixThread", Const, 0}, + {"Magic32", Const, 0}, + {"Magic64", Const, 0}, + {"MagicFat", Const, 3}, + {"NewFatFile", Func, 3}, + {"NewFile", Func, 0}, + {"Nlist32", Type, 0}, + {"Nlist32.Desc", Field, 0}, + {"Nlist32.Name", Field, 0}, + {"Nlist32.Sect", Field, 0}, + {"Nlist32.Type", Field, 0}, + {"Nlist32.Value", Field, 0}, + {"Nlist64", Type, 
0}, + {"Nlist64.Desc", Field, 0}, + {"Nlist64.Name", Field, 0}, + {"Nlist64.Sect", Field, 0}, + {"Nlist64.Type", Field, 0}, + {"Nlist64.Value", Field, 0}, + {"Open", Func, 0}, + {"OpenFat", Func, 3}, + {"Regs386", Type, 0}, + {"Regs386.AX", Field, 0}, + {"Regs386.BP", Field, 0}, + {"Regs386.BX", Field, 0}, + {"Regs386.CS", Field, 0}, + {"Regs386.CX", Field, 0}, + {"Regs386.DI", Field, 0}, + {"Regs386.DS", Field, 0}, + {"Regs386.DX", Field, 0}, + {"Regs386.ES", Field, 0}, + {"Regs386.FLAGS", Field, 0}, + {"Regs386.FS", Field, 0}, + {"Regs386.GS", Field, 0}, + {"Regs386.IP", Field, 0}, + {"Regs386.SI", Field, 0}, + {"Regs386.SP", Field, 0}, + {"Regs386.SS", Field, 0}, + {"RegsAMD64", Type, 0}, + {"RegsAMD64.AX", Field, 0}, + {"RegsAMD64.BP", Field, 0}, + {"RegsAMD64.BX", Field, 0}, + {"RegsAMD64.CS", Field, 0}, + {"RegsAMD64.CX", Field, 0}, + {"RegsAMD64.DI", Field, 0}, + {"RegsAMD64.DX", Field, 0}, + {"RegsAMD64.FLAGS", Field, 0}, + {"RegsAMD64.FS", Field, 0}, + {"RegsAMD64.GS", Field, 0}, + {"RegsAMD64.IP", Field, 0}, + {"RegsAMD64.R10", Field, 0}, + {"RegsAMD64.R11", Field, 0}, + {"RegsAMD64.R12", Field, 0}, + {"RegsAMD64.R13", Field, 0}, + {"RegsAMD64.R14", Field, 0}, + {"RegsAMD64.R15", Field, 0}, + {"RegsAMD64.R8", Field, 0}, + {"RegsAMD64.R9", Field, 0}, + {"RegsAMD64.SI", Field, 0}, + {"RegsAMD64.SP", Field, 0}, + {"Reloc", Type, 10}, + {"Reloc.Addr", Field, 10}, + {"Reloc.Extern", Field, 10}, + {"Reloc.Len", Field, 10}, + {"Reloc.Pcrel", Field, 10}, + {"Reloc.Scattered", Field, 10}, + {"Reloc.Type", Field, 10}, + {"Reloc.Value", Field, 10}, + {"RelocTypeARM", Type, 10}, + {"RelocTypeARM64", Type, 10}, + {"RelocTypeGeneric", Type, 10}, + {"RelocTypeX86_64", Type, 10}, + {"Rpath", Type, 10}, + {"Rpath.LoadBytes", Field, 10}, + {"Rpath.Path", Field, 10}, + {"RpathCmd", Type, 10}, + {"RpathCmd.Cmd", Field, 10}, + {"RpathCmd.Len", Field, 10}, + {"RpathCmd.Path", Field, 10}, + {"Section", Type, 0}, + {"Section.ReaderAt", Field, 0}, + {"Section.Relocs", Field, 10}, 
+ {"Section.SectionHeader", Field, 0}, + {"Section32", Type, 0}, + {"Section32.Addr", Field, 0}, + {"Section32.Align", Field, 0}, + {"Section32.Flags", Field, 0}, + {"Section32.Name", Field, 0}, + {"Section32.Nreloc", Field, 0}, + {"Section32.Offset", Field, 0}, + {"Section32.Reloff", Field, 0}, + {"Section32.Reserve1", Field, 0}, + {"Section32.Reserve2", Field, 0}, + {"Section32.Seg", Field, 0}, + {"Section32.Size", Field, 0}, + {"Section64", Type, 0}, + {"Section64.Addr", Field, 0}, + {"Section64.Align", Field, 0}, + {"Section64.Flags", Field, 0}, + {"Section64.Name", Field, 0}, + {"Section64.Nreloc", Field, 0}, + {"Section64.Offset", Field, 0}, + {"Section64.Reloff", Field, 0}, + {"Section64.Reserve1", Field, 0}, + {"Section64.Reserve2", Field, 0}, + {"Section64.Reserve3", Field, 0}, + {"Section64.Seg", Field, 0}, + {"Section64.Size", Field, 0}, + {"SectionHeader", Type, 0}, + {"SectionHeader.Addr", Field, 0}, + {"SectionHeader.Align", Field, 0}, + {"SectionHeader.Flags", Field, 0}, + {"SectionHeader.Name", Field, 0}, + {"SectionHeader.Nreloc", Field, 0}, + {"SectionHeader.Offset", Field, 0}, + {"SectionHeader.Reloff", Field, 0}, + {"SectionHeader.Seg", Field, 0}, + {"SectionHeader.Size", Field, 0}, + {"Segment", Type, 0}, + {"Segment.LoadBytes", Field, 0}, + {"Segment.ReaderAt", Field, 0}, + {"Segment.SegmentHeader", Field, 0}, + {"Segment32", Type, 0}, + {"Segment32.Addr", Field, 0}, + {"Segment32.Cmd", Field, 0}, + {"Segment32.Filesz", Field, 0}, + {"Segment32.Flag", Field, 0}, + {"Segment32.Len", Field, 0}, + {"Segment32.Maxprot", Field, 0}, + {"Segment32.Memsz", Field, 0}, + {"Segment32.Name", Field, 0}, + {"Segment32.Nsect", Field, 0}, + {"Segment32.Offset", Field, 0}, + {"Segment32.Prot", Field, 0}, + {"Segment64", Type, 0}, + {"Segment64.Addr", Field, 0}, + {"Segment64.Cmd", Field, 0}, + {"Segment64.Filesz", Field, 0}, + {"Segment64.Flag", Field, 0}, + {"Segment64.Len", Field, 0}, + {"Segment64.Maxprot", Field, 0}, + {"Segment64.Memsz", Field, 0}, + 
{"Segment64.Name", Field, 0}, + {"Segment64.Nsect", Field, 0}, + {"Segment64.Offset", Field, 0}, + {"Segment64.Prot", Field, 0}, + {"SegmentHeader", Type, 0}, + {"SegmentHeader.Addr", Field, 0}, + {"SegmentHeader.Cmd", Field, 0}, + {"SegmentHeader.Filesz", Field, 0}, + {"SegmentHeader.Flag", Field, 0}, + {"SegmentHeader.Len", Field, 0}, + {"SegmentHeader.Maxprot", Field, 0}, + {"SegmentHeader.Memsz", Field, 0}, + {"SegmentHeader.Name", Field, 0}, + {"SegmentHeader.Nsect", Field, 0}, + {"SegmentHeader.Offset", Field, 0}, + {"SegmentHeader.Prot", Field, 0}, + {"Symbol", Type, 0}, + {"Symbol.Desc", Field, 0}, + {"Symbol.Name", Field, 0}, + {"Symbol.Sect", Field, 0}, + {"Symbol.Type", Field, 0}, + {"Symbol.Value", Field, 0}, + {"Symtab", Type, 0}, + {"Symtab.LoadBytes", Field, 0}, + {"Symtab.Syms", Field, 0}, + {"Symtab.SymtabCmd", Field, 0}, + {"SymtabCmd", Type, 0}, + {"SymtabCmd.Cmd", Field, 0}, + {"SymtabCmd.Len", Field, 0}, + {"SymtabCmd.Nsyms", Field, 0}, + {"SymtabCmd.Stroff", Field, 0}, + {"SymtabCmd.Strsize", Field, 0}, + {"SymtabCmd.Symoff", Field, 0}, + {"Thread", Type, 0}, + {"Thread.Cmd", Field, 0}, + {"Thread.Data", Field, 0}, + {"Thread.Len", Field, 0}, + {"Thread.Type", Field, 0}, + {"Type", Type, 0}, + {"TypeBundle", Const, 3}, + {"TypeDylib", Const, 3}, + {"TypeExec", Const, 0}, + {"TypeObj", Const, 0}, + {"X86_64_RELOC_BRANCH", Const, 10}, + {"X86_64_RELOC_GOT", Const, 10}, + {"X86_64_RELOC_GOT_LOAD", Const, 10}, + {"X86_64_RELOC_SIGNED", Const, 10}, + {"X86_64_RELOC_SIGNED_1", Const, 10}, + {"X86_64_RELOC_SIGNED_2", Const, 10}, + {"X86_64_RELOC_SIGNED_4", Const, 10}, + {"X86_64_RELOC_SUBTRACTOR", Const, 10}, + {"X86_64_RELOC_TLV", Const, 10}, + {"X86_64_RELOC_UNSIGNED", Const, 10}, + }, + "debug/pe": { + {"(*COFFSymbol).FullName", Method, 8}, + {"(*File).COFFSymbolReadSectionDefAux", Method, 19}, + {"(*File).Close", Method, 0}, + {"(*File).DWARF", Method, 0}, + {"(*File).ImportedLibraries", Method, 0}, + {"(*File).ImportedSymbols", Method, 0}, + 
{"(*File).Section", Method, 0}, + {"(*FormatError).Error", Method, 0}, + {"(*Section).Data", Method, 0}, + {"(*Section).Open", Method, 0}, + {"(Section).ReadAt", Method, 0}, + {"(StringTable).String", Method, 8}, + {"COFFSymbol", Type, 1}, + {"COFFSymbol.Name", Field, 1}, + {"COFFSymbol.NumberOfAuxSymbols", Field, 1}, + {"COFFSymbol.SectionNumber", Field, 1}, + {"COFFSymbol.StorageClass", Field, 1}, + {"COFFSymbol.Type", Field, 1}, + {"COFFSymbol.Value", Field, 1}, + {"COFFSymbolAuxFormat5", Type, 19}, + {"COFFSymbolAuxFormat5.Checksum", Field, 19}, + {"COFFSymbolAuxFormat5.NumLineNumbers", Field, 19}, + {"COFFSymbolAuxFormat5.NumRelocs", Field, 19}, + {"COFFSymbolAuxFormat5.SecNum", Field, 19}, + {"COFFSymbolAuxFormat5.Selection", Field, 19}, + {"COFFSymbolAuxFormat5.Size", Field, 19}, + {"COFFSymbolSize", Const, 1}, + {"DataDirectory", Type, 3}, + {"DataDirectory.Size", Field, 3}, + {"DataDirectory.VirtualAddress", Field, 3}, + {"File", Type, 0}, + {"File.COFFSymbols", Field, 8}, + {"File.FileHeader", Field, 0}, + {"File.OptionalHeader", Field, 3}, + {"File.Sections", Field, 0}, + {"File.StringTable", Field, 8}, + {"File.Symbols", Field, 1}, + {"FileHeader", Type, 0}, + {"FileHeader.Characteristics", Field, 0}, + {"FileHeader.Machine", Field, 0}, + {"FileHeader.NumberOfSections", Field, 0}, + {"FileHeader.NumberOfSymbols", Field, 0}, + {"FileHeader.PointerToSymbolTable", Field, 0}, + {"FileHeader.SizeOfOptionalHeader", Field, 0}, + {"FileHeader.TimeDateStamp", Field, 0}, + {"FormatError", Type, 0}, + {"IMAGE_COMDAT_SELECT_ANY", Const, 19}, + {"IMAGE_COMDAT_SELECT_ASSOCIATIVE", Const, 19}, + {"IMAGE_COMDAT_SELECT_EXACT_MATCH", Const, 19}, + {"IMAGE_COMDAT_SELECT_LARGEST", Const, 19}, + {"IMAGE_COMDAT_SELECT_NODUPLICATES", Const, 19}, + {"IMAGE_COMDAT_SELECT_SAME_SIZE", Const, 19}, + {"IMAGE_DIRECTORY_ENTRY_ARCHITECTURE", Const, 11}, + {"IMAGE_DIRECTORY_ENTRY_BASERELOC", Const, 11}, + {"IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT", Const, 11}, + 
{"IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR", Const, 11}, + {"IMAGE_DIRECTORY_ENTRY_DEBUG", Const, 11}, + {"IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT", Const, 11}, + {"IMAGE_DIRECTORY_ENTRY_EXCEPTION", Const, 11}, + {"IMAGE_DIRECTORY_ENTRY_EXPORT", Const, 11}, + {"IMAGE_DIRECTORY_ENTRY_GLOBALPTR", Const, 11}, + {"IMAGE_DIRECTORY_ENTRY_IAT", Const, 11}, + {"IMAGE_DIRECTORY_ENTRY_IMPORT", Const, 11}, + {"IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG", Const, 11}, + {"IMAGE_DIRECTORY_ENTRY_RESOURCE", Const, 11}, + {"IMAGE_DIRECTORY_ENTRY_SECURITY", Const, 11}, + {"IMAGE_DIRECTORY_ENTRY_TLS", Const, 11}, + {"IMAGE_DLLCHARACTERISTICS_APPCONTAINER", Const, 15}, + {"IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE", Const, 15}, + {"IMAGE_DLLCHARACTERISTICS_FORCE_INTEGRITY", Const, 15}, + {"IMAGE_DLLCHARACTERISTICS_GUARD_CF", Const, 15}, + {"IMAGE_DLLCHARACTERISTICS_HIGH_ENTROPY_VA", Const, 15}, + {"IMAGE_DLLCHARACTERISTICS_NO_BIND", Const, 15}, + {"IMAGE_DLLCHARACTERISTICS_NO_ISOLATION", Const, 15}, + {"IMAGE_DLLCHARACTERISTICS_NO_SEH", Const, 15}, + {"IMAGE_DLLCHARACTERISTICS_NX_COMPAT", Const, 15}, + {"IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE", Const, 15}, + {"IMAGE_DLLCHARACTERISTICS_WDM_DRIVER", Const, 15}, + {"IMAGE_FILE_32BIT_MACHINE", Const, 15}, + {"IMAGE_FILE_AGGRESIVE_WS_TRIM", Const, 15}, + {"IMAGE_FILE_BYTES_REVERSED_HI", Const, 15}, + {"IMAGE_FILE_BYTES_REVERSED_LO", Const, 15}, + {"IMAGE_FILE_DEBUG_STRIPPED", Const, 15}, + {"IMAGE_FILE_DLL", Const, 15}, + {"IMAGE_FILE_EXECUTABLE_IMAGE", Const, 15}, + {"IMAGE_FILE_LARGE_ADDRESS_AWARE", Const, 15}, + {"IMAGE_FILE_LINE_NUMS_STRIPPED", Const, 15}, + {"IMAGE_FILE_LOCAL_SYMS_STRIPPED", Const, 15}, + {"IMAGE_FILE_MACHINE_AM33", Const, 0}, + {"IMAGE_FILE_MACHINE_AMD64", Const, 0}, + {"IMAGE_FILE_MACHINE_ARM", Const, 0}, + {"IMAGE_FILE_MACHINE_ARM64", Const, 11}, + {"IMAGE_FILE_MACHINE_ARMNT", Const, 12}, + {"IMAGE_FILE_MACHINE_EBC", Const, 0}, + {"IMAGE_FILE_MACHINE_I386", Const, 0}, + {"IMAGE_FILE_MACHINE_IA64", Const, 0}, + 
{"IMAGE_FILE_MACHINE_LOONGARCH32", Const, 19}, + {"IMAGE_FILE_MACHINE_LOONGARCH64", Const, 19}, + {"IMAGE_FILE_MACHINE_M32R", Const, 0}, + {"IMAGE_FILE_MACHINE_MIPS16", Const, 0}, + {"IMAGE_FILE_MACHINE_MIPSFPU", Const, 0}, + {"IMAGE_FILE_MACHINE_MIPSFPU16", Const, 0}, + {"IMAGE_FILE_MACHINE_POWERPC", Const, 0}, + {"IMAGE_FILE_MACHINE_POWERPCFP", Const, 0}, + {"IMAGE_FILE_MACHINE_R4000", Const, 0}, + {"IMAGE_FILE_MACHINE_RISCV128", Const, 20}, + {"IMAGE_FILE_MACHINE_RISCV32", Const, 20}, + {"IMAGE_FILE_MACHINE_RISCV64", Const, 20}, + {"IMAGE_FILE_MACHINE_SH3", Const, 0}, + {"IMAGE_FILE_MACHINE_SH3DSP", Const, 0}, + {"IMAGE_FILE_MACHINE_SH4", Const, 0}, + {"IMAGE_FILE_MACHINE_SH5", Const, 0}, + {"IMAGE_FILE_MACHINE_THUMB", Const, 0}, + {"IMAGE_FILE_MACHINE_UNKNOWN", Const, 0}, + {"IMAGE_FILE_MACHINE_WCEMIPSV2", Const, 0}, + {"IMAGE_FILE_NET_RUN_FROM_SWAP", Const, 15}, + {"IMAGE_FILE_RELOCS_STRIPPED", Const, 15}, + {"IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP", Const, 15}, + {"IMAGE_FILE_SYSTEM", Const, 15}, + {"IMAGE_FILE_UP_SYSTEM_ONLY", Const, 15}, + {"IMAGE_SCN_CNT_CODE", Const, 19}, + {"IMAGE_SCN_CNT_INITIALIZED_DATA", Const, 19}, + {"IMAGE_SCN_CNT_UNINITIALIZED_DATA", Const, 19}, + {"IMAGE_SCN_LNK_COMDAT", Const, 19}, + {"IMAGE_SCN_MEM_DISCARDABLE", Const, 19}, + {"IMAGE_SCN_MEM_EXECUTE", Const, 19}, + {"IMAGE_SCN_MEM_READ", Const, 19}, + {"IMAGE_SCN_MEM_WRITE", Const, 19}, + {"IMAGE_SUBSYSTEM_EFI_APPLICATION", Const, 15}, + {"IMAGE_SUBSYSTEM_EFI_BOOT_SERVICE_DRIVER", Const, 15}, + {"IMAGE_SUBSYSTEM_EFI_ROM", Const, 15}, + {"IMAGE_SUBSYSTEM_EFI_RUNTIME_DRIVER", Const, 15}, + {"IMAGE_SUBSYSTEM_NATIVE", Const, 15}, + {"IMAGE_SUBSYSTEM_NATIVE_WINDOWS", Const, 15}, + {"IMAGE_SUBSYSTEM_OS2_CUI", Const, 15}, + {"IMAGE_SUBSYSTEM_POSIX_CUI", Const, 15}, + {"IMAGE_SUBSYSTEM_UNKNOWN", Const, 15}, + {"IMAGE_SUBSYSTEM_WINDOWS_BOOT_APPLICATION", Const, 15}, + {"IMAGE_SUBSYSTEM_WINDOWS_CE_GUI", Const, 15}, + {"IMAGE_SUBSYSTEM_WINDOWS_CUI", Const, 15}, + 
{"IMAGE_SUBSYSTEM_WINDOWS_GUI", Const, 15}, + {"IMAGE_SUBSYSTEM_XBOX", Const, 15}, + {"ImportDirectory", Type, 0}, + {"ImportDirectory.FirstThunk", Field, 0}, + {"ImportDirectory.ForwarderChain", Field, 0}, + {"ImportDirectory.Name", Field, 0}, + {"ImportDirectory.OriginalFirstThunk", Field, 0}, + {"ImportDirectory.TimeDateStamp", Field, 0}, + {"NewFile", Func, 0}, + {"Open", Func, 0}, + {"OptionalHeader32", Type, 3}, + {"OptionalHeader32.AddressOfEntryPoint", Field, 3}, + {"OptionalHeader32.BaseOfCode", Field, 3}, + {"OptionalHeader32.BaseOfData", Field, 3}, + {"OptionalHeader32.CheckSum", Field, 3}, + {"OptionalHeader32.DataDirectory", Field, 3}, + {"OptionalHeader32.DllCharacteristics", Field, 3}, + {"OptionalHeader32.FileAlignment", Field, 3}, + {"OptionalHeader32.ImageBase", Field, 3}, + {"OptionalHeader32.LoaderFlags", Field, 3}, + {"OptionalHeader32.Magic", Field, 3}, + {"OptionalHeader32.MajorImageVersion", Field, 3}, + {"OptionalHeader32.MajorLinkerVersion", Field, 3}, + {"OptionalHeader32.MajorOperatingSystemVersion", Field, 3}, + {"OptionalHeader32.MajorSubsystemVersion", Field, 3}, + {"OptionalHeader32.MinorImageVersion", Field, 3}, + {"OptionalHeader32.MinorLinkerVersion", Field, 3}, + {"OptionalHeader32.MinorOperatingSystemVersion", Field, 3}, + {"OptionalHeader32.MinorSubsystemVersion", Field, 3}, + {"OptionalHeader32.NumberOfRvaAndSizes", Field, 3}, + {"OptionalHeader32.SectionAlignment", Field, 3}, + {"OptionalHeader32.SizeOfCode", Field, 3}, + {"OptionalHeader32.SizeOfHeaders", Field, 3}, + {"OptionalHeader32.SizeOfHeapCommit", Field, 3}, + {"OptionalHeader32.SizeOfHeapReserve", Field, 3}, + {"OptionalHeader32.SizeOfImage", Field, 3}, + {"OptionalHeader32.SizeOfInitializedData", Field, 3}, + {"OptionalHeader32.SizeOfStackCommit", Field, 3}, + {"OptionalHeader32.SizeOfStackReserve", Field, 3}, + {"OptionalHeader32.SizeOfUninitializedData", Field, 3}, + {"OptionalHeader32.Subsystem", Field, 3}, + {"OptionalHeader32.Win32VersionValue", Field, 3}, + 
{"OptionalHeader64", Type, 3}, + {"OptionalHeader64.AddressOfEntryPoint", Field, 3}, + {"OptionalHeader64.BaseOfCode", Field, 3}, + {"OptionalHeader64.CheckSum", Field, 3}, + {"OptionalHeader64.DataDirectory", Field, 3}, + {"OptionalHeader64.DllCharacteristics", Field, 3}, + {"OptionalHeader64.FileAlignment", Field, 3}, + {"OptionalHeader64.ImageBase", Field, 3}, + {"OptionalHeader64.LoaderFlags", Field, 3}, + {"OptionalHeader64.Magic", Field, 3}, + {"OptionalHeader64.MajorImageVersion", Field, 3}, + {"OptionalHeader64.MajorLinkerVersion", Field, 3}, + {"OptionalHeader64.MajorOperatingSystemVersion", Field, 3}, + {"OptionalHeader64.MajorSubsystemVersion", Field, 3}, + {"OptionalHeader64.MinorImageVersion", Field, 3}, + {"OptionalHeader64.MinorLinkerVersion", Field, 3}, + {"OptionalHeader64.MinorOperatingSystemVersion", Field, 3}, + {"OptionalHeader64.MinorSubsystemVersion", Field, 3}, + {"OptionalHeader64.NumberOfRvaAndSizes", Field, 3}, + {"OptionalHeader64.SectionAlignment", Field, 3}, + {"OptionalHeader64.SizeOfCode", Field, 3}, + {"OptionalHeader64.SizeOfHeaders", Field, 3}, + {"OptionalHeader64.SizeOfHeapCommit", Field, 3}, + {"OptionalHeader64.SizeOfHeapReserve", Field, 3}, + {"OptionalHeader64.SizeOfImage", Field, 3}, + {"OptionalHeader64.SizeOfInitializedData", Field, 3}, + {"OptionalHeader64.SizeOfStackCommit", Field, 3}, + {"OptionalHeader64.SizeOfStackReserve", Field, 3}, + {"OptionalHeader64.SizeOfUninitializedData", Field, 3}, + {"OptionalHeader64.Subsystem", Field, 3}, + {"OptionalHeader64.Win32VersionValue", Field, 3}, + {"Reloc", Type, 8}, + {"Reloc.SymbolTableIndex", Field, 8}, + {"Reloc.Type", Field, 8}, + {"Reloc.VirtualAddress", Field, 8}, + {"Section", Type, 0}, + {"Section.ReaderAt", Field, 0}, + {"Section.Relocs", Field, 8}, + {"Section.SectionHeader", Field, 0}, + {"SectionHeader", Type, 0}, + {"SectionHeader.Characteristics", Field, 0}, + {"SectionHeader.Name", Field, 0}, + {"SectionHeader.NumberOfLineNumbers", Field, 0}, + 
{"SectionHeader.NumberOfRelocations", Field, 0}, + {"SectionHeader.Offset", Field, 0}, + {"SectionHeader.PointerToLineNumbers", Field, 0}, + {"SectionHeader.PointerToRelocations", Field, 0}, + {"SectionHeader.Size", Field, 0}, + {"SectionHeader.VirtualAddress", Field, 0}, + {"SectionHeader.VirtualSize", Field, 0}, + {"SectionHeader32", Type, 0}, + {"SectionHeader32.Characteristics", Field, 0}, + {"SectionHeader32.Name", Field, 0}, + {"SectionHeader32.NumberOfLineNumbers", Field, 0}, + {"SectionHeader32.NumberOfRelocations", Field, 0}, + {"SectionHeader32.PointerToLineNumbers", Field, 0}, + {"SectionHeader32.PointerToRawData", Field, 0}, + {"SectionHeader32.PointerToRelocations", Field, 0}, + {"SectionHeader32.SizeOfRawData", Field, 0}, + {"SectionHeader32.VirtualAddress", Field, 0}, + {"SectionHeader32.VirtualSize", Field, 0}, + {"StringTable", Type, 8}, + {"Symbol", Type, 1}, + {"Symbol.Name", Field, 1}, + {"Symbol.SectionNumber", Field, 1}, + {"Symbol.StorageClass", Field, 1}, + {"Symbol.Type", Field, 1}, + {"Symbol.Value", Field, 1}, + }, + "debug/plan9obj": { + {"(*File).Close", Method, 3}, + {"(*File).Section", Method, 3}, + {"(*File).Symbols", Method, 3}, + {"(*Section).Data", Method, 3}, + {"(*Section).Open", Method, 3}, + {"(Section).ReadAt", Method, 3}, + {"ErrNoSymbols", Var, 18}, + {"File", Type, 3}, + {"File.FileHeader", Field, 3}, + {"File.Sections", Field, 3}, + {"FileHeader", Type, 3}, + {"FileHeader.Bss", Field, 3}, + {"FileHeader.Entry", Field, 3}, + {"FileHeader.HdrSize", Field, 4}, + {"FileHeader.LoadAddress", Field, 4}, + {"FileHeader.Magic", Field, 3}, + {"FileHeader.PtrSize", Field, 3}, + {"Magic386", Const, 3}, + {"Magic64", Const, 3}, + {"MagicAMD64", Const, 3}, + {"MagicARM", Const, 3}, + {"NewFile", Func, 3}, + {"Open", Func, 3}, + {"Section", Type, 3}, + {"Section.ReaderAt", Field, 3}, + {"Section.SectionHeader", Field, 3}, + {"SectionHeader", Type, 3}, + {"SectionHeader.Name", Field, 3}, + {"SectionHeader.Offset", Field, 3}, + 
{"SectionHeader.Size", Field, 3}, + {"Sym", Type, 3}, + {"Sym.Name", Field, 3}, + {"Sym.Type", Field, 3}, + {"Sym.Value", Field, 3}, + }, + "embed": { + {"(FS).Open", Method, 16}, + {"(FS).ReadDir", Method, 16}, + {"(FS).ReadFile", Method, 16}, + {"FS", Type, 16}, + }, + "encoding": { + {"BinaryMarshaler", Type, 2}, + {"BinaryUnmarshaler", Type, 2}, + {"TextMarshaler", Type, 2}, + {"TextUnmarshaler", Type, 2}, + }, + "encoding/ascii85": { + {"(CorruptInputError).Error", Method, 0}, + {"CorruptInputError", Type, 0}, + {"Decode", Func, 0}, + {"Encode", Func, 0}, + {"MaxEncodedLen", Func, 0}, + {"NewDecoder", Func, 0}, + {"NewEncoder", Func, 0}, + }, + "encoding/asn1": { + {"(BitString).At", Method, 0}, + {"(BitString).RightAlign", Method, 0}, + {"(ObjectIdentifier).Equal", Method, 0}, + {"(ObjectIdentifier).String", Method, 3}, + {"(StructuralError).Error", Method, 0}, + {"(SyntaxError).Error", Method, 0}, + {"BitString", Type, 0}, + {"BitString.BitLength", Field, 0}, + {"BitString.Bytes", Field, 0}, + {"ClassApplication", Const, 6}, + {"ClassContextSpecific", Const, 6}, + {"ClassPrivate", Const, 6}, + {"ClassUniversal", Const, 6}, + {"Enumerated", Type, 0}, + {"Flag", Type, 0}, + {"Marshal", Func, 0}, + {"MarshalWithParams", Func, 10}, + {"NullBytes", Var, 9}, + {"NullRawValue", Var, 9}, + {"ObjectIdentifier", Type, 0}, + {"RawContent", Type, 0}, + {"RawValue", Type, 0}, + {"RawValue.Bytes", Field, 0}, + {"RawValue.Class", Field, 0}, + {"RawValue.FullBytes", Field, 0}, + {"RawValue.IsCompound", Field, 0}, + {"RawValue.Tag", Field, 0}, + {"StructuralError", Type, 0}, + {"StructuralError.Msg", Field, 0}, + {"SyntaxError", Type, 0}, + {"SyntaxError.Msg", Field, 0}, + {"TagBMPString", Const, 14}, + {"TagBitString", Const, 6}, + {"TagBoolean", Const, 6}, + {"TagEnum", Const, 6}, + {"TagGeneralString", Const, 6}, + {"TagGeneralizedTime", Const, 6}, + {"TagIA5String", Const, 6}, + {"TagInteger", Const, 6}, + {"TagNull", Const, 9}, + {"TagNumericString", Const, 10}, + 
{"TagOID", Const, 6}, + {"TagOctetString", Const, 6}, + {"TagPrintableString", Const, 6}, + {"TagSequence", Const, 6}, + {"TagSet", Const, 6}, + {"TagT61String", Const, 6}, + {"TagUTCTime", Const, 6}, + {"TagUTF8String", Const, 6}, + {"Unmarshal", Func, 0}, + {"UnmarshalWithParams", Func, 0}, + }, + "encoding/base32": { + {"(*Encoding).AppendDecode", Method, 22}, + {"(*Encoding).AppendEncode", Method, 22}, + {"(*Encoding).Decode", Method, 0}, + {"(*Encoding).DecodeString", Method, 0}, + {"(*Encoding).DecodedLen", Method, 0}, + {"(*Encoding).Encode", Method, 0}, + {"(*Encoding).EncodeToString", Method, 0}, + {"(*Encoding).EncodedLen", Method, 0}, + {"(CorruptInputError).Error", Method, 0}, + {"(Encoding).WithPadding", Method, 9}, + {"CorruptInputError", Type, 0}, + {"Encoding", Type, 0}, + {"HexEncoding", Var, 0}, + {"NewDecoder", Func, 0}, + {"NewEncoder", Func, 0}, + {"NewEncoding", Func, 0}, + {"NoPadding", Const, 9}, + {"StdEncoding", Var, 0}, + {"StdPadding", Const, 9}, + }, + "encoding/base64": { + {"(*Encoding).AppendDecode", Method, 22}, + {"(*Encoding).AppendEncode", Method, 22}, + {"(*Encoding).Decode", Method, 0}, + {"(*Encoding).DecodeString", Method, 0}, + {"(*Encoding).DecodedLen", Method, 0}, + {"(*Encoding).Encode", Method, 0}, + {"(*Encoding).EncodeToString", Method, 0}, + {"(*Encoding).EncodedLen", Method, 0}, + {"(CorruptInputError).Error", Method, 0}, + {"(Encoding).Strict", Method, 8}, + {"(Encoding).WithPadding", Method, 5}, + {"CorruptInputError", Type, 0}, + {"Encoding", Type, 0}, + {"NewDecoder", Func, 0}, + {"NewEncoder", Func, 0}, + {"NewEncoding", Func, 0}, + {"NoPadding", Const, 5}, + {"RawStdEncoding", Var, 5}, + {"RawURLEncoding", Var, 5}, + {"StdEncoding", Var, 0}, + {"StdPadding", Const, 5}, + {"URLEncoding", Var, 0}, + }, + "encoding/binary": { + {"AppendByteOrder", Type, 19}, + {"AppendUvarint", Func, 19}, + {"AppendVarint", Func, 19}, + {"BigEndian", Var, 0}, + {"ByteOrder", Type, 0}, + {"LittleEndian", Var, 0}, + 
{"MaxVarintLen16", Const, 0}, + {"MaxVarintLen32", Const, 0}, + {"MaxVarintLen64", Const, 0}, + {"NativeEndian", Var, 21}, + {"PutUvarint", Func, 0}, + {"PutVarint", Func, 0}, + {"Read", Func, 0}, + {"ReadUvarint", Func, 0}, + {"ReadVarint", Func, 0}, + {"Size", Func, 0}, + {"Uvarint", Func, 0}, + {"Varint", Func, 0}, + {"Write", Func, 0}, + }, + "encoding/csv": { + {"(*ParseError).Error", Method, 0}, + {"(*ParseError).Unwrap", Method, 13}, + {"(*Reader).FieldPos", Method, 17}, + {"(*Reader).InputOffset", Method, 19}, + {"(*Reader).Read", Method, 0}, + {"(*Reader).ReadAll", Method, 0}, + {"(*Writer).Error", Method, 1}, + {"(*Writer).Flush", Method, 0}, + {"(*Writer).Write", Method, 0}, + {"(*Writer).WriteAll", Method, 0}, + {"ErrBareQuote", Var, 0}, + {"ErrFieldCount", Var, 0}, + {"ErrQuote", Var, 0}, + {"ErrTrailingComma", Var, 0}, + {"NewReader", Func, 0}, + {"NewWriter", Func, 0}, + {"ParseError", Type, 0}, + {"ParseError.Column", Field, 0}, + {"ParseError.Err", Field, 0}, + {"ParseError.Line", Field, 0}, + {"ParseError.StartLine", Field, 10}, + {"Reader", Type, 0}, + {"Reader.Comma", Field, 0}, + {"Reader.Comment", Field, 0}, + {"Reader.FieldsPerRecord", Field, 0}, + {"Reader.LazyQuotes", Field, 0}, + {"Reader.ReuseRecord", Field, 9}, + {"Reader.TrailingComma", Field, 0}, + {"Reader.TrimLeadingSpace", Field, 0}, + {"Writer", Type, 0}, + {"Writer.Comma", Field, 0}, + {"Writer.UseCRLF", Field, 0}, + }, + "encoding/gob": { + {"(*Decoder).Decode", Method, 0}, + {"(*Decoder).DecodeValue", Method, 0}, + {"(*Encoder).Encode", Method, 0}, + {"(*Encoder).EncodeValue", Method, 0}, + {"CommonType", Type, 0}, + {"CommonType.Id", Field, 0}, + {"CommonType.Name", Field, 0}, + {"Decoder", Type, 0}, + {"Encoder", Type, 0}, + {"GobDecoder", Type, 0}, + {"GobEncoder", Type, 0}, + {"NewDecoder", Func, 0}, + {"NewEncoder", Func, 0}, + {"Register", Func, 0}, + {"RegisterName", Func, 0}, + }, + "encoding/hex": { + {"(InvalidByteError).Error", Method, 0}, + {"AppendDecode", Func, 
22}, + {"AppendEncode", Func, 22}, + {"Decode", Func, 0}, + {"DecodeString", Func, 0}, + {"DecodedLen", Func, 0}, + {"Dump", Func, 0}, + {"Dumper", Func, 0}, + {"Encode", Func, 0}, + {"EncodeToString", Func, 0}, + {"EncodedLen", Func, 0}, + {"ErrLength", Var, 0}, + {"InvalidByteError", Type, 0}, + {"NewDecoder", Func, 10}, + {"NewEncoder", Func, 10}, + }, + "encoding/json": { + {"(*Decoder).Buffered", Method, 1}, + {"(*Decoder).Decode", Method, 0}, + {"(*Decoder).DisallowUnknownFields", Method, 10}, + {"(*Decoder).InputOffset", Method, 14}, + {"(*Decoder).More", Method, 5}, + {"(*Decoder).Token", Method, 5}, + {"(*Decoder).UseNumber", Method, 1}, + {"(*Encoder).Encode", Method, 0}, + {"(*Encoder).SetEscapeHTML", Method, 7}, + {"(*Encoder).SetIndent", Method, 7}, + {"(*InvalidUTF8Error).Error", Method, 0}, + {"(*InvalidUnmarshalError).Error", Method, 0}, + {"(*MarshalerError).Error", Method, 0}, + {"(*MarshalerError).Unwrap", Method, 13}, + {"(*RawMessage).MarshalJSON", Method, 0}, + {"(*RawMessage).UnmarshalJSON", Method, 0}, + {"(*SyntaxError).Error", Method, 0}, + {"(*UnmarshalFieldError).Error", Method, 0}, + {"(*UnmarshalTypeError).Error", Method, 0}, + {"(*UnsupportedTypeError).Error", Method, 0}, + {"(*UnsupportedValueError).Error", Method, 0}, + {"(Delim).String", Method, 5}, + {"(Number).Float64", Method, 1}, + {"(Number).Int64", Method, 1}, + {"(Number).String", Method, 1}, + {"(RawMessage).MarshalJSON", Method, 8}, + {"Compact", Func, 0}, + {"Decoder", Type, 0}, + {"Delim", Type, 5}, + {"Encoder", Type, 0}, + {"HTMLEscape", Func, 0}, + {"Indent", Func, 0}, + {"InvalidUTF8Error", Type, 0}, + {"InvalidUTF8Error.S", Field, 0}, + {"InvalidUnmarshalError", Type, 0}, + {"InvalidUnmarshalError.Type", Field, 0}, + {"Marshal", Func, 0}, + {"MarshalIndent", Func, 0}, + {"Marshaler", Type, 0}, + {"MarshalerError", Type, 0}, + {"MarshalerError.Err", Field, 0}, + {"MarshalerError.Type", Field, 0}, + {"NewDecoder", Func, 0}, + {"NewEncoder", Func, 0}, + {"Number", 
Type, 1}, + {"RawMessage", Type, 0}, + {"SyntaxError", Type, 0}, + {"SyntaxError.Offset", Field, 0}, + {"Token", Type, 5}, + {"Unmarshal", Func, 0}, + {"UnmarshalFieldError", Type, 0}, + {"UnmarshalFieldError.Field", Field, 0}, + {"UnmarshalFieldError.Key", Field, 0}, + {"UnmarshalFieldError.Type", Field, 0}, + {"UnmarshalTypeError", Type, 0}, + {"UnmarshalTypeError.Field", Field, 8}, + {"UnmarshalTypeError.Offset", Field, 5}, + {"UnmarshalTypeError.Struct", Field, 8}, + {"UnmarshalTypeError.Type", Field, 0}, + {"UnmarshalTypeError.Value", Field, 0}, + {"Unmarshaler", Type, 0}, + {"UnsupportedTypeError", Type, 0}, + {"UnsupportedTypeError.Type", Field, 0}, + {"UnsupportedValueError", Type, 0}, + {"UnsupportedValueError.Str", Field, 0}, + {"UnsupportedValueError.Value", Field, 0}, + {"Valid", Func, 9}, + }, + "encoding/pem": { + {"Block", Type, 0}, + {"Block.Bytes", Field, 0}, + {"Block.Headers", Field, 0}, + {"Block.Type", Field, 0}, + {"Decode", Func, 0}, + {"Encode", Func, 0}, + {"EncodeToMemory", Func, 0}, + }, + "encoding/xml": { + {"(*Decoder).Decode", Method, 0}, + {"(*Decoder).DecodeElement", Method, 0}, + {"(*Decoder).InputOffset", Method, 4}, + {"(*Decoder).InputPos", Method, 19}, + {"(*Decoder).RawToken", Method, 0}, + {"(*Decoder).Skip", Method, 0}, + {"(*Decoder).Token", Method, 0}, + {"(*Encoder).Close", Method, 20}, + {"(*Encoder).Encode", Method, 0}, + {"(*Encoder).EncodeElement", Method, 2}, + {"(*Encoder).EncodeToken", Method, 2}, + {"(*Encoder).Flush", Method, 2}, + {"(*Encoder).Indent", Method, 1}, + {"(*SyntaxError).Error", Method, 0}, + {"(*TagPathError).Error", Method, 0}, + {"(*UnsupportedTypeError).Error", Method, 0}, + {"(CharData).Copy", Method, 0}, + {"(Comment).Copy", Method, 0}, + {"(Directive).Copy", Method, 0}, + {"(ProcInst).Copy", Method, 0}, + {"(StartElement).Copy", Method, 0}, + {"(StartElement).End", Method, 2}, + {"(UnmarshalError).Error", Method, 0}, + {"Attr", Type, 0}, + {"Attr.Name", Field, 0}, + {"Attr.Value", Field, 0}, + 
{"CharData", Type, 0}, + {"Comment", Type, 0}, + {"CopyToken", Func, 0}, + {"Decoder", Type, 0}, + {"Decoder.AutoClose", Field, 0}, + {"Decoder.CharsetReader", Field, 0}, + {"Decoder.DefaultSpace", Field, 1}, + {"Decoder.Entity", Field, 0}, + {"Decoder.Strict", Field, 0}, + {"Directive", Type, 0}, + {"Encoder", Type, 0}, + {"EndElement", Type, 0}, + {"EndElement.Name", Field, 0}, + {"Escape", Func, 0}, + {"EscapeText", Func, 1}, + {"HTMLAutoClose", Var, 0}, + {"HTMLEntity", Var, 0}, + {"Header", Const, 0}, + {"Marshal", Func, 0}, + {"MarshalIndent", Func, 0}, + {"Marshaler", Type, 2}, + {"MarshalerAttr", Type, 2}, + {"Name", Type, 0}, + {"Name.Local", Field, 0}, + {"Name.Space", Field, 0}, + {"NewDecoder", Func, 0}, + {"NewEncoder", Func, 0}, + {"NewTokenDecoder", Func, 10}, + {"ProcInst", Type, 0}, + {"ProcInst.Inst", Field, 0}, + {"ProcInst.Target", Field, 0}, + {"StartElement", Type, 0}, + {"StartElement.Attr", Field, 0}, + {"StartElement.Name", Field, 0}, + {"SyntaxError", Type, 0}, + {"SyntaxError.Line", Field, 0}, + {"SyntaxError.Msg", Field, 0}, + {"TagPathError", Type, 0}, + {"TagPathError.Field1", Field, 0}, + {"TagPathError.Field2", Field, 0}, + {"TagPathError.Struct", Field, 0}, + {"TagPathError.Tag1", Field, 0}, + {"TagPathError.Tag2", Field, 0}, + {"Token", Type, 0}, + {"TokenReader", Type, 10}, + {"Unmarshal", Func, 0}, + {"UnmarshalError", Type, 0}, + {"Unmarshaler", Type, 2}, + {"UnmarshalerAttr", Type, 2}, + {"UnsupportedTypeError", Type, 0}, + {"UnsupportedTypeError.Type", Field, 0}, + }, + "errors": { + {"As", Func, 13}, + {"ErrUnsupported", Var, 21}, + {"Is", Func, 13}, + {"Join", Func, 20}, + {"New", Func, 0}, + {"Unwrap", Func, 13}, + }, + "expvar": { + {"(*Float).Add", Method, 0}, + {"(*Float).Set", Method, 0}, + {"(*Float).String", Method, 0}, + {"(*Float).Value", Method, 8}, + {"(*Int).Add", Method, 0}, + {"(*Int).Set", Method, 0}, + {"(*Int).String", Method, 0}, + {"(*Int).Value", Method, 8}, + {"(*Map).Add", Method, 0}, + 
{"(*Map).AddFloat", Method, 0}, + {"(*Map).Delete", Method, 12}, + {"(*Map).Do", Method, 0}, + {"(*Map).Get", Method, 0}, + {"(*Map).Init", Method, 0}, + {"(*Map).Set", Method, 0}, + {"(*Map).String", Method, 0}, + {"(*String).Set", Method, 0}, + {"(*String).String", Method, 0}, + {"(*String).Value", Method, 8}, + {"(Func).String", Method, 0}, + {"(Func).Value", Method, 8}, + {"Do", Func, 0}, + {"Float", Type, 0}, + {"Func", Type, 0}, + {"Get", Func, 0}, + {"Handler", Func, 8}, + {"Int", Type, 0}, + {"KeyValue", Type, 0}, + {"KeyValue.Key", Field, 0}, + {"KeyValue.Value", Field, 0}, + {"Map", Type, 0}, + {"NewFloat", Func, 0}, + {"NewInt", Func, 0}, + {"NewMap", Func, 0}, + {"NewString", Func, 0}, + {"Publish", Func, 0}, + {"String", Type, 0}, + {"Var", Type, 0}, + }, + "flag": { + {"(*FlagSet).Arg", Method, 0}, + {"(*FlagSet).Args", Method, 0}, + {"(*FlagSet).Bool", Method, 0}, + {"(*FlagSet).BoolFunc", Method, 21}, + {"(*FlagSet).BoolVar", Method, 0}, + {"(*FlagSet).Duration", Method, 0}, + {"(*FlagSet).DurationVar", Method, 0}, + {"(*FlagSet).ErrorHandling", Method, 10}, + {"(*FlagSet).Float64", Method, 0}, + {"(*FlagSet).Float64Var", Method, 0}, + {"(*FlagSet).Func", Method, 16}, + {"(*FlagSet).Init", Method, 0}, + {"(*FlagSet).Int", Method, 0}, + {"(*FlagSet).Int64", Method, 0}, + {"(*FlagSet).Int64Var", Method, 0}, + {"(*FlagSet).IntVar", Method, 0}, + {"(*FlagSet).Lookup", Method, 0}, + {"(*FlagSet).NArg", Method, 0}, + {"(*FlagSet).NFlag", Method, 0}, + {"(*FlagSet).Name", Method, 10}, + {"(*FlagSet).Output", Method, 10}, + {"(*FlagSet).Parse", Method, 0}, + {"(*FlagSet).Parsed", Method, 0}, + {"(*FlagSet).PrintDefaults", Method, 0}, + {"(*FlagSet).Set", Method, 0}, + {"(*FlagSet).SetOutput", Method, 0}, + {"(*FlagSet).String", Method, 0}, + {"(*FlagSet).StringVar", Method, 0}, + {"(*FlagSet).TextVar", Method, 19}, + {"(*FlagSet).Uint", Method, 0}, + {"(*FlagSet).Uint64", Method, 0}, + {"(*FlagSet).Uint64Var", Method, 0}, + {"(*FlagSet).UintVar", Method, 
0}, + {"(*FlagSet).Var", Method, 0}, + {"(*FlagSet).Visit", Method, 0}, + {"(*FlagSet).VisitAll", Method, 0}, + {"Arg", Func, 0}, + {"Args", Func, 0}, + {"Bool", Func, 0}, + {"BoolFunc", Func, 21}, + {"BoolVar", Func, 0}, + {"CommandLine", Var, 2}, + {"ContinueOnError", Const, 0}, + {"Duration", Func, 0}, + {"DurationVar", Func, 0}, + {"ErrHelp", Var, 0}, + {"ErrorHandling", Type, 0}, + {"ExitOnError", Const, 0}, + {"Flag", Type, 0}, + {"Flag.DefValue", Field, 0}, + {"Flag.Name", Field, 0}, + {"Flag.Usage", Field, 0}, + {"Flag.Value", Field, 0}, + {"FlagSet", Type, 0}, + {"FlagSet.Usage", Field, 0}, + {"Float64", Func, 0}, + {"Float64Var", Func, 0}, + {"Func", Func, 16}, + {"Getter", Type, 2}, + {"Int", Func, 0}, + {"Int64", Func, 0}, + {"Int64Var", Func, 0}, + {"IntVar", Func, 0}, + {"Lookup", Func, 0}, + {"NArg", Func, 0}, + {"NFlag", Func, 0}, + {"NewFlagSet", Func, 0}, + {"PanicOnError", Const, 0}, + {"Parse", Func, 0}, + {"Parsed", Func, 0}, + {"PrintDefaults", Func, 0}, + {"Set", Func, 0}, + {"String", Func, 0}, + {"StringVar", Func, 0}, + {"TextVar", Func, 19}, + {"Uint", Func, 0}, + {"Uint64", Func, 0}, + {"Uint64Var", Func, 0}, + {"UintVar", Func, 0}, + {"UnquoteUsage", Func, 5}, + {"Usage", Var, 0}, + {"Value", Type, 0}, + {"Var", Func, 0}, + {"Visit", Func, 0}, + {"VisitAll", Func, 0}, + }, + "fmt": { + {"Append", Func, 19}, + {"Appendf", Func, 19}, + {"Appendln", Func, 19}, + {"Errorf", Func, 0}, + {"FormatString", Func, 20}, + {"Formatter", Type, 0}, + {"Fprint", Func, 0}, + {"Fprintf", Func, 0}, + {"Fprintln", Func, 0}, + {"Fscan", Func, 0}, + {"Fscanf", Func, 0}, + {"Fscanln", Func, 0}, + {"GoStringer", Type, 0}, + {"Print", Func, 0}, + {"Printf", Func, 0}, + {"Println", Func, 0}, + {"Scan", Func, 0}, + {"ScanState", Type, 0}, + {"Scanf", Func, 0}, + {"Scanln", Func, 0}, + {"Scanner", Type, 0}, + {"Sprint", Func, 0}, + {"Sprintf", Func, 0}, + {"Sprintln", Func, 0}, + {"Sscan", Func, 0}, + {"Sscanf", Func, 0}, + {"Sscanln", Func, 0}, + {"State", Type, 
0}, + {"Stringer", Type, 0}, + }, + "go/ast": { + {"(*ArrayType).End", Method, 0}, + {"(*ArrayType).Pos", Method, 0}, + {"(*AssignStmt).End", Method, 0}, + {"(*AssignStmt).Pos", Method, 0}, + {"(*BadDecl).End", Method, 0}, + {"(*BadDecl).Pos", Method, 0}, + {"(*BadExpr).End", Method, 0}, + {"(*BadExpr).Pos", Method, 0}, + {"(*BadStmt).End", Method, 0}, + {"(*BadStmt).Pos", Method, 0}, + {"(*BasicLit).End", Method, 0}, + {"(*BasicLit).Pos", Method, 0}, + {"(*BinaryExpr).End", Method, 0}, + {"(*BinaryExpr).Pos", Method, 0}, + {"(*BlockStmt).End", Method, 0}, + {"(*BlockStmt).Pos", Method, 0}, + {"(*BranchStmt).End", Method, 0}, + {"(*BranchStmt).Pos", Method, 0}, + {"(*CallExpr).End", Method, 0}, + {"(*CallExpr).Pos", Method, 0}, + {"(*CaseClause).End", Method, 0}, + {"(*CaseClause).Pos", Method, 0}, + {"(*ChanType).End", Method, 0}, + {"(*ChanType).Pos", Method, 0}, + {"(*CommClause).End", Method, 0}, + {"(*CommClause).Pos", Method, 0}, + {"(*Comment).End", Method, 0}, + {"(*Comment).Pos", Method, 0}, + {"(*CommentGroup).End", Method, 0}, + {"(*CommentGroup).Pos", Method, 0}, + {"(*CommentGroup).Text", Method, 0}, + {"(*CompositeLit).End", Method, 0}, + {"(*CompositeLit).Pos", Method, 0}, + {"(*DeclStmt).End", Method, 0}, + {"(*DeclStmt).Pos", Method, 0}, + {"(*DeferStmt).End", Method, 0}, + {"(*DeferStmt).Pos", Method, 0}, + {"(*Ellipsis).End", Method, 0}, + {"(*Ellipsis).Pos", Method, 0}, + {"(*EmptyStmt).End", Method, 0}, + {"(*EmptyStmt).Pos", Method, 0}, + {"(*ExprStmt).End", Method, 0}, + {"(*ExprStmt).Pos", Method, 0}, + {"(*Field).End", Method, 0}, + {"(*Field).Pos", Method, 0}, + {"(*FieldList).End", Method, 0}, + {"(*FieldList).NumFields", Method, 0}, + {"(*FieldList).Pos", Method, 0}, + {"(*File).End", Method, 0}, + {"(*File).Pos", Method, 0}, + {"(*ForStmt).End", Method, 0}, + {"(*ForStmt).Pos", Method, 0}, + {"(*FuncDecl).End", Method, 0}, + {"(*FuncDecl).Pos", Method, 0}, + {"(*FuncLit).End", Method, 0}, + {"(*FuncLit).Pos", Method, 0}, + 
{"(*FuncType).End", Method, 0}, + {"(*FuncType).Pos", Method, 0}, + {"(*GenDecl).End", Method, 0}, + {"(*GenDecl).Pos", Method, 0}, + {"(*GoStmt).End", Method, 0}, + {"(*GoStmt).Pos", Method, 0}, + {"(*Ident).End", Method, 0}, + {"(*Ident).IsExported", Method, 0}, + {"(*Ident).Pos", Method, 0}, + {"(*Ident).String", Method, 0}, + {"(*IfStmt).End", Method, 0}, + {"(*IfStmt).Pos", Method, 0}, + {"(*ImportSpec).End", Method, 0}, + {"(*ImportSpec).Pos", Method, 0}, + {"(*IncDecStmt).End", Method, 0}, + {"(*IncDecStmt).Pos", Method, 0}, + {"(*IndexExpr).End", Method, 0}, + {"(*IndexExpr).Pos", Method, 0}, + {"(*IndexListExpr).End", Method, 18}, + {"(*IndexListExpr).Pos", Method, 18}, + {"(*InterfaceType).End", Method, 0}, + {"(*InterfaceType).Pos", Method, 0}, + {"(*KeyValueExpr).End", Method, 0}, + {"(*KeyValueExpr).Pos", Method, 0}, + {"(*LabeledStmt).End", Method, 0}, + {"(*LabeledStmt).Pos", Method, 0}, + {"(*MapType).End", Method, 0}, + {"(*MapType).Pos", Method, 0}, + {"(*Object).Pos", Method, 0}, + {"(*Package).End", Method, 0}, + {"(*Package).Pos", Method, 0}, + {"(*ParenExpr).End", Method, 0}, + {"(*ParenExpr).Pos", Method, 0}, + {"(*RangeStmt).End", Method, 0}, + {"(*RangeStmt).Pos", Method, 0}, + {"(*ReturnStmt).End", Method, 0}, + {"(*ReturnStmt).Pos", Method, 0}, + {"(*Scope).Insert", Method, 0}, + {"(*Scope).Lookup", Method, 0}, + {"(*Scope).String", Method, 0}, + {"(*SelectStmt).End", Method, 0}, + {"(*SelectStmt).Pos", Method, 0}, + {"(*SelectorExpr).End", Method, 0}, + {"(*SelectorExpr).Pos", Method, 0}, + {"(*SendStmt).End", Method, 0}, + {"(*SendStmt).Pos", Method, 0}, + {"(*SliceExpr).End", Method, 0}, + {"(*SliceExpr).Pos", Method, 0}, + {"(*StarExpr).End", Method, 0}, + {"(*StarExpr).Pos", Method, 0}, + {"(*StructType).End", Method, 0}, + {"(*StructType).Pos", Method, 0}, + {"(*SwitchStmt).End", Method, 0}, + {"(*SwitchStmt).Pos", Method, 0}, + {"(*TypeAssertExpr).End", Method, 0}, + {"(*TypeAssertExpr).Pos", Method, 0}, + {"(*TypeSpec).End", 
Method, 0}, + {"(*TypeSpec).Pos", Method, 0}, + {"(*TypeSwitchStmt).End", Method, 0}, + {"(*TypeSwitchStmt).Pos", Method, 0}, + {"(*UnaryExpr).End", Method, 0}, + {"(*UnaryExpr).Pos", Method, 0}, + {"(*ValueSpec).End", Method, 0}, + {"(*ValueSpec).Pos", Method, 0}, + {"(CommentMap).Comments", Method, 1}, + {"(CommentMap).Filter", Method, 1}, + {"(CommentMap).String", Method, 1}, + {"(CommentMap).Update", Method, 1}, + {"(ObjKind).String", Method, 0}, + {"ArrayType", Type, 0}, + {"ArrayType.Elt", Field, 0}, + {"ArrayType.Lbrack", Field, 0}, + {"ArrayType.Len", Field, 0}, + {"AssignStmt", Type, 0}, + {"AssignStmt.Lhs", Field, 0}, + {"AssignStmt.Rhs", Field, 0}, + {"AssignStmt.Tok", Field, 0}, + {"AssignStmt.TokPos", Field, 0}, + {"Bad", Const, 0}, + {"BadDecl", Type, 0}, + {"BadDecl.From", Field, 0}, + {"BadDecl.To", Field, 0}, + {"BadExpr", Type, 0}, + {"BadExpr.From", Field, 0}, + {"BadExpr.To", Field, 0}, + {"BadStmt", Type, 0}, + {"BadStmt.From", Field, 0}, + {"BadStmt.To", Field, 0}, + {"BasicLit", Type, 0}, + {"BasicLit.Kind", Field, 0}, + {"BasicLit.Value", Field, 0}, + {"BasicLit.ValuePos", Field, 0}, + {"BinaryExpr", Type, 0}, + {"BinaryExpr.Op", Field, 0}, + {"BinaryExpr.OpPos", Field, 0}, + {"BinaryExpr.X", Field, 0}, + {"BinaryExpr.Y", Field, 0}, + {"BlockStmt", Type, 0}, + {"BlockStmt.Lbrace", Field, 0}, + {"BlockStmt.List", Field, 0}, + {"BlockStmt.Rbrace", Field, 0}, + {"BranchStmt", Type, 0}, + {"BranchStmt.Label", Field, 0}, + {"BranchStmt.Tok", Field, 0}, + {"BranchStmt.TokPos", Field, 0}, + {"CallExpr", Type, 0}, + {"CallExpr.Args", Field, 0}, + {"CallExpr.Ellipsis", Field, 0}, + {"CallExpr.Fun", Field, 0}, + {"CallExpr.Lparen", Field, 0}, + {"CallExpr.Rparen", Field, 0}, + {"CaseClause", Type, 0}, + {"CaseClause.Body", Field, 0}, + {"CaseClause.Case", Field, 0}, + {"CaseClause.Colon", Field, 0}, + {"CaseClause.List", Field, 0}, + {"ChanDir", Type, 0}, + {"ChanType", Type, 0}, + {"ChanType.Arrow", Field, 1}, + {"ChanType.Begin", Field, 0}, + 
{"ChanType.Dir", Field, 0}, + {"ChanType.Value", Field, 0}, + {"CommClause", Type, 0}, + {"CommClause.Body", Field, 0}, + {"CommClause.Case", Field, 0}, + {"CommClause.Colon", Field, 0}, + {"CommClause.Comm", Field, 0}, + {"Comment", Type, 0}, + {"Comment.Slash", Field, 0}, + {"Comment.Text", Field, 0}, + {"CommentGroup", Type, 0}, + {"CommentGroup.List", Field, 0}, + {"CommentMap", Type, 1}, + {"CompositeLit", Type, 0}, + {"CompositeLit.Elts", Field, 0}, + {"CompositeLit.Incomplete", Field, 11}, + {"CompositeLit.Lbrace", Field, 0}, + {"CompositeLit.Rbrace", Field, 0}, + {"CompositeLit.Type", Field, 0}, + {"Con", Const, 0}, + {"Decl", Type, 0}, + {"DeclStmt", Type, 0}, + {"DeclStmt.Decl", Field, 0}, + {"DeferStmt", Type, 0}, + {"DeferStmt.Call", Field, 0}, + {"DeferStmt.Defer", Field, 0}, + {"Ellipsis", Type, 0}, + {"Ellipsis.Ellipsis", Field, 0}, + {"Ellipsis.Elt", Field, 0}, + {"EmptyStmt", Type, 0}, + {"EmptyStmt.Implicit", Field, 5}, + {"EmptyStmt.Semicolon", Field, 0}, + {"Expr", Type, 0}, + {"ExprStmt", Type, 0}, + {"ExprStmt.X", Field, 0}, + {"Field", Type, 0}, + {"Field.Comment", Field, 0}, + {"Field.Doc", Field, 0}, + {"Field.Names", Field, 0}, + {"Field.Tag", Field, 0}, + {"Field.Type", Field, 0}, + {"FieldFilter", Type, 0}, + {"FieldList", Type, 0}, + {"FieldList.Closing", Field, 0}, + {"FieldList.List", Field, 0}, + {"FieldList.Opening", Field, 0}, + {"File", Type, 0}, + {"File.Comments", Field, 0}, + {"File.Decls", Field, 0}, + {"File.Doc", Field, 0}, + {"File.FileEnd", Field, 20}, + {"File.FileStart", Field, 20}, + {"File.GoVersion", Field, 21}, + {"File.Imports", Field, 0}, + {"File.Name", Field, 0}, + {"File.Package", Field, 0}, + {"File.Scope", Field, 0}, + {"File.Unresolved", Field, 0}, + {"FileExports", Func, 0}, + {"Filter", Type, 0}, + {"FilterDecl", Func, 0}, + {"FilterFile", Func, 0}, + {"FilterFuncDuplicates", Const, 0}, + {"FilterImportDuplicates", Const, 0}, + {"FilterPackage", Func, 0}, + {"FilterUnassociatedComments", Const, 0}, + 
{"ForStmt", Type, 0}, + {"ForStmt.Body", Field, 0}, + {"ForStmt.Cond", Field, 0}, + {"ForStmt.For", Field, 0}, + {"ForStmt.Init", Field, 0}, + {"ForStmt.Post", Field, 0}, + {"Fprint", Func, 0}, + {"Fun", Const, 0}, + {"FuncDecl", Type, 0}, + {"FuncDecl.Body", Field, 0}, + {"FuncDecl.Doc", Field, 0}, + {"FuncDecl.Name", Field, 0}, + {"FuncDecl.Recv", Field, 0}, + {"FuncDecl.Type", Field, 0}, + {"FuncLit", Type, 0}, + {"FuncLit.Body", Field, 0}, + {"FuncLit.Type", Field, 0}, + {"FuncType", Type, 0}, + {"FuncType.Func", Field, 0}, + {"FuncType.Params", Field, 0}, + {"FuncType.Results", Field, 0}, + {"FuncType.TypeParams", Field, 18}, + {"GenDecl", Type, 0}, + {"GenDecl.Doc", Field, 0}, + {"GenDecl.Lparen", Field, 0}, + {"GenDecl.Rparen", Field, 0}, + {"GenDecl.Specs", Field, 0}, + {"GenDecl.Tok", Field, 0}, + {"GenDecl.TokPos", Field, 0}, + {"GoStmt", Type, 0}, + {"GoStmt.Call", Field, 0}, + {"GoStmt.Go", Field, 0}, + {"Ident", Type, 0}, + {"Ident.Name", Field, 0}, + {"Ident.NamePos", Field, 0}, + {"Ident.Obj", Field, 0}, + {"IfStmt", Type, 0}, + {"IfStmt.Body", Field, 0}, + {"IfStmt.Cond", Field, 0}, + {"IfStmt.Else", Field, 0}, + {"IfStmt.If", Field, 0}, + {"IfStmt.Init", Field, 0}, + {"ImportSpec", Type, 0}, + {"ImportSpec.Comment", Field, 0}, + {"ImportSpec.Doc", Field, 0}, + {"ImportSpec.EndPos", Field, 0}, + {"ImportSpec.Name", Field, 0}, + {"ImportSpec.Path", Field, 0}, + {"Importer", Type, 0}, + {"IncDecStmt", Type, 0}, + {"IncDecStmt.Tok", Field, 0}, + {"IncDecStmt.TokPos", Field, 0}, + {"IncDecStmt.X", Field, 0}, + {"IndexExpr", Type, 0}, + {"IndexExpr.Index", Field, 0}, + {"IndexExpr.Lbrack", Field, 0}, + {"IndexExpr.Rbrack", Field, 0}, + {"IndexExpr.X", Field, 0}, + {"IndexListExpr", Type, 18}, + {"IndexListExpr.Indices", Field, 18}, + {"IndexListExpr.Lbrack", Field, 18}, + {"IndexListExpr.Rbrack", Field, 18}, + {"IndexListExpr.X", Field, 18}, + {"Inspect", Func, 0}, + {"InterfaceType", Type, 0}, + {"InterfaceType.Incomplete", Field, 0}, + 
{"InterfaceType.Interface", Field, 0}, + {"InterfaceType.Methods", Field, 0}, + {"IsExported", Func, 0}, + {"IsGenerated", Func, 21}, + {"KeyValueExpr", Type, 0}, + {"KeyValueExpr.Colon", Field, 0}, + {"KeyValueExpr.Key", Field, 0}, + {"KeyValueExpr.Value", Field, 0}, + {"LabeledStmt", Type, 0}, + {"LabeledStmt.Colon", Field, 0}, + {"LabeledStmt.Label", Field, 0}, + {"LabeledStmt.Stmt", Field, 0}, + {"Lbl", Const, 0}, + {"MapType", Type, 0}, + {"MapType.Key", Field, 0}, + {"MapType.Map", Field, 0}, + {"MapType.Value", Field, 0}, + {"MergeMode", Type, 0}, + {"MergePackageFiles", Func, 0}, + {"NewCommentMap", Func, 1}, + {"NewIdent", Func, 0}, + {"NewObj", Func, 0}, + {"NewPackage", Func, 0}, + {"NewScope", Func, 0}, + {"Node", Type, 0}, + {"NotNilFilter", Func, 0}, + {"ObjKind", Type, 0}, + {"Object", Type, 0}, + {"Object.Data", Field, 0}, + {"Object.Decl", Field, 0}, + {"Object.Kind", Field, 0}, + {"Object.Name", Field, 0}, + {"Object.Type", Field, 0}, + {"Package", Type, 0}, + {"Package.Files", Field, 0}, + {"Package.Imports", Field, 0}, + {"Package.Name", Field, 0}, + {"Package.Scope", Field, 0}, + {"PackageExports", Func, 0}, + {"ParenExpr", Type, 0}, + {"ParenExpr.Lparen", Field, 0}, + {"ParenExpr.Rparen", Field, 0}, + {"ParenExpr.X", Field, 0}, + {"Pkg", Const, 0}, + {"Print", Func, 0}, + {"RECV", Const, 0}, + {"RangeStmt", Type, 0}, + {"RangeStmt.Body", Field, 0}, + {"RangeStmt.For", Field, 0}, + {"RangeStmt.Key", Field, 0}, + {"RangeStmt.Range", Field, 20}, + {"RangeStmt.Tok", Field, 0}, + {"RangeStmt.TokPos", Field, 0}, + {"RangeStmt.Value", Field, 0}, + {"RangeStmt.X", Field, 0}, + {"ReturnStmt", Type, 0}, + {"ReturnStmt.Results", Field, 0}, + {"ReturnStmt.Return", Field, 0}, + {"SEND", Const, 0}, + {"Scope", Type, 0}, + {"Scope.Objects", Field, 0}, + {"Scope.Outer", Field, 0}, + {"SelectStmt", Type, 0}, + {"SelectStmt.Body", Field, 0}, + {"SelectStmt.Select", Field, 0}, + {"SelectorExpr", Type, 0}, + {"SelectorExpr.Sel", Field, 0}, + {"SelectorExpr.X", 
Field, 0}, + {"SendStmt", Type, 0}, + {"SendStmt.Arrow", Field, 0}, + {"SendStmt.Chan", Field, 0}, + {"SendStmt.Value", Field, 0}, + {"SliceExpr", Type, 0}, + {"SliceExpr.High", Field, 0}, + {"SliceExpr.Lbrack", Field, 0}, + {"SliceExpr.Low", Field, 0}, + {"SliceExpr.Max", Field, 2}, + {"SliceExpr.Rbrack", Field, 0}, + {"SliceExpr.Slice3", Field, 2}, + {"SliceExpr.X", Field, 0}, + {"SortImports", Func, 0}, + {"Spec", Type, 0}, + {"StarExpr", Type, 0}, + {"StarExpr.Star", Field, 0}, + {"StarExpr.X", Field, 0}, + {"Stmt", Type, 0}, + {"StructType", Type, 0}, + {"StructType.Fields", Field, 0}, + {"StructType.Incomplete", Field, 0}, + {"StructType.Struct", Field, 0}, + {"SwitchStmt", Type, 0}, + {"SwitchStmt.Body", Field, 0}, + {"SwitchStmt.Init", Field, 0}, + {"SwitchStmt.Switch", Field, 0}, + {"SwitchStmt.Tag", Field, 0}, + {"Typ", Const, 0}, + {"TypeAssertExpr", Type, 0}, + {"TypeAssertExpr.Lparen", Field, 2}, + {"TypeAssertExpr.Rparen", Field, 2}, + {"TypeAssertExpr.Type", Field, 0}, + {"TypeAssertExpr.X", Field, 0}, + {"TypeSpec", Type, 0}, + {"TypeSpec.Assign", Field, 9}, + {"TypeSpec.Comment", Field, 0}, + {"TypeSpec.Doc", Field, 0}, + {"TypeSpec.Name", Field, 0}, + {"TypeSpec.Type", Field, 0}, + {"TypeSpec.TypeParams", Field, 18}, + {"TypeSwitchStmt", Type, 0}, + {"TypeSwitchStmt.Assign", Field, 0}, + {"TypeSwitchStmt.Body", Field, 0}, + {"TypeSwitchStmt.Init", Field, 0}, + {"TypeSwitchStmt.Switch", Field, 0}, + {"UnaryExpr", Type, 0}, + {"UnaryExpr.Op", Field, 0}, + {"UnaryExpr.OpPos", Field, 0}, + {"UnaryExpr.X", Field, 0}, + {"Unparen", Func, 22}, + {"ValueSpec", Type, 0}, + {"ValueSpec.Comment", Field, 0}, + {"ValueSpec.Doc", Field, 0}, + {"ValueSpec.Names", Field, 0}, + {"ValueSpec.Type", Field, 0}, + {"ValueSpec.Values", Field, 0}, + {"Var", Const, 0}, + {"Visitor", Type, 0}, + {"Walk", Func, 0}, + }, + "go/build": { + {"(*Context).Import", Method, 0}, + {"(*Context).ImportDir", Method, 0}, + {"(*Context).MatchFile", Method, 2}, + {"(*Context).SrcDirs", 
Method, 0}, + {"(*MultiplePackageError).Error", Method, 4}, + {"(*NoGoError).Error", Method, 0}, + {"(*Package).IsCommand", Method, 0}, + {"AllowBinary", Const, 0}, + {"ArchChar", Func, 0}, + {"Context", Type, 0}, + {"Context.BuildTags", Field, 0}, + {"Context.CgoEnabled", Field, 0}, + {"Context.Compiler", Field, 0}, + {"Context.Dir", Field, 14}, + {"Context.GOARCH", Field, 0}, + {"Context.GOOS", Field, 0}, + {"Context.GOPATH", Field, 0}, + {"Context.GOROOT", Field, 0}, + {"Context.HasSubdir", Field, 0}, + {"Context.InstallSuffix", Field, 1}, + {"Context.IsAbsPath", Field, 0}, + {"Context.IsDir", Field, 0}, + {"Context.JoinPath", Field, 0}, + {"Context.OpenFile", Field, 0}, + {"Context.ReadDir", Field, 0}, + {"Context.ReleaseTags", Field, 1}, + {"Context.SplitPathList", Field, 0}, + {"Context.ToolTags", Field, 17}, + {"Context.UseAllFiles", Field, 0}, + {"Default", Var, 0}, + {"Directive", Type, 21}, + {"Directive.Pos", Field, 21}, + {"Directive.Text", Field, 21}, + {"FindOnly", Const, 0}, + {"IgnoreVendor", Const, 6}, + {"Import", Func, 0}, + {"ImportComment", Const, 4}, + {"ImportDir", Func, 0}, + {"ImportMode", Type, 0}, + {"IsLocalImport", Func, 0}, + {"MultiplePackageError", Type, 4}, + {"MultiplePackageError.Dir", Field, 4}, + {"MultiplePackageError.Files", Field, 4}, + {"MultiplePackageError.Packages", Field, 4}, + {"NoGoError", Type, 0}, + {"NoGoError.Dir", Field, 0}, + {"Package", Type, 0}, + {"Package.AllTags", Field, 2}, + {"Package.BinDir", Field, 0}, + {"Package.BinaryOnly", Field, 7}, + {"Package.CFiles", Field, 0}, + {"Package.CXXFiles", Field, 2}, + {"Package.CgoCFLAGS", Field, 0}, + {"Package.CgoCPPFLAGS", Field, 2}, + {"Package.CgoCXXFLAGS", Field, 2}, + {"Package.CgoFFLAGS", Field, 7}, + {"Package.CgoFiles", Field, 0}, + {"Package.CgoLDFLAGS", Field, 0}, + {"Package.CgoPkgConfig", Field, 0}, + {"Package.ConflictDir", Field, 2}, + {"Package.Dir", Field, 0}, + {"Package.Directives", Field, 21}, + {"Package.Doc", Field, 0}, + 
{"Package.EmbedPatternPos", Field, 16}, + {"Package.EmbedPatterns", Field, 16}, + {"Package.FFiles", Field, 7}, + {"Package.GoFiles", Field, 0}, + {"Package.Goroot", Field, 0}, + {"Package.HFiles", Field, 0}, + {"Package.IgnoredGoFiles", Field, 1}, + {"Package.IgnoredOtherFiles", Field, 16}, + {"Package.ImportComment", Field, 4}, + {"Package.ImportPath", Field, 0}, + {"Package.ImportPos", Field, 0}, + {"Package.Imports", Field, 0}, + {"Package.InvalidGoFiles", Field, 6}, + {"Package.MFiles", Field, 3}, + {"Package.Name", Field, 0}, + {"Package.PkgObj", Field, 0}, + {"Package.PkgRoot", Field, 0}, + {"Package.PkgTargetRoot", Field, 5}, + {"Package.Root", Field, 0}, + {"Package.SFiles", Field, 0}, + {"Package.SrcRoot", Field, 0}, + {"Package.SwigCXXFiles", Field, 1}, + {"Package.SwigFiles", Field, 1}, + {"Package.SysoFiles", Field, 0}, + {"Package.TestDirectives", Field, 21}, + {"Package.TestEmbedPatternPos", Field, 16}, + {"Package.TestEmbedPatterns", Field, 16}, + {"Package.TestGoFiles", Field, 0}, + {"Package.TestImportPos", Field, 0}, + {"Package.TestImports", Field, 0}, + {"Package.XTestDirectives", Field, 21}, + {"Package.XTestEmbedPatternPos", Field, 16}, + {"Package.XTestEmbedPatterns", Field, 16}, + {"Package.XTestGoFiles", Field, 0}, + {"Package.XTestImportPos", Field, 0}, + {"Package.XTestImports", Field, 0}, + {"ToolDir", Var, 0}, + }, + "go/build/constraint": { + {"(*AndExpr).Eval", Method, 16}, + {"(*AndExpr).String", Method, 16}, + {"(*NotExpr).Eval", Method, 16}, + {"(*NotExpr).String", Method, 16}, + {"(*OrExpr).Eval", Method, 16}, + {"(*OrExpr).String", Method, 16}, + {"(*SyntaxError).Error", Method, 16}, + {"(*TagExpr).Eval", Method, 16}, + {"(*TagExpr).String", Method, 16}, + {"AndExpr", Type, 16}, + {"AndExpr.X", Field, 16}, + {"AndExpr.Y", Field, 16}, + {"Expr", Type, 16}, + {"GoVersion", Func, 21}, + {"IsGoBuild", Func, 16}, + {"IsPlusBuild", Func, 16}, + {"NotExpr", Type, 16}, + {"NotExpr.X", Field, 16}, + {"OrExpr", Type, 16}, + {"OrExpr.X", 
Field, 16}, + {"OrExpr.Y", Field, 16}, + {"Parse", Func, 16}, + {"PlusBuildLines", Func, 16}, + {"SyntaxError", Type, 16}, + {"SyntaxError.Err", Field, 16}, + {"SyntaxError.Offset", Field, 16}, + {"TagExpr", Type, 16}, + {"TagExpr.Tag", Field, 16}, + }, + "go/constant": { + {"(Kind).String", Method, 18}, + {"BinaryOp", Func, 5}, + {"BitLen", Func, 5}, + {"Bool", Const, 5}, + {"BoolVal", Func, 5}, + {"Bytes", Func, 5}, + {"Compare", Func, 5}, + {"Complex", Const, 5}, + {"Denom", Func, 5}, + {"Float", Const, 5}, + {"Float32Val", Func, 5}, + {"Float64Val", Func, 5}, + {"Imag", Func, 5}, + {"Int", Const, 5}, + {"Int64Val", Func, 5}, + {"Kind", Type, 5}, + {"Make", Func, 13}, + {"MakeBool", Func, 5}, + {"MakeFloat64", Func, 5}, + {"MakeFromBytes", Func, 5}, + {"MakeFromLiteral", Func, 5}, + {"MakeImag", Func, 5}, + {"MakeInt64", Func, 5}, + {"MakeString", Func, 5}, + {"MakeUint64", Func, 5}, + {"MakeUnknown", Func, 5}, + {"Num", Func, 5}, + {"Real", Func, 5}, + {"Shift", Func, 5}, + {"Sign", Func, 5}, + {"String", Const, 5}, + {"StringVal", Func, 5}, + {"ToComplex", Func, 6}, + {"ToFloat", Func, 6}, + {"ToInt", Func, 6}, + {"Uint64Val", Func, 5}, + {"UnaryOp", Func, 5}, + {"Unknown", Const, 5}, + {"Val", Func, 13}, + {"Value", Type, 5}, + }, + "go/doc": { + {"(*Package).Filter", Method, 0}, + {"(*Package).HTML", Method, 19}, + {"(*Package).Markdown", Method, 19}, + {"(*Package).Parser", Method, 19}, + {"(*Package).Printer", Method, 19}, + {"(*Package).Synopsis", Method, 19}, + {"(*Package).Text", Method, 19}, + {"AllDecls", Const, 0}, + {"AllMethods", Const, 0}, + {"Example", Type, 0}, + {"Example.Code", Field, 0}, + {"Example.Comments", Field, 0}, + {"Example.Doc", Field, 0}, + {"Example.EmptyOutput", Field, 1}, + {"Example.Name", Field, 0}, + {"Example.Order", Field, 1}, + {"Example.Output", Field, 0}, + {"Example.Play", Field, 1}, + {"Example.Suffix", Field, 14}, + {"Example.Unordered", Field, 7}, + {"Examples", Func, 0}, + {"Filter", Type, 0}, + {"Func", Type, 0}, + 
{"Func.Decl", Field, 0}, + {"Func.Doc", Field, 0}, + {"Func.Examples", Field, 14}, + {"Func.Level", Field, 0}, + {"Func.Name", Field, 0}, + {"Func.Orig", Field, 0}, + {"Func.Recv", Field, 0}, + {"IllegalPrefixes", Var, 1}, + {"IsPredeclared", Func, 8}, + {"Mode", Type, 0}, + {"New", Func, 0}, + {"NewFromFiles", Func, 14}, + {"Note", Type, 1}, + {"Note.Body", Field, 1}, + {"Note.End", Field, 1}, + {"Note.Pos", Field, 1}, + {"Note.UID", Field, 1}, + {"Package", Type, 0}, + {"Package.Bugs", Field, 0}, + {"Package.Consts", Field, 0}, + {"Package.Doc", Field, 0}, + {"Package.Examples", Field, 14}, + {"Package.Filenames", Field, 0}, + {"Package.Funcs", Field, 0}, + {"Package.ImportPath", Field, 0}, + {"Package.Imports", Field, 0}, + {"Package.Name", Field, 0}, + {"Package.Notes", Field, 1}, + {"Package.Types", Field, 0}, + {"Package.Vars", Field, 0}, + {"PreserveAST", Const, 12}, + {"Synopsis", Func, 0}, + {"ToHTML", Func, 0}, + {"ToText", Func, 0}, + {"Type", Type, 0}, + {"Type.Consts", Field, 0}, + {"Type.Decl", Field, 0}, + {"Type.Doc", Field, 0}, + {"Type.Examples", Field, 14}, + {"Type.Funcs", Field, 0}, + {"Type.Methods", Field, 0}, + {"Type.Name", Field, 0}, + {"Type.Vars", Field, 0}, + {"Value", Type, 0}, + {"Value.Decl", Field, 0}, + {"Value.Doc", Field, 0}, + {"Value.Names", Field, 0}, + }, + "go/doc/comment": { + {"(*DocLink).DefaultURL", Method, 19}, + {"(*Heading).DefaultID", Method, 19}, + {"(*List).BlankBefore", Method, 19}, + {"(*List).BlankBetween", Method, 19}, + {"(*Parser).Parse", Method, 19}, + {"(*Printer).Comment", Method, 19}, + {"(*Printer).HTML", Method, 19}, + {"(*Printer).Markdown", Method, 19}, + {"(*Printer).Text", Method, 19}, + {"Block", Type, 19}, + {"Code", Type, 19}, + {"Code.Text", Field, 19}, + {"DefaultLookupPackage", Func, 19}, + {"Doc", Type, 19}, + {"Doc.Content", Field, 19}, + {"Doc.Links", Field, 19}, + {"DocLink", Type, 19}, + {"DocLink.ImportPath", Field, 19}, + {"DocLink.Name", Field, 19}, + {"DocLink.Recv", Field, 19}, + 
{"DocLink.Text", Field, 19}, + {"Heading", Type, 19}, + {"Heading.Text", Field, 19}, + {"Italic", Type, 19}, + {"Link", Type, 19}, + {"Link.Auto", Field, 19}, + {"Link.Text", Field, 19}, + {"Link.URL", Field, 19}, + {"LinkDef", Type, 19}, + {"LinkDef.Text", Field, 19}, + {"LinkDef.URL", Field, 19}, + {"LinkDef.Used", Field, 19}, + {"List", Type, 19}, + {"List.ForceBlankBefore", Field, 19}, + {"List.ForceBlankBetween", Field, 19}, + {"List.Items", Field, 19}, + {"ListItem", Type, 19}, + {"ListItem.Content", Field, 19}, + {"ListItem.Number", Field, 19}, + {"Paragraph", Type, 19}, + {"Paragraph.Text", Field, 19}, + {"Parser", Type, 19}, + {"Parser.LookupPackage", Field, 19}, + {"Parser.LookupSym", Field, 19}, + {"Parser.Words", Field, 19}, + {"Plain", Type, 19}, + {"Printer", Type, 19}, + {"Printer.DocLinkBaseURL", Field, 19}, + {"Printer.DocLinkURL", Field, 19}, + {"Printer.HeadingID", Field, 19}, + {"Printer.HeadingLevel", Field, 19}, + {"Printer.TextCodePrefix", Field, 19}, + {"Printer.TextPrefix", Field, 19}, + {"Printer.TextWidth", Field, 19}, + {"Text", Type, 19}, + }, + "go/format": { + {"Node", Func, 1}, + {"Source", Func, 1}, + }, + "go/importer": { + {"Default", Func, 5}, + {"For", Func, 5}, + {"ForCompiler", Func, 12}, + {"Lookup", Type, 5}, + }, + "go/parser": { + {"AllErrors", Const, 1}, + {"DeclarationErrors", Const, 0}, + {"ImportsOnly", Const, 0}, + {"Mode", Type, 0}, + {"PackageClauseOnly", Const, 0}, + {"ParseComments", Const, 0}, + {"ParseDir", Func, 0}, + {"ParseExpr", Func, 0}, + {"ParseExprFrom", Func, 5}, + {"ParseFile", Func, 0}, + {"SkipObjectResolution", Const, 17}, + {"SpuriousErrors", Const, 0}, + {"Trace", Const, 0}, + }, + "go/printer": { + {"(*Config).Fprint", Method, 0}, + {"CommentedNode", Type, 0}, + {"CommentedNode.Comments", Field, 0}, + {"CommentedNode.Node", Field, 0}, + {"Config", Type, 0}, + {"Config.Indent", Field, 1}, + {"Config.Mode", Field, 0}, + {"Config.Tabwidth", Field, 0}, + {"Fprint", Func, 0}, + {"Mode", Type, 0}, + 
{"RawFormat", Const, 0}, + {"SourcePos", Const, 0}, + {"TabIndent", Const, 0}, + {"UseSpaces", Const, 0}, + }, + "go/scanner": { + {"(*ErrorList).Add", Method, 0}, + {"(*ErrorList).RemoveMultiples", Method, 0}, + {"(*ErrorList).Reset", Method, 0}, + {"(*Scanner).Init", Method, 0}, + {"(*Scanner).Scan", Method, 0}, + {"(Error).Error", Method, 0}, + {"(ErrorList).Err", Method, 0}, + {"(ErrorList).Error", Method, 0}, + {"(ErrorList).Len", Method, 0}, + {"(ErrorList).Less", Method, 0}, + {"(ErrorList).Sort", Method, 0}, + {"(ErrorList).Swap", Method, 0}, + {"Error", Type, 0}, + {"Error.Msg", Field, 0}, + {"Error.Pos", Field, 0}, + {"ErrorHandler", Type, 0}, + {"ErrorList", Type, 0}, + {"Mode", Type, 0}, + {"PrintError", Func, 0}, + {"ScanComments", Const, 0}, + {"Scanner", Type, 0}, + {"Scanner.ErrorCount", Field, 0}, + }, + "go/token": { + {"(*File).AddLine", Method, 0}, + {"(*File).AddLineColumnInfo", Method, 11}, + {"(*File).AddLineInfo", Method, 0}, + {"(*File).Base", Method, 0}, + {"(*File).Line", Method, 0}, + {"(*File).LineCount", Method, 0}, + {"(*File).LineStart", Method, 12}, + {"(*File).Lines", Method, 21}, + {"(*File).MergeLine", Method, 2}, + {"(*File).Name", Method, 0}, + {"(*File).Offset", Method, 0}, + {"(*File).Pos", Method, 0}, + {"(*File).Position", Method, 0}, + {"(*File).PositionFor", Method, 4}, + {"(*File).SetLines", Method, 0}, + {"(*File).SetLinesForContent", Method, 0}, + {"(*File).Size", Method, 0}, + {"(*FileSet).AddFile", Method, 0}, + {"(*FileSet).Base", Method, 0}, + {"(*FileSet).File", Method, 0}, + {"(*FileSet).Iterate", Method, 0}, + {"(*FileSet).Position", Method, 0}, + {"(*FileSet).PositionFor", Method, 4}, + {"(*FileSet).Read", Method, 0}, + {"(*FileSet).RemoveFile", Method, 20}, + {"(*FileSet).Write", Method, 0}, + {"(*Position).IsValid", Method, 0}, + {"(Pos).IsValid", Method, 0}, + {"(Position).String", Method, 0}, + {"(Token).IsKeyword", Method, 0}, + {"(Token).IsLiteral", Method, 0}, + {"(Token).IsOperator", Method, 0}, + 
{"(Token).Precedence", Method, 0}, + {"(Token).String", Method, 0}, + {"ADD", Const, 0}, + {"ADD_ASSIGN", Const, 0}, + {"AND", Const, 0}, + {"AND_ASSIGN", Const, 0}, + {"AND_NOT", Const, 0}, + {"AND_NOT_ASSIGN", Const, 0}, + {"ARROW", Const, 0}, + {"ASSIGN", Const, 0}, + {"BREAK", Const, 0}, + {"CASE", Const, 0}, + {"CHAN", Const, 0}, + {"CHAR", Const, 0}, + {"COLON", Const, 0}, + {"COMMA", Const, 0}, + {"COMMENT", Const, 0}, + {"CONST", Const, 0}, + {"CONTINUE", Const, 0}, + {"DEC", Const, 0}, + {"DEFAULT", Const, 0}, + {"DEFER", Const, 0}, + {"DEFINE", Const, 0}, + {"ELLIPSIS", Const, 0}, + {"ELSE", Const, 0}, + {"EOF", Const, 0}, + {"EQL", Const, 0}, + {"FALLTHROUGH", Const, 0}, + {"FLOAT", Const, 0}, + {"FOR", Const, 0}, + {"FUNC", Const, 0}, + {"File", Type, 0}, + {"FileSet", Type, 0}, + {"GEQ", Const, 0}, + {"GO", Const, 0}, + {"GOTO", Const, 0}, + {"GTR", Const, 0}, + {"HighestPrec", Const, 0}, + {"IDENT", Const, 0}, + {"IF", Const, 0}, + {"ILLEGAL", Const, 0}, + {"IMAG", Const, 0}, + {"IMPORT", Const, 0}, + {"INC", Const, 0}, + {"INT", Const, 0}, + {"INTERFACE", Const, 0}, + {"IsExported", Func, 13}, + {"IsIdentifier", Func, 13}, + {"IsKeyword", Func, 13}, + {"LAND", Const, 0}, + {"LBRACE", Const, 0}, + {"LBRACK", Const, 0}, + {"LEQ", Const, 0}, + {"LOR", Const, 0}, + {"LPAREN", Const, 0}, + {"LSS", Const, 0}, + {"Lookup", Func, 0}, + {"LowestPrec", Const, 0}, + {"MAP", Const, 0}, + {"MUL", Const, 0}, + {"MUL_ASSIGN", Const, 0}, + {"NEQ", Const, 0}, + {"NOT", Const, 0}, + {"NewFileSet", Func, 0}, + {"NoPos", Const, 0}, + {"OR", Const, 0}, + {"OR_ASSIGN", Const, 0}, + {"PACKAGE", Const, 0}, + {"PERIOD", Const, 0}, + {"Pos", Type, 0}, + {"Position", Type, 0}, + {"Position.Column", Field, 0}, + {"Position.Filename", Field, 0}, + {"Position.Line", Field, 0}, + {"Position.Offset", Field, 0}, + {"QUO", Const, 0}, + {"QUO_ASSIGN", Const, 0}, + {"RANGE", Const, 0}, + {"RBRACE", Const, 0}, + {"RBRACK", Const, 0}, + {"REM", Const, 0}, + {"REM_ASSIGN", Const, 0}, + 
{"RETURN", Const, 0}, + {"RPAREN", Const, 0}, + {"SELECT", Const, 0}, + {"SEMICOLON", Const, 0}, + {"SHL", Const, 0}, + {"SHL_ASSIGN", Const, 0}, + {"SHR", Const, 0}, + {"SHR_ASSIGN", Const, 0}, + {"STRING", Const, 0}, + {"STRUCT", Const, 0}, + {"SUB", Const, 0}, + {"SUB_ASSIGN", Const, 0}, + {"SWITCH", Const, 0}, + {"TILDE", Const, 18}, + {"TYPE", Const, 0}, + {"Token", Type, 0}, + {"UnaryPrec", Const, 0}, + {"VAR", Const, 0}, + {"XOR", Const, 0}, + {"XOR_ASSIGN", Const, 0}, + }, + "go/types": { + {"(*Alias).Obj", Method, 22}, + {"(*Alias).String", Method, 22}, + {"(*Alias).Underlying", Method, 22}, + {"(*ArgumentError).Error", Method, 18}, + {"(*ArgumentError).Unwrap", Method, 18}, + {"(*Array).Elem", Method, 5}, + {"(*Array).Len", Method, 5}, + {"(*Array).String", Method, 5}, + {"(*Array).Underlying", Method, 5}, + {"(*Basic).Info", Method, 5}, + {"(*Basic).Kind", Method, 5}, + {"(*Basic).Name", Method, 5}, + {"(*Basic).String", Method, 5}, + {"(*Basic).Underlying", Method, 5}, + {"(*Builtin).Exported", Method, 5}, + {"(*Builtin).Id", Method, 5}, + {"(*Builtin).Name", Method, 5}, + {"(*Builtin).Parent", Method, 5}, + {"(*Builtin).Pkg", Method, 5}, + {"(*Builtin).Pos", Method, 5}, + {"(*Builtin).String", Method, 5}, + {"(*Builtin).Type", Method, 5}, + {"(*Chan).Dir", Method, 5}, + {"(*Chan).Elem", Method, 5}, + {"(*Chan).String", Method, 5}, + {"(*Chan).Underlying", Method, 5}, + {"(*Checker).Files", Method, 5}, + {"(*Config).Check", Method, 5}, + {"(*Const).Exported", Method, 5}, + {"(*Const).Id", Method, 5}, + {"(*Const).Name", Method, 5}, + {"(*Const).Parent", Method, 5}, + {"(*Const).Pkg", Method, 5}, + {"(*Const).Pos", Method, 5}, + {"(*Const).String", Method, 5}, + {"(*Const).Type", Method, 5}, + {"(*Const).Val", Method, 5}, + {"(*Func).Exported", Method, 5}, + {"(*Func).FullName", Method, 5}, + {"(*Func).Id", Method, 5}, + {"(*Func).Name", Method, 5}, + {"(*Func).Origin", Method, 19}, + {"(*Func).Parent", Method, 5}, + {"(*Func).Pkg", Method, 5}, + 
{"(*Func).Pos", Method, 5}, + {"(*Func).Scope", Method, 5}, + {"(*Func).String", Method, 5}, + {"(*Func).Type", Method, 5}, + {"(*Info).ObjectOf", Method, 5}, + {"(*Info).PkgNameOf", Method, 22}, + {"(*Info).TypeOf", Method, 5}, + {"(*Initializer).String", Method, 5}, + {"(*Interface).Complete", Method, 5}, + {"(*Interface).Embedded", Method, 5}, + {"(*Interface).EmbeddedType", Method, 11}, + {"(*Interface).Empty", Method, 5}, + {"(*Interface).ExplicitMethod", Method, 5}, + {"(*Interface).IsComparable", Method, 18}, + {"(*Interface).IsImplicit", Method, 18}, + {"(*Interface).IsMethodSet", Method, 18}, + {"(*Interface).MarkImplicit", Method, 18}, + {"(*Interface).Method", Method, 5}, + {"(*Interface).NumEmbeddeds", Method, 5}, + {"(*Interface).NumExplicitMethods", Method, 5}, + {"(*Interface).NumMethods", Method, 5}, + {"(*Interface).String", Method, 5}, + {"(*Interface).Underlying", Method, 5}, + {"(*Label).Exported", Method, 5}, + {"(*Label).Id", Method, 5}, + {"(*Label).Name", Method, 5}, + {"(*Label).Parent", Method, 5}, + {"(*Label).Pkg", Method, 5}, + {"(*Label).Pos", Method, 5}, + {"(*Label).String", Method, 5}, + {"(*Label).Type", Method, 5}, + {"(*Map).Elem", Method, 5}, + {"(*Map).Key", Method, 5}, + {"(*Map).String", Method, 5}, + {"(*Map).Underlying", Method, 5}, + {"(*MethodSet).At", Method, 5}, + {"(*MethodSet).Len", Method, 5}, + {"(*MethodSet).Lookup", Method, 5}, + {"(*MethodSet).String", Method, 5}, + {"(*Named).AddMethod", Method, 5}, + {"(*Named).Method", Method, 5}, + {"(*Named).NumMethods", Method, 5}, + {"(*Named).Obj", Method, 5}, + {"(*Named).Origin", Method, 18}, + {"(*Named).SetTypeParams", Method, 18}, + {"(*Named).SetUnderlying", Method, 5}, + {"(*Named).String", Method, 5}, + {"(*Named).TypeArgs", Method, 18}, + {"(*Named).TypeParams", Method, 18}, + {"(*Named).Underlying", Method, 5}, + {"(*Nil).Exported", Method, 5}, + {"(*Nil).Id", Method, 5}, + {"(*Nil).Name", Method, 5}, + {"(*Nil).Parent", Method, 5}, + {"(*Nil).Pkg", Method, 5}, 
+ {"(*Nil).Pos", Method, 5}, + {"(*Nil).String", Method, 5}, + {"(*Nil).Type", Method, 5}, + {"(*Package).Complete", Method, 5}, + {"(*Package).GoVersion", Method, 21}, + {"(*Package).Imports", Method, 5}, + {"(*Package).MarkComplete", Method, 5}, + {"(*Package).Name", Method, 5}, + {"(*Package).Path", Method, 5}, + {"(*Package).Scope", Method, 5}, + {"(*Package).SetImports", Method, 5}, + {"(*Package).SetName", Method, 6}, + {"(*Package).String", Method, 5}, + {"(*PkgName).Exported", Method, 5}, + {"(*PkgName).Id", Method, 5}, + {"(*PkgName).Imported", Method, 5}, + {"(*PkgName).Name", Method, 5}, + {"(*PkgName).Parent", Method, 5}, + {"(*PkgName).Pkg", Method, 5}, + {"(*PkgName).Pos", Method, 5}, + {"(*PkgName).String", Method, 5}, + {"(*PkgName).Type", Method, 5}, + {"(*Pointer).Elem", Method, 5}, + {"(*Pointer).String", Method, 5}, + {"(*Pointer).Underlying", Method, 5}, + {"(*Scope).Child", Method, 5}, + {"(*Scope).Contains", Method, 5}, + {"(*Scope).End", Method, 5}, + {"(*Scope).Innermost", Method, 5}, + {"(*Scope).Insert", Method, 5}, + {"(*Scope).Len", Method, 5}, + {"(*Scope).Lookup", Method, 5}, + {"(*Scope).LookupParent", Method, 5}, + {"(*Scope).Names", Method, 5}, + {"(*Scope).NumChildren", Method, 5}, + {"(*Scope).Parent", Method, 5}, + {"(*Scope).Pos", Method, 5}, + {"(*Scope).String", Method, 5}, + {"(*Scope).WriteTo", Method, 5}, + {"(*Selection).Index", Method, 5}, + {"(*Selection).Indirect", Method, 5}, + {"(*Selection).Kind", Method, 5}, + {"(*Selection).Obj", Method, 5}, + {"(*Selection).Recv", Method, 5}, + {"(*Selection).String", Method, 5}, + {"(*Selection).Type", Method, 5}, + {"(*Signature).Params", Method, 5}, + {"(*Signature).Recv", Method, 5}, + {"(*Signature).RecvTypeParams", Method, 18}, + {"(*Signature).Results", Method, 5}, + {"(*Signature).String", Method, 5}, + {"(*Signature).TypeParams", Method, 18}, + {"(*Signature).Underlying", Method, 5}, + {"(*Signature).Variadic", Method, 5}, + {"(*Slice).Elem", Method, 5}, + 
{"(*Slice).String", Method, 5}, + {"(*Slice).Underlying", Method, 5}, + {"(*StdSizes).Alignof", Method, 5}, + {"(*StdSizes).Offsetsof", Method, 5}, + {"(*StdSizes).Sizeof", Method, 5}, + {"(*Struct).Field", Method, 5}, + {"(*Struct).NumFields", Method, 5}, + {"(*Struct).String", Method, 5}, + {"(*Struct).Tag", Method, 5}, + {"(*Struct).Underlying", Method, 5}, + {"(*Term).String", Method, 18}, + {"(*Term).Tilde", Method, 18}, + {"(*Term).Type", Method, 18}, + {"(*Tuple).At", Method, 5}, + {"(*Tuple).Len", Method, 5}, + {"(*Tuple).String", Method, 5}, + {"(*Tuple).Underlying", Method, 5}, + {"(*TypeList).At", Method, 18}, + {"(*TypeList).Len", Method, 18}, + {"(*TypeName).Exported", Method, 5}, + {"(*TypeName).Id", Method, 5}, + {"(*TypeName).IsAlias", Method, 9}, + {"(*TypeName).Name", Method, 5}, + {"(*TypeName).Parent", Method, 5}, + {"(*TypeName).Pkg", Method, 5}, + {"(*TypeName).Pos", Method, 5}, + {"(*TypeName).String", Method, 5}, + {"(*TypeName).Type", Method, 5}, + {"(*TypeParam).Constraint", Method, 18}, + {"(*TypeParam).Index", Method, 18}, + {"(*TypeParam).Obj", Method, 18}, + {"(*TypeParam).SetConstraint", Method, 18}, + {"(*TypeParam).String", Method, 18}, + {"(*TypeParam).Underlying", Method, 18}, + {"(*TypeParamList).At", Method, 18}, + {"(*TypeParamList).Len", Method, 18}, + {"(*Union).Len", Method, 18}, + {"(*Union).String", Method, 18}, + {"(*Union).Term", Method, 18}, + {"(*Union).Underlying", Method, 18}, + {"(*Var).Anonymous", Method, 5}, + {"(*Var).Embedded", Method, 11}, + {"(*Var).Exported", Method, 5}, + {"(*Var).Id", Method, 5}, + {"(*Var).IsField", Method, 5}, + {"(*Var).Name", Method, 5}, + {"(*Var).Origin", Method, 19}, + {"(*Var).Parent", Method, 5}, + {"(*Var).Pkg", Method, 5}, + {"(*Var).Pos", Method, 5}, + {"(*Var).String", Method, 5}, + {"(*Var).Type", Method, 5}, + {"(Checker).ObjectOf", Method, 5}, + {"(Checker).PkgNameOf", Method, 22}, + {"(Checker).TypeOf", Method, 5}, + {"(Error).Error", Method, 5}, + 
{"(TypeAndValue).Addressable", Method, 5}, + {"(TypeAndValue).Assignable", Method, 5}, + {"(TypeAndValue).HasOk", Method, 5}, + {"(TypeAndValue).IsBuiltin", Method, 5}, + {"(TypeAndValue).IsNil", Method, 5}, + {"(TypeAndValue).IsType", Method, 5}, + {"(TypeAndValue).IsValue", Method, 5}, + {"(TypeAndValue).IsVoid", Method, 5}, + {"Alias", Type, 22}, + {"ArgumentError", Type, 18}, + {"ArgumentError.Err", Field, 18}, + {"ArgumentError.Index", Field, 18}, + {"Array", Type, 5}, + {"AssertableTo", Func, 5}, + {"AssignableTo", Func, 5}, + {"Basic", Type, 5}, + {"BasicInfo", Type, 5}, + {"BasicKind", Type, 5}, + {"Bool", Const, 5}, + {"Builtin", Type, 5}, + {"Byte", Const, 5}, + {"Chan", Type, 5}, + {"ChanDir", Type, 5}, + {"CheckExpr", Func, 13}, + {"Checker", Type, 5}, + {"Checker.Info", Field, 5}, + {"Comparable", Func, 5}, + {"Complex128", Const, 5}, + {"Complex64", Const, 5}, + {"Config", Type, 5}, + {"Config.Context", Field, 18}, + {"Config.DisableUnusedImportCheck", Field, 5}, + {"Config.Error", Field, 5}, + {"Config.FakeImportC", Field, 5}, + {"Config.GoVersion", Field, 18}, + {"Config.IgnoreFuncBodies", Field, 5}, + {"Config.Importer", Field, 5}, + {"Config.Sizes", Field, 5}, + {"Const", Type, 5}, + {"Context", Type, 18}, + {"ConvertibleTo", Func, 5}, + {"DefPredeclaredTestFuncs", Func, 5}, + {"Default", Func, 8}, + {"Error", Type, 5}, + {"Error.Fset", Field, 5}, + {"Error.Msg", Field, 5}, + {"Error.Pos", Field, 5}, + {"Error.Soft", Field, 5}, + {"Eval", Func, 5}, + {"ExprString", Func, 5}, + {"FieldVal", Const, 5}, + {"Float32", Const, 5}, + {"Float64", Const, 5}, + {"Func", Type, 5}, + {"Id", Func, 5}, + {"Identical", Func, 5}, + {"IdenticalIgnoreTags", Func, 8}, + {"Implements", Func, 5}, + {"ImportMode", Type, 6}, + {"Importer", Type, 5}, + {"ImporterFrom", Type, 6}, + {"Info", Type, 5}, + {"Info.Defs", Field, 5}, + {"Info.FileVersions", Field, 22}, + {"Info.Implicits", Field, 5}, + {"Info.InitOrder", Field, 5}, + {"Info.Instances", Field, 18}, + 
{"Info.Scopes", Field, 5}, + {"Info.Selections", Field, 5}, + {"Info.Types", Field, 5}, + {"Info.Uses", Field, 5}, + {"Initializer", Type, 5}, + {"Initializer.Lhs", Field, 5}, + {"Initializer.Rhs", Field, 5}, + {"Instance", Type, 18}, + {"Instance.Type", Field, 18}, + {"Instance.TypeArgs", Field, 18}, + {"Instantiate", Func, 18}, + {"Int", Const, 5}, + {"Int16", Const, 5}, + {"Int32", Const, 5}, + {"Int64", Const, 5}, + {"Int8", Const, 5}, + {"Interface", Type, 5}, + {"Invalid", Const, 5}, + {"IsBoolean", Const, 5}, + {"IsComplex", Const, 5}, + {"IsConstType", Const, 5}, + {"IsFloat", Const, 5}, + {"IsInteger", Const, 5}, + {"IsInterface", Func, 5}, + {"IsNumeric", Const, 5}, + {"IsOrdered", Const, 5}, + {"IsString", Const, 5}, + {"IsUnsigned", Const, 5}, + {"IsUntyped", Const, 5}, + {"Label", Type, 5}, + {"LookupFieldOrMethod", Func, 5}, + {"Map", Type, 5}, + {"MethodExpr", Const, 5}, + {"MethodSet", Type, 5}, + {"MethodVal", Const, 5}, + {"MissingMethod", Func, 5}, + {"Named", Type, 5}, + {"NewAlias", Func, 22}, + {"NewArray", Func, 5}, + {"NewChan", Func, 5}, + {"NewChecker", Func, 5}, + {"NewConst", Func, 5}, + {"NewContext", Func, 18}, + {"NewField", Func, 5}, + {"NewFunc", Func, 5}, + {"NewInterface", Func, 5}, + {"NewInterfaceType", Func, 11}, + {"NewLabel", Func, 5}, + {"NewMap", Func, 5}, + {"NewMethodSet", Func, 5}, + {"NewNamed", Func, 5}, + {"NewPackage", Func, 5}, + {"NewParam", Func, 5}, + {"NewPkgName", Func, 5}, + {"NewPointer", Func, 5}, + {"NewScope", Func, 5}, + {"NewSignature", Func, 5}, + {"NewSignatureType", Func, 18}, + {"NewSlice", Func, 5}, + {"NewStruct", Func, 5}, + {"NewTerm", Func, 18}, + {"NewTuple", Func, 5}, + {"NewTypeName", Func, 5}, + {"NewTypeParam", Func, 18}, + {"NewUnion", Func, 18}, + {"NewVar", Func, 5}, + {"Nil", Type, 5}, + {"Object", Type, 5}, + {"ObjectString", Func, 5}, + {"Package", Type, 5}, + {"PkgName", Type, 5}, + {"Pointer", Type, 5}, + {"Qualifier", Type, 5}, + {"RecvOnly", Const, 5}, + {"RelativeTo", Func, 5}, + 
{"Rune", Const, 5}, + {"Satisfies", Func, 20}, + {"Scope", Type, 5}, + {"Selection", Type, 5}, + {"SelectionKind", Type, 5}, + {"SelectionString", Func, 5}, + {"SendOnly", Const, 5}, + {"SendRecv", Const, 5}, + {"Signature", Type, 5}, + {"Sizes", Type, 5}, + {"SizesFor", Func, 9}, + {"Slice", Type, 5}, + {"StdSizes", Type, 5}, + {"StdSizes.MaxAlign", Field, 5}, + {"StdSizes.WordSize", Field, 5}, + {"String", Const, 5}, + {"Struct", Type, 5}, + {"Term", Type, 18}, + {"Tuple", Type, 5}, + {"Typ", Var, 5}, + {"Type", Type, 5}, + {"TypeAndValue", Type, 5}, + {"TypeAndValue.Type", Field, 5}, + {"TypeAndValue.Value", Field, 5}, + {"TypeList", Type, 18}, + {"TypeName", Type, 5}, + {"TypeParam", Type, 18}, + {"TypeParamList", Type, 18}, + {"TypeString", Func, 5}, + {"Uint", Const, 5}, + {"Uint16", Const, 5}, + {"Uint32", Const, 5}, + {"Uint64", Const, 5}, + {"Uint8", Const, 5}, + {"Uintptr", Const, 5}, + {"Unalias", Func, 22}, + {"Union", Type, 18}, + {"Universe", Var, 5}, + {"Unsafe", Var, 5}, + {"UnsafePointer", Const, 5}, + {"UntypedBool", Const, 5}, + {"UntypedComplex", Const, 5}, + {"UntypedFloat", Const, 5}, + {"UntypedInt", Const, 5}, + {"UntypedNil", Const, 5}, + {"UntypedRune", Const, 5}, + {"UntypedString", Const, 5}, + {"Var", Type, 5}, + {"WriteExpr", Func, 5}, + {"WriteSignature", Func, 5}, + {"WriteType", Func, 5}, + }, + "go/version": { + {"Compare", Func, 22}, + {"IsValid", Func, 22}, + {"Lang", Func, 22}, + }, + "hash": { + {"Hash", Type, 0}, + {"Hash32", Type, 0}, + {"Hash64", Type, 0}, + }, + "hash/adler32": { + {"Checksum", Func, 0}, + {"New", Func, 0}, + {"Size", Const, 0}, + }, + "hash/crc32": { + {"Castagnoli", Const, 0}, + {"Checksum", Func, 0}, + {"ChecksumIEEE", Func, 0}, + {"IEEE", Const, 0}, + {"IEEETable", Var, 0}, + {"Koopman", Const, 0}, + {"MakeTable", Func, 0}, + {"New", Func, 0}, + {"NewIEEE", Func, 0}, + {"Size", Const, 0}, + {"Table", Type, 0}, + {"Update", Func, 0}, + }, + "hash/crc64": { + {"Checksum", Func, 0}, + {"ECMA", Const, 0}, + 
{"ISO", Const, 0}, + {"MakeTable", Func, 0}, + {"New", Func, 0}, + {"Size", Const, 0}, + {"Table", Type, 0}, + {"Update", Func, 0}, + }, + "hash/fnv": { + {"New128", Func, 9}, + {"New128a", Func, 9}, + {"New32", Func, 0}, + {"New32a", Func, 0}, + {"New64", Func, 0}, + {"New64a", Func, 0}, + }, + "hash/maphash": { + {"(*Hash).BlockSize", Method, 14}, + {"(*Hash).Reset", Method, 14}, + {"(*Hash).Seed", Method, 14}, + {"(*Hash).SetSeed", Method, 14}, + {"(*Hash).Size", Method, 14}, + {"(*Hash).Sum", Method, 14}, + {"(*Hash).Sum64", Method, 14}, + {"(*Hash).Write", Method, 14}, + {"(*Hash).WriteByte", Method, 14}, + {"(*Hash).WriteString", Method, 14}, + {"Bytes", Func, 19}, + {"Hash", Type, 14}, + {"MakeSeed", Func, 14}, + {"Seed", Type, 14}, + {"String", Func, 19}, + }, + "html": { + {"EscapeString", Func, 0}, + {"UnescapeString", Func, 0}, + }, + "html/template": { + {"(*Error).Error", Method, 0}, + {"(*Template).AddParseTree", Method, 0}, + {"(*Template).Clone", Method, 0}, + {"(*Template).DefinedTemplates", Method, 6}, + {"(*Template).Delims", Method, 0}, + {"(*Template).Execute", Method, 0}, + {"(*Template).ExecuteTemplate", Method, 0}, + {"(*Template).Funcs", Method, 0}, + {"(*Template).Lookup", Method, 0}, + {"(*Template).Name", Method, 0}, + {"(*Template).New", Method, 0}, + {"(*Template).Option", Method, 5}, + {"(*Template).Parse", Method, 0}, + {"(*Template).ParseFS", Method, 16}, + {"(*Template).ParseFiles", Method, 0}, + {"(*Template).ParseGlob", Method, 0}, + {"(*Template).Templates", Method, 0}, + {"CSS", Type, 0}, + {"ErrAmbigContext", Const, 0}, + {"ErrBadHTML", Const, 0}, + {"ErrBranchEnd", Const, 0}, + {"ErrEndContext", Const, 0}, + {"ErrJSTemplate", Const, 21}, + {"ErrNoSuchTemplate", Const, 0}, + {"ErrOutputContext", Const, 0}, + {"ErrPartialCharset", Const, 0}, + {"ErrPartialEscape", Const, 0}, + {"ErrPredefinedEscaper", Const, 9}, + {"ErrRangeLoopReentry", Const, 0}, + {"ErrSlashAmbig", Const, 0}, + {"Error", Type, 0}, + {"Error.Description", 
Field, 0}, + {"Error.ErrorCode", Field, 0}, + {"Error.Line", Field, 0}, + {"Error.Name", Field, 0}, + {"Error.Node", Field, 4}, + {"ErrorCode", Type, 0}, + {"FuncMap", Type, 0}, + {"HTML", Type, 0}, + {"HTMLAttr", Type, 0}, + {"HTMLEscape", Func, 0}, + {"HTMLEscapeString", Func, 0}, + {"HTMLEscaper", Func, 0}, + {"IsTrue", Func, 6}, + {"JS", Type, 0}, + {"JSEscape", Func, 0}, + {"JSEscapeString", Func, 0}, + {"JSEscaper", Func, 0}, + {"JSStr", Type, 0}, + {"Must", Func, 0}, + {"New", Func, 0}, + {"OK", Const, 0}, + {"ParseFS", Func, 16}, + {"ParseFiles", Func, 0}, + {"ParseGlob", Func, 0}, + {"Srcset", Type, 10}, + {"Template", Type, 0}, + {"Template.Tree", Field, 2}, + {"URL", Type, 0}, + {"URLQueryEscaper", Func, 0}, + }, + "image": { + {"(*Alpha).AlphaAt", Method, 4}, + {"(*Alpha).At", Method, 0}, + {"(*Alpha).Bounds", Method, 0}, + {"(*Alpha).ColorModel", Method, 0}, + {"(*Alpha).Opaque", Method, 0}, + {"(*Alpha).PixOffset", Method, 0}, + {"(*Alpha).RGBA64At", Method, 17}, + {"(*Alpha).Set", Method, 0}, + {"(*Alpha).SetAlpha", Method, 0}, + {"(*Alpha).SetRGBA64", Method, 17}, + {"(*Alpha).SubImage", Method, 0}, + {"(*Alpha16).Alpha16At", Method, 4}, + {"(*Alpha16).At", Method, 0}, + {"(*Alpha16).Bounds", Method, 0}, + {"(*Alpha16).ColorModel", Method, 0}, + {"(*Alpha16).Opaque", Method, 0}, + {"(*Alpha16).PixOffset", Method, 0}, + {"(*Alpha16).RGBA64At", Method, 17}, + {"(*Alpha16).Set", Method, 0}, + {"(*Alpha16).SetAlpha16", Method, 0}, + {"(*Alpha16).SetRGBA64", Method, 17}, + {"(*Alpha16).SubImage", Method, 0}, + {"(*CMYK).At", Method, 5}, + {"(*CMYK).Bounds", Method, 5}, + {"(*CMYK).CMYKAt", Method, 5}, + {"(*CMYK).ColorModel", Method, 5}, + {"(*CMYK).Opaque", Method, 5}, + {"(*CMYK).PixOffset", Method, 5}, + {"(*CMYK).RGBA64At", Method, 17}, + {"(*CMYK).Set", Method, 5}, + {"(*CMYK).SetCMYK", Method, 5}, + {"(*CMYK).SetRGBA64", Method, 17}, + {"(*CMYK).SubImage", Method, 5}, + {"(*Gray).At", Method, 0}, + {"(*Gray).Bounds", Method, 0}, + 
{"(*Gray).ColorModel", Method, 0}, + {"(*Gray).GrayAt", Method, 4}, + {"(*Gray).Opaque", Method, 0}, + {"(*Gray).PixOffset", Method, 0}, + {"(*Gray).RGBA64At", Method, 17}, + {"(*Gray).Set", Method, 0}, + {"(*Gray).SetGray", Method, 0}, + {"(*Gray).SetRGBA64", Method, 17}, + {"(*Gray).SubImage", Method, 0}, + {"(*Gray16).At", Method, 0}, + {"(*Gray16).Bounds", Method, 0}, + {"(*Gray16).ColorModel", Method, 0}, + {"(*Gray16).Gray16At", Method, 4}, + {"(*Gray16).Opaque", Method, 0}, + {"(*Gray16).PixOffset", Method, 0}, + {"(*Gray16).RGBA64At", Method, 17}, + {"(*Gray16).Set", Method, 0}, + {"(*Gray16).SetGray16", Method, 0}, + {"(*Gray16).SetRGBA64", Method, 17}, + {"(*Gray16).SubImage", Method, 0}, + {"(*NRGBA).At", Method, 0}, + {"(*NRGBA).Bounds", Method, 0}, + {"(*NRGBA).ColorModel", Method, 0}, + {"(*NRGBA).NRGBAAt", Method, 4}, + {"(*NRGBA).Opaque", Method, 0}, + {"(*NRGBA).PixOffset", Method, 0}, + {"(*NRGBA).RGBA64At", Method, 17}, + {"(*NRGBA).Set", Method, 0}, + {"(*NRGBA).SetNRGBA", Method, 0}, + {"(*NRGBA).SetRGBA64", Method, 17}, + {"(*NRGBA).SubImage", Method, 0}, + {"(*NRGBA64).At", Method, 0}, + {"(*NRGBA64).Bounds", Method, 0}, + {"(*NRGBA64).ColorModel", Method, 0}, + {"(*NRGBA64).NRGBA64At", Method, 4}, + {"(*NRGBA64).Opaque", Method, 0}, + {"(*NRGBA64).PixOffset", Method, 0}, + {"(*NRGBA64).RGBA64At", Method, 17}, + {"(*NRGBA64).Set", Method, 0}, + {"(*NRGBA64).SetNRGBA64", Method, 0}, + {"(*NRGBA64).SetRGBA64", Method, 17}, + {"(*NRGBA64).SubImage", Method, 0}, + {"(*NYCbCrA).AOffset", Method, 6}, + {"(*NYCbCrA).At", Method, 6}, + {"(*NYCbCrA).Bounds", Method, 6}, + {"(*NYCbCrA).COffset", Method, 6}, + {"(*NYCbCrA).ColorModel", Method, 6}, + {"(*NYCbCrA).NYCbCrAAt", Method, 6}, + {"(*NYCbCrA).Opaque", Method, 6}, + {"(*NYCbCrA).RGBA64At", Method, 17}, + {"(*NYCbCrA).SubImage", Method, 6}, + {"(*NYCbCrA).YCbCrAt", Method, 6}, + {"(*NYCbCrA).YOffset", Method, 6}, + {"(*Paletted).At", Method, 0}, + {"(*Paletted).Bounds", Method, 0}, + 
{"(*Paletted).ColorIndexAt", Method, 0}, + {"(*Paletted).ColorModel", Method, 0}, + {"(*Paletted).Opaque", Method, 0}, + {"(*Paletted).PixOffset", Method, 0}, + {"(*Paletted).RGBA64At", Method, 17}, + {"(*Paletted).Set", Method, 0}, + {"(*Paletted).SetColorIndex", Method, 0}, + {"(*Paletted).SetRGBA64", Method, 17}, + {"(*Paletted).SubImage", Method, 0}, + {"(*RGBA).At", Method, 0}, + {"(*RGBA).Bounds", Method, 0}, + {"(*RGBA).ColorModel", Method, 0}, + {"(*RGBA).Opaque", Method, 0}, + {"(*RGBA).PixOffset", Method, 0}, + {"(*RGBA).RGBA64At", Method, 17}, + {"(*RGBA).RGBAAt", Method, 4}, + {"(*RGBA).Set", Method, 0}, + {"(*RGBA).SetRGBA", Method, 0}, + {"(*RGBA).SetRGBA64", Method, 17}, + {"(*RGBA).SubImage", Method, 0}, + {"(*RGBA64).At", Method, 0}, + {"(*RGBA64).Bounds", Method, 0}, + {"(*RGBA64).ColorModel", Method, 0}, + {"(*RGBA64).Opaque", Method, 0}, + {"(*RGBA64).PixOffset", Method, 0}, + {"(*RGBA64).RGBA64At", Method, 4}, + {"(*RGBA64).Set", Method, 0}, + {"(*RGBA64).SetRGBA64", Method, 0}, + {"(*RGBA64).SubImage", Method, 0}, + {"(*Uniform).At", Method, 0}, + {"(*Uniform).Bounds", Method, 0}, + {"(*Uniform).ColorModel", Method, 0}, + {"(*Uniform).Convert", Method, 0}, + {"(*Uniform).Opaque", Method, 0}, + {"(*Uniform).RGBA", Method, 0}, + {"(*Uniform).RGBA64At", Method, 17}, + {"(*YCbCr).At", Method, 0}, + {"(*YCbCr).Bounds", Method, 0}, + {"(*YCbCr).COffset", Method, 0}, + {"(*YCbCr).ColorModel", Method, 0}, + {"(*YCbCr).Opaque", Method, 0}, + {"(*YCbCr).RGBA64At", Method, 17}, + {"(*YCbCr).SubImage", Method, 0}, + {"(*YCbCr).YCbCrAt", Method, 4}, + {"(*YCbCr).YOffset", Method, 0}, + {"(Point).Add", Method, 0}, + {"(Point).Div", Method, 0}, + {"(Point).Eq", Method, 0}, + {"(Point).In", Method, 0}, + {"(Point).Mod", Method, 0}, + {"(Point).Mul", Method, 0}, + {"(Point).String", Method, 0}, + {"(Point).Sub", Method, 0}, + {"(Rectangle).Add", Method, 0}, + {"(Rectangle).At", Method, 5}, + {"(Rectangle).Bounds", Method, 5}, + {"(Rectangle).Canon", Method, 
0}, + {"(Rectangle).ColorModel", Method, 5}, + {"(Rectangle).Dx", Method, 0}, + {"(Rectangle).Dy", Method, 0}, + {"(Rectangle).Empty", Method, 0}, + {"(Rectangle).Eq", Method, 0}, + {"(Rectangle).In", Method, 0}, + {"(Rectangle).Inset", Method, 0}, + {"(Rectangle).Intersect", Method, 0}, + {"(Rectangle).Overlaps", Method, 0}, + {"(Rectangle).RGBA64At", Method, 17}, + {"(Rectangle).Size", Method, 0}, + {"(Rectangle).String", Method, 0}, + {"(Rectangle).Sub", Method, 0}, + {"(Rectangle).Union", Method, 0}, + {"(YCbCrSubsampleRatio).String", Method, 0}, + {"Alpha", Type, 0}, + {"Alpha.Pix", Field, 0}, + {"Alpha.Rect", Field, 0}, + {"Alpha.Stride", Field, 0}, + {"Alpha16", Type, 0}, + {"Alpha16.Pix", Field, 0}, + {"Alpha16.Rect", Field, 0}, + {"Alpha16.Stride", Field, 0}, + {"Black", Var, 0}, + {"CMYK", Type, 5}, + {"CMYK.Pix", Field, 5}, + {"CMYK.Rect", Field, 5}, + {"CMYK.Stride", Field, 5}, + {"Config", Type, 0}, + {"Config.ColorModel", Field, 0}, + {"Config.Height", Field, 0}, + {"Config.Width", Field, 0}, + {"Decode", Func, 0}, + {"DecodeConfig", Func, 0}, + {"ErrFormat", Var, 0}, + {"Gray", Type, 0}, + {"Gray.Pix", Field, 0}, + {"Gray.Rect", Field, 0}, + {"Gray.Stride", Field, 0}, + {"Gray16", Type, 0}, + {"Gray16.Pix", Field, 0}, + {"Gray16.Rect", Field, 0}, + {"Gray16.Stride", Field, 0}, + {"Image", Type, 0}, + {"NRGBA", Type, 0}, + {"NRGBA.Pix", Field, 0}, + {"NRGBA.Rect", Field, 0}, + {"NRGBA.Stride", Field, 0}, + {"NRGBA64", Type, 0}, + {"NRGBA64.Pix", Field, 0}, + {"NRGBA64.Rect", Field, 0}, + {"NRGBA64.Stride", Field, 0}, + {"NYCbCrA", Type, 6}, + {"NYCbCrA.A", Field, 6}, + {"NYCbCrA.AStride", Field, 6}, + {"NYCbCrA.YCbCr", Field, 6}, + {"NewAlpha", Func, 0}, + {"NewAlpha16", Func, 0}, + {"NewCMYK", Func, 5}, + {"NewGray", Func, 0}, + {"NewGray16", Func, 0}, + {"NewNRGBA", Func, 0}, + {"NewNRGBA64", Func, 0}, + {"NewNYCbCrA", Func, 6}, + {"NewPaletted", Func, 0}, + {"NewRGBA", Func, 0}, + {"NewRGBA64", Func, 0}, + {"NewUniform", Func, 0}, + {"NewYCbCr", 
Func, 0}, + {"Opaque", Var, 0}, + {"Paletted", Type, 0}, + {"Paletted.Palette", Field, 0}, + {"Paletted.Pix", Field, 0}, + {"Paletted.Rect", Field, 0}, + {"Paletted.Stride", Field, 0}, + {"PalettedImage", Type, 0}, + {"Point", Type, 0}, + {"Point.X", Field, 0}, + {"Point.Y", Field, 0}, + {"Pt", Func, 0}, + {"RGBA", Type, 0}, + {"RGBA.Pix", Field, 0}, + {"RGBA.Rect", Field, 0}, + {"RGBA.Stride", Field, 0}, + {"RGBA64", Type, 0}, + {"RGBA64.Pix", Field, 0}, + {"RGBA64.Rect", Field, 0}, + {"RGBA64.Stride", Field, 0}, + {"RGBA64Image", Type, 17}, + {"Rect", Func, 0}, + {"Rectangle", Type, 0}, + {"Rectangle.Max", Field, 0}, + {"Rectangle.Min", Field, 0}, + {"RegisterFormat", Func, 0}, + {"Transparent", Var, 0}, + {"Uniform", Type, 0}, + {"Uniform.C", Field, 0}, + {"White", Var, 0}, + {"YCbCr", Type, 0}, + {"YCbCr.CStride", Field, 0}, + {"YCbCr.Cb", Field, 0}, + {"YCbCr.Cr", Field, 0}, + {"YCbCr.Rect", Field, 0}, + {"YCbCr.SubsampleRatio", Field, 0}, + {"YCbCr.Y", Field, 0}, + {"YCbCr.YStride", Field, 0}, + {"YCbCrSubsampleRatio", Type, 0}, + {"YCbCrSubsampleRatio410", Const, 5}, + {"YCbCrSubsampleRatio411", Const, 5}, + {"YCbCrSubsampleRatio420", Const, 0}, + {"YCbCrSubsampleRatio422", Const, 0}, + {"YCbCrSubsampleRatio440", Const, 1}, + {"YCbCrSubsampleRatio444", Const, 0}, + {"ZP", Var, 0}, + {"ZR", Var, 0}, + }, + "image/color": { + {"(Alpha).RGBA", Method, 0}, + {"(Alpha16).RGBA", Method, 0}, + {"(CMYK).RGBA", Method, 5}, + {"(Gray).RGBA", Method, 0}, + {"(Gray16).RGBA", Method, 0}, + {"(NRGBA).RGBA", Method, 0}, + {"(NRGBA64).RGBA", Method, 0}, + {"(NYCbCrA).RGBA", Method, 6}, + {"(Palette).Convert", Method, 0}, + {"(Palette).Index", Method, 0}, + {"(RGBA).RGBA", Method, 0}, + {"(RGBA64).RGBA", Method, 0}, + {"(YCbCr).RGBA", Method, 0}, + {"Alpha", Type, 0}, + {"Alpha.A", Field, 0}, + {"Alpha16", Type, 0}, + {"Alpha16.A", Field, 0}, + {"Alpha16Model", Var, 0}, + {"AlphaModel", Var, 0}, + {"Black", Var, 0}, + {"CMYK", Type, 5}, + {"CMYK.C", Field, 5}, + {"CMYK.K", 
Field, 5}, + {"CMYK.M", Field, 5}, + {"CMYK.Y", Field, 5}, + {"CMYKModel", Var, 5}, + {"CMYKToRGB", Func, 5}, + {"Color", Type, 0}, + {"Gray", Type, 0}, + {"Gray.Y", Field, 0}, + {"Gray16", Type, 0}, + {"Gray16.Y", Field, 0}, + {"Gray16Model", Var, 0}, + {"GrayModel", Var, 0}, + {"Model", Type, 0}, + {"ModelFunc", Func, 0}, + {"NRGBA", Type, 0}, + {"NRGBA.A", Field, 0}, + {"NRGBA.B", Field, 0}, + {"NRGBA.G", Field, 0}, + {"NRGBA.R", Field, 0}, + {"NRGBA64", Type, 0}, + {"NRGBA64.A", Field, 0}, + {"NRGBA64.B", Field, 0}, + {"NRGBA64.G", Field, 0}, + {"NRGBA64.R", Field, 0}, + {"NRGBA64Model", Var, 0}, + {"NRGBAModel", Var, 0}, + {"NYCbCrA", Type, 6}, + {"NYCbCrA.A", Field, 6}, + {"NYCbCrA.YCbCr", Field, 6}, + {"NYCbCrAModel", Var, 6}, + {"Opaque", Var, 0}, + {"Palette", Type, 0}, + {"RGBA", Type, 0}, + {"RGBA.A", Field, 0}, + {"RGBA.B", Field, 0}, + {"RGBA.G", Field, 0}, + {"RGBA.R", Field, 0}, + {"RGBA64", Type, 0}, + {"RGBA64.A", Field, 0}, + {"RGBA64.B", Field, 0}, + {"RGBA64.G", Field, 0}, + {"RGBA64.R", Field, 0}, + {"RGBA64Model", Var, 0}, + {"RGBAModel", Var, 0}, + {"RGBToCMYK", Func, 5}, + {"RGBToYCbCr", Func, 0}, + {"Transparent", Var, 0}, + {"White", Var, 0}, + {"YCbCr", Type, 0}, + {"YCbCr.Cb", Field, 0}, + {"YCbCr.Cr", Field, 0}, + {"YCbCr.Y", Field, 0}, + {"YCbCrModel", Var, 0}, + {"YCbCrToRGB", Func, 0}, + }, + "image/color/palette": { + {"Plan9", Var, 2}, + {"WebSafe", Var, 2}, + }, + "image/draw": { + {"(Op).Draw", Method, 2}, + {"Draw", Func, 0}, + {"DrawMask", Func, 0}, + {"Drawer", Type, 2}, + {"FloydSteinberg", Var, 2}, + {"Image", Type, 0}, + {"Op", Type, 0}, + {"Over", Const, 0}, + {"Quantizer", Type, 2}, + {"RGBA64Image", Type, 17}, + {"Src", Const, 0}, + }, + "image/gif": { + {"Decode", Func, 0}, + {"DecodeAll", Func, 0}, + {"DecodeConfig", Func, 0}, + {"DisposalBackground", Const, 5}, + {"DisposalNone", Const, 5}, + {"DisposalPrevious", Const, 5}, + {"Encode", Func, 2}, + {"EncodeAll", Func, 2}, + {"GIF", Type, 0}, + {"GIF.BackgroundIndex", 
Field, 5}, + {"GIF.Config", Field, 5}, + {"GIF.Delay", Field, 0}, + {"GIF.Disposal", Field, 5}, + {"GIF.Image", Field, 0}, + {"GIF.LoopCount", Field, 0}, + {"Options", Type, 2}, + {"Options.Drawer", Field, 2}, + {"Options.NumColors", Field, 2}, + {"Options.Quantizer", Field, 2}, + }, + "image/jpeg": { + {"(FormatError).Error", Method, 0}, + {"(UnsupportedError).Error", Method, 0}, + {"Decode", Func, 0}, + {"DecodeConfig", Func, 0}, + {"DefaultQuality", Const, 0}, + {"Encode", Func, 0}, + {"FormatError", Type, 0}, + {"Options", Type, 0}, + {"Options.Quality", Field, 0}, + {"Reader", Type, 0}, + {"UnsupportedError", Type, 0}, + }, + "image/png": { + {"(*Encoder).Encode", Method, 4}, + {"(FormatError).Error", Method, 0}, + {"(UnsupportedError).Error", Method, 0}, + {"BestCompression", Const, 4}, + {"BestSpeed", Const, 4}, + {"CompressionLevel", Type, 4}, + {"Decode", Func, 0}, + {"DecodeConfig", Func, 0}, + {"DefaultCompression", Const, 4}, + {"Encode", Func, 0}, + {"Encoder", Type, 4}, + {"Encoder.BufferPool", Field, 9}, + {"Encoder.CompressionLevel", Field, 4}, + {"EncoderBuffer", Type, 9}, + {"EncoderBufferPool", Type, 9}, + {"FormatError", Type, 0}, + {"NoCompression", Const, 4}, + {"UnsupportedError", Type, 0}, + }, + "index/suffixarray": { + {"(*Index).Bytes", Method, 0}, + {"(*Index).FindAllIndex", Method, 0}, + {"(*Index).Lookup", Method, 0}, + {"(*Index).Read", Method, 0}, + {"(*Index).Write", Method, 0}, + {"Index", Type, 0}, + {"New", Func, 0}, + }, + "io": { + {"(*LimitedReader).Read", Method, 0}, + {"(*OffsetWriter).Seek", Method, 20}, + {"(*OffsetWriter).Write", Method, 20}, + {"(*OffsetWriter).WriteAt", Method, 20}, + {"(*PipeReader).Close", Method, 0}, + {"(*PipeReader).CloseWithError", Method, 0}, + {"(*PipeReader).Read", Method, 0}, + {"(*PipeWriter).Close", Method, 0}, + {"(*PipeWriter).CloseWithError", Method, 0}, + {"(*PipeWriter).Write", Method, 0}, + {"(*SectionReader).Outer", Method, 22}, + {"(*SectionReader).Read", Method, 0}, + 
{"(*SectionReader).ReadAt", Method, 0}, + {"(*SectionReader).Seek", Method, 0}, + {"(*SectionReader).Size", Method, 0}, + {"ByteReader", Type, 0}, + {"ByteScanner", Type, 0}, + {"ByteWriter", Type, 1}, + {"Closer", Type, 0}, + {"Copy", Func, 0}, + {"CopyBuffer", Func, 5}, + {"CopyN", Func, 0}, + {"Discard", Var, 16}, + {"EOF", Var, 0}, + {"ErrClosedPipe", Var, 0}, + {"ErrNoProgress", Var, 1}, + {"ErrShortBuffer", Var, 0}, + {"ErrShortWrite", Var, 0}, + {"ErrUnexpectedEOF", Var, 0}, + {"LimitReader", Func, 0}, + {"LimitedReader", Type, 0}, + {"LimitedReader.N", Field, 0}, + {"LimitedReader.R", Field, 0}, + {"MultiReader", Func, 0}, + {"MultiWriter", Func, 0}, + {"NewOffsetWriter", Func, 20}, + {"NewSectionReader", Func, 0}, + {"NopCloser", Func, 16}, + {"OffsetWriter", Type, 20}, + {"Pipe", Func, 0}, + {"PipeReader", Type, 0}, + {"PipeWriter", Type, 0}, + {"ReadAll", Func, 16}, + {"ReadAtLeast", Func, 0}, + {"ReadCloser", Type, 0}, + {"ReadFull", Func, 0}, + {"ReadSeekCloser", Type, 16}, + {"ReadSeeker", Type, 0}, + {"ReadWriteCloser", Type, 0}, + {"ReadWriteSeeker", Type, 0}, + {"ReadWriter", Type, 0}, + {"Reader", Type, 0}, + {"ReaderAt", Type, 0}, + {"ReaderFrom", Type, 0}, + {"RuneReader", Type, 0}, + {"RuneScanner", Type, 0}, + {"SectionReader", Type, 0}, + {"SeekCurrent", Const, 7}, + {"SeekEnd", Const, 7}, + {"SeekStart", Const, 7}, + {"Seeker", Type, 0}, + {"StringWriter", Type, 12}, + {"TeeReader", Func, 0}, + {"WriteCloser", Type, 0}, + {"WriteSeeker", Type, 0}, + {"WriteString", Func, 0}, + {"Writer", Type, 0}, + {"WriterAt", Type, 0}, + {"WriterTo", Type, 0}, + }, + "io/fs": { + {"(*PathError).Error", Method, 16}, + {"(*PathError).Timeout", Method, 16}, + {"(*PathError).Unwrap", Method, 16}, + {"(FileMode).IsDir", Method, 16}, + {"(FileMode).IsRegular", Method, 16}, + {"(FileMode).Perm", Method, 16}, + {"(FileMode).String", Method, 16}, + {"(FileMode).Type", Method, 16}, + {"DirEntry", Type, 16}, + {"ErrClosed", Var, 16}, + {"ErrExist", Var, 16}, + 
{"ErrInvalid", Var, 16}, + {"ErrNotExist", Var, 16}, + {"ErrPermission", Var, 16}, + {"FS", Type, 16}, + {"File", Type, 16}, + {"FileInfo", Type, 16}, + {"FileInfoToDirEntry", Func, 17}, + {"FileMode", Type, 16}, + {"FormatDirEntry", Func, 21}, + {"FormatFileInfo", Func, 21}, + {"Glob", Func, 16}, + {"GlobFS", Type, 16}, + {"ModeAppend", Const, 16}, + {"ModeCharDevice", Const, 16}, + {"ModeDevice", Const, 16}, + {"ModeDir", Const, 16}, + {"ModeExclusive", Const, 16}, + {"ModeIrregular", Const, 16}, + {"ModeNamedPipe", Const, 16}, + {"ModePerm", Const, 16}, + {"ModeSetgid", Const, 16}, + {"ModeSetuid", Const, 16}, + {"ModeSocket", Const, 16}, + {"ModeSticky", Const, 16}, + {"ModeSymlink", Const, 16}, + {"ModeTemporary", Const, 16}, + {"ModeType", Const, 16}, + {"PathError", Type, 16}, + {"PathError.Err", Field, 16}, + {"PathError.Op", Field, 16}, + {"PathError.Path", Field, 16}, + {"ReadDir", Func, 16}, + {"ReadDirFS", Type, 16}, + {"ReadDirFile", Type, 16}, + {"ReadFile", Func, 16}, + {"ReadFileFS", Type, 16}, + {"SkipAll", Var, 20}, + {"SkipDir", Var, 16}, + {"Stat", Func, 16}, + {"StatFS", Type, 16}, + {"Sub", Func, 16}, + {"SubFS", Type, 16}, + {"ValidPath", Func, 16}, + {"WalkDir", Func, 16}, + {"WalkDirFunc", Type, 16}, + }, + "io/ioutil": { + {"Discard", Var, 0}, + {"NopCloser", Func, 0}, + {"ReadAll", Func, 0}, + {"ReadDir", Func, 0}, + {"ReadFile", Func, 0}, + {"TempDir", Func, 0}, + {"TempFile", Func, 0}, + {"WriteFile", Func, 0}, + }, + "log": { + {"(*Logger).Fatal", Method, 0}, + {"(*Logger).Fatalf", Method, 0}, + {"(*Logger).Fatalln", Method, 0}, + {"(*Logger).Flags", Method, 0}, + {"(*Logger).Output", Method, 0}, + {"(*Logger).Panic", Method, 0}, + {"(*Logger).Panicf", Method, 0}, + {"(*Logger).Panicln", Method, 0}, + {"(*Logger).Prefix", Method, 0}, + {"(*Logger).Print", Method, 0}, + {"(*Logger).Printf", Method, 0}, + {"(*Logger).Println", Method, 0}, + {"(*Logger).SetFlags", Method, 0}, + {"(*Logger).SetOutput", Method, 5}, + {"(*Logger).SetPrefix", 
Method, 0}, + {"(*Logger).Writer", Method, 12}, + {"Default", Func, 16}, + {"Fatal", Func, 0}, + {"Fatalf", Func, 0}, + {"Fatalln", Func, 0}, + {"Flags", Func, 0}, + {"LUTC", Const, 5}, + {"Ldate", Const, 0}, + {"Llongfile", Const, 0}, + {"Lmicroseconds", Const, 0}, + {"Lmsgprefix", Const, 14}, + {"Logger", Type, 0}, + {"Lshortfile", Const, 0}, + {"LstdFlags", Const, 0}, + {"Ltime", Const, 0}, + {"New", Func, 0}, + {"Output", Func, 5}, + {"Panic", Func, 0}, + {"Panicf", Func, 0}, + {"Panicln", Func, 0}, + {"Prefix", Func, 0}, + {"Print", Func, 0}, + {"Printf", Func, 0}, + {"Println", Func, 0}, + {"SetFlags", Func, 0}, + {"SetOutput", Func, 0}, + {"SetPrefix", Func, 0}, + {"Writer", Func, 13}, + }, + "log/slog": { + {"(*JSONHandler).Enabled", Method, 21}, + {"(*JSONHandler).Handle", Method, 21}, + {"(*JSONHandler).WithAttrs", Method, 21}, + {"(*JSONHandler).WithGroup", Method, 21}, + {"(*Level).UnmarshalJSON", Method, 21}, + {"(*Level).UnmarshalText", Method, 21}, + {"(*LevelVar).Level", Method, 21}, + {"(*LevelVar).MarshalText", Method, 21}, + {"(*LevelVar).Set", Method, 21}, + {"(*LevelVar).String", Method, 21}, + {"(*LevelVar).UnmarshalText", Method, 21}, + {"(*Logger).Debug", Method, 21}, + {"(*Logger).DebugContext", Method, 21}, + {"(*Logger).Enabled", Method, 21}, + {"(*Logger).Error", Method, 21}, + {"(*Logger).ErrorContext", Method, 21}, + {"(*Logger).Handler", Method, 21}, + {"(*Logger).Info", Method, 21}, + {"(*Logger).InfoContext", Method, 21}, + {"(*Logger).Log", Method, 21}, + {"(*Logger).LogAttrs", Method, 21}, + {"(*Logger).Warn", Method, 21}, + {"(*Logger).WarnContext", Method, 21}, + {"(*Logger).With", Method, 21}, + {"(*Logger).WithGroup", Method, 21}, + {"(*Record).Add", Method, 21}, + {"(*Record).AddAttrs", Method, 21}, + {"(*TextHandler).Enabled", Method, 21}, + {"(*TextHandler).Handle", Method, 21}, + {"(*TextHandler).WithAttrs", Method, 21}, + {"(*TextHandler).WithGroup", Method, 21}, + {"(Attr).Equal", Method, 21}, + {"(Attr).String", Method, 
21}, + {"(Kind).String", Method, 21}, + {"(Level).Level", Method, 21}, + {"(Level).MarshalJSON", Method, 21}, + {"(Level).MarshalText", Method, 21}, + {"(Level).String", Method, 21}, + {"(Record).Attrs", Method, 21}, + {"(Record).Clone", Method, 21}, + {"(Record).NumAttrs", Method, 21}, + {"(Value).Any", Method, 21}, + {"(Value).Bool", Method, 21}, + {"(Value).Duration", Method, 21}, + {"(Value).Equal", Method, 21}, + {"(Value).Float64", Method, 21}, + {"(Value).Group", Method, 21}, + {"(Value).Int64", Method, 21}, + {"(Value).Kind", Method, 21}, + {"(Value).LogValuer", Method, 21}, + {"(Value).Resolve", Method, 21}, + {"(Value).String", Method, 21}, + {"(Value).Time", Method, 21}, + {"(Value).Uint64", Method, 21}, + {"Any", Func, 21}, + {"AnyValue", Func, 21}, + {"Attr", Type, 21}, + {"Attr.Key", Field, 21}, + {"Attr.Value", Field, 21}, + {"Bool", Func, 21}, + {"BoolValue", Func, 21}, + {"Debug", Func, 21}, + {"DebugContext", Func, 21}, + {"Default", Func, 21}, + {"Duration", Func, 21}, + {"DurationValue", Func, 21}, + {"Error", Func, 21}, + {"ErrorContext", Func, 21}, + {"Float64", Func, 21}, + {"Float64Value", Func, 21}, + {"Group", Func, 21}, + {"GroupValue", Func, 21}, + {"Handler", Type, 21}, + {"HandlerOptions", Type, 21}, + {"HandlerOptions.AddSource", Field, 21}, + {"HandlerOptions.Level", Field, 21}, + {"HandlerOptions.ReplaceAttr", Field, 21}, + {"Info", Func, 21}, + {"InfoContext", Func, 21}, + {"Int", Func, 21}, + {"Int64", Func, 21}, + {"Int64Value", Func, 21}, + {"IntValue", Func, 21}, + {"JSONHandler", Type, 21}, + {"Kind", Type, 21}, + {"KindAny", Const, 21}, + {"KindBool", Const, 21}, + {"KindDuration", Const, 21}, + {"KindFloat64", Const, 21}, + {"KindGroup", Const, 21}, + {"KindInt64", Const, 21}, + {"KindLogValuer", Const, 21}, + {"KindString", Const, 21}, + {"KindTime", Const, 21}, + {"KindUint64", Const, 21}, + {"Level", Type, 21}, + {"LevelDebug", Const, 21}, + {"LevelError", Const, 21}, + {"LevelInfo", Const, 21}, + {"LevelKey", Const, 21}, 
+ {"LevelVar", Type, 21}, + {"LevelWarn", Const, 21}, + {"Leveler", Type, 21}, + {"Log", Func, 21}, + {"LogAttrs", Func, 21}, + {"LogValuer", Type, 21}, + {"Logger", Type, 21}, + {"MessageKey", Const, 21}, + {"New", Func, 21}, + {"NewJSONHandler", Func, 21}, + {"NewLogLogger", Func, 21}, + {"NewRecord", Func, 21}, + {"NewTextHandler", Func, 21}, + {"Record", Type, 21}, + {"Record.Level", Field, 21}, + {"Record.Message", Field, 21}, + {"Record.PC", Field, 21}, + {"Record.Time", Field, 21}, + {"SetDefault", Func, 21}, + {"SetLogLoggerLevel", Func, 22}, + {"Source", Type, 21}, + {"Source.File", Field, 21}, + {"Source.Function", Field, 21}, + {"Source.Line", Field, 21}, + {"SourceKey", Const, 21}, + {"String", Func, 21}, + {"StringValue", Func, 21}, + {"TextHandler", Type, 21}, + {"Time", Func, 21}, + {"TimeKey", Const, 21}, + {"TimeValue", Func, 21}, + {"Uint64", Func, 21}, + {"Uint64Value", Func, 21}, + {"Value", Type, 21}, + {"Warn", Func, 21}, + {"WarnContext", Func, 21}, + {"With", Func, 21}, + }, + "log/syslog": { + {"(*Writer).Alert", Method, 0}, + {"(*Writer).Close", Method, 0}, + {"(*Writer).Crit", Method, 0}, + {"(*Writer).Debug", Method, 0}, + {"(*Writer).Emerg", Method, 0}, + {"(*Writer).Err", Method, 0}, + {"(*Writer).Info", Method, 0}, + {"(*Writer).Notice", Method, 0}, + {"(*Writer).Warning", Method, 0}, + {"(*Writer).Write", Method, 0}, + {"Dial", Func, 0}, + {"LOG_ALERT", Const, 0}, + {"LOG_AUTH", Const, 1}, + {"LOG_AUTHPRIV", Const, 1}, + {"LOG_CRIT", Const, 0}, + {"LOG_CRON", Const, 1}, + {"LOG_DAEMON", Const, 1}, + {"LOG_DEBUG", Const, 0}, + {"LOG_EMERG", Const, 0}, + {"LOG_ERR", Const, 0}, + {"LOG_FTP", Const, 1}, + {"LOG_INFO", Const, 0}, + {"LOG_KERN", Const, 1}, + {"LOG_LOCAL0", Const, 1}, + {"LOG_LOCAL1", Const, 1}, + {"LOG_LOCAL2", Const, 1}, + {"LOG_LOCAL3", Const, 1}, + {"LOG_LOCAL4", Const, 1}, + {"LOG_LOCAL5", Const, 1}, + {"LOG_LOCAL6", Const, 1}, + {"LOG_LOCAL7", Const, 1}, + {"LOG_LPR", Const, 1}, + {"LOG_MAIL", Const, 1}, + 
{"LOG_NEWS", Const, 1}, + {"LOG_NOTICE", Const, 0}, + {"LOG_SYSLOG", Const, 1}, + {"LOG_USER", Const, 1}, + {"LOG_UUCP", Const, 1}, + {"LOG_WARNING", Const, 0}, + {"New", Func, 0}, + {"NewLogger", Func, 0}, + {"Priority", Type, 0}, + {"Writer", Type, 0}, + }, + "maps": { + {"Clone", Func, 21}, + {"Copy", Func, 21}, + {"DeleteFunc", Func, 21}, + {"Equal", Func, 21}, + {"EqualFunc", Func, 21}, + }, + "math": { + {"Abs", Func, 0}, + {"Acos", Func, 0}, + {"Acosh", Func, 0}, + {"Asin", Func, 0}, + {"Asinh", Func, 0}, + {"Atan", Func, 0}, + {"Atan2", Func, 0}, + {"Atanh", Func, 0}, + {"Cbrt", Func, 0}, + {"Ceil", Func, 0}, + {"Copysign", Func, 0}, + {"Cos", Func, 0}, + {"Cosh", Func, 0}, + {"Dim", Func, 0}, + {"E", Const, 0}, + {"Erf", Func, 0}, + {"Erfc", Func, 0}, + {"Erfcinv", Func, 10}, + {"Erfinv", Func, 10}, + {"Exp", Func, 0}, + {"Exp2", Func, 0}, + {"Expm1", Func, 0}, + {"FMA", Func, 14}, + {"Float32bits", Func, 0}, + {"Float32frombits", Func, 0}, + {"Float64bits", Func, 0}, + {"Float64frombits", Func, 0}, + {"Floor", Func, 0}, + {"Frexp", Func, 0}, + {"Gamma", Func, 0}, + {"Hypot", Func, 0}, + {"Ilogb", Func, 0}, + {"Inf", Func, 0}, + {"IsInf", Func, 0}, + {"IsNaN", Func, 0}, + {"J0", Func, 0}, + {"J1", Func, 0}, + {"Jn", Func, 0}, + {"Ldexp", Func, 0}, + {"Lgamma", Func, 0}, + {"Ln10", Const, 0}, + {"Ln2", Const, 0}, + {"Log", Func, 0}, + {"Log10", Func, 0}, + {"Log10E", Const, 0}, + {"Log1p", Func, 0}, + {"Log2", Func, 0}, + {"Log2E", Const, 0}, + {"Logb", Func, 0}, + {"Max", Func, 0}, + {"MaxFloat32", Const, 0}, + {"MaxFloat64", Const, 0}, + {"MaxInt", Const, 17}, + {"MaxInt16", Const, 0}, + {"MaxInt32", Const, 0}, + {"MaxInt64", Const, 0}, + {"MaxInt8", Const, 0}, + {"MaxUint", Const, 17}, + {"MaxUint16", Const, 0}, + {"MaxUint32", Const, 0}, + {"MaxUint64", Const, 0}, + {"MaxUint8", Const, 0}, + {"Min", Func, 0}, + {"MinInt", Const, 17}, + {"MinInt16", Const, 0}, + {"MinInt32", Const, 0}, + {"MinInt64", Const, 0}, + {"MinInt8", Const, 0}, + {"Mod", Func, 
0}, + {"Modf", Func, 0}, + {"NaN", Func, 0}, + {"Nextafter", Func, 0}, + {"Nextafter32", Func, 4}, + {"Phi", Const, 0}, + {"Pi", Const, 0}, + {"Pow", Func, 0}, + {"Pow10", Func, 0}, + {"Remainder", Func, 0}, + {"Round", Func, 10}, + {"RoundToEven", Func, 10}, + {"Signbit", Func, 0}, + {"Sin", Func, 0}, + {"Sincos", Func, 0}, + {"Sinh", Func, 0}, + {"SmallestNonzeroFloat32", Const, 0}, + {"SmallestNonzeroFloat64", Const, 0}, + {"Sqrt", Func, 0}, + {"Sqrt2", Const, 0}, + {"SqrtE", Const, 0}, + {"SqrtPhi", Const, 0}, + {"SqrtPi", Const, 0}, + {"Tan", Func, 0}, + {"Tanh", Func, 0}, + {"Trunc", Func, 0}, + {"Y0", Func, 0}, + {"Y1", Func, 0}, + {"Yn", Func, 0}, + }, + "math/big": { + {"(*Float).Abs", Method, 5}, + {"(*Float).Acc", Method, 5}, + {"(*Float).Add", Method, 5}, + {"(*Float).Append", Method, 5}, + {"(*Float).Cmp", Method, 5}, + {"(*Float).Copy", Method, 5}, + {"(*Float).Float32", Method, 5}, + {"(*Float).Float64", Method, 5}, + {"(*Float).Format", Method, 5}, + {"(*Float).GobDecode", Method, 7}, + {"(*Float).GobEncode", Method, 7}, + {"(*Float).Int", Method, 5}, + {"(*Float).Int64", Method, 5}, + {"(*Float).IsInf", Method, 5}, + {"(*Float).IsInt", Method, 5}, + {"(*Float).MantExp", Method, 5}, + {"(*Float).MarshalText", Method, 6}, + {"(*Float).MinPrec", Method, 5}, + {"(*Float).Mode", Method, 5}, + {"(*Float).Mul", Method, 5}, + {"(*Float).Neg", Method, 5}, + {"(*Float).Parse", Method, 5}, + {"(*Float).Prec", Method, 5}, + {"(*Float).Quo", Method, 5}, + {"(*Float).Rat", Method, 5}, + {"(*Float).Scan", Method, 8}, + {"(*Float).Set", Method, 5}, + {"(*Float).SetFloat64", Method, 5}, + {"(*Float).SetInf", Method, 5}, + {"(*Float).SetInt", Method, 5}, + {"(*Float).SetInt64", Method, 5}, + {"(*Float).SetMantExp", Method, 5}, + {"(*Float).SetMode", Method, 5}, + {"(*Float).SetPrec", Method, 5}, + {"(*Float).SetRat", Method, 5}, + {"(*Float).SetString", Method, 5}, + {"(*Float).SetUint64", Method, 5}, + {"(*Float).Sign", Method, 5}, + {"(*Float).Signbit", Method, 
5}, + {"(*Float).Sqrt", Method, 10}, + {"(*Float).String", Method, 5}, + {"(*Float).Sub", Method, 5}, + {"(*Float).Text", Method, 5}, + {"(*Float).Uint64", Method, 5}, + {"(*Float).UnmarshalText", Method, 6}, + {"(*Int).Abs", Method, 0}, + {"(*Int).Add", Method, 0}, + {"(*Int).And", Method, 0}, + {"(*Int).AndNot", Method, 0}, + {"(*Int).Append", Method, 6}, + {"(*Int).Binomial", Method, 0}, + {"(*Int).Bit", Method, 0}, + {"(*Int).BitLen", Method, 0}, + {"(*Int).Bits", Method, 0}, + {"(*Int).Bytes", Method, 0}, + {"(*Int).Cmp", Method, 0}, + {"(*Int).CmpAbs", Method, 10}, + {"(*Int).Div", Method, 0}, + {"(*Int).DivMod", Method, 0}, + {"(*Int).Exp", Method, 0}, + {"(*Int).FillBytes", Method, 15}, + {"(*Int).Float64", Method, 21}, + {"(*Int).Format", Method, 0}, + {"(*Int).GCD", Method, 0}, + {"(*Int).GobDecode", Method, 0}, + {"(*Int).GobEncode", Method, 0}, + {"(*Int).Int64", Method, 0}, + {"(*Int).IsInt64", Method, 9}, + {"(*Int).IsUint64", Method, 9}, + {"(*Int).Lsh", Method, 0}, + {"(*Int).MarshalJSON", Method, 1}, + {"(*Int).MarshalText", Method, 3}, + {"(*Int).Mod", Method, 0}, + {"(*Int).ModInverse", Method, 0}, + {"(*Int).ModSqrt", Method, 5}, + {"(*Int).Mul", Method, 0}, + {"(*Int).MulRange", Method, 0}, + {"(*Int).Neg", Method, 0}, + {"(*Int).Not", Method, 0}, + {"(*Int).Or", Method, 0}, + {"(*Int).ProbablyPrime", Method, 0}, + {"(*Int).Quo", Method, 0}, + {"(*Int).QuoRem", Method, 0}, + {"(*Int).Rand", Method, 0}, + {"(*Int).Rem", Method, 0}, + {"(*Int).Rsh", Method, 0}, + {"(*Int).Scan", Method, 0}, + {"(*Int).Set", Method, 0}, + {"(*Int).SetBit", Method, 0}, + {"(*Int).SetBits", Method, 0}, + {"(*Int).SetBytes", Method, 0}, + {"(*Int).SetInt64", Method, 0}, + {"(*Int).SetString", Method, 0}, + {"(*Int).SetUint64", Method, 1}, + {"(*Int).Sign", Method, 0}, + {"(*Int).Sqrt", Method, 8}, + {"(*Int).String", Method, 0}, + {"(*Int).Sub", Method, 0}, + {"(*Int).Text", Method, 6}, + {"(*Int).TrailingZeroBits", Method, 13}, + {"(*Int).Uint64", Method, 1}, + 
{"(*Int).UnmarshalJSON", Method, 1}, + {"(*Int).UnmarshalText", Method, 3}, + {"(*Int).Xor", Method, 0}, + {"(*Rat).Abs", Method, 0}, + {"(*Rat).Add", Method, 0}, + {"(*Rat).Cmp", Method, 0}, + {"(*Rat).Denom", Method, 0}, + {"(*Rat).Float32", Method, 4}, + {"(*Rat).Float64", Method, 1}, + {"(*Rat).FloatPrec", Method, 22}, + {"(*Rat).FloatString", Method, 0}, + {"(*Rat).GobDecode", Method, 0}, + {"(*Rat).GobEncode", Method, 0}, + {"(*Rat).Inv", Method, 0}, + {"(*Rat).IsInt", Method, 0}, + {"(*Rat).MarshalText", Method, 3}, + {"(*Rat).Mul", Method, 0}, + {"(*Rat).Neg", Method, 0}, + {"(*Rat).Num", Method, 0}, + {"(*Rat).Quo", Method, 0}, + {"(*Rat).RatString", Method, 0}, + {"(*Rat).Scan", Method, 0}, + {"(*Rat).Set", Method, 0}, + {"(*Rat).SetFloat64", Method, 1}, + {"(*Rat).SetFrac", Method, 0}, + {"(*Rat).SetFrac64", Method, 0}, + {"(*Rat).SetInt", Method, 0}, + {"(*Rat).SetInt64", Method, 0}, + {"(*Rat).SetString", Method, 0}, + {"(*Rat).SetUint64", Method, 13}, + {"(*Rat).Sign", Method, 0}, + {"(*Rat).String", Method, 0}, + {"(*Rat).Sub", Method, 0}, + {"(*Rat).UnmarshalText", Method, 3}, + {"(Accuracy).String", Method, 5}, + {"(ErrNaN).Error", Method, 5}, + {"(RoundingMode).String", Method, 5}, + {"Above", Const, 5}, + {"Accuracy", Type, 5}, + {"AwayFromZero", Const, 5}, + {"Below", Const, 5}, + {"ErrNaN", Type, 5}, + {"Exact", Const, 5}, + {"Float", Type, 5}, + {"Int", Type, 0}, + {"Jacobi", Func, 5}, + {"MaxBase", Const, 0}, + {"MaxExp", Const, 5}, + {"MaxPrec", Const, 5}, + {"MinExp", Const, 5}, + {"NewFloat", Func, 5}, + {"NewInt", Func, 0}, + {"NewRat", Func, 0}, + {"ParseFloat", Func, 5}, + {"Rat", Type, 0}, + {"RoundingMode", Type, 5}, + {"ToNearestAway", Const, 5}, + {"ToNearestEven", Const, 5}, + {"ToNegativeInf", Const, 5}, + {"ToPositiveInf", Const, 5}, + {"ToZero", Const, 5}, + {"Word", Type, 0}, + }, + "math/bits": { + {"Add", Func, 12}, + {"Add32", Func, 12}, + {"Add64", Func, 12}, + {"Div", Func, 12}, + {"Div32", Func, 12}, + {"Div64", Func, 
12}, + {"LeadingZeros", Func, 9}, + {"LeadingZeros16", Func, 9}, + {"LeadingZeros32", Func, 9}, + {"LeadingZeros64", Func, 9}, + {"LeadingZeros8", Func, 9}, + {"Len", Func, 9}, + {"Len16", Func, 9}, + {"Len32", Func, 9}, + {"Len64", Func, 9}, + {"Len8", Func, 9}, + {"Mul", Func, 12}, + {"Mul32", Func, 12}, + {"Mul64", Func, 12}, + {"OnesCount", Func, 9}, + {"OnesCount16", Func, 9}, + {"OnesCount32", Func, 9}, + {"OnesCount64", Func, 9}, + {"OnesCount8", Func, 9}, + {"Rem", Func, 14}, + {"Rem32", Func, 14}, + {"Rem64", Func, 14}, + {"Reverse", Func, 9}, + {"Reverse16", Func, 9}, + {"Reverse32", Func, 9}, + {"Reverse64", Func, 9}, + {"Reverse8", Func, 9}, + {"ReverseBytes", Func, 9}, + {"ReverseBytes16", Func, 9}, + {"ReverseBytes32", Func, 9}, + {"ReverseBytes64", Func, 9}, + {"RotateLeft", Func, 9}, + {"RotateLeft16", Func, 9}, + {"RotateLeft32", Func, 9}, + {"RotateLeft64", Func, 9}, + {"RotateLeft8", Func, 9}, + {"Sub", Func, 12}, + {"Sub32", Func, 12}, + {"Sub64", Func, 12}, + {"TrailingZeros", Func, 9}, + {"TrailingZeros16", Func, 9}, + {"TrailingZeros32", Func, 9}, + {"TrailingZeros64", Func, 9}, + {"TrailingZeros8", Func, 9}, + {"UintSize", Const, 9}, + }, + "math/cmplx": { + {"Abs", Func, 0}, + {"Acos", Func, 0}, + {"Acosh", Func, 0}, + {"Asin", Func, 0}, + {"Asinh", Func, 0}, + {"Atan", Func, 0}, + {"Atanh", Func, 0}, + {"Conj", Func, 0}, + {"Cos", Func, 0}, + {"Cosh", Func, 0}, + {"Cot", Func, 0}, + {"Exp", Func, 0}, + {"Inf", Func, 0}, + {"IsInf", Func, 0}, + {"IsNaN", Func, 0}, + {"Log", Func, 0}, + {"Log10", Func, 0}, + {"NaN", Func, 0}, + {"Phase", Func, 0}, + {"Polar", Func, 0}, + {"Pow", Func, 0}, + {"Rect", Func, 0}, + {"Sin", Func, 0}, + {"Sinh", Func, 0}, + {"Sqrt", Func, 0}, + {"Tan", Func, 0}, + {"Tanh", Func, 0}, + }, + "math/rand": { + {"(*Rand).ExpFloat64", Method, 0}, + {"(*Rand).Float32", Method, 0}, + {"(*Rand).Float64", Method, 0}, + {"(*Rand).Int", Method, 0}, + {"(*Rand).Int31", Method, 0}, + {"(*Rand).Int31n", Method, 0}, + 
{"(*Rand).Int63", Method, 0}, + {"(*Rand).Int63n", Method, 0}, + {"(*Rand).Intn", Method, 0}, + {"(*Rand).NormFloat64", Method, 0}, + {"(*Rand).Perm", Method, 0}, + {"(*Rand).Read", Method, 6}, + {"(*Rand).Seed", Method, 0}, + {"(*Rand).Shuffle", Method, 10}, + {"(*Rand).Uint32", Method, 0}, + {"(*Rand).Uint64", Method, 8}, + {"(*Zipf).Uint64", Method, 0}, + {"ExpFloat64", Func, 0}, + {"Float32", Func, 0}, + {"Float64", Func, 0}, + {"Int", Func, 0}, + {"Int31", Func, 0}, + {"Int31n", Func, 0}, + {"Int63", Func, 0}, + {"Int63n", Func, 0}, + {"Intn", Func, 0}, + {"New", Func, 0}, + {"NewSource", Func, 0}, + {"NewZipf", Func, 0}, + {"NormFloat64", Func, 0}, + {"Perm", Func, 0}, + {"Rand", Type, 0}, + {"Read", Func, 6}, + {"Seed", Func, 0}, + {"Shuffle", Func, 10}, + {"Source", Type, 0}, + {"Source64", Type, 8}, + {"Uint32", Func, 0}, + {"Uint64", Func, 8}, + {"Zipf", Type, 0}, + }, + "math/rand/v2": { + {"(*ChaCha8).MarshalBinary", Method, 22}, + {"(*ChaCha8).Seed", Method, 22}, + {"(*ChaCha8).Uint64", Method, 22}, + {"(*ChaCha8).UnmarshalBinary", Method, 22}, + {"(*PCG).MarshalBinary", Method, 22}, + {"(*PCG).Seed", Method, 22}, + {"(*PCG).Uint64", Method, 22}, + {"(*PCG).UnmarshalBinary", Method, 22}, + {"(*Rand).ExpFloat64", Method, 22}, + {"(*Rand).Float32", Method, 22}, + {"(*Rand).Float64", Method, 22}, + {"(*Rand).Int", Method, 22}, + {"(*Rand).Int32", Method, 22}, + {"(*Rand).Int32N", Method, 22}, + {"(*Rand).Int64", Method, 22}, + {"(*Rand).Int64N", Method, 22}, + {"(*Rand).IntN", Method, 22}, + {"(*Rand).NormFloat64", Method, 22}, + {"(*Rand).Perm", Method, 22}, + {"(*Rand).Shuffle", Method, 22}, + {"(*Rand).Uint32", Method, 22}, + {"(*Rand).Uint32N", Method, 22}, + {"(*Rand).Uint64", Method, 22}, + {"(*Rand).Uint64N", Method, 22}, + {"(*Rand).UintN", Method, 22}, + {"(*Zipf).Uint64", Method, 22}, + {"ChaCha8", Type, 22}, + {"ExpFloat64", Func, 22}, + {"Float32", Func, 22}, + {"Float64", Func, 22}, + {"Int", Func, 22}, + {"Int32", Func, 22}, + {"Int32N", 
Func, 22}, + {"Int64", Func, 22}, + {"Int64N", Func, 22}, + {"IntN", Func, 22}, + {"N", Func, 22}, + {"New", Func, 22}, + {"NewChaCha8", Func, 22}, + {"NewPCG", Func, 22}, + {"NewZipf", Func, 22}, + {"NormFloat64", Func, 22}, + {"PCG", Type, 22}, + {"Perm", Func, 22}, + {"Rand", Type, 22}, + {"Shuffle", Func, 22}, + {"Source", Type, 22}, + {"Uint32", Func, 22}, + {"Uint32N", Func, 22}, + {"Uint64", Func, 22}, + {"Uint64N", Func, 22}, + {"UintN", Func, 22}, + {"Zipf", Type, 22}, + }, + "mime": { + {"(*WordDecoder).Decode", Method, 5}, + {"(*WordDecoder).DecodeHeader", Method, 5}, + {"(WordEncoder).Encode", Method, 5}, + {"AddExtensionType", Func, 0}, + {"BEncoding", Const, 5}, + {"ErrInvalidMediaParameter", Var, 9}, + {"ExtensionsByType", Func, 5}, + {"FormatMediaType", Func, 0}, + {"ParseMediaType", Func, 0}, + {"QEncoding", Const, 5}, + {"TypeByExtension", Func, 0}, + {"WordDecoder", Type, 5}, + {"WordDecoder.CharsetReader", Field, 5}, + {"WordEncoder", Type, 5}, + }, + "mime/multipart": { + {"(*FileHeader).Open", Method, 0}, + {"(*Form).RemoveAll", Method, 0}, + {"(*Part).Close", Method, 0}, + {"(*Part).FileName", Method, 0}, + {"(*Part).FormName", Method, 0}, + {"(*Part).Read", Method, 0}, + {"(*Reader).NextPart", Method, 0}, + {"(*Reader).NextRawPart", Method, 14}, + {"(*Reader).ReadForm", Method, 0}, + {"(*Writer).Boundary", Method, 0}, + {"(*Writer).Close", Method, 0}, + {"(*Writer).CreateFormField", Method, 0}, + {"(*Writer).CreateFormFile", Method, 0}, + {"(*Writer).CreatePart", Method, 0}, + {"(*Writer).FormDataContentType", Method, 0}, + {"(*Writer).SetBoundary", Method, 1}, + {"(*Writer).WriteField", Method, 0}, + {"ErrMessageTooLarge", Var, 9}, + {"File", Type, 0}, + {"FileHeader", Type, 0}, + {"FileHeader.Filename", Field, 0}, + {"FileHeader.Header", Field, 0}, + {"FileHeader.Size", Field, 9}, + {"Form", Type, 0}, + {"Form.File", Field, 0}, + {"Form.Value", Field, 0}, + {"NewReader", Func, 0}, + {"NewWriter", Func, 0}, + {"Part", Type, 0}, + 
{"Part.Header", Field, 0}, + {"Reader", Type, 0}, + {"Writer", Type, 0}, + }, + "mime/quotedprintable": { + {"(*Reader).Read", Method, 5}, + {"(*Writer).Close", Method, 5}, + {"(*Writer).Write", Method, 5}, + {"NewReader", Func, 5}, + {"NewWriter", Func, 5}, + {"Reader", Type, 5}, + {"Writer", Type, 5}, + {"Writer.Binary", Field, 5}, + }, + "net": { + {"(*AddrError).Error", Method, 0}, + {"(*AddrError).Temporary", Method, 0}, + {"(*AddrError).Timeout", Method, 0}, + {"(*Buffers).Read", Method, 8}, + {"(*Buffers).WriteTo", Method, 8}, + {"(*DNSConfigError).Error", Method, 0}, + {"(*DNSConfigError).Temporary", Method, 0}, + {"(*DNSConfigError).Timeout", Method, 0}, + {"(*DNSConfigError).Unwrap", Method, 13}, + {"(*DNSError).Error", Method, 0}, + {"(*DNSError).Temporary", Method, 0}, + {"(*DNSError).Timeout", Method, 0}, + {"(*Dialer).Dial", Method, 1}, + {"(*Dialer).DialContext", Method, 7}, + {"(*Dialer).MultipathTCP", Method, 21}, + {"(*Dialer).SetMultipathTCP", Method, 21}, + {"(*IP).UnmarshalText", Method, 2}, + {"(*IPAddr).Network", Method, 0}, + {"(*IPAddr).String", Method, 0}, + {"(*IPConn).Close", Method, 0}, + {"(*IPConn).File", Method, 0}, + {"(*IPConn).LocalAddr", Method, 0}, + {"(*IPConn).Read", Method, 0}, + {"(*IPConn).ReadFrom", Method, 0}, + {"(*IPConn).ReadFromIP", Method, 0}, + {"(*IPConn).ReadMsgIP", Method, 1}, + {"(*IPConn).RemoteAddr", Method, 0}, + {"(*IPConn).SetDeadline", Method, 0}, + {"(*IPConn).SetReadBuffer", Method, 0}, + {"(*IPConn).SetReadDeadline", Method, 0}, + {"(*IPConn).SetWriteBuffer", Method, 0}, + {"(*IPConn).SetWriteDeadline", Method, 0}, + {"(*IPConn).SyscallConn", Method, 9}, + {"(*IPConn).Write", Method, 0}, + {"(*IPConn).WriteMsgIP", Method, 1}, + {"(*IPConn).WriteTo", Method, 0}, + {"(*IPConn).WriteToIP", Method, 0}, + {"(*IPNet).Contains", Method, 0}, + {"(*IPNet).Network", Method, 0}, + {"(*IPNet).String", Method, 0}, + {"(*Interface).Addrs", Method, 0}, + {"(*Interface).MulticastAddrs", Method, 0}, + 
{"(*ListenConfig).Listen", Method, 11}, + {"(*ListenConfig).ListenPacket", Method, 11}, + {"(*ListenConfig).MultipathTCP", Method, 21}, + {"(*ListenConfig).SetMultipathTCP", Method, 21}, + {"(*OpError).Error", Method, 0}, + {"(*OpError).Temporary", Method, 0}, + {"(*OpError).Timeout", Method, 0}, + {"(*OpError).Unwrap", Method, 13}, + {"(*ParseError).Error", Method, 0}, + {"(*ParseError).Temporary", Method, 17}, + {"(*ParseError).Timeout", Method, 17}, + {"(*Resolver).LookupAddr", Method, 8}, + {"(*Resolver).LookupCNAME", Method, 8}, + {"(*Resolver).LookupHost", Method, 8}, + {"(*Resolver).LookupIP", Method, 15}, + {"(*Resolver).LookupIPAddr", Method, 8}, + {"(*Resolver).LookupMX", Method, 8}, + {"(*Resolver).LookupNS", Method, 8}, + {"(*Resolver).LookupNetIP", Method, 18}, + {"(*Resolver).LookupPort", Method, 8}, + {"(*Resolver).LookupSRV", Method, 8}, + {"(*Resolver).LookupTXT", Method, 8}, + {"(*TCPAddr).AddrPort", Method, 18}, + {"(*TCPAddr).Network", Method, 0}, + {"(*TCPAddr).String", Method, 0}, + {"(*TCPConn).Close", Method, 0}, + {"(*TCPConn).CloseRead", Method, 0}, + {"(*TCPConn).CloseWrite", Method, 0}, + {"(*TCPConn).File", Method, 0}, + {"(*TCPConn).LocalAddr", Method, 0}, + {"(*TCPConn).MultipathTCP", Method, 21}, + {"(*TCPConn).Read", Method, 0}, + {"(*TCPConn).ReadFrom", Method, 0}, + {"(*TCPConn).RemoteAddr", Method, 0}, + {"(*TCPConn).SetDeadline", Method, 0}, + {"(*TCPConn).SetKeepAlive", Method, 0}, + {"(*TCPConn).SetKeepAlivePeriod", Method, 2}, + {"(*TCPConn).SetLinger", Method, 0}, + {"(*TCPConn).SetNoDelay", Method, 0}, + {"(*TCPConn).SetReadBuffer", Method, 0}, + {"(*TCPConn).SetReadDeadline", Method, 0}, + {"(*TCPConn).SetWriteBuffer", Method, 0}, + {"(*TCPConn).SetWriteDeadline", Method, 0}, + {"(*TCPConn).SyscallConn", Method, 9}, + {"(*TCPConn).Write", Method, 0}, + {"(*TCPConn).WriteTo", Method, 22}, + {"(*TCPListener).Accept", Method, 0}, + {"(*TCPListener).AcceptTCP", Method, 0}, + {"(*TCPListener).Addr", Method, 0}, + 
{"(*TCPListener).Close", Method, 0}, + {"(*TCPListener).File", Method, 0}, + {"(*TCPListener).SetDeadline", Method, 0}, + {"(*TCPListener).SyscallConn", Method, 10}, + {"(*UDPAddr).AddrPort", Method, 18}, + {"(*UDPAddr).Network", Method, 0}, + {"(*UDPAddr).String", Method, 0}, + {"(*UDPConn).Close", Method, 0}, + {"(*UDPConn).File", Method, 0}, + {"(*UDPConn).LocalAddr", Method, 0}, + {"(*UDPConn).Read", Method, 0}, + {"(*UDPConn).ReadFrom", Method, 0}, + {"(*UDPConn).ReadFromUDP", Method, 0}, + {"(*UDPConn).ReadFromUDPAddrPort", Method, 18}, + {"(*UDPConn).ReadMsgUDP", Method, 1}, + {"(*UDPConn).ReadMsgUDPAddrPort", Method, 18}, + {"(*UDPConn).RemoteAddr", Method, 0}, + {"(*UDPConn).SetDeadline", Method, 0}, + {"(*UDPConn).SetReadBuffer", Method, 0}, + {"(*UDPConn).SetReadDeadline", Method, 0}, + {"(*UDPConn).SetWriteBuffer", Method, 0}, + {"(*UDPConn).SetWriteDeadline", Method, 0}, + {"(*UDPConn).SyscallConn", Method, 9}, + {"(*UDPConn).Write", Method, 0}, + {"(*UDPConn).WriteMsgUDP", Method, 1}, + {"(*UDPConn).WriteMsgUDPAddrPort", Method, 18}, + {"(*UDPConn).WriteTo", Method, 0}, + {"(*UDPConn).WriteToUDP", Method, 0}, + {"(*UDPConn).WriteToUDPAddrPort", Method, 18}, + {"(*UnixAddr).Network", Method, 0}, + {"(*UnixAddr).String", Method, 0}, + {"(*UnixConn).Close", Method, 0}, + {"(*UnixConn).CloseRead", Method, 1}, + {"(*UnixConn).CloseWrite", Method, 1}, + {"(*UnixConn).File", Method, 0}, + {"(*UnixConn).LocalAddr", Method, 0}, + {"(*UnixConn).Read", Method, 0}, + {"(*UnixConn).ReadFrom", Method, 0}, + {"(*UnixConn).ReadFromUnix", Method, 0}, + {"(*UnixConn).ReadMsgUnix", Method, 0}, + {"(*UnixConn).RemoteAddr", Method, 0}, + {"(*UnixConn).SetDeadline", Method, 0}, + {"(*UnixConn).SetReadBuffer", Method, 0}, + {"(*UnixConn).SetReadDeadline", Method, 0}, + {"(*UnixConn).SetWriteBuffer", Method, 0}, + {"(*UnixConn).SetWriteDeadline", Method, 0}, + {"(*UnixConn).SyscallConn", Method, 9}, + {"(*UnixConn).Write", Method, 0}, + {"(*UnixConn).WriteMsgUnix", Method, 
0}, + {"(*UnixConn).WriteTo", Method, 0}, + {"(*UnixConn).WriteToUnix", Method, 0}, + {"(*UnixListener).Accept", Method, 0}, + {"(*UnixListener).AcceptUnix", Method, 0}, + {"(*UnixListener).Addr", Method, 0}, + {"(*UnixListener).Close", Method, 0}, + {"(*UnixListener).File", Method, 0}, + {"(*UnixListener).SetDeadline", Method, 0}, + {"(*UnixListener).SetUnlinkOnClose", Method, 8}, + {"(*UnixListener).SyscallConn", Method, 10}, + {"(Flags).String", Method, 0}, + {"(HardwareAddr).String", Method, 0}, + {"(IP).DefaultMask", Method, 0}, + {"(IP).Equal", Method, 0}, + {"(IP).IsGlobalUnicast", Method, 0}, + {"(IP).IsInterfaceLocalMulticast", Method, 0}, + {"(IP).IsLinkLocalMulticast", Method, 0}, + {"(IP).IsLinkLocalUnicast", Method, 0}, + {"(IP).IsLoopback", Method, 0}, + {"(IP).IsMulticast", Method, 0}, + {"(IP).IsPrivate", Method, 17}, + {"(IP).IsUnspecified", Method, 0}, + {"(IP).MarshalText", Method, 2}, + {"(IP).Mask", Method, 0}, + {"(IP).String", Method, 0}, + {"(IP).To16", Method, 0}, + {"(IP).To4", Method, 0}, + {"(IPMask).Size", Method, 0}, + {"(IPMask).String", Method, 0}, + {"(InvalidAddrError).Error", Method, 0}, + {"(InvalidAddrError).Temporary", Method, 0}, + {"(InvalidAddrError).Timeout", Method, 0}, + {"(UnknownNetworkError).Error", Method, 0}, + {"(UnknownNetworkError).Temporary", Method, 0}, + {"(UnknownNetworkError).Timeout", Method, 0}, + {"Addr", Type, 0}, + {"AddrError", Type, 0}, + {"AddrError.Addr", Field, 0}, + {"AddrError.Err", Field, 0}, + {"Buffers", Type, 8}, + {"CIDRMask", Func, 0}, + {"Conn", Type, 0}, + {"DNSConfigError", Type, 0}, + {"DNSConfigError.Err", Field, 0}, + {"DNSError", Type, 0}, + {"DNSError.Err", Field, 0}, + {"DNSError.IsNotFound", Field, 13}, + {"DNSError.IsTemporary", Field, 6}, + {"DNSError.IsTimeout", Field, 0}, + {"DNSError.Name", Field, 0}, + {"DNSError.Server", Field, 0}, + {"DefaultResolver", Var, 8}, + {"Dial", Func, 0}, + {"DialIP", Func, 0}, + {"DialTCP", Func, 0}, + {"DialTimeout", Func, 0}, + {"DialUDP", 
Func, 0}, + {"DialUnix", Func, 0}, + {"Dialer", Type, 1}, + {"Dialer.Cancel", Field, 6}, + {"Dialer.Control", Field, 11}, + {"Dialer.ControlContext", Field, 20}, + {"Dialer.Deadline", Field, 1}, + {"Dialer.DualStack", Field, 2}, + {"Dialer.FallbackDelay", Field, 5}, + {"Dialer.KeepAlive", Field, 3}, + {"Dialer.LocalAddr", Field, 1}, + {"Dialer.Resolver", Field, 8}, + {"Dialer.Timeout", Field, 1}, + {"ErrClosed", Var, 16}, + {"ErrWriteToConnected", Var, 0}, + {"Error", Type, 0}, + {"FileConn", Func, 0}, + {"FileListener", Func, 0}, + {"FilePacketConn", Func, 0}, + {"FlagBroadcast", Const, 0}, + {"FlagLoopback", Const, 0}, + {"FlagMulticast", Const, 0}, + {"FlagPointToPoint", Const, 0}, + {"FlagRunning", Const, 20}, + {"FlagUp", Const, 0}, + {"Flags", Type, 0}, + {"HardwareAddr", Type, 0}, + {"IP", Type, 0}, + {"IPAddr", Type, 0}, + {"IPAddr.IP", Field, 0}, + {"IPAddr.Zone", Field, 1}, + {"IPConn", Type, 0}, + {"IPMask", Type, 0}, + {"IPNet", Type, 0}, + {"IPNet.IP", Field, 0}, + {"IPNet.Mask", Field, 0}, + {"IPv4", Func, 0}, + {"IPv4Mask", Func, 0}, + {"IPv4allrouter", Var, 0}, + {"IPv4allsys", Var, 0}, + {"IPv4bcast", Var, 0}, + {"IPv4len", Const, 0}, + {"IPv4zero", Var, 0}, + {"IPv6interfacelocalallnodes", Var, 0}, + {"IPv6len", Const, 0}, + {"IPv6linklocalallnodes", Var, 0}, + {"IPv6linklocalallrouters", Var, 0}, + {"IPv6loopback", Var, 0}, + {"IPv6unspecified", Var, 0}, + {"IPv6zero", Var, 0}, + {"Interface", Type, 0}, + {"Interface.Flags", Field, 0}, + {"Interface.HardwareAddr", Field, 0}, + {"Interface.Index", Field, 0}, + {"Interface.MTU", Field, 0}, + {"Interface.Name", Field, 0}, + {"InterfaceAddrs", Func, 0}, + {"InterfaceByIndex", Func, 0}, + {"InterfaceByName", Func, 0}, + {"Interfaces", Func, 0}, + {"InvalidAddrError", Type, 0}, + {"JoinHostPort", Func, 0}, + {"Listen", Func, 0}, + {"ListenConfig", Type, 11}, + {"ListenConfig.Control", Field, 11}, + {"ListenConfig.KeepAlive", Field, 13}, + {"ListenIP", Func, 0}, + {"ListenMulticastUDP", Func, 0}, + 
{"ListenPacket", Func, 0}, + {"ListenTCP", Func, 0}, + {"ListenUDP", Func, 0}, + {"ListenUnix", Func, 0}, + {"ListenUnixgram", Func, 0}, + {"Listener", Type, 0}, + {"LookupAddr", Func, 0}, + {"LookupCNAME", Func, 0}, + {"LookupHost", Func, 0}, + {"LookupIP", Func, 0}, + {"LookupMX", Func, 0}, + {"LookupNS", Func, 1}, + {"LookupPort", Func, 0}, + {"LookupSRV", Func, 0}, + {"LookupTXT", Func, 0}, + {"MX", Type, 0}, + {"MX.Host", Field, 0}, + {"MX.Pref", Field, 0}, + {"NS", Type, 1}, + {"NS.Host", Field, 1}, + {"OpError", Type, 0}, + {"OpError.Addr", Field, 0}, + {"OpError.Err", Field, 0}, + {"OpError.Net", Field, 0}, + {"OpError.Op", Field, 0}, + {"OpError.Source", Field, 5}, + {"PacketConn", Type, 0}, + {"ParseCIDR", Func, 0}, + {"ParseError", Type, 0}, + {"ParseError.Text", Field, 0}, + {"ParseError.Type", Field, 0}, + {"ParseIP", Func, 0}, + {"ParseMAC", Func, 0}, + {"Pipe", Func, 0}, + {"ResolveIPAddr", Func, 0}, + {"ResolveTCPAddr", Func, 0}, + {"ResolveUDPAddr", Func, 0}, + {"ResolveUnixAddr", Func, 0}, + {"Resolver", Type, 8}, + {"Resolver.Dial", Field, 9}, + {"Resolver.PreferGo", Field, 8}, + {"Resolver.StrictErrors", Field, 9}, + {"SRV", Type, 0}, + {"SRV.Port", Field, 0}, + {"SRV.Priority", Field, 0}, + {"SRV.Target", Field, 0}, + {"SRV.Weight", Field, 0}, + {"SplitHostPort", Func, 0}, + {"TCPAddr", Type, 0}, + {"TCPAddr.IP", Field, 0}, + {"TCPAddr.Port", Field, 0}, + {"TCPAddr.Zone", Field, 1}, + {"TCPAddrFromAddrPort", Func, 18}, + {"TCPConn", Type, 0}, + {"TCPListener", Type, 0}, + {"UDPAddr", Type, 0}, + {"UDPAddr.IP", Field, 0}, + {"UDPAddr.Port", Field, 0}, + {"UDPAddr.Zone", Field, 1}, + {"UDPAddrFromAddrPort", Func, 18}, + {"UDPConn", Type, 0}, + {"UnixAddr", Type, 0}, + {"UnixAddr.Name", Field, 0}, + {"UnixAddr.Net", Field, 0}, + {"UnixConn", Type, 0}, + {"UnixListener", Type, 0}, + {"UnknownNetworkError", Type, 0}, + }, + "net/http": { + {"(*Client).CloseIdleConnections", Method, 12}, + {"(*Client).Do", Method, 0}, + {"(*Client).Get", Method, 0}, 
+ {"(*Client).Head", Method, 0}, + {"(*Client).Post", Method, 0}, + {"(*Client).PostForm", Method, 0}, + {"(*Cookie).String", Method, 0}, + {"(*Cookie).Valid", Method, 18}, + {"(*MaxBytesError).Error", Method, 19}, + {"(*ProtocolError).Error", Method, 0}, + {"(*ProtocolError).Is", Method, 21}, + {"(*Request).AddCookie", Method, 0}, + {"(*Request).BasicAuth", Method, 4}, + {"(*Request).Clone", Method, 13}, + {"(*Request).Context", Method, 7}, + {"(*Request).Cookie", Method, 0}, + {"(*Request).Cookies", Method, 0}, + {"(*Request).FormFile", Method, 0}, + {"(*Request).FormValue", Method, 0}, + {"(*Request).MultipartReader", Method, 0}, + {"(*Request).ParseForm", Method, 0}, + {"(*Request).ParseMultipartForm", Method, 0}, + {"(*Request).PathValue", Method, 22}, + {"(*Request).PostFormValue", Method, 1}, + {"(*Request).ProtoAtLeast", Method, 0}, + {"(*Request).Referer", Method, 0}, + {"(*Request).SetBasicAuth", Method, 0}, + {"(*Request).SetPathValue", Method, 22}, + {"(*Request).UserAgent", Method, 0}, + {"(*Request).WithContext", Method, 7}, + {"(*Request).Write", Method, 0}, + {"(*Request).WriteProxy", Method, 0}, + {"(*Response).Cookies", Method, 0}, + {"(*Response).Location", Method, 0}, + {"(*Response).ProtoAtLeast", Method, 0}, + {"(*Response).Write", Method, 0}, + {"(*ResponseController).EnableFullDuplex", Method, 21}, + {"(*ResponseController).Flush", Method, 20}, + {"(*ResponseController).Hijack", Method, 20}, + {"(*ResponseController).SetReadDeadline", Method, 20}, + {"(*ResponseController).SetWriteDeadline", Method, 20}, + {"(*ServeMux).Handle", Method, 0}, + {"(*ServeMux).HandleFunc", Method, 0}, + {"(*ServeMux).Handler", Method, 1}, + {"(*ServeMux).ServeHTTP", Method, 0}, + {"(*Server).Close", Method, 8}, + {"(*Server).ListenAndServe", Method, 0}, + {"(*Server).ListenAndServeTLS", Method, 0}, + {"(*Server).RegisterOnShutdown", Method, 9}, + {"(*Server).Serve", Method, 0}, + {"(*Server).ServeTLS", Method, 9}, + {"(*Server).SetKeepAlivesEnabled", Method, 3}, 
+ {"(*Server).Shutdown", Method, 8}, + {"(*Transport).CancelRequest", Method, 1}, + {"(*Transport).Clone", Method, 13}, + {"(*Transport).CloseIdleConnections", Method, 0}, + {"(*Transport).RegisterProtocol", Method, 0}, + {"(*Transport).RoundTrip", Method, 0}, + {"(ConnState).String", Method, 3}, + {"(Dir).Open", Method, 0}, + {"(HandlerFunc).ServeHTTP", Method, 0}, + {"(Header).Add", Method, 0}, + {"(Header).Clone", Method, 13}, + {"(Header).Del", Method, 0}, + {"(Header).Get", Method, 0}, + {"(Header).Set", Method, 0}, + {"(Header).Values", Method, 14}, + {"(Header).Write", Method, 0}, + {"(Header).WriteSubset", Method, 0}, + {"AllowQuerySemicolons", Func, 17}, + {"CanonicalHeaderKey", Func, 0}, + {"Client", Type, 0}, + {"Client.CheckRedirect", Field, 0}, + {"Client.Jar", Field, 0}, + {"Client.Timeout", Field, 3}, + {"Client.Transport", Field, 0}, + {"CloseNotifier", Type, 1}, + {"ConnState", Type, 3}, + {"Cookie", Type, 0}, + {"Cookie.Domain", Field, 0}, + {"Cookie.Expires", Field, 0}, + {"Cookie.HttpOnly", Field, 0}, + {"Cookie.MaxAge", Field, 0}, + {"Cookie.Name", Field, 0}, + {"Cookie.Path", Field, 0}, + {"Cookie.Raw", Field, 0}, + {"Cookie.RawExpires", Field, 0}, + {"Cookie.SameSite", Field, 11}, + {"Cookie.Secure", Field, 0}, + {"Cookie.Unparsed", Field, 0}, + {"Cookie.Value", Field, 0}, + {"CookieJar", Type, 0}, + {"DefaultClient", Var, 0}, + {"DefaultMaxHeaderBytes", Const, 0}, + {"DefaultMaxIdleConnsPerHost", Const, 0}, + {"DefaultServeMux", Var, 0}, + {"DefaultTransport", Var, 0}, + {"DetectContentType", Func, 0}, + {"Dir", Type, 0}, + {"ErrAbortHandler", Var, 8}, + {"ErrBodyNotAllowed", Var, 0}, + {"ErrBodyReadAfterClose", Var, 0}, + {"ErrContentLength", Var, 0}, + {"ErrHandlerTimeout", Var, 0}, + {"ErrHeaderTooLong", Var, 0}, + {"ErrHijacked", Var, 0}, + {"ErrLineTooLong", Var, 0}, + {"ErrMissingBoundary", Var, 0}, + {"ErrMissingContentLength", Var, 0}, + {"ErrMissingFile", Var, 0}, + {"ErrNoCookie", Var, 0}, + {"ErrNoLocation", Var, 0}, + 
{"ErrNotMultipart", Var, 0}, + {"ErrNotSupported", Var, 0}, + {"ErrSchemeMismatch", Var, 21}, + {"ErrServerClosed", Var, 8}, + {"ErrShortBody", Var, 0}, + {"ErrSkipAltProtocol", Var, 6}, + {"ErrUnexpectedTrailer", Var, 0}, + {"ErrUseLastResponse", Var, 7}, + {"ErrWriteAfterFlush", Var, 0}, + {"Error", Func, 0}, + {"FS", Func, 16}, + {"File", Type, 0}, + {"FileServer", Func, 0}, + {"FileServerFS", Func, 22}, + {"FileSystem", Type, 0}, + {"Flusher", Type, 0}, + {"Get", Func, 0}, + {"Handle", Func, 0}, + {"HandleFunc", Func, 0}, + {"Handler", Type, 0}, + {"HandlerFunc", Type, 0}, + {"Head", Func, 0}, + {"Header", Type, 0}, + {"Hijacker", Type, 0}, + {"ListenAndServe", Func, 0}, + {"ListenAndServeTLS", Func, 0}, + {"LocalAddrContextKey", Var, 7}, + {"MaxBytesError", Type, 19}, + {"MaxBytesError.Limit", Field, 19}, + {"MaxBytesHandler", Func, 18}, + {"MaxBytesReader", Func, 0}, + {"MethodConnect", Const, 6}, + {"MethodDelete", Const, 6}, + {"MethodGet", Const, 6}, + {"MethodHead", Const, 6}, + {"MethodOptions", Const, 6}, + {"MethodPatch", Const, 6}, + {"MethodPost", Const, 6}, + {"MethodPut", Const, 6}, + {"MethodTrace", Const, 6}, + {"NewFileTransport", Func, 0}, + {"NewFileTransportFS", Func, 22}, + {"NewRequest", Func, 0}, + {"NewRequestWithContext", Func, 13}, + {"NewResponseController", Func, 20}, + {"NewServeMux", Func, 0}, + {"NoBody", Var, 8}, + {"NotFound", Func, 0}, + {"NotFoundHandler", Func, 0}, + {"ParseHTTPVersion", Func, 0}, + {"ParseTime", Func, 1}, + {"Post", Func, 0}, + {"PostForm", Func, 0}, + {"ProtocolError", Type, 0}, + {"ProtocolError.ErrorString", Field, 0}, + {"ProxyFromEnvironment", Func, 0}, + {"ProxyURL", Func, 0}, + {"PushOptions", Type, 8}, + {"PushOptions.Header", Field, 8}, + {"PushOptions.Method", Field, 8}, + {"Pusher", Type, 8}, + {"ReadRequest", Func, 0}, + {"ReadResponse", Func, 0}, + {"Redirect", Func, 0}, + {"RedirectHandler", Func, 0}, + {"Request", Type, 0}, + {"Request.Body", Field, 0}, + {"Request.Cancel", Field, 5}, + 
{"Request.Close", Field, 0}, + {"Request.ContentLength", Field, 0}, + {"Request.Form", Field, 0}, + {"Request.GetBody", Field, 8}, + {"Request.Header", Field, 0}, + {"Request.Host", Field, 0}, + {"Request.Method", Field, 0}, + {"Request.MultipartForm", Field, 0}, + {"Request.PostForm", Field, 1}, + {"Request.Proto", Field, 0}, + {"Request.ProtoMajor", Field, 0}, + {"Request.ProtoMinor", Field, 0}, + {"Request.RemoteAddr", Field, 0}, + {"Request.RequestURI", Field, 0}, + {"Request.Response", Field, 7}, + {"Request.TLS", Field, 0}, + {"Request.Trailer", Field, 0}, + {"Request.TransferEncoding", Field, 0}, + {"Request.URL", Field, 0}, + {"Response", Type, 0}, + {"Response.Body", Field, 0}, + {"Response.Close", Field, 0}, + {"Response.ContentLength", Field, 0}, + {"Response.Header", Field, 0}, + {"Response.Proto", Field, 0}, + {"Response.ProtoMajor", Field, 0}, + {"Response.ProtoMinor", Field, 0}, + {"Response.Request", Field, 0}, + {"Response.Status", Field, 0}, + {"Response.StatusCode", Field, 0}, + {"Response.TLS", Field, 3}, + {"Response.Trailer", Field, 0}, + {"Response.TransferEncoding", Field, 0}, + {"Response.Uncompressed", Field, 7}, + {"ResponseController", Type, 20}, + {"ResponseWriter", Type, 0}, + {"RoundTripper", Type, 0}, + {"SameSite", Type, 11}, + {"SameSiteDefaultMode", Const, 11}, + {"SameSiteLaxMode", Const, 11}, + {"SameSiteNoneMode", Const, 13}, + {"SameSiteStrictMode", Const, 11}, + {"Serve", Func, 0}, + {"ServeContent", Func, 0}, + {"ServeFile", Func, 0}, + {"ServeFileFS", Func, 22}, + {"ServeMux", Type, 0}, + {"ServeTLS", Func, 9}, + {"Server", Type, 0}, + {"Server.Addr", Field, 0}, + {"Server.BaseContext", Field, 13}, + {"Server.ConnContext", Field, 13}, + {"Server.ConnState", Field, 3}, + {"Server.DisableGeneralOptionsHandler", Field, 20}, + {"Server.ErrorLog", Field, 3}, + {"Server.Handler", Field, 0}, + {"Server.IdleTimeout", Field, 8}, + {"Server.MaxHeaderBytes", Field, 0}, + {"Server.ReadHeaderTimeout", Field, 8}, + {"Server.ReadTimeout", 
Field, 0}, + {"Server.TLSConfig", Field, 0}, + {"Server.TLSNextProto", Field, 1}, + {"Server.WriteTimeout", Field, 0}, + {"ServerContextKey", Var, 7}, + {"SetCookie", Func, 0}, + {"StateActive", Const, 3}, + {"StateClosed", Const, 3}, + {"StateHijacked", Const, 3}, + {"StateIdle", Const, 3}, + {"StateNew", Const, 3}, + {"StatusAccepted", Const, 0}, + {"StatusAlreadyReported", Const, 7}, + {"StatusBadGateway", Const, 0}, + {"StatusBadRequest", Const, 0}, + {"StatusConflict", Const, 0}, + {"StatusContinue", Const, 0}, + {"StatusCreated", Const, 0}, + {"StatusEarlyHints", Const, 13}, + {"StatusExpectationFailed", Const, 0}, + {"StatusFailedDependency", Const, 7}, + {"StatusForbidden", Const, 0}, + {"StatusFound", Const, 0}, + {"StatusGatewayTimeout", Const, 0}, + {"StatusGone", Const, 0}, + {"StatusHTTPVersionNotSupported", Const, 0}, + {"StatusIMUsed", Const, 7}, + {"StatusInsufficientStorage", Const, 7}, + {"StatusInternalServerError", Const, 0}, + {"StatusLengthRequired", Const, 0}, + {"StatusLocked", Const, 7}, + {"StatusLoopDetected", Const, 7}, + {"StatusMethodNotAllowed", Const, 0}, + {"StatusMisdirectedRequest", Const, 11}, + {"StatusMovedPermanently", Const, 0}, + {"StatusMultiStatus", Const, 7}, + {"StatusMultipleChoices", Const, 0}, + {"StatusNetworkAuthenticationRequired", Const, 6}, + {"StatusNoContent", Const, 0}, + {"StatusNonAuthoritativeInfo", Const, 0}, + {"StatusNotAcceptable", Const, 0}, + {"StatusNotExtended", Const, 7}, + {"StatusNotFound", Const, 0}, + {"StatusNotImplemented", Const, 0}, + {"StatusNotModified", Const, 0}, + {"StatusOK", Const, 0}, + {"StatusPartialContent", Const, 0}, + {"StatusPaymentRequired", Const, 0}, + {"StatusPermanentRedirect", Const, 7}, + {"StatusPreconditionFailed", Const, 0}, + {"StatusPreconditionRequired", Const, 6}, + {"StatusProcessing", Const, 7}, + {"StatusProxyAuthRequired", Const, 0}, + {"StatusRequestEntityTooLarge", Const, 0}, + {"StatusRequestHeaderFieldsTooLarge", Const, 6}, + {"StatusRequestTimeout", 
Const, 0}, + {"StatusRequestURITooLong", Const, 0}, + {"StatusRequestedRangeNotSatisfiable", Const, 0}, + {"StatusResetContent", Const, 0}, + {"StatusSeeOther", Const, 0}, + {"StatusServiceUnavailable", Const, 0}, + {"StatusSwitchingProtocols", Const, 0}, + {"StatusTeapot", Const, 0}, + {"StatusTemporaryRedirect", Const, 0}, + {"StatusText", Func, 0}, + {"StatusTooEarly", Const, 12}, + {"StatusTooManyRequests", Const, 6}, + {"StatusUnauthorized", Const, 0}, + {"StatusUnavailableForLegalReasons", Const, 6}, + {"StatusUnprocessableEntity", Const, 7}, + {"StatusUnsupportedMediaType", Const, 0}, + {"StatusUpgradeRequired", Const, 7}, + {"StatusUseProxy", Const, 0}, + {"StatusVariantAlsoNegotiates", Const, 7}, + {"StripPrefix", Func, 0}, + {"TimeFormat", Const, 0}, + {"TimeoutHandler", Func, 0}, + {"TrailerPrefix", Const, 8}, + {"Transport", Type, 0}, + {"Transport.Dial", Field, 0}, + {"Transport.DialContext", Field, 7}, + {"Transport.DialTLS", Field, 4}, + {"Transport.DialTLSContext", Field, 14}, + {"Transport.DisableCompression", Field, 0}, + {"Transport.DisableKeepAlives", Field, 0}, + {"Transport.ExpectContinueTimeout", Field, 6}, + {"Transport.ForceAttemptHTTP2", Field, 13}, + {"Transport.GetProxyConnectHeader", Field, 16}, + {"Transport.IdleConnTimeout", Field, 7}, + {"Transport.MaxConnsPerHost", Field, 11}, + {"Transport.MaxIdleConns", Field, 7}, + {"Transport.MaxIdleConnsPerHost", Field, 0}, + {"Transport.MaxResponseHeaderBytes", Field, 7}, + {"Transport.OnProxyConnectResponse", Field, 20}, + {"Transport.Proxy", Field, 0}, + {"Transport.ProxyConnectHeader", Field, 8}, + {"Transport.ReadBufferSize", Field, 13}, + {"Transport.ResponseHeaderTimeout", Field, 1}, + {"Transport.TLSClientConfig", Field, 0}, + {"Transport.TLSHandshakeTimeout", Field, 3}, + {"Transport.TLSNextProto", Field, 6}, + {"Transport.WriteBufferSize", Field, 13}, + }, + "net/http/cgi": { + {"(*Handler).ServeHTTP", Method, 0}, + {"Handler", Type, 0}, + {"Handler.Args", Field, 0}, + {"Handler.Dir", 
Field, 0}, + {"Handler.Env", Field, 0}, + {"Handler.InheritEnv", Field, 0}, + {"Handler.Logger", Field, 0}, + {"Handler.Path", Field, 0}, + {"Handler.PathLocationHandler", Field, 0}, + {"Handler.Root", Field, 0}, + {"Handler.Stderr", Field, 7}, + {"Request", Func, 0}, + {"RequestFromMap", Func, 0}, + {"Serve", Func, 0}, + }, + "net/http/cookiejar": { + {"(*Jar).Cookies", Method, 1}, + {"(*Jar).SetCookies", Method, 1}, + {"Jar", Type, 1}, + {"New", Func, 1}, + {"Options", Type, 1}, + {"Options.PublicSuffixList", Field, 1}, + {"PublicSuffixList", Type, 1}, + }, + "net/http/fcgi": { + {"ErrConnClosed", Var, 5}, + {"ErrRequestAborted", Var, 5}, + {"ProcessEnv", Func, 9}, + {"Serve", Func, 0}, + }, + "net/http/httptest": { + {"(*ResponseRecorder).Flush", Method, 0}, + {"(*ResponseRecorder).Header", Method, 0}, + {"(*ResponseRecorder).Result", Method, 7}, + {"(*ResponseRecorder).Write", Method, 0}, + {"(*ResponseRecorder).WriteHeader", Method, 0}, + {"(*ResponseRecorder).WriteString", Method, 6}, + {"(*Server).Certificate", Method, 9}, + {"(*Server).Client", Method, 9}, + {"(*Server).Close", Method, 0}, + {"(*Server).CloseClientConnections", Method, 0}, + {"(*Server).Start", Method, 0}, + {"(*Server).StartTLS", Method, 0}, + {"DefaultRemoteAddr", Const, 0}, + {"NewRecorder", Func, 0}, + {"NewRequest", Func, 7}, + {"NewServer", Func, 0}, + {"NewTLSServer", Func, 0}, + {"NewUnstartedServer", Func, 0}, + {"ResponseRecorder", Type, 0}, + {"ResponseRecorder.Body", Field, 0}, + {"ResponseRecorder.Code", Field, 0}, + {"ResponseRecorder.Flushed", Field, 0}, + {"ResponseRecorder.HeaderMap", Field, 0}, + {"Server", Type, 0}, + {"Server.Config", Field, 0}, + {"Server.EnableHTTP2", Field, 14}, + {"Server.Listener", Field, 0}, + {"Server.TLS", Field, 0}, + {"Server.URL", Field, 0}, + }, + "net/http/httptrace": { + {"ClientTrace", Type, 7}, + {"ClientTrace.ConnectDone", Field, 7}, + {"ClientTrace.ConnectStart", Field, 7}, + {"ClientTrace.DNSDone", Field, 7}, + {"ClientTrace.DNSStart", 
Field, 7}, + {"ClientTrace.GetConn", Field, 7}, + {"ClientTrace.Got100Continue", Field, 7}, + {"ClientTrace.Got1xxResponse", Field, 11}, + {"ClientTrace.GotConn", Field, 7}, + {"ClientTrace.GotFirstResponseByte", Field, 7}, + {"ClientTrace.PutIdleConn", Field, 7}, + {"ClientTrace.TLSHandshakeDone", Field, 8}, + {"ClientTrace.TLSHandshakeStart", Field, 8}, + {"ClientTrace.Wait100Continue", Field, 7}, + {"ClientTrace.WroteHeaderField", Field, 11}, + {"ClientTrace.WroteHeaders", Field, 7}, + {"ClientTrace.WroteRequest", Field, 7}, + {"ContextClientTrace", Func, 7}, + {"DNSDoneInfo", Type, 7}, + {"DNSDoneInfo.Addrs", Field, 7}, + {"DNSDoneInfo.Coalesced", Field, 7}, + {"DNSDoneInfo.Err", Field, 7}, + {"DNSStartInfo", Type, 7}, + {"DNSStartInfo.Host", Field, 7}, + {"GotConnInfo", Type, 7}, + {"GotConnInfo.Conn", Field, 7}, + {"GotConnInfo.IdleTime", Field, 7}, + {"GotConnInfo.Reused", Field, 7}, + {"GotConnInfo.WasIdle", Field, 7}, + {"WithClientTrace", Func, 7}, + {"WroteRequestInfo", Type, 7}, + {"WroteRequestInfo.Err", Field, 7}, + }, + "net/http/httputil": { + {"(*ClientConn).Close", Method, 0}, + {"(*ClientConn).Do", Method, 0}, + {"(*ClientConn).Hijack", Method, 0}, + {"(*ClientConn).Pending", Method, 0}, + {"(*ClientConn).Read", Method, 0}, + {"(*ClientConn).Write", Method, 0}, + {"(*ProxyRequest).SetURL", Method, 20}, + {"(*ProxyRequest).SetXForwarded", Method, 20}, + {"(*ReverseProxy).ServeHTTP", Method, 0}, + {"(*ServerConn).Close", Method, 0}, + {"(*ServerConn).Hijack", Method, 0}, + {"(*ServerConn).Pending", Method, 0}, + {"(*ServerConn).Read", Method, 0}, + {"(*ServerConn).Write", Method, 0}, + {"BufferPool", Type, 6}, + {"ClientConn", Type, 0}, + {"DumpRequest", Func, 0}, + {"DumpRequestOut", Func, 0}, + {"DumpResponse", Func, 0}, + {"ErrClosed", Var, 0}, + {"ErrLineTooLong", Var, 0}, + {"ErrPersistEOF", Var, 0}, + {"ErrPipeline", Var, 0}, + {"NewChunkedReader", Func, 0}, + {"NewChunkedWriter", Func, 0}, + {"NewClientConn", Func, 0}, + 
{"NewProxyClientConn", Func, 0}, + {"NewServerConn", Func, 0}, + {"NewSingleHostReverseProxy", Func, 0}, + {"ProxyRequest", Type, 20}, + {"ProxyRequest.In", Field, 20}, + {"ProxyRequest.Out", Field, 20}, + {"ReverseProxy", Type, 0}, + {"ReverseProxy.BufferPool", Field, 6}, + {"ReverseProxy.Director", Field, 0}, + {"ReverseProxy.ErrorHandler", Field, 11}, + {"ReverseProxy.ErrorLog", Field, 4}, + {"ReverseProxy.FlushInterval", Field, 0}, + {"ReverseProxy.ModifyResponse", Field, 8}, + {"ReverseProxy.Rewrite", Field, 20}, + {"ReverseProxy.Transport", Field, 0}, + {"ServerConn", Type, 0}, + }, + "net/http/pprof": { + {"Cmdline", Func, 0}, + {"Handler", Func, 0}, + {"Index", Func, 0}, + {"Profile", Func, 0}, + {"Symbol", Func, 0}, + {"Trace", Func, 5}, + }, + "net/mail": { + {"(*Address).String", Method, 0}, + {"(*AddressParser).Parse", Method, 5}, + {"(*AddressParser).ParseList", Method, 5}, + {"(Header).AddressList", Method, 0}, + {"(Header).Date", Method, 0}, + {"(Header).Get", Method, 0}, + {"Address", Type, 0}, + {"Address.Address", Field, 0}, + {"Address.Name", Field, 0}, + {"AddressParser", Type, 5}, + {"AddressParser.WordDecoder", Field, 5}, + {"ErrHeaderNotPresent", Var, 0}, + {"Header", Type, 0}, + {"Message", Type, 0}, + {"Message.Body", Field, 0}, + {"Message.Header", Field, 0}, + {"ParseAddress", Func, 1}, + {"ParseAddressList", Func, 1}, + {"ParseDate", Func, 8}, + {"ReadMessage", Func, 0}, + }, + "net/netip": { + {"(*Addr).UnmarshalBinary", Method, 18}, + {"(*Addr).UnmarshalText", Method, 18}, + {"(*AddrPort).UnmarshalBinary", Method, 18}, + {"(*AddrPort).UnmarshalText", Method, 18}, + {"(*Prefix).UnmarshalBinary", Method, 18}, + {"(*Prefix).UnmarshalText", Method, 18}, + {"(Addr).AppendTo", Method, 18}, + {"(Addr).As16", Method, 18}, + {"(Addr).As4", Method, 18}, + {"(Addr).AsSlice", Method, 18}, + {"(Addr).BitLen", Method, 18}, + {"(Addr).Compare", Method, 18}, + {"(Addr).Is4", Method, 18}, + {"(Addr).Is4In6", Method, 18}, + {"(Addr).Is6", Method, 18}, + 
{"(Addr).IsGlobalUnicast", Method, 18}, + {"(Addr).IsInterfaceLocalMulticast", Method, 18}, + {"(Addr).IsLinkLocalMulticast", Method, 18}, + {"(Addr).IsLinkLocalUnicast", Method, 18}, + {"(Addr).IsLoopback", Method, 18}, + {"(Addr).IsMulticast", Method, 18}, + {"(Addr).IsPrivate", Method, 18}, + {"(Addr).IsUnspecified", Method, 18}, + {"(Addr).IsValid", Method, 18}, + {"(Addr).Less", Method, 18}, + {"(Addr).MarshalBinary", Method, 18}, + {"(Addr).MarshalText", Method, 18}, + {"(Addr).Next", Method, 18}, + {"(Addr).Prefix", Method, 18}, + {"(Addr).Prev", Method, 18}, + {"(Addr).String", Method, 18}, + {"(Addr).StringExpanded", Method, 18}, + {"(Addr).Unmap", Method, 18}, + {"(Addr).WithZone", Method, 18}, + {"(Addr).Zone", Method, 18}, + {"(AddrPort).Addr", Method, 18}, + {"(AddrPort).AppendTo", Method, 18}, + {"(AddrPort).Compare", Method, 22}, + {"(AddrPort).IsValid", Method, 18}, + {"(AddrPort).MarshalBinary", Method, 18}, + {"(AddrPort).MarshalText", Method, 18}, + {"(AddrPort).Port", Method, 18}, + {"(AddrPort).String", Method, 18}, + {"(Prefix).Addr", Method, 18}, + {"(Prefix).AppendTo", Method, 18}, + {"(Prefix).Bits", Method, 18}, + {"(Prefix).Contains", Method, 18}, + {"(Prefix).IsSingleIP", Method, 18}, + {"(Prefix).IsValid", Method, 18}, + {"(Prefix).MarshalBinary", Method, 18}, + {"(Prefix).MarshalText", Method, 18}, + {"(Prefix).Masked", Method, 18}, + {"(Prefix).Overlaps", Method, 18}, + {"(Prefix).String", Method, 18}, + {"Addr", Type, 18}, + {"AddrFrom16", Func, 18}, + {"AddrFrom4", Func, 18}, + {"AddrFromSlice", Func, 18}, + {"AddrPort", Type, 18}, + {"AddrPortFrom", Func, 18}, + {"IPv4Unspecified", Func, 18}, + {"IPv6LinkLocalAllNodes", Func, 18}, + {"IPv6LinkLocalAllRouters", Func, 20}, + {"IPv6Loopback", Func, 20}, + {"IPv6Unspecified", Func, 18}, + {"MustParseAddr", Func, 18}, + {"MustParseAddrPort", Func, 18}, + {"MustParsePrefix", Func, 18}, + {"ParseAddr", Func, 18}, + {"ParseAddrPort", Func, 18}, + {"ParsePrefix", Func, 18}, + {"Prefix", 
Type, 18}, + {"PrefixFrom", Func, 18}, + }, + "net/rpc": { + {"(*Client).Call", Method, 0}, + {"(*Client).Close", Method, 0}, + {"(*Client).Go", Method, 0}, + {"(*Server).Accept", Method, 0}, + {"(*Server).HandleHTTP", Method, 0}, + {"(*Server).Register", Method, 0}, + {"(*Server).RegisterName", Method, 0}, + {"(*Server).ServeCodec", Method, 0}, + {"(*Server).ServeConn", Method, 0}, + {"(*Server).ServeHTTP", Method, 0}, + {"(*Server).ServeRequest", Method, 0}, + {"(ServerError).Error", Method, 0}, + {"Accept", Func, 0}, + {"Call", Type, 0}, + {"Call.Args", Field, 0}, + {"Call.Done", Field, 0}, + {"Call.Error", Field, 0}, + {"Call.Reply", Field, 0}, + {"Call.ServiceMethod", Field, 0}, + {"Client", Type, 0}, + {"ClientCodec", Type, 0}, + {"DefaultDebugPath", Const, 0}, + {"DefaultRPCPath", Const, 0}, + {"DefaultServer", Var, 0}, + {"Dial", Func, 0}, + {"DialHTTP", Func, 0}, + {"DialHTTPPath", Func, 0}, + {"ErrShutdown", Var, 0}, + {"HandleHTTP", Func, 0}, + {"NewClient", Func, 0}, + {"NewClientWithCodec", Func, 0}, + {"NewServer", Func, 0}, + {"Register", Func, 0}, + {"RegisterName", Func, 0}, + {"Request", Type, 0}, + {"Request.Seq", Field, 0}, + {"Request.ServiceMethod", Field, 0}, + {"Response", Type, 0}, + {"Response.Error", Field, 0}, + {"Response.Seq", Field, 0}, + {"Response.ServiceMethod", Field, 0}, + {"ServeCodec", Func, 0}, + {"ServeConn", Func, 0}, + {"ServeRequest", Func, 0}, + {"Server", Type, 0}, + {"ServerCodec", Type, 0}, + {"ServerError", Type, 0}, + }, + "net/rpc/jsonrpc": { + {"Dial", Func, 0}, + {"NewClient", Func, 0}, + {"NewClientCodec", Func, 0}, + {"NewServerCodec", Func, 0}, + {"ServeConn", Func, 0}, + }, + "net/smtp": { + {"(*Client).Auth", Method, 0}, + {"(*Client).Close", Method, 2}, + {"(*Client).Data", Method, 0}, + {"(*Client).Extension", Method, 0}, + {"(*Client).Hello", Method, 1}, + {"(*Client).Mail", Method, 0}, + {"(*Client).Noop", Method, 10}, + {"(*Client).Quit", Method, 0}, + {"(*Client).Rcpt", Method, 0}, + {"(*Client).Reset", 
Method, 0}, + {"(*Client).StartTLS", Method, 0}, + {"(*Client).TLSConnectionState", Method, 5}, + {"(*Client).Verify", Method, 0}, + {"Auth", Type, 0}, + {"CRAMMD5Auth", Func, 0}, + {"Client", Type, 0}, + {"Client.Text", Field, 0}, + {"Dial", Func, 0}, + {"NewClient", Func, 0}, + {"PlainAuth", Func, 0}, + {"SendMail", Func, 0}, + {"ServerInfo", Type, 0}, + {"ServerInfo.Auth", Field, 0}, + {"ServerInfo.Name", Field, 0}, + {"ServerInfo.TLS", Field, 0}, + }, + "net/textproto": { + {"(*Conn).Close", Method, 0}, + {"(*Conn).Cmd", Method, 0}, + {"(*Conn).DotReader", Method, 0}, + {"(*Conn).DotWriter", Method, 0}, + {"(*Conn).EndRequest", Method, 0}, + {"(*Conn).EndResponse", Method, 0}, + {"(*Conn).Next", Method, 0}, + {"(*Conn).PrintfLine", Method, 0}, + {"(*Conn).ReadCodeLine", Method, 0}, + {"(*Conn).ReadContinuedLine", Method, 0}, + {"(*Conn).ReadContinuedLineBytes", Method, 0}, + {"(*Conn).ReadDotBytes", Method, 0}, + {"(*Conn).ReadDotLines", Method, 0}, + {"(*Conn).ReadLine", Method, 0}, + {"(*Conn).ReadLineBytes", Method, 0}, + {"(*Conn).ReadMIMEHeader", Method, 0}, + {"(*Conn).ReadResponse", Method, 0}, + {"(*Conn).StartRequest", Method, 0}, + {"(*Conn).StartResponse", Method, 0}, + {"(*Error).Error", Method, 0}, + {"(*Pipeline).EndRequest", Method, 0}, + {"(*Pipeline).EndResponse", Method, 0}, + {"(*Pipeline).Next", Method, 0}, + {"(*Pipeline).StartRequest", Method, 0}, + {"(*Pipeline).StartResponse", Method, 0}, + {"(*Reader).DotReader", Method, 0}, + {"(*Reader).ReadCodeLine", Method, 0}, + {"(*Reader).ReadContinuedLine", Method, 0}, + {"(*Reader).ReadContinuedLineBytes", Method, 0}, + {"(*Reader).ReadDotBytes", Method, 0}, + {"(*Reader).ReadDotLines", Method, 0}, + {"(*Reader).ReadLine", Method, 0}, + {"(*Reader).ReadLineBytes", Method, 0}, + {"(*Reader).ReadMIMEHeader", Method, 0}, + {"(*Reader).ReadResponse", Method, 0}, + {"(*Writer).DotWriter", Method, 0}, + {"(*Writer).PrintfLine", Method, 0}, + {"(MIMEHeader).Add", Method, 0}, + {"(MIMEHeader).Del", 
Method, 0}, + {"(MIMEHeader).Get", Method, 0}, + {"(MIMEHeader).Set", Method, 0}, + {"(MIMEHeader).Values", Method, 14}, + {"(ProtocolError).Error", Method, 0}, + {"CanonicalMIMEHeaderKey", Func, 0}, + {"Conn", Type, 0}, + {"Conn.Pipeline", Field, 0}, + {"Conn.Reader", Field, 0}, + {"Conn.Writer", Field, 0}, + {"Dial", Func, 0}, + {"Error", Type, 0}, + {"Error.Code", Field, 0}, + {"Error.Msg", Field, 0}, + {"MIMEHeader", Type, 0}, + {"NewConn", Func, 0}, + {"NewReader", Func, 0}, + {"NewWriter", Func, 0}, + {"Pipeline", Type, 0}, + {"ProtocolError", Type, 0}, + {"Reader", Type, 0}, + {"Reader.R", Field, 0}, + {"TrimBytes", Func, 1}, + {"TrimString", Func, 1}, + {"Writer", Type, 0}, + {"Writer.W", Field, 0}, + }, + "net/url": { + {"(*Error).Error", Method, 0}, + {"(*Error).Temporary", Method, 6}, + {"(*Error).Timeout", Method, 6}, + {"(*Error).Unwrap", Method, 13}, + {"(*URL).EscapedFragment", Method, 15}, + {"(*URL).EscapedPath", Method, 5}, + {"(*URL).Hostname", Method, 8}, + {"(*URL).IsAbs", Method, 0}, + {"(*URL).JoinPath", Method, 19}, + {"(*URL).MarshalBinary", Method, 8}, + {"(*URL).Parse", Method, 0}, + {"(*URL).Port", Method, 8}, + {"(*URL).Query", Method, 0}, + {"(*URL).Redacted", Method, 15}, + {"(*URL).RequestURI", Method, 0}, + {"(*URL).ResolveReference", Method, 0}, + {"(*URL).String", Method, 0}, + {"(*URL).UnmarshalBinary", Method, 8}, + {"(*Userinfo).Password", Method, 0}, + {"(*Userinfo).String", Method, 0}, + {"(*Userinfo).Username", Method, 0}, + {"(EscapeError).Error", Method, 0}, + {"(InvalidHostError).Error", Method, 6}, + {"(Values).Add", Method, 0}, + {"(Values).Del", Method, 0}, + {"(Values).Encode", Method, 0}, + {"(Values).Get", Method, 0}, + {"(Values).Has", Method, 17}, + {"(Values).Set", Method, 0}, + {"Error", Type, 0}, + {"Error.Err", Field, 0}, + {"Error.Op", Field, 0}, + {"Error.URL", Field, 0}, + {"EscapeError", Type, 0}, + {"InvalidHostError", Type, 6}, + {"JoinPath", Func, 19}, + {"Parse", Func, 0}, + {"ParseQuery", Func, 0}, + 
{"ParseRequestURI", Func, 0}, + {"PathEscape", Func, 8}, + {"PathUnescape", Func, 8}, + {"QueryEscape", Func, 0}, + {"QueryUnescape", Func, 0}, + {"URL", Type, 0}, + {"URL.ForceQuery", Field, 7}, + {"URL.Fragment", Field, 0}, + {"URL.Host", Field, 0}, + {"URL.OmitHost", Field, 19}, + {"URL.Opaque", Field, 0}, + {"URL.Path", Field, 0}, + {"URL.RawFragment", Field, 15}, + {"URL.RawPath", Field, 5}, + {"URL.RawQuery", Field, 0}, + {"URL.Scheme", Field, 0}, + {"URL.User", Field, 0}, + {"User", Func, 0}, + {"UserPassword", Func, 0}, + {"Userinfo", Type, 0}, + {"Values", Type, 0}, + }, + "os": { + {"(*File).Chdir", Method, 0}, + {"(*File).Chmod", Method, 0}, + {"(*File).Chown", Method, 0}, + {"(*File).Close", Method, 0}, + {"(*File).Fd", Method, 0}, + {"(*File).Name", Method, 0}, + {"(*File).Read", Method, 0}, + {"(*File).ReadAt", Method, 0}, + {"(*File).ReadDir", Method, 16}, + {"(*File).ReadFrom", Method, 15}, + {"(*File).Readdir", Method, 0}, + {"(*File).Readdirnames", Method, 0}, + {"(*File).Seek", Method, 0}, + {"(*File).SetDeadline", Method, 10}, + {"(*File).SetReadDeadline", Method, 10}, + {"(*File).SetWriteDeadline", Method, 10}, + {"(*File).Stat", Method, 0}, + {"(*File).Sync", Method, 0}, + {"(*File).SyscallConn", Method, 12}, + {"(*File).Truncate", Method, 0}, + {"(*File).Write", Method, 0}, + {"(*File).WriteAt", Method, 0}, + {"(*File).WriteString", Method, 0}, + {"(*File).WriteTo", Method, 22}, + {"(*LinkError).Error", Method, 0}, + {"(*LinkError).Unwrap", Method, 13}, + {"(*PathError).Error", Method, 0}, + {"(*PathError).Timeout", Method, 10}, + {"(*PathError).Unwrap", Method, 13}, + {"(*Process).Kill", Method, 0}, + {"(*Process).Release", Method, 0}, + {"(*Process).Signal", Method, 0}, + {"(*Process).Wait", Method, 0}, + {"(*ProcessState).ExitCode", Method, 12}, + {"(*ProcessState).Exited", Method, 0}, + {"(*ProcessState).Pid", Method, 0}, + {"(*ProcessState).String", Method, 0}, + {"(*ProcessState).Success", Method, 0}, + {"(*ProcessState).Sys", Method, 
0}, + {"(*ProcessState).SysUsage", Method, 0}, + {"(*ProcessState).SystemTime", Method, 0}, + {"(*ProcessState).UserTime", Method, 0}, + {"(*SyscallError).Error", Method, 0}, + {"(*SyscallError).Timeout", Method, 10}, + {"(*SyscallError).Unwrap", Method, 13}, + {"(FileMode).IsDir", Method, 0}, + {"(FileMode).IsRegular", Method, 1}, + {"(FileMode).Perm", Method, 0}, + {"(FileMode).String", Method, 0}, + {"Args", Var, 0}, + {"Chdir", Func, 0}, + {"Chmod", Func, 0}, + {"Chown", Func, 0}, + {"Chtimes", Func, 0}, + {"Clearenv", Func, 0}, + {"Create", Func, 0}, + {"CreateTemp", Func, 16}, + {"DevNull", Const, 0}, + {"DirEntry", Type, 16}, + {"DirFS", Func, 16}, + {"Environ", Func, 0}, + {"ErrClosed", Var, 8}, + {"ErrDeadlineExceeded", Var, 15}, + {"ErrExist", Var, 0}, + {"ErrInvalid", Var, 0}, + {"ErrNoDeadline", Var, 10}, + {"ErrNotExist", Var, 0}, + {"ErrPermission", Var, 0}, + {"ErrProcessDone", Var, 16}, + {"Executable", Func, 8}, + {"Exit", Func, 0}, + {"Expand", Func, 0}, + {"ExpandEnv", Func, 0}, + {"File", Type, 0}, + {"FileInfo", Type, 0}, + {"FileMode", Type, 0}, + {"FindProcess", Func, 0}, + {"Getegid", Func, 0}, + {"Getenv", Func, 0}, + {"Geteuid", Func, 0}, + {"Getgid", Func, 0}, + {"Getgroups", Func, 0}, + {"Getpagesize", Func, 0}, + {"Getpid", Func, 0}, + {"Getppid", Func, 0}, + {"Getuid", Func, 0}, + {"Getwd", Func, 0}, + {"Hostname", Func, 0}, + {"Interrupt", Var, 0}, + {"IsExist", Func, 0}, + {"IsNotExist", Func, 0}, + {"IsPathSeparator", Func, 0}, + {"IsPermission", Func, 0}, + {"IsTimeout", Func, 10}, + {"Kill", Var, 0}, + {"Lchown", Func, 0}, + {"Link", Func, 0}, + {"LinkError", Type, 0}, + {"LinkError.Err", Field, 0}, + {"LinkError.New", Field, 0}, + {"LinkError.Old", Field, 0}, + {"LinkError.Op", Field, 0}, + {"LookupEnv", Func, 5}, + {"Lstat", Func, 0}, + {"Mkdir", Func, 0}, + {"MkdirAll", Func, 0}, + {"MkdirTemp", Func, 16}, + {"ModeAppend", Const, 0}, + {"ModeCharDevice", Const, 0}, + {"ModeDevice", Const, 0}, + {"ModeDir", Const, 0}, + 
{"ModeExclusive", Const, 0}, + {"ModeIrregular", Const, 11}, + {"ModeNamedPipe", Const, 0}, + {"ModePerm", Const, 0}, + {"ModeSetgid", Const, 0}, + {"ModeSetuid", Const, 0}, + {"ModeSocket", Const, 0}, + {"ModeSticky", Const, 0}, + {"ModeSymlink", Const, 0}, + {"ModeTemporary", Const, 0}, + {"ModeType", Const, 0}, + {"NewFile", Func, 0}, + {"NewSyscallError", Func, 0}, + {"O_APPEND", Const, 0}, + {"O_CREATE", Const, 0}, + {"O_EXCL", Const, 0}, + {"O_RDONLY", Const, 0}, + {"O_RDWR", Const, 0}, + {"O_SYNC", Const, 0}, + {"O_TRUNC", Const, 0}, + {"O_WRONLY", Const, 0}, + {"Open", Func, 0}, + {"OpenFile", Func, 0}, + {"PathError", Type, 0}, + {"PathError.Err", Field, 0}, + {"PathError.Op", Field, 0}, + {"PathError.Path", Field, 0}, + {"PathListSeparator", Const, 0}, + {"PathSeparator", Const, 0}, + {"Pipe", Func, 0}, + {"ProcAttr", Type, 0}, + {"ProcAttr.Dir", Field, 0}, + {"ProcAttr.Env", Field, 0}, + {"ProcAttr.Files", Field, 0}, + {"ProcAttr.Sys", Field, 0}, + {"Process", Type, 0}, + {"Process.Pid", Field, 0}, + {"ProcessState", Type, 0}, + {"ReadDir", Func, 16}, + {"ReadFile", Func, 16}, + {"Readlink", Func, 0}, + {"Remove", Func, 0}, + {"RemoveAll", Func, 0}, + {"Rename", Func, 0}, + {"SEEK_CUR", Const, 0}, + {"SEEK_END", Const, 0}, + {"SEEK_SET", Const, 0}, + {"SameFile", Func, 0}, + {"Setenv", Func, 0}, + {"Signal", Type, 0}, + {"StartProcess", Func, 0}, + {"Stat", Func, 0}, + {"Stderr", Var, 0}, + {"Stdin", Var, 0}, + {"Stdout", Var, 0}, + {"Symlink", Func, 0}, + {"SyscallError", Type, 0}, + {"SyscallError.Err", Field, 0}, + {"SyscallError.Syscall", Field, 0}, + {"TempDir", Func, 0}, + {"Truncate", Func, 0}, + {"Unsetenv", Func, 4}, + {"UserCacheDir", Func, 11}, + {"UserConfigDir", Func, 13}, + {"UserHomeDir", Func, 12}, + {"WriteFile", Func, 16}, + }, + "os/exec": { + {"(*Cmd).CombinedOutput", Method, 0}, + {"(*Cmd).Environ", Method, 19}, + {"(*Cmd).Output", Method, 0}, + {"(*Cmd).Run", Method, 0}, + {"(*Cmd).Start", Method, 0}, + {"(*Cmd).StderrPipe", Method, 
0}, + {"(*Cmd).StdinPipe", Method, 0}, + {"(*Cmd).StdoutPipe", Method, 0}, + {"(*Cmd).String", Method, 13}, + {"(*Cmd).Wait", Method, 0}, + {"(*Error).Error", Method, 0}, + {"(*Error).Unwrap", Method, 13}, + {"(*ExitError).Error", Method, 0}, + {"(ExitError).ExitCode", Method, 12}, + {"(ExitError).Exited", Method, 0}, + {"(ExitError).Pid", Method, 0}, + {"(ExitError).String", Method, 0}, + {"(ExitError).Success", Method, 0}, + {"(ExitError).Sys", Method, 0}, + {"(ExitError).SysUsage", Method, 0}, + {"(ExitError).SystemTime", Method, 0}, + {"(ExitError).UserTime", Method, 0}, + {"Cmd", Type, 0}, + {"Cmd.Args", Field, 0}, + {"Cmd.Cancel", Field, 20}, + {"Cmd.Dir", Field, 0}, + {"Cmd.Env", Field, 0}, + {"Cmd.Err", Field, 19}, + {"Cmd.ExtraFiles", Field, 0}, + {"Cmd.Path", Field, 0}, + {"Cmd.Process", Field, 0}, + {"Cmd.ProcessState", Field, 0}, + {"Cmd.Stderr", Field, 0}, + {"Cmd.Stdin", Field, 0}, + {"Cmd.Stdout", Field, 0}, + {"Cmd.SysProcAttr", Field, 0}, + {"Cmd.WaitDelay", Field, 20}, + {"Command", Func, 0}, + {"CommandContext", Func, 7}, + {"ErrDot", Var, 19}, + {"ErrNotFound", Var, 0}, + {"ErrWaitDelay", Var, 20}, + {"Error", Type, 0}, + {"Error.Err", Field, 0}, + {"Error.Name", Field, 0}, + {"ExitError", Type, 0}, + {"ExitError.ProcessState", Field, 0}, + {"ExitError.Stderr", Field, 6}, + {"LookPath", Func, 0}, + }, + "os/signal": { + {"Ignore", Func, 5}, + {"Ignored", Func, 11}, + {"Notify", Func, 0}, + {"NotifyContext", Func, 16}, + {"Reset", Func, 5}, + {"Stop", Func, 1}, + }, + "os/user": { + {"(*User).GroupIds", Method, 7}, + {"(UnknownGroupError).Error", Method, 7}, + {"(UnknownGroupIdError).Error", Method, 7}, + {"(UnknownUserError).Error", Method, 0}, + {"(UnknownUserIdError).Error", Method, 0}, + {"Current", Func, 0}, + {"Group", Type, 7}, + {"Group.Gid", Field, 7}, + {"Group.Name", Field, 7}, + {"Lookup", Func, 0}, + {"LookupGroup", Func, 7}, + {"LookupGroupId", Func, 7}, + {"LookupId", Func, 0}, + {"UnknownGroupError", Type, 7}, + 
{"UnknownGroupIdError", Type, 7}, + {"UnknownUserError", Type, 0}, + {"UnknownUserIdError", Type, 0}, + {"User", Type, 0}, + {"User.Gid", Field, 0}, + {"User.HomeDir", Field, 0}, + {"User.Name", Field, 0}, + {"User.Uid", Field, 0}, + {"User.Username", Field, 0}, + }, + "path": { + {"Base", Func, 0}, + {"Clean", Func, 0}, + {"Dir", Func, 0}, + {"ErrBadPattern", Var, 0}, + {"Ext", Func, 0}, + {"IsAbs", Func, 0}, + {"Join", Func, 0}, + {"Match", Func, 0}, + {"Split", Func, 0}, + }, + "path/filepath": { + {"Abs", Func, 0}, + {"Base", Func, 0}, + {"Clean", Func, 0}, + {"Dir", Func, 0}, + {"ErrBadPattern", Var, 0}, + {"EvalSymlinks", Func, 0}, + {"Ext", Func, 0}, + {"FromSlash", Func, 0}, + {"Glob", Func, 0}, + {"HasPrefix", Func, 0}, + {"IsAbs", Func, 0}, + {"IsLocal", Func, 20}, + {"Join", Func, 0}, + {"ListSeparator", Const, 0}, + {"Match", Func, 0}, + {"Rel", Func, 0}, + {"Separator", Const, 0}, + {"SkipAll", Var, 20}, + {"SkipDir", Var, 0}, + {"Split", Func, 0}, + {"SplitList", Func, 0}, + {"ToSlash", Func, 0}, + {"VolumeName", Func, 0}, + {"Walk", Func, 0}, + {"WalkDir", Func, 16}, + {"WalkFunc", Type, 0}, + }, + "plugin": { + {"(*Plugin).Lookup", Method, 8}, + {"Open", Func, 8}, + {"Plugin", Type, 8}, + {"Symbol", Type, 8}, + }, + "reflect": { + {"(*MapIter).Key", Method, 12}, + {"(*MapIter).Next", Method, 12}, + {"(*MapIter).Reset", Method, 18}, + {"(*MapIter).Value", Method, 12}, + {"(*ValueError).Error", Method, 0}, + {"(ChanDir).String", Method, 0}, + {"(Kind).String", Method, 0}, + {"(Method).IsExported", Method, 17}, + {"(StructField).IsExported", Method, 17}, + {"(StructTag).Get", Method, 0}, + {"(StructTag).Lookup", Method, 7}, + {"(Value).Addr", Method, 0}, + {"(Value).Bool", Method, 0}, + {"(Value).Bytes", Method, 0}, + {"(Value).Call", Method, 0}, + {"(Value).CallSlice", Method, 0}, + {"(Value).CanAddr", Method, 0}, + {"(Value).CanComplex", Method, 18}, + {"(Value).CanConvert", Method, 17}, + {"(Value).CanFloat", Method, 18}, + {"(Value).CanInt", 
Method, 18}, + {"(Value).CanInterface", Method, 0}, + {"(Value).CanSet", Method, 0}, + {"(Value).CanUint", Method, 18}, + {"(Value).Cap", Method, 0}, + {"(Value).Clear", Method, 21}, + {"(Value).Close", Method, 0}, + {"(Value).Comparable", Method, 20}, + {"(Value).Complex", Method, 0}, + {"(Value).Convert", Method, 1}, + {"(Value).Elem", Method, 0}, + {"(Value).Equal", Method, 20}, + {"(Value).Field", Method, 0}, + {"(Value).FieldByIndex", Method, 0}, + {"(Value).FieldByIndexErr", Method, 18}, + {"(Value).FieldByName", Method, 0}, + {"(Value).FieldByNameFunc", Method, 0}, + {"(Value).Float", Method, 0}, + {"(Value).Grow", Method, 20}, + {"(Value).Index", Method, 0}, + {"(Value).Int", Method, 0}, + {"(Value).Interface", Method, 0}, + {"(Value).InterfaceData", Method, 0}, + {"(Value).IsNil", Method, 0}, + {"(Value).IsValid", Method, 0}, + {"(Value).IsZero", Method, 13}, + {"(Value).Kind", Method, 0}, + {"(Value).Len", Method, 0}, + {"(Value).MapIndex", Method, 0}, + {"(Value).MapKeys", Method, 0}, + {"(Value).MapRange", Method, 12}, + {"(Value).Method", Method, 0}, + {"(Value).MethodByName", Method, 0}, + {"(Value).NumField", Method, 0}, + {"(Value).NumMethod", Method, 0}, + {"(Value).OverflowComplex", Method, 0}, + {"(Value).OverflowFloat", Method, 0}, + {"(Value).OverflowInt", Method, 0}, + {"(Value).OverflowUint", Method, 0}, + {"(Value).Pointer", Method, 0}, + {"(Value).Recv", Method, 0}, + {"(Value).Send", Method, 0}, + {"(Value).Set", Method, 0}, + {"(Value).SetBool", Method, 0}, + {"(Value).SetBytes", Method, 0}, + {"(Value).SetCap", Method, 2}, + {"(Value).SetComplex", Method, 0}, + {"(Value).SetFloat", Method, 0}, + {"(Value).SetInt", Method, 0}, + {"(Value).SetIterKey", Method, 18}, + {"(Value).SetIterValue", Method, 18}, + {"(Value).SetLen", Method, 0}, + {"(Value).SetMapIndex", Method, 0}, + {"(Value).SetPointer", Method, 0}, + {"(Value).SetString", Method, 0}, + {"(Value).SetUint", Method, 0}, + {"(Value).SetZero", Method, 20}, + {"(Value).Slice", 
Method, 0}, + {"(Value).Slice3", Method, 2}, + {"(Value).String", Method, 0}, + {"(Value).TryRecv", Method, 0}, + {"(Value).TrySend", Method, 0}, + {"(Value).Type", Method, 0}, + {"(Value).Uint", Method, 0}, + {"(Value).UnsafeAddr", Method, 0}, + {"(Value).UnsafePointer", Method, 18}, + {"Append", Func, 0}, + {"AppendSlice", Func, 0}, + {"Array", Const, 0}, + {"ArrayOf", Func, 5}, + {"Bool", Const, 0}, + {"BothDir", Const, 0}, + {"Chan", Const, 0}, + {"ChanDir", Type, 0}, + {"ChanOf", Func, 1}, + {"Complex128", Const, 0}, + {"Complex64", Const, 0}, + {"Copy", Func, 0}, + {"DeepEqual", Func, 0}, + {"Float32", Const, 0}, + {"Float64", Const, 0}, + {"Func", Const, 0}, + {"FuncOf", Func, 5}, + {"Indirect", Func, 0}, + {"Int", Const, 0}, + {"Int16", Const, 0}, + {"Int32", Const, 0}, + {"Int64", Const, 0}, + {"Int8", Const, 0}, + {"Interface", Const, 0}, + {"Invalid", Const, 0}, + {"Kind", Type, 0}, + {"MakeChan", Func, 0}, + {"MakeFunc", Func, 1}, + {"MakeMap", Func, 0}, + {"MakeMapWithSize", Func, 9}, + {"MakeSlice", Func, 0}, + {"Map", Const, 0}, + {"MapIter", Type, 12}, + {"MapOf", Func, 1}, + {"Method", Type, 0}, + {"Method.Func", Field, 0}, + {"Method.Index", Field, 0}, + {"Method.Name", Field, 0}, + {"Method.PkgPath", Field, 0}, + {"Method.Type", Field, 0}, + {"New", Func, 0}, + {"NewAt", Func, 0}, + {"Pointer", Const, 18}, + {"PointerTo", Func, 18}, + {"Ptr", Const, 0}, + {"PtrTo", Func, 0}, + {"RecvDir", Const, 0}, + {"Select", Func, 1}, + {"SelectCase", Type, 1}, + {"SelectCase.Chan", Field, 1}, + {"SelectCase.Dir", Field, 1}, + {"SelectCase.Send", Field, 1}, + {"SelectDefault", Const, 1}, + {"SelectDir", Type, 1}, + {"SelectRecv", Const, 1}, + {"SelectSend", Const, 1}, + {"SendDir", Const, 0}, + {"Slice", Const, 0}, + {"SliceHeader", Type, 0}, + {"SliceHeader.Cap", Field, 0}, + {"SliceHeader.Data", Field, 0}, + {"SliceHeader.Len", Field, 0}, + {"SliceOf", Func, 1}, + {"String", Const, 0}, + {"StringHeader", Type, 0}, + {"StringHeader.Data", Field, 0}, + 
{"StringHeader.Len", Field, 0}, + {"Struct", Const, 0}, + {"StructField", Type, 0}, + {"StructField.Anonymous", Field, 0}, + {"StructField.Index", Field, 0}, + {"StructField.Name", Field, 0}, + {"StructField.Offset", Field, 0}, + {"StructField.PkgPath", Field, 0}, + {"StructField.Tag", Field, 0}, + {"StructField.Type", Field, 0}, + {"StructOf", Func, 7}, + {"StructTag", Type, 0}, + {"Swapper", Func, 8}, + {"Type", Type, 0}, + {"TypeFor", Func, 22}, + {"TypeOf", Func, 0}, + {"Uint", Const, 0}, + {"Uint16", Const, 0}, + {"Uint32", Const, 0}, + {"Uint64", Const, 0}, + {"Uint8", Const, 0}, + {"Uintptr", Const, 0}, + {"UnsafePointer", Const, 0}, + {"Value", Type, 0}, + {"ValueError", Type, 0}, + {"ValueError.Kind", Field, 0}, + {"ValueError.Method", Field, 0}, + {"ValueOf", Func, 0}, + {"VisibleFields", Func, 17}, + {"Zero", Func, 0}, + }, + "regexp": { + {"(*Regexp).Copy", Method, 6}, + {"(*Regexp).Expand", Method, 0}, + {"(*Regexp).ExpandString", Method, 0}, + {"(*Regexp).Find", Method, 0}, + {"(*Regexp).FindAll", Method, 0}, + {"(*Regexp).FindAllIndex", Method, 0}, + {"(*Regexp).FindAllString", Method, 0}, + {"(*Regexp).FindAllStringIndex", Method, 0}, + {"(*Regexp).FindAllStringSubmatch", Method, 0}, + {"(*Regexp).FindAllStringSubmatchIndex", Method, 0}, + {"(*Regexp).FindAllSubmatch", Method, 0}, + {"(*Regexp).FindAllSubmatchIndex", Method, 0}, + {"(*Regexp).FindIndex", Method, 0}, + {"(*Regexp).FindReaderIndex", Method, 0}, + {"(*Regexp).FindReaderSubmatchIndex", Method, 0}, + {"(*Regexp).FindString", Method, 0}, + {"(*Regexp).FindStringIndex", Method, 0}, + {"(*Regexp).FindStringSubmatch", Method, 0}, + {"(*Regexp).FindStringSubmatchIndex", Method, 0}, + {"(*Regexp).FindSubmatch", Method, 0}, + {"(*Regexp).FindSubmatchIndex", Method, 0}, + {"(*Regexp).LiteralPrefix", Method, 0}, + {"(*Regexp).Longest", Method, 1}, + {"(*Regexp).MarshalText", Method, 21}, + {"(*Regexp).Match", Method, 0}, + {"(*Regexp).MatchReader", Method, 0}, + {"(*Regexp).MatchString", Method, 
0}, + {"(*Regexp).NumSubexp", Method, 0}, + {"(*Regexp).ReplaceAll", Method, 0}, + {"(*Regexp).ReplaceAllFunc", Method, 0}, + {"(*Regexp).ReplaceAllLiteral", Method, 0}, + {"(*Regexp).ReplaceAllLiteralString", Method, 0}, + {"(*Regexp).ReplaceAllString", Method, 0}, + {"(*Regexp).ReplaceAllStringFunc", Method, 0}, + {"(*Regexp).Split", Method, 1}, + {"(*Regexp).String", Method, 0}, + {"(*Regexp).SubexpIndex", Method, 15}, + {"(*Regexp).SubexpNames", Method, 0}, + {"(*Regexp).UnmarshalText", Method, 21}, + {"Compile", Func, 0}, + {"CompilePOSIX", Func, 0}, + {"Match", Func, 0}, + {"MatchReader", Func, 0}, + {"MatchString", Func, 0}, + {"MustCompile", Func, 0}, + {"MustCompilePOSIX", Func, 0}, + {"QuoteMeta", Func, 0}, + {"Regexp", Type, 0}, + }, + "regexp/syntax": { + {"(*Error).Error", Method, 0}, + {"(*Inst).MatchEmptyWidth", Method, 0}, + {"(*Inst).MatchRune", Method, 0}, + {"(*Inst).MatchRunePos", Method, 3}, + {"(*Inst).String", Method, 0}, + {"(*Prog).Prefix", Method, 0}, + {"(*Prog).StartCond", Method, 0}, + {"(*Prog).String", Method, 0}, + {"(*Regexp).CapNames", Method, 0}, + {"(*Regexp).Equal", Method, 0}, + {"(*Regexp).MaxCap", Method, 0}, + {"(*Regexp).Simplify", Method, 0}, + {"(*Regexp).String", Method, 0}, + {"(ErrorCode).String", Method, 0}, + {"(InstOp).String", Method, 3}, + {"(Op).String", Method, 11}, + {"ClassNL", Const, 0}, + {"Compile", Func, 0}, + {"DotNL", Const, 0}, + {"EmptyBeginLine", Const, 0}, + {"EmptyBeginText", Const, 0}, + {"EmptyEndLine", Const, 0}, + {"EmptyEndText", Const, 0}, + {"EmptyNoWordBoundary", Const, 0}, + {"EmptyOp", Type, 0}, + {"EmptyOpContext", Func, 0}, + {"EmptyWordBoundary", Const, 0}, + {"ErrInternalError", Const, 0}, + {"ErrInvalidCharClass", Const, 0}, + {"ErrInvalidCharRange", Const, 0}, + {"ErrInvalidEscape", Const, 0}, + {"ErrInvalidNamedCapture", Const, 0}, + {"ErrInvalidPerlOp", Const, 0}, + {"ErrInvalidRepeatOp", Const, 0}, + {"ErrInvalidRepeatSize", Const, 0}, + {"ErrInvalidUTF8", Const, 0}, + 
{"ErrLarge", Const, 20}, + {"ErrMissingBracket", Const, 0}, + {"ErrMissingParen", Const, 0}, + {"ErrMissingRepeatArgument", Const, 0}, + {"ErrNestingDepth", Const, 19}, + {"ErrTrailingBackslash", Const, 0}, + {"ErrUnexpectedParen", Const, 1}, + {"Error", Type, 0}, + {"Error.Code", Field, 0}, + {"Error.Expr", Field, 0}, + {"ErrorCode", Type, 0}, + {"Flags", Type, 0}, + {"FoldCase", Const, 0}, + {"Inst", Type, 0}, + {"Inst.Arg", Field, 0}, + {"Inst.Op", Field, 0}, + {"Inst.Out", Field, 0}, + {"Inst.Rune", Field, 0}, + {"InstAlt", Const, 0}, + {"InstAltMatch", Const, 0}, + {"InstCapture", Const, 0}, + {"InstEmptyWidth", Const, 0}, + {"InstFail", Const, 0}, + {"InstMatch", Const, 0}, + {"InstNop", Const, 0}, + {"InstOp", Type, 0}, + {"InstRune", Const, 0}, + {"InstRune1", Const, 0}, + {"InstRuneAny", Const, 0}, + {"InstRuneAnyNotNL", Const, 0}, + {"IsWordChar", Func, 0}, + {"Literal", Const, 0}, + {"MatchNL", Const, 0}, + {"NonGreedy", Const, 0}, + {"OneLine", Const, 0}, + {"Op", Type, 0}, + {"OpAlternate", Const, 0}, + {"OpAnyChar", Const, 0}, + {"OpAnyCharNotNL", Const, 0}, + {"OpBeginLine", Const, 0}, + {"OpBeginText", Const, 0}, + {"OpCapture", Const, 0}, + {"OpCharClass", Const, 0}, + {"OpConcat", Const, 0}, + {"OpEmptyMatch", Const, 0}, + {"OpEndLine", Const, 0}, + {"OpEndText", Const, 0}, + {"OpLiteral", Const, 0}, + {"OpNoMatch", Const, 0}, + {"OpNoWordBoundary", Const, 0}, + {"OpPlus", Const, 0}, + {"OpQuest", Const, 0}, + {"OpRepeat", Const, 0}, + {"OpStar", Const, 0}, + {"OpWordBoundary", Const, 0}, + {"POSIX", Const, 0}, + {"Parse", Func, 0}, + {"Perl", Const, 0}, + {"PerlX", Const, 0}, + {"Prog", Type, 0}, + {"Prog.Inst", Field, 0}, + {"Prog.NumCap", Field, 0}, + {"Prog.Start", Field, 0}, + {"Regexp", Type, 0}, + {"Regexp.Cap", Field, 0}, + {"Regexp.Flags", Field, 0}, + {"Regexp.Max", Field, 0}, + {"Regexp.Min", Field, 0}, + {"Regexp.Name", Field, 0}, + {"Regexp.Op", Field, 0}, + {"Regexp.Rune", Field, 0}, + {"Regexp.Rune0", Field, 0}, + {"Regexp.Sub", 
Field, 0}, + {"Regexp.Sub0", Field, 0}, + {"Simple", Const, 0}, + {"UnicodeGroups", Const, 0}, + {"WasDollar", Const, 0}, + }, + "runtime": { + {"(*BlockProfileRecord).Stack", Method, 1}, + {"(*Frames).Next", Method, 7}, + {"(*Func).Entry", Method, 0}, + {"(*Func).FileLine", Method, 0}, + {"(*Func).Name", Method, 0}, + {"(*MemProfileRecord).InUseBytes", Method, 0}, + {"(*MemProfileRecord).InUseObjects", Method, 0}, + {"(*MemProfileRecord).Stack", Method, 0}, + {"(*PanicNilError).Error", Method, 21}, + {"(*PanicNilError).RuntimeError", Method, 21}, + {"(*Pinner).Pin", Method, 21}, + {"(*Pinner).Unpin", Method, 21}, + {"(*StackRecord).Stack", Method, 0}, + {"(*TypeAssertionError).Error", Method, 0}, + {"(*TypeAssertionError).RuntimeError", Method, 0}, + {"BlockProfile", Func, 1}, + {"BlockProfileRecord", Type, 1}, + {"BlockProfileRecord.Count", Field, 1}, + {"BlockProfileRecord.Cycles", Field, 1}, + {"BlockProfileRecord.StackRecord", Field, 1}, + {"Breakpoint", Func, 0}, + {"CPUProfile", Func, 0}, + {"Caller", Func, 0}, + {"Callers", Func, 0}, + {"CallersFrames", Func, 7}, + {"Compiler", Const, 0}, + {"Error", Type, 0}, + {"Frame", Type, 7}, + {"Frame.Entry", Field, 7}, + {"Frame.File", Field, 7}, + {"Frame.Func", Field, 7}, + {"Frame.Function", Field, 7}, + {"Frame.Line", Field, 7}, + {"Frame.PC", Field, 7}, + {"Frames", Type, 7}, + {"Func", Type, 0}, + {"FuncForPC", Func, 0}, + {"GC", Func, 0}, + {"GOARCH", Const, 0}, + {"GOMAXPROCS", Func, 0}, + {"GOOS", Const, 0}, + {"GOROOT", Func, 0}, + {"Goexit", Func, 0}, + {"GoroutineProfile", Func, 0}, + {"Gosched", Func, 0}, + {"KeepAlive", Func, 7}, + {"LockOSThread", Func, 0}, + {"MemProfile", Func, 0}, + {"MemProfileRate", Var, 0}, + {"MemProfileRecord", Type, 0}, + {"MemProfileRecord.AllocBytes", Field, 0}, + {"MemProfileRecord.AllocObjects", Field, 0}, + {"MemProfileRecord.FreeBytes", Field, 0}, + {"MemProfileRecord.FreeObjects", Field, 0}, + {"MemProfileRecord.Stack0", Field, 0}, + {"MemStats", Type, 0}, + 
{"MemStats.Alloc", Field, 0}, + {"MemStats.BuckHashSys", Field, 0}, + {"MemStats.BySize", Field, 0}, + {"MemStats.DebugGC", Field, 0}, + {"MemStats.EnableGC", Field, 0}, + {"MemStats.Frees", Field, 0}, + {"MemStats.GCCPUFraction", Field, 5}, + {"MemStats.GCSys", Field, 2}, + {"MemStats.HeapAlloc", Field, 0}, + {"MemStats.HeapIdle", Field, 0}, + {"MemStats.HeapInuse", Field, 0}, + {"MemStats.HeapObjects", Field, 0}, + {"MemStats.HeapReleased", Field, 0}, + {"MemStats.HeapSys", Field, 0}, + {"MemStats.LastGC", Field, 0}, + {"MemStats.Lookups", Field, 0}, + {"MemStats.MCacheInuse", Field, 0}, + {"MemStats.MCacheSys", Field, 0}, + {"MemStats.MSpanInuse", Field, 0}, + {"MemStats.MSpanSys", Field, 0}, + {"MemStats.Mallocs", Field, 0}, + {"MemStats.NextGC", Field, 0}, + {"MemStats.NumForcedGC", Field, 8}, + {"MemStats.NumGC", Field, 0}, + {"MemStats.OtherSys", Field, 2}, + {"MemStats.PauseEnd", Field, 4}, + {"MemStats.PauseNs", Field, 0}, + {"MemStats.PauseTotalNs", Field, 0}, + {"MemStats.StackInuse", Field, 0}, + {"MemStats.StackSys", Field, 0}, + {"MemStats.Sys", Field, 0}, + {"MemStats.TotalAlloc", Field, 0}, + {"MutexProfile", Func, 8}, + {"NumCPU", Func, 0}, + {"NumCgoCall", Func, 0}, + {"NumGoroutine", Func, 0}, + {"PanicNilError", Type, 21}, + {"Pinner", Type, 21}, + {"ReadMemStats", Func, 0}, + {"ReadTrace", Func, 5}, + {"SetBlockProfileRate", Func, 1}, + {"SetCPUProfileRate", Func, 0}, + {"SetCgoTraceback", Func, 7}, + {"SetFinalizer", Func, 0}, + {"SetMutexProfileFraction", Func, 8}, + {"Stack", Func, 0}, + {"StackRecord", Type, 0}, + {"StackRecord.Stack0", Field, 0}, + {"StartTrace", Func, 5}, + {"StopTrace", Func, 5}, + {"ThreadCreateProfile", Func, 0}, + {"TypeAssertionError", Type, 0}, + {"UnlockOSThread", Func, 0}, + {"Version", Func, 0}, + }, + "runtime/cgo": { + {"(Handle).Delete", Method, 17}, + {"(Handle).Value", Method, 17}, + {"Handle", Type, 17}, + {"Incomplete", Type, 20}, + {"NewHandle", Func, 17}, + }, + "runtime/coverage": { + {"ClearCounters", 
Func, 20}, + {"WriteCounters", Func, 20}, + {"WriteCountersDir", Func, 20}, + {"WriteMeta", Func, 20}, + {"WriteMetaDir", Func, 20}, + }, + "runtime/debug": { + {"(*BuildInfo).String", Method, 18}, + {"BuildInfo", Type, 12}, + {"BuildInfo.Deps", Field, 12}, + {"BuildInfo.GoVersion", Field, 18}, + {"BuildInfo.Main", Field, 12}, + {"BuildInfo.Path", Field, 12}, + {"BuildInfo.Settings", Field, 18}, + {"BuildSetting", Type, 18}, + {"BuildSetting.Key", Field, 18}, + {"BuildSetting.Value", Field, 18}, + {"FreeOSMemory", Func, 1}, + {"GCStats", Type, 1}, + {"GCStats.LastGC", Field, 1}, + {"GCStats.NumGC", Field, 1}, + {"GCStats.Pause", Field, 1}, + {"GCStats.PauseEnd", Field, 4}, + {"GCStats.PauseQuantiles", Field, 1}, + {"GCStats.PauseTotal", Field, 1}, + {"Module", Type, 12}, + {"Module.Path", Field, 12}, + {"Module.Replace", Field, 12}, + {"Module.Sum", Field, 12}, + {"Module.Version", Field, 12}, + {"ParseBuildInfo", Func, 18}, + {"PrintStack", Func, 0}, + {"ReadBuildInfo", Func, 12}, + {"ReadGCStats", Func, 1}, + {"SetGCPercent", Func, 1}, + {"SetMaxStack", Func, 2}, + {"SetMaxThreads", Func, 2}, + {"SetMemoryLimit", Func, 19}, + {"SetPanicOnFault", Func, 3}, + {"SetTraceback", Func, 6}, + {"Stack", Func, 0}, + {"WriteHeapDump", Func, 3}, + }, + "runtime/metrics": { + {"(Value).Float64", Method, 16}, + {"(Value).Float64Histogram", Method, 16}, + {"(Value).Kind", Method, 16}, + {"(Value).Uint64", Method, 16}, + {"All", Func, 16}, + {"Description", Type, 16}, + {"Description.Cumulative", Field, 16}, + {"Description.Description", Field, 16}, + {"Description.Kind", Field, 16}, + {"Description.Name", Field, 16}, + {"Float64Histogram", Type, 16}, + {"Float64Histogram.Buckets", Field, 16}, + {"Float64Histogram.Counts", Field, 16}, + {"KindBad", Const, 16}, + {"KindFloat64", Const, 16}, + {"KindFloat64Histogram", Const, 16}, + {"KindUint64", Const, 16}, + {"Read", Func, 16}, + {"Sample", Type, 16}, + {"Sample.Name", Field, 16}, + {"Sample.Value", Field, 16}, + {"Value", 
Type, 16}, + {"ValueKind", Type, 16}, + }, + "runtime/pprof": { + {"(*Profile).Add", Method, 0}, + {"(*Profile).Count", Method, 0}, + {"(*Profile).Name", Method, 0}, + {"(*Profile).Remove", Method, 0}, + {"(*Profile).WriteTo", Method, 0}, + {"Do", Func, 9}, + {"ForLabels", Func, 9}, + {"Label", Func, 9}, + {"LabelSet", Type, 9}, + {"Labels", Func, 9}, + {"Lookup", Func, 0}, + {"NewProfile", Func, 0}, + {"Profile", Type, 0}, + {"Profiles", Func, 0}, + {"SetGoroutineLabels", Func, 9}, + {"StartCPUProfile", Func, 0}, + {"StopCPUProfile", Func, 0}, + {"WithLabels", Func, 9}, + {"WriteHeapProfile", Func, 0}, + }, + "runtime/trace": { + {"(*Region).End", Method, 11}, + {"(*Task).End", Method, 11}, + {"IsEnabled", Func, 11}, + {"Log", Func, 11}, + {"Logf", Func, 11}, + {"NewTask", Func, 11}, + {"Region", Type, 11}, + {"Start", Func, 5}, + {"StartRegion", Func, 11}, + {"Stop", Func, 5}, + {"Task", Type, 11}, + {"WithRegion", Func, 11}, + }, + "slices": { + {"BinarySearch", Func, 21}, + {"BinarySearchFunc", Func, 21}, + {"Clip", Func, 21}, + {"Clone", Func, 21}, + {"Compact", Func, 21}, + {"CompactFunc", Func, 21}, + {"Compare", Func, 21}, + {"CompareFunc", Func, 21}, + {"Concat", Func, 22}, + {"Contains", Func, 21}, + {"ContainsFunc", Func, 21}, + {"Delete", Func, 21}, + {"DeleteFunc", Func, 21}, + {"Equal", Func, 21}, + {"EqualFunc", Func, 21}, + {"Grow", Func, 21}, + {"Index", Func, 21}, + {"IndexFunc", Func, 21}, + {"Insert", Func, 21}, + {"IsSorted", Func, 21}, + {"IsSortedFunc", Func, 21}, + {"Max", Func, 21}, + {"MaxFunc", Func, 21}, + {"Min", Func, 21}, + {"MinFunc", Func, 21}, + {"Replace", Func, 21}, + {"Reverse", Func, 21}, + {"Sort", Func, 21}, + {"SortFunc", Func, 21}, + {"SortStableFunc", Func, 21}, + }, + "sort": { + {"(Float64Slice).Len", Method, 0}, + {"(Float64Slice).Less", Method, 0}, + {"(Float64Slice).Search", Method, 0}, + {"(Float64Slice).Sort", Method, 0}, + {"(Float64Slice).Swap", Method, 0}, + {"(IntSlice).Len", Method, 0}, + {"(IntSlice).Less", 
Method, 0}, + {"(IntSlice).Search", Method, 0}, + {"(IntSlice).Sort", Method, 0}, + {"(IntSlice).Swap", Method, 0}, + {"(StringSlice).Len", Method, 0}, + {"(StringSlice).Less", Method, 0}, + {"(StringSlice).Search", Method, 0}, + {"(StringSlice).Sort", Method, 0}, + {"(StringSlice).Swap", Method, 0}, + {"Find", Func, 19}, + {"Float64Slice", Type, 0}, + {"Float64s", Func, 0}, + {"Float64sAreSorted", Func, 0}, + {"IntSlice", Type, 0}, + {"Interface", Type, 0}, + {"Ints", Func, 0}, + {"IntsAreSorted", Func, 0}, + {"IsSorted", Func, 0}, + {"Reverse", Func, 1}, + {"Search", Func, 0}, + {"SearchFloat64s", Func, 0}, + {"SearchInts", Func, 0}, + {"SearchStrings", Func, 0}, + {"Slice", Func, 8}, + {"SliceIsSorted", Func, 8}, + {"SliceStable", Func, 8}, + {"Sort", Func, 0}, + {"Stable", Func, 2}, + {"StringSlice", Type, 0}, + {"Strings", Func, 0}, + {"StringsAreSorted", Func, 0}, + }, + "strconv": { + {"(*NumError).Error", Method, 0}, + {"(*NumError).Unwrap", Method, 14}, + {"AppendBool", Func, 0}, + {"AppendFloat", Func, 0}, + {"AppendInt", Func, 0}, + {"AppendQuote", Func, 0}, + {"AppendQuoteRune", Func, 0}, + {"AppendQuoteRuneToASCII", Func, 0}, + {"AppendQuoteRuneToGraphic", Func, 6}, + {"AppendQuoteToASCII", Func, 0}, + {"AppendQuoteToGraphic", Func, 6}, + {"AppendUint", Func, 0}, + {"Atoi", Func, 0}, + {"CanBackquote", Func, 0}, + {"ErrRange", Var, 0}, + {"ErrSyntax", Var, 0}, + {"FormatBool", Func, 0}, + {"FormatComplex", Func, 15}, + {"FormatFloat", Func, 0}, + {"FormatInt", Func, 0}, + {"FormatUint", Func, 0}, + {"IntSize", Const, 0}, + {"IsGraphic", Func, 6}, + {"IsPrint", Func, 0}, + {"Itoa", Func, 0}, + {"NumError", Type, 0}, + {"NumError.Err", Field, 0}, + {"NumError.Func", Field, 0}, + {"NumError.Num", Field, 0}, + {"ParseBool", Func, 0}, + {"ParseComplex", Func, 15}, + {"ParseFloat", Func, 0}, + {"ParseInt", Func, 0}, + {"ParseUint", Func, 0}, + {"Quote", Func, 0}, + {"QuoteRune", Func, 0}, + {"QuoteRuneToASCII", Func, 0}, + {"QuoteRuneToGraphic", Func, 6}, + 
{"QuoteToASCII", Func, 0}, + {"QuoteToGraphic", Func, 6}, + {"QuotedPrefix", Func, 17}, + {"Unquote", Func, 0}, + {"UnquoteChar", Func, 0}, + }, + "strings": { + {"(*Builder).Cap", Method, 12}, + {"(*Builder).Grow", Method, 10}, + {"(*Builder).Len", Method, 10}, + {"(*Builder).Reset", Method, 10}, + {"(*Builder).String", Method, 10}, + {"(*Builder).Write", Method, 10}, + {"(*Builder).WriteByte", Method, 10}, + {"(*Builder).WriteRune", Method, 10}, + {"(*Builder).WriteString", Method, 10}, + {"(*Reader).Len", Method, 0}, + {"(*Reader).Read", Method, 0}, + {"(*Reader).ReadAt", Method, 0}, + {"(*Reader).ReadByte", Method, 0}, + {"(*Reader).ReadRune", Method, 0}, + {"(*Reader).Reset", Method, 7}, + {"(*Reader).Seek", Method, 0}, + {"(*Reader).Size", Method, 5}, + {"(*Reader).UnreadByte", Method, 0}, + {"(*Reader).UnreadRune", Method, 0}, + {"(*Reader).WriteTo", Method, 1}, + {"(*Replacer).Replace", Method, 0}, + {"(*Replacer).WriteString", Method, 0}, + {"Builder", Type, 10}, + {"Clone", Func, 18}, + {"Compare", Func, 5}, + {"Contains", Func, 0}, + {"ContainsAny", Func, 0}, + {"ContainsFunc", Func, 21}, + {"ContainsRune", Func, 0}, + {"Count", Func, 0}, + {"Cut", Func, 18}, + {"CutPrefix", Func, 20}, + {"CutSuffix", Func, 20}, + {"EqualFold", Func, 0}, + {"Fields", Func, 0}, + {"FieldsFunc", Func, 0}, + {"HasPrefix", Func, 0}, + {"HasSuffix", Func, 0}, + {"Index", Func, 0}, + {"IndexAny", Func, 0}, + {"IndexByte", Func, 2}, + {"IndexFunc", Func, 0}, + {"IndexRune", Func, 0}, + {"Join", Func, 0}, + {"LastIndex", Func, 0}, + {"LastIndexAny", Func, 0}, + {"LastIndexByte", Func, 5}, + {"LastIndexFunc", Func, 0}, + {"Map", Func, 0}, + {"NewReader", Func, 0}, + {"NewReplacer", Func, 0}, + {"Reader", Type, 0}, + {"Repeat", Func, 0}, + {"Replace", Func, 0}, + {"ReplaceAll", Func, 12}, + {"Replacer", Type, 0}, + {"Split", Func, 0}, + {"SplitAfter", Func, 0}, + {"SplitAfterN", Func, 0}, + {"SplitN", Func, 0}, + {"Title", Func, 0}, + {"ToLower", Func, 0}, + {"ToLowerSpecial", 
Func, 0}, + {"ToTitle", Func, 0}, + {"ToTitleSpecial", Func, 0}, + {"ToUpper", Func, 0}, + {"ToUpperSpecial", Func, 0}, + {"ToValidUTF8", Func, 13}, + {"Trim", Func, 0}, + {"TrimFunc", Func, 0}, + {"TrimLeft", Func, 0}, + {"TrimLeftFunc", Func, 0}, + {"TrimPrefix", Func, 1}, + {"TrimRight", Func, 0}, + {"TrimRightFunc", Func, 0}, + {"TrimSpace", Func, 0}, + {"TrimSuffix", Func, 1}, + }, + "sync": { + {"(*Cond).Broadcast", Method, 0}, + {"(*Cond).Signal", Method, 0}, + {"(*Cond).Wait", Method, 0}, + {"(*Map).CompareAndDelete", Method, 20}, + {"(*Map).CompareAndSwap", Method, 20}, + {"(*Map).Delete", Method, 9}, + {"(*Map).Load", Method, 9}, + {"(*Map).LoadAndDelete", Method, 15}, + {"(*Map).LoadOrStore", Method, 9}, + {"(*Map).Range", Method, 9}, + {"(*Map).Store", Method, 9}, + {"(*Map).Swap", Method, 20}, + {"(*Mutex).Lock", Method, 0}, + {"(*Mutex).TryLock", Method, 18}, + {"(*Mutex).Unlock", Method, 0}, + {"(*Once).Do", Method, 0}, + {"(*Pool).Get", Method, 3}, + {"(*Pool).Put", Method, 3}, + {"(*RWMutex).Lock", Method, 0}, + {"(*RWMutex).RLock", Method, 0}, + {"(*RWMutex).RLocker", Method, 0}, + {"(*RWMutex).RUnlock", Method, 0}, + {"(*RWMutex).TryLock", Method, 18}, + {"(*RWMutex).TryRLock", Method, 18}, + {"(*RWMutex).Unlock", Method, 0}, + {"(*WaitGroup).Add", Method, 0}, + {"(*WaitGroup).Done", Method, 0}, + {"(*WaitGroup).Wait", Method, 0}, + {"Cond", Type, 0}, + {"Cond.L", Field, 0}, + {"Locker", Type, 0}, + {"Map", Type, 9}, + {"Mutex", Type, 0}, + {"NewCond", Func, 0}, + {"Once", Type, 0}, + {"OnceFunc", Func, 21}, + {"OnceValue", Func, 21}, + {"OnceValues", Func, 21}, + {"Pool", Type, 3}, + {"Pool.New", Field, 3}, + {"RWMutex", Type, 0}, + {"WaitGroup", Type, 0}, + }, + "sync/atomic": { + {"(*Bool).CompareAndSwap", Method, 19}, + {"(*Bool).Load", Method, 19}, + {"(*Bool).Store", Method, 19}, + {"(*Bool).Swap", Method, 19}, + {"(*Int32).Add", Method, 19}, + {"(*Int32).CompareAndSwap", Method, 19}, + {"(*Int32).Load", Method, 19}, + {"(*Int32).Store", 
Method, 19}, + {"(*Int32).Swap", Method, 19}, + {"(*Int64).Add", Method, 19}, + {"(*Int64).CompareAndSwap", Method, 19}, + {"(*Int64).Load", Method, 19}, + {"(*Int64).Store", Method, 19}, + {"(*Int64).Swap", Method, 19}, + {"(*Pointer).CompareAndSwap", Method, 19}, + {"(*Pointer).Load", Method, 19}, + {"(*Pointer).Store", Method, 19}, + {"(*Pointer).Swap", Method, 19}, + {"(*Uint32).Add", Method, 19}, + {"(*Uint32).CompareAndSwap", Method, 19}, + {"(*Uint32).Load", Method, 19}, + {"(*Uint32).Store", Method, 19}, + {"(*Uint32).Swap", Method, 19}, + {"(*Uint64).Add", Method, 19}, + {"(*Uint64).CompareAndSwap", Method, 19}, + {"(*Uint64).Load", Method, 19}, + {"(*Uint64).Store", Method, 19}, + {"(*Uint64).Swap", Method, 19}, + {"(*Uintptr).Add", Method, 19}, + {"(*Uintptr).CompareAndSwap", Method, 19}, + {"(*Uintptr).Load", Method, 19}, + {"(*Uintptr).Store", Method, 19}, + {"(*Uintptr).Swap", Method, 19}, + {"(*Value).CompareAndSwap", Method, 17}, + {"(*Value).Load", Method, 4}, + {"(*Value).Store", Method, 4}, + {"(*Value).Swap", Method, 17}, + {"AddInt32", Func, 0}, + {"AddInt64", Func, 0}, + {"AddUint32", Func, 0}, + {"AddUint64", Func, 0}, + {"AddUintptr", Func, 0}, + {"Bool", Type, 19}, + {"CompareAndSwapInt32", Func, 0}, + {"CompareAndSwapInt64", Func, 0}, + {"CompareAndSwapPointer", Func, 0}, + {"CompareAndSwapUint32", Func, 0}, + {"CompareAndSwapUint64", Func, 0}, + {"CompareAndSwapUintptr", Func, 0}, + {"Int32", Type, 19}, + {"Int64", Type, 19}, + {"LoadInt32", Func, 0}, + {"LoadInt64", Func, 0}, + {"LoadPointer", Func, 0}, + {"LoadUint32", Func, 0}, + {"LoadUint64", Func, 0}, + {"LoadUintptr", Func, 0}, + {"Pointer", Type, 19}, + {"StoreInt32", Func, 0}, + {"StoreInt64", Func, 0}, + {"StorePointer", Func, 0}, + {"StoreUint32", Func, 0}, + {"StoreUint64", Func, 0}, + {"StoreUintptr", Func, 0}, + {"SwapInt32", Func, 2}, + {"SwapInt64", Func, 2}, + {"SwapPointer", Func, 2}, + {"SwapUint32", Func, 2}, + {"SwapUint64", Func, 2}, + {"SwapUintptr", Func, 2}, + 
{"Uint32", Type, 19}, + {"Uint64", Type, 19}, + {"Uintptr", Type, 19}, + {"Value", Type, 4}, + }, + "syscall": { + {"(*Cmsghdr).SetLen", Method, 0}, + {"(*DLL).FindProc", Method, 0}, + {"(*DLL).MustFindProc", Method, 0}, + {"(*DLL).Release", Method, 0}, + {"(*DLLError).Error", Method, 0}, + {"(*DLLError).Unwrap", Method, 16}, + {"(*Filetime).Nanoseconds", Method, 0}, + {"(*Iovec).SetLen", Method, 0}, + {"(*LazyDLL).Handle", Method, 0}, + {"(*LazyDLL).Load", Method, 0}, + {"(*LazyDLL).NewProc", Method, 0}, + {"(*LazyProc).Addr", Method, 0}, + {"(*LazyProc).Call", Method, 0}, + {"(*LazyProc).Find", Method, 0}, + {"(*Msghdr).SetControllen", Method, 0}, + {"(*Proc).Addr", Method, 0}, + {"(*Proc).Call", Method, 0}, + {"(*PtraceRegs).PC", Method, 0}, + {"(*PtraceRegs).SetPC", Method, 0}, + {"(*RawSockaddrAny).Sockaddr", Method, 0}, + {"(*SID).Copy", Method, 0}, + {"(*SID).Len", Method, 0}, + {"(*SID).LookupAccount", Method, 0}, + {"(*SID).String", Method, 0}, + {"(*Timespec).Nano", Method, 0}, + {"(*Timespec).Unix", Method, 0}, + {"(*Timeval).Nano", Method, 0}, + {"(*Timeval).Nanoseconds", Method, 0}, + {"(*Timeval).Unix", Method, 0}, + {"(Errno).Error", Method, 0}, + {"(Errno).Is", Method, 13}, + {"(Errno).Temporary", Method, 0}, + {"(Errno).Timeout", Method, 0}, + {"(Signal).Signal", Method, 0}, + {"(Signal).String", Method, 0}, + {"(Token).Close", Method, 0}, + {"(Token).GetTokenPrimaryGroup", Method, 0}, + {"(Token).GetTokenUser", Method, 0}, + {"(Token).GetUserProfileDirectory", Method, 0}, + {"(WaitStatus).Continued", Method, 0}, + {"(WaitStatus).CoreDump", Method, 0}, + {"(WaitStatus).ExitStatus", Method, 0}, + {"(WaitStatus).Exited", Method, 0}, + {"(WaitStatus).Signal", Method, 0}, + {"(WaitStatus).Signaled", Method, 0}, + {"(WaitStatus).StopSignal", Method, 0}, + {"(WaitStatus).Stopped", Method, 0}, + {"(WaitStatus).TrapCause", Method, 0}, + {"AF_ALG", Const, 0}, + {"AF_APPLETALK", Const, 0}, + {"AF_ARP", Const, 0}, + {"AF_ASH", Const, 0}, + {"AF_ATM", Const, 
0}, + {"AF_ATMPVC", Const, 0}, + {"AF_ATMSVC", Const, 0}, + {"AF_AX25", Const, 0}, + {"AF_BLUETOOTH", Const, 0}, + {"AF_BRIDGE", Const, 0}, + {"AF_CAIF", Const, 0}, + {"AF_CAN", Const, 0}, + {"AF_CCITT", Const, 0}, + {"AF_CHAOS", Const, 0}, + {"AF_CNT", Const, 0}, + {"AF_COIP", Const, 0}, + {"AF_DATAKIT", Const, 0}, + {"AF_DECnet", Const, 0}, + {"AF_DLI", Const, 0}, + {"AF_E164", Const, 0}, + {"AF_ECMA", Const, 0}, + {"AF_ECONET", Const, 0}, + {"AF_ENCAP", Const, 1}, + {"AF_FILE", Const, 0}, + {"AF_HYLINK", Const, 0}, + {"AF_IEEE80211", Const, 0}, + {"AF_IEEE802154", Const, 0}, + {"AF_IMPLINK", Const, 0}, + {"AF_INET", Const, 0}, + {"AF_INET6", Const, 0}, + {"AF_INET6_SDP", Const, 3}, + {"AF_INET_SDP", Const, 3}, + {"AF_IPX", Const, 0}, + {"AF_IRDA", Const, 0}, + {"AF_ISDN", Const, 0}, + {"AF_ISO", Const, 0}, + {"AF_IUCV", Const, 0}, + {"AF_KEY", Const, 0}, + {"AF_LAT", Const, 0}, + {"AF_LINK", Const, 0}, + {"AF_LLC", Const, 0}, + {"AF_LOCAL", Const, 0}, + {"AF_MAX", Const, 0}, + {"AF_MPLS", Const, 1}, + {"AF_NATM", Const, 0}, + {"AF_NDRV", Const, 0}, + {"AF_NETBEUI", Const, 0}, + {"AF_NETBIOS", Const, 0}, + {"AF_NETGRAPH", Const, 0}, + {"AF_NETLINK", Const, 0}, + {"AF_NETROM", Const, 0}, + {"AF_NS", Const, 0}, + {"AF_OROUTE", Const, 1}, + {"AF_OSI", Const, 0}, + {"AF_PACKET", Const, 0}, + {"AF_PHONET", Const, 0}, + {"AF_PPP", Const, 0}, + {"AF_PPPOX", Const, 0}, + {"AF_PUP", Const, 0}, + {"AF_RDS", Const, 0}, + {"AF_RESERVED_36", Const, 0}, + {"AF_ROSE", Const, 0}, + {"AF_ROUTE", Const, 0}, + {"AF_RXRPC", Const, 0}, + {"AF_SCLUSTER", Const, 0}, + {"AF_SECURITY", Const, 0}, + {"AF_SIP", Const, 0}, + {"AF_SLOW", Const, 0}, + {"AF_SNA", Const, 0}, + {"AF_SYSTEM", Const, 0}, + {"AF_TIPC", Const, 0}, + {"AF_UNIX", Const, 0}, + {"AF_UNSPEC", Const, 0}, + {"AF_UTUN", Const, 16}, + {"AF_VENDOR00", Const, 0}, + {"AF_VENDOR01", Const, 0}, + {"AF_VENDOR02", Const, 0}, + {"AF_VENDOR03", Const, 0}, + {"AF_VENDOR04", Const, 0}, + {"AF_VENDOR05", Const, 0}, + {"AF_VENDOR06", 
Const, 0}, + {"AF_VENDOR07", Const, 0}, + {"AF_VENDOR08", Const, 0}, + {"AF_VENDOR09", Const, 0}, + {"AF_VENDOR10", Const, 0}, + {"AF_VENDOR11", Const, 0}, + {"AF_VENDOR12", Const, 0}, + {"AF_VENDOR13", Const, 0}, + {"AF_VENDOR14", Const, 0}, + {"AF_VENDOR15", Const, 0}, + {"AF_VENDOR16", Const, 0}, + {"AF_VENDOR17", Const, 0}, + {"AF_VENDOR18", Const, 0}, + {"AF_VENDOR19", Const, 0}, + {"AF_VENDOR20", Const, 0}, + {"AF_VENDOR21", Const, 0}, + {"AF_VENDOR22", Const, 0}, + {"AF_VENDOR23", Const, 0}, + {"AF_VENDOR24", Const, 0}, + {"AF_VENDOR25", Const, 0}, + {"AF_VENDOR26", Const, 0}, + {"AF_VENDOR27", Const, 0}, + {"AF_VENDOR28", Const, 0}, + {"AF_VENDOR29", Const, 0}, + {"AF_VENDOR30", Const, 0}, + {"AF_VENDOR31", Const, 0}, + {"AF_VENDOR32", Const, 0}, + {"AF_VENDOR33", Const, 0}, + {"AF_VENDOR34", Const, 0}, + {"AF_VENDOR35", Const, 0}, + {"AF_VENDOR36", Const, 0}, + {"AF_VENDOR37", Const, 0}, + {"AF_VENDOR38", Const, 0}, + {"AF_VENDOR39", Const, 0}, + {"AF_VENDOR40", Const, 0}, + {"AF_VENDOR41", Const, 0}, + {"AF_VENDOR42", Const, 0}, + {"AF_VENDOR43", Const, 0}, + {"AF_VENDOR44", Const, 0}, + {"AF_VENDOR45", Const, 0}, + {"AF_VENDOR46", Const, 0}, + {"AF_VENDOR47", Const, 0}, + {"AF_WANPIPE", Const, 0}, + {"AF_X25", Const, 0}, + {"AI_CANONNAME", Const, 1}, + {"AI_NUMERICHOST", Const, 1}, + {"AI_PASSIVE", Const, 1}, + {"APPLICATION_ERROR", Const, 0}, + {"ARPHRD_ADAPT", Const, 0}, + {"ARPHRD_APPLETLK", Const, 0}, + {"ARPHRD_ARCNET", Const, 0}, + {"ARPHRD_ASH", Const, 0}, + {"ARPHRD_ATM", Const, 0}, + {"ARPHRD_AX25", Const, 0}, + {"ARPHRD_BIF", Const, 0}, + {"ARPHRD_CHAOS", Const, 0}, + {"ARPHRD_CISCO", Const, 0}, + {"ARPHRD_CSLIP", Const, 0}, + {"ARPHRD_CSLIP6", Const, 0}, + {"ARPHRD_DDCMP", Const, 0}, + {"ARPHRD_DLCI", Const, 0}, + {"ARPHRD_ECONET", Const, 0}, + {"ARPHRD_EETHER", Const, 0}, + {"ARPHRD_ETHER", Const, 0}, + {"ARPHRD_EUI64", Const, 0}, + {"ARPHRD_FCAL", Const, 0}, + {"ARPHRD_FCFABRIC", Const, 0}, + {"ARPHRD_FCPL", Const, 0}, + {"ARPHRD_FCPP", 
Const, 0}, + {"ARPHRD_FDDI", Const, 0}, + {"ARPHRD_FRAD", Const, 0}, + {"ARPHRD_FRELAY", Const, 1}, + {"ARPHRD_HDLC", Const, 0}, + {"ARPHRD_HIPPI", Const, 0}, + {"ARPHRD_HWX25", Const, 0}, + {"ARPHRD_IEEE1394", Const, 0}, + {"ARPHRD_IEEE802", Const, 0}, + {"ARPHRD_IEEE80211", Const, 0}, + {"ARPHRD_IEEE80211_PRISM", Const, 0}, + {"ARPHRD_IEEE80211_RADIOTAP", Const, 0}, + {"ARPHRD_IEEE802154", Const, 0}, + {"ARPHRD_IEEE802154_PHY", Const, 0}, + {"ARPHRD_IEEE802_TR", Const, 0}, + {"ARPHRD_INFINIBAND", Const, 0}, + {"ARPHRD_IPDDP", Const, 0}, + {"ARPHRD_IPGRE", Const, 0}, + {"ARPHRD_IRDA", Const, 0}, + {"ARPHRD_LAPB", Const, 0}, + {"ARPHRD_LOCALTLK", Const, 0}, + {"ARPHRD_LOOPBACK", Const, 0}, + {"ARPHRD_METRICOM", Const, 0}, + {"ARPHRD_NETROM", Const, 0}, + {"ARPHRD_NONE", Const, 0}, + {"ARPHRD_PIMREG", Const, 0}, + {"ARPHRD_PPP", Const, 0}, + {"ARPHRD_PRONET", Const, 0}, + {"ARPHRD_RAWHDLC", Const, 0}, + {"ARPHRD_ROSE", Const, 0}, + {"ARPHRD_RSRVD", Const, 0}, + {"ARPHRD_SIT", Const, 0}, + {"ARPHRD_SKIP", Const, 0}, + {"ARPHRD_SLIP", Const, 0}, + {"ARPHRD_SLIP6", Const, 0}, + {"ARPHRD_STRIP", Const, 1}, + {"ARPHRD_TUNNEL", Const, 0}, + {"ARPHRD_TUNNEL6", Const, 0}, + {"ARPHRD_VOID", Const, 0}, + {"ARPHRD_X25", Const, 0}, + {"AUTHTYPE_CLIENT", Const, 0}, + {"AUTHTYPE_SERVER", Const, 0}, + {"Accept", Func, 0}, + {"Accept4", Func, 1}, + {"AcceptEx", Func, 0}, + {"Access", Func, 0}, + {"Acct", Func, 0}, + {"AddrinfoW", Type, 1}, + {"AddrinfoW.Addr", Field, 1}, + {"AddrinfoW.Addrlen", Field, 1}, + {"AddrinfoW.Canonname", Field, 1}, + {"AddrinfoW.Family", Field, 1}, + {"AddrinfoW.Flags", Field, 1}, + {"AddrinfoW.Next", Field, 1}, + {"AddrinfoW.Protocol", Field, 1}, + {"AddrinfoW.Socktype", Field, 1}, + {"Adjtime", Func, 0}, + {"Adjtimex", Func, 0}, + {"AllThreadsSyscall", Func, 16}, + {"AllThreadsSyscall6", Func, 16}, + {"AttachLsf", Func, 0}, + {"B0", Const, 0}, + {"B1000000", Const, 0}, + {"B110", Const, 0}, + {"B115200", Const, 0}, + {"B1152000", Const, 0}, + {"B1200", 
Const, 0}, + {"B134", Const, 0}, + {"B14400", Const, 1}, + {"B150", Const, 0}, + {"B1500000", Const, 0}, + {"B1800", Const, 0}, + {"B19200", Const, 0}, + {"B200", Const, 0}, + {"B2000000", Const, 0}, + {"B230400", Const, 0}, + {"B2400", Const, 0}, + {"B2500000", Const, 0}, + {"B28800", Const, 1}, + {"B300", Const, 0}, + {"B3000000", Const, 0}, + {"B3500000", Const, 0}, + {"B38400", Const, 0}, + {"B4000000", Const, 0}, + {"B460800", Const, 0}, + {"B4800", Const, 0}, + {"B50", Const, 0}, + {"B500000", Const, 0}, + {"B57600", Const, 0}, + {"B576000", Const, 0}, + {"B600", Const, 0}, + {"B7200", Const, 1}, + {"B75", Const, 0}, + {"B76800", Const, 1}, + {"B921600", Const, 0}, + {"B9600", Const, 0}, + {"BASE_PROTOCOL", Const, 2}, + {"BIOCFEEDBACK", Const, 0}, + {"BIOCFLUSH", Const, 0}, + {"BIOCGBLEN", Const, 0}, + {"BIOCGDIRECTION", Const, 0}, + {"BIOCGDIRFILT", Const, 1}, + {"BIOCGDLT", Const, 0}, + {"BIOCGDLTLIST", Const, 0}, + {"BIOCGETBUFMODE", Const, 0}, + {"BIOCGETIF", Const, 0}, + {"BIOCGETZMAX", Const, 0}, + {"BIOCGFEEDBACK", Const, 1}, + {"BIOCGFILDROP", Const, 1}, + {"BIOCGHDRCMPLT", Const, 0}, + {"BIOCGRSIG", Const, 0}, + {"BIOCGRTIMEOUT", Const, 0}, + {"BIOCGSEESENT", Const, 0}, + {"BIOCGSTATS", Const, 0}, + {"BIOCGSTATSOLD", Const, 1}, + {"BIOCGTSTAMP", Const, 1}, + {"BIOCIMMEDIATE", Const, 0}, + {"BIOCLOCK", Const, 0}, + {"BIOCPROMISC", Const, 0}, + {"BIOCROTZBUF", Const, 0}, + {"BIOCSBLEN", Const, 0}, + {"BIOCSDIRECTION", Const, 0}, + {"BIOCSDIRFILT", Const, 1}, + {"BIOCSDLT", Const, 0}, + {"BIOCSETBUFMODE", Const, 0}, + {"BIOCSETF", Const, 0}, + {"BIOCSETFNR", Const, 0}, + {"BIOCSETIF", Const, 0}, + {"BIOCSETWF", Const, 0}, + {"BIOCSETZBUF", Const, 0}, + {"BIOCSFEEDBACK", Const, 1}, + {"BIOCSFILDROP", Const, 1}, + {"BIOCSHDRCMPLT", Const, 0}, + {"BIOCSRSIG", Const, 0}, + {"BIOCSRTIMEOUT", Const, 0}, + {"BIOCSSEESENT", Const, 0}, + {"BIOCSTCPF", Const, 1}, + {"BIOCSTSTAMP", Const, 1}, + {"BIOCSUDPF", Const, 1}, + {"BIOCVERSION", Const, 0}, + {"BPF_A", 
Const, 0}, + {"BPF_ABS", Const, 0}, + {"BPF_ADD", Const, 0}, + {"BPF_ALIGNMENT", Const, 0}, + {"BPF_ALIGNMENT32", Const, 1}, + {"BPF_ALU", Const, 0}, + {"BPF_AND", Const, 0}, + {"BPF_B", Const, 0}, + {"BPF_BUFMODE_BUFFER", Const, 0}, + {"BPF_BUFMODE_ZBUF", Const, 0}, + {"BPF_DFLTBUFSIZE", Const, 1}, + {"BPF_DIRECTION_IN", Const, 1}, + {"BPF_DIRECTION_OUT", Const, 1}, + {"BPF_DIV", Const, 0}, + {"BPF_H", Const, 0}, + {"BPF_IMM", Const, 0}, + {"BPF_IND", Const, 0}, + {"BPF_JA", Const, 0}, + {"BPF_JEQ", Const, 0}, + {"BPF_JGE", Const, 0}, + {"BPF_JGT", Const, 0}, + {"BPF_JMP", Const, 0}, + {"BPF_JSET", Const, 0}, + {"BPF_K", Const, 0}, + {"BPF_LD", Const, 0}, + {"BPF_LDX", Const, 0}, + {"BPF_LEN", Const, 0}, + {"BPF_LSH", Const, 0}, + {"BPF_MAJOR_VERSION", Const, 0}, + {"BPF_MAXBUFSIZE", Const, 0}, + {"BPF_MAXINSNS", Const, 0}, + {"BPF_MEM", Const, 0}, + {"BPF_MEMWORDS", Const, 0}, + {"BPF_MINBUFSIZE", Const, 0}, + {"BPF_MINOR_VERSION", Const, 0}, + {"BPF_MISC", Const, 0}, + {"BPF_MSH", Const, 0}, + {"BPF_MUL", Const, 0}, + {"BPF_NEG", Const, 0}, + {"BPF_OR", Const, 0}, + {"BPF_RELEASE", Const, 0}, + {"BPF_RET", Const, 0}, + {"BPF_RSH", Const, 0}, + {"BPF_ST", Const, 0}, + {"BPF_STX", Const, 0}, + {"BPF_SUB", Const, 0}, + {"BPF_TAX", Const, 0}, + {"BPF_TXA", Const, 0}, + {"BPF_T_BINTIME", Const, 1}, + {"BPF_T_BINTIME_FAST", Const, 1}, + {"BPF_T_BINTIME_MONOTONIC", Const, 1}, + {"BPF_T_BINTIME_MONOTONIC_FAST", Const, 1}, + {"BPF_T_FAST", Const, 1}, + {"BPF_T_FLAG_MASK", Const, 1}, + {"BPF_T_FORMAT_MASK", Const, 1}, + {"BPF_T_MICROTIME", Const, 1}, + {"BPF_T_MICROTIME_FAST", Const, 1}, + {"BPF_T_MICROTIME_MONOTONIC", Const, 1}, + {"BPF_T_MICROTIME_MONOTONIC_FAST", Const, 1}, + {"BPF_T_MONOTONIC", Const, 1}, + {"BPF_T_MONOTONIC_FAST", Const, 1}, + {"BPF_T_NANOTIME", Const, 1}, + {"BPF_T_NANOTIME_FAST", Const, 1}, + {"BPF_T_NANOTIME_MONOTONIC", Const, 1}, + {"BPF_T_NANOTIME_MONOTONIC_FAST", Const, 1}, + {"BPF_T_NONE", Const, 1}, + {"BPF_T_NORMAL", Const, 1}, + {"BPF_W", 
Const, 0}, + {"BPF_X", Const, 0}, + {"BRKINT", Const, 0}, + {"Bind", Func, 0}, + {"BindToDevice", Func, 0}, + {"BpfBuflen", Func, 0}, + {"BpfDatalink", Func, 0}, + {"BpfHdr", Type, 0}, + {"BpfHdr.Caplen", Field, 0}, + {"BpfHdr.Datalen", Field, 0}, + {"BpfHdr.Hdrlen", Field, 0}, + {"BpfHdr.Pad_cgo_0", Field, 0}, + {"BpfHdr.Tstamp", Field, 0}, + {"BpfHeadercmpl", Func, 0}, + {"BpfInsn", Type, 0}, + {"BpfInsn.Code", Field, 0}, + {"BpfInsn.Jf", Field, 0}, + {"BpfInsn.Jt", Field, 0}, + {"BpfInsn.K", Field, 0}, + {"BpfInterface", Func, 0}, + {"BpfJump", Func, 0}, + {"BpfProgram", Type, 0}, + {"BpfProgram.Insns", Field, 0}, + {"BpfProgram.Len", Field, 0}, + {"BpfProgram.Pad_cgo_0", Field, 0}, + {"BpfStat", Type, 0}, + {"BpfStat.Capt", Field, 2}, + {"BpfStat.Drop", Field, 0}, + {"BpfStat.Padding", Field, 2}, + {"BpfStat.Recv", Field, 0}, + {"BpfStats", Func, 0}, + {"BpfStmt", Func, 0}, + {"BpfTimeout", Func, 0}, + {"BpfTimeval", Type, 2}, + {"BpfTimeval.Sec", Field, 2}, + {"BpfTimeval.Usec", Field, 2}, + {"BpfVersion", Type, 0}, + {"BpfVersion.Major", Field, 0}, + {"BpfVersion.Minor", Field, 0}, + {"BpfZbuf", Type, 0}, + {"BpfZbuf.Bufa", Field, 0}, + {"BpfZbuf.Bufb", Field, 0}, + {"BpfZbuf.Buflen", Field, 0}, + {"BpfZbufHeader", Type, 0}, + {"BpfZbufHeader.Kernel_gen", Field, 0}, + {"BpfZbufHeader.Kernel_len", Field, 0}, + {"BpfZbufHeader.User_gen", Field, 0}, + {"BpfZbufHeader.X_bzh_pad", Field, 0}, + {"ByHandleFileInformation", Type, 0}, + {"ByHandleFileInformation.CreationTime", Field, 0}, + {"ByHandleFileInformation.FileAttributes", Field, 0}, + {"ByHandleFileInformation.FileIndexHigh", Field, 0}, + {"ByHandleFileInformation.FileIndexLow", Field, 0}, + {"ByHandleFileInformation.FileSizeHigh", Field, 0}, + {"ByHandleFileInformation.FileSizeLow", Field, 0}, + {"ByHandleFileInformation.LastAccessTime", Field, 0}, + {"ByHandleFileInformation.LastWriteTime", Field, 0}, + {"ByHandleFileInformation.NumberOfLinks", Field, 0}, + {"ByHandleFileInformation.VolumeSerialNumber", 
Field, 0}, + {"BytePtrFromString", Func, 1}, + {"ByteSliceFromString", Func, 1}, + {"CCR0_FLUSH", Const, 1}, + {"CERT_CHAIN_POLICY_AUTHENTICODE", Const, 0}, + {"CERT_CHAIN_POLICY_AUTHENTICODE_TS", Const, 0}, + {"CERT_CHAIN_POLICY_BASE", Const, 0}, + {"CERT_CHAIN_POLICY_BASIC_CONSTRAINTS", Const, 0}, + {"CERT_CHAIN_POLICY_EV", Const, 0}, + {"CERT_CHAIN_POLICY_MICROSOFT_ROOT", Const, 0}, + {"CERT_CHAIN_POLICY_NT_AUTH", Const, 0}, + {"CERT_CHAIN_POLICY_SSL", Const, 0}, + {"CERT_E_CN_NO_MATCH", Const, 0}, + {"CERT_E_EXPIRED", Const, 0}, + {"CERT_E_PURPOSE", Const, 0}, + {"CERT_E_ROLE", Const, 0}, + {"CERT_E_UNTRUSTEDROOT", Const, 0}, + {"CERT_STORE_ADD_ALWAYS", Const, 0}, + {"CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG", Const, 0}, + {"CERT_STORE_PROV_MEMORY", Const, 0}, + {"CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT", Const, 0}, + {"CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT", Const, 0}, + {"CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT", Const, 0}, + {"CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT", Const, 0}, + {"CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT", Const, 0}, + {"CERT_TRUST_INVALID_BASIC_CONSTRAINTS", Const, 0}, + {"CERT_TRUST_INVALID_EXTENSION", Const, 0}, + {"CERT_TRUST_INVALID_NAME_CONSTRAINTS", Const, 0}, + {"CERT_TRUST_INVALID_POLICY_CONSTRAINTS", Const, 0}, + {"CERT_TRUST_IS_CYCLIC", Const, 0}, + {"CERT_TRUST_IS_EXPLICIT_DISTRUST", Const, 0}, + {"CERT_TRUST_IS_NOT_SIGNATURE_VALID", Const, 0}, + {"CERT_TRUST_IS_NOT_TIME_VALID", Const, 0}, + {"CERT_TRUST_IS_NOT_VALID_FOR_USAGE", Const, 0}, + {"CERT_TRUST_IS_OFFLINE_REVOCATION", Const, 0}, + {"CERT_TRUST_IS_REVOKED", Const, 0}, + {"CERT_TRUST_IS_UNTRUSTED_ROOT", Const, 0}, + {"CERT_TRUST_NO_ERROR", Const, 0}, + {"CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY", Const, 0}, + {"CERT_TRUST_REVOCATION_STATUS_UNKNOWN", Const, 0}, + {"CFLUSH", Const, 1}, + {"CLOCAL", Const, 0}, + {"CLONE_CHILD_CLEARTID", Const, 2}, + {"CLONE_CHILD_SETTID", Const, 2}, + {"CLONE_CLEAR_SIGHAND", Const, 20}, + {"CLONE_CSIGNAL", Const, 3}, + 
{"CLONE_DETACHED", Const, 2}, + {"CLONE_FILES", Const, 2}, + {"CLONE_FS", Const, 2}, + {"CLONE_INTO_CGROUP", Const, 20}, + {"CLONE_IO", Const, 2}, + {"CLONE_NEWCGROUP", Const, 20}, + {"CLONE_NEWIPC", Const, 2}, + {"CLONE_NEWNET", Const, 2}, + {"CLONE_NEWNS", Const, 2}, + {"CLONE_NEWPID", Const, 2}, + {"CLONE_NEWTIME", Const, 20}, + {"CLONE_NEWUSER", Const, 2}, + {"CLONE_NEWUTS", Const, 2}, + {"CLONE_PARENT", Const, 2}, + {"CLONE_PARENT_SETTID", Const, 2}, + {"CLONE_PID", Const, 3}, + {"CLONE_PIDFD", Const, 20}, + {"CLONE_PTRACE", Const, 2}, + {"CLONE_SETTLS", Const, 2}, + {"CLONE_SIGHAND", Const, 2}, + {"CLONE_SYSVSEM", Const, 2}, + {"CLONE_THREAD", Const, 2}, + {"CLONE_UNTRACED", Const, 2}, + {"CLONE_VFORK", Const, 2}, + {"CLONE_VM", Const, 2}, + {"CPUID_CFLUSH", Const, 1}, + {"CREAD", Const, 0}, + {"CREATE_ALWAYS", Const, 0}, + {"CREATE_NEW", Const, 0}, + {"CREATE_NEW_PROCESS_GROUP", Const, 1}, + {"CREATE_UNICODE_ENVIRONMENT", Const, 0}, + {"CRYPT_DEFAULT_CONTAINER_OPTIONAL", Const, 0}, + {"CRYPT_DELETEKEYSET", Const, 0}, + {"CRYPT_MACHINE_KEYSET", Const, 0}, + {"CRYPT_NEWKEYSET", Const, 0}, + {"CRYPT_SILENT", Const, 0}, + {"CRYPT_VERIFYCONTEXT", Const, 0}, + {"CS5", Const, 0}, + {"CS6", Const, 0}, + {"CS7", Const, 0}, + {"CS8", Const, 0}, + {"CSIZE", Const, 0}, + {"CSTART", Const, 1}, + {"CSTATUS", Const, 1}, + {"CSTOP", Const, 1}, + {"CSTOPB", Const, 0}, + {"CSUSP", Const, 1}, + {"CTL_MAXNAME", Const, 0}, + {"CTL_NET", Const, 0}, + {"CTL_QUERY", Const, 1}, + {"CTRL_BREAK_EVENT", Const, 1}, + {"CTRL_CLOSE_EVENT", Const, 14}, + {"CTRL_C_EVENT", Const, 1}, + {"CTRL_LOGOFF_EVENT", Const, 14}, + {"CTRL_SHUTDOWN_EVENT", Const, 14}, + {"CancelIo", Func, 0}, + {"CancelIoEx", Func, 1}, + {"CertAddCertificateContextToStore", Func, 0}, + {"CertChainContext", Type, 0}, + {"CertChainContext.ChainCount", Field, 0}, + {"CertChainContext.Chains", Field, 0}, + {"CertChainContext.HasRevocationFreshnessTime", Field, 0}, + {"CertChainContext.LowerQualityChainCount", Field, 0}, + 
{"CertChainContext.LowerQualityChains", Field, 0}, + {"CertChainContext.RevocationFreshnessTime", Field, 0}, + {"CertChainContext.Size", Field, 0}, + {"CertChainContext.TrustStatus", Field, 0}, + {"CertChainElement", Type, 0}, + {"CertChainElement.ApplicationUsage", Field, 0}, + {"CertChainElement.CertContext", Field, 0}, + {"CertChainElement.ExtendedErrorInfo", Field, 0}, + {"CertChainElement.IssuanceUsage", Field, 0}, + {"CertChainElement.RevocationInfo", Field, 0}, + {"CertChainElement.Size", Field, 0}, + {"CertChainElement.TrustStatus", Field, 0}, + {"CertChainPara", Type, 0}, + {"CertChainPara.CacheResync", Field, 0}, + {"CertChainPara.CheckRevocationFreshnessTime", Field, 0}, + {"CertChainPara.RequestedUsage", Field, 0}, + {"CertChainPara.RequstedIssuancePolicy", Field, 0}, + {"CertChainPara.RevocationFreshnessTime", Field, 0}, + {"CertChainPara.Size", Field, 0}, + {"CertChainPara.URLRetrievalTimeout", Field, 0}, + {"CertChainPolicyPara", Type, 0}, + {"CertChainPolicyPara.ExtraPolicyPara", Field, 0}, + {"CertChainPolicyPara.Flags", Field, 0}, + {"CertChainPolicyPara.Size", Field, 0}, + {"CertChainPolicyStatus", Type, 0}, + {"CertChainPolicyStatus.ChainIndex", Field, 0}, + {"CertChainPolicyStatus.ElementIndex", Field, 0}, + {"CertChainPolicyStatus.Error", Field, 0}, + {"CertChainPolicyStatus.ExtraPolicyStatus", Field, 0}, + {"CertChainPolicyStatus.Size", Field, 0}, + {"CertCloseStore", Func, 0}, + {"CertContext", Type, 0}, + {"CertContext.CertInfo", Field, 0}, + {"CertContext.EncodedCert", Field, 0}, + {"CertContext.EncodingType", Field, 0}, + {"CertContext.Length", Field, 0}, + {"CertContext.Store", Field, 0}, + {"CertCreateCertificateContext", Func, 0}, + {"CertEnhKeyUsage", Type, 0}, + {"CertEnhKeyUsage.Length", Field, 0}, + {"CertEnhKeyUsage.UsageIdentifiers", Field, 0}, + {"CertEnumCertificatesInStore", Func, 0}, + {"CertFreeCertificateChain", Func, 0}, + {"CertFreeCertificateContext", Func, 0}, + {"CertGetCertificateChain", Func, 0}, + {"CertInfo", Type, 
11}, + {"CertOpenStore", Func, 0}, + {"CertOpenSystemStore", Func, 0}, + {"CertRevocationCrlInfo", Type, 11}, + {"CertRevocationInfo", Type, 0}, + {"CertRevocationInfo.CrlInfo", Field, 0}, + {"CertRevocationInfo.FreshnessTime", Field, 0}, + {"CertRevocationInfo.HasFreshnessTime", Field, 0}, + {"CertRevocationInfo.OidSpecificInfo", Field, 0}, + {"CertRevocationInfo.RevocationOid", Field, 0}, + {"CertRevocationInfo.RevocationResult", Field, 0}, + {"CertRevocationInfo.Size", Field, 0}, + {"CertSimpleChain", Type, 0}, + {"CertSimpleChain.Elements", Field, 0}, + {"CertSimpleChain.HasRevocationFreshnessTime", Field, 0}, + {"CertSimpleChain.NumElements", Field, 0}, + {"CertSimpleChain.RevocationFreshnessTime", Field, 0}, + {"CertSimpleChain.Size", Field, 0}, + {"CertSimpleChain.TrustListInfo", Field, 0}, + {"CertSimpleChain.TrustStatus", Field, 0}, + {"CertTrustListInfo", Type, 11}, + {"CertTrustStatus", Type, 0}, + {"CertTrustStatus.ErrorStatus", Field, 0}, + {"CertTrustStatus.InfoStatus", Field, 0}, + {"CertUsageMatch", Type, 0}, + {"CertUsageMatch.Type", Field, 0}, + {"CertUsageMatch.Usage", Field, 0}, + {"CertVerifyCertificateChainPolicy", Func, 0}, + {"Chdir", Func, 0}, + {"CheckBpfVersion", Func, 0}, + {"Chflags", Func, 0}, + {"Chmod", Func, 0}, + {"Chown", Func, 0}, + {"Chroot", Func, 0}, + {"Clearenv", Func, 0}, + {"Close", Func, 0}, + {"CloseHandle", Func, 0}, + {"CloseOnExec", Func, 0}, + {"Closesocket", Func, 0}, + {"CmsgLen", Func, 0}, + {"CmsgSpace", Func, 0}, + {"Cmsghdr", Type, 0}, + {"Cmsghdr.Len", Field, 0}, + {"Cmsghdr.Level", Field, 0}, + {"Cmsghdr.Type", Field, 0}, + {"Cmsghdr.X__cmsg_data", Field, 0}, + {"CommandLineToArgv", Func, 0}, + {"ComputerName", Func, 0}, + {"Conn", Type, 9}, + {"Connect", Func, 0}, + {"ConnectEx", Func, 1}, + {"ConvertSidToStringSid", Func, 0}, + {"ConvertStringSidToSid", Func, 0}, + {"CopySid", Func, 0}, + {"Creat", Func, 0}, + {"CreateDirectory", Func, 0}, + {"CreateFile", Func, 0}, + {"CreateFileMapping", Func, 0}, + 
{"CreateHardLink", Func, 4}, + {"CreateIoCompletionPort", Func, 0}, + {"CreatePipe", Func, 0}, + {"CreateProcess", Func, 0}, + {"CreateProcessAsUser", Func, 10}, + {"CreateSymbolicLink", Func, 4}, + {"CreateToolhelp32Snapshot", Func, 4}, + {"Credential", Type, 0}, + {"Credential.Gid", Field, 0}, + {"Credential.Groups", Field, 0}, + {"Credential.NoSetGroups", Field, 9}, + {"Credential.Uid", Field, 0}, + {"CryptAcquireContext", Func, 0}, + {"CryptGenRandom", Func, 0}, + {"CryptReleaseContext", Func, 0}, + {"DIOCBSFLUSH", Const, 1}, + {"DIOCOSFPFLUSH", Const, 1}, + {"DLL", Type, 0}, + {"DLL.Handle", Field, 0}, + {"DLL.Name", Field, 0}, + {"DLLError", Type, 0}, + {"DLLError.Err", Field, 0}, + {"DLLError.Msg", Field, 0}, + {"DLLError.ObjName", Field, 0}, + {"DLT_A429", Const, 0}, + {"DLT_A653_ICM", Const, 0}, + {"DLT_AIRONET_HEADER", Const, 0}, + {"DLT_AOS", Const, 1}, + {"DLT_APPLE_IP_OVER_IEEE1394", Const, 0}, + {"DLT_ARCNET", Const, 0}, + {"DLT_ARCNET_LINUX", Const, 0}, + {"DLT_ATM_CLIP", Const, 0}, + {"DLT_ATM_RFC1483", Const, 0}, + {"DLT_AURORA", Const, 0}, + {"DLT_AX25", Const, 0}, + {"DLT_AX25_KISS", Const, 0}, + {"DLT_BACNET_MS_TP", Const, 0}, + {"DLT_BLUETOOTH_HCI_H4", Const, 0}, + {"DLT_BLUETOOTH_HCI_H4_WITH_PHDR", Const, 0}, + {"DLT_CAN20B", Const, 0}, + {"DLT_CAN_SOCKETCAN", Const, 1}, + {"DLT_CHAOS", Const, 0}, + {"DLT_CHDLC", Const, 0}, + {"DLT_CISCO_IOS", Const, 0}, + {"DLT_C_HDLC", Const, 0}, + {"DLT_C_HDLC_WITH_DIR", Const, 0}, + {"DLT_DBUS", Const, 1}, + {"DLT_DECT", Const, 1}, + {"DLT_DOCSIS", Const, 0}, + {"DLT_DVB_CI", Const, 1}, + {"DLT_ECONET", Const, 0}, + {"DLT_EN10MB", Const, 0}, + {"DLT_EN3MB", Const, 0}, + {"DLT_ENC", Const, 0}, + {"DLT_ERF", Const, 0}, + {"DLT_ERF_ETH", Const, 0}, + {"DLT_ERF_POS", Const, 0}, + {"DLT_FC_2", Const, 1}, + {"DLT_FC_2_WITH_FRAME_DELIMS", Const, 1}, + {"DLT_FDDI", Const, 0}, + {"DLT_FLEXRAY", Const, 0}, + {"DLT_FRELAY", Const, 0}, + {"DLT_FRELAY_WITH_DIR", Const, 0}, + {"DLT_GCOM_SERIAL", Const, 0}, + 
{"DLT_GCOM_T1E1", Const, 0}, + {"DLT_GPF_F", Const, 0}, + {"DLT_GPF_T", Const, 0}, + {"DLT_GPRS_LLC", Const, 0}, + {"DLT_GSMTAP_ABIS", Const, 1}, + {"DLT_GSMTAP_UM", Const, 1}, + {"DLT_HDLC", Const, 1}, + {"DLT_HHDLC", Const, 0}, + {"DLT_HIPPI", Const, 1}, + {"DLT_IBM_SN", Const, 0}, + {"DLT_IBM_SP", Const, 0}, + {"DLT_IEEE802", Const, 0}, + {"DLT_IEEE802_11", Const, 0}, + {"DLT_IEEE802_11_RADIO", Const, 0}, + {"DLT_IEEE802_11_RADIO_AVS", Const, 0}, + {"DLT_IEEE802_15_4", Const, 0}, + {"DLT_IEEE802_15_4_LINUX", Const, 0}, + {"DLT_IEEE802_15_4_NOFCS", Const, 1}, + {"DLT_IEEE802_15_4_NONASK_PHY", Const, 0}, + {"DLT_IEEE802_16_MAC_CPS", Const, 0}, + {"DLT_IEEE802_16_MAC_CPS_RADIO", Const, 0}, + {"DLT_IPFILTER", Const, 0}, + {"DLT_IPMB", Const, 0}, + {"DLT_IPMB_LINUX", Const, 0}, + {"DLT_IPNET", Const, 1}, + {"DLT_IPOIB", Const, 1}, + {"DLT_IPV4", Const, 1}, + {"DLT_IPV6", Const, 1}, + {"DLT_IP_OVER_FC", Const, 0}, + {"DLT_JUNIPER_ATM1", Const, 0}, + {"DLT_JUNIPER_ATM2", Const, 0}, + {"DLT_JUNIPER_ATM_CEMIC", Const, 1}, + {"DLT_JUNIPER_CHDLC", Const, 0}, + {"DLT_JUNIPER_ES", Const, 0}, + {"DLT_JUNIPER_ETHER", Const, 0}, + {"DLT_JUNIPER_FIBRECHANNEL", Const, 1}, + {"DLT_JUNIPER_FRELAY", Const, 0}, + {"DLT_JUNIPER_GGSN", Const, 0}, + {"DLT_JUNIPER_ISM", Const, 0}, + {"DLT_JUNIPER_MFR", Const, 0}, + {"DLT_JUNIPER_MLFR", Const, 0}, + {"DLT_JUNIPER_MLPPP", Const, 0}, + {"DLT_JUNIPER_MONITOR", Const, 0}, + {"DLT_JUNIPER_PIC_PEER", Const, 0}, + {"DLT_JUNIPER_PPP", Const, 0}, + {"DLT_JUNIPER_PPPOE", Const, 0}, + {"DLT_JUNIPER_PPPOE_ATM", Const, 0}, + {"DLT_JUNIPER_SERVICES", Const, 0}, + {"DLT_JUNIPER_SRX_E2E", Const, 1}, + {"DLT_JUNIPER_ST", Const, 0}, + {"DLT_JUNIPER_VP", Const, 0}, + {"DLT_JUNIPER_VS", Const, 1}, + {"DLT_LAPB_WITH_DIR", Const, 0}, + {"DLT_LAPD", Const, 0}, + {"DLT_LIN", Const, 0}, + {"DLT_LINUX_EVDEV", Const, 1}, + {"DLT_LINUX_IRDA", Const, 0}, + {"DLT_LINUX_LAPD", Const, 0}, + {"DLT_LINUX_PPP_WITHDIRECTION", Const, 0}, + {"DLT_LINUX_SLL", Const, 0}, + 
{"DLT_LOOP", Const, 0}, + {"DLT_LTALK", Const, 0}, + {"DLT_MATCHING_MAX", Const, 1}, + {"DLT_MATCHING_MIN", Const, 1}, + {"DLT_MFR", Const, 0}, + {"DLT_MOST", Const, 0}, + {"DLT_MPEG_2_TS", Const, 1}, + {"DLT_MPLS", Const, 1}, + {"DLT_MTP2", Const, 0}, + {"DLT_MTP2_WITH_PHDR", Const, 0}, + {"DLT_MTP3", Const, 0}, + {"DLT_MUX27010", Const, 1}, + {"DLT_NETANALYZER", Const, 1}, + {"DLT_NETANALYZER_TRANSPARENT", Const, 1}, + {"DLT_NFC_LLCP", Const, 1}, + {"DLT_NFLOG", Const, 1}, + {"DLT_NG40", Const, 1}, + {"DLT_NULL", Const, 0}, + {"DLT_PCI_EXP", Const, 0}, + {"DLT_PFLOG", Const, 0}, + {"DLT_PFSYNC", Const, 0}, + {"DLT_PPI", Const, 0}, + {"DLT_PPP", Const, 0}, + {"DLT_PPP_BSDOS", Const, 0}, + {"DLT_PPP_ETHER", Const, 0}, + {"DLT_PPP_PPPD", Const, 0}, + {"DLT_PPP_SERIAL", Const, 0}, + {"DLT_PPP_WITH_DIR", Const, 0}, + {"DLT_PPP_WITH_DIRECTION", Const, 0}, + {"DLT_PRISM_HEADER", Const, 0}, + {"DLT_PRONET", Const, 0}, + {"DLT_RAIF1", Const, 0}, + {"DLT_RAW", Const, 0}, + {"DLT_RAWAF_MASK", Const, 1}, + {"DLT_RIO", Const, 0}, + {"DLT_SCCP", Const, 0}, + {"DLT_SITA", Const, 0}, + {"DLT_SLIP", Const, 0}, + {"DLT_SLIP_BSDOS", Const, 0}, + {"DLT_STANAG_5066_D_PDU", Const, 1}, + {"DLT_SUNATM", Const, 0}, + {"DLT_SYMANTEC_FIREWALL", Const, 0}, + {"DLT_TZSP", Const, 0}, + {"DLT_USB", Const, 0}, + {"DLT_USB_LINUX", Const, 0}, + {"DLT_USB_LINUX_MMAPPED", Const, 1}, + {"DLT_USER0", Const, 0}, + {"DLT_USER1", Const, 0}, + {"DLT_USER10", Const, 0}, + {"DLT_USER11", Const, 0}, + {"DLT_USER12", Const, 0}, + {"DLT_USER13", Const, 0}, + {"DLT_USER14", Const, 0}, + {"DLT_USER15", Const, 0}, + {"DLT_USER2", Const, 0}, + {"DLT_USER3", Const, 0}, + {"DLT_USER4", Const, 0}, + {"DLT_USER5", Const, 0}, + {"DLT_USER6", Const, 0}, + {"DLT_USER7", Const, 0}, + {"DLT_USER8", Const, 0}, + {"DLT_USER9", Const, 0}, + {"DLT_WIHART", Const, 1}, + {"DLT_X2E_SERIAL", Const, 0}, + {"DLT_X2E_XORAYA", Const, 0}, + {"DNSMXData", Type, 0}, + {"DNSMXData.NameExchange", Field, 0}, + {"DNSMXData.Pad", Field, 0}, 
+ {"DNSMXData.Preference", Field, 0}, + {"DNSPTRData", Type, 0}, + {"DNSPTRData.Host", Field, 0}, + {"DNSRecord", Type, 0}, + {"DNSRecord.Data", Field, 0}, + {"DNSRecord.Dw", Field, 0}, + {"DNSRecord.Length", Field, 0}, + {"DNSRecord.Name", Field, 0}, + {"DNSRecord.Next", Field, 0}, + {"DNSRecord.Reserved", Field, 0}, + {"DNSRecord.Ttl", Field, 0}, + {"DNSRecord.Type", Field, 0}, + {"DNSSRVData", Type, 0}, + {"DNSSRVData.Pad", Field, 0}, + {"DNSSRVData.Port", Field, 0}, + {"DNSSRVData.Priority", Field, 0}, + {"DNSSRVData.Target", Field, 0}, + {"DNSSRVData.Weight", Field, 0}, + {"DNSTXTData", Type, 0}, + {"DNSTXTData.StringArray", Field, 0}, + {"DNSTXTData.StringCount", Field, 0}, + {"DNS_INFO_NO_RECORDS", Const, 4}, + {"DNS_TYPE_A", Const, 0}, + {"DNS_TYPE_A6", Const, 0}, + {"DNS_TYPE_AAAA", Const, 0}, + {"DNS_TYPE_ADDRS", Const, 0}, + {"DNS_TYPE_AFSDB", Const, 0}, + {"DNS_TYPE_ALL", Const, 0}, + {"DNS_TYPE_ANY", Const, 0}, + {"DNS_TYPE_ATMA", Const, 0}, + {"DNS_TYPE_AXFR", Const, 0}, + {"DNS_TYPE_CERT", Const, 0}, + {"DNS_TYPE_CNAME", Const, 0}, + {"DNS_TYPE_DHCID", Const, 0}, + {"DNS_TYPE_DNAME", Const, 0}, + {"DNS_TYPE_DNSKEY", Const, 0}, + {"DNS_TYPE_DS", Const, 0}, + {"DNS_TYPE_EID", Const, 0}, + {"DNS_TYPE_GID", Const, 0}, + {"DNS_TYPE_GPOS", Const, 0}, + {"DNS_TYPE_HINFO", Const, 0}, + {"DNS_TYPE_ISDN", Const, 0}, + {"DNS_TYPE_IXFR", Const, 0}, + {"DNS_TYPE_KEY", Const, 0}, + {"DNS_TYPE_KX", Const, 0}, + {"DNS_TYPE_LOC", Const, 0}, + {"DNS_TYPE_MAILA", Const, 0}, + {"DNS_TYPE_MAILB", Const, 0}, + {"DNS_TYPE_MB", Const, 0}, + {"DNS_TYPE_MD", Const, 0}, + {"DNS_TYPE_MF", Const, 0}, + {"DNS_TYPE_MG", Const, 0}, + {"DNS_TYPE_MINFO", Const, 0}, + {"DNS_TYPE_MR", Const, 0}, + {"DNS_TYPE_MX", Const, 0}, + {"DNS_TYPE_NAPTR", Const, 0}, + {"DNS_TYPE_NBSTAT", Const, 0}, + {"DNS_TYPE_NIMLOC", Const, 0}, + {"DNS_TYPE_NS", Const, 0}, + {"DNS_TYPE_NSAP", Const, 0}, + {"DNS_TYPE_NSAPPTR", Const, 0}, + {"DNS_TYPE_NSEC", Const, 0}, + {"DNS_TYPE_NULL", Const, 0}, + 
{"DNS_TYPE_NXT", Const, 0}, + {"DNS_TYPE_OPT", Const, 0}, + {"DNS_TYPE_PTR", Const, 0}, + {"DNS_TYPE_PX", Const, 0}, + {"DNS_TYPE_RP", Const, 0}, + {"DNS_TYPE_RRSIG", Const, 0}, + {"DNS_TYPE_RT", Const, 0}, + {"DNS_TYPE_SIG", Const, 0}, + {"DNS_TYPE_SINK", Const, 0}, + {"DNS_TYPE_SOA", Const, 0}, + {"DNS_TYPE_SRV", Const, 0}, + {"DNS_TYPE_TEXT", Const, 0}, + {"DNS_TYPE_TKEY", Const, 0}, + {"DNS_TYPE_TSIG", Const, 0}, + {"DNS_TYPE_UID", Const, 0}, + {"DNS_TYPE_UINFO", Const, 0}, + {"DNS_TYPE_UNSPEC", Const, 0}, + {"DNS_TYPE_WINS", Const, 0}, + {"DNS_TYPE_WINSR", Const, 0}, + {"DNS_TYPE_WKS", Const, 0}, + {"DNS_TYPE_X25", Const, 0}, + {"DT_BLK", Const, 0}, + {"DT_CHR", Const, 0}, + {"DT_DIR", Const, 0}, + {"DT_FIFO", Const, 0}, + {"DT_LNK", Const, 0}, + {"DT_REG", Const, 0}, + {"DT_SOCK", Const, 0}, + {"DT_UNKNOWN", Const, 0}, + {"DT_WHT", Const, 0}, + {"DUPLICATE_CLOSE_SOURCE", Const, 0}, + {"DUPLICATE_SAME_ACCESS", Const, 0}, + {"DeleteFile", Func, 0}, + {"DetachLsf", Func, 0}, + {"DeviceIoControl", Func, 4}, + {"Dirent", Type, 0}, + {"Dirent.Fileno", Field, 0}, + {"Dirent.Ino", Field, 0}, + {"Dirent.Name", Field, 0}, + {"Dirent.Namlen", Field, 0}, + {"Dirent.Off", Field, 0}, + {"Dirent.Pad0", Field, 12}, + {"Dirent.Pad1", Field, 12}, + {"Dirent.Pad_cgo_0", Field, 0}, + {"Dirent.Reclen", Field, 0}, + {"Dirent.Seekoff", Field, 0}, + {"Dirent.Type", Field, 0}, + {"Dirent.X__d_padding", Field, 3}, + {"DnsNameCompare", Func, 4}, + {"DnsQuery", Func, 0}, + {"DnsRecordListFree", Func, 0}, + {"DnsSectionAdditional", Const, 4}, + {"DnsSectionAnswer", Const, 4}, + {"DnsSectionAuthority", Const, 4}, + {"DnsSectionQuestion", Const, 4}, + {"Dup", Func, 0}, + {"Dup2", Func, 0}, + {"Dup3", Func, 2}, + {"DuplicateHandle", Func, 0}, + {"E2BIG", Const, 0}, + {"EACCES", Const, 0}, + {"EADDRINUSE", Const, 0}, + {"EADDRNOTAVAIL", Const, 0}, + {"EADV", Const, 0}, + {"EAFNOSUPPORT", Const, 0}, + {"EAGAIN", Const, 0}, + {"EALREADY", Const, 0}, + {"EAUTH", Const, 0}, + {"EBADARCH", Const, 
0}, + {"EBADE", Const, 0}, + {"EBADEXEC", Const, 0}, + {"EBADF", Const, 0}, + {"EBADFD", Const, 0}, + {"EBADMACHO", Const, 0}, + {"EBADMSG", Const, 0}, + {"EBADR", Const, 0}, + {"EBADRPC", Const, 0}, + {"EBADRQC", Const, 0}, + {"EBADSLT", Const, 0}, + {"EBFONT", Const, 0}, + {"EBUSY", Const, 0}, + {"ECANCELED", Const, 0}, + {"ECAPMODE", Const, 1}, + {"ECHILD", Const, 0}, + {"ECHO", Const, 0}, + {"ECHOCTL", Const, 0}, + {"ECHOE", Const, 0}, + {"ECHOK", Const, 0}, + {"ECHOKE", Const, 0}, + {"ECHONL", Const, 0}, + {"ECHOPRT", Const, 0}, + {"ECHRNG", Const, 0}, + {"ECOMM", Const, 0}, + {"ECONNABORTED", Const, 0}, + {"ECONNREFUSED", Const, 0}, + {"ECONNRESET", Const, 0}, + {"EDEADLK", Const, 0}, + {"EDEADLOCK", Const, 0}, + {"EDESTADDRREQ", Const, 0}, + {"EDEVERR", Const, 0}, + {"EDOM", Const, 0}, + {"EDOOFUS", Const, 0}, + {"EDOTDOT", Const, 0}, + {"EDQUOT", Const, 0}, + {"EEXIST", Const, 0}, + {"EFAULT", Const, 0}, + {"EFBIG", Const, 0}, + {"EFER_LMA", Const, 1}, + {"EFER_LME", Const, 1}, + {"EFER_NXE", Const, 1}, + {"EFER_SCE", Const, 1}, + {"EFTYPE", Const, 0}, + {"EHOSTDOWN", Const, 0}, + {"EHOSTUNREACH", Const, 0}, + {"EHWPOISON", Const, 0}, + {"EIDRM", Const, 0}, + {"EILSEQ", Const, 0}, + {"EINPROGRESS", Const, 0}, + {"EINTR", Const, 0}, + {"EINVAL", Const, 0}, + {"EIO", Const, 0}, + {"EIPSEC", Const, 1}, + {"EISCONN", Const, 0}, + {"EISDIR", Const, 0}, + {"EISNAM", Const, 0}, + {"EKEYEXPIRED", Const, 0}, + {"EKEYREJECTED", Const, 0}, + {"EKEYREVOKED", Const, 0}, + {"EL2HLT", Const, 0}, + {"EL2NSYNC", Const, 0}, + {"EL3HLT", Const, 0}, + {"EL3RST", Const, 0}, + {"ELAST", Const, 0}, + {"ELF_NGREG", Const, 0}, + {"ELF_PRARGSZ", Const, 0}, + {"ELIBACC", Const, 0}, + {"ELIBBAD", Const, 0}, + {"ELIBEXEC", Const, 0}, + {"ELIBMAX", Const, 0}, + {"ELIBSCN", Const, 0}, + {"ELNRNG", Const, 0}, + {"ELOOP", Const, 0}, + {"EMEDIUMTYPE", Const, 0}, + {"EMFILE", Const, 0}, + {"EMLINK", Const, 0}, + {"EMSGSIZE", Const, 0}, + {"EMT_TAGOVF", Const, 1}, + {"EMULTIHOP", Const, 0}, + 
{"EMUL_ENABLED", Const, 1}, + {"EMUL_LINUX", Const, 1}, + {"EMUL_LINUX32", Const, 1}, + {"EMUL_MAXID", Const, 1}, + {"EMUL_NATIVE", Const, 1}, + {"ENAMETOOLONG", Const, 0}, + {"ENAVAIL", Const, 0}, + {"ENDRUNDISC", Const, 1}, + {"ENEEDAUTH", Const, 0}, + {"ENETDOWN", Const, 0}, + {"ENETRESET", Const, 0}, + {"ENETUNREACH", Const, 0}, + {"ENFILE", Const, 0}, + {"ENOANO", Const, 0}, + {"ENOATTR", Const, 0}, + {"ENOBUFS", Const, 0}, + {"ENOCSI", Const, 0}, + {"ENODATA", Const, 0}, + {"ENODEV", Const, 0}, + {"ENOENT", Const, 0}, + {"ENOEXEC", Const, 0}, + {"ENOKEY", Const, 0}, + {"ENOLCK", Const, 0}, + {"ENOLINK", Const, 0}, + {"ENOMEDIUM", Const, 0}, + {"ENOMEM", Const, 0}, + {"ENOMSG", Const, 0}, + {"ENONET", Const, 0}, + {"ENOPKG", Const, 0}, + {"ENOPOLICY", Const, 0}, + {"ENOPROTOOPT", Const, 0}, + {"ENOSPC", Const, 0}, + {"ENOSR", Const, 0}, + {"ENOSTR", Const, 0}, + {"ENOSYS", Const, 0}, + {"ENOTBLK", Const, 0}, + {"ENOTCAPABLE", Const, 0}, + {"ENOTCONN", Const, 0}, + {"ENOTDIR", Const, 0}, + {"ENOTEMPTY", Const, 0}, + {"ENOTNAM", Const, 0}, + {"ENOTRECOVERABLE", Const, 0}, + {"ENOTSOCK", Const, 0}, + {"ENOTSUP", Const, 0}, + {"ENOTTY", Const, 0}, + {"ENOTUNIQ", Const, 0}, + {"ENXIO", Const, 0}, + {"EN_SW_CTL_INF", Const, 1}, + {"EN_SW_CTL_PREC", Const, 1}, + {"EN_SW_CTL_ROUND", Const, 1}, + {"EN_SW_DATACHAIN", Const, 1}, + {"EN_SW_DENORM", Const, 1}, + {"EN_SW_INVOP", Const, 1}, + {"EN_SW_OVERFLOW", Const, 1}, + {"EN_SW_PRECLOSS", Const, 1}, + {"EN_SW_UNDERFLOW", Const, 1}, + {"EN_SW_ZERODIV", Const, 1}, + {"EOPNOTSUPP", Const, 0}, + {"EOVERFLOW", Const, 0}, + {"EOWNERDEAD", Const, 0}, + {"EPERM", Const, 0}, + {"EPFNOSUPPORT", Const, 0}, + {"EPIPE", Const, 0}, + {"EPOLLERR", Const, 0}, + {"EPOLLET", Const, 0}, + {"EPOLLHUP", Const, 0}, + {"EPOLLIN", Const, 0}, + {"EPOLLMSG", Const, 0}, + {"EPOLLONESHOT", Const, 0}, + {"EPOLLOUT", Const, 0}, + {"EPOLLPRI", Const, 0}, + {"EPOLLRDBAND", Const, 0}, + {"EPOLLRDHUP", Const, 0}, + {"EPOLLRDNORM", Const, 0}, + 
{"EPOLLWRBAND", Const, 0}, + {"EPOLLWRNORM", Const, 0}, + {"EPOLL_CLOEXEC", Const, 0}, + {"EPOLL_CTL_ADD", Const, 0}, + {"EPOLL_CTL_DEL", Const, 0}, + {"EPOLL_CTL_MOD", Const, 0}, + {"EPOLL_NONBLOCK", Const, 0}, + {"EPROCLIM", Const, 0}, + {"EPROCUNAVAIL", Const, 0}, + {"EPROGMISMATCH", Const, 0}, + {"EPROGUNAVAIL", Const, 0}, + {"EPROTO", Const, 0}, + {"EPROTONOSUPPORT", Const, 0}, + {"EPROTOTYPE", Const, 0}, + {"EPWROFF", Const, 0}, + {"EQFULL", Const, 16}, + {"ERANGE", Const, 0}, + {"EREMCHG", Const, 0}, + {"EREMOTE", Const, 0}, + {"EREMOTEIO", Const, 0}, + {"ERESTART", Const, 0}, + {"ERFKILL", Const, 0}, + {"EROFS", Const, 0}, + {"ERPCMISMATCH", Const, 0}, + {"ERROR_ACCESS_DENIED", Const, 0}, + {"ERROR_ALREADY_EXISTS", Const, 0}, + {"ERROR_BROKEN_PIPE", Const, 0}, + {"ERROR_BUFFER_OVERFLOW", Const, 0}, + {"ERROR_DIR_NOT_EMPTY", Const, 8}, + {"ERROR_ENVVAR_NOT_FOUND", Const, 0}, + {"ERROR_FILE_EXISTS", Const, 0}, + {"ERROR_FILE_NOT_FOUND", Const, 0}, + {"ERROR_HANDLE_EOF", Const, 2}, + {"ERROR_INSUFFICIENT_BUFFER", Const, 0}, + {"ERROR_IO_PENDING", Const, 0}, + {"ERROR_MOD_NOT_FOUND", Const, 0}, + {"ERROR_MORE_DATA", Const, 3}, + {"ERROR_NETNAME_DELETED", Const, 3}, + {"ERROR_NOT_FOUND", Const, 1}, + {"ERROR_NO_MORE_FILES", Const, 0}, + {"ERROR_OPERATION_ABORTED", Const, 0}, + {"ERROR_PATH_NOT_FOUND", Const, 0}, + {"ERROR_PRIVILEGE_NOT_HELD", Const, 4}, + {"ERROR_PROC_NOT_FOUND", Const, 0}, + {"ESHLIBVERS", Const, 0}, + {"ESHUTDOWN", Const, 0}, + {"ESOCKTNOSUPPORT", Const, 0}, + {"ESPIPE", Const, 0}, + {"ESRCH", Const, 0}, + {"ESRMNT", Const, 0}, + {"ESTALE", Const, 0}, + {"ESTRPIPE", Const, 0}, + {"ETHERCAP_JUMBO_MTU", Const, 1}, + {"ETHERCAP_VLAN_HWTAGGING", Const, 1}, + {"ETHERCAP_VLAN_MTU", Const, 1}, + {"ETHERMIN", Const, 1}, + {"ETHERMTU", Const, 1}, + {"ETHERMTU_JUMBO", Const, 1}, + {"ETHERTYPE_8023", Const, 1}, + {"ETHERTYPE_AARP", Const, 1}, + {"ETHERTYPE_ACCTON", Const, 1}, + {"ETHERTYPE_AEONIC", Const, 1}, + {"ETHERTYPE_ALPHA", Const, 1}, + 
{"ETHERTYPE_AMBER", Const, 1}, + {"ETHERTYPE_AMOEBA", Const, 1}, + {"ETHERTYPE_AOE", Const, 1}, + {"ETHERTYPE_APOLLO", Const, 1}, + {"ETHERTYPE_APOLLODOMAIN", Const, 1}, + {"ETHERTYPE_APPLETALK", Const, 1}, + {"ETHERTYPE_APPLITEK", Const, 1}, + {"ETHERTYPE_ARGONAUT", Const, 1}, + {"ETHERTYPE_ARP", Const, 1}, + {"ETHERTYPE_AT", Const, 1}, + {"ETHERTYPE_ATALK", Const, 1}, + {"ETHERTYPE_ATOMIC", Const, 1}, + {"ETHERTYPE_ATT", Const, 1}, + {"ETHERTYPE_ATTSTANFORD", Const, 1}, + {"ETHERTYPE_AUTOPHON", Const, 1}, + {"ETHERTYPE_AXIS", Const, 1}, + {"ETHERTYPE_BCLOOP", Const, 1}, + {"ETHERTYPE_BOFL", Const, 1}, + {"ETHERTYPE_CABLETRON", Const, 1}, + {"ETHERTYPE_CHAOS", Const, 1}, + {"ETHERTYPE_COMDESIGN", Const, 1}, + {"ETHERTYPE_COMPUGRAPHIC", Const, 1}, + {"ETHERTYPE_COUNTERPOINT", Const, 1}, + {"ETHERTYPE_CRONUS", Const, 1}, + {"ETHERTYPE_CRONUSVLN", Const, 1}, + {"ETHERTYPE_DCA", Const, 1}, + {"ETHERTYPE_DDE", Const, 1}, + {"ETHERTYPE_DEBNI", Const, 1}, + {"ETHERTYPE_DECAM", Const, 1}, + {"ETHERTYPE_DECCUST", Const, 1}, + {"ETHERTYPE_DECDIAG", Const, 1}, + {"ETHERTYPE_DECDNS", Const, 1}, + {"ETHERTYPE_DECDTS", Const, 1}, + {"ETHERTYPE_DECEXPER", Const, 1}, + {"ETHERTYPE_DECLAST", Const, 1}, + {"ETHERTYPE_DECLTM", Const, 1}, + {"ETHERTYPE_DECMUMPS", Const, 1}, + {"ETHERTYPE_DECNETBIOS", Const, 1}, + {"ETHERTYPE_DELTACON", Const, 1}, + {"ETHERTYPE_DIDDLE", Const, 1}, + {"ETHERTYPE_DLOG1", Const, 1}, + {"ETHERTYPE_DLOG2", Const, 1}, + {"ETHERTYPE_DN", Const, 1}, + {"ETHERTYPE_DOGFIGHT", Const, 1}, + {"ETHERTYPE_DSMD", Const, 1}, + {"ETHERTYPE_ECMA", Const, 1}, + {"ETHERTYPE_ENCRYPT", Const, 1}, + {"ETHERTYPE_ES", Const, 1}, + {"ETHERTYPE_EXCELAN", Const, 1}, + {"ETHERTYPE_EXPERDATA", Const, 1}, + {"ETHERTYPE_FLIP", Const, 1}, + {"ETHERTYPE_FLOWCONTROL", Const, 1}, + {"ETHERTYPE_FRARP", Const, 1}, + {"ETHERTYPE_GENDYN", Const, 1}, + {"ETHERTYPE_HAYES", Const, 1}, + {"ETHERTYPE_HIPPI_FP", Const, 1}, + {"ETHERTYPE_HITACHI", Const, 1}, + {"ETHERTYPE_HP", Const, 1}, + 
{"ETHERTYPE_IEEEPUP", Const, 1}, + {"ETHERTYPE_IEEEPUPAT", Const, 1}, + {"ETHERTYPE_IMLBL", Const, 1}, + {"ETHERTYPE_IMLBLDIAG", Const, 1}, + {"ETHERTYPE_IP", Const, 1}, + {"ETHERTYPE_IPAS", Const, 1}, + {"ETHERTYPE_IPV6", Const, 1}, + {"ETHERTYPE_IPX", Const, 1}, + {"ETHERTYPE_IPXNEW", Const, 1}, + {"ETHERTYPE_KALPANA", Const, 1}, + {"ETHERTYPE_LANBRIDGE", Const, 1}, + {"ETHERTYPE_LANPROBE", Const, 1}, + {"ETHERTYPE_LAT", Const, 1}, + {"ETHERTYPE_LBACK", Const, 1}, + {"ETHERTYPE_LITTLE", Const, 1}, + {"ETHERTYPE_LLDP", Const, 1}, + {"ETHERTYPE_LOGICRAFT", Const, 1}, + {"ETHERTYPE_LOOPBACK", Const, 1}, + {"ETHERTYPE_MATRA", Const, 1}, + {"ETHERTYPE_MAX", Const, 1}, + {"ETHERTYPE_MERIT", Const, 1}, + {"ETHERTYPE_MICP", Const, 1}, + {"ETHERTYPE_MOPDL", Const, 1}, + {"ETHERTYPE_MOPRC", Const, 1}, + {"ETHERTYPE_MOTOROLA", Const, 1}, + {"ETHERTYPE_MPLS", Const, 1}, + {"ETHERTYPE_MPLS_MCAST", Const, 1}, + {"ETHERTYPE_MUMPS", Const, 1}, + {"ETHERTYPE_NBPCC", Const, 1}, + {"ETHERTYPE_NBPCLAIM", Const, 1}, + {"ETHERTYPE_NBPCLREQ", Const, 1}, + {"ETHERTYPE_NBPCLRSP", Const, 1}, + {"ETHERTYPE_NBPCREQ", Const, 1}, + {"ETHERTYPE_NBPCRSP", Const, 1}, + {"ETHERTYPE_NBPDG", Const, 1}, + {"ETHERTYPE_NBPDGB", Const, 1}, + {"ETHERTYPE_NBPDLTE", Const, 1}, + {"ETHERTYPE_NBPRAR", Const, 1}, + {"ETHERTYPE_NBPRAS", Const, 1}, + {"ETHERTYPE_NBPRST", Const, 1}, + {"ETHERTYPE_NBPSCD", Const, 1}, + {"ETHERTYPE_NBPVCD", Const, 1}, + {"ETHERTYPE_NBS", Const, 1}, + {"ETHERTYPE_NCD", Const, 1}, + {"ETHERTYPE_NESTAR", Const, 1}, + {"ETHERTYPE_NETBEUI", Const, 1}, + {"ETHERTYPE_NOVELL", Const, 1}, + {"ETHERTYPE_NS", Const, 1}, + {"ETHERTYPE_NSAT", Const, 1}, + {"ETHERTYPE_NSCOMPAT", Const, 1}, + {"ETHERTYPE_NTRAILER", Const, 1}, + {"ETHERTYPE_OS9", Const, 1}, + {"ETHERTYPE_OS9NET", Const, 1}, + {"ETHERTYPE_PACER", Const, 1}, + {"ETHERTYPE_PAE", Const, 1}, + {"ETHERTYPE_PCS", Const, 1}, + {"ETHERTYPE_PLANNING", Const, 1}, + {"ETHERTYPE_PPP", Const, 1}, + {"ETHERTYPE_PPPOE", Const, 1}, + 
{"ETHERTYPE_PPPOEDISC", Const, 1}, + {"ETHERTYPE_PRIMENTS", Const, 1}, + {"ETHERTYPE_PUP", Const, 1}, + {"ETHERTYPE_PUPAT", Const, 1}, + {"ETHERTYPE_QINQ", Const, 1}, + {"ETHERTYPE_RACAL", Const, 1}, + {"ETHERTYPE_RATIONAL", Const, 1}, + {"ETHERTYPE_RAWFR", Const, 1}, + {"ETHERTYPE_RCL", Const, 1}, + {"ETHERTYPE_RDP", Const, 1}, + {"ETHERTYPE_RETIX", Const, 1}, + {"ETHERTYPE_REVARP", Const, 1}, + {"ETHERTYPE_SCA", Const, 1}, + {"ETHERTYPE_SECTRA", Const, 1}, + {"ETHERTYPE_SECUREDATA", Const, 1}, + {"ETHERTYPE_SGITW", Const, 1}, + {"ETHERTYPE_SG_BOUNCE", Const, 1}, + {"ETHERTYPE_SG_DIAG", Const, 1}, + {"ETHERTYPE_SG_NETGAMES", Const, 1}, + {"ETHERTYPE_SG_RESV", Const, 1}, + {"ETHERTYPE_SIMNET", Const, 1}, + {"ETHERTYPE_SLOW", Const, 1}, + {"ETHERTYPE_SLOWPROTOCOLS", Const, 1}, + {"ETHERTYPE_SNA", Const, 1}, + {"ETHERTYPE_SNMP", Const, 1}, + {"ETHERTYPE_SONIX", Const, 1}, + {"ETHERTYPE_SPIDER", Const, 1}, + {"ETHERTYPE_SPRITE", Const, 1}, + {"ETHERTYPE_STP", Const, 1}, + {"ETHERTYPE_TALARIS", Const, 1}, + {"ETHERTYPE_TALARISMC", Const, 1}, + {"ETHERTYPE_TCPCOMP", Const, 1}, + {"ETHERTYPE_TCPSM", Const, 1}, + {"ETHERTYPE_TEC", Const, 1}, + {"ETHERTYPE_TIGAN", Const, 1}, + {"ETHERTYPE_TRAIL", Const, 1}, + {"ETHERTYPE_TRANSETHER", Const, 1}, + {"ETHERTYPE_TYMSHARE", Const, 1}, + {"ETHERTYPE_UBBST", Const, 1}, + {"ETHERTYPE_UBDEBUG", Const, 1}, + {"ETHERTYPE_UBDIAGLOOP", Const, 1}, + {"ETHERTYPE_UBDL", Const, 1}, + {"ETHERTYPE_UBNIU", Const, 1}, + {"ETHERTYPE_UBNMC", Const, 1}, + {"ETHERTYPE_VALID", Const, 1}, + {"ETHERTYPE_VARIAN", Const, 1}, + {"ETHERTYPE_VAXELN", Const, 1}, + {"ETHERTYPE_VEECO", Const, 1}, + {"ETHERTYPE_VEXP", Const, 1}, + {"ETHERTYPE_VGLAB", Const, 1}, + {"ETHERTYPE_VINES", Const, 1}, + {"ETHERTYPE_VINESECHO", Const, 1}, + {"ETHERTYPE_VINESLOOP", Const, 1}, + {"ETHERTYPE_VITAL", Const, 1}, + {"ETHERTYPE_VLAN", Const, 1}, + {"ETHERTYPE_VLTLMAN", Const, 1}, + {"ETHERTYPE_VPROD", Const, 1}, + {"ETHERTYPE_VURESERVED", Const, 1}, + {"ETHERTYPE_WATERLOO", 
Const, 1}, + {"ETHERTYPE_WELLFLEET", Const, 1}, + {"ETHERTYPE_X25", Const, 1}, + {"ETHERTYPE_X75", Const, 1}, + {"ETHERTYPE_XNSSM", Const, 1}, + {"ETHERTYPE_XTP", Const, 1}, + {"ETHER_ADDR_LEN", Const, 1}, + {"ETHER_ALIGN", Const, 1}, + {"ETHER_CRC_LEN", Const, 1}, + {"ETHER_CRC_POLY_BE", Const, 1}, + {"ETHER_CRC_POLY_LE", Const, 1}, + {"ETHER_HDR_LEN", Const, 1}, + {"ETHER_MAX_DIX_LEN", Const, 1}, + {"ETHER_MAX_LEN", Const, 1}, + {"ETHER_MAX_LEN_JUMBO", Const, 1}, + {"ETHER_MIN_LEN", Const, 1}, + {"ETHER_PPPOE_ENCAP_LEN", Const, 1}, + {"ETHER_TYPE_LEN", Const, 1}, + {"ETHER_VLAN_ENCAP_LEN", Const, 1}, + {"ETH_P_1588", Const, 0}, + {"ETH_P_8021Q", Const, 0}, + {"ETH_P_802_2", Const, 0}, + {"ETH_P_802_3", Const, 0}, + {"ETH_P_AARP", Const, 0}, + {"ETH_P_ALL", Const, 0}, + {"ETH_P_AOE", Const, 0}, + {"ETH_P_ARCNET", Const, 0}, + {"ETH_P_ARP", Const, 0}, + {"ETH_P_ATALK", Const, 0}, + {"ETH_P_ATMFATE", Const, 0}, + {"ETH_P_ATMMPOA", Const, 0}, + {"ETH_P_AX25", Const, 0}, + {"ETH_P_BPQ", Const, 0}, + {"ETH_P_CAIF", Const, 0}, + {"ETH_P_CAN", Const, 0}, + {"ETH_P_CONTROL", Const, 0}, + {"ETH_P_CUST", Const, 0}, + {"ETH_P_DDCMP", Const, 0}, + {"ETH_P_DEC", Const, 0}, + {"ETH_P_DIAG", Const, 0}, + {"ETH_P_DNA_DL", Const, 0}, + {"ETH_P_DNA_RC", Const, 0}, + {"ETH_P_DNA_RT", Const, 0}, + {"ETH_P_DSA", Const, 0}, + {"ETH_P_ECONET", Const, 0}, + {"ETH_P_EDSA", Const, 0}, + {"ETH_P_FCOE", Const, 0}, + {"ETH_P_FIP", Const, 0}, + {"ETH_P_HDLC", Const, 0}, + {"ETH_P_IEEE802154", Const, 0}, + {"ETH_P_IEEEPUP", Const, 0}, + {"ETH_P_IEEEPUPAT", Const, 0}, + {"ETH_P_IP", Const, 0}, + {"ETH_P_IPV6", Const, 0}, + {"ETH_P_IPX", Const, 0}, + {"ETH_P_IRDA", Const, 0}, + {"ETH_P_LAT", Const, 0}, + {"ETH_P_LINK_CTL", Const, 0}, + {"ETH_P_LOCALTALK", Const, 0}, + {"ETH_P_LOOP", Const, 0}, + {"ETH_P_MOBITEX", Const, 0}, + {"ETH_P_MPLS_MC", Const, 0}, + {"ETH_P_MPLS_UC", Const, 0}, + {"ETH_P_PAE", Const, 0}, + {"ETH_P_PAUSE", Const, 0}, + {"ETH_P_PHONET", Const, 0}, + {"ETH_P_PPPTALK", Const, 
0}, + {"ETH_P_PPP_DISC", Const, 0}, + {"ETH_P_PPP_MP", Const, 0}, + {"ETH_P_PPP_SES", Const, 0}, + {"ETH_P_PUP", Const, 0}, + {"ETH_P_PUPAT", Const, 0}, + {"ETH_P_RARP", Const, 0}, + {"ETH_P_SCA", Const, 0}, + {"ETH_P_SLOW", Const, 0}, + {"ETH_P_SNAP", Const, 0}, + {"ETH_P_TEB", Const, 0}, + {"ETH_P_TIPC", Const, 0}, + {"ETH_P_TRAILER", Const, 0}, + {"ETH_P_TR_802_2", Const, 0}, + {"ETH_P_WAN_PPP", Const, 0}, + {"ETH_P_WCCP", Const, 0}, + {"ETH_P_X25", Const, 0}, + {"ETIME", Const, 0}, + {"ETIMEDOUT", Const, 0}, + {"ETOOMANYREFS", Const, 0}, + {"ETXTBSY", Const, 0}, + {"EUCLEAN", Const, 0}, + {"EUNATCH", Const, 0}, + {"EUSERS", Const, 0}, + {"EVFILT_AIO", Const, 0}, + {"EVFILT_FS", Const, 0}, + {"EVFILT_LIO", Const, 0}, + {"EVFILT_MACHPORT", Const, 0}, + {"EVFILT_PROC", Const, 0}, + {"EVFILT_READ", Const, 0}, + {"EVFILT_SIGNAL", Const, 0}, + {"EVFILT_SYSCOUNT", Const, 0}, + {"EVFILT_THREADMARKER", Const, 0}, + {"EVFILT_TIMER", Const, 0}, + {"EVFILT_USER", Const, 0}, + {"EVFILT_VM", Const, 0}, + {"EVFILT_VNODE", Const, 0}, + {"EVFILT_WRITE", Const, 0}, + {"EV_ADD", Const, 0}, + {"EV_CLEAR", Const, 0}, + {"EV_DELETE", Const, 0}, + {"EV_DISABLE", Const, 0}, + {"EV_DISPATCH", Const, 0}, + {"EV_DROP", Const, 3}, + {"EV_ENABLE", Const, 0}, + {"EV_EOF", Const, 0}, + {"EV_ERROR", Const, 0}, + {"EV_FLAG0", Const, 0}, + {"EV_FLAG1", Const, 0}, + {"EV_ONESHOT", Const, 0}, + {"EV_OOBAND", Const, 0}, + {"EV_POLL", Const, 0}, + {"EV_RECEIPT", Const, 0}, + {"EV_SYSFLAGS", Const, 0}, + {"EWINDOWS", Const, 0}, + {"EWOULDBLOCK", Const, 0}, + {"EXDEV", Const, 0}, + {"EXFULL", Const, 0}, + {"EXTA", Const, 0}, + {"EXTB", Const, 0}, + {"EXTPROC", Const, 0}, + {"Environ", Func, 0}, + {"EpollCreate", Func, 0}, + {"EpollCreate1", Func, 0}, + {"EpollCtl", Func, 0}, + {"EpollEvent", Type, 0}, + {"EpollEvent.Events", Field, 0}, + {"EpollEvent.Fd", Field, 0}, + {"EpollEvent.Pad", Field, 0}, + {"EpollEvent.PadFd", Field, 0}, + {"EpollWait", Func, 0}, + {"Errno", Type, 0}, + {"EscapeArg", Func, 
0}, + {"Exchangedata", Func, 0}, + {"Exec", Func, 0}, + {"Exit", Func, 0}, + {"ExitProcess", Func, 0}, + {"FD_CLOEXEC", Const, 0}, + {"FD_SETSIZE", Const, 0}, + {"FILE_ACTION_ADDED", Const, 0}, + {"FILE_ACTION_MODIFIED", Const, 0}, + {"FILE_ACTION_REMOVED", Const, 0}, + {"FILE_ACTION_RENAMED_NEW_NAME", Const, 0}, + {"FILE_ACTION_RENAMED_OLD_NAME", Const, 0}, + {"FILE_APPEND_DATA", Const, 0}, + {"FILE_ATTRIBUTE_ARCHIVE", Const, 0}, + {"FILE_ATTRIBUTE_DIRECTORY", Const, 0}, + {"FILE_ATTRIBUTE_HIDDEN", Const, 0}, + {"FILE_ATTRIBUTE_NORMAL", Const, 0}, + {"FILE_ATTRIBUTE_READONLY", Const, 0}, + {"FILE_ATTRIBUTE_REPARSE_POINT", Const, 4}, + {"FILE_ATTRIBUTE_SYSTEM", Const, 0}, + {"FILE_BEGIN", Const, 0}, + {"FILE_CURRENT", Const, 0}, + {"FILE_END", Const, 0}, + {"FILE_FLAG_BACKUP_SEMANTICS", Const, 0}, + {"FILE_FLAG_OPEN_REPARSE_POINT", Const, 4}, + {"FILE_FLAG_OVERLAPPED", Const, 0}, + {"FILE_LIST_DIRECTORY", Const, 0}, + {"FILE_MAP_COPY", Const, 0}, + {"FILE_MAP_EXECUTE", Const, 0}, + {"FILE_MAP_READ", Const, 0}, + {"FILE_MAP_WRITE", Const, 0}, + {"FILE_NOTIFY_CHANGE_ATTRIBUTES", Const, 0}, + {"FILE_NOTIFY_CHANGE_CREATION", Const, 0}, + {"FILE_NOTIFY_CHANGE_DIR_NAME", Const, 0}, + {"FILE_NOTIFY_CHANGE_FILE_NAME", Const, 0}, + {"FILE_NOTIFY_CHANGE_LAST_ACCESS", Const, 0}, + {"FILE_NOTIFY_CHANGE_LAST_WRITE", Const, 0}, + {"FILE_NOTIFY_CHANGE_SIZE", Const, 0}, + {"FILE_SHARE_DELETE", Const, 0}, + {"FILE_SHARE_READ", Const, 0}, + {"FILE_SHARE_WRITE", Const, 0}, + {"FILE_SKIP_COMPLETION_PORT_ON_SUCCESS", Const, 2}, + {"FILE_SKIP_SET_EVENT_ON_HANDLE", Const, 2}, + {"FILE_TYPE_CHAR", Const, 0}, + {"FILE_TYPE_DISK", Const, 0}, + {"FILE_TYPE_PIPE", Const, 0}, + {"FILE_TYPE_REMOTE", Const, 0}, + {"FILE_TYPE_UNKNOWN", Const, 0}, + {"FILE_WRITE_ATTRIBUTES", Const, 0}, + {"FLUSHO", Const, 0}, + {"FORMAT_MESSAGE_ALLOCATE_BUFFER", Const, 0}, + {"FORMAT_MESSAGE_ARGUMENT_ARRAY", Const, 0}, + {"FORMAT_MESSAGE_FROM_HMODULE", Const, 0}, + {"FORMAT_MESSAGE_FROM_STRING", Const, 0}, + 
{"FORMAT_MESSAGE_FROM_SYSTEM", Const, 0}, + {"FORMAT_MESSAGE_IGNORE_INSERTS", Const, 0}, + {"FORMAT_MESSAGE_MAX_WIDTH_MASK", Const, 0}, + {"FSCTL_GET_REPARSE_POINT", Const, 4}, + {"F_ADDFILESIGS", Const, 0}, + {"F_ADDSIGS", Const, 0}, + {"F_ALLOCATEALL", Const, 0}, + {"F_ALLOCATECONTIG", Const, 0}, + {"F_CANCEL", Const, 0}, + {"F_CHKCLEAN", Const, 0}, + {"F_CLOSEM", Const, 1}, + {"F_DUP2FD", Const, 0}, + {"F_DUP2FD_CLOEXEC", Const, 1}, + {"F_DUPFD", Const, 0}, + {"F_DUPFD_CLOEXEC", Const, 0}, + {"F_EXLCK", Const, 0}, + {"F_FINDSIGS", Const, 16}, + {"F_FLUSH_DATA", Const, 0}, + {"F_FREEZE_FS", Const, 0}, + {"F_FSCTL", Const, 1}, + {"F_FSDIRMASK", Const, 1}, + {"F_FSIN", Const, 1}, + {"F_FSINOUT", Const, 1}, + {"F_FSOUT", Const, 1}, + {"F_FSPRIV", Const, 1}, + {"F_FSVOID", Const, 1}, + {"F_FULLFSYNC", Const, 0}, + {"F_GETCODEDIR", Const, 16}, + {"F_GETFD", Const, 0}, + {"F_GETFL", Const, 0}, + {"F_GETLEASE", Const, 0}, + {"F_GETLK", Const, 0}, + {"F_GETLK64", Const, 0}, + {"F_GETLKPID", Const, 0}, + {"F_GETNOSIGPIPE", Const, 0}, + {"F_GETOWN", Const, 0}, + {"F_GETOWN_EX", Const, 0}, + {"F_GETPATH", Const, 0}, + {"F_GETPATH_MTMINFO", Const, 0}, + {"F_GETPIPE_SZ", Const, 0}, + {"F_GETPROTECTIONCLASS", Const, 0}, + {"F_GETPROTECTIONLEVEL", Const, 16}, + {"F_GETSIG", Const, 0}, + {"F_GLOBAL_NOCACHE", Const, 0}, + {"F_LOCK", Const, 0}, + {"F_LOG2PHYS", Const, 0}, + {"F_LOG2PHYS_EXT", Const, 0}, + {"F_MARKDEPENDENCY", Const, 0}, + {"F_MAXFD", Const, 1}, + {"F_NOCACHE", Const, 0}, + {"F_NODIRECT", Const, 0}, + {"F_NOTIFY", Const, 0}, + {"F_OGETLK", Const, 0}, + {"F_OK", Const, 0}, + {"F_OSETLK", Const, 0}, + {"F_OSETLKW", Const, 0}, + {"F_PARAM_MASK", Const, 1}, + {"F_PARAM_MAX", Const, 1}, + {"F_PATHPKG_CHECK", Const, 0}, + {"F_PEOFPOSMODE", Const, 0}, + {"F_PREALLOCATE", Const, 0}, + {"F_RDADVISE", Const, 0}, + {"F_RDAHEAD", Const, 0}, + {"F_RDLCK", Const, 0}, + {"F_READAHEAD", Const, 0}, + {"F_READBOOTSTRAP", Const, 0}, + {"F_SETBACKINGSTORE", Const, 0}, + {"F_SETFD", 
Const, 0}, + {"F_SETFL", Const, 0}, + {"F_SETLEASE", Const, 0}, + {"F_SETLK", Const, 0}, + {"F_SETLK64", Const, 0}, + {"F_SETLKW", Const, 0}, + {"F_SETLKW64", Const, 0}, + {"F_SETLKWTIMEOUT", Const, 16}, + {"F_SETLK_REMOTE", Const, 0}, + {"F_SETNOSIGPIPE", Const, 0}, + {"F_SETOWN", Const, 0}, + {"F_SETOWN_EX", Const, 0}, + {"F_SETPIPE_SZ", Const, 0}, + {"F_SETPROTECTIONCLASS", Const, 0}, + {"F_SETSIG", Const, 0}, + {"F_SETSIZE", Const, 0}, + {"F_SHLCK", Const, 0}, + {"F_SINGLE_WRITER", Const, 16}, + {"F_TEST", Const, 0}, + {"F_THAW_FS", Const, 0}, + {"F_TLOCK", Const, 0}, + {"F_TRANSCODEKEY", Const, 16}, + {"F_ULOCK", Const, 0}, + {"F_UNLCK", Const, 0}, + {"F_UNLCKSYS", Const, 0}, + {"F_VOLPOSMODE", Const, 0}, + {"F_WRITEBOOTSTRAP", Const, 0}, + {"F_WRLCK", Const, 0}, + {"Faccessat", Func, 0}, + {"Fallocate", Func, 0}, + {"Fbootstraptransfer_t", Type, 0}, + {"Fbootstraptransfer_t.Buffer", Field, 0}, + {"Fbootstraptransfer_t.Length", Field, 0}, + {"Fbootstraptransfer_t.Offset", Field, 0}, + {"Fchdir", Func, 0}, + {"Fchflags", Func, 0}, + {"Fchmod", Func, 0}, + {"Fchmodat", Func, 0}, + {"Fchown", Func, 0}, + {"Fchownat", Func, 0}, + {"FcntlFlock", Func, 3}, + {"FdSet", Type, 0}, + {"FdSet.Bits", Field, 0}, + {"FdSet.X__fds_bits", Field, 0}, + {"Fdatasync", Func, 0}, + {"FileNotifyInformation", Type, 0}, + {"FileNotifyInformation.Action", Field, 0}, + {"FileNotifyInformation.FileName", Field, 0}, + {"FileNotifyInformation.FileNameLength", Field, 0}, + {"FileNotifyInformation.NextEntryOffset", Field, 0}, + {"Filetime", Type, 0}, + {"Filetime.HighDateTime", Field, 0}, + {"Filetime.LowDateTime", Field, 0}, + {"FindClose", Func, 0}, + {"FindFirstFile", Func, 0}, + {"FindNextFile", Func, 0}, + {"Flock", Func, 0}, + {"Flock_t", Type, 0}, + {"Flock_t.Len", Field, 0}, + {"Flock_t.Pad_cgo_0", Field, 0}, + {"Flock_t.Pad_cgo_1", Field, 3}, + {"Flock_t.Pid", Field, 0}, + {"Flock_t.Start", Field, 0}, + {"Flock_t.Sysid", Field, 0}, + {"Flock_t.Type", Field, 0}, + {"Flock_t.Whence", 
Field, 0}, + {"FlushBpf", Func, 0}, + {"FlushFileBuffers", Func, 0}, + {"FlushViewOfFile", Func, 0}, + {"ForkExec", Func, 0}, + {"ForkLock", Var, 0}, + {"FormatMessage", Func, 0}, + {"Fpathconf", Func, 0}, + {"FreeAddrInfoW", Func, 1}, + {"FreeEnvironmentStrings", Func, 0}, + {"FreeLibrary", Func, 0}, + {"Fsid", Type, 0}, + {"Fsid.Val", Field, 0}, + {"Fsid.X__fsid_val", Field, 2}, + {"Fsid.X__val", Field, 0}, + {"Fstat", Func, 0}, + {"Fstatat", Func, 12}, + {"Fstatfs", Func, 0}, + {"Fstore_t", Type, 0}, + {"Fstore_t.Bytesalloc", Field, 0}, + {"Fstore_t.Flags", Field, 0}, + {"Fstore_t.Length", Field, 0}, + {"Fstore_t.Offset", Field, 0}, + {"Fstore_t.Posmode", Field, 0}, + {"Fsync", Func, 0}, + {"Ftruncate", Func, 0}, + {"FullPath", Func, 4}, + {"Futimes", Func, 0}, + {"Futimesat", Func, 0}, + {"GENERIC_ALL", Const, 0}, + {"GENERIC_EXECUTE", Const, 0}, + {"GENERIC_READ", Const, 0}, + {"GENERIC_WRITE", Const, 0}, + {"GUID", Type, 1}, + {"GUID.Data1", Field, 1}, + {"GUID.Data2", Field, 1}, + {"GUID.Data3", Field, 1}, + {"GUID.Data4", Field, 1}, + {"GetAcceptExSockaddrs", Func, 0}, + {"GetAdaptersInfo", Func, 0}, + {"GetAddrInfoW", Func, 1}, + {"GetCommandLine", Func, 0}, + {"GetComputerName", Func, 0}, + {"GetConsoleMode", Func, 1}, + {"GetCurrentDirectory", Func, 0}, + {"GetCurrentProcess", Func, 0}, + {"GetEnvironmentStrings", Func, 0}, + {"GetEnvironmentVariable", Func, 0}, + {"GetExitCodeProcess", Func, 0}, + {"GetFileAttributes", Func, 0}, + {"GetFileAttributesEx", Func, 0}, + {"GetFileExInfoStandard", Const, 0}, + {"GetFileExMaxInfoLevel", Const, 0}, + {"GetFileInformationByHandle", Func, 0}, + {"GetFileType", Func, 0}, + {"GetFullPathName", Func, 0}, + {"GetHostByName", Func, 0}, + {"GetIfEntry", Func, 0}, + {"GetLastError", Func, 0}, + {"GetLengthSid", Func, 0}, + {"GetLongPathName", Func, 0}, + {"GetProcAddress", Func, 0}, + {"GetProcessTimes", Func, 0}, + {"GetProtoByName", Func, 0}, + {"GetQueuedCompletionStatus", Func, 0}, + {"GetServByName", Func, 0}, + 
{"GetShortPathName", Func, 0}, + {"GetStartupInfo", Func, 0}, + {"GetStdHandle", Func, 0}, + {"GetSystemTimeAsFileTime", Func, 0}, + {"GetTempPath", Func, 0}, + {"GetTimeZoneInformation", Func, 0}, + {"GetTokenInformation", Func, 0}, + {"GetUserNameEx", Func, 0}, + {"GetUserProfileDirectory", Func, 0}, + {"GetVersion", Func, 0}, + {"Getcwd", Func, 0}, + {"Getdents", Func, 0}, + {"Getdirentries", Func, 0}, + {"Getdtablesize", Func, 0}, + {"Getegid", Func, 0}, + {"Getenv", Func, 0}, + {"Geteuid", Func, 0}, + {"Getfsstat", Func, 0}, + {"Getgid", Func, 0}, + {"Getgroups", Func, 0}, + {"Getpagesize", Func, 0}, + {"Getpeername", Func, 0}, + {"Getpgid", Func, 0}, + {"Getpgrp", Func, 0}, + {"Getpid", Func, 0}, + {"Getppid", Func, 0}, + {"Getpriority", Func, 0}, + {"Getrlimit", Func, 0}, + {"Getrusage", Func, 0}, + {"Getsid", Func, 0}, + {"Getsockname", Func, 0}, + {"Getsockopt", Func, 1}, + {"GetsockoptByte", Func, 0}, + {"GetsockoptICMPv6Filter", Func, 2}, + {"GetsockoptIPMreq", Func, 0}, + {"GetsockoptIPMreqn", Func, 0}, + {"GetsockoptIPv6MTUInfo", Func, 2}, + {"GetsockoptIPv6Mreq", Func, 0}, + {"GetsockoptInet4Addr", Func, 0}, + {"GetsockoptInt", Func, 0}, + {"GetsockoptUcred", Func, 1}, + {"Gettid", Func, 0}, + {"Gettimeofday", Func, 0}, + {"Getuid", Func, 0}, + {"Getwd", Func, 0}, + {"Getxattr", Func, 1}, + {"HANDLE_FLAG_INHERIT", Const, 0}, + {"HKEY_CLASSES_ROOT", Const, 0}, + {"HKEY_CURRENT_CONFIG", Const, 0}, + {"HKEY_CURRENT_USER", Const, 0}, + {"HKEY_DYN_DATA", Const, 0}, + {"HKEY_LOCAL_MACHINE", Const, 0}, + {"HKEY_PERFORMANCE_DATA", Const, 0}, + {"HKEY_USERS", Const, 0}, + {"HUPCL", Const, 0}, + {"Handle", Type, 0}, + {"Hostent", Type, 0}, + {"Hostent.AddrList", Field, 0}, + {"Hostent.AddrType", Field, 0}, + {"Hostent.Aliases", Field, 0}, + {"Hostent.Length", Field, 0}, + {"Hostent.Name", Field, 0}, + {"ICANON", Const, 0}, + {"ICMP6_FILTER", Const, 2}, + {"ICMPV6_FILTER", Const, 2}, + {"ICMPv6Filter", Type, 2}, + {"ICMPv6Filter.Data", Field, 2}, + 
{"ICMPv6Filter.Filt", Field, 2}, + {"ICRNL", Const, 0}, + {"IEXTEN", Const, 0}, + {"IFAN_ARRIVAL", Const, 1}, + {"IFAN_DEPARTURE", Const, 1}, + {"IFA_ADDRESS", Const, 0}, + {"IFA_ANYCAST", Const, 0}, + {"IFA_BROADCAST", Const, 0}, + {"IFA_CACHEINFO", Const, 0}, + {"IFA_F_DADFAILED", Const, 0}, + {"IFA_F_DEPRECATED", Const, 0}, + {"IFA_F_HOMEADDRESS", Const, 0}, + {"IFA_F_NODAD", Const, 0}, + {"IFA_F_OPTIMISTIC", Const, 0}, + {"IFA_F_PERMANENT", Const, 0}, + {"IFA_F_SECONDARY", Const, 0}, + {"IFA_F_TEMPORARY", Const, 0}, + {"IFA_F_TENTATIVE", Const, 0}, + {"IFA_LABEL", Const, 0}, + {"IFA_LOCAL", Const, 0}, + {"IFA_MAX", Const, 0}, + {"IFA_MULTICAST", Const, 0}, + {"IFA_ROUTE", Const, 1}, + {"IFA_UNSPEC", Const, 0}, + {"IFF_ALLMULTI", Const, 0}, + {"IFF_ALTPHYS", Const, 0}, + {"IFF_AUTOMEDIA", Const, 0}, + {"IFF_BROADCAST", Const, 0}, + {"IFF_CANTCHANGE", Const, 0}, + {"IFF_CANTCONFIG", Const, 1}, + {"IFF_DEBUG", Const, 0}, + {"IFF_DRV_OACTIVE", Const, 0}, + {"IFF_DRV_RUNNING", Const, 0}, + {"IFF_DYING", Const, 0}, + {"IFF_DYNAMIC", Const, 0}, + {"IFF_LINK0", Const, 0}, + {"IFF_LINK1", Const, 0}, + {"IFF_LINK2", Const, 0}, + {"IFF_LOOPBACK", Const, 0}, + {"IFF_MASTER", Const, 0}, + {"IFF_MONITOR", Const, 0}, + {"IFF_MULTICAST", Const, 0}, + {"IFF_NOARP", Const, 0}, + {"IFF_NOTRAILERS", Const, 0}, + {"IFF_NO_PI", Const, 0}, + {"IFF_OACTIVE", Const, 0}, + {"IFF_ONE_QUEUE", Const, 0}, + {"IFF_POINTOPOINT", Const, 0}, + {"IFF_POINTTOPOINT", Const, 0}, + {"IFF_PORTSEL", Const, 0}, + {"IFF_PPROMISC", Const, 0}, + {"IFF_PROMISC", Const, 0}, + {"IFF_RENAMING", Const, 0}, + {"IFF_RUNNING", Const, 0}, + {"IFF_SIMPLEX", Const, 0}, + {"IFF_SLAVE", Const, 0}, + {"IFF_SMART", Const, 0}, + {"IFF_STATICARP", Const, 0}, + {"IFF_TAP", Const, 0}, + {"IFF_TUN", Const, 0}, + {"IFF_TUN_EXCL", Const, 0}, + {"IFF_UP", Const, 0}, + {"IFF_VNET_HDR", Const, 0}, + {"IFLA_ADDRESS", Const, 0}, + {"IFLA_BROADCAST", Const, 0}, + {"IFLA_COST", Const, 0}, + {"IFLA_IFALIAS", Const, 0}, + 
{"IFLA_IFNAME", Const, 0}, + {"IFLA_LINK", Const, 0}, + {"IFLA_LINKINFO", Const, 0}, + {"IFLA_LINKMODE", Const, 0}, + {"IFLA_MAP", Const, 0}, + {"IFLA_MASTER", Const, 0}, + {"IFLA_MAX", Const, 0}, + {"IFLA_MTU", Const, 0}, + {"IFLA_NET_NS_PID", Const, 0}, + {"IFLA_OPERSTATE", Const, 0}, + {"IFLA_PRIORITY", Const, 0}, + {"IFLA_PROTINFO", Const, 0}, + {"IFLA_QDISC", Const, 0}, + {"IFLA_STATS", Const, 0}, + {"IFLA_TXQLEN", Const, 0}, + {"IFLA_UNSPEC", Const, 0}, + {"IFLA_WEIGHT", Const, 0}, + {"IFLA_WIRELESS", Const, 0}, + {"IFNAMSIZ", Const, 0}, + {"IFT_1822", Const, 0}, + {"IFT_A12MPPSWITCH", Const, 0}, + {"IFT_AAL2", Const, 0}, + {"IFT_AAL5", Const, 0}, + {"IFT_ADSL", Const, 0}, + {"IFT_AFLANE8023", Const, 0}, + {"IFT_AFLANE8025", Const, 0}, + {"IFT_ARAP", Const, 0}, + {"IFT_ARCNET", Const, 0}, + {"IFT_ARCNETPLUS", Const, 0}, + {"IFT_ASYNC", Const, 0}, + {"IFT_ATM", Const, 0}, + {"IFT_ATMDXI", Const, 0}, + {"IFT_ATMFUNI", Const, 0}, + {"IFT_ATMIMA", Const, 0}, + {"IFT_ATMLOGICAL", Const, 0}, + {"IFT_ATMRADIO", Const, 0}, + {"IFT_ATMSUBINTERFACE", Const, 0}, + {"IFT_ATMVCIENDPT", Const, 0}, + {"IFT_ATMVIRTUAL", Const, 0}, + {"IFT_BGPPOLICYACCOUNTING", Const, 0}, + {"IFT_BLUETOOTH", Const, 1}, + {"IFT_BRIDGE", Const, 0}, + {"IFT_BSC", Const, 0}, + {"IFT_CARP", Const, 0}, + {"IFT_CCTEMUL", Const, 0}, + {"IFT_CELLULAR", Const, 0}, + {"IFT_CEPT", Const, 0}, + {"IFT_CES", Const, 0}, + {"IFT_CHANNEL", Const, 0}, + {"IFT_CNR", Const, 0}, + {"IFT_COFFEE", Const, 0}, + {"IFT_COMPOSITELINK", Const, 0}, + {"IFT_DCN", Const, 0}, + {"IFT_DIGITALPOWERLINE", Const, 0}, + {"IFT_DIGITALWRAPPEROVERHEADCHANNEL", Const, 0}, + {"IFT_DLSW", Const, 0}, + {"IFT_DOCSCABLEDOWNSTREAM", Const, 0}, + {"IFT_DOCSCABLEMACLAYER", Const, 0}, + {"IFT_DOCSCABLEUPSTREAM", Const, 0}, + {"IFT_DOCSCABLEUPSTREAMCHANNEL", Const, 1}, + {"IFT_DS0", Const, 0}, + {"IFT_DS0BUNDLE", Const, 0}, + {"IFT_DS1FDL", Const, 0}, + {"IFT_DS3", Const, 0}, + {"IFT_DTM", Const, 0}, + {"IFT_DUMMY", Const, 1}, + 
{"IFT_DVBASILN", Const, 0}, + {"IFT_DVBASIOUT", Const, 0}, + {"IFT_DVBRCCDOWNSTREAM", Const, 0}, + {"IFT_DVBRCCMACLAYER", Const, 0}, + {"IFT_DVBRCCUPSTREAM", Const, 0}, + {"IFT_ECONET", Const, 1}, + {"IFT_ENC", Const, 0}, + {"IFT_EON", Const, 0}, + {"IFT_EPLRS", Const, 0}, + {"IFT_ESCON", Const, 0}, + {"IFT_ETHER", Const, 0}, + {"IFT_FAITH", Const, 0}, + {"IFT_FAST", Const, 0}, + {"IFT_FASTETHER", Const, 0}, + {"IFT_FASTETHERFX", Const, 0}, + {"IFT_FDDI", Const, 0}, + {"IFT_FIBRECHANNEL", Const, 0}, + {"IFT_FRAMERELAYINTERCONNECT", Const, 0}, + {"IFT_FRAMERELAYMPI", Const, 0}, + {"IFT_FRDLCIENDPT", Const, 0}, + {"IFT_FRELAY", Const, 0}, + {"IFT_FRELAYDCE", Const, 0}, + {"IFT_FRF16MFRBUNDLE", Const, 0}, + {"IFT_FRFORWARD", Const, 0}, + {"IFT_G703AT2MB", Const, 0}, + {"IFT_G703AT64K", Const, 0}, + {"IFT_GIF", Const, 0}, + {"IFT_GIGABITETHERNET", Const, 0}, + {"IFT_GR303IDT", Const, 0}, + {"IFT_GR303RDT", Const, 0}, + {"IFT_H323GATEKEEPER", Const, 0}, + {"IFT_H323PROXY", Const, 0}, + {"IFT_HDH1822", Const, 0}, + {"IFT_HDLC", Const, 0}, + {"IFT_HDSL2", Const, 0}, + {"IFT_HIPERLAN2", Const, 0}, + {"IFT_HIPPI", Const, 0}, + {"IFT_HIPPIINTERFACE", Const, 0}, + {"IFT_HOSTPAD", Const, 0}, + {"IFT_HSSI", Const, 0}, + {"IFT_HY", Const, 0}, + {"IFT_IBM370PARCHAN", Const, 0}, + {"IFT_IDSL", Const, 0}, + {"IFT_IEEE1394", Const, 0}, + {"IFT_IEEE80211", Const, 0}, + {"IFT_IEEE80212", Const, 0}, + {"IFT_IEEE8023ADLAG", Const, 0}, + {"IFT_IFGSN", Const, 0}, + {"IFT_IMT", Const, 0}, + {"IFT_INFINIBAND", Const, 1}, + {"IFT_INTERLEAVE", Const, 0}, + {"IFT_IP", Const, 0}, + {"IFT_IPFORWARD", Const, 0}, + {"IFT_IPOVERATM", Const, 0}, + {"IFT_IPOVERCDLC", Const, 0}, + {"IFT_IPOVERCLAW", Const, 0}, + {"IFT_IPSWITCH", Const, 0}, + {"IFT_IPXIP", Const, 0}, + {"IFT_ISDN", Const, 0}, + {"IFT_ISDNBASIC", Const, 0}, + {"IFT_ISDNPRIMARY", Const, 0}, + {"IFT_ISDNS", Const, 0}, + {"IFT_ISDNU", Const, 0}, + {"IFT_ISO88022LLC", Const, 0}, + {"IFT_ISO88023", Const, 0}, + {"IFT_ISO88024", Const, 0}, + 
{"IFT_ISO88025", Const, 0}, + {"IFT_ISO88025CRFPINT", Const, 0}, + {"IFT_ISO88025DTR", Const, 0}, + {"IFT_ISO88025FIBER", Const, 0}, + {"IFT_ISO88026", Const, 0}, + {"IFT_ISUP", Const, 0}, + {"IFT_L2VLAN", Const, 0}, + {"IFT_L3IPVLAN", Const, 0}, + {"IFT_L3IPXVLAN", Const, 0}, + {"IFT_LAPB", Const, 0}, + {"IFT_LAPD", Const, 0}, + {"IFT_LAPF", Const, 0}, + {"IFT_LINEGROUP", Const, 1}, + {"IFT_LOCALTALK", Const, 0}, + {"IFT_LOOP", Const, 0}, + {"IFT_MEDIAMAILOVERIP", Const, 0}, + {"IFT_MFSIGLINK", Const, 0}, + {"IFT_MIOX25", Const, 0}, + {"IFT_MODEM", Const, 0}, + {"IFT_MPC", Const, 0}, + {"IFT_MPLS", Const, 0}, + {"IFT_MPLSTUNNEL", Const, 0}, + {"IFT_MSDSL", Const, 0}, + {"IFT_MVL", Const, 0}, + {"IFT_MYRINET", Const, 0}, + {"IFT_NFAS", Const, 0}, + {"IFT_NSIP", Const, 0}, + {"IFT_OPTICALCHANNEL", Const, 0}, + {"IFT_OPTICALTRANSPORT", Const, 0}, + {"IFT_OTHER", Const, 0}, + {"IFT_P10", Const, 0}, + {"IFT_P80", Const, 0}, + {"IFT_PARA", Const, 0}, + {"IFT_PDP", Const, 0}, + {"IFT_PFLOG", Const, 0}, + {"IFT_PFLOW", Const, 1}, + {"IFT_PFSYNC", Const, 0}, + {"IFT_PLC", Const, 0}, + {"IFT_PON155", Const, 1}, + {"IFT_PON622", Const, 1}, + {"IFT_POS", Const, 0}, + {"IFT_PPP", Const, 0}, + {"IFT_PPPMULTILINKBUNDLE", Const, 0}, + {"IFT_PROPATM", Const, 1}, + {"IFT_PROPBWAP2MP", Const, 0}, + {"IFT_PROPCNLS", Const, 0}, + {"IFT_PROPDOCSWIRELESSDOWNSTREAM", Const, 0}, + {"IFT_PROPDOCSWIRELESSMACLAYER", Const, 0}, + {"IFT_PROPDOCSWIRELESSUPSTREAM", Const, 0}, + {"IFT_PROPMUX", Const, 0}, + {"IFT_PROPVIRTUAL", Const, 0}, + {"IFT_PROPWIRELESSP2P", Const, 0}, + {"IFT_PTPSERIAL", Const, 0}, + {"IFT_PVC", Const, 0}, + {"IFT_Q2931", Const, 1}, + {"IFT_QLLC", Const, 0}, + {"IFT_RADIOMAC", Const, 0}, + {"IFT_RADSL", Const, 0}, + {"IFT_REACHDSL", Const, 0}, + {"IFT_RFC1483", Const, 0}, + {"IFT_RS232", Const, 0}, + {"IFT_RSRB", Const, 0}, + {"IFT_SDLC", Const, 0}, + {"IFT_SDSL", Const, 0}, + {"IFT_SHDSL", Const, 0}, + {"IFT_SIP", Const, 0}, + {"IFT_SIPSIG", Const, 1}, + {"IFT_SIPTG", 
Const, 1}, + {"IFT_SLIP", Const, 0}, + {"IFT_SMDSDXI", Const, 0}, + {"IFT_SMDSICIP", Const, 0}, + {"IFT_SONET", Const, 0}, + {"IFT_SONETOVERHEADCHANNEL", Const, 0}, + {"IFT_SONETPATH", Const, 0}, + {"IFT_SONETVT", Const, 0}, + {"IFT_SRP", Const, 0}, + {"IFT_SS7SIGLINK", Const, 0}, + {"IFT_STACKTOSTACK", Const, 0}, + {"IFT_STARLAN", Const, 0}, + {"IFT_STF", Const, 0}, + {"IFT_T1", Const, 0}, + {"IFT_TDLC", Const, 0}, + {"IFT_TELINK", Const, 1}, + {"IFT_TERMPAD", Const, 0}, + {"IFT_TR008", Const, 0}, + {"IFT_TRANSPHDLC", Const, 0}, + {"IFT_TUNNEL", Const, 0}, + {"IFT_ULTRA", Const, 0}, + {"IFT_USB", Const, 0}, + {"IFT_V11", Const, 0}, + {"IFT_V35", Const, 0}, + {"IFT_V36", Const, 0}, + {"IFT_V37", Const, 0}, + {"IFT_VDSL", Const, 0}, + {"IFT_VIRTUALIPADDRESS", Const, 0}, + {"IFT_VIRTUALTG", Const, 1}, + {"IFT_VOICEDID", Const, 1}, + {"IFT_VOICEEM", Const, 0}, + {"IFT_VOICEEMFGD", Const, 1}, + {"IFT_VOICEENCAP", Const, 0}, + {"IFT_VOICEFGDEANA", Const, 1}, + {"IFT_VOICEFXO", Const, 0}, + {"IFT_VOICEFXS", Const, 0}, + {"IFT_VOICEOVERATM", Const, 0}, + {"IFT_VOICEOVERCABLE", Const, 1}, + {"IFT_VOICEOVERFRAMERELAY", Const, 0}, + {"IFT_VOICEOVERIP", Const, 0}, + {"IFT_X213", Const, 0}, + {"IFT_X25", Const, 0}, + {"IFT_X25DDN", Const, 0}, + {"IFT_X25HUNTGROUP", Const, 0}, + {"IFT_X25MLP", Const, 0}, + {"IFT_X25PLE", Const, 0}, + {"IFT_XETHER", Const, 0}, + {"IGNBRK", Const, 0}, + {"IGNCR", Const, 0}, + {"IGNORE", Const, 0}, + {"IGNPAR", Const, 0}, + {"IMAXBEL", Const, 0}, + {"INFINITE", Const, 0}, + {"INLCR", Const, 0}, + {"INPCK", Const, 0}, + {"INVALID_FILE_ATTRIBUTES", Const, 0}, + {"IN_ACCESS", Const, 0}, + {"IN_ALL_EVENTS", Const, 0}, + {"IN_ATTRIB", Const, 0}, + {"IN_CLASSA_HOST", Const, 0}, + {"IN_CLASSA_MAX", Const, 0}, + {"IN_CLASSA_NET", Const, 0}, + {"IN_CLASSA_NSHIFT", Const, 0}, + {"IN_CLASSB_HOST", Const, 0}, + {"IN_CLASSB_MAX", Const, 0}, + {"IN_CLASSB_NET", Const, 0}, + {"IN_CLASSB_NSHIFT", Const, 0}, + {"IN_CLASSC_HOST", Const, 0}, + {"IN_CLASSC_NET", 
Const, 0}, + {"IN_CLASSC_NSHIFT", Const, 0}, + {"IN_CLASSD_HOST", Const, 0}, + {"IN_CLASSD_NET", Const, 0}, + {"IN_CLASSD_NSHIFT", Const, 0}, + {"IN_CLOEXEC", Const, 0}, + {"IN_CLOSE", Const, 0}, + {"IN_CLOSE_NOWRITE", Const, 0}, + {"IN_CLOSE_WRITE", Const, 0}, + {"IN_CREATE", Const, 0}, + {"IN_DELETE", Const, 0}, + {"IN_DELETE_SELF", Const, 0}, + {"IN_DONT_FOLLOW", Const, 0}, + {"IN_EXCL_UNLINK", Const, 0}, + {"IN_IGNORED", Const, 0}, + {"IN_ISDIR", Const, 0}, + {"IN_LINKLOCALNETNUM", Const, 0}, + {"IN_LOOPBACKNET", Const, 0}, + {"IN_MASK_ADD", Const, 0}, + {"IN_MODIFY", Const, 0}, + {"IN_MOVE", Const, 0}, + {"IN_MOVED_FROM", Const, 0}, + {"IN_MOVED_TO", Const, 0}, + {"IN_MOVE_SELF", Const, 0}, + {"IN_NONBLOCK", Const, 0}, + {"IN_ONESHOT", Const, 0}, + {"IN_ONLYDIR", Const, 0}, + {"IN_OPEN", Const, 0}, + {"IN_Q_OVERFLOW", Const, 0}, + {"IN_RFC3021_HOST", Const, 1}, + {"IN_RFC3021_MASK", Const, 1}, + {"IN_RFC3021_NET", Const, 1}, + {"IN_RFC3021_NSHIFT", Const, 1}, + {"IN_UNMOUNT", Const, 0}, + {"IOC_IN", Const, 1}, + {"IOC_INOUT", Const, 1}, + {"IOC_OUT", Const, 1}, + {"IOC_VENDOR", Const, 3}, + {"IOC_WS2", Const, 1}, + {"IO_REPARSE_TAG_SYMLINK", Const, 4}, + {"IPMreq", Type, 0}, + {"IPMreq.Interface", Field, 0}, + {"IPMreq.Multiaddr", Field, 0}, + {"IPMreqn", Type, 0}, + {"IPMreqn.Address", Field, 0}, + {"IPMreqn.Ifindex", Field, 0}, + {"IPMreqn.Multiaddr", Field, 0}, + {"IPPROTO_3PC", Const, 0}, + {"IPPROTO_ADFS", Const, 0}, + {"IPPROTO_AH", Const, 0}, + {"IPPROTO_AHIP", Const, 0}, + {"IPPROTO_APES", Const, 0}, + {"IPPROTO_ARGUS", Const, 0}, + {"IPPROTO_AX25", Const, 0}, + {"IPPROTO_BHA", Const, 0}, + {"IPPROTO_BLT", Const, 0}, + {"IPPROTO_BRSATMON", Const, 0}, + {"IPPROTO_CARP", Const, 0}, + {"IPPROTO_CFTP", Const, 0}, + {"IPPROTO_CHAOS", Const, 0}, + {"IPPROTO_CMTP", Const, 0}, + {"IPPROTO_COMP", Const, 0}, + {"IPPROTO_CPHB", Const, 0}, + {"IPPROTO_CPNX", Const, 0}, + {"IPPROTO_DCCP", Const, 0}, + {"IPPROTO_DDP", Const, 0}, + {"IPPROTO_DGP", Const, 0}, + 
{"IPPROTO_DIVERT", Const, 0}, + {"IPPROTO_DIVERT_INIT", Const, 3}, + {"IPPROTO_DIVERT_RESP", Const, 3}, + {"IPPROTO_DONE", Const, 0}, + {"IPPROTO_DSTOPTS", Const, 0}, + {"IPPROTO_EGP", Const, 0}, + {"IPPROTO_EMCON", Const, 0}, + {"IPPROTO_ENCAP", Const, 0}, + {"IPPROTO_EON", Const, 0}, + {"IPPROTO_ESP", Const, 0}, + {"IPPROTO_ETHERIP", Const, 0}, + {"IPPROTO_FRAGMENT", Const, 0}, + {"IPPROTO_GGP", Const, 0}, + {"IPPROTO_GMTP", Const, 0}, + {"IPPROTO_GRE", Const, 0}, + {"IPPROTO_HELLO", Const, 0}, + {"IPPROTO_HMP", Const, 0}, + {"IPPROTO_HOPOPTS", Const, 0}, + {"IPPROTO_ICMP", Const, 0}, + {"IPPROTO_ICMPV6", Const, 0}, + {"IPPROTO_IDP", Const, 0}, + {"IPPROTO_IDPR", Const, 0}, + {"IPPROTO_IDRP", Const, 0}, + {"IPPROTO_IGMP", Const, 0}, + {"IPPROTO_IGP", Const, 0}, + {"IPPROTO_IGRP", Const, 0}, + {"IPPROTO_IL", Const, 0}, + {"IPPROTO_INLSP", Const, 0}, + {"IPPROTO_INP", Const, 0}, + {"IPPROTO_IP", Const, 0}, + {"IPPROTO_IPCOMP", Const, 0}, + {"IPPROTO_IPCV", Const, 0}, + {"IPPROTO_IPEIP", Const, 0}, + {"IPPROTO_IPIP", Const, 0}, + {"IPPROTO_IPPC", Const, 0}, + {"IPPROTO_IPV4", Const, 0}, + {"IPPROTO_IPV6", Const, 0}, + {"IPPROTO_IPV6_ICMP", Const, 1}, + {"IPPROTO_IRTP", Const, 0}, + {"IPPROTO_KRYPTOLAN", Const, 0}, + {"IPPROTO_LARP", Const, 0}, + {"IPPROTO_LEAF1", Const, 0}, + {"IPPROTO_LEAF2", Const, 0}, + {"IPPROTO_MAX", Const, 0}, + {"IPPROTO_MAXID", Const, 0}, + {"IPPROTO_MEAS", Const, 0}, + {"IPPROTO_MH", Const, 1}, + {"IPPROTO_MHRP", Const, 0}, + {"IPPROTO_MICP", Const, 0}, + {"IPPROTO_MOBILE", Const, 0}, + {"IPPROTO_MPLS", Const, 1}, + {"IPPROTO_MTP", Const, 0}, + {"IPPROTO_MUX", Const, 0}, + {"IPPROTO_ND", Const, 0}, + {"IPPROTO_NHRP", Const, 0}, + {"IPPROTO_NONE", Const, 0}, + {"IPPROTO_NSP", Const, 0}, + {"IPPROTO_NVPII", Const, 0}, + {"IPPROTO_OLD_DIVERT", Const, 0}, + {"IPPROTO_OSPFIGP", Const, 0}, + {"IPPROTO_PFSYNC", Const, 0}, + {"IPPROTO_PGM", Const, 0}, + {"IPPROTO_PIGP", Const, 0}, + {"IPPROTO_PIM", Const, 0}, + {"IPPROTO_PRM", Const, 0}, + 
{"IPPROTO_PUP", Const, 0}, + {"IPPROTO_PVP", Const, 0}, + {"IPPROTO_RAW", Const, 0}, + {"IPPROTO_RCCMON", Const, 0}, + {"IPPROTO_RDP", Const, 0}, + {"IPPROTO_ROUTING", Const, 0}, + {"IPPROTO_RSVP", Const, 0}, + {"IPPROTO_RVD", Const, 0}, + {"IPPROTO_SATEXPAK", Const, 0}, + {"IPPROTO_SATMON", Const, 0}, + {"IPPROTO_SCCSP", Const, 0}, + {"IPPROTO_SCTP", Const, 0}, + {"IPPROTO_SDRP", Const, 0}, + {"IPPROTO_SEND", Const, 1}, + {"IPPROTO_SEP", Const, 0}, + {"IPPROTO_SKIP", Const, 0}, + {"IPPROTO_SPACER", Const, 0}, + {"IPPROTO_SRPC", Const, 0}, + {"IPPROTO_ST", Const, 0}, + {"IPPROTO_SVMTP", Const, 0}, + {"IPPROTO_SWIPE", Const, 0}, + {"IPPROTO_TCF", Const, 0}, + {"IPPROTO_TCP", Const, 0}, + {"IPPROTO_TLSP", Const, 0}, + {"IPPROTO_TP", Const, 0}, + {"IPPROTO_TPXX", Const, 0}, + {"IPPROTO_TRUNK1", Const, 0}, + {"IPPROTO_TRUNK2", Const, 0}, + {"IPPROTO_TTP", Const, 0}, + {"IPPROTO_UDP", Const, 0}, + {"IPPROTO_UDPLITE", Const, 0}, + {"IPPROTO_VINES", Const, 0}, + {"IPPROTO_VISA", Const, 0}, + {"IPPROTO_VMTP", Const, 0}, + {"IPPROTO_VRRP", Const, 1}, + {"IPPROTO_WBEXPAK", Const, 0}, + {"IPPROTO_WBMON", Const, 0}, + {"IPPROTO_WSN", Const, 0}, + {"IPPROTO_XNET", Const, 0}, + {"IPPROTO_XTP", Const, 0}, + {"IPV6_2292DSTOPTS", Const, 0}, + {"IPV6_2292HOPLIMIT", Const, 0}, + {"IPV6_2292HOPOPTS", Const, 0}, + {"IPV6_2292NEXTHOP", Const, 0}, + {"IPV6_2292PKTINFO", Const, 0}, + {"IPV6_2292PKTOPTIONS", Const, 0}, + {"IPV6_2292RTHDR", Const, 0}, + {"IPV6_ADDRFORM", Const, 0}, + {"IPV6_ADD_MEMBERSHIP", Const, 0}, + {"IPV6_AUTHHDR", Const, 0}, + {"IPV6_AUTH_LEVEL", Const, 1}, + {"IPV6_AUTOFLOWLABEL", Const, 0}, + {"IPV6_BINDANY", Const, 0}, + {"IPV6_BINDV6ONLY", Const, 0}, + {"IPV6_BOUND_IF", Const, 0}, + {"IPV6_CHECKSUM", Const, 0}, + {"IPV6_DEFAULT_MULTICAST_HOPS", Const, 0}, + {"IPV6_DEFAULT_MULTICAST_LOOP", Const, 0}, + {"IPV6_DEFHLIM", Const, 0}, + {"IPV6_DONTFRAG", Const, 0}, + {"IPV6_DROP_MEMBERSHIP", Const, 0}, + {"IPV6_DSTOPTS", Const, 0}, + {"IPV6_ESP_NETWORK_LEVEL", Const, 
1}, + {"IPV6_ESP_TRANS_LEVEL", Const, 1}, + {"IPV6_FAITH", Const, 0}, + {"IPV6_FLOWINFO_MASK", Const, 0}, + {"IPV6_FLOWLABEL_MASK", Const, 0}, + {"IPV6_FRAGTTL", Const, 0}, + {"IPV6_FW_ADD", Const, 0}, + {"IPV6_FW_DEL", Const, 0}, + {"IPV6_FW_FLUSH", Const, 0}, + {"IPV6_FW_GET", Const, 0}, + {"IPV6_FW_ZERO", Const, 0}, + {"IPV6_HLIMDEC", Const, 0}, + {"IPV6_HOPLIMIT", Const, 0}, + {"IPV6_HOPOPTS", Const, 0}, + {"IPV6_IPCOMP_LEVEL", Const, 1}, + {"IPV6_IPSEC_POLICY", Const, 0}, + {"IPV6_JOIN_ANYCAST", Const, 0}, + {"IPV6_JOIN_GROUP", Const, 0}, + {"IPV6_LEAVE_ANYCAST", Const, 0}, + {"IPV6_LEAVE_GROUP", Const, 0}, + {"IPV6_MAXHLIM", Const, 0}, + {"IPV6_MAXOPTHDR", Const, 0}, + {"IPV6_MAXPACKET", Const, 0}, + {"IPV6_MAX_GROUP_SRC_FILTER", Const, 0}, + {"IPV6_MAX_MEMBERSHIPS", Const, 0}, + {"IPV6_MAX_SOCK_SRC_FILTER", Const, 0}, + {"IPV6_MIN_MEMBERSHIPS", Const, 0}, + {"IPV6_MMTU", Const, 0}, + {"IPV6_MSFILTER", Const, 0}, + {"IPV6_MTU", Const, 0}, + {"IPV6_MTU_DISCOVER", Const, 0}, + {"IPV6_MULTICAST_HOPS", Const, 0}, + {"IPV6_MULTICAST_IF", Const, 0}, + {"IPV6_MULTICAST_LOOP", Const, 0}, + {"IPV6_NEXTHOP", Const, 0}, + {"IPV6_OPTIONS", Const, 1}, + {"IPV6_PATHMTU", Const, 0}, + {"IPV6_PIPEX", Const, 1}, + {"IPV6_PKTINFO", Const, 0}, + {"IPV6_PMTUDISC_DO", Const, 0}, + {"IPV6_PMTUDISC_DONT", Const, 0}, + {"IPV6_PMTUDISC_PROBE", Const, 0}, + {"IPV6_PMTUDISC_WANT", Const, 0}, + {"IPV6_PORTRANGE", Const, 0}, + {"IPV6_PORTRANGE_DEFAULT", Const, 0}, + {"IPV6_PORTRANGE_HIGH", Const, 0}, + {"IPV6_PORTRANGE_LOW", Const, 0}, + {"IPV6_PREFER_TEMPADDR", Const, 0}, + {"IPV6_RECVDSTOPTS", Const, 0}, + {"IPV6_RECVDSTPORT", Const, 3}, + {"IPV6_RECVERR", Const, 0}, + {"IPV6_RECVHOPLIMIT", Const, 0}, + {"IPV6_RECVHOPOPTS", Const, 0}, + {"IPV6_RECVPATHMTU", Const, 0}, + {"IPV6_RECVPKTINFO", Const, 0}, + {"IPV6_RECVRTHDR", Const, 0}, + {"IPV6_RECVTCLASS", Const, 0}, + {"IPV6_ROUTER_ALERT", Const, 0}, + {"IPV6_RTABLE", Const, 1}, + {"IPV6_RTHDR", Const, 0}, + {"IPV6_RTHDRDSTOPTS", Const, 
0}, + {"IPV6_RTHDR_LOOSE", Const, 0}, + {"IPV6_RTHDR_STRICT", Const, 0}, + {"IPV6_RTHDR_TYPE_0", Const, 0}, + {"IPV6_RXDSTOPTS", Const, 0}, + {"IPV6_RXHOPOPTS", Const, 0}, + {"IPV6_SOCKOPT_RESERVED1", Const, 0}, + {"IPV6_TCLASS", Const, 0}, + {"IPV6_UNICAST_HOPS", Const, 0}, + {"IPV6_USE_MIN_MTU", Const, 0}, + {"IPV6_V6ONLY", Const, 0}, + {"IPV6_VERSION", Const, 0}, + {"IPV6_VERSION_MASK", Const, 0}, + {"IPV6_XFRM_POLICY", Const, 0}, + {"IP_ADD_MEMBERSHIP", Const, 0}, + {"IP_ADD_SOURCE_MEMBERSHIP", Const, 0}, + {"IP_AUTH_LEVEL", Const, 1}, + {"IP_BINDANY", Const, 0}, + {"IP_BLOCK_SOURCE", Const, 0}, + {"IP_BOUND_IF", Const, 0}, + {"IP_DEFAULT_MULTICAST_LOOP", Const, 0}, + {"IP_DEFAULT_MULTICAST_TTL", Const, 0}, + {"IP_DF", Const, 0}, + {"IP_DIVERTFL", Const, 3}, + {"IP_DONTFRAG", Const, 0}, + {"IP_DROP_MEMBERSHIP", Const, 0}, + {"IP_DROP_SOURCE_MEMBERSHIP", Const, 0}, + {"IP_DUMMYNET3", Const, 0}, + {"IP_DUMMYNET_CONFIGURE", Const, 0}, + {"IP_DUMMYNET_DEL", Const, 0}, + {"IP_DUMMYNET_FLUSH", Const, 0}, + {"IP_DUMMYNET_GET", Const, 0}, + {"IP_EF", Const, 1}, + {"IP_ERRORMTU", Const, 1}, + {"IP_ESP_NETWORK_LEVEL", Const, 1}, + {"IP_ESP_TRANS_LEVEL", Const, 1}, + {"IP_FAITH", Const, 0}, + {"IP_FREEBIND", Const, 0}, + {"IP_FW3", Const, 0}, + {"IP_FW_ADD", Const, 0}, + {"IP_FW_DEL", Const, 0}, + {"IP_FW_FLUSH", Const, 0}, + {"IP_FW_GET", Const, 0}, + {"IP_FW_NAT_CFG", Const, 0}, + {"IP_FW_NAT_DEL", Const, 0}, + {"IP_FW_NAT_GET_CONFIG", Const, 0}, + {"IP_FW_NAT_GET_LOG", Const, 0}, + {"IP_FW_RESETLOG", Const, 0}, + {"IP_FW_TABLE_ADD", Const, 0}, + {"IP_FW_TABLE_DEL", Const, 0}, + {"IP_FW_TABLE_FLUSH", Const, 0}, + {"IP_FW_TABLE_GETSIZE", Const, 0}, + {"IP_FW_TABLE_LIST", Const, 0}, + {"IP_FW_ZERO", Const, 0}, + {"IP_HDRINCL", Const, 0}, + {"IP_IPCOMP_LEVEL", Const, 1}, + {"IP_IPSECFLOWINFO", Const, 1}, + {"IP_IPSEC_LOCAL_AUTH", Const, 1}, + {"IP_IPSEC_LOCAL_CRED", Const, 1}, + {"IP_IPSEC_LOCAL_ID", Const, 1}, + {"IP_IPSEC_POLICY", Const, 0}, + {"IP_IPSEC_REMOTE_AUTH", 
Const, 1}, + {"IP_IPSEC_REMOTE_CRED", Const, 1}, + {"IP_IPSEC_REMOTE_ID", Const, 1}, + {"IP_MAXPACKET", Const, 0}, + {"IP_MAX_GROUP_SRC_FILTER", Const, 0}, + {"IP_MAX_MEMBERSHIPS", Const, 0}, + {"IP_MAX_SOCK_MUTE_FILTER", Const, 0}, + {"IP_MAX_SOCK_SRC_FILTER", Const, 0}, + {"IP_MAX_SOURCE_FILTER", Const, 0}, + {"IP_MF", Const, 0}, + {"IP_MINFRAGSIZE", Const, 1}, + {"IP_MINTTL", Const, 0}, + {"IP_MIN_MEMBERSHIPS", Const, 0}, + {"IP_MSFILTER", Const, 0}, + {"IP_MSS", Const, 0}, + {"IP_MTU", Const, 0}, + {"IP_MTU_DISCOVER", Const, 0}, + {"IP_MULTICAST_IF", Const, 0}, + {"IP_MULTICAST_IFINDEX", Const, 0}, + {"IP_MULTICAST_LOOP", Const, 0}, + {"IP_MULTICAST_TTL", Const, 0}, + {"IP_MULTICAST_VIF", Const, 0}, + {"IP_NAT__XXX", Const, 0}, + {"IP_OFFMASK", Const, 0}, + {"IP_OLD_FW_ADD", Const, 0}, + {"IP_OLD_FW_DEL", Const, 0}, + {"IP_OLD_FW_FLUSH", Const, 0}, + {"IP_OLD_FW_GET", Const, 0}, + {"IP_OLD_FW_RESETLOG", Const, 0}, + {"IP_OLD_FW_ZERO", Const, 0}, + {"IP_ONESBCAST", Const, 0}, + {"IP_OPTIONS", Const, 0}, + {"IP_ORIGDSTADDR", Const, 0}, + {"IP_PASSSEC", Const, 0}, + {"IP_PIPEX", Const, 1}, + {"IP_PKTINFO", Const, 0}, + {"IP_PKTOPTIONS", Const, 0}, + {"IP_PMTUDISC", Const, 0}, + {"IP_PMTUDISC_DO", Const, 0}, + {"IP_PMTUDISC_DONT", Const, 0}, + {"IP_PMTUDISC_PROBE", Const, 0}, + {"IP_PMTUDISC_WANT", Const, 0}, + {"IP_PORTRANGE", Const, 0}, + {"IP_PORTRANGE_DEFAULT", Const, 0}, + {"IP_PORTRANGE_HIGH", Const, 0}, + {"IP_PORTRANGE_LOW", Const, 0}, + {"IP_RECVDSTADDR", Const, 0}, + {"IP_RECVDSTPORT", Const, 1}, + {"IP_RECVERR", Const, 0}, + {"IP_RECVIF", Const, 0}, + {"IP_RECVOPTS", Const, 0}, + {"IP_RECVORIGDSTADDR", Const, 0}, + {"IP_RECVPKTINFO", Const, 0}, + {"IP_RECVRETOPTS", Const, 0}, + {"IP_RECVRTABLE", Const, 1}, + {"IP_RECVTOS", Const, 0}, + {"IP_RECVTTL", Const, 0}, + {"IP_RETOPTS", Const, 0}, + {"IP_RF", Const, 0}, + {"IP_ROUTER_ALERT", Const, 0}, + {"IP_RSVP_OFF", Const, 0}, + {"IP_RSVP_ON", Const, 0}, + {"IP_RSVP_VIF_OFF", Const, 0}, + {"IP_RSVP_VIF_ON", 
Const, 0}, + {"IP_RTABLE", Const, 1}, + {"IP_SENDSRCADDR", Const, 0}, + {"IP_STRIPHDR", Const, 0}, + {"IP_TOS", Const, 0}, + {"IP_TRAFFIC_MGT_BACKGROUND", Const, 0}, + {"IP_TRANSPARENT", Const, 0}, + {"IP_TTL", Const, 0}, + {"IP_UNBLOCK_SOURCE", Const, 0}, + {"IP_XFRM_POLICY", Const, 0}, + {"IPv6MTUInfo", Type, 2}, + {"IPv6MTUInfo.Addr", Field, 2}, + {"IPv6MTUInfo.Mtu", Field, 2}, + {"IPv6Mreq", Type, 0}, + {"IPv6Mreq.Interface", Field, 0}, + {"IPv6Mreq.Multiaddr", Field, 0}, + {"ISIG", Const, 0}, + {"ISTRIP", Const, 0}, + {"IUCLC", Const, 0}, + {"IUTF8", Const, 0}, + {"IXANY", Const, 0}, + {"IXOFF", Const, 0}, + {"IXON", Const, 0}, + {"IfAddrmsg", Type, 0}, + {"IfAddrmsg.Family", Field, 0}, + {"IfAddrmsg.Flags", Field, 0}, + {"IfAddrmsg.Index", Field, 0}, + {"IfAddrmsg.Prefixlen", Field, 0}, + {"IfAddrmsg.Scope", Field, 0}, + {"IfAnnounceMsghdr", Type, 1}, + {"IfAnnounceMsghdr.Hdrlen", Field, 2}, + {"IfAnnounceMsghdr.Index", Field, 1}, + {"IfAnnounceMsghdr.Msglen", Field, 1}, + {"IfAnnounceMsghdr.Name", Field, 1}, + {"IfAnnounceMsghdr.Type", Field, 1}, + {"IfAnnounceMsghdr.Version", Field, 1}, + {"IfAnnounceMsghdr.What", Field, 1}, + {"IfData", Type, 0}, + {"IfData.Addrlen", Field, 0}, + {"IfData.Baudrate", Field, 0}, + {"IfData.Capabilities", Field, 2}, + {"IfData.Collisions", Field, 0}, + {"IfData.Datalen", Field, 0}, + {"IfData.Epoch", Field, 0}, + {"IfData.Hdrlen", Field, 0}, + {"IfData.Hwassist", Field, 0}, + {"IfData.Ibytes", Field, 0}, + {"IfData.Ierrors", Field, 0}, + {"IfData.Imcasts", Field, 0}, + {"IfData.Ipackets", Field, 0}, + {"IfData.Iqdrops", Field, 0}, + {"IfData.Lastchange", Field, 0}, + {"IfData.Link_state", Field, 0}, + {"IfData.Mclpool", Field, 2}, + {"IfData.Metric", Field, 0}, + {"IfData.Mtu", Field, 0}, + {"IfData.Noproto", Field, 0}, + {"IfData.Obytes", Field, 0}, + {"IfData.Oerrors", Field, 0}, + {"IfData.Omcasts", Field, 0}, + {"IfData.Opackets", Field, 0}, + {"IfData.Pad", Field, 2}, + {"IfData.Pad_cgo_0", Field, 2}, + 
{"IfData.Pad_cgo_1", Field, 2}, + {"IfData.Physical", Field, 0}, + {"IfData.Recvquota", Field, 0}, + {"IfData.Recvtiming", Field, 0}, + {"IfData.Reserved1", Field, 0}, + {"IfData.Reserved2", Field, 0}, + {"IfData.Spare_char1", Field, 0}, + {"IfData.Spare_char2", Field, 0}, + {"IfData.Type", Field, 0}, + {"IfData.Typelen", Field, 0}, + {"IfData.Unused1", Field, 0}, + {"IfData.Unused2", Field, 0}, + {"IfData.Xmitquota", Field, 0}, + {"IfData.Xmittiming", Field, 0}, + {"IfInfomsg", Type, 0}, + {"IfInfomsg.Change", Field, 0}, + {"IfInfomsg.Family", Field, 0}, + {"IfInfomsg.Flags", Field, 0}, + {"IfInfomsg.Index", Field, 0}, + {"IfInfomsg.Type", Field, 0}, + {"IfInfomsg.X__ifi_pad", Field, 0}, + {"IfMsghdr", Type, 0}, + {"IfMsghdr.Addrs", Field, 0}, + {"IfMsghdr.Data", Field, 0}, + {"IfMsghdr.Flags", Field, 0}, + {"IfMsghdr.Hdrlen", Field, 2}, + {"IfMsghdr.Index", Field, 0}, + {"IfMsghdr.Msglen", Field, 0}, + {"IfMsghdr.Pad1", Field, 2}, + {"IfMsghdr.Pad2", Field, 2}, + {"IfMsghdr.Pad_cgo_0", Field, 0}, + {"IfMsghdr.Pad_cgo_1", Field, 2}, + {"IfMsghdr.Tableid", Field, 2}, + {"IfMsghdr.Type", Field, 0}, + {"IfMsghdr.Version", Field, 0}, + {"IfMsghdr.Xflags", Field, 2}, + {"IfaMsghdr", Type, 0}, + {"IfaMsghdr.Addrs", Field, 0}, + {"IfaMsghdr.Flags", Field, 0}, + {"IfaMsghdr.Hdrlen", Field, 2}, + {"IfaMsghdr.Index", Field, 0}, + {"IfaMsghdr.Metric", Field, 0}, + {"IfaMsghdr.Msglen", Field, 0}, + {"IfaMsghdr.Pad1", Field, 2}, + {"IfaMsghdr.Pad2", Field, 2}, + {"IfaMsghdr.Pad_cgo_0", Field, 0}, + {"IfaMsghdr.Tableid", Field, 2}, + {"IfaMsghdr.Type", Field, 0}, + {"IfaMsghdr.Version", Field, 0}, + {"IfmaMsghdr", Type, 0}, + {"IfmaMsghdr.Addrs", Field, 0}, + {"IfmaMsghdr.Flags", Field, 0}, + {"IfmaMsghdr.Index", Field, 0}, + {"IfmaMsghdr.Msglen", Field, 0}, + {"IfmaMsghdr.Pad_cgo_0", Field, 0}, + {"IfmaMsghdr.Type", Field, 0}, + {"IfmaMsghdr.Version", Field, 0}, + {"IfmaMsghdr2", Type, 0}, + {"IfmaMsghdr2.Addrs", Field, 0}, + {"IfmaMsghdr2.Flags", Field, 0}, + 
{"IfmaMsghdr2.Index", Field, 0}, + {"IfmaMsghdr2.Msglen", Field, 0}, + {"IfmaMsghdr2.Pad_cgo_0", Field, 0}, + {"IfmaMsghdr2.Refcount", Field, 0}, + {"IfmaMsghdr2.Type", Field, 0}, + {"IfmaMsghdr2.Version", Field, 0}, + {"ImplementsGetwd", Const, 0}, + {"Inet4Pktinfo", Type, 0}, + {"Inet4Pktinfo.Addr", Field, 0}, + {"Inet4Pktinfo.Ifindex", Field, 0}, + {"Inet4Pktinfo.Spec_dst", Field, 0}, + {"Inet6Pktinfo", Type, 0}, + {"Inet6Pktinfo.Addr", Field, 0}, + {"Inet6Pktinfo.Ifindex", Field, 0}, + {"InotifyAddWatch", Func, 0}, + {"InotifyEvent", Type, 0}, + {"InotifyEvent.Cookie", Field, 0}, + {"InotifyEvent.Len", Field, 0}, + {"InotifyEvent.Mask", Field, 0}, + {"InotifyEvent.Name", Field, 0}, + {"InotifyEvent.Wd", Field, 0}, + {"InotifyInit", Func, 0}, + {"InotifyInit1", Func, 0}, + {"InotifyRmWatch", Func, 0}, + {"InterfaceAddrMessage", Type, 0}, + {"InterfaceAddrMessage.Data", Field, 0}, + {"InterfaceAddrMessage.Header", Field, 0}, + {"InterfaceAnnounceMessage", Type, 1}, + {"InterfaceAnnounceMessage.Header", Field, 1}, + {"InterfaceInfo", Type, 0}, + {"InterfaceInfo.Address", Field, 0}, + {"InterfaceInfo.BroadcastAddress", Field, 0}, + {"InterfaceInfo.Flags", Field, 0}, + {"InterfaceInfo.Netmask", Field, 0}, + {"InterfaceMessage", Type, 0}, + {"InterfaceMessage.Data", Field, 0}, + {"InterfaceMessage.Header", Field, 0}, + {"InterfaceMulticastAddrMessage", Type, 0}, + {"InterfaceMulticastAddrMessage.Data", Field, 0}, + {"InterfaceMulticastAddrMessage.Header", Field, 0}, + {"InvalidHandle", Const, 0}, + {"Ioperm", Func, 0}, + {"Iopl", Func, 0}, + {"Iovec", Type, 0}, + {"Iovec.Base", Field, 0}, + {"Iovec.Len", Field, 0}, + {"IpAdapterInfo", Type, 0}, + {"IpAdapterInfo.AdapterName", Field, 0}, + {"IpAdapterInfo.Address", Field, 0}, + {"IpAdapterInfo.AddressLength", Field, 0}, + {"IpAdapterInfo.ComboIndex", Field, 0}, + {"IpAdapterInfo.CurrentIpAddress", Field, 0}, + {"IpAdapterInfo.Description", Field, 0}, + {"IpAdapterInfo.DhcpEnabled", Field, 0}, + 
{"IpAdapterInfo.DhcpServer", Field, 0}, + {"IpAdapterInfo.GatewayList", Field, 0}, + {"IpAdapterInfo.HaveWins", Field, 0}, + {"IpAdapterInfo.Index", Field, 0}, + {"IpAdapterInfo.IpAddressList", Field, 0}, + {"IpAdapterInfo.LeaseExpires", Field, 0}, + {"IpAdapterInfo.LeaseObtained", Field, 0}, + {"IpAdapterInfo.Next", Field, 0}, + {"IpAdapterInfo.PrimaryWinsServer", Field, 0}, + {"IpAdapterInfo.SecondaryWinsServer", Field, 0}, + {"IpAdapterInfo.Type", Field, 0}, + {"IpAddrString", Type, 0}, + {"IpAddrString.Context", Field, 0}, + {"IpAddrString.IpAddress", Field, 0}, + {"IpAddrString.IpMask", Field, 0}, + {"IpAddrString.Next", Field, 0}, + {"IpAddressString", Type, 0}, + {"IpAddressString.String", Field, 0}, + {"IpMaskString", Type, 0}, + {"IpMaskString.String", Field, 2}, + {"Issetugid", Func, 0}, + {"KEY_ALL_ACCESS", Const, 0}, + {"KEY_CREATE_LINK", Const, 0}, + {"KEY_CREATE_SUB_KEY", Const, 0}, + {"KEY_ENUMERATE_SUB_KEYS", Const, 0}, + {"KEY_EXECUTE", Const, 0}, + {"KEY_NOTIFY", Const, 0}, + {"KEY_QUERY_VALUE", Const, 0}, + {"KEY_READ", Const, 0}, + {"KEY_SET_VALUE", Const, 0}, + {"KEY_WOW64_32KEY", Const, 0}, + {"KEY_WOW64_64KEY", Const, 0}, + {"KEY_WRITE", Const, 0}, + {"Kevent", Func, 0}, + {"Kevent_t", Type, 0}, + {"Kevent_t.Data", Field, 0}, + {"Kevent_t.Fflags", Field, 0}, + {"Kevent_t.Filter", Field, 0}, + {"Kevent_t.Flags", Field, 0}, + {"Kevent_t.Ident", Field, 0}, + {"Kevent_t.Pad_cgo_0", Field, 2}, + {"Kevent_t.Udata", Field, 0}, + {"Kill", Func, 0}, + {"Klogctl", Func, 0}, + {"Kqueue", Func, 0}, + {"LANG_ENGLISH", Const, 0}, + {"LAYERED_PROTOCOL", Const, 2}, + {"LCNT_OVERLOAD_FLUSH", Const, 1}, + {"LINUX_REBOOT_CMD_CAD_OFF", Const, 0}, + {"LINUX_REBOOT_CMD_CAD_ON", Const, 0}, + {"LINUX_REBOOT_CMD_HALT", Const, 0}, + {"LINUX_REBOOT_CMD_KEXEC", Const, 0}, + {"LINUX_REBOOT_CMD_POWER_OFF", Const, 0}, + {"LINUX_REBOOT_CMD_RESTART", Const, 0}, + {"LINUX_REBOOT_CMD_RESTART2", Const, 0}, + {"LINUX_REBOOT_CMD_SW_SUSPEND", Const, 0}, + {"LINUX_REBOOT_MAGIC1", 
Const, 0}, + {"LINUX_REBOOT_MAGIC2", Const, 0}, + {"LOCK_EX", Const, 0}, + {"LOCK_NB", Const, 0}, + {"LOCK_SH", Const, 0}, + {"LOCK_UN", Const, 0}, + {"LazyDLL", Type, 0}, + {"LazyDLL.Name", Field, 0}, + {"LazyProc", Type, 0}, + {"LazyProc.Name", Field, 0}, + {"Lchown", Func, 0}, + {"Linger", Type, 0}, + {"Linger.Linger", Field, 0}, + {"Linger.Onoff", Field, 0}, + {"Link", Func, 0}, + {"Listen", Func, 0}, + {"Listxattr", Func, 1}, + {"LoadCancelIoEx", Func, 1}, + {"LoadConnectEx", Func, 1}, + {"LoadCreateSymbolicLink", Func, 4}, + {"LoadDLL", Func, 0}, + {"LoadGetAddrInfo", Func, 1}, + {"LoadLibrary", Func, 0}, + {"LoadSetFileCompletionNotificationModes", Func, 2}, + {"LocalFree", Func, 0}, + {"Log2phys_t", Type, 0}, + {"Log2phys_t.Contigbytes", Field, 0}, + {"Log2phys_t.Devoffset", Field, 0}, + {"Log2phys_t.Flags", Field, 0}, + {"LookupAccountName", Func, 0}, + {"LookupAccountSid", Func, 0}, + {"LookupSID", Func, 0}, + {"LsfJump", Func, 0}, + {"LsfSocket", Func, 0}, + {"LsfStmt", Func, 0}, + {"Lstat", Func, 0}, + {"MADV_AUTOSYNC", Const, 1}, + {"MADV_CAN_REUSE", Const, 0}, + {"MADV_CORE", Const, 1}, + {"MADV_DOFORK", Const, 0}, + {"MADV_DONTFORK", Const, 0}, + {"MADV_DONTNEED", Const, 0}, + {"MADV_FREE", Const, 0}, + {"MADV_FREE_REUSABLE", Const, 0}, + {"MADV_FREE_REUSE", Const, 0}, + {"MADV_HUGEPAGE", Const, 0}, + {"MADV_HWPOISON", Const, 0}, + {"MADV_MERGEABLE", Const, 0}, + {"MADV_NOCORE", Const, 1}, + {"MADV_NOHUGEPAGE", Const, 0}, + {"MADV_NORMAL", Const, 0}, + {"MADV_NOSYNC", Const, 1}, + {"MADV_PROTECT", Const, 1}, + {"MADV_RANDOM", Const, 0}, + {"MADV_REMOVE", Const, 0}, + {"MADV_SEQUENTIAL", Const, 0}, + {"MADV_SPACEAVAIL", Const, 3}, + {"MADV_UNMERGEABLE", Const, 0}, + {"MADV_WILLNEED", Const, 0}, + {"MADV_ZERO_WIRED_PAGES", Const, 0}, + {"MAP_32BIT", Const, 0}, + {"MAP_ALIGNED_SUPER", Const, 3}, + {"MAP_ALIGNMENT_16MB", Const, 3}, + {"MAP_ALIGNMENT_1TB", Const, 3}, + {"MAP_ALIGNMENT_256TB", Const, 3}, + {"MAP_ALIGNMENT_4GB", Const, 3}, + 
{"MAP_ALIGNMENT_64KB", Const, 3}, + {"MAP_ALIGNMENT_64PB", Const, 3}, + {"MAP_ALIGNMENT_MASK", Const, 3}, + {"MAP_ALIGNMENT_SHIFT", Const, 3}, + {"MAP_ANON", Const, 0}, + {"MAP_ANONYMOUS", Const, 0}, + {"MAP_COPY", Const, 0}, + {"MAP_DENYWRITE", Const, 0}, + {"MAP_EXECUTABLE", Const, 0}, + {"MAP_FILE", Const, 0}, + {"MAP_FIXED", Const, 0}, + {"MAP_FLAGMASK", Const, 3}, + {"MAP_GROWSDOWN", Const, 0}, + {"MAP_HASSEMAPHORE", Const, 0}, + {"MAP_HUGETLB", Const, 0}, + {"MAP_INHERIT", Const, 3}, + {"MAP_INHERIT_COPY", Const, 3}, + {"MAP_INHERIT_DEFAULT", Const, 3}, + {"MAP_INHERIT_DONATE_COPY", Const, 3}, + {"MAP_INHERIT_NONE", Const, 3}, + {"MAP_INHERIT_SHARE", Const, 3}, + {"MAP_JIT", Const, 0}, + {"MAP_LOCKED", Const, 0}, + {"MAP_NOCACHE", Const, 0}, + {"MAP_NOCORE", Const, 1}, + {"MAP_NOEXTEND", Const, 0}, + {"MAP_NONBLOCK", Const, 0}, + {"MAP_NORESERVE", Const, 0}, + {"MAP_NOSYNC", Const, 1}, + {"MAP_POPULATE", Const, 0}, + {"MAP_PREFAULT_READ", Const, 1}, + {"MAP_PRIVATE", Const, 0}, + {"MAP_RENAME", Const, 0}, + {"MAP_RESERVED0080", Const, 0}, + {"MAP_RESERVED0100", Const, 1}, + {"MAP_SHARED", Const, 0}, + {"MAP_STACK", Const, 0}, + {"MAP_TRYFIXED", Const, 3}, + {"MAP_TYPE", Const, 0}, + {"MAP_WIRED", Const, 3}, + {"MAXIMUM_REPARSE_DATA_BUFFER_SIZE", Const, 4}, + {"MAXLEN_IFDESCR", Const, 0}, + {"MAXLEN_PHYSADDR", Const, 0}, + {"MAX_ADAPTER_ADDRESS_LENGTH", Const, 0}, + {"MAX_ADAPTER_DESCRIPTION_LENGTH", Const, 0}, + {"MAX_ADAPTER_NAME_LENGTH", Const, 0}, + {"MAX_COMPUTERNAME_LENGTH", Const, 0}, + {"MAX_INTERFACE_NAME_LEN", Const, 0}, + {"MAX_LONG_PATH", Const, 0}, + {"MAX_PATH", Const, 0}, + {"MAX_PROTOCOL_CHAIN", Const, 2}, + {"MCL_CURRENT", Const, 0}, + {"MCL_FUTURE", Const, 0}, + {"MNT_DETACH", Const, 0}, + {"MNT_EXPIRE", Const, 0}, + {"MNT_FORCE", Const, 0}, + {"MSG_BCAST", Const, 1}, + {"MSG_CMSG_CLOEXEC", Const, 0}, + {"MSG_COMPAT", Const, 0}, + {"MSG_CONFIRM", Const, 0}, + {"MSG_CONTROLMBUF", Const, 1}, + {"MSG_CTRUNC", Const, 0}, + {"MSG_DONTROUTE", 
Const, 0}, + {"MSG_DONTWAIT", Const, 0}, + {"MSG_EOF", Const, 0}, + {"MSG_EOR", Const, 0}, + {"MSG_ERRQUEUE", Const, 0}, + {"MSG_FASTOPEN", Const, 1}, + {"MSG_FIN", Const, 0}, + {"MSG_FLUSH", Const, 0}, + {"MSG_HAVEMORE", Const, 0}, + {"MSG_HOLD", Const, 0}, + {"MSG_IOVUSRSPACE", Const, 1}, + {"MSG_LENUSRSPACE", Const, 1}, + {"MSG_MCAST", Const, 1}, + {"MSG_MORE", Const, 0}, + {"MSG_NAMEMBUF", Const, 1}, + {"MSG_NBIO", Const, 0}, + {"MSG_NEEDSA", Const, 0}, + {"MSG_NOSIGNAL", Const, 0}, + {"MSG_NOTIFICATION", Const, 0}, + {"MSG_OOB", Const, 0}, + {"MSG_PEEK", Const, 0}, + {"MSG_PROXY", Const, 0}, + {"MSG_RCVMORE", Const, 0}, + {"MSG_RST", Const, 0}, + {"MSG_SEND", Const, 0}, + {"MSG_SYN", Const, 0}, + {"MSG_TRUNC", Const, 0}, + {"MSG_TRYHARD", Const, 0}, + {"MSG_USERFLAGS", Const, 1}, + {"MSG_WAITALL", Const, 0}, + {"MSG_WAITFORONE", Const, 0}, + {"MSG_WAITSTREAM", Const, 0}, + {"MS_ACTIVE", Const, 0}, + {"MS_ASYNC", Const, 0}, + {"MS_BIND", Const, 0}, + {"MS_DEACTIVATE", Const, 0}, + {"MS_DIRSYNC", Const, 0}, + {"MS_INVALIDATE", Const, 0}, + {"MS_I_VERSION", Const, 0}, + {"MS_KERNMOUNT", Const, 0}, + {"MS_KILLPAGES", Const, 0}, + {"MS_MANDLOCK", Const, 0}, + {"MS_MGC_MSK", Const, 0}, + {"MS_MGC_VAL", Const, 0}, + {"MS_MOVE", Const, 0}, + {"MS_NOATIME", Const, 0}, + {"MS_NODEV", Const, 0}, + {"MS_NODIRATIME", Const, 0}, + {"MS_NOEXEC", Const, 0}, + {"MS_NOSUID", Const, 0}, + {"MS_NOUSER", Const, 0}, + {"MS_POSIXACL", Const, 0}, + {"MS_PRIVATE", Const, 0}, + {"MS_RDONLY", Const, 0}, + {"MS_REC", Const, 0}, + {"MS_RELATIME", Const, 0}, + {"MS_REMOUNT", Const, 0}, + {"MS_RMT_MASK", Const, 0}, + {"MS_SHARED", Const, 0}, + {"MS_SILENT", Const, 0}, + {"MS_SLAVE", Const, 0}, + {"MS_STRICTATIME", Const, 0}, + {"MS_SYNC", Const, 0}, + {"MS_SYNCHRONOUS", Const, 0}, + {"MS_UNBINDABLE", Const, 0}, + {"Madvise", Func, 0}, + {"MapViewOfFile", Func, 0}, + {"MaxTokenInfoClass", Const, 0}, + {"Mclpool", Type, 2}, + {"Mclpool.Alive", Field, 2}, + {"Mclpool.Cwm", Field, 2}, + 
{"Mclpool.Grown", Field, 2}, + {"Mclpool.Hwm", Field, 2}, + {"Mclpool.Lwm", Field, 2}, + {"MibIfRow", Type, 0}, + {"MibIfRow.AdminStatus", Field, 0}, + {"MibIfRow.Descr", Field, 0}, + {"MibIfRow.DescrLen", Field, 0}, + {"MibIfRow.InDiscards", Field, 0}, + {"MibIfRow.InErrors", Field, 0}, + {"MibIfRow.InNUcastPkts", Field, 0}, + {"MibIfRow.InOctets", Field, 0}, + {"MibIfRow.InUcastPkts", Field, 0}, + {"MibIfRow.InUnknownProtos", Field, 0}, + {"MibIfRow.Index", Field, 0}, + {"MibIfRow.LastChange", Field, 0}, + {"MibIfRow.Mtu", Field, 0}, + {"MibIfRow.Name", Field, 0}, + {"MibIfRow.OperStatus", Field, 0}, + {"MibIfRow.OutDiscards", Field, 0}, + {"MibIfRow.OutErrors", Field, 0}, + {"MibIfRow.OutNUcastPkts", Field, 0}, + {"MibIfRow.OutOctets", Field, 0}, + {"MibIfRow.OutQLen", Field, 0}, + {"MibIfRow.OutUcastPkts", Field, 0}, + {"MibIfRow.PhysAddr", Field, 0}, + {"MibIfRow.PhysAddrLen", Field, 0}, + {"MibIfRow.Speed", Field, 0}, + {"MibIfRow.Type", Field, 0}, + {"Mkdir", Func, 0}, + {"Mkdirat", Func, 0}, + {"Mkfifo", Func, 0}, + {"Mknod", Func, 0}, + {"Mknodat", Func, 0}, + {"Mlock", Func, 0}, + {"Mlockall", Func, 0}, + {"Mmap", Func, 0}, + {"Mount", Func, 0}, + {"MoveFile", Func, 0}, + {"Mprotect", Func, 0}, + {"Msghdr", Type, 0}, + {"Msghdr.Control", Field, 0}, + {"Msghdr.Controllen", Field, 0}, + {"Msghdr.Flags", Field, 0}, + {"Msghdr.Iov", Field, 0}, + {"Msghdr.Iovlen", Field, 0}, + {"Msghdr.Name", Field, 0}, + {"Msghdr.Namelen", Field, 0}, + {"Msghdr.Pad_cgo_0", Field, 0}, + {"Msghdr.Pad_cgo_1", Field, 0}, + {"Munlock", Func, 0}, + {"Munlockall", Func, 0}, + {"Munmap", Func, 0}, + {"MustLoadDLL", Func, 0}, + {"NAME_MAX", Const, 0}, + {"NETLINK_ADD_MEMBERSHIP", Const, 0}, + {"NETLINK_AUDIT", Const, 0}, + {"NETLINK_BROADCAST_ERROR", Const, 0}, + {"NETLINK_CONNECTOR", Const, 0}, + {"NETLINK_DNRTMSG", Const, 0}, + {"NETLINK_DROP_MEMBERSHIP", Const, 0}, + {"NETLINK_ECRYPTFS", Const, 0}, + {"NETLINK_FIB_LOOKUP", Const, 0}, + {"NETLINK_FIREWALL", Const, 0}, + 
{"NETLINK_GENERIC", Const, 0}, + {"NETLINK_INET_DIAG", Const, 0}, + {"NETLINK_IP6_FW", Const, 0}, + {"NETLINK_ISCSI", Const, 0}, + {"NETLINK_KOBJECT_UEVENT", Const, 0}, + {"NETLINK_NETFILTER", Const, 0}, + {"NETLINK_NFLOG", Const, 0}, + {"NETLINK_NO_ENOBUFS", Const, 0}, + {"NETLINK_PKTINFO", Const, 0}, + {"NETLINK_RDMA", Const, 0}, + {"NETLINK_ROUTE", Const, 0}, + {"NETLINK_SCSITRANSPORT", Const, 0}, + {"NETLINK_SELINUX", Const, 0}, + {"NETLINK_UNUSED", Const, 0}, + {"NETLINK_USERSOCK", Const, 0}, + {"NETLINK_XFRM", Const, 0}, + {"NET_RT_DUMP", Const, 0}, + {"NET_RT_DUMP2", Const, 0}, + {"NET_RT_FLAGS", Const, 0}, + {"NET_RT_IFLIST", Const, 0}, + {"NET_RT_IFLIST2", Const, 0}, + {"NET_RT_IFLISTL", Const, 1}, + {"NET_RT_IFMALIST", Const, 0}, + {"NET_RT_MAXID", Const, 0}, + {"NET_RT_OIFLIST", Const, 1}, + {"NET_RT_OOIFLIST", Const, 1}, + {"NET_RT_STAT", Const, 0}, + {"NET_RT_STATS", Const, 1}, + {"NET_RT_TABLE", Const, 1}, + {"NET_RT_TRASH", Const, 0}, + {"NLA_ALIGNTO", Const, 0}, + {"NLA_F_NESTED", Const, 0}, + {"NLA_F_NET_BYTEORDER", Const, 0}, + {"NLA_HDRLEN", Const, 0}, + {"NLMSG_ALIGNTO", Const, 0}, + {"NLMSG_DONE", Const, 0}, + {"NLMSG_ERROR", Const, 0}, + {"NLMSG_HDRLEN", Const, 0}, + {"NLMSG_MIN_TYPE", Const, 0}, + {"NLMSG_NOOP", Const, 0}, + {"NLMSG_OVERRUN", Const, 0}, + {"NLM_F_ACK", Const, 0}, + {"NLM_F_APPEND", Const, 0}, + {"NLM_F_ATOMIC", Const, 0}, + {"NLM_F_CREATE", Const, 0}, + {"NLM_F_DUMP", Const, 0}, + {"NLM_F_ECHO", Const, 0}, + {"NLM_F_EXCL", Const, 0}, + {"NLM_F_MATCH", Const, 0}, + {"NLM_F_MULTI", Const, 0}, + {"NLM_F_REPLACE", Const, 0}, + {"NLM_F_REQUEST", Const, 0}, + {"NLM_F_ROOT", Const, 0}, + {"NOFLSH", Const, 0}, + {"NOTE_ABSOLUTE", Const, 0}, + {"NOTE_ATTRIB", Const, 0}, + {"NOTE_BACKGROUND", Const, 16}, + {"NOTE_CHILD", Const, 0}, + {"NOTE_CRITICAL", Const, 16}, + {"NOTE_DELETE", Const, 0}, + {"NOTE_EOF", Const, 1}, + {"NOTE_EXEC", Const, 0}, + {"NOTE_EXIT", Const, 0}, + {"NOTE_EXITSTATUS", Const, 0}, + {"NOTE_EXIT_CSERROR", Const, 
16}, + {"NOTE_EXIT_DECRYPTFAIL", Const, 16}, + {"NOTE_EXIT_DETAIL", Const, 16}, + {"NOTE_EXIT_DETAIL_MASK", Const, 16}, + {"NOTE_EXIT_MEMORY", Const, 16}, + {"NOTE_EXIT_REPARENTED", Const, 16}, + {"NOTE_EXTEND", Const, 0}, + {"NOTE_FFAND", Const, 0}, + {"NOTE_FFCOPY", Const, 0}, + {"NOTE_FFCTRLMASK", Const, 0}, + {"NOTE_FFLAGSMASK", Const, 0}, + {"NOTE_FFNOP", Const, 0}, + {"NOTE_FFOR", Const, 0}, + {"NOTE_FORK", Const, 0}, + {"NOTE_LEEWAY", Const, 16}, + {"NOTE_LINK", Const, 0}, + {"NOTE_LOWAT", Const, 0}, + {"NOTE_NONE", Const, 0}, + {"NOTE_NSECONDS", Const, 0}, + {"NOTE_PCTRLMASK", Const, 0}, + {"NOTE_PDATAMASK", Const, 0}, + {"NOTE_REAP", Const, 0}, + {"NOTE_RENAME", Const, 0}, + {"NOTE_RESOURCEEND", Const, 0}, + {"NOTE_REVOKE", Const, 0}, + {"NOTE_SECONDS", Const, 0}, + {"NOTE_SIGNAL", Const, 0}, + {"NOTE_TRACK", Const, 0}, + {"NOTE_TRACKERR", Const, 0}, + {"NOTE_TRIGGER", Const, 0}, + {"NOTE_TRUNCATE", Const, 1}, + {"NOTE_USECONDS", Const, 0}, + {"NOTE_VM_ERROR", Const, 0}, + {"NOTE_VM_PRESSURE", Const, 0}, + {"NOTE_VM_PRESSURE_SUDDEN_TERMINATE", Const, 0}, + {"NOTE_VM_PRESSURE_TERMINATE", Const, 0}, + {"NOTE_WRITE", Const, 0}, + {"NameCanonical", Const, 0}, + {"NameCanonicalEx", Const, 0}, + {"NameDisplay", Const, 0}, + {"NameDnsDomain", Const, 0}, + {"NameFullyQualifiedDN", Const, 0}, + {"NameSamCompatible", Const, 0}, + {"NameServicePrincipal", Const, 0}, + {"NameUniqueId", Const, 0}, + {"NameUnknown", Const, 0}, + {"NameUserPrincipal", Const, 0}, + {"Nanosleep", Func, 0}, + {"NetApiBufferFree", Func, 0}, + {"NetGetJoinInformation", Func, 2}, + {"NetSetupDomainName", Const, 2}, + {"NetSetupUnjoined", Const, 2}, + {"NetSetupUnknownStatus", Const, 2}, + {"NetSetupWorkgroupName", Const, 2}, + {"NetUserGetInfo", Func, 0}, + {"NetlinkMessage", Type, 0}, + {"NetlinkMessage.Data", Field, 0}, + {"NetlinkMessage.Header", Field, 0}, + {"NetlinkRIB", Func, 0}, + {"NetlinkRouteAttr", Type, 0}, + {"NetlinkRouteAttr.Attr", Field, 0}, + {"NetlinkRouteAttr.Value", Field, 
0}, + {"NetlinkRouteRequest", Type, 0}, + {"NetlinkRouteRequest.Data", Field, 0}, + {"NetlinkRouteRequest.Header", Field, 0}, + {"NewCallback", Func, 0}, + {"NewCallbackCDecl", Func, 3}, + {"NewLazyDLL", Func, 0}, + {"NlAttr", Type, 0}, + {"NlAttr.Len", Field, 0}, + {"NlAttr.Type", Field, 0}, + {"NlMsgerr", Type, 0}, + {"NlMsgerr.Error", Field, 0}, + {"NlMsgerr.Msg", Field, 0}, + {"NlMsghdr", Type, 0}, + {"NlMsghdr.Flags", Field, 0}, + {"NlMsghdr.Len", Field, 0}, + {"NlMsghdr.Pid", Field, 0}, + {"NlMsghdr.Seq", Field, 0}, + {"NlMsghdr.Type", Field, 0}, + {"NsecToFiletime", Func, 0}, + {"NsecToTimespec", Func, 0}, + {"NsecToTimeval", Func, 0}, + {"Ntohs", Func, 0}, + {"OCRNL", Const, 0}, + {"OFDEL", Const, 0}, + {"OFILL", Const, 0}, + {"OFIOGETBMAP", Const, 1}, + {"OID_PKIX_KP_SERVER_AUTH", Var, 0}, + {"OID_SERVER_GATED_CRYPTO", Var, 0}, + {"OID_SGC_NETSCAPE", Var, 0}, + {"OLCUC", Const, 0}, + {"ONLCR", Const, 0}, + {"ONLRET", Const, 0}, + {"ONOCR", Const, 0}, + {"ONOEOT", Const, 1}, + {"OPEN_ALWAYS", Const, 0}, + {"OPEN_EXISTING", Const, 0}, + {"OPOST", Const, 0}, + {"O_ACCMODE", Const, 0}, + {"O_ALERT", Const, 0}, + {"O_ALT_IO", Const, 1}, + {"O_APPEND", Const, 0}, + {"O_ASYNC", Const, 0}, + {"O_CLOEXEC", Const, 0}, + {"O_CREAT", Const, 0}, + {"O_DIRECT", Const, 0}, + {"O_DIRECTORY", Const, 0}, + {"O_DP_GETRAWENCRYPTED", Const, 16}, + {"O_DSYNC", Const, 0}, + {"O_EVTONLY", Const, 0}, + {"O_EXCL", Const, 0}, + {"O_EXEC", Const, 0}, + {"O_EXLOCK", Const, 0}, + {"O_FSYNC", Const, 0}, + {"O_LARGEFILE", Const, 0}, + {"O_NDELAY", Const, 0}, + {"O_NOATIME", Const, 0}, + {"O_NOCTTY", Const, 0}, + {"O_NOFOLLOW", Const, 0}, + {"O_NONBLOCK", Const, 0}, + {"O_NOSIGPIPE", Const, 1}, + {"O_POPUP", Const, 0}, + {"O_RDONLY", Const, 0}, + {"O_RDWR", Const, 0}, + {"O_RSYNC", Const, 0}, + {"O_SHLOCK", Const, 0}, + {"O_SYMLINK", Const, 0}, + {"O_SYNC", Const, 0}, + {"O_TRUNC", Const, 0}, + {"O_TTY_INIT", Const, 0}, + {"O_WRONLY", Const, 0}, + {"Open", Func, 0}, + 
{"OpenCurrentProcessToken", Func, 0}, + {"OpenProcess", Func, 0}, + {"OpenProcessToken", Func, 0}, + {"Openat", Func, 0}, + {"Overlapped", Type, 0}, + {"Overlapped.HEvent", Field, 0}, + {"Overlapped.Internal", Field, 0}, + {"Overlapped.InternalHigh", Field, 0}, + {"Overlapped.Offset", Field, 0}, + {"Overlapped.OffsetHigh", Field, 0}, + {"PACKET_ADD_MEMBERSHIP", Const, 0}, + {"PACKET_BROADCAST", Const, 0}, + {"PACKET_DROP_MEMBERSHIP", Const, 0}, + {"PACKET_FASTROUTE", Const, 0}, + {"PACKET_HOST", Const, 0}, + {"PACKET_LOOPBACK", Const, 0}, + {"PACKET_MR_ALLMULTI", Const, 0}, + {"PACKET_MR_MULTICAST", Const, 0}, + {"PACKET_MR_PROMISC", Const, 0}, + {"PACKET_MULTICAST", Const, 0}, + {"PACKET_OTHERHOST", Const, 0}, + {"PACKET_OUTGOING", Const, 0}, + {"PACKET_RECV_OUTPUT", Const, 0}, + {"PACKET_RX_RING", Const, 0}, + {"PACKET_STATISTICS", Const, 0}, + {"PAGE_EXECUTE_READ", Const, 0}, + {"PAGE_EXECUTE_READWRITE", Const, 0}, + {"PAGE_EXECUTE_WRITECOPY", Const, 0}, + {"PAGE_READONLY", Const, 0}, + {"PAGE_READWRITE", Const, 0}, + {"PAGE_WRITECOPY", Const, 0}, + {"PARENB", Const, 0}, + {"PARMRK", Const, 0}, + {"PARODD", Const, 0}, + {"PENDIN", Const, 0}, + {"PFL_HIDDEN", Const, 2}, + {"PFL_MATCHES_PROTOCOL_ZERO", Const, 2}, + {"PFL_MULTIPLE_PROTO_ENTRIES", Const, 2}, + {"PFL_NETWORKDIRECT_PROVIDER", Const, 2}, + {"PFL_RECOMMENDED_PROTO_ENTRY", Const, 2}, + {"PF_FLUSH", Const, 1}, + {"PKCS_7_ASN_ENCODING", Const, 0}, + {"PMC5_PIPELINE_FLUSH", Const, 1}, + {"PRIO_PGRP", Const, 2}, + {"PRIO_PROCESS", Const, 2}, + {"PRIO_USER", Const, 2}, + {"PRI_IOFLUSH", Const, 1}, + {"PROCESS_QUERY_INFORMATION", Const, 0}, + {"PROCESS_TERMINATE", Const, 2}, + {"PROT_EXEC", Const, 0}, + {"PROT_GROWSDOWN", Const, 0}, + {"PROT_GROWSUP", Const, 0}, + {"PROT_NONE", Const, 0}, + {"PROT_READ", Const, 0}, + {"PROT_WRITE", Const, 0}, + {"PROV_DH_SCHANNEL", Const, 0}, + {"PROV_DSS", Const, 0}, + {"PROV_DSS_DH", Const, 0}, + {"PROV_EC_ECDSA_FULL", Const, 0}, + {"PROV_EC_ECDSA_SIG", Const, 0}, + 
{"PROV_EC_ECNRA_FULL", Const, 0}, + {"PROV_EC_ECNRA_SIG", Const, 0}, + {"PROV_FORTEZZA", Const, 0}, + {"PROV_INTEL_SEC", Const, 0}, + {"PROV_MS_EXCHANGE", Const, 0}, + {"PROV_REPLACE_OWF", Const, 0}, + {"PROV_RNG", Const, 0}, + {"PROV_RSA_AES", Const, 0}, + {"PROV_RSA_FULL", Const, 0}, + {"PROV_RSA_SCHANNEL", Const, 0}, + {"PROV_RSA_SIG", Const, 0}, + {"PROV_SPYRUS_LYNKS", Const, 0}, + {"PROV_SSL", Const, 0}, + {"PR_CAPBSET_DROP", Const, 0}, + {"PR_CAPBSET_READ", Const, 0}, + {"PR_CLEAR_SECCOMP_FILTER", Const, 0}, + {"PR_ENDIAN_BIG", Const, 0}, + {"PR_ENDIAN_LITTLE", Const, 0}, + {"PR_ENDIAN_PPC_LITTLE", Const, 0}, + {"PR_FPEMU_NOPRINT", Const, 0}, + {"PR_FPEMU_SIGFPE", Const, 0}, + {"PR_FP_EXC_ASYNC", Const, 0}, + {"PR_FP_EXC_DISABLED", Const, 0}, + {"PR_FP_EXC_DIV", Const, 0}, + {"PR_FP_EXC_INV", Const, 0}, + {"PR_FP_EXC_NONRECOV", Const, 0}, + {"PR_FP_EXC_OVF", Const, 0}, + {"PR_FP_EXC_PRECISE", Const, 0}, + {"PR_FP_EXC_RES", Const, 0}, + {"PR_FP_EXC_SW_ENABLE", Const, 0}, + {"PR_FP_EXC_UND", Const, 0}, + {"PR_GET_DUMPABLE", Const, 0}, + {"PR_GET_ENDIAN", Const, 0}, + {"PR_GET_FPEMU", Const, 0}, + {"PR_GET_FPEXC", Const, 0}, + {"PR_GET_KEEPCAPS", Const, 0}, + {"PR_GET_NAME", Const, 0}, + {"PR_GET_PDEATHSIG", Const, 0}, + {"PR_GET_SECCOMP", Const, 0}, + {"PR_GET_SECCOMP_FILTER", Const, 0}, + {"PR_GET_SECUREBITS", Const, 0}, + {"PR_GET_TIMERSLACK", Const, 0}, + {"PR_GET_TIMING", Const, 0}, + {"PR_GET_TSC", Const, 0}, + {"PR_GET_UNALIGN", Const, 0}, + {"PR_MCE_KILL", Const, 0}, + {"PR_MCE_KILL_CLEAR", Const, 0}, + {"PR_MCE_KILL_DEFAULT", Const, 0}, + {"PR_MCE_KILL_EARLY", Const, 0}, + {"PR_MCE_KILL_GET", Const, 0}, + {"PR_MCE_KILL_LATE", Const, 0}, + {"PR_MCE_KILL_SET", Const, 0}, + {"PR_SECCOMP_FILTER_EVENT", Const, 0}, + {"PR_SECCOMP_FILTER_SYSCALL", Const, 0}, + {"PR_SET_DUMPABLE", Const, 0}, + {"PR_SET_ENDIAN", Const, 0}, + {"PR_SET_FPEMU", Const, 0}, + {"PR_SET_FPEXC", Const, 0}, + {"PR_SET_KEEPCAPS", Const, 0}, + {"PR_SET_NAME", Const, 0}, + 
{"PR_SET_PDEATHSIG", Const, 0}, + {"PR_SET_PTRACER", Const, 0}, + {"PR_SET_SECCOMP", Const, 0}, + {"PR_SET_SECCOMP_FILTER", Const, 0}, + {"PR_SET_SECUREBITS", Const, 0}, + {"PR_SET_TIMERSLACK", Const, 0}, + {"PR_SET_TIMING", Const, 0}, + {"PR_SET_TSC", Const, 0}, + {"PR_SET_UNALIGN", Const, 0}, + {"PR_TASK_PERF_EVENTS_DISABLE", Const, 0}, + {"PR_TASK_PERF_EVENTS_ENABLE", Const, 0}, + {"PR_TIMING_STATISTICAL", Const, 0}, + {"PR_TIMING_TIMESTAMP", Const, 0}, + {"PR_TSC_ENABLE", Const, 0}, + {"PR_TSC_SIGSEGV", Const, 0}, + {"PR_UNALIGN_NOPRINT", Const, 0}, + {"PR_UNALIGN_SIGBUS", Const, 0}, + {"PTRACE_ARCH_PRCTL", Const, 0}, + {"PTRACE_ATTACH", Const, 0}, + {"PTRACE_CONT", Const, 0}, + {"PTRACE_DETACH", Const, 0}, + {"PTRACE_EVENT_CLONE", Const, 0}, + {"PTRACE_EVENT_EXEC", Const, 0}, + {"PTRACE_EVENT_EXIT", Const, 0}, + {"PTRACE_EVENT_FORK", Const, 0}, + {"PTRACE_EVENT_VFORK", Const, 0}, + {"PTRACE_EVENT_VFORK_DONE", Const, 0}, + {"PTRACE_GETCRUNCHREGS", Const, 0}, + {"PTRACE_GETEVENTMSG", Const, 0}, + {"PTRACE_GETFPREGS", Const, 0}, + {"PTRACE_GETFPXREGS", Const, 0}, + {"PTRACE_GETHBPREGS", Const, 0}, + {"PTRACE_GETREGS", Const, 0}, + {"PTRACE_GETREGSET", Const, 0}, + {"PTRACE_GETSIGINFO", Const, 0}, + {"PTRACE_GETVFPREGS", Const, 0}, + {"PTRACE_GETWMMXREGS", Const, 0}, + {"PTRACE_GET_THREAD_AREA", Const, 0}, + {"PTRACE_KILL", Const, 0}, + {"PTRACE_OLDSETOPTIONS", Const, 0}, + {"PTRACE_O_MASK", Const, 0}, + {"PTRACE_O_TRACECLONE", Const, 0}, + {"PTRACE_O_TRACEEXEC", Const, 0}, + {"PTRACE_O_TRACEEXIT", Const, 0}, + {"PTRACE_O_TRACEFORK", Const, 0}, + {"PTRACE_O_TRACESYSGOOD", Const, 0}, + {"PTRACE_O_TRACEVFORK", Const, 0}, + {"PTRACE_O_TRACEVFORKDONE", Const, 0}, + {"PTRACE_PEEKDATA", Const, 0}, + {"PTRACE_PEEKTEXT", Const, 0}, + {"PTRACE_PEEKUSR", Const, 0}, + {"PTRACE_POKEDATA", Const, 0}, + {"PTRACE_POKETEXT", Const, 0}, + {"PTRACE_POKEUSR", Const, 0}, + {"PTRACE_SETCRUNCHREGS", Const, 0}, + {"PTRACE_SETFPREGS", Const, 0}, + {"PTRACE_SETFPXREGS", Const, 0}, + 
{"PTRACE_SETHBPREGS", Const, 0}, + {"PTRACE_SETOPTIONS", Const, 0}, + {"PTRACE_SETREGS", Const, 0}, + {"PTRACE_SETREGSET", Const, 0}, + {"PTRACE_SETSIGINFO", Const, 0}, + {"PTRACE_SETVFPREGS", Const, 0}, + {"PTRACE_SETWMMXREGS", Const, 0}, + {"PTRACE_SET_SYSCALL", Const, 0}, + {"PTRACE_SET_THREAD_AREA", Const, 0}, + {"PTRACE_SINGLEBLOCK", Const, 0}, + {"PTRACE_SINGLESTEP", Const, 0}, + {"PTRACE_SYSCALL", Const, 0}, + {"PTRACE_SYSEMU", Const, 0}, + {"PTRACE_SYSEMU_SINGLESTEP", Const, 0}, + {"PTRACE_TRACEME", Const, 0}, + {"PT_ATTACH", Const, 0}, + {"PT_ATTACHEXC", Const, 0}, + {"PT_CONTINUE", Const, 0}, + {"PT_DATA_ADDR", Const, 0}, + {"PT_DENY_ATTACH", Const, 0}, + {"PT_DETACH", Const, 0}, + {"PT_FIRSTMACH", Const, 0}, + {"PT_FORCEQUOTA", Const, 0}, + {"PT_KILL", Const, 0}, + {"PT_MASK", Const, 1}, + {"PT_READ_D", Const, 0}, + {"PT_READ_I", Const, 0}, + {"PT_READ_U", Const, 0}, + {"PT_SIGEXC", Const, 0}, + {"PT_STEP", Const, 0}, + {"PT_TEXT_ADDR", Const, 0}, + {"PT_TEXT_END_ADDR", Const, 0}, + {"PT_THUPDATE", Const, 0}, + {"PT_TRACE_ME", Const, 0}, + {"PT_WRITE_D", Const, 0}, + {"PT_WRITE_I", Const, 0}, + {"PT_WRITE_U", Const, 0}, + {"ParseDirent", Func, 0}, + {"ParseNetlinkMessage", Func, 0}, + {"ParseNetlinkRouteAttr", Func, 0}, + {"ParseRoutingMessage", Func, 0}, + {"ParseRoutingSockaddr", Func, 0}, + {"ParseSocketControlMessage", Func, 0}, + {"ParseUnixCredentials", Func, 0}, + {"ParseUnixRights", Func, 0}, + {"PathMax", Const, 0}, + {"Pathconf", Func, 0}, + {"Pause", Func, 0}, + {"Pipe", Func, 0}, + {"Pipe2", Func, 1}, + {"PivotRoot", Func, 0}, + {"Pointer", Type, 11}, + {"PostQueuedCompletionStatus", Func, 0}, + {"Pread", Func, 0}, + {"Proc", Type, 0}, + {"Proc.Dll", Field, 0}, + {"Proc.Name", Field, 0}, + {"ProcAttr", Type, 0}, + {"ProcAttr.Dir", Field, 0}, + {"ProcAttr.Env", Field, 0}, + {"ProcAttr.Files", Field, 0}, + {"ProcAttr.Sys", Field, 0}, + {"Process32First", Func, 4}, + {"Process32Next", Func, 4}, + {"ProcessEntry32", Type, 4}, + 
{"ProcessEntry32.DefaultHeapID", Field, 4}, + {"ProcessEntry32.ExeFile", Field, 4}, + {"ProcessEntry32.Flags", Field, 4}, + {"ProcessEntry32.ModuleID", Field, 4}, + {"ProcessEntry32.ParentProcessID", Field, 4}, + {"ProcessEntry32.PriClassBase", Field, 4}, + {"ProcessEntry32.ProcessID", Field, 4}, + {"ProcessEntry32.Size", Field, 4}, + {"ProcessEntry32.Threads", Field, 4}, + {"ProcessEntry32.Usage", Field, 4}, + {"ProcessInformation", Type, 0}, + {"ProcessInformation.Process", Field, 0}, + {"ProcessInformation.ProcessId", Field, 0}, + {"ProcessInformation.Thread", Field, 0}, + {"ProcessInformation.ThreadId", Field, 0}, + {"Protoent", Type, 0}, + {"Protoent.Aliases", Field, 0}, + {"Protoent.Name", Field, 0}, + {"Protoent.Proto", Field, 0}, + {"PtraceAttach", Func, 0}, + {"PtraceCont", Func, 0}, + {"PtraceDetach", Func, 0}, + {"PtraceGetEventMsg", Func, 0}, + {"PtraceGetRegs", Func, 0}, + {"PtracePeekData", Func, 0}, + {"PtracePeekText", Func, 0}, + {"PtracePokeData", Func, 0}, + {"PtracePokeText", Func, 0}, + {"PtraceRegs", Type, 0}, + {"PtraceRegs.Cs", Field, 0}, + {"PtraceRegs.Ds", Field, 0}, + {"PtraceRegs.Eax", Field, 0}, + {"PtraceRegs.Ebp", Field, 0}, + {"PtraceRegs.Ebx", Field, 0}, + {"PtraceRegs.Ecx", Field, 0}, + {"PtraceRegs.Edi", Field, 0}, + {"PtraceRegs.Edx", Field, 0}, + {"PtraceRegs.Eflags", Field, 0}, + {"PtraceRegs.Eip", Field, 0}, + {"PtraceRegs.Es", Field, 0}, + {"PtraceRegs.Esi", Field, 0}, + {"PtraceRegs.Esp", Field, 0}, + {"PtraceRegs.Fs", Field, 0}, + {"PtraceRegs.Fs_base", Field, 0}, + {"PtraceRegs.Gs", Field, 0}, + {"PtraceRegs.Gs_base", Field, 0}, + {"PtraceRegs.Orig_eax", Field, 0}, + {"PtraceRegs.Orig_rax", Field, 0}, + {"PtraceRegs.R10", Field, 0}, + {"PtraceRegs.R11", Field, 0}, + {"PtraceRegs.R12", Field, 0}, + {"PtraceRegs.R13", Field, 0}, + {"PtraceRegs.R14", Field, 0}, + {"PtraceRegs.R15", Field, 0}, + {"PtraceRegs.R8", Field, 0}, + {"PtraceRegs.R9", Field, 0}, + {"PtraceRegs.Rax", Field, 0}, + {"PtraceRegs.Rbp", Field, 0}, + 
{"PtraceRegs.Rbx", Field, 0}, + {"PtraceRegs.Rcx", Field, 0}, + {"PtraceRegs.Rdi", Field, 0}, + {"PtraceRegs.Rdx", Field, 0}, + {"PtraceRegs.Rip", Field, 0}, + {"PtraceRegs.Rsi", Field, 0}, + {"PtraceRegs.Rsp", Field, 0}, + {"PtraceRegs.Ss", Field, 0}, + {"PtraceRegs.Uregs", Field, 0}, + {"PtraceRegs.Xcs", Field, 0}, + {"PtraceRegs.Xds", Field, 0}, + {"PtraceRegs.Xes", Field, 0}, + {"PtraceRegs.Xfs", Field, 0}, + {"PtraceRegs.Xgs", Field, 0}, + {"PtraceRegs.Xss", Field, 0}, + {"PtraceSetOptions", Func, 0}, + {"PtraceSetRegs", Func, 0}, + {"PtraceSingleStep", Func, 0}, + {"PtraceSyscall", Func, 1}, + {"Pwrite", Func, 0}, + {"REG_BINARY", Const, 0}, + {"REG_DWORD", Const, 0}, + {"REG_DWORD_BIG_ENDIAN", Const, 0}, + {"REG_DWORD_LITTLE_ENDIAN", Const, 0}, + {"REG_EXPAND_SZ", Const, 0}, + {"REG_FULL_RESOURCE_DESCRIPTOR", Const, 0}, + {"REG_LINK", Const, 0}, + {"REG_MULTI_SZ", Const, 0}, + {"REG_NONE", Const, 0}, + {"REG_QWORD", Const, 0}, + {"REG_QWORD_LITTLE_ENDIAN", Const, 0}, + {"REG_RESOURCE_LIST", Const, 0}, + {"REG_RESOURCE_REQUIREMENTS_LIST", Const, 0}, + {"REG_SZ", Const, 0}, + {"RLIMIT_AS", Const, 0}, + {"RLIMIT_CORE", Const, 0}, + {"RLIMIT_CPU", Const, 0}, + {"RLIMIT_CPU_USAGE_MONITOR", Const, 16}, + {"RLIMIT_DATA", Const, 0}, + {"RLIMIT_FSIZE", Const, 0}, + {"RLIMIT_NOFILE", Const, 0}, + {"RLIMIT_STACK", Const, 0}, + {"RLIM_INFINITY", Const, 0}, + {"RTAX_ADVMSS", Const, 0}, + {"RTAX_AUTHOR", Const, 0}, + {"RTAX_BRD", Const, 0}, + {"RTAX_CWND", Const, 0}, + {"RTAX_DST", Const, 0}, + {"RTAX_FEATURES", Const, 0}, + {"RTAX_FEATURE_ALLFRAG", Const, 0}, + {"RTAX_FEATURE_ECN", Const, 0}, + {"RTAX_FEATURE_SACK", Const, 0}, + {"RTAX_FEATURE_TIMESTAMP", Const, 0}, + {"RTAX_GATEWAY", Const, 0}, + {"RTAX_GENMASK", Const, 0}, + {"RTAX_HOPLIMIT", Const, 0}, + {"RTAX_IFA", Const, 0}, + {"RTAX_IFP", Const, 0}, + {"RTAX_INITCWND", Const, 0}, + {"RTAX_INITRWND", Const, 0}, + {"RTAX_LABEL", Const, 1}, + {"RTAX_LOCK", Const, 0}, + {"RTAX_MAX", Const, 0}, + {"RTAX_MTU", Const, 
0}, + {"RTAX_NETMASK", Const, 0}, + {"RTAX_REORDERING", Const, 0}, + {"RTAX_RTO_MIN", Const, 0}, + {"RTAX_RTT", Const, 0}, + {"RTAX_RTTVAR", Const, 0}, + {"RTAX_SRC", Const, 1}, + {"RTAX_SRCMASK", Const, 1}, + {"RTAX_SSTHRESH", Const, 0}, + {"RTAX_TAG", Const, 1}, + {"RTAX_UNSPEC", Const, 0}, + {"RTAX_WINDOW", Const, 0}, + {"RTA_ALIGNTO", Const, 0}, + {"RTA_AUTHOR", Const, 0}, + {"RTA_BRD", Const, 0}, + {"RTA_CACHEINFO", Const, 0}, + {"RTA_DST", Const, 0}, + {"RTA_FLOW", Const, 0}, + {"RTA_GATEWAY", Const, 0}, + {"RTA_GENMASK", Const, 0}, + {"RTA_IFA", Const, 0}, + {"RTA_IFP", Const, 0}, + {"RTA_IIF", Const, 0}, + {"RTA_LABEL", Const, 1}, + {"RTA_MAX", Const, 0}, + {"RTA_METRICS", Const, 0}, + {"RTA_MULTIPATH", Const, 0}, + {"RTA_NETMASK", Const, 0}, + {"RTA_OIF", Const, 0}, + {"RTA_PREFSRC", Const, 0}, + {"RTA_PRIORITY", Const, 0}, + {"RTA_SRC", Const, 0}, + {"RTA_SRCMASK", Const, 1}, + {"RTA_TABLE", Const, 0}, + {"RTA_TAG", Const, 1}, + {"RTA_UNSPEC", Const, 0}, + {"RTCF_DIRECTSRC", Const, 0}, + {"RTCF_DOREDIRECT", Const, 0}, + {"RTCF_LOG", Const, 0}, + {"RTCF_MASQ", Const, 0}, + {"RTCF_NAT", Const, 0}, + {"RTCF_VALVE", Const, 0}, + {"RTF_ADDRCLASSMASK", Const, 0}, + {"RTF_ADDRCONF", Const, 0}, + {"RTF_ALLONLINK", Const, 0}, + {"RTF_ANNOUNCE", Const, 1}, + {"RTF_BLACKHOLE", Const, 0}, + {"RTF_BROADCAST", Const, 0}, + {"RTF_CACHE", Const, 0}, + {"RTF_CLONED", Const, 1}, + {"RTF_CLONING", Const, 0}, + {"RTF_CONDEMNED", Const, 0}, + {"RTF_DEFAULT", Const, 0}, + {"RTF_DELCLONE", Const, 0}, + {"RTF_DONE", Const, 0}, + {"RTF_DYNAMIC", Const, 0}, + {"RTF_FLOW", Const, 0}, + {"RTF_FMASK", Const, 0}, + {"RTF_GATEWAY", Const, 0}, + {"RTF_GWFLAG_COMPAT", Const, 3}, + {"RTF_HOST", Const, 0}, + {"RTF_IFREF", Const, 0}, + {"RTF_IFSCOPE", Const, 0}, + {"RTF_INTERFACE", Const, 0}, + {"RTF_IRTT", Const, 0}, + {"RTF_LINKRT", Const, 0}, + {"RTF_LLDATA", Const, 0}, + {"RTF_LLINFO", Const, 0}, + {"RTF_LOCAL", Const, 0}, + {"RTF_MASK", Const, 1}, + {"RTF_MODIFIED", Const, 0}, + 
{"RTF_MPATH", Const, 1}, + {"RTF_MPLS", Const, 1}, + {"RTF_MSS", Const, 0}, + {"RTF_MTU", Const, 0}, + {"RTF_MULTICAST", Const, 0}, + {"RTF_NAT", Const, 0}, + {"RTF_NOFORWARD", Const, 0}, + {"RTF_NONEXTHOP", Const, 0}, + {"RTF_NOPMTUDISC", Const, 0}, + {"RTF_PERMANENT_ARP", Const, 1}, + {"RTF_PINNED", Const, 0}, + {"RTF_POLICY", Const, 0}, + {"RTF_PRCLONING", Const, 0}, + {"RTF_PROTO1", Const, 0}, + {"RTF_PROTO2", Const, 0}, + {"RTF_PROTO3", Const, 0}, + {"RTF_PROXY", Const, 16}, + {"RTF_REINSTATE", Const, 0}, + {"RTF_REJECT", Const, 0}, + {"RTF_RNH_LOCKED", Const, 0}, + {"RTF_ROUTER", Const, 16}, + {"RTF_SOURCE", Const, 1}, + {"RTF_SRC", Const, 1}, + {"RTF_STATIC", Const, 0}, + {"RTF_STICKY", Const, 0}, + {"RTF_THROW", Const, 0}, + {"RTF_TUNNEL", Const, 1}, + {"RTF_UP", Const, 0}, + {"RTF_USETRAILERS", Const, 1}, + {"RTF_WASCLONED", Const, 0}, + {"RTF_WINDOW", Const, 0}, + {"RTF_XRESOLVE", Const, 0}, + {"RTM_ADD", Const, 0}, + {"RTM_BASE", Const, 0}, + {"RTM_CHANGE", Const, 0}, + {"RTM_CHGADDR", Const, 1}, + {"RTM_DELACTION", Const, 0}, + {"RTM_DELADDR", Const, 0}, + {"RTM_DELADDRLABEL", Const, 0}, + {"RTM_DELETE", Const, 0}, + {"RTM_DELLINK", Const, 0}, + {"RTM_DELMADDR", Const, 0}, + {"RTM_DELNEIGH", Const, 0}, + {"RTM_DELQDISC", Const, 0}, + {"RTM_DELROUTE", Const, 0}, + {"RTM_DELRULE", Const, 0}, + {"RTM_DELTCLASS", Const, 0}, + {"RTM_DELTFILTER", Const, 0}, + {"RTM_DESYNC", Const, 1}, + {"RTM_F_CLONED", Const, 0}, + {"RTM_F_EQUALIZE", Const, 0}, + {"RTM_F_NOTIFY", Const, 0}, + {"RTM_F_PREFIX", Const, 0}, + {"RTM_GET", Const, 0}, + {"RTM_GET2", Const, 0}, + {"RTM_GETACTION", Const, 0}, + {"RTM_GETADDR", Const, 0}, + {"RTM_GETADDRLABEL", Const, 0}, + {"RTM_GETANYCAST", Const, 0}, + {"RTM_GETDCB", Const, 0}, + {"RTM_GETLINK", Const, 0}, + {"RTM_GETMULTICAST", Const, 0}, + {"RTM_GETNEIGH", Const, 0}, + {"RTM_GETNEIGHTBL", Const, 0}, + {"RTM_GETQDISC", Const, 0}, + {"RTM_GETROUTE", Const, 0}, + {"RTM_GETRULE", Const, 0}, + {"RTM_GETTCLASS", Const, 0}, + 
{"RTM_GETTFILTER", Const, 0}, + {"RTM_IEEE80211", Const, 0}, + {"RTM_IFANNOUNCE", Const, 0}, + {"RTM_IFINFO", Const, 0}, + {"RTM_IFINFO2", Const, 0}, + {"RTM_LLINFO_UPD", Const, 1}, + {"RTM_LOCK", Const, 0}, + {"RTM_LOSING", Const, 0}, + {"RTM_MAX", Const, 0}, + {"RTM_MAXSIZE", Const, 1}, + {"RTM_MISS", Const, 0}, + {"RTM_NEWACTION", Const, 0}, + {"RTM_NEWADDR", Const, 0}, + {"RTM_NEWADDRLABEL", Const, 0}, + {"RTM_NEWLINK", Const, 0}, + {"RTM_NEWMADDR", Const, 0}, + {"RTM_NEWMADDR2", Const, 0}, + {"RTM_NEWNDUSEROPT", Const, 0}, + {"RTM_NEWNEIGH", Const, 0}, + {"RTM_NEWNEIGHTBL", Const, 0}, + {"RTM_NEWPREFIX", Const, 0}, + {"RTM_NEWQDISC", Const, 0}, + {"RTM_NEWROUTE", Const, 0}, + {"RTM_NEWRULE", Const, 0}, + {"RTM_NEWTCLASS", Const, 0}, + {"RTM_NEWTFILTER", Const, 0}, + {"RTM_NR_FAMILIES", Const, 0}, + {"RTM_NR_MSGTYPES", Const, 0}, + {"RTM_OIFINFO", Const, 1}, + {"RTM_OLDADD", Const, 0}, + {"RTM_OLDDEL", Const, 0}, + {"RTM_OOIFINFO", Const, 1}, + {"RTM_REDIRECT", Const, 0}, + {"RTM_RESOLVE", Const, 0}, + {"RTM_RTTUNIT", Const, 0}, + {"RTM_SETDCB", Const, 0}, + {"RTM_SETGATE", Const, 1}, + {"RTM_SETLINK", Const, 0}, + {"RTM_SETNEIGHTBL", Const, 0}, + {"RTM_VERSION", Const, 0}, + {"RTNH_ALIGNTO", Const, 0}, + {"RTNH_F_DEAD", Const, 0}, + {"RTNH_F_ONLINK", Const, 0}, + {"RTNH_F_PERVASIVE", Const, 0}, + {"RTNLGRP_IPV4_IFADDR", Const, 1}, + {"RTNLGRP_IPV4_MROUTE", Const, 1}, + {"RTNLGRP_IPV4_ROUTE", Const, 1}, + {"RTNLGRP_IPV4_RULE", Const, 1}, + {"RTNLGRP_IPV6_IFADDR", Const, 1}, + {"RTNLGRP_IPV6_IFINFO", Const, 1}, + {"RTNLGRP_IPV6_MROUTE", Const, 1}, + {"RTNLGRP_IPV6_PREFIX", Const, 1}, + {"RTNLGRP_IPV6_ROUTE", Const, 1}, + {"RTNLGRP_IPV6_RULE", Const, 1}, + {"RTNLGRP_LINK", Const, 1}, + {"RTNLGRP_ND_USEROPT", Const, 1}, + {"RTNLGRP_NEIGH", Const, 1}, + {"RTNLGRP_NONE", Const, 1}, + {"RTNLGRP_NOTIFY", Const, 1}, + {"RTNLGRP_TC", Const, 1}, + {"RTN_ANYCAST", Const, 0}, + {"RTN_BLACKHOLE", Const, 0}, + {"RTN_BROADCAST", Const, 0}, + {"RTN_LOCAL", Const, 0}, + 
{"RTN_MAX", Const, 0}, + {"RTN_MULTICAST", Const, 0}, + {"RTN_NAT", Const, 0}, + {"RTN_PROHIBIT", Const, 0}, + {"RTN_THROW", Const, 0}, + {"RTN_UNICAST", Const, 0}, + {"RTN_UNREACHABLE", Const, 0}, + {"RTN_UNSPEC", Const, 0}, + {"RTN_XRESOLVE", Const, 0}, + {"RTPROT_BIRD", Const, 0}, + {"RTPROT_BOOT", Const, 0}, + {"RTPROT_DHCP", Const, 0}, + {"RTPROT_DNROUTED", Const, 0}, + {"RTPROT_GATED", Const, 0}, + {"RTPROT_KERNEL", Const, 0}, + {"RTPROT_MRT", Const, 0}, + {"RTPROT_NTK", Const, 0}, + {"RTPROT_RA", Const, 0}, + {"RTPROT_REDIRECT", Const, 0}, + {"RTPROT_STATIC", Const, 0}, + {"RTPROT_UNSPEC", Const, 0}, + {"RTPROT_XORP", Const, 0}, + {"RTPROT_ZEBRA", Const, 0}, + {"RTV_EXPIRE", Const, 0}, + {"RTV_HOPCOUNT", Const, 0}, + {"RTV_MTU", Const, 0}, + {"RTV_RPIPE", Const, 0}, + {"RTV_RTT", Const, 0}, + {"RTV_RTTVAR", Const, 0}, + {"RTV_SPIPE", Const, 0}, + {"RTV_SSTHRESH", Const, 0}, + {"RTV_WEIGHT", Const, 0}, + {"RT_CACHING_CONTEXT", Const, 1}, + {"RT_CLASS_DEFAULT", Const, 0}, + {"RT_CLASS_LOCAL", Const, 0}, + {"RT_CLASS_MAIN", Const, 0}, + {"RT_CLASS_MAX", Const, 0}, + {"RT_CLASS_UNSPEC", Const, 0}, + {"RT_DEFAULT_FIB", Const, 1}, + {"RT_NORTREF", Const, 1}, + {"RT_SCOPE_HOST", Const, 0}, + {"RT_SCOPE_LINK", Const, 0}, + {"RT_SCOPE_NOWHERE", Const, 0}, + {"RT_SCOPE_SITE", Const, 0}, + {"RT_SCOPE_UNIVERSE", Const, 0}, + {"RT_TABLEID_MAX", Const, 1}, + {"RT_TABLE_COMPAT", Const, 0}, + {"RT_TABLE_DEFAULT", Const, 0}, + {"RT_TABLE_LOCAL", Const, 0}, + {"RT_TABLE_MAIN", Const, 0}, + {"RT_TABLE_MAX", Const, 0}, + {"RT_TABLE_UNSPEC", Const, 0}, + {"RUSAGE_CHILDREN", Const, 0}, + {"RUSAGE_SELF", Const, 0}, + {"RUSAGE_THREAD", Const, 0}, + {"Radvisory_t", Type, 0}, + {"Radvisory_t.Count", Field, 0}, + {"Radvisory_t.Offset", Field, 0}, + {"Radvisory_t.Pad_cgo_0", Field, 0}, + {"RawConn", Type, 9}, + {"RawSockaddr", Type, 0}, + {"RawSockaddr.Data", Field, 0}, + {"RawSockaddr.Family", Field, 0}, + {"RawSockaddr.Len", Field, 0}, + {"RawSockaddrAny", Type, 0}, + 
{"RawSockaddrAny.Addr", Field, 0}, + {"RawSockaddrAny.Pad", Field, 0}, + {"RawSockaddrDatalink", Type, 0}, + {"RawSockaddrDatalink.Alen", Field, 0}, + {"RawSockaddrDatalink.Data", Field, 0}, + {"RawSockaddrDatalink.Family", Field, 0}, + {"RawSockaddrDatalink.Index", Field, 0}, + {"RawSockaddrDatalink.Len", Field, 0}, + {"RawSockaddrDatalink.Nlen", Field, 0}, + {"RawSockaddrDatalink.Pad_cgo_0", Field, 2}, + {"RawSockaddrDatalink.Slen", Field, 0}, + {"RawSockaddrDatalink.Type", Field, 0}, + {"RawSockaddrInet4", Type, 0}, + {"RawSockaddrInet4.Addr", Field, 0}, + {"RawSockaddrInet4.Family", Field, 0}, + {"RawSockaddrInet4.Len", Field, 0}, + {"RawSockaddrInet4.Port", Field, 0}, + {"RawSockaddrInet4.Zero", Field, 0}, + {"RawSockaddrInet6", Type, 0}, + {"RawSockaddrInet6.Addr", Field, 0}, + {"RawSockaddrInet6.Family", Field, 0}, + {"RawSockaddrInet6.Flowinfo", Field, 0}, + {"RawSockaddrInet6.Len", Field, 0}, + {"RawSockaddrInet6.Port", Field, 0}, + {"RawSockaddrInet6.Scope_id", Field, 0}, + {"RawSockaddrLinklayer", Type, 0}, + {"RawSockaddrLinklayer.Addr", Field, 0}, + {"RawSockaddrLinklayer.Family", Field, 0}, + {"RawSockaddrLinklayer.Halen", Field, 0}, + {"RawSockaddrLinklayer.Hatype", Field, 0}, + {"RawSockaddrLinklayer.Ifindex", Field, 0}, + {"RawSockaddrLinklayer.Pkttype", Field, 0}, + {"RawSockaddrLinklayer.Protocol", Field, 0}, + {"RawSockaddrNetlink", Type, 0}, + {"RawSockaddrNetlink.Family", Field, 0}, + {"RawSockaddrNetlink.Groups", Field, 0}, + {"RawSockaddrNetlink.Pad", Field, 0}, + {"RawSockaddrNetlink.Pid", Field, 0}, + {"RawSockaddrUnix", Type, 0}, + {"RawSockaddrUnix.Family", Field, 0}, + {"RawSockaddrUnix.Len", Field, 0}, + {"RawSockaddrUnix.Pad_cgo_0", Field, 2}, + {"RawSockaddrUnix.Path", Field, 0}, + {"RawSyscall", Func, 0}, + {"RawSyscall6", Func, 0}, + {"Read", Func, 0}, + {"ReadConsole", Func, 1}, + {"ReadDirectoryChanges", Func, 0}, + {"ReadDirent", Func, 0}, + {"ReadFile", Func, 0}, + {"Readlink", Func, 0}, + {"Reboot", Func, 0}, + {"Recvfrom", 
Func, 0}, + {"Recvmsg", Func, 0}, + {"RegCloseKey", Func, 0}, + {"RegEnumKeyEx", Func, 0}, + {"RegOpenKeyEx", Func, 0}, + {"RegQueryInfoKey", Func, 0}, + {"RegQueryValueEx", Func, 0}, + {"RemoveDirectory", Func, 0}, + {"Removexattr", Func, 1}, + {"Rename", Func, 0}, + {"Renameat", Func, 0}, + {"Revoke", Func, 0}, + {"Rlimit", Type, 0}, + {"Rlimit.Cur", Field, 0}, + {"Rlimit.Max", Field, 0}, + {"Rmdir", Func, 0}, + {"RouteMessage", Type, 0}, + {"RouteMessage.Data", Field, 0}, + {"RouteMessage.Header", Field, 0}, + {"RouteRIB", Func, 0}, + {"RoutingMessage", Type, 0}, + {"RtAttr", Type, 0}, + {"RtAttr.Len", Field, 0}, + {"RtAttr.Type", Field, 0}, + {"RtGenmsg", Type, 0}, + {"RtGenmsg.Family", Field, 0}, + {"RtMetrics", Type, 0}, + {"RtMetrics.Expire", Field, 0}, + {"RtMetrics.Filler", Field, 0}, + {"RtMetrics.Hopcount", Field, 0}, + {"RtMetrics.Locks", Field, 0}, + {"RtMetrics.Mtu", Field, 0}, + {"RtMetrics.Pad", Field, 3}, + {"RtMetrics.Pksent", Field, 0}, + {"RtMetrics.Recvpipe", Field, 0}, + {"RtMetrics.Refcnt", Field, 2}, + {"RtMetrics.Rtt", Field, 0}, + {"RtMetrics.Rttvar", Field, 0}, + {"RtMetrics.Sendpipe", Field, 0}, + {"RtMetrics.Ssthresh", Field, 0}, + {"RtMetrics.Weight", Field, 0}, + {"RtMsg", Type, 0}, + {"RtMsg.Dst_len", Field, 0}, + {"RtMsg.Family", Field, 0}, + {"RtMsg.Flags", Field, 0}, + {"RtMsg.Protocol", Field, 0}, + {"RtMsg.Scope", Field, 0}, + {"RtMsg.Src_len", Field, 0}, + {"RtMsg.Table", Field, 0}, + {"RtMsg.Tos", Field, 0}, + {"RtMsg.Type", Field, 0}, + {"RtMsghdr", Type, 0}, + {"RtMsghdr.Addrs", Field, 0}, + {"RtMsghdr.Errno", Field, 0}, + {"RtMsghdr.Flags", Field, 0}, + {"RtMsghdr.Fmask", Field, 0}, + {"RtMsghdr.Hdrlen", Field, 2}, + {"RtMsghdr.Index", Field, 0}, + {"RtMsghdr.Inits", Field, 0}, + {"RtMsghdr.Mpls", Field, 2}, + {"RtMsghdr.Msglen", Field, 0}, + {"RtMsghdr.Pad_cgo_0", Field, 0}, + {"RtMsghdr.Pad_cgo_1", Field, 2}, + {"RtMsghdr.Pid", Field, 0}, + {"RtMsghdr.Priority", Field, 2}, + {"RtMsghdr.Rmx", Field, 0}, + {"RtMsghdr.Seq", 
Field, 0}, + {"RtMsghdr.Tableid", Field, 2}, + {"RtMsghdr.Type", Field, 0}, + {"RtMsghdr.Use", Field, 0}, + {"RtMsghdr.Version", Field, 0}, + {"RtNexthop", Type, 0}, + {"RtNexthop.Flags", Field, 0}, + {"RtNexthop.Hops", Field, 0}, + {"RtNexthop.Ifindex", Field, 0}, + {"RtNexthop.Len", Field, 0}, + {"Rusage", Type, 0}, + {"Rusage.CreationTime", Field, 0}, + {"Rusage.ExitTime", Field, 0}, + {"Rusage.Idrss", Field, 0}, + {"Rusage.Inblock", Field, 0}, + {"Rusage.Isrss", Field, 0}, + {"Rusage.Ixrss", Field, 0}, + {"Rusage.KernelTime", Field, 0}, + {"Rusage.Majflt", Field, 0}, + {"Rusage.Maxrss", Field, 0}, + {"Rusage.Minflt", Field, 0}, + {"Rusage.Msgrcv", Field, 0}, + {"Rusage.Msgsnd", Field, 0}, + {"Rusage.Nivcsw", Field, 0}, + {"Rusage.Nsignals", Field, 0}, + {"Rusage.Nswap", Field, 0}, + {"Rusage.Nvcsw", Field, 0}, + {"Rusage.Oublock", Field, 0}, + {"Rusage.Stime", Field, 0}, + {"Rusage.UserTime", Field, 0}, + {"Rusage.Utime", Field, 0}, + {"SCM_BINTIME", Const, 0}, + {"SCM_CREDENTIALS", Const, 0}, + {"SCM_CREDS", Const, 0}, + {"SCM_RIGHTS", Const, 0}, + {"SCM_TIMESTAMP", Const, 0}, + {"SCM_TIMESTAMPING", Const, 0}, + {"SCM_TIMESTAMPNS", Const, 0}, + {"SCM_TIMESTAMP_MONOTONIC", Const, 0}, + {"SHUT_RD", Const, 0}, + {"SHUT_RDWR", Const, 0}, + {"SHUT_WR", Const, 0}, + {"SID", Type, 0}, + {"SIDAndAttributes", Type, 0}, + {"SIDAndAttributes.Attributes", Field, 0}, + {"SIDAndAttributes.Sid", Field, 0}, + {"SIGABRT", Const, 0}, + {"SIGALRM", Const, 0}, + {"SIGBUS", Const, 0}, + {"SIGCHLD", Const, 0}, + {"SIGCLD", Const, 0}, + {"SIGCONT", Const, 0}, + {"SIGEMT", Const, 0}, + {"SIGFPE", Const, 0}, + {"SIGHUP", Const, 0}, + {"SIGILL", Const, 0}, + {"SIGINFO", Const, 0}, + {"SIGINT", Const, 0}, + {"SIGIO", Const, 0}, + {"SIGIOT", Const, 0}, + {"SIGKILL", Const, 0}, + {"SIGLIBRT", Const, 1}, + {"SIGLWP", Const, 0}, + {"SIGPIPE", Const, 0}, + {"SIGPOLL", Const, 0}, + {"SIGPROF", Const, 0}, + {"SIGPWR", Const, 0}, + {"SIGQUIT", Const, 0}, + {"SIGSEGV", Const, 0}, + {"SIGSTKFLT", 
Const, 0}, + {"SIGSTOP", Const, 0}, + {"SIGSYS", Const, 0}, + {"SIGTERM", Const, 0}, + {"SIGTHR", Const, 0}, + {"SIGTRAP", Const, 0}, + {"SIGTSTP", Const, 0}, + {"SIGTTIN", Const, 0}, + {"SIGTTOU", Const, 0}, + {"SIGUNUSED", Const, 0}, + {"SIGURG", Const, 0}, + {"SIGUSR1", Const, 0}, + {"SIGUSR2", Const, 0}, + {"SIGVTALRM", Const, 0}, + {"SIGWINCH", Const, 0}, + {"SIGXCPU", Const, 0}, + {"SIGXFSZ", Const, 0}, + {"SIOCADDDLCI", Const, 0}, + {"SIOCADDMULTI", Const, 0}, + {"SIOCADDRT", Const, 0}, + {"SIOCAIFADDR", Const, 0}, + {"SIOCAIFGROUP", Const, 0}, + {"SIOCALIFADDR", Const, 0}, + {"SIOCARPIPLL", Const, 0}, + {"SIOCATMARK", Const, 0}, + {"SIOCAUTOADDR", Const, 0}, + {"SIOCAUTONETMASK", Const, 0}, + {"SIOCBRDGADD", Const, 1}, + {"SIOCBRDGADDS", Const, 1}, + {"SIOCBRDGARL", Const, 1}, + {"SIOCBRDGDADDR", Const, 1}, + {"SIOCBRDGDEL", Const, 1}, + {"SIOCBRDGDELS", Const, 1}, + {"SIOCBRDGFLUSH", Const, 1}, + {"SIOCBRDGFRL", Const, 1}, + {"SIOCBRDGGCACHE", Const, 1}, + {"SIOCBRDGGFD", Const, 1}, + {"SIOCBRDGGHT", Const, 1}, + {"SIOCBRDGGIFFLGS", Const, 1}, + {"SIOCBRDGGMA", Const, 1}, + {"SIOCBRDGGPARAM", Const, 1}, + {"SIOCBRDGGPRI", Const, 1}, + {"SIOCBRDGGRL", Const, 1}, + {"SIOCBRDGGSIFS", Const, 1}, + {"SIOCBRDGGTO", Const, 1}, + {"SIOCBRDGIFS", Const, 1}, + {"SIOCBRDGRTS", Const, 1}, + {"SIOCBRDGSADDR", Const, 1}, + {"SIOCBRDGSCACHE", Const, 1}, + {"SIOCBRDGSFD", Const, 1}, + {"SIOCBRDGSHT", Const, 1}, + {"SIOCBRDGSIFCOST", Const, 1}, + {"SIOCBRDGSIFFLGS", Const, 1}, + {"SIOCBRDGSIFPRIO", Const, 1}, + {"SIOCBRDGSMA", Const, 1}, + {"SIOCBRDGSPRI", Const, 1}, + {"SIOCBRDGSPROTO", Const, 1}, + {"SIOCBRDGSTO", Const, 1}, + {"SIOCBRDGSTXHC", Const, 1}, + {"SIOCDARP", Const, 0}, + {"SIOCDELDLCI", Const, 0}, + {"SIOCDELMULTI", Const, 0}, + {"SIOCDELRT", Const, 0}, + {"SIOCDEVPRIVATE", Const, 0}, + {"SIOCDIFADDR", Const, 0}, + {"SIOCDIFGROUP", Const, 0}, + {"SIOCDIFPHYADDR", Const, 0}, + {"SIOCDLIFADDR", Const, 0}, + {"SIOCDRARP", Const, 0}, + {"SIOCGARP", Const, 0}, + 
{"SIOCGDRVSPEC", Const, 0}, + {"SIOCGETKALIVE", Const, 1}, + {"SIOCGETLABEL", Const, 1}, + {"SIOCGETPFLOW", Const, 1}, + {"SIOCGETPFSYNC", Const, 1}, + {"SIOCGETSGCNT", Const, 0}, + {"SIOCGETVIFCNT", Const, 0}, + {"SIOCGETVLAN", Const, 0}, + {"SIOCGHIWAT", Const, 0}, + {"SIOCGIFADDR", Const, 0}, + {"SIOCGIFADDRPREF", Const, 1}, + {"SIOCGIFALIAS", Const, 1}, + {"SIOCGIFALTMTU", Const, 0}, + {"SIOCGIFASYNCMAP", Const, 0}, + {"SIOCGIFBOND", Const, 0}, + {"SIOCGIFBR", Const, 0}, + {"SIOCGIFBRDADDR", Const, 0}, + {"SIOCGIFCAP", Const, 0}, + {"SIOCGIFCONF", Const, 0}, + {"SIOCGIFCOUNT", Const, 0}, + {"SIOCGIFDATA", Const, 1}, + {"SIOCGIFDESCR", Const, 0}, + {"SIOCGIFDEVMTU", Const, 0}, + {"SIOCGIFDLT", Const, 1}, + {"SIOCGIFDSTADDR", Const, 0}, + {"SIOCGIFENCAP", Const, 0}, + {"SIOCGIFFIB", Const, 1}, + {"SIOCGIFFLAGS", Const, 0}, + {"SIOCGIFGATTR", Const, 1}, + {"SIOCGIFGENERIC", Const, 0}, + {"SIOCGIFGMEMB", Const, 0}, + {"SIOCGIFGROUP", Const, 0}, + {"SIOCGIFHARDMTU", Const, 3}, + {"SIOCGIFHWADDR", Const, 0}, + {"SIOCGIFINDEX", Const, 0}, + {"SIOCGIFKPI", Const, 0}, + {"SIOCGIFMAC", Const, 0}, + {"SIOCGIFMAP", Const, 0}, + {"SIOCGIFMEDIA", Const, 0}, + {"SIOCGIFMEM", Const, 0}, + {"SIOCGIFMETRIC", Const, 0}, + {"SIOCGIFMTU", Const, 0}, + {"SIOCGIFNAME", Const, 0}, + {"SIOCGIFNETMASK", Const, 0}, + {"SIOCGIFPDSTADDR", Const, 0}, + {"SIOCGIFPFLAGS", Const, 0}, + {"SIOCGIFPHYS", Const, 0}, + {"SIOCGIFPRIORITY", Const, 1}, + {"SIOCGIFPSRCADDR", Const, 0}, + {"SIOCGIFRDOMAIN", Const, 1}, + {"SIOCGIFRTLABEL", Const, 1}, + {"SIOCGIFSLAVE", Const, 0}, + {"SIOCGIFSTATUS", Const, 0}, + {"SIOCGIFTIMESLOT", Const, 1}, + {"SIOCGIFTXQLEN", Const, 0}, + {"SIOCGIFVLAN", Const, 0}, + {"SIOCGIFWAKEFLAGS", Const, 0}, + {"SIOCGIFXFLAGS", Const, 1}, + {"SIOCGLIFADDR", Const, 0}, + {"SIOCGLIFPHYADDR", Const, 0}, + {"SIOCGLIFPHYRTABLE", Const, 1}, + {"SIOCGLIFPHYTTL", Const, 3}, + {"SIOCGLINKSTR", Const, 1}, + {"SIOCGLOWAT", Const, 0}, + {"SIOCGPGRP", Const, 0}, + {"SIOCGPRIVATE_0", Const, 
0}, + {"SIOCGPRIVATE_1", Const, 0}, + {"SIOCGRARP", Const, 0}, + {"SIOCGSPPPPARAMS", Const, 3}, + {"SIOCGSTAMP", Const, 0}, + {"SIOCGSTAMPNS", Const, 0}, + {"SIOCGVH", Const, 1}, + {"SIOCGVNETID", Const, 3}, + {"SIOCIFCREATE", Const, 0}, + {"SIOCIFCREATE2", Const, 0}, + {"SIOCIFDESTROY", Const, 0}, + {"SIOCIFGCLONERS", Const, 0}, + {"SIOCINITIFADDR", Const, 1}, + {"SIOCPROTOPRIVATE", Const, 0}, + {"SIOCRSLVMULTI", Const, 0}, + {"SIOCRTMSG", Const, 0}, + {"SIOCSARP", Const, 0}, + {"SIOCSDRVSPEC", Const, 0}, + {"SIOCSETKALIVE", Const, 1}, + {"SIOCSETLABEL", Const, 1}, + {"SIOCSETPFLOW", Const, 1}, + {"SIOCSETPFSYNC", Const, 1}, + {"SIOCSETVLAN", Const, 0}, + {"SIOCSHIWAT", Const, 0}, + {"SIOCSIFADDR", Const, 0}, + {"SIOCSIFADDRPREF", Const, 1}, + {"SIOCSIFALTMTU", Const, 0}, + {"SIOCSIFASYNCMAP", Const, 0}, + {"SIOCSIFBOND", Const, 0}, + {"SIOCSIFBR", Const, 0}, + {"SIOCSIFBRDADDR", Const, 0}, + {"SIOCSIFCAP", Const, 0}, + {"SIOCSIFDESCR", Const, 0}, + {"SIOCSIFDSTADDR", Const, 0}, + {"SIOCSIFENCAP", Const, 0}, + {"SIOCSIFFIB", Const, 1}, + {"SIOCSIFFLAGS", Const, 0}, + {"SIOCSIFGATTR", Const, 1}, + {"SIOCSIFGENERIC", Const, 0}, + {"SIOCSIFHWADDR", Const, 0}, + {"SIOCSIFHWBROADCAST", Const, 0}, + {"SIOCSIFKPI", Const, 0}, + {"SIOCSIFLINK", Const, 0}, + {"SIOCSIFLLADDR", Const, 0}, + {"SIOCSIFMAC", Const, 0}, + {"SIOCSIFMAP", Const, 0}, + {"SIOCSIFMEDIA", Const, 0}, + {"SIOCSIFMEM", Const, 0}, + {"SIOCSIFMETRIC", Const, 0}, + {"SIOCSIFMTU", Const, 0}, + {"SIOCSIFNAME", Const, 0}, + {"SIOCSIFNETMASK", Const, 0}, + {"SIOCSIFPFLAGS", Const, 0}, + {"SIOCSIFPHYADDR", Const, 0}, + {"SIOCSIFPHYS", Const, 0}, + {"SIOCSIFPRIORITY", Const, 1}, + {"SIOCSIFRDOMAIN", Const, 1}, + {"SIOCSIFRTLABEL", Const, 1}, + {"SIOCSIFRVNET", Const, 0}, + {"SIOCSIFSLAVE", Const, 0}, + {"SIOCSIFTIMESLOT", Const, 1}, + {"SIOCSIFTXQLEN", Const, 0}, + {"SIOCSIFVLAN", Const, 0}, + {"SIOCSIFVNET", Const, 0}, + {"SIOCSIFXFLAGS", Const, 1}, + {"SIOCSLIFPHYADDR", Const, 0}, + {"SIOCSLIFPHYRTABLE", Const, 
1}, + {"SIOCSLIFPHYTTL", Const, 3}, + {"SIOCSLINKSTR", Const, 1}, + {"SIOCSLOWAT", Const, 0}, + {"SIOCSPGRP", Const, 0}, + {"SIOCSRARP", Const, 0}, + {"SIOCSSPPPPARAMS", Const, 3}, + {"SIOCSVH", Const, 1}, + {"SIOCSVNETID", Const, 3}, + {"SIOCZIFDATA", Const, 1}, + {"SIO_GET_EXTENSION_FUNCTION_POINTER", Const, 1}, + {"SIO_GET_INTERFACE_LIST", Const, 0}, + {"SIO_KEEPALIVE_VALS", Const, 3}, + {"SIO_UDP_CONNRESET", Const, 4}, + {"SOCK_CLOEXEC", Const, 0}, + {"SOCK_DCCP", Const, 0}, + {"SOCK_DGRAM", Const, 0}, + {"SOCK_FLAGS_MASK", Const, 1}, + {"SOCK_MAXADDRLEN", Const, 0}, + {"SOCK_NONBLOCK", Const, 0}, + {"SOCK_NOSIGPIPE", Const, 1}, + {"SOCK_PACKET", Const, 0}, + {"SOCK_RAW", Const, 0}, + {"SOCK_RDM", Const, 0}, + {"SOCK_SEQPACKET", Const, 0}, + {"SOCK_STREAM", Const, 0}, + {"SOL_AAL", Const, 0}, + {"SOL_ATM", Const, 0}, + {"SOL_DECNET", Const, 0}, + {"SOL_ICMPV6", Const, 0}, + {"SOL_IP", Const, 0}, + {"SOL_IPV6", Const, 0}, + {"SOL_IRDA", Const, 0}, + {"SOL_PACKET", Const, 0}, + {"SOL_RAW", Const, 0}, + {"SOL_SOCKET", Const, 0}, + {"SOL_TCP", Const, 0}, + {"SOL_X25", Const, 0}, + {"SOMAXCONN", Const, 0}, + {"SO_ACCEPTCONN", Const, 0}, + {"SO_ACCEPTFILTER", Const, 0}, + {"SO_ATTACH_FILTER", Const, 0}, + {"SO_BINDANY", Const, 1}, + {"SO_BINDTODEVICE", Const, 0}, + {"SO_BINTIME", Const, 0}, + {"SO_BROADCAST", Const, 0}, + {"SO_BSDCOMPAT", Const, 0}, + {"SO_DEBUG", Const, 0}, + {"SO_DETACH_FILTER", Const, 0}, + {"SO_DOMAIN", Const, 0}, + {"SO_DONTROUTE", Const, 0}, + {"SO_DONTTRUNC", Const, 0}, + {"SO_ERROR", Const, 0}, + {"SO_KEEPALIVE", Const, 0}, + {"SO_LABEL", Const, 0}, + {"SO_LINGER", Const, 0}, + {"SO_LINGER_SEC", Const, 0}, + {"SO_LISTENINCQLEN", Const, 0}, + {"SO_LISTENQLEN", Const, 0}, + {"SO_LISTENQLIMIT", Const, 0}, + {"SO_MARK", Const, 0}, + {"SO_NETPROC", Const, 1}, + {"SO_NKE", Const, 0}, + {"SO_NOADDRERR", Const, 0}, + {"SO_NOHEADER", Const, 1}, + {"SO_NOSIGPIPE", Const, 0}, + {"SO_NOTIFYCONFLICT", Const, 0}, + {"SO_NO_CHECK", Const, 0}, + 
{"SO_NO_DDP", Const, 0}, + {"SO_NO_OFFLOAD", Const, 0}, + {"SO_NP_EXTENSIONS", Const, 0}, + {"SO_NREAD", Const, 0}, + {"SO_NUMRCVPKT", Const, 16}, + {"SO_NWRITE", Const, 0}, + {"SO_OOBINLINE", Const, 0}, + {"SO_OVERFLOWED", Const, 1}, + {"SO_PASSCRED", Const, 0}, + {"SO_PASSSEC", Const, 0}, + {"SO_PEERCRED", Const, 0}, + {"SO_PEERLABEL", Const, 0}, + {"SO_PEERNAME", Const, 0}, + {"SO_PEERSEC", Const, 0}, + {"SO_PRIORITY", Const, 0}, + {"SO_PROTOCOL", Const, 0}, + {"SO_PROTOTYPE", Const, 1}, + {"SO_RANDOMPORT", Const, 0}, + {"SO_RCVBUF", Const, 0}, + {"SO_RCVBUFFORCE", Const, 0}, + {"SO_RCVLOWAT", Const, 0}, + {"SO_RCVTIMEO", Const, 0}, + {"SO_RESTRICTIONS", Const, 0}, + {"SO_RESTRICT_DENYIN", Const, 0}, + {"SO_RESTRICT_DENYOUT", Const, 0}, + {"SO_RESTRICT_DENYSET", Const, 0}, + {"SO_REUSEADDR", Const, 0}, + {"SO_REUSEPORT", Const, 0}, + {"SO_REUSESHAREUID", Const, 0}, + {"SO_RTABLE", Const, 1}, + {"SO_RXQ_OVFL", Const, 0}, + {"SO_SECURITY_AUTHENTICATION", Const, 0}, + {"SO_SECURITY_ENCRYPTION_NETWORK", Const, 0}, + {"SO_SECURITY_ENCRYPTION_TRANSPORT", Const, 0}, + {"SO_SETFIB", Const, 0}, + {"SO_SNDBUF", Const, 0}, + {"SO_SNDBUFFORCE", Const, 0}, + {"SO_SNDLOWAT", Const, 0}, + {"SO_SNDTIMEO", Const, 0}, + {"SO_SPLICE", Const, 1}, + {"SO_TIMESTAMP", Const, 0}, + {"SO_TIMESTAMPING", Const, 0}, + {"SO_TIMESTAMPNS", Const, 0}, + {"SO_TIMESTAMP_MONOTONIC", Const, 0}, + {"SO_TYPE", Const, 0}, + {"SO_UPCALLCLOSEWAIT", Const, 0}, + {"SO_UPDATE_ACCEPT_CONTEXT", Const, 0}, + {"SO_UPDATE_CONNECT_CONTEXT", Const, 1}, + {"SO_USELOOPBACK", Const, 0}, + {"SO_USER_COOKIE", Const, 1}, + {"SO_VENDOR", Const, 3}, + {"SO_WANTMORE", Const, 0}, + {"SO_WANTOOBFLAG", Const, 0}, + {"SSLExtraCertChainPolicyPara", Type, 0}, + {"SSLExtraCertChainPolicyPara.AuthType", Field, 0}, + {"SSLExtraCertChainPolicyPara.Checks", Field, 0}, + {"SSLExtraCertChainPolicyPara.ServerName", Field, 0}, + {"SSLExtraCertChainPolicyPara.Size", Field, 0}, + {"STANDARD_RIGHTS_ALL", Const, 0}, + 
{"STANDARD_RIGHTS_EXECUTE", Const, 0}, + {"STANDARD_RIGHTS_READ", Const, 0}, + {"STANDARD_RIGHTS_REQUIRED", Const, 0}, + {"STANDARD_RIGHTS_WRITE", Const, 0}, + {"STARTF_USESHOWWINDOW", Const, 0}, + {"STARTF_USESTDHANDLES", Const, 0}, + {"STD_ERROR_HANDLE", Const, 0}, + {"STD_INPUT_HANDLE", Const, 0}, + {"STD_OUTPUT_HANDLE", Const, 0}, + {"SUBLANG_ENGLISH_US", Const, 0}, + {"SW_FORCEMINIMIZE", Const, 0}, + {"SW_HIDE", Const, 0}, + {"SW_MAXIMIZE", Const, 0}, + {"SW_MINIMIZE", Const, 0}, + {"SW_NORMAL", Const, 0}, + {"SW_RESTORE", Const, 0}, + {"SW_SHOW", Const, 0}, + {"SW_SHOWDEFAULT", Const, 0}, + {"SW_SHOWMAXIMIZED", Const, 0}, + {"SW_SHOWMINIMIZED", Const, 0}, + {"SW_SHOWMINNOACTIVE", Const, 0}, + {"SW_SHOWNA", Const, 0}, + {"SW_SHOWNOACTIVATE", Const, 0}, + {"SW_SHOWNORMAL", Const, 0}, + {"SYMBOLIC_LINK_FLAG_DIRECTORY", Const, 4}, + {"SYNCHRONIZE", Const, 0}, + {"SYSCTL_VERSION", Const, 1}, + {"SYSCTL_VERS_0", Const, 1}, + {"SYSCTL_VERS_1", Const, 1}, + {"SYSCTL_VERS_MASK", Const, 1}, + {"SYS_ABORT2", Const, 0}, + {"SYS_ACCEPT", Const, 0}, + {"SYS_ACCEPT4", Const, 0}, + {"SYS_ACCEPT_NOCANCEL", Const, 0}, + {"SYS_ACCESS", Const, 0}, + {"SYS_ACCESS_EXTENDED", Const, 0}, + {"SYS_ACCT", Const, 0}, + {"SYS_ADD_KEY", Const, 0}, + {"SYS_ADD_PROFIL", Const, 0}, + {"SYS_ADJFREQ", Const, 1}, + {"SYS_ADJTIME", Const, 0}, + {"SYS_ADJTIMEX", Const, 0}, + {"SYS_AFS_SYSCALL", Const, 0}, + {"SYS_AIO_CANCEL", Const, 0}, + {"SYS_AIO_ERROR", Const, 0}, + {"SYS_AIO_FSYNC", Const, 0}, + {"SYS_AIO_MLOCK", Const, 14}, + {"SYS_AIO_READ", Const, 0}, + {"SYS_AIO_RETURN", Const, 0}, + {"SYS_AIO_SUSPEND", Const, 0}, + {"SYS_AIO_SUSPEND_NOCANCEL", Const, 0}, + {"SYS_AIO_WAITCOMPLETE", Const, 14}, + {"SYS_AIO_WRITE", Const, 0}, + {"SYS_ALARM", Const, 0}, + {"SYS_ARCH_PRCTL", Const, 0}, + {"SYS_ARM_FADVISE64_64", Const, 0}, + {"SYS_ARM_SYNC_FILE_RANGE", Const, 0}, + {"SYS_ATGETMSG", Const, 0}, + {"SYS_ATPGETREQ", Const, 0}, + {"SYS_ATPGETRSP", Const, 0}, + {"SYS_ATPSNDREQ", Const, 0}, + 
{"SYS_ATPSNDRSP", Const, 0}, + {"SYS_ATPUTMSG", Const, 0}, + {"SYS_ATSOCKET", Const, 0}, + {"SYS_AUDIT", Const, 0}, + {"SYS_AUDITCTL", Const, 0}, + {"SYS_AUDITON", Const, 0}, + {"SYS_AUDIT_SESSION_JOIN", Const, 0}, + {"SYS_AUDIT_SESSION_PORT", Const, 0}, + {"SYS_AUDIT_SESSION_SELF", Const, 0}, + {"SYS_BDFLUSH", Const, 0}, + {"SYS_BIND", Const, 0}, + {"SYS_BINDAT", Const, 3}, + {"SYS_BREAK", Const, 0}, + {"SYS_BRK", Const, 0}, + {"SYS_BSDTHREAD_CREATE", Const, 0}, + {"SYS_BSDTHREAD_REGISTER", Const, 0}, + {"SYS_BSDTHREAD_TERMINATE", Const, 0}, + {"SYS_CAPGET", Const, 0}, + {"SYS_CAPSET", Const, 0}, + {"SYS_CAP_ENTER", Const, 0}, + {"SYS_CAP_FCNTLS_GET", Const, 1}, + {"SYS_CAP_FCNTLS_LIMIT", Const, 1}, + {"SYS_CAP_GETMODE", Const, 0}, + {"SYS_CAP_GETRIGHTS", Const, 0}, + {"SYS_CAP_IOCTLS_GET", Const, 1}, + {"SYS_CAP_IOCTLS_LIMIT", Const, 1}, + {"SYS_CAP_NEW", Const, 0}, + {"SYS_CAP_RIGHTS_GET", Const, 1}, + {"SYS_CAP_RIGHTS_LIMIT", Const, 1}, + {"SYS_CHDIR", Const, 0}, + {"SYS_CHFLAGS", Const, 0}, + {"SYS_CHFLAGSAT", Const, 3}, + {"SYS_CHMOD", Const, 0}, + {"SYS_CHMOD_EXTENDED", Const, 0}, + {"SYS_CHOWN", Const, 0}, + {"SYS_CHOWN32", Const, 0}, + {"SYS_CHROOT", Const, 0}, + {"SYS_CHUD", Const, 0}, + {"SYS_CLOCK_ADJTIME", Const, 0}, + {"SYS_CLOCK_GETCPUCLOCKID2", Const, 1}, + {"SYS_CLOCK_GETRES", Const, 0}, + {"SYS_CLOCK_GETTIME", Const, 0}, + {"SYS_CLOCK_NANOSLEEP", Const, 0}, + {"SYS_CLOCK_SETTIME", Const, 0}, + {"SYS_CLONE", Const, 0}, + {"SYS_CLOSE", Const, 0}, + {"SYS_CLOSEFROM", Const, 0}, + {"SYS_CLOSE_NOCANCEL", Const, 0}, + {"SYS_CONNECT", Const, 0}, + {"SYS_CONNECTAT", Const, 3}, + {"SYS_CONNECT_NOCANCEL", Const, 0}, + {"SYS_COPYFILE", Const, 0}, + {"SYS_CPUSET", Const, 0}, + {"SYS_CPUSET_GETAFFINITY", Const, 0}, + {"SYS_CPUSET_GETID", Const, 0}, + {"SYS_CPUSET_SETAFFINITY", Const, 0}, + {"SYS_CPUSET_SETID", Const, 0}, + {"SYS_CREAT", Const, 0}, + {"SYS_CREATE_MODULE", Const, 0}, + {"SYS_CSOPS", Const, 0}, + {"SYS_CSOPS_AUDITTOKEN", Const, 16}, + 
{"SYS_DELETE", Const, 0}, + {"SYS_DELETE_MODULE", Const, 0}, + {"SYS_DUP", Const, 0}, + {"SYS_DUP2", Const, 0}, + {"SYS_DUP3", Const, 0}, + {"SYS_EACCESS", Const, 0}, + {"SYS_EPOLL_CREATE", Const, 0}, + {"SYS_EPOLL_CREATE1", Const, 0}, + {"SYS_EPOLL_CTL", Const, 0}, + {"SYS_EPOLL_CTL_OLD", Const, 0}, + {"SYS_EPOLL_PWAIT", Const, 0}, + {"SYS_EPOLL_WAIT", Const, 0}, + {"SYS_EPOLL_WAIT_OLD", Const, 0}, + {"SYS_EVENTFD", Const, 0}, + {"SYS_EVENTFD2", Const, 0}, + {"SYS_EXCHANGEDATA", Const, 0}, + {"SYS_EXECVE", Const, 0}, + {"SYS_EXIT", Const, 0}, + {"SYS_EXIT_GROUP", Const, 0}, + {"SYS_EXTATTRCTL", Const, 0}, + {"SYS_EXTATTR_DELETE_FD", Const, 0}, + {"SYS_EXTATTR_DELETE_FILE", Const, 0}, + {"SYS_EXTATTR_DELETE_LINK", Const, 0}, + {"SYS_EXTATTR_GET_FD", Const, 0}, + {"SYS_EXTATTR_GET_FILE", Const, 0}, + {"SYS_EXTATTR_GET_LINK", Const, 0}, + {"SYS_EXTATTR_LIST_FD", Const, 0}, + {"SYS_EXTATTR_LIST_FILE", Const, 0}, + {"SYS_EXTATTR_LIST_LINK", Const, 0}, + {"SYS_EXTATTR_SET_FD", Const, 0}, + {"SYS_EXTATTR_SET_FILE", Const, 0}, + {"SYS_EXTATTR_SET_LINK", Const, 0}, + {"SYS_FACCESSAT", Const, 0}, + {"SYS_FADVISE64", Const, 0}, + {"SYS_FADVISE64_64", Const, 0}, + {"SYS_FALLOCATE", Const, 0}, + {"SYS_FANOTIFY_INIT", Const, 0}, + {"SYS_FANOTIFY_MARK", Const, 0}, + {"SYS_FCHDIR", Const, 0}, + {"SYS_FCHFLAGS", Const, 0}, + {"SYS_FCHMOD", Const, 0}, + {"SYS_FCHMODAT", Const, 0}, + {"SYS_FCHMOD_EXTENDED", Const, 0}, + {"SYS_FCHOWN", Const, 0}, + {"SYS_FCHOWN32", Const, 0}, + {"SYS_FCHOWNAT", Const, 0}, + {"SYS_FCHROOT", Const, 1}, + {"SYS_FCNTL", Const, 0}, + {"SYS_FCNTL64", Const, 0}, + {"SYS_FCNTL_NOCANCEL", Const, 0}, + {"SYS_FDATASYNC", Const, 0}, + {"SYS_FEXECVE", Const, 0}, + {"SYS_FFCLOCK_GETCOUNTER", Const, 0}, + {"SYS_FFCLOCK_GETESTIMATE", Const, 0}, + {"SYS_FFCLOCK_SETESTIMATE", Const, 0}, + {"SYS_FFSCTL", Const, 0}, + {"SYS_FGETATTRLIST", Const, 0}, + {"SYS_FGETXATTR", Const, 0}, + {"SYS_FHOPEN", Const, 0}, + {"SYS_FHSTAT", Const, 0}, + {"SYS_FHSTATFS", Const, 0}, + 
{"SYS_FILEPORT_MAKEFD", Const, 0}, + {"SYS_FILEPORT_MAKEPORT", Const, 0}, + {"SYS_FKTRACE", Const, 1}, + {"SYS_FLISTXATTR", Const, 0}, + {"SYS_FLOCK", Const, 0}, + {"SYS_FORK", Const, 0}, + {"SYS_FPATHCONF", Const, 0}, + {"SYS_FREEBSD6_FTRUNCATE", Const, 0}, + {"SYS_FREEBSD6_LSEEK", Const, 0}, + {"SYS_FREEBSD6_MMAP", Const, 0}, + {"SYS_FREEBSD6_PREAD", Const, 0}, + {"SYS_FREEBSD6_PWRITE", Const, 0}, + {"SYS_FREEBSD6_TRUNCATE", Const, 0}, + {"SYS_FREMOVEXATTR", Const, 0}, + {"SYS_FSCTL", Const, 0}, + {"SYS_FSETATTRLIST", Const, 0}, + {"SYS_FSETXATTR", Const, 0}, + {"SYS_FSGETPATH", Const, 0}, + {"SYS_FSTAT", Const, 0}, + {"SYS_FSTAT64", Const, 0}, + {"SYS_FSTAT64_EXTENDED", Const, 0}, + {"SYS_FSTATAT", Const, 0}, + {"SYS_FSTATAT64", Const, 0}, + {"SYS_FSTATFS", Const, 0}, + {"SYS_FSTATFS64", Const, 0}, + {"SYS_FSTATV", Const, 0}, + {"SYS_FSTATVFS1", Const, 1}, + {"SYS_FSTAT_EXTENDED", Const, 0}, + {"SYS_FSYNC", Const, 0}, + {"SYS_FSYNC_NOCANCEL", Const, 0}, + {"SYS_FSYNC_RANGE", Const, 1}, + {"SYS_FTIME", Const, 0}, + {"SYS_FTRUNCATE", Const, 0}, + {"SYS_FTRUNCATE64", Const, 0}, + {"SYS_FUTEX", Const, 0}, + {"SYS_FUTIMENS", Const, 1}, + {"SYS_FUTIMES", Const, 0}, + {"SYS_FUTIMESAT", Const, 0}, + {"SYS_GETATTRLIST", Const, 0}, + {"SYS_GETAUDIT", Const, 0}, + {"SYS_GETAUDIT_ADDR", Const, 0}, + {"SYS_GETAUID", Const, 0}, + {"SYS_GETCONTEXT", Const, 0}, + {"SYS_GETCPU", Const, 0}, + {"SYS_GETCWD", Const, 0}, + {"SYS_GETDENTS", Const, 0}, + {"SYS_GETDENTS64", Const, 0}, + {"SYS_GETDIRENTRIES", Const, 0}, + {"SYS_GETDIRENTRIES64", Const, 0}, + {"SYS_GETDIRENTRIESATTR", Const, 0}, + {"SYS_GETDTABLECOUNT", Const, 1}, + {"SYS_GETDTABLESIZE", Const, 0}, + {"SYS_GETEGID", Const, 0}, + {"SYS_GETEGID32", Const, 0}, + {"SYS_GETEUID", Const, 0}, + {"SYS_GETEUID32", Const, 0}, + {"SYS_GETFH", Const, 0}, + {"SYS_GETFSSTAT", Const, 0}, + {"SYS_GETFSSTAT64", Const, 0}, + {"SYS_GETGID", Const, 0}, + {"SYS_GETGID32", Const, 0}, + {"SYS_GETGROUPS", Const, 0}, + {"SYS_GETGROUPS32", Const, 
0}, + {"SYS_GETHOSTUUID", Const, 0}, + {"SYS_GETITIMER", Const, 0}, + {"SYS_GETLCID", Const, 0}, + {"SYS_GETLOGIN", Const, 0}, + {"SYS_GETLOGINCLASS", Const, 0}, + {"SYS_GETPEERNAME", Const, 0}, + {"SYS_GETPGID", Const, 0}, + {"SYS_GETPGRP", Const, 0}, + {"SYS_GETPID", Const, 0}, + {"SYS_GETPMSG", Const, 0}, + {"SYS_GETPPID", Const, 0}, + {"SYS_GETPRIORITY", Const, 0}, + {"SYS_GETRESGID", Const, 0}, + {"SYS_GETRESGID32", Const, 0}, + {"SYS_GETRESUID", Const, 0}, + {"SYS_GETRESUID32", Const, 0}, + {"SYS_GETRLIMIT", Const, 0}, + {"SYS_GETRTABLE", Const, 1}, + {"SYS_GETRUSAGE", Const, 0}, + {"SYS_GETSGROUPS", Const, 0}, + {"SYS_GETSID", Const, 0}, + {"SYS_GETSOCKNAME", Const, 0}, + {"SYS_GETSOCKOPT", Const, 0}, + {"SYS_GETTHRID", Const, 1}, + {"SYS_GETTID", Const, 0}, + {"SYS_GETTIMEOFDAY", Const, 0}, + {"SYS_GETUID", Const, 0}, + {"SYS_GETUID32", Const, 0}, + {"SYS_GETVFSSTAT", Const, 1}, + {"SYS_GETWGROUPS", Const, 0}, + {"SYS_GETXATTR", Const, 0}, + {"SYS_GET_KERNEL_SYMS", Const, 0}, + {"SYS_GET_MEMPOLICY", Const, 0}, + {"SYS_GET_ROBUST_LIST", Const, 0}, + {"SYS_GET_THREAD_AREA", Const, 0}, + {"SYS_GSSD_SYSCALL", Const, 14}, + {"SYS_GTTY", Const, 0}, + {"SYS_IDENTITYSVC", Const, 0}, + {"SYS_IDLE", Const, 0}, + {"SYS_INITGROUPS", Const, 0}, + {"SYS_INIT_MODULE", Const, 0}, + {"SYS_INOTIFY_ADD_WATCH", Const, 0}, + {"SYS_INOTIFY_INIT", Const, 0}, + {"SYS_INOTIFY_INIT1", Const, 0}, + {"SYS_INOTIFY_RM_WATCH", Const, 0}, + {"SYS_IOCTL", Const, 0}, + {"SYS_IOPERM", Const, 0}, + {"SYS_IOPL", Const, 0}, + {"SYS_IOPOLICYSYS", Const, 0}, + {"SYS_IOPRIO_GET", Const, 0}, + {"SYS_IOPRIO_SET", Const, 0}, + {"SYS_IO_CANCEL", Const, 0}, + {"SYS_IO_DESTROY", Const, 0}, + {"SYS_IO_GETEVENTS", Const, 0}, + {"SYS_IO_SETUP", Const, 0}, + {"SYS_IO_SUBMIT", Const, 0}, + {"SYS_IPC", Const, 0}, + {"SYS_ISSETUGID", Const, 0}, + {"SYS_JAIL", Const, 0}, + {"SYS_JAIL_ATTACH", Const, 0}, + {"SYS_JAIL_GET", Const, 0}, + {"SYS_JAIL_REMOVE", Const, 0}, + {"SYS_JAIL_SET", Const, 0}, + 
{"SYS_KAS_INFO", Const, 16}, + {"SYS_KDEBUG_TRACE", Const, 0}, + {"SYS_KENV", Const, 0}, + {"SYS_KEVENT", Const, 0}, + {"SYS_KEVENT64", Const, 0}, + {"SYS_KEXEC_LOAD", Const, 0}, + {"SYS_KEYCTL", Const, 0}, + {"SYS_KILL", Const, 0}, + {"SYS_KLDFIND", Const, 0}, + {"SYS_KLDFIRSTMOD", Const, 0}, + {"SYS_KLDLOAD", Const, 0}, + {"SYS_KLDNEXT", Const, 0}, + {"SYS_KLDSTAT", Const, 0}, + {"SYS_KLDSYM", Const, 0}, + {"SYS_KLDUNLOAD", Const, 0}, + {"SYS_KLDUNLOADF", Const, 0}, + {"SYS_KMQ_NOTIFY", Const, 14}, + {"SYS_KMQ_OPEN", Const, 14}, + {"SYS_KMQ_SETATTR", Const, 14}, + {"SYS_KMQ_TIMEDRECEIVE", Const, 14}, + {"SYS_KMQ_TIMEDSEND", Const, 14}, + {"SYS_KMQ_UNLINK", Const, 14}, + {"SYS_KQUEUE", Const, 0}, + {"SYS_KQUEUE1", Const, 1}, + {"SYS_KSEM_CLOSE", Const, 14}, + {"SYS_KSEM_DESTROY", Const, 14}, + {"SYS_KSEM_GETVALUE", Const, 14}, + {"SYS_KSEM_INIT", Const, 14}, + {"SYS_KSEM_OPEN", Const, 14}, + {"SYS_KSEM_POST", Const, 14}, + {"SYS_KSEM_TIMEDWAIT", Const, 14}, + {"SYS_KSEM_TRYWAIT", Const, 14}, + {"SYS_KSEM_UNLINK", Const, 14}, + {"SYS_KSEM_WAIT", Const, 14}, + {"SYS_KTIMER_CREATE", Const, 0}, + {"SYS_KTIMER_DELETE", Const, 0}, + {"SYS_KTIMER_GETOVERRUN", Const, 0}, + {"SYS_KTIMER_GETTIME", Const, 0}, + {"SYS_KTIMER_SETTIME", Const, 0}, + {"SYS_KTRACE", Const, 0}, + {"SYS_LCHFLAGS", Const, 0}, + {"SYS_LCHMOD", Const, 0}, + {"SYS_LCHOWN", Const, 0}, + {"SYS_LCHOWN32", Const, 0}, + {"SYS_LEDGER", Const, 16}, + {"SYS_LGETFH", Const, 0}, + {"SYS_LGETXATTR", Const, 0}, + {"SYS_LINK", Const, 0}, + {"SYS_LINKAT", Const, 0}, + {"SYS_LIO_LISTIO", Const, 0}, + {"SYS_LISTEN", Const, 0}, + {"SYS_LISTXATTR", Const, 0}, + {"SYS_LLISTXATTR", Const, 0}, + {"SYS_LOCK", Const, 0}, + {"SYS_LOOKUP_DCOOKIE", Const, 0}, + {"SYS_LPATHCONF", Const, 0}, + {"SYS_LREMOVEXATTR", Const, 0}, + {"SYS_LSEEK", Const, 0}, + {"SYS_LSETXATTR", Const, 0}, + {"SYS_LSTAT", Const, 0}, + {"SYS_LSTAT64", Const, 0}, + {"SYS_LSTAT64_EXTENDED", Const, 0}, + {"SYS_LSTATV", Const, 0}, + {"SYS_LSTAT_EXTENDED", 
Const, 0}, + {"SYS_LUTIMES", Const, 0}, + {"SYS_MAC_SYSCALL", Const, 0}, + {"SYS_MADVISE", Const, 0}, + {"SYS_MADVISE1", Const, 0}, + {"SYS_MAXSYSCALL", Const, 0}, + {"SYS_MBIND", Const, 0}, + {"SYS_MIGRATE_PAGES", Const, 0}, + {"SYS_MINCORE", Const, 0}, + {"SYS_MINHERIT", Const, 0}, + {"SYS_MKCOMPLEX", Const, 0}, + {"SYS_MKDIR", Const, 0}, + {"SYS_MKDIRAT", Const, 0}, + {"SYS_MKDIR_EXTENDED", Const, 0}, + {"SYS_MKFIFO", Const, 0}, + {"SYS_MKFIFOAT", Const, 0}, + {"SYS_MKFIFO_EXTENDED", Const, 0}, + {"SYS_MKNOD", Const, 0}, + {"SYS_MKNODAT", Const, 0}, + {"SYS_MLOCK", Const, 0}, + {"SYS_MLOCKALL", Const, 0}, + {"SYS_MMAP", Const, 0}, + {"SYS_MMAP2", Const, 0}, + {"SYS_MODCTL", Const, 1}, + {"SYS_MODFIND", Const, 0}, + {"SYS_MODFNEXT", Const, 0}, + {"SYS_MODIFY_LDT", Const, 0}, + {"SYS_MODNEXT", Const, 0}, + {"SYS_MODSTAT", Const, 0}, + {"SYS_MODWATCH", Const, 0}, + {"SYS_MOUNT", Const, 0}, + {"SYS_MOVE_PAGES", Const, 0}, + {"SYS_MPROTECT", Const, 0}, + {"SYS_MPX", Const, 0}, + {"SYS_MQUERY", Const, 1}, + {"SYS_MQ_GETSETATTR", Const, 0}, + {"SYS_MQ_NOTIFY", Const, 0}, + {"SYS_MQ_OPEN", Const, 0}, + {"SYS_MQ_TIMEDRECEIVE", Const, 0}, + {"SYS_MQ_TIMEDSEND", Const, 0}, + {"SYS_MQ_UNLINK", Const, 0}, + {"SYS_MREMAP", Const, 0}, + {"SYS_MSGCTL", Const, 0}, + {"SYS_MSGGET", Const, 0}, + {"SYS_MSGRCV", Const, 0}, + {"SYS_MSGRCV_NOCANCEL", Const, 0}, + {"SYS_MSGSND", Const, 0}, + {"SYS_MSGSND_NOCANCEL", Const, 0}, + {"SYS_MSGSYS", Const, 0}, + {"SYS_MSYNC", Const, 0}, + {"SYS_MSYNC_NOCANCEL", Const, 0}, + {"SYS_MUNLOCK", Const, 0}, + {"SYS_MUNLOCKALL", Const, 0}, + {"SYS_MUNMAP", Const, 0}, + {"SYS_NAME_TO_HANDLE_AT", Const, 0}, + {"SYS_NANOSLEEP", Const, 0}, + {"SYS_NEWFSTATAT", Const, 0}, + {"SYS_NFSCLNT", Const, 0}, + {"SYS_NFSSERVCTL", Const, 0}, + {"SYS_NFSSVC", Const, 0}, + {"SYS_NFSTAT", Const, 0}, + {"SYS_NICE", Const, 0}, + {"SYS_NLM_SYSCALL", Const, 14}, + {"SYS_NLSTAT", Const, 0}, + {"SYS_NMOUNT", Const, 0}, + {"SYS_NSTAT", Const, 0}, + {"SYS_NTP_ADJTIME", Const, 
0}, + {"SYS_NTP_GETTIME", Const, 0}, + {"SYS_NUMA_GETAFFINITY", Const, 14}, + {"SYS_NUMA_SETAFFINITY", Const, 14}, + {"SYS_OABI_SYSCALL_BASE", Const, 0}, + {"SYS_OBREAK", Const, 0}, + {"SYS_OLDFSTAT", Const, 0}, + {"SYS_OLDLSTAT", Const, 0}, + {"SYS_OLDOLDUNAME", Const, 0}, + {"SYS_OLDSTAT", Const, 0}, + {"SYS_OLDUNAME", Const, 0}, + {"SYS_OPEN", Const, 0}, + {"SYS_OPENAT", Const, 0}, + {"SYS_OPENBSD_POLL", Const, 0}, + {"SYS_OPEN_BY_HANDLE_AT", Const, 0}, + {"SYS_OPEN_DPROTECTED_NP", Const, 16}, + {"SYS_OPEN_EXTENDED", Const, 0}, + {"SYS_OPEN_NOCANCEL", Const, 0}, + {"SYS_OVADVISE", Const, 0}, + {"SYS_PACCEPT", Const, 1}, + {"SYS_PATHCONF", Const, 0}, + {"SYS_PAUSE", Const, 0}, + {"SYS_PCICONFIG_IOBASE", Const, 0}, + {"SYS_PCICONFIG_READ", Const, 0}, + {"SYS_PCICONFIG_WRITE", Const, 0}, + {"SYS_PDFORK", Const, 0}, + {"SYS_PDGETPID", Const, 0}, + {"SYS_PDKILL", Const, 0}, + {"SYS_PERF_EVENT_OPEN", Const, 0}, + {"SYS_PERSONALITY", Const, 0}, + {"SYS_PID_HIBERNATE", Const, 0}, + {"SYS_PID_RESUME", Const, 0}, + {"SYS_PID_SHUTDOWN_SOCKETS", Const, 0}, + {"SYS_PID_SUSPEND", Const, 0}, + {"SYS_PIPE", Const, 0}, + {"SYS_PIPE2", Const, 0}, + {"SYS_PIVOT_ROOT", Const, 0}, + {"SYS_PMC_CONTROL", Const, 1}, + {"SYS_PMC_GET_INFO", Const, 1}, + {"SYS_POLL", Const, 0}, + {"SYS_POLLTS", Const, 1}, + {"SYS_POLL_NOCANCEL", Const, 0}, + {"SYS_POSIX_FADVISE", Const, 0}, + {"SYS_POSIX_FALLOCATE", Const, 0}, + {"SYS_POSIX_OPENPT", Const, 0}, + {"SYS_POSIX_SPAWN", Const, 0}, + {"SYS_PPOLL", Const, 0}, + {"SYS_PRCTL", Const, 0}, + {"SYS_PREAD", Const, 0}, + {"SYS_PREAD64", Const, 0}, + {"SYS_PREADV", Const, 0}, + {"SYS_PREAD_NOCANCEL", Const, 0}, + {"SYS_PRLIMIT64", Const, 0}, + {"SYS_PROCCTL", Const, 3}, + {"SYS_PROCESS_POLICY", Const, 0}, + {"SYS_PROCESS_VM_READV", Const, 0}, + {"SYS_PROCESS_VM_WRITEV", Const, 0}, + {"SYS_PROC_INFO", Const, 0}, + {"SYS_PROF", Const, 0}, + {"SYS_PROFIL", Const, 0}, + {"SYS_PSELECT", Const, 0}, + {"SYS_PSELECT6", Const, 0}, + {"SYS_PSET_ASSIGN", Const, 
1}, + {"SYS_PSET_CREATE", Const, 1}, + {"SYS_PSET_DESTROY", Const, 1}, + {"SYS_PSYNCH_CVBROAD", Const, 0}, + {"SYS_PSYNCH_CVCLRPREPOST", Const, 0}, + {"SYS_PSYNCH_CVSIGNAL", Const, 0}, + {"SYS_PSYNCH_CVWAIT", Const, 0}, + {"SYS_PSYNCH_MUTEXDROP", Const, 0}, + {"SYS_PSYNCH_MUTEXWAIT", Const, 0}, + {"SYS_PSYNCH_RW_DOWNGRADE", Const, 0}, + {"SYS_PSYNCH_RW_LONGRDLOCK", Const, 0}, + {"SYS_PSYNCH_RW_RDLOCK", Const, 0}, + {"SYS_PSYNCH_RW_UNLOCK", Const, 0}, + {"SYS_PSYNCH_RW_UNLOCK2", Const, 0}, + {"SYS_PSYNCH_RW_UPGRADE", Const, 0}, + {"SYS_PSYNCH_RW_WRLOCK", Const, 0}, + {"SYS_PSYNCH_RW_YIELDWRLOCK", Const, 0}, + {"SYS_PTRACE", Const, 0}, + {"SYS_PUTPMSG", Const, 0}, + {"SYS_PWRITE", Const, 0}, + {"SYS_PWRITE64", Const, 0}, + {"SYS_PWRITEV", Const, 0}, + {"SYS_PWRITE_NOCANCEL", Const, 0}, + {"SYS_QUERY_MODULE", Const, 0}, + {"SYS_QUOTACTL", Const, 0}, + {"SYS_RASCTL", Const, 1}, + {"SYS_RCTL_ADD_RULE", Const, 0}, + {"SYS_RCTL_GET_LIMITS", Const, 0}, + {"SYS_RCTL_GET_RACCT", Const, 0}, + {"SYS_RCTL_GET_RULES", Const, 0}, + {"SYS_RCTL_REMOVE_RULE", Const, 0}, + {"SYS_READ", Const, 0}, + {"SYS_READAHEAD", Const, 0}, + {"SYS_READDIR", Const, 0}, + {"SYS_READLINK", Const, 0}, + {"SYS_READLINKAT", Const, 0}, + {"SYS_READV", Const, 0}, + {"SYS_READV_NOCANCEL", Const, 0}, + {"SYS_READ_NOCANCEL", Const, 0}, + {"SYS_REBOOT", Const, 0}, + {"SYS_RECV", Const, 0}, + {"SYS_RECVFROM", Const, 0}, + {"SYS_RECVFROM_NOCANCEL", Const, 0}, + {"SYS_RECVMMSG", Const, 0}, + {"SYS_RECVMSG", Const, 0}, + {"SYS_RECVMSG_NOCANCEL", Const, 0}, + {"SYS_REMAP_FILE_PAGES", Const, 0}, + {"SYS_REMOVEXATTR", Const, 0}, + {"SYS_RENAME", Const, 0}, + {"SYS_RENAMEAT", Const, 0}, + {"SYS_REQUEST_KEY", Const, 0}, + {"SYS_RESTART_SYSCALL", Const, 0}, + {"SYS_REVOKE", Const, 0}, + {"SYS_RFORK", Const, 0}, + {"SYS_RMDIR", Const, 0}, + {"SYS_RTPRIO", Const, 0}, + {"SYS_RTPRIO_THREAD", Const, 0}, + {"SYS_RT_SIGACTION", Const, 0}, + {"SYS_RT_SIGPENDING", Const, 0}, + {"SYS_RT_SIGPROCMASK", Const, 0}, + 
{"SYS_RT_SIGQUEUEINFO", Const, 0}, + {"SYS_RT_SIGRETURN", Const, 0}, + {"SYS_RT_SIGSUSPEND", Const, 0}, + {"SYS_RT_SIGTIMEDWAIT", Const, 0}, + {"SYS_RT_TGSIGQUEUEINFO", Const, 0}, + {"SYS_SBRK", Const, 0}, + {"SYS_SCHED_GETAFFINITY", Const, 0}, + {"SYS_SCHED_GETPARAM", Const, 0}, + {"SYS_SCHED_GETSCHEDULER", Const, 0}, + {"SYS_SCHED_GET_PRIORITY_MAX", Const, 0}, + {"SYS_SCHED_GET_PRIORITY_MIN", Const, 0}, + {"SYS_SCHED_RR_GET_INTERVAL", Const, 0}, + {"SYS_SCHED_SETAFFINITY", Const, 0}, + {"SYS_SCHED_SETPARAM", Const, 0}, + {"SYS_SCHED_SETSCHEDULER", Const, 0}, + {"SYS_SCHED_YIELD", Const, 0}, + {"SYS_SCTP_GENERIC_RECVMSG", Const, 0}, + {"SYS_SCTP_GENERIC_SENDMSG", Const, 0}, + {"SYS_SCTP_GENERIC_SENDMSG_IOV", Const, 0}, + {"SYS_SCTP_PEELOFF", Const, 0}, + {"SYS_SEARCHFS", Const, 0}, + {"SYS_SECURITY", Const, 0}, + {"SYS_SELECT", Const, 0}, + {"SYS_SELECT_NOCANCEL", Const, 0}, + {"SYS_SEMCONFIG", Const, 1}, + {"SYS_SEMCTL", Const, 0}, + {"SYS_SEMGET", Const, 0}, + {"SYS_SEMOP", Const, 0}, + {"SYS_SEMSYS", Const, 0}, + {"SYS_SEMTIMEDOP", Const, 0}, + {"SYS_SEM_CLOSE", Const, 0}, + {"SYS_SEM_DESTROY", Const, 0}, + {"SYS_SEM_GETVALUE", Const, 0}, + {"SYS_SEM_INIT", Const, 0}, + {"SYS_SEM_OPEN", Const, 0}, + {"SYS_SEM_POST", Const, 0}, + {"SYS_SEM_TRYWAIT", Const, 0}, + {"SYS_SEM_UNLINK", Const, 0}, + {"SYS_SEM_WAIT", Const, 0}, + {"SYS_SEM_WAIT_NOCANCEL", Const, 0}, + {"SYS_SEND", Const, 0}, + {"SYS_SENDFILE", Const, 0}, + {"SYS_SENDFILE64", Const, 0}, + {"SYS_SENDMMSG", Const, 0}, + {"SYS_SENDMSG", Const, 0}, + {"SYS_SENDMSG_NOCANCEL", Const, 0}, + {"SYS_SENDTO", Const, 0}, + {"SYS_SENDTO_NOCANCEL", Const, 0}, + {"SYS_SETATTRLIST", Const, 0}, + {"SYS_SETAUDIT", Const, 0}, + {"SYS_SETAUDIT_ADDR", Const, 0}, + {"SYS_SETAUID", Const, 0}, + {"SYS_SETCONTEXT", Const, 0}, + {"SYS_SETDOMAINNAME", Const, 0}, + {"SYS_SETEGID", Const, 0}, + {"SYS_SETEUID", Const, 0}, + {"SYS_SETFIB", Const, 0}, + {"SYS_SETFSGID", Const, 0}, + {"SYS_SETFSGID32", Const, 0}, + {"SYS_SETFSUID", 
Const, 0}, + {"SYS_SETFSUID32", Const, 0}, + {"SYS_SETGID", Const, 0}, + {"SYS_SETGID32", Const, 0}, + {"SYS_SETGROUPS", Const, 0}, + {"SYS_SETGROUPS32", Const, 0}, + {"SYS_SETHOSTNAME", Const, 0}, + {"SYS_SETITIMER", Const, 0}, + {"SYS_SETLCID", Const, 0}, + {"SYS_SETLOGIN", Const, 0}, + {"SYS_SETLOGINCLASS", Const, 0}, + {"SYS_SETNS", Const, 0}, + {"SYS_SETPGID", Const, 0}, + {"SYS_SETPRIORITY", Const, 0}, + {"SYS_SETPRIVEXEC", Const, 0}, + {"SYS_SETREGID", Const, 0}, + {"SYS_SETREGID32", Const, 0}, + {"SYS_SETRESGID", Const, 0}, + {"SYS_SETRESGID32", Const, 0}, + {"SYS_SETRESUID", Const, 0}, + {"SYS_SETRESUID32", Const, 0}, + {"SYS_SETREUID", Const, 0}, + {"SYS_SETREUID32", Const, 0}, + {"SYS_SETRLIMIT", Const, 0}, + {"SYS_SETRTABLE", Const, 1}, + {"SYS_SETSGROUPS", Const, 0}, + {"SYS_SETSID", Const, 0}, + {"SYS_SETSOCKOPT", Const, 0}, + {"SYS_SETTID", Const, 0}, + {"SYS_SETTID_WITH_PID", Const, 0}, + {"SYS_SETTIMEOFDAY", Const, 0}, + {"SYS_SETUID", Const, 0}, + {"SYS_SETUID32", Const, 0}, + {"SYS_SETWGROUPS", Const, 0}, + {"SYS_SETXATTR", Const, 0}, + {"SYS_SET_MEMPOLICY", Const, 0}, + {"SYS_SET_ROBUST_LIST", Const, 0}, + {"SYS_SET_THREAD_AREA", Const, 0}, + {"SYS_SET_TID_ADDRESS", Const, 0}, + {"SYS_SGETMASK", Const, 0}, + {"SYS_SHARED_REGION_CHECK_NP", Const, 0}, + {"SYS_SHARED_REGION_MAP_AND_SLIDE_NP", Const, 0}, + {"SYS_SHMAT", Const, 0}, + {"SYS_SHMCTL", Const, 0}, + {"SYS_SHMDT", Const, 0}, + {"SYS_SHMGET", Const, 0}, + {"SYS_SHMSYS", Const, 0}, + {"SYS_SHM_OPEN", Const, 0}, + {"SYS_SHM_UNLINK", Const, 0}, + {"SYS_SHUTDOWN", Const, 0}, + {"SYS_SIGACTION", Const, 0}, + {"SYS_SIGALTSTACK", Const, 0}, + {"SYS_SIGNAL", Const, 0}, + {"SYS_SIGNALFD", Const, 0}, + {"SYS_SIGNALFD4", Const, 0}, + {"SYS_SIGPENDING", Const, 0}, + {"SYS_SIGPROCMASK", Const, 0}, + {"SYS_SIGQUEUE", Const, 0}, + {"SYS_SIGQUEUEINFO", Const, 1}, + {"SYS_SIGRETURN", Const, 0}, + {"SYS_SIGSUSPEND", Const, 0}, + {"SYS_SIGSUSPEND_NOCANCEL", Const, 0}, + {"SYS_SIGTIMEDWAIT", Const, 0}, + 
{"SYS_SIGWAIT", Const, 0}, + {"SYS_SIGWAITINFO", Const, 0}, + {"SYS_SOCKET", Const, 0}, + {"SYS_SOCKETCALL", Const, 0}, + {"SYS_SOCKETPAIR", Const, 0}, + {"SYS_SPLICE", Const, 0}, + {"SYS_SSETMASK", Const, 0}, + {"SYS_SSTK", Const, 0}, + {"SYS_STACK_SNAPSHOT", Const, 0}, + {"SYS_STAT", Const, 0}, + {"SYS_STAT64", Const, 0}, + {"SYS_STAT64_EXTENDED", Const, 0}, + {"SYS_STATFS", Const, 0}, + {"SYS_STATFS64", Const, 0}, + {"SYS_STATV", Const, 0}, + {"SYS_STATVFS1", Const, 1}, + {"SYS_STAT_EXTENDED", Const, 0}, + {"SYS_STIME", Const, 0}, + {"SYS_STTY", Const, 0}, + {"SYS_SWAPCONTEXT", Const, 0}, + {"SYS_SWAPCTL", Const, 1}, + {"SYS_SWAPOFF", Const, 0}, + {"SYS_SWAPON", Const, 0}, + {"SYS_SYMLINK", Const, 0}, + {"SYS_SYMLINKAT", Const, 0}, + {"SYS_SYNC", Const, 0}, + {"SYS_SYNCFS", Const, 0}, + {"SYS_SYNC_FILE_RANGE", Const, 0}, + {"SYS_SYSARCH", Const, 0}, + {"SYS_SYSCALL", Const, 0}, + {"SYS_SYSCALL_BASE", Const, 0}, + {"SYS_SYSFS", Const, 0}, + {"SYS_SYSINFO", Const, 0}, + {"SYS_SYSLOG", Const, 0}, + {"SYS_TEE", Const, 0}, + {"SYS_TGKILL", Const, 0}, + {"SYS_THREAD_SELFID", Const, 0}, + {"SYS_THR_CREATE", Const, 0}, + {"SYS_THR_EXIT", Const, 0}, + {"SYS_THR_KILL", Const, 0}, + {"SYS_THR_KILL2", Const, 0}, + {"SYS_THR_NEW", Const, 0}, + {"SYS_THR_SELF", Const, 0}, + {"SYS_THR_SET_NAME", Const, 0}, + {"SYS_THR_SUSPEND", Const, 0}, + {"SYS_THR_WAKE", Const, 0}, + {"SYS_TIME", Const, 0}, + {"SYS_TIMERFD_CREATE", Const, 0}, + {"SYS_TIMERFD_GETTIME", Const, 0}, + {"SYS_TIMERFD_SETTIME", Const, 0}, + {"SYS_TIMER_CREATE", Const, 0}, + {"SYS_TIMER_DELETE", Const, 0}, + {"SYS_TIMER_GETOVERRUN", Const, 0}, + {"SYS_TIMER_GETTIME", Const, 0}, + {"SYS_TIMER_SETTIME", Const, 0}, + {"SYS_TIMES", Const, 0}, + {"SYS_TKILL", Const, 0}, + {"SYS_TRUNCATE", Const, 0}, + {"SYS_TRUNCATE64", Const, 0}, + {"SYS_TUXCALL", Const, 0}, + {"SYS_UGETRLIMIT", Const, 0}, + {"SYS_ULIMIT", Const, 0}, + {"SYS_UMASK", Const, 0}, + {"SYS_UMASK_EXTENDED", Const, 0}, + {"SYS_UMOUNT", Const, 0}, + 
{"SYS_UMOUNT2", Const, 0}, + {"SYS_UNAME", Const, 0}, + {"SYS_UNDELETE", Const, 0}, + {"SYS_UNLINK", Const, 0}, + {"SYS_UNLINKAT", Const, 0}, + {"SYS_UNMOUNT", Const, 0}, + {"SYS_UNSHARE", Const, 0}, + {"SYS_USELIB", Const, 0}, + {"SYS_USTAT", Const, 0}, + {"SYS_UTIME", Const, 0}, + {"SYS_UTIMENSAT", Const, 0}, + {"SYS_UTIMES", Const, 0}, + {"SYS_UTRACE", Const, 0}, + {"SYS_UUIDGEN", Const, 0}, + {"SYS_VADVISE", Const, 1}, + {"SYS_VFORK", Const, 0}, + {"SYS_VHANGUP", Const, 0}, + {"SYS_VM86", Const, 0}, + {"SYS_VM86OLD", Const, 0}, + {"SYS_VMSPLICE", Const, 0}, + {"SYS_VM_PRESSURE_MONITOR", Const, 0}, + {"SYS_VSERVER", Const, 0}, + {"SYS_WAIT4", Const, 0}, + {"SYS_WAIT4_NOCANCEL", Const, 0}, + {"SYS_WAIT6", Const, 1}, + {"SYS_WAITEVENT", Const, 0}, + {"SYS_WAITID", Const, 0}, + {"SYS_WAITID_NOCANCEL", Const, 0}, + {"SYS_WAITPID", Const, 0}, + {"SYS_WATCHEVENT", Const, 0}, + {"SYS_WORKQ_KERNRETURN", Const, 0}, + {"SYS_WORKQ_OPEN", Const, 0}, + {"SYS_WRITE", Const, 0}, + {"SYS_WRITEV", Const, 0}, + {"SYS_WRITEV_NOCANCEL", Const, 0}, + {"SYS_WRITE_NOCANCEL", Const, 0}, + {"SYS_YIELD", Const, 0}, + {"SYS__LLSEEK", Const, 0}, + {"SYS__LWP_CONTINUE", Const, 1}, + {"SYS__LWP_CREATE", Const, 1}, + {"SYS__LWP_CTL", Const, 1}, + {"SYS__LWP_DETACH", Const, 1}, + {"SYS__LWP_EXIT", Const, 1}, + {"SYS__LWP_GETNAME", Const, 1}, + {"SYS__LWP_GETPRIVATE", Const, 1}, + {"SYS__LWP_KILL", Const, 1}, + {"SYS__LWP_PARK", Const, 1}, + {"SYS__LWP_SELF", Const, 1}, + {"SYS__LWP_SETNAME", Const, 1}, + {"SYS__LWP_SETPRIVATE", Const, 1}, + {"SYS__LWP_SUSPEND", Const, 1}, + {"SYS__LWP_UNPARK", Const, 1}, + {"SYS__LWP_UNPARK_ALL", Const, 1}, + {"SYS__LWP_WAIT", Const, 1}, + {"SYS__LWP_WAKEUP", Const, 1}, + {"SYS__NEWSELECT", Const, 0}, + {"SYS__PSET_BIND", Const, 1}, + {"SYS__SCHED_GETAFFINITY", Const, 1}, + {"SYS__SCHED_GETPARAM", Const, 1}, + {"SYS__SCHED_SETAFFINITY", Const, 1}, + {"SYS__SCHED_SETPARAM", Const, 1}, + {"SYS__SYSCTL", Const, 0}, + {"SYS__UMTX_LOCK", Const, 0}, + 
{"SYS__UMTX_OP", Const, 0}, + {"SYS__UMTX_UNLOCK", Const, 0}, + {"SYS___ACL_ACLCHECK_FD", Const, 0}, + {"SYS___ACL_ACLCHECK_FILE", Const, 0}, + {"SYS___ACL_ACLCHECK_LINK", Const, 0}, + {"SYS___ACL_DELETE_FD", Const, 0}, + {"SYS___ACL_DELETE_FILE", Const, 0}, + {"SYS___ACL_DELETE_LINK", Const, 0}, + {"SYS___ACL_GET_FD", Const, 0}, + {"SYS___ACL_GET_FILE", Const, 0}, + {"SYS___ACL_GET_LINK", Const, 0}, + {"SYS___ACL_SET_FD", Const, 0}, + {"SYS___ACL_SET_FILE", Const, 0}, + {"SYS___ACL_SET_LINK", Const, 0}, + {"SYS___CAP_RIGHTS_GET", Const, 14}, + {"SYS___CLONE", Const, 1}, + {"SYS___DISABLE_THREADSIGNAL", Const, 0}, + {"SYS___GETCWD", Const, 0}, + {"SYS___GETLOGIN", Const, 1}, + {"SYS___GET_TCB", Const, 1}, + {"SYS___MAC_EXECVE", Const, 0}, + {"SYS___MAC_GETFSSTAT", Const, 0}, + {"SYS___MAC_GET_FD", Const, 0}, + {"SYS___MAC_GET_FILE", Const, 0}, + {"SYS___MAC_GET_LCID", Const, 0}, + {"SYS___MAC_GET_LCTX", Const, 0}, + {"SYS___MAC_GET_LINK", Const, 0}, + {"SYS___MAC_GET_MOUNT", Const, 0}, + {"SYS___MAC_GET_PID", Const, 0}, + {"SYS___MAC_GET_PROC", Const, 0}, + {"SYS___MAC_MOUNT", Const, 0}, + {"SYS___MAC_SET_FD", Const, 0}, + {"SYS___MAC_SET_FILE", Const, 0}, + {"SYS___MAC_SET_LCTX", Const, 0}, + {"SYS___MAC_SET_LINK", Const, 0}, + {"SYS___MAC_SET_PROC", Const, 0}, + {"SYS___MAC_SYSCALL", Const, 0}, + {"SYS___OLD_SEMWAIT_SIGNAL", Const, 0}, + {"SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL", Const, 0}, + {"SYS___POSIX_CHOWN", Const, 1}, + {"SYS___POSIX_FCHOWN", Const, 1}, + {"SYS___POSIX_LCHOWN", Const, 1}, + {"SYS___POSIX_RENAME", Const, 1}, + {"SYS___PTHREAD_CANCELED", Const, 0}, + {"SYS___PTHREAD_CHDIR", Const, 0}, + {"SYS___PTHREAD_FCHDIR", Const, 0}, + {"SYS___PTHREAD_KILL", Const, 0}, + {"SYS___PTHREAD_MARKCANCEL", Const, 0}, + {"SYS___PTHREAD_SIGMASK", Const, 0}, + {"SYS___QUOTACTL", Const, 1}, + {"SYS___SEMCTL", Const, 1}, + {"SYS___SEMWAIT_SIGNAL", Const, 0}, + {"SYS___SEMWAIT_SIGNAL_NOCANCEL", Const, 0}, + {"SYS___SETLOGIN", Const, 1}, + {"SYS___SETUGID", Const, 0}, + 
{"SYS___SET_TCB", Const, 1}, + {"SYS___SIGACTION_SIGTRAMP", Const, 1}, + {"SYS___SIGTIMEDWAIT", Const, 1}, + {"SYS___SIGWAIT", Const, 0}, + {"SYS___SIGWAIT_NOCANCEL", Const, 0}, + {"SYS___SYSCTL", Const, 0}, + {"SYS___TFORK", Const, 1}, + {"SYS___THREXIT", Const, 1}, + {"SYS___THRSIGDIVERT", Const, 1}, + {"SYS___THRSLEEP", Const, 1}, + {"SYS___THRWAKEUP", Const, 1}, + {"S_ARCH1", Const, 1}, + {"S_ARCH2", Const, 1}, + {"S_BLKSIZE", Const, 0}, + {"S_IEXEC", Const, 0}, + {"S_IFBLK", Const, 0}, + {"S_IFCHR", Const, 0}, + {"S_IFDIR", Const, 0}, + {"S_IFIFO", Const, 0}, + {"S_IFLNK", Const, 0}, + {"S_IFMT", Const, 0}, + {"S_IFREG", Const, 0}, + {"S_IFSOCK", Const, 0}, + {"S_IFWHT", Const, 0}, + {"S_IREAD", Const, 0}, + {"S_IRGRP", Const, 0}, + {"S_IROTH", Const, 0}, + {"S_IRUSR", Const, 0}, + {"S_IRWXG", Const, 0}, + {"S_IRWXO", Const, 0}, + {"S_IRWXU", Const, 0}, + {"S_ISGID", Const, 0}, + {"S_ISTXT", Const, 0}, + {"S_ISUID", Const, 0}, + {"S_ISVTX", Const, 0}, + {"S_IWGRP", Const, 0}, + {"S_IWOTH", Const, 0}, + {"S_IWRITE", Const, 0}, + {"S_IWUSR", Const, 0}, + {"S_IXGRP", Const, 0}, + {"S_IXOTH", Const, 0}, + {"S_IXUSR", Const, 0}, + {"S_LOGIN_SET", Const, 1}, + {"SecurityAttributes", Type, 0}, + {"SecurityAttributes.InheritHandle", Field, 0}, + {"SecurityAttributes.Length", Field, 0}, + {"SecurityAttributes.SecurityDescriptor", Field, 0}, + {"Seek", Func, 0}, + {"Select", Func, 0}, + {"Sendfile", Func, 0}, + {"Sendmsg", Func, 0}, + {"SendmsgN", Func, 3}, + {"Sendto", Func, 0}, + {"Servent", Type, 0}, + {"Servent.Aliases", Field, 0}, + {"Servent.Name", Field, 0}, + {"Servent.Port", Field, 0}, + {"Servent.Proto", Field, 0}, + {"SetBpf", Func, 0}, + {"SetBpfBuflen", Func, 0}, + {"SetBpfDatalink", Func, 0}, + {"SetBpfHeadercmpl", Func, 0}, + {"SetBpfImmediate", Func, 0}, + {"SetBpfInterface", Func, 0}, + {"SetBpfPromisc", Func, 0}, + {"SetBpfTimeout", Func, 0}, + {"SetCurrentDirectory", Func, 0}, + {"SetEndOfFile", Func, 0}, + {"SetEnvironmentVariable", Func, 0}, + 
{"SetFileAttributes", Func, 0}, + {"SetFileCompletionNotificationModes", Func, 2}, + {"SetFilePointer", Func, 0}, + {"SetFileTime", Func, 0}, + {"SetHandleInformation", Func, 0}, + {"SetKevent", Func, 0}, + {"SetLsfPromisc", Func, 0}, + {"SetNonblock", Func, 0}, + {"Setdomainname", Func, 0}, + {"Setegid", Func, 0}, + {"Setenv", Func, 0}, + {"Seteuid", Func, 0}, + {"Setfsgid", Func, 0}, + {"Setfsuid", Func, 0}, + {"Setgid", Func, 0}, + {"Setgroups", Func, 0}, + {"Sethostname", Func, 0}, + {"Setlogin", Func, 0}, + {"Setpgid", Func, 0}, + {"Setpriority", Func, 0}, + {"Setprivexec", Func, 0}, + {"Setregid", Func, 0}, + {"Setresgid", Func, 0}, + {"Setresuid", Func, 0}, + {"Setreuid", Func, 0}, + {"Setrlimit", Func, 0}, + {"Setsid", Func, 0}, + {"Setsockopt", Func, 0}, + {"SetsockoptByte", Func, 0}, + {"SetsockoptICMPv6Filter", Func, 2}, + {"SetsockoptIPMreq", Func, 0}, + {"SetsockoptIPMreqn", Func, 0}, + {"SetsockoptIPv6Mreq", Func, 0}, + {"SetsockoptInet4Addr", Func, 0}, + {"SetsockoptInt", Func, 0}, + {"SetsockoptLinger", Func, 0}, + {"SetsockoptString", Func, 0}, + {"SetsockoptTimeval", Func, 0}, + {"Settimeofday", Func, 0}, + {"Setuid", Func, 0}, + {"Setxattr", Func, 1}, + {"Shutdown", Func, 0}, + {"SidTypeAlias", Const, 0}, + {"SidTypeComputer", Const, 0}, + {"SidTypeDeletedAccount", Const, 0}, + {"SidTypeDomain", Const, 0}, + {"SidTypeGroup", Const, 0}, + {"SidTypeInvalid", Const, 0}, + {"SidTypeLabel", Const, 0}, + {"SidTypeUnknown", Const, 0}, + {"SidTypeUser", Const, 0}, + {"SidTypeWellKnownGroup", Const, 0}, + {"Signal", Type, 0}, + {"SizeofBpfHdr", Const, 0}, + {"SizeofBpfInsn", Const, 0}, + {"SizeofBpfProgram", Const, 0}, + {"SizeofBpfStat", Const, 0}, + {"SizeofBpfVersion", Const, 0}, + {"SizeofBpfZbuf", Const, 0}, + {"SizeofBpfZbufHeader", Const, 0}, + {"SizeofCmsghdr", Const, 0}, + {"SizeofICMPv6Filter", Const, 2}, + {"SizeofIPMreq", Const, 0}, + {"SizeofIPMreqn", Const, 0}, + {"SizeofIPv6MTUInfo", Const, 2}, + {"SizeofIPv6Mreq", Const, 0}, + 
{"SizeofIfAddrmsg", Const, 0}, + {"SizeofIfAnnounceMsghdr", Const, 1}, + {"SizeofIfData", Const, 0}, + {"SizeofIfInfomsg", Const, 0}, + {"SizeofIfMsghdr", Const, 0}, + {"SizeofIfaMsghdr", Const, 0}, + {"SizeofIfmaMsghdr", Const, 0}, + {"SizeofIfmaMsghdr2", Const, 0}, + {"SizeofInet4Pktinfo", Const, 0}, + {"SizeofInet6Pktinfo", Const, 0}, + {"SizeofInotifyEvent", Const, 0}, + {"SizeofLinger", Const, 0}, + {"SizeofMsghdr", Const, 0}, + {"SizeofNlAttr", Const, 0}, + {"SizeofNlMsgerr", Const, 0}, + {"SizeofNlMsghdr", Const, 0}, + {"SizeofRtAttr", Const, 0}, + {"SizeofRtGenmsg", Const, 0}, + {"SizeofRtMetrics", Const, 0}, + {"SizeofRtMsg", Const, 0}, + {"SizeofRtMsghdr", Const, 0}, + {"SizeofRtNexthop", Const, 0}, + {"SizeofSockFilter", Const, 0}, + {"SizeofSockFprog", Const, 0}, + {"SizeofSockaddrAny", Const, 0}, + {"SizeofSockaddrDatalink", Const, 0}, + {"SizeofSockaddrInet4", Const, 0}, + {"SizeofSockaddrInet6", Const, 0}, + {"SizeofSockaddrLinklayer", Const, 0}, + {"SizeofSockaddrNetlink", Const, 0}, + {"SizeofSockaddrUnix", Const, 0}, + {"SizeofTCPInfo", Const, 1}, + {"SizeofUcred", Const, 0}, + {"SlicePtrFromStrings", Func, 1}, + {"SockFilter", Type, 0}, + {"SockFilter.Code", Field, 0}, + {"SockFilter.Jf", Field, 0}, + {"SockFilter.Jt", Field, 0}, + {"SockFilter.K", Field, 0}, + {"SockFprog", Type, 0}, + {"SockFprog.Filter", Field, 0}, + {"SockFprog.Len", Field, 0}, + {"SockFprog.Pad_cgo_0", Field, 0}, + {"Sockaddr", Type, 0}, + {"SockaddrDatalink", Type, 0}, + {"SockaddrDatalink.Alen", Field, 0}, + {"SockaddrDatalink.Data", Field, 0}, + {"SockaddrDatalink.Family", Field, 0}, + {"SockaddrDatalink.Index", Field, 0}, + {"SockaddrDatalink.Len", Field, 0}, + {"SockaddrDatalink.Nlen", Field, 0}, + {"SockaddrDatalink.Slen", Field, 0}, + {"SockaddrDatalink.Type", Field, 0}, + {"SockaddrGen", Type, 0}, + {"SockaddrInet4", Type, 0}, + {"SockaddrInet4.Addr", Field, 0}, + {"SockaddrInet4.Port", Field, 0}, + {"SockaddrInet6", Type, 0}, + {"SockaddrInet6.Addr", Field, 0}, + 
{"SockaddrInet6.Port", Field, 0}, + {"SockaddrInet6.ZoneId", Field, 0}, + {"SockaddrLinklayer", Type, 0}, + {"SockaddrLinklayer.Addr", Field, 0}, + {"SockaddrLinklayer.Halen", Field, 0}, + {"SockaddrLinklayer.Hatype", Field, 0}, + {"SockaddrLinklayer.Ifindex", Field, 0}, + {"SockaddrLinklayer.Pkttype", Field, 0}, + {"SockaddrLinklayer.Protocol", Field, 0}, + {"SockaddrNetlink", Type, 0}, + {"SockaddrNetlink.Family", Field, 0}, + {"SockaddrNetlink.Groups", Field, 0}, + {"SockaddrNetlink.Pad", Field, 0}, + {"SockaddrNetlink.Pid", Field, 0}, + {"SockaddrUnix", Type, 0}, + {"SockaddrUnix.Name", Field, 0}, + {"Socket", Func, 0}, + {"SocketControlMessage", Type, 0}, + {"SocketControlMessage.Data", Field, 0}, + {"SocketControlMessage.Header", Field, 0}, + {"SocketDisableIPv6", Var, 0}, + {"Socketpair", Func, 0}, + {"Splice", Func, 0}, + {"StartProcess", Func, 0}, + {"StartupInfo", Type, 0}, + {"StartupInfo.Cb", Field, 0}, + {"StartupInfo.Desktop", Field, 0}, + {"StartupInfo.FillAttribute", Field, 0}, + {"StartupInfo.Flags", Field, 0}, + {"StartupInfo.ShowWindow", Field, 0}, + {"StartupInfo.StdErr", Field, 0}, + {"StartupInfo.StdInput", Field, 0}, + {"StartupInfo.StdOutput", Field, 0}, + {"StartupInfo.Title", Field, 0}, + {"StartupInfo.X", Field, 0}, + {"StartupInfo.XCountChars", Field, 0}, + {"StartupInfo.XSize", Field, 0}, + {"StartupInfo.Y", Field, 0}, + {"StartupInfo.YCountChars", Field, 0}, + {"StartupInfo.YSize", Field, 0}, + {"Stat", Func, 0}, + {"Stat_t", Type, 0}, + {"Stat_t.Atim", Field, 0}, + {"Stat_t.Atim_ext", Field, 12}, + {"Stat_t.Atimespec", Field, 0}, + {"Stat_t.Birthtimespec", Field, 0}, + {"Stat_t.Blksize", Field, 0}, + {"Stat_t.Blocks", Field, 0}, + {"Stat_t.Btim_ext", Field, 12}, + {"Stat_t.Ctim", Field, 0}, + {"Stat_t.Ctim_ext", Field, 12}, + {"Stat_t.Ctimespec", Field, 0}, + {"Stat_t.Dev", Field, 0}, + {"Stat_t.Flags", Field, 0}, + {"Stat_t.Gen", Field, 0}, + {"Stat_t.Gid", Field, 0}, + {"Stat_t.Ino", Field, 0}, + {"Stat_t.Lspare", Field, 0}, + 
{"Stat_t.Lspare0", Field, 2}, + {"Stat_t.Lspare1", Field, 2}, + {"Stat_t.Mode", Field, 0}, + {"Stat_t.Mtim", Field, 0}, + {"Stat_t.Mtim_ext", Field, 12}, + {"Stat_t.Mtimespec", Field, 0}, + {"Stat_t.Nlink", Field, 0}, + {"Stat_t.Pad_cgo_0", Field, 0}, + {"Stat_t.Pad_cgo_1", Field, 0}, + {"Stat_t.Pad_cgo_2", Field, 0}, + {"Stat_t.Padding0", Field, 12}, + {"Stat_t.Padding1", Field, 12}, + {"Stat_t.Qspare", Field, 0}, + {"Stat_t.Rdev", Field, 0}, + {"Stat_t.Size", Field, 0}, + {"Stat_t.Spare", Field, 2}, + {"Stat_t.Uid", Field, 0}, + {"Stat_t.X__pad0", Field, 0}, + {"Stat_t.X__pad1", Field, 0}, + {"Stat_t.X__pad2", Field, 0}, + {"Stat_t.X__st_birthtim", Field, 2}, + {"Stat_t.X__st_ino", Field, 0}, + {"Stat_t.X__unused", Field, 0}, + {"Statfs", Func, 0}, + {"Statfs_t", Type, 0}, + {"Statfs_t.Asyncreads", Field, 0}, + {"Statfs_t.Asyncwrites", Field, 0}, + {"Statfs_t.Bavail", Field, 0}, + {"Statfs_t.Bfree", Field, 0}, + {"Statfs_t.Blocks", Field, 0}, + {"Statfs_t.Bsize", Field, 0}, + {"Statfs_t.Charspare", Field, 0}, + {"Statfs_t.F_asyncreads", Field, 2}, + {"Statfs_t.F_asyncwrites", Field, 2}, + {"Statfs_t.F_bavail", Field, 2}, + {"Statfs_t.F_bfree", Field, 2}, + {"Statfs_t.F_blocks", Field, 2}, + {"Statfs_t.F_bsize", Field, 2}, + {"Statfs_t.F_ctime", Field, 2}, + {"Statfs_t.F_favail", Field, 2}, + {"Statfs_t.F_ffree", Field, 2}, + {"Statfs_t.F_files", Field, 2}, + {"Statfs_t.F_flags", Field, 2}, + {"Statfs_t.F_fsid", Field, 2}, + {"Statfs_t.F_fstypename", Field, 2}, + {"Statfs_t.F_iosize", Field, 2}, + {"Statfs_t.F_mntfromname", Field, 2}, + {"Statfs_t.F_mntfromspec", Field, 3}, + {"Statfs_t.F_mntonname", Field, 2}, + {"Statfs_t.F_namemax", Field, 2}, + {"Statfs_t.F_owner", Field, 2}, + {"Statfs_t.F_spare", Field, 2}, + {"Statfs_t.F_syncreads", Field, 2}, + {"Statfs_t.F_syncwrites", Field, 2}, + {"Statfs_t.Ffree", Field, 0}, + {"Statfs_t.Files", Field, 0}, + {"Statfs_t.Flags", Field, 0}, + {"Statfs_t.Frsize", Field, 0}, + {"Statfs_t.Fsid", Field, 0}, + 
{"Statfs_t.Fssubtype", Field, 0}, + {"Statfs_t.Fstypename", Field, 0}, + {"Statfs_t.Iosize", Field, 0}, + {"Statfs_t.Mntfromname", Field, 0}, + {"Statfs_t.Mntonname", Field, 0}, + {"Statfs_t.Mount_info", Field, 2}, + {"Statfs_t.Namelen", Field, 0}, + {"Statfs_t.Namemax", Field, 0}, + {"Statfs_t.Owner", Field, 0}, + {"Statfs_t.Pad_cgo_0", Field, 0}, + {"Statfs_t.Pad_cgo_1", Field, 2}, + {"Statfs_t.Reserved", Field, 0}, + {"Statfs_t.Spare", Field, 0}, + {"Statfs_t.Syncreads", Field, 0}, + {"Statfs_t.Syncwrites", Field, 0}, + {"Statfs_t.Type", Field, 0}, + {"Statfs_t.Version", Field, 0}, + {"Stderr", Var, 0}, + {"Stdin", Var, 0}, + {"Stdout", Var, 0}, + {"StringBytePtr", Func, 0}, + {"StringByteSlice", Func, 0}, + {"StringSlicePtr", Func, 0}, + {"StringToSid", Func, 0}, + {"StringToUTF16", Func, 0}, + {"StringToUTF16Ptr", Func, 0}, + {"Symlink", Func, 0}, + {"Sync", Func, 0}, + {"SyncFileRange", Func, 0}, + {"SysProcAttr", Type, 0}, + {"SysProcAttr.AdditionalInheritedHandles", Field, 17}, + {"SysProcAttr.AmbientCaps", Field, 9}, + {"SysProcAttr.CgroupFD", Field, 20}, + {"SysProcAttr.Chroot", Field, 0}, + {"SysProcAttr.Cloneflags", Field, 2}, + {"SysProcAttr.CmdLine", Field, 0}, + {"SysProcAttr.CreationFlags", Field, 1}, + {"SysProcAttr.Credential", Field, 0}, + {"SysProcAttr.Ctty", Field, 1}, + {"SysProcAttr.Foreground", Field, 5}, + {"SysProcAttr.GidMappings", Field, 4}, + {"SysProcAttr.GidMappingsEnableSetgroups", Field, 5}, + {"SysProcAttr.HideWindow", Field, 0}, + {"SysProcAttr.Jail", Field, 21}, + {"SysProcAttr.NoInheritHandles", Field, 16}, + {"SysProcAttr.Noctty", Field, 0}, + {"SysProcAttr.ParentProcess", Field, 17}, + {"SysProcAttr.Pdeathsig", Field, 0}, + {"SysProcAttr.Pgid", Field, 5}, + {"SysProcAttr.PidFD", Field, 22}, + {"SysProcAttr.ProcessAttributes", Field, 13}, + {"SysProcAttr.Ptrace", Field, 0}, + {"SysProcAttr.Setctty", Field, 0}, + {"SysProcAttr.Setpgid", Field, 0}, + {"SysProcAttr.Setsid", Field, 0}, + {"SysProcAttr.ThreadAttributes", Field, 13}, 
+ {"SysProcAttr.Token", Field, 10}, + {"SysProcAttr.UidMappings", Field, 4}, + {"SysProcAttr.Unshareflags", Field, 7}, + {"SysProcAttr.UseCgroupFD", Field, 20}, + {"SysProcIDMap", Type, 4}, + {"SysProcIDMap.ContainerID", Field, 4}, + {"SysProcIDMap.HostID", Field, 4}, + {"SysProcIDMap.Size", Field, 4}, + {"Syscall", Func, 0}, + {"Syscall12", Func, 0}, + {"Syscall15", Func, 0}, + {"Syscall18", Func, 12}, + {"Syscall6", Func, 0}, + {"Syscall9", Func, 0}, + {"SyscallN", Func, 18}, + {"Sysctl", Func, 0}, + {"SysctlUint32", Func, 0}, + {"Sysctlnode", Type, 2}, + {"Sysctlnode.Flags", Field, 2}, + {"Sysctlnode.Name", Field, 2}, + {"Sysctlnode.Num", Field, 2}, + {"Sysctlnode.Un", Field, 2}, + {"Sysctlnode.Ver", Field, 2}, + {"Sysctlnode.X__rsvd", Field, 2}, + {"Sysctlnode.X_sysctl_desc", Field, 2}, + {"Sysctlnode.X_sysctl_func", Field, 2}, + {"Sysctlnode.X_sysctl_parent", Field, 2}, + {"Sysctlnode.X_sysctl_size", Field, 2}, + {"Sysinfo", Func, 0}, + {"Sysinfo_t", Type, 0}, + {"Sysinfo_t.Bufferram", Field, 0}, + {"Sysinfo_t.Freehigh", Field, 0}, + {"Sysinfo_t.Freeram", Field, 0}, + {"Sysinfo_t.Freeswap", Field, 0}, + {"Sysinfo_t.Loads", Field, 0}, + {"Sysinfo_t.Pad", Field, 0}, + {"Sysinfo_t.Pad_cgo_0", Field, 0}, + {"Sysinfo_t.Pad_cgo_1", Field, 0}, + {"Sysinfo_t.Procs", Field, 0}, + {"Sysinfo_t.Sharedram", Field, 0}, + {"Sysinfo_t.Totalhigh", Field, 0}, + {"Sysinfo_t.Totalram", Field, 0}, + {"Sysinfo_t.Totalswap", Field, 0}, + {"Sysinfo_t.Unit", Field, 0}, + {"Sysinfo_t.Uptime", Field, 0}, + {"Sysinfo_t.X_f", Field, 0}, + {"Systemtime", Type, 0}, + {"Systemtime.Day", Field, 0}, + {"Systemtime.DayOfWeek", Field, 0}, + {"Systemtime.Hour", Field, 0}, + {"Systemtime.Milliseconds", Field, 0}, + {"Systemtime.Minute", Field, 0}, + {"Systemtime.Month", Field, 0}, + {"Systemtime.Second", Field, 0}, + {"Systemtime.Year", Field, 0}, + {"TCGETS", Const, 0}, + {"TCIFLUSH", Const, 1}, + {"TCIOFLUSH", Const, 1}, + {"TCOFLUSH", Const, 1}, + {"TCPInfo", Type, 1}, + {"TCPInfo.Advmss", 
Field, 1}, + {"TCPInfo.Ato", Field, 1}, + {"TCPInfo.Backoff", Field, 1}, + {"TCPInfo.Ca_state", Field, 1}, + {"TCPInfo.Fackets", Field, 1}, + {"TCPInfo.Last_ack_recv", Field, 1}, + {"TCPInfo.Last_ack_sent", Field, 1}, + {"TCPInfo.Last_data_recv", Field, 1}, + {"TCPInfo.Last_data_sent", Field, 1}, + {"TCPInfo.Lost", Field, 1}, + {"TCPInfo.Options", Field, 1}, + {"TCPInfo.Pad_cgo_0", Field, 1}, + {"TCPInfo.Pmtu", Field, 1}, + {"TCPInfo.Probes", Field, 1}, + {"TCPInfo.Rcv_mss", Field, 1}, + {"TCPInfo.Rcv_rtt", Field, 1}, + {"TCPInfo.Rcv_space", Field, 1}, + {"TCPInfo.Rcv_ssthresh", Field, 1}, + {"TCPInfo.Reordering", Field, 1}, + {"TCPInfo.Retrans", Field, 1}, + {"TCPInfo.Retransmits", Field, 1}, + {"TCPInfo.Rto", Field, 1}, + {"TCPInfo.Rtt", Field, 1}, + {"TCPInfo.Rttvar", Field, 1}, + {"TCPInfo.Sacked", Field, 1}, + {"TCPInfo.Snd_cwnd", Field, 1}, + {"TCPInfo.Snd_mss", Field, 1}, + {"TCPInfo.Snd_ssthresh", Field, 1}, + {"TCPInfo.State", Field, 1}, + {"TCPInfo.Total_retrans", Field, 1}, + {"TCPInfo.Unacked", Field, 1}, + {"TCPKeepalive", Type, 3}, + {"TCPKeepalive.Interval", Field, 3}, + {"TCPKeepalive.OnOff", Field, 3}, + {"TCPKeepalive.Time", Field, 3}, + {"TCP_CA_NAME_MAX", Const, 0}, + {"TCP_CONGCTL", Const, 1}, + {"TCP_CONGESTION", Const, 0}, + {"TCP_CONNECTIONTIMEOUT", Const, 0}, + {"TCP_CORK", Const, 0}, + {"TCP_DEFER_ACCEPT", Const, 0}, + {"TCP_ENABLE_ECN", Const, 16}, + {"TCP_INFO", Const, 0}, + {"TCP_KEEPALIVE", Const, 0}, + {"TCP_KEEPCNT", Const, 0}, + {"TCP_KEEPIDLE", Const, 0}, + {"TCP_KEEPINIT", Const, 1}, + {"TCP_KEEPINTVL", Const, 0}, + {"TCP_LINGER2", Const, 0}, + {"TCP_MAXBURST", Const, 0}, + {"TCP_MAXHLEN", Const, 0}, + {"TCP_MAXOLEN", Const, 0}, + {"TCP_MAXSEG", Const, 0}, + {"TCP_MAXWIN", Const, 0}, + {"TCP_MAX_SACK", Const, 0}, + {"TCP_MAX_WINSHIFT", Const, 0}, + {"TCP_MD5SIG", Const, 0}, + {"TCP_MD5SIG_MAXKEYLEN", Const, 0}, + {"TCP_MINMSS", Const, 0}, + {"TCP_MINMSSOVERLOAD", Const, 0}, + {"TCP_MSS", Const, 0}, + {"TCP_NODELAY", Const, 0}, + 
{"TCP_NOOPT", Const, 0}, + {"TCP_NOPUSH", Const, 0}, + {"TCP_NOTSENT_LOWAT", Const, 16}, + {"TCP_NSTATES", Const, 1}, + {"TCP_QUICKACK", Const, 0}, + {"TCP_RXT_CONNDROPTIME", Const, 0}, + {"TCP_RXT_FINDROP", Const, 0}, + {"TCP_SACK_ENABLE", Const, 1}, + {"TCP_SENDMOREACKS", Const, 16}, + {"TCP_SYNCNT", Const, 0}, + {"TCP_VENDOR", Const, 3}, + {"TCP_WINDOW_CLAMP", Const, 0}, + {"TCSAFLUSH", Const, 1}, + {"TCSETS", Const, 0}, + {"TF_DISCONNECT", Const, 0}, + {"TF_REUSE_SOCKET", Const, 0}, + {"TF_USE_DEFAULT_WORKER", Const, 0}, + {"TF_USE_KERNEL_APC", Const, 0}, + {"TF_USE_SYSTEM_THREAD", Const, 0}, + {"TF_WRITE_BEHIND", Const, 0}, + {"TH32CS_INHERIT", Const, 4}, + {"TH32CS_SNAPALL", Const, 4}, + {"TH32CS_SNAPHEAPLIST", Const, 4}, + {"TH32CS_SNAPMODULE", Const, 4}, + {"TH32CS_SNAPMODULE32", Const, 4}, + {"TH32CS_SNAPPROCESS", Const, 4}, + {"TH32CS_SNAPTHREAD", Const, 4}, + {"TIME_ZONE_ID_DAYLIGHT", Const, 0}, + {"TIME_ZONE_ID_STANDARD", Const, 0}, + {"TIME_ZONE_ID_UNKNOWN", Const, 0}, + {"TIOCCBRK", Const, 0}, + {"TIOCCDTR", Const, 0}, + {"TIOCCONS", Const, 0}, + {"TIOCDCDTIMESTAMP", Const, 0}, + {"TIOCDRAIN", Const, 0}, + {"TIOCDSIMICROCODE", Const, 0}, + {"TIOCEXCL", Const, 0}, + {"TIOCEXT", Const, 0}, + {"TIOCFLAG_CDTRCTS", Const, 1}, + {"TIOCFLAG_CLOCAL", Const, 1}, + {"TIOCFLAG_CRTSCTS", Const, 1}, + {"TIOCFLAG_MDMBUF", Const, 1}, + {"TIOCFLAG_PPS", Const, 1}, + {"TIOCFLAG_SOFTCAR", Const, 1}, + {"TIOCFLUSH", Const, 0}, + {"TIOCGDEV", Const, 0}, + {"TIOCGDRAINWAIT", Const, 0}, + {"TIOCGETA", Const, 0}, + {"TIOCGETD", Const, 0}, + {"TIOCGFLAGS", Const, 1}, + {"TIOCGICOUNT", Const, 0}, + {"TIOCGLCKTRMIOS", Const, 0}, + {"TIOCGLINED", Const, 1}, + {"TIOCGPGRP", Const, 0}, + {"TIOCGPTN", Const, 0}, + {"TIOCGQSIZE", Const, 1}, + {"TIOCGRANTPT", Const, 1}, + {"TIOCGRS485", Const, 0}, + {"TIOCGSERIAL", Const, 0}, + {"TIOCGSID", Const, 0}, + {"TIOCGSIZE", Const, 1}, + {"TIOCGSOFTCAR", Const, 0}, + {"TIOCGTSTAMP", Const, 1}, + {"TIOCGWINSZ", Const, 0}, + {"TIOCINQ", 
Const, 0}, + {"TIOCIXOFF", Const, 0}, + {"TIOCIXON", Const, 0}, + {"TIOCLINUX", Const, 0}, + {"TIOCMBIC", Const, 0}, + {"TIOCMBIS", Const, 0}, + {"TIOCMGDTRWAIT", Const, 0}, + {"TIOCMGET", Const, 0}, + {"TIOCMIWAIT", Const, 0}, + {"TIOCMODG", Const, 0}, + {"TIOCMODS", Const, 0}, + {"TIOCMSDTRWAIT", Const, 0}, + {"TIOCMSET", Const, 0}, + {"TIOCM_CAR", Const, 0}, + {"TIOCM_CD", Const, 0}, + {"TIOCM_CTS", Const, 0}, + {"TIOCM_DCD", Const, 0}, + {"TIOCM_DSR", Const, 0}, + {"TIOCM_DTR", Const, 0}, + {"TIOCM_LE", Const, 0}, + {"TIOCM_RI", Const, 0}, + {"TIOCM_RNG", Const, 0}, + {"TIOCM_RTS", Const, 0}, + {"TIOCM_SR", Const, 0}, + {"TIOCM_ST", Const, 0}, + {"TIOCNOTTY", Const, 0}, + {"TIOCNXCL", Const, 0}, + {"TIOCOUTQ", Const, 0}, + {"TIOCPKT", Const, 0}, + {"TIOCPKT_DATA", Const, 0}, + {"TIOCPKT_DOSTOP", Const, 0}, + {"TIOCPKT_FLUSHREAD", Const, 0}, + {"TIOCPKT_FLUSHWRITE", Const, 0}, + {"TIOCPKT_IOCTL", Const, 0}, + {"TIOCPKT_NOSTOP", Const, 0}, + {"TIOCPKT_START", Const, 0}, + {"TIOCPKT_STOP", Const, 0}, + {"TIOCPTMASTER", Const, 0}, + {"TIOCPTMGET", Const, 1}, + {"TIOCPTSNAME", Const, 1}, + {"TIOCPTYGNAME", Const, 0}, + {"TIOCPTYGRANT", Const, 0}, + {"TIOCPTYUNLK", Const, 0}, + {"TIOCRCVFRAME", Const, 1}, + {"TIOCREMOTE", Const, 0}, + {"TIOCSBRK", Const, 0}, + {"TIOCSCONS", Const, 0}, + {"TIOCSCTTY", Const, 0}, + {"TIOCSDRAINWAIT", Const, 0}, + {"TIOCSDTR", Const, 0}, + {"TIOCSERCONFIG", Const, 0}, + {"TIOCSERGETLSR", Const, 0}, + {"TIOCSERGETMULTI", Const, 0}, + {"TIOCSERGSTRUCT", Const, 0}, + {"TIOCSERGWILD", Const, 0}, + {"TIOCSERSETMULTI", Const, 0}, + {"TIOCSERSWILD", Const, 0}, + {"TIOCSER_TEMT", Const, 0}, + {"TIOCSETA", Const, 0}, + {"TIOCSETAF", Const, 0}, + {"TIOCSETAW", Const, 0}, + {"TIOCSETD", Const, 0}, + {"TIOCSFLAGS", Const, 1}, + {"TIOCSIG", Const, 0}, + {"TIOCSLCKTRMIOS", Const, 0}, + {"TIOCSLINED", Const, 1}, + {"TIOCSPGRP", Const, 0}, + {"TIOCSPTLCK", Const, 0}, + {"TIOCSQSIZE", Const, 1}, + {"TIOCSRS485", Const, 0}, + {"TIOCSSERIAL", Const, 0}, + 
{"TIOCSSIZE", Const, 1}, + {"TIOCSSOFTCAR", Const, 0}, + {"TIOCSTART", Const, 0}, + {"TIOCSTAT", Const, 0}, + {"TIOCSTI", Const, 0}, + {"TIOCSTOP", Const, 0}, + {"TIOCSTSTAMP", Const, 1}, + {"TIOCSWINSZ", Const, 0}, + {"TIOCTIMESTAMP", Const, 0}, + {"TIOCUCNTL", Const, 0}, + {"TIOCVHANGUP", Const, 0}, + {"TIOCXMTFRAME", Const, 1}, + {"TOKEN_ADJUST_DEFAULT", Const, 0}, + {"TOKEN_ADJUST_GROUPS", Const, 0}, + {"TOKEN_ADJUST_PRIVILEGES", Const, 0}, + {"TOKEN_ADJUST_SESSIONID", Const, 11}, + {"TOKEN_ALL_ACCESS", Const, 0}, + {"TOKEN_ASSIGN_PRIMARY", Const, 0}, + {"TOKEN_DUPLICATE", Const, 0}, + {"TOKEN_EXECUTE", Const, 0}, + {"TOKEN_IMPERSONATE", Const, 0}, + {"TOKEN_QUERY", Const, 0}, + {"TOKEN_QUERY_SOURCE", Const, 0}, + {"TOKEN_READ", Const, 0}, + {"TOKEN_WRITE", Const, 0}, + {"TOSTOP", Const, 0}, + {"TRUNCATE_EXISTING", Const, 0}, + {"TUNATTACHFILTER", Const, 0}, + {"TUNDETACHFILTER", Const, 0}, + {"TUNGETFEATURES", Const, 0}, + {"TUNGETIFF", Const, 0}, + {"TUNGETSNDBUF", Const, 0}, + {"TUNGETVNETHDRSZ", Const, 0}, + {"TUNSETDEBUG", Const, 0}, + {"TUNSETGROUP", Const, 0}, + {"TUNSETIFF", Const, 0}, + {"TUNSETLINK", Const, 0}, + {"TUNSETNOCSUM", Const, 0}, + {"TUNSETOFFLOAD", Const, 0}, + {"TUNSETOWNER", Const, 0}, + {"TUNSETPERSIST", Const, 0}, + {"TUNSETSNDBUF", Const, 0}, + {"TUNSETTXFILTER", Const, 0}, + {"TUNSETVNETHDRSZ", Const, 0}, + {"Tee", Func, 0}, + {"TerminateProcess", Func, 0}, + {"Termios", Type, 0}, + {"Termios.Cc", Field, 0}, + {"Termios.Cflag", Field, 0}, + {"Termios.Iflag", Field, 0}, + {"Termios.Ispeed", Field, 0}, + {"Termios.Lflag", Field, 0}, + {"Termios.Line", Field, 0}, + {"Termios.Oflag", Field, 0}, + {"Termios.Ospeed", Field, 0}, + {"Termios.Pad_cgo_0", Field, 0}, + {"Tgkill", Func, 0}, + {"Time", Func, 0}, + {"Time_t", Type, 0}, + {"Times", Func, 0}, + {"Timespec", Type, 0}, + {"Timespec.Nsec", Field, 0}, + {"Timespec.Pad_cgo_0", Field, 2}, + {"Timespec.Sec", Field, 0}, + {"TimespecToNsec", Func, 0}, + {"Timeval", Type, 0}, + 
{"Timeval.Pad_cgo_0", Field, 0}, + {"Timeval.Sec", Field, 0}, + {"Timeval.Usec", Field, 0}, + {"Timeval32", Type, 0}, + {"Timeval32.Sec", Field, 0}, + {"Timeval32.Usec", Field, 0}, + {"TimevalToNsec", Func, 0}, + {"Timex", Type, 0}, + {"Timex.Calcnt", Field, 0}, + {"Timex.Constant", Field, 0}, + {"Timex.Errcnt", Field, 0}, + {"Timex.Esterror", Field, 0}, + {"Timex.Freq", Field, 0}, + {"Timex.Jitcnt", Field, 0}, + {"Timex.Jitter", Field, 0}, + {"Timex.Maxerror", Field, 0}, + {"Timex.Modes", Field, 0}, + {"Timex.Offset", Field, 0}, + {"Timex.Pad_cgo_0", Field, 0}, + {"Timex.Pad_cgo_1", Field, 0}, + {"Timex.Pad_cgo_2", Field, 0}, + {"Timex.Pad_cgo_3", Field, 0}, + {"Timex.Ppsfreq", Field, 0}, + {"Timex.Precision", Field, 0}, + {"Timex.Shift", Field, 0}, + {"Timex.Stabil", Field, 0}, + {"Timex.Status", Field, 0}, + {"Timex.Stbcnt", Field, 0}, + {"Timex.Tai", Field, 0}, + {"Timex.Tick", Field, 0}, + {"Timex.Time", Field, 0}, + {"Timex.Tolerance", Field, 0}, + {"Timezoneinformation", Type, 0}, + {"Timezoneinformation.Bias", Field, 0}, + {"Timezoneinformation.DaylightBias", Field, 0}, + {"Timezoneinformation.DaylightDate", Field, 0}, + {"Timezoneinformation.DaylightName", Field, 0}, + {"Timezoneinformation.StandardBias", Field, 0}, + {"Timezoneinformation.StandardDate", Field, 0}, + {"Timezoneinformation.StandardName", Field, 0}, + {"Tms", Type, 0}, + {"Tms.Cstime", Field, 0}, + {"Tms.Cutime", Field, 0}, + {"Tms.Stime", Field, 0}, + {"Tms.Utime", Field, 0}, + {"Token", Type, 0}, + {"TokenAccessInformation", Const, 0}, + {"TokenAuditPolicy", Const, 0}, + {"TokenDefaultDacl", Const, 0}, + {"TokenElevation", Const, 0}, + {"TokenElevationType", Const, 0}, + {"TokenGroups", Const, 0}, + {"TokenGroupsAndPrivileges", Const, 0}, + {"TokenHasRestrictions", Const, 0}, + {"TokenImpersonationLevel", Const, 0}, + {"TokenIntegrityLevel", Const, 0}, + {"TokenLinkedToken", Const, 0}, + {"TokenLogonSid", Const, 0}, + {"TokenMandatoryPolicy", Const, 0}, + {"TokenOrigin", Const, 0}, + 
{"TokenOwner", Const, 0}, + {"TokenPrimaryGroup", Const, 0}, + {"TokenPrivileges", Const, 0}, + {"TokenRestrictedSids", Const, 0}, + {"TokenSandBoxInert", Const, 0}, + {"TokenSessionId", Const, 0}, + {"TokenSessionReference", Const, 0}, + {"TokenSource", Const, 0}, + {"TokenStatistics", Const, 0}, + {"TokenType", Const, 0}, + {"TokenUIAccess", Const, 0}, + {"TokenUser", Const, 0}, + {"TokenVirtualizationAllowed", Const, 0}, + {"TokenVirtualizationEnabled", Const, 0}, + {"Tokenprimarygroup", Type, 0}, + {"Tokenprimarygroup.PrimaryGroup", Field, 0}, + {"Tokenuser", Type, 0}, + {"Tokenuser.User", Field, 0}, + {"TranslateAccountName", Func, 0}, + {"TranslateName", Func, 0}, + {"TransmitFile", Func, 0}, + {"TransmitFileBuffers", Type, 0}, + {"TransmitFileBuffers.Head", Field, 0}, + {"TransmitFileBuffers.HeadLength", Field, 0}, + {"TransmitFileBuffers.Tail", Field, 0}, + {"TransmitFileBuffers.TailLength", Field, 0}, + {"Truncate", Func, 0}, + {"UNIX_PATH_MAX", Const, 12}, + {"USAGE_MATCH_TYPE_AND", Const, 0}, + {"USAGE_MATCH_TYPE_OR", Const, 0}, + {"UTF16FromString", Func, 1}, + {"UTF16PtrFromString", Func, 1}, + {"UTF16ToString", Func, 0}, + {"Ucred", Type, 0}, + {"Ucred.Gid", Field, 0}, + {"Ucred.Pid", Field, 0}, + {"Ucred.Uid", Field, 0}, + {"Umask", Func, 0}, + {"Uname", Func, 0}, + {"Undelete", Func, 0}, + {"UnixCredentials", Func, 0}, + {"UnixRights", Func, 0}, + {"Unlink", Func, 0}, + {"Unlinkat", Func, 0}, + {"UnmapViewOfFile", Func, 0}, + {"Unmount", Func, 0}, + {"Unsetenv", Func, 4}, + {"Unshare", Func, 0}, + {"UserInfo10", Type, 0}, + {"UserInfo10.Comment", Field, 0}, + {"UserInfo10.FullName", Field, 0}, + {"UserInfo10.Name", Field, 0}, + {"UserInfo10.UsrComment", Field, 0}, + {"Ustat", Func, 0}, + {"Ustat_t", Type, 0}, + {"Ustat_t.Fname", Field, 0}, + {"Ustat_t.Fpack", Field, 0}, + {"Ustat_t.Pad_cgo_0", Field, 0}, + {"Ustat_t.Pad_cgo_1", Field, 0}, + {"Ustat_t.Tfree", Field, 0}, + {"Ustat_t.Tinode", Field, 0}, + {"Utimbuf", Type, 0}, + {"Utimbuf.Actime", 
Field, 0}, + {"Utimbuf.Modtime", Field, 0}, + {"Utime", Func, 0}, + {"Utimes", Func, 0}, + {"UtimesNano", Func, 1}, + {"Utsname", Type, 0}, + {"Utsname.Domainname", Field, 0}, + {"Utsname.Machine", Field, 0}, + {"Utsname.Nodename", Field, 0}, + {"Utsname.Release", Field, 0}, + {"Utsname.Sysname", Field, 0}, + {"Utsname.Version", Field, 0}, + {"VDISCARD", Const, 0}, + {"VDSUSP", Const, 1}, + {"VEOF", Const, 0}, + {"VEOL", Const, 0}, + {"VEOL2", Const, 0}, + {"VERASE", Const, 0}, + {"VERASE2", Const, 1}, + {"VINTR", Const, 0}, + {"VKILL", Const, 0}, + {"VLNEXT", Const, 0}, + {"VMIN", Const, 0}, + {"VQUIT", Const, 0}, + {"VREPRINT", Const, 0}, + {"VSTART", Const, 0}, + {"VSTATUS", Const, 1}, + {"VSTOP", Const, 0}, + {"VSUSP", Const, 0}, + {"VSWTC", Const, 0}, + {"VT0", Const, 1}, + {"VT1", Const, 1}, + {"VTDLY", Const, 1}, + {"VTIME", Const, 0}, + {"VWERASE", Const, 0}, + {"VirtualLock", Func, 0}, + {"VirtualUnlock", Func, 0}, + {"WAIT_ABANDONED", Const, 0}, + {"WAIT_FAILED", Const, 0}, + {"WAIT_OBJECT_0", Const, 0}, + {"WAIT_TIMEOUT", Const, 0}, + {"WALL", Const, 0}, + {"WALLSIG", Const, 1}, + {"WALTSIG", Const, 1}, + {"WCLONE", Const, 0}, + {"WCONTINUED", Const, 0}, + {"WCOREFLAG", Const, 0}, + {"WEXITED", Const, 0}, + {"WLINUXCLONE", Const, 0}, + {"WNOHANG", Const, 0}, + {"WNOTHREAD", Const, 0}, + {"WNOWAIT", Const, 0}, + {"WNOZOMBIE", Const, 1}, + {"WOPTSCHECKED", Const, 1}, + {"WORDSIZE", Const, 0}, + {"WSABuf", Type, 0}, + {"WSABuf.Buf", Field, 0}, + {"WSABuf.Len", Field, 0}, + {"WSACleanup", Func, 0}, + {"WSADESCRIPTION_LEN", Const, 0}, + {"WSAData", Type, 0}, + {"WSAData.Description", Field, 0}, + {"WSAData.HighVersion", Field, 0}, + {"WSAData.MaxSockets", Field, 0}, + {"WSAData.MaxUdpDg", Field, 0}, + {"WSAData.SystemStatus", Field, 0}, + {"WSAData.VendorInfo", Field, 0}, + {"WSAData.Version", Field, 0}, + {"WSAEACCES", Const, 2}, + {"WSAECONNABORTED", Const, 9}, + {"WSAECONNRESET", Const, 3}, + {"WSAEnumProtocols", Func, 2}, + {"WSAID_CONNECTEX", Var, 1}, + 
{"WSAIoctl", Func, 0}, + {"WSAPROTOCOL_LEN", Const, 2}, + {"WSAProtocolChain", Type, 2}, + {"WSAProtocolChain.ChainEntries", Field, 2}, + {"WSAProtocolChain.ChainLen", Field, 2}, + {"WSAProtocolInfo", Type, 2}, + {"WSAProtocolInfo.AddressFamily", Field, 2}, + {"WSAProtocolInfo.CatalogEntryId", Field, 2}, + {"WSAProtocolInfo.MaxSockAddr", Field, 2}, + {"WSAProtocolInfo.MessageSize", Field, 2}, + {"WSAProtocolInfo.MinSockAddr", Field, 2}, + {"WSAProtocolInfo.NetworkByteOrder", Field, 2}, + {"WSAProtocolInfo.Protocol", Field, 2}, + {"WSAProtocolInfo.ProtocolChain", Field, 2}, + {"WSAProtocolInfo.ProtocolMaxOffset", Field, 2}, + {"WSAProtocolInfo.ProtocolName", Field, 2}, + {"WSAProtocolInfo.ProviderFlags", Field, 2}, + {"WSAProtocolInfo.ProviderId", Field, 2}, + {"WSAProtocolInfo.ProviderReserved", Field, 2}, + {"WSAProtocolInfo.SecurityScheme", Field, 2}, + {"WSAProtocolInfo.ServiceFlags1", Field, 2}, + {"WSAProtocolInfo.ServiceFlags2", Field, 2}, + {"WSAProtocolInfo.ServiceFlags3", Field, 2}, + {"WSAProtocolInfo.ServiceFlags4", Field, 2}, + {"WSAProtocolInfo.SocketType", Field, 2}, + {"WSAProtocolInfo.Version", Field, 2}, + {"WSARecv", Func, 0}, + {"WSARecvFrom", Func, 0}, + {"WSASYS_STATUS_LEN", Const, 0}, + {"WSASend", Func, 0}, + {"WSASendTo", Func, 0}, + {"WSASendto", Func, 0}, + {"WSAStartup", Func, 0}, + {"WSTOPPED", Const, 0}, + {"WTRAPPED", Const, 1}, + {"WUNTRACED", Const, 0}, + {"Wait4", Func, 0}, + {"WaitForSingleObject", Func, 0}, + {"WaitStatus", Type, 0}, + {"WaitStatus.ExitCode", Field, 0}, + {"Win32FileAttributeData", Type, 0}, + {"Win32FileAttributeData.CreationTime", Field, 0}, + {"Win32FileAttributeData.FileAttributes", Field, 0}, + {"Win32FileAttributeData.FileSizeHigh", Field, 0}, + {"Win32FileAttributeData.FileSizeLow", Field, 0}, + {"Win32FileAttributeData.LastAccessTime", Field, 0}, + {"Win32FileAttributeData.LastWriteTime", Field, 0}, + {"Win32finddata", Type, 0}, + {"Win32finddata.AlternateFileName", Field, 0}, + 
{"Win32finddata.CreationTime", Field, 0}, + {"Win32finddata.FileAttributes", Field, 0}, + {"Win32finddata.FileName", Field, 0}, + {"Win32finddata.FileSizeHigh", Field, 0}, + {"Win32finddata.FileSizeLow", Field, 0}, + {"Win32finddata.LastAccessTime", Field, 0}, + {"Win32finddata.LastWriteTime", Field, 0}, + {"Win32finddata.Reserved0", Field, 0}, + {"Win32finddata.Reserved1", Field, 0}, + {"Write", Func, 0}, + {"WriteConsole", Func, 1}, + {"WriteFile", Func, 0}, + {"X509_ASN_ENCODING", Const, 0}, + {"XCASE", Const, 0}, + {"XP1_CONNECTIONLESS", Const, 2}, + {"XP1_CONNECT_DATA", Const, 2}, + {"XP1_DISCONNECT_DATA", Const, 2}, + {"XP1_EXPEDITED_DATA", Const, 2}, + {"XP1_GRACEFUL_CLOSE", Const, 2}, + {"XP1_GUARANTEED_DELIVERY", Const, 2}, + {"XP1_GUARANTEED_ORDER", Const, 2}, + {"XP1_IFS_HANDLES", Const, 2}, + {"XP1_MESSAGE_ORIENTED", Const, 2}, + {"XP1_MULTIPOINT_CONTROL_PLANE", Const, 2}, + {"XP1_MULTIPOINT_DATA_PLANE", Const, 2}, + {"XP1_PARTIAL_MESSAGE", Const, 2}, + {"XP1_PSEUDO_STREAM", Const, 2}, + {"XP1_QOS_SUPPORTED", Const, 2}, + {"XP1_SAN_SUPPORT_SDP", Const, 2}, + {"XP1_SUPPORT_BROADCAST", Const, 2}, + {"XP1_SUPPORT_MULTIPOINT", Const, 2}, + {"XP1_UNI_RECV", Const, 2}, + {"XP1_UNI_SEND", Const, 2}, + }, + "syscall/js": { + {"CopyBytesToGo", Func, 0}, + {"CopyBytesToJS", Func, 0}, + {"Error", Type, 0}, + {"Func", Type, 0}, + {"FuncOf", Func, 0}, + {"Global", Func, 0}, + {"Null", Func, 0}, + {"Type", Type, 0}, + {"TypeBoolean", Const, 0}, + {"TypeFunction", Const, 0}, + {"TypeNull", Const, 0}, + {"TypeNumber", Const, 0}, + {"TypeObject", Const, 0}, + {"TypeString", Const, 0}, + {"TypeSymbol", Const, 0}, + {"TypeUndefined", Const, 0}, + {"Undefined", Func, 0}, + {"Value", Type, 0}, + {"ValueError", Type, 0}, + {"ValueOf", Func, 0}, + }, + "testing": { + {"(*B).Cleanup", Method, 14}, + {"(*B).Elapsed", Method, 20}, + {"(*B).Error", Method, 0}, + {"(*B).Errorf", Method, 0}, + {"(*B).Fail", Method, 0}, + {"(*B).FailNow", Method, 0}, + {"(*B).Failed", Method, 0}, + 
{"(*B).Fatal", Method, 0}, + {"(*B).Fatalf", Method, 0}, + {"(*B).Helper", Method, 9}, + {"(*B).Log", Method, 0}, + {"(*B).Logf", Method, 0}, + {"(*B).Name", Method, 8}, + {"(*B).ReportAllocs", Method, 1}, + {"(*B).ReportMetric", Method, 13}, + {"(*B).ResetTimer", Method, 0}, + {"(*B).Run", Method, 7}, + {"(*B).RunParallel", Method, 3}, + {"(*B).SetBytes", Method, 0}, + {"(*B).SetParallelism", Method, 3}, + {"(*B).Setenv", Method, 17}, + {"(*B).Skip", Method, 1}, + {"(*B).SkipNow", Method, 1}, + {"(*B).Skipf", Method, 1}, + {"(*B).Skipped", Method, 1}, + {"(*B).StartTimer", Method, 0}, + {"(*B).StopTimer", Method, 0}, + {"(*B).TempDir", Method, 15}, + {"(*F).Add", Method, 18}, + {"(*F).Cleanup", Method, 18}, + {"(*F).Error", Method, 18}, + {"(*F).Errorf", Method, 18}, + {"(*F).Fail", Method, 18}, + {"(*F).FailNow", Method, 18}, + {"(*F).Failed", Method, 18}, + {"(*F).Fatal", Method, 18}, + {"(*F).Fatalf", Method, 18}, + {"(*F).Fuzz", Method, 18}, + {"(*F).Helper", Method, 18}, + {"(*F).Log", Method, 18}, + {"(*F).Logf", Method, 18}, + {"(*F).Name", Method, 18}, + {"(*F).Setenv", Method, 18}, + {"(*F).Skip", Method, 18}, + {"(*F).SkipNow", Method, 18}, + {"(*F).Skipf", Method, 18}, + {"(*F).Skipped", Method, 18}, + {"(*F).TempDir", Method, 18}, + {"(*M).Run", Method, 4}, + {"(*PB).Next", Method, 3}, + {"(*T).Cleanup", Method, 14}, + {"(*T).Deadline", Method, 15}, + {"(*T).Error", Method, 0}, + {"(*T).Errorf", Method, 0}, + {"(*T).Fail", Method, 0}, + {"(*T).FailNow", Method, 0}, + {"(*T).Failed", Method, 0}, + {"(*T).Fatal", Method, 0}, + {"(*T).Fatalf", Method, 0}, + {"(*T).Helper", Method, 9}, + {"(*T).Log", Method, 0}, + {"(*T).Logf", Method, 0}, + {"(*T).Name", Method, 8}, + {"(*T).Parallel", Method, 0}, + {"(*T).Run", Method, 7}, + {"(*T).Setenv", Method, 17}, + {"(*T).Skip", Method, 1}, + {"(*T).SkipNow", Method, 1}, + {"(*T).Skipf", Method, 1}, + {"(*T).Skipped", Method, 1}, + {"(*T).TempDir", Method, 15}, + {"(BenchmarkResult).AllocedBytesPerOp", Method, 1}, 
+ {"(BenchmarkResult).AllocsPerOp", Method, 1}, + {"(BenchmarkResult).MemString", Method, 1}, + {"(BenchmarkResult).NsPerOp", Method, 0}, + {"(BenchmarkResult).String", Method, 0}, + {"AllocsPerRun", Func, 1}, + {"B", Type, 0}, + {"B.N", Field, 0}, + {"Benchmark", Func, 0}, + {"BenchmarkResult", Type, 0}, + {"BenchmarkResult.Bytes", Field, 0}, + {"BenchmarkResult.Extra", Field, 13}, + {"BenchmarkResult.MemAllocs", Field, 1}, + {"BenchmarkResult.MemBytes", Field, 1}, + {"BenchmarkResult.N", Field, 0}, + {"BenchmarkResult.T", Field, 0}, + {"Cover", Type, 2}, + {"Cover.Blocks", Field, 2}, + {"Cover.Counters", Field, 2}, + {"Cover.CoveredPackages", Field, 2}, + {"Cover.Mode", Field, 2}, + {"CoverBlock", Type, 2}, + {"CoverBlock.Col0", Field, 2}, + {"CoverBlock.Col1", Field, 2}, + {"CoverBlock.Line0", Field, 2}, + {"CoverBlock.Line1", Field, 2}, + {"CoverBlock.Stmts", Field, 2}, + {"CoverMode", Func, 8}, + {"Coverage", Func, 4}, + {"F", Type, 18}, + {"Init", Func, 13}, + {"InternalBenchmark", Type, 0}, + {"InternalBenchmark.F", Field, 0}, + {"InternalBenchmark.Name", Field, 0}, + {"InternalExample", Type, 0}, + {"InternalExample.F", Field, 0}, + {"InternalExample.Name", Field, 0}, + {"InternalExample.Output", Field, 0}, + {"InternalExample.Unordered", Field, 7}, + {"InternalFuzzTarget", Type, 18}, + {"InternalFuzzTarget.Fn", Field, 18}, + {"InternalFuzzTarget.Name", Field, 18}, + {"InternalTest", Type, 0}, + {"InternalTest.F", Field, 0}, + {"InternalTest.Name", Field, 0}, + {"M", Type, 4}, + {"Main", Func, 0}, + {"MainStart", Func, 4}, + {"PB", Type, 3}, + {"RegisterCover", Func, 2}, + {"RunBenchmarks", Func, 0}, + {"RunExamples", Func, 0}, + {"RunTests", Func, 0}, + {"Short", Func, 0}, + {"T", Type, 0}, + {"TB", Type, 2}, + {"Testing", Func, 21}, + {"Verbose", Func, 1}, + }, + "testing/fstest": { + {"(MapFS).Glob", Method, 16}, + {"(MapFS).Open", Method, 16}, + {"(MapFS).ReadDir", Method, 16}, + {"(MapFS).ReadFile", Method, 16}, + {"(MapFS).Stat", Method, 16}, + 
{"(MapFS).Sub", Method, 16}, + {"MapFS", Type, 16}, + {"MapFile", Type, 16}, + {"MapFile.Data", Field, 16}, + {"MapFile.ModTime", Field, 16}, + {"MapFile.Mode", Field, 16}, + {"MapFile.Sys", Field, 16}, + {"TestFS", Func, 16}, + }, + "testing/iotest": { + {"DataErrReader", Func, 0}, + {"ErrReader", Func, 16}, + {"ErrTimeout", Var, 0}, + {"HalfReader", Func, 0}, + {"NewReadLogger", Func, 0}, + {"NewWriteLogger", Func, 0}, + {"OneByteReader", Func, 0}, + {"TestReader", Func, 16}, + {"TimeoutReader", Func, 0}, + {"TruncateWriter", Func, 0}, + }, + "testing/quick": { + {"(*CheckEqualError).Error", Method, 0}, + {"(*CheckError).Error", Method, 0}, + {"(SetupError).Error", Method, 0}, + {"Check", Func, 0}, + {"CheckEqual", Func, 0}, + {"CheckEqualError", Type, 0}, + {"CheckEqualError.CheckError", Field, 0}, + {"CheckEqualError.Out1", Field, 0}, + {"CheckEqualError.Out2", Field, 0}, + {"CheckError", Type, 0}, + {"CheckError.Count", Field, 0}, + {"CheckError.In", Field, 0}, + {"Config", Type, 0}, + {"Config.MaxCount", Field, 0}, + {"Config.MaxCountScale", Field, 0}, + {"Config.Rand", Field, 0}, + {"Config.Values", Field, 0}, + {"Generator", Type, 0}, + {"SetupError", Type, 0}, + {"Value", Func, 0}, + }, + "testing/slogtest": { + {"Run", Func, 22}, + {"TestHandler", Func, 21}, + }, + "text/scanner": { + {"(*Position).IsValid", Method, 0}, + {"(*Scanner).Init", Method, 0}, + {"(*Scanner).IsValid", Method, 0}, + {"(*Scanner).Next", Method, 0}, + {"(*Scanner).Peek", Method, 0}, + {"(*Scanner).Pos", Method, 0}, + {"(*Scanner).Scan", Method, 0}, + {"(*Scanner).TokenText", Method, 0}, + {"(Position).String", Method, 0}, + {"(Scanner).String", Method, 0}, + {"Char", Const, 0}, + {"Comment", Const, 0}, + {"EOF", Const, 0}, + {"Float", Const, 0}, + {"GoTokens", Const, 0}, + {"GoWhitespace", Const, 0}, + {"Ident", Const, 0}, + {"Int", Const, 0}, + {"Position", Type, 0}, + {"Position.Column", Field, 0}, + {"Position.Filename", Field, 0}, + {"Position.Line", Field, 0}, + 
{"Position.Offset", Field, 0}, + {"RawString", Const, 0}, + {"ScanChars", Const, 0}, + {"ScanComments", Const, 0}, + {"ScanFloats", Const, 0}, + {"ScanIdents", Const, 0}, + {"ScanInts", Const, 0}, + {"ScanRawStrings", Const, 0}, + {"ScanStrings", Const, 0}, + {"Scanner", Type, 0}, + {"Scanner.Error", Field, 0}, + {"Scanner.ErrorCount", Field, 0}, + {"Scanner.IsIdentRune", Field, 4}, + {"Scanner.Mode", Field, 0}, + {"Scanner.Position", Field, 0}, + {"Scanner.Whitespace", Field, 0}, + {"SkipComments", Const, 0}, + {"String", Const, 0}, + {"TokenString", Func, 0}, + }, + "text/tabwriter": { + {"(*Writer).Flush", Method, 0}, + {"(*Writer).Init", Method, 0}, + {"(*Writer).Write", Method, 0}, + {"AlignRight", Const, 0}, + {"Debug", Const, 0}, + {"DiscardEmptyColumns", Const, 0}, + {"Escape", Const, 0}, + {"FilterHTML", Const, 0}, + {"NewWriter", Func, 0}, + {"StripEscape", Const, 0}, + {"TabIndent", Const, 0}, + {"Writer", Type, 0}, + }, + "text/template": { + {"(*Template).AddParseTree", Method, 0}, + {"(*Template).Clone", Method, 0}, + {"(*Template).DefinedTemplates", Method, 5}, + {"(*Template).Delims", Method, 0}, + {"(*Template).Execute", Method, 0}, + {"(*Template).ExecuteTemplate", Method, 0}, + {"(*Template).Funcs", Method, 0}, + {"(*Template).Lookup", Method, 0}, + {"(*Template).Name", Method, 0}, + {"(*Template).New", Method, 0}, + {"(*Template).Option", Method, 5}, + {"(*Template).Parse", Method, 0}, + {"(*Template).ParseFS", Method, 16}, + {"(*Template).ParseFiles", Method, 0}, + {"(*Template).ParseGlob", Method, 0}, + {"(*Template).Templates", Method, 0}, + {"(ExecError).Error", Method, 6}, + {"(ExecError).Unwrap", Method, 13}, + {"(Template).Copy", Method, 2}, + {"(Template).ErrorContext", Method, 1}, + {"ExecError", Type, 6}, + {"ExecError.Err", Field, 6}, + {"ExecError.Name", Field, 6}, + {"FuncMap", Type, 0}, + {"HTMLEscape", Func, 0}, + {"HTMLEscapeString", Func, 0}, + {"HTMLEscaper", Func, 0}, + {"IsTrue", Func, 6}, + {"JSEscape", Func, 0}, + 
{"JSEscapeString", Func, 0}, + {"JSEscaper", Func, 0}, + {"Must", Func, 0}, + {"New", Func, 0}, + {"ParseFS", Func, 16}, + {"ParseFiles", Func, 0}, + {"ParseGlob", Func, 0}, + {"Template", Type, 0}, + {"Template.Tree", Field, 0}, + {"URLQueryEscaper", Func, 0}, + }, + "text/template/parse": { + {"(*ActionNode).Copy", Method, 0}, + {"(*ActionNode).String", Method, 0}, + {"(*BoolNode).Copy", Method, 0}, + {"(*BoolNode).String", Method, 0}, + {"(*BranchNode).Copy", Method, 4}, + {"(*BranchNode).String", Method, 0}, + {"(*BreakNode).Copy", Method, 18}, + {"(*BreakNode).String", Method, 18}, + {"(*ChainNode).Add", Method, 1}, + {"(*ChainNode).Copy", Method, 1}, + {"(*ChainNode).String", Method, 1}, + {"(*CommandNode).Copy", Method, 0}, + {"(*CommandNode).String", Method, 0}, + {"(*CommentNode).Copy", Method, 16}, + {"(*CommentNode).String", Method, 16}, + {"(*ContinueNode).Copy", Method, 18}, + {"(*ContinueNode).String", Method, 18}, + {"(*DotNode).Copy", Method, 0}, + {"(*DotNode).String", Method, 0}, + {"(*DotNode).Type", Method, 0}, + {"(*FieldNode).Copy", Method, 0}, + {"(*FieldNode).String", Method, 0}, + {"(*IdentifierNode).Copy", Method, 0}, + {"(*IdentifierNode).SetPos", Method, 1}, + {"(*IdentifierNode).SetTree", Method, 4}, + {"(*IdentifierNode).String", Method, 0}, + {"(*IfNode).Copy", Method, 0}, + {"(*IfNode).String", Method, 0}, + {"(*ListNode).Copy", Method, 0}, + {"(*ListNode).CopyList", Method, 0}, + {"(*ListNode).String", Method, 0}, + {"(*NilNode).Copy", Method, 1}, + {"(*NilNode).String", Method, 1}, + {"(*NilNode).Type", Method, 1}, + {"(*NumberNode).Copy", Method, 0}, + {"(*NumberNode).String", Method, 0}, + {"(*PipeNode).Copy", Method, 0}, + {"(*PipeNode).CopyPipe", Method, 0}, + {"(*PipeNode).String", Method, 0}, + {"(*RangeNode).Copy", Method, 0}, + {"(*RangeNode).String", Method, 0}, + {"(*StringNode).Copy", Method, 0}, + {"(*StringNode).String", Method, 0}, + {"(*TemplateNode).Copy", Method, 0}, + {"(*TemplateNode).String", Method, 0}, + 
{"(*TextNode).Copy", Method, 0}, + {"(*TextNode).String", Method, 0}, + {"(*Tree).Copy", Method, 2}, + {"(*Tree).ErrorContext", Method, 1}, + {"(*Tree).Parse", Method, 0}, + {"(*VariableNode).Copy", Method, 0}, + {"(*VariableNode).String", Method, 0}, + {"(*WithNode).Copy", Method, 0}, + {"(*WithNode).String", Method, 0}, + {"(ActionNode).Position", Method, 1}, + {"(ActionNode).Type", Method, 0}, + {"(BoolNode).Position", Method, 1}, + {"(BoolNode).Type", Method, 0}, + {"(BranchNode).Position", Method, 1}, + {"(BranchNode).Type", Method, 0}, + {"(BreakNode).Position", Method, 18}, + {"(BreakNode).Type", Method, 18}, + {"(ChainNode).Position", Method, 1}, + {"(ChainNode).Type", Method, 1}, + {"(CommandNode).Position", Method, 1}, + {"(CommandNode).Type", Method, 0}, + {"(CommentNode).Position", Method, 16}, + {"(CommentNode).Type", Method, 16}, + {"(ContinueNode).Position", Method, 18}, + {"(ContinueNode).Type", Method, 18}, + {"(DotNode).Position", Method, 1}, + {"(FieldNode).Position", Method, 1}, + {"(FieldNode).Type", Method, 0}, + {"(IdentifierNode).Position", Method, 1}, + {"(IdentifierNode).Type", Method, 0}, + {"(IfNode).Position", Method, 1}, + {"(IfNode).Type", Method, 0}, + {"(ListNode).Position", Method, 1}, + {"(ListNode).Type", Method, 0}, + {"(NilNode).Position", Method, 1}, + {"(NodeType).Type", Method, 0}, + {"(NumberNode).Position", Method, 1}, + {"(NumberNode).Type", Method, 0}, + {"(PipeNode).Position", Method, 1}, + {"(PipeNode).Type", Method, 0}, + {"(Pos).Position", Method, 1}, + {"(RangeNode).Position", Method, 1}, + {"(RangeNode).Type", Method, 0}, + {"(StringNode).Position", Method, 1}, + {"(StringNode).Type", Method, 0}, + {"(TemplateNode).Position", Method, 1}, + {"(TemplateNode).Type", Method, 0}, + {"(TextNode).Position", Method, 1}, + {"(TextNode).Type", Method, 0}, + {"(VariableNode).Position", Method, 1}, + {"(VariableNode).Type", Method, 0}, + {"(WithNode).Position", Method, 1}, + {"(WithNode).Type", Method, 0}, + {"ActionNode", 
Type, 0}, + {"ActionNode.Line", Field, 0}, + {"ActionNode.NodeType", Field, 0}, + {"ActionNode.Pipe", Field, 0}, + {"ActionNode.Pos", Field, 1}, + {"BoolNode", Type, 0}, + {"BoolNode.NodeType", Field, 0}, + {"BoolNode.Pos", Field, 1}, + {"BoolNode.True", Field, 0}, + {"BranchNode", Type, 0}, + {"BranchNode.ElseList", Field, 0}, + {"BranchNode.Line", Field, 0}, + {"BranchNode.List", Field, 0}, + {"BranchNode.NodeType", Field, 0}, + {"BranchNode.Pipe", Field, 0}, + {"BranchNode.Pos", Field, 1}, + {"BreakNode", Type, 18}, + {"BreakNode.Line", Field, 18}, + {"BreakNode.NodeType", Field, 18}, + {"BreakNode.Pos", Field, 18}, + {"ChainNode", Type, 1}, + {"ChainNode.Field", Field, 1}, + {"ChainNode.Node", Field, 1}, + {"ChainNode.NodeType", Field, 1}, + {"ChainNode.Pos", Field, 1}, + {"CommandNode", Type, 0}, + {"CommandNode.Args", Field, 0}, + {"CommandNode.NodeType", Field, 0}, + {"CommandNode.Pos", Field, 1}, + {"CommentNode", Type, 16}, + {"CommentNode.NodeType", Field, 16}, + {"CommentNode.Pos", Field, 16}, + {"CommentNode.Text", Field, 16}, + {"ContinueNode", Type, 18}, + {"ContinueNode.Line", Field, 18}, + {"ContinueNode.NodeType", Field, 18}, + {"ContinueNode.Pos", Field, 18}, + {"DotNode", Type, 0}, + {"DotNode.NodeType", Field, 4}, + {"DotNode.Pos", Field, 1}, + {"FieldNode", Type, 0}, + {"FieldNode.Ident", Field, 0}, + {"FieldNode.NodeType", Field, 0}, + {"FieldNode.Pos", Field, 1}, + {"IdentifierNode", Type, 0}, + {"IdentifierNode.Ident", Field, 0}, + {"IdentifierNode.NodeType", Field, 0}, + {"IdentifierNode.Pos", Field, 1}, + {"IfNode", Type, 0}, + {"IfNode.BranchNode", Field, 0}, + {"IsEmptyTree", Func, 0}, + {"ListNode", Type, 0}, + {"ListNode.NodeType", Field, 0}, + {"ListNode.Nodes", Field, 0}, + {"ListNode.Pos", Field, 1}, + {"Mode", Type, 16}, + {"New", Func, 0}, + {"NewIdentifier", Func, 0}, + {"NilNode", Type, 1}, + {"NilNode.NodeType", Field, 4}, + {"NilNode.Pos", Field, 1}, + {"Node", Type, 0}, + {"NodeAction", Const, 0}, + {"NodeBool", Const, 0}, + 
{"NodeBreak", Const, 18}, + {"NodeChain", Const, 1}, + {"NodeCommand", Const, 0}, + {"NodeComment", Const, 16}, + {"NodeContinue", Const, 18}, + {"NodeDot", Const, 0}, + {"NodeField", Const, 0}, + {"NodeIdentifier", Const, 0}, + {"NodeIf", Const, 0}, + {"NodeList", Const, 0}, + {"NodeNil", Const, 1}, + {"NodeNumber", Const, 0}, + {"NodePipe", Const, 0}, + {"NodeRange", Const, 0}, + {"NodeString", Const, 0}, + {"NodeTemplate", Const, 0}, + {"NodeText", Const, 0}, + {"NodeType", Type, 0}, + {"NodeVariable", Const, 0}, + {"NodeWith", Const, 0}, + {"NumberNode", Type, 0}, + {"NumberNode.Complex128", Field, 0}, + {"NumberNode.Float64", Field, 0}, + {"NumberNode.Int64", Field, 0}, + {"NumberNode.IsComplex", Field, 0}, + {"NumberNode.IsFloat", Field, 0}, + {"NumberNode.IsInt", Field, 0}, + {"NumberNode.IsUint", Field, 0}, + {"NumberNode.NodeType", Field, 0}, + {"NumberNode.Pos", Field, 1}, + {"NumberNode.Text", Field, 0}, + {"NumberNode.Uint64", Field, 0}, + {"Parse", Func, 0}, + {"ParseComments", Const, 16}, + {"PipeNode", Type, 0}, + {"PipeNode.Cmds", Field, 0}, + {"PipeNode.Decl", Field, 0}, + {"PipeNode.IsAssign", Field, 11}, + {"PipeNode.Line", Field, 0}, + {"PipeNode.NodeType", Field, 0}, + {"PipeNode.Pos", Field, 1}, + {"Pos", Type, 1}, + {"RangeNode", Type, 0}, + {"RangeNode.BranchNode", Field, 0}, + {"SkipFuncCheck", Const, 17}, + {"StringNode", Type, 0}, + {"StringNode.NodeType", Field, 0}, + {"StringNode.Pos", Field, 1}, + {"StringNode.Quoted", Field, 0}, + {"StringNode.Text", Field, 0}, + {"TemplateNode", Type, 0}, + {"TemplateNode.Line", Field, 0}, + {"TemplateNode.Name", Field, 0}, + {"TemplateNode.NodeType", Field, 0}, + {"TemplateNode.Pipe", Field, 0}, + {"TemplateNode.Pos", Field, 1}, + {"TextNode", Type, 0}, + {"TextNode.NodeType", Field, 0}, + {"TextNode.Pos", Field, 1}, + {"TextNode.Text", Field, 0}, + {"Tree", Type, 0}, + {"Tree.Mode", Field, 16}, + {"Tree.Name", Field, 0}, + {"Tree.ParseName", Field, 1}, + {"Tree.Root", Field, 0}, + {"VariableNode", 
Type, 0}, + {"VariableNode.Ident", Field, 0}, + {"VariableNode.NodeType", Field, 0}, + {"VariableNode.Pos", Field, 1}, + {"WithNode", Type, 0}, + {"WithNode.BranchNode", Field, 0}, + }, + "time": { + {"(*Location).String", Method, 0}, + {"(*ParseError).Error", Method, 0}, + {"(*Ticker).Reset", Method, 15}, + {"(*Ticker).Stop", Method, 0}, + {"(*Time).GobDecode", Method, 0}, + {"(*Time).UnmarshalBinary", Method, 2}, + {"(*Time).UnmarshalJSON", Method, 0}, + {"(*Time).UnmarshalText", Method, 2}, + {"(*Timer).Reset", Method, 1}, + {"(*Timer).Stop", Method, 0}, + {"(Duration).Abs", Method, 19}, + {"(Duration).Hours", Method, 0}, + {"(Duration).Microseconds", Method, 13}, + {"(Duration).Milliseconds", Method, 13}, + {"(Duration).Minutes", Method, 0}, + {"(Duration).Nanoseconds", Method, 0}, + {"(Duration).Round", Method, 9}, + {"(Duration).Seconds", Method, 0}, + {"(Duration).String", Method, 0}, + {"(Duration).Truncate", Method, 9}, + {"(Month).String", Method, 0}, + {"(Time).Add", Method, 0}, + {"(Time).AddDate", Method, 0}, + {"(Time).After", Method, 0}, + {"(Time).AppendFormat", Method, 5}, + {"(Time).Before", Method, 0}, + {"(Time).Clock", Method, 0}, + {"(Time).Compare", Method, 20}, + {"(Time).Date", Method, 0}, + {"(Time).Day", Method, 0}, + {"(Time).Equal", Method, 0}, + {"(Time).Format", Method, 0}, + {"(Time).GoString", Method, 17}, + {"(Time).GobEncode", Method, 0}, + {"(Time).Hour", Method, 0}, + {"(Time).ISOWeek", Method, 0}, + {"(Time).In", Method, 0}, + {"(Time).IsDST", Method, 17}, + {"(Time).IsZero", Method, 0}, + {"(Time).Local", Method, 0}, + {"(Time).Location", Method, 0}, + {"(Time).MarshalBinary", Method, 2}, + {"(Time).MarshalJSON", Method, 0}, + {"(Time).MarshalText", Method, 2}, + {"(Time).Minute", Method, 0}, + {"(Time).Month", Method, 0}, + {"(Time).Nanosecond", Method, 0}, + {"(Time).Round", Method, 1}, + {"(Time).Second", Method, 0}, + {"(Time).String", Method, 0}, + {"(Time).Sub", Method, 0}, + {"(Time).Truncate", Method, 1}, + 
{"(Time).UTC", Method, 0}, + {"(Time).Unix", Method, 0}, + {"(Time).UnixMicro", Method, 17}, + {"(Time).UnixMilli", Method, 17}, + {"(Time).UnixNano", Method, 0}, + {"(Time).Weekday", Method, 0}, + {"(Time).Year", Method, 0}, + {"(Time).YearDay", Method, 1}, + {"(Time).Zone", Method, 0}, + {"(Time).ZoneBounds", Method, 19}, + {"(Weekday).String", Method, 0}, + {"ANSIC", Const, 0}, + {"After", Func, 0}, + {"AfterFunc", Func, 0}, + {"April", Const, 0}, + {"August", Const, 0}, + {"Date", Func, 0}, + {"DateOnly", Const, 20}, + {"DateTime", Const, 20}, + {"December", Const, 0}, + {"Duration", Type, 0}, + {"February", Const, 0}, + {"FixedZone", Func, 0}, + {"Friday", Const, 0}, + {"Hour", Const, 0}, + {"January", Const, 0}, + {"July", Const, 0}, + {"June", Const, 0}, + {"Kitchen", Const, 0}, + {"Layout", Const, 17}, + {"LoadLocation", Func, 0}, + {"LoadLocationFromTZData", Func, 10}, + {"Local", Var, 0}, + {"Location", Type, 0}, + {"March", Const, 0}, + {"May", Const, 0}, + {"Microsecond", Const, 0}, + {"Millisecond", Const, 0}, + {"Minute", Const, 0}, + {"Monday", Const, 0}, + {"Month", Type, 0}, + {"Nanosecond", Const, 0}, + {"NewTicker", Func, 0}, + {"NewTimer", Func, 0}, + {"November", Const, 0}, + {"Now", Func, 0}, + {"October", Const, 0}, + {"Parse", Func, 0}, + {"ParseDuration", Func, 0}, + {"ParseError", Type, 0}, + {"ParseError.Layout", Field, 0}, + {"ParseError.LayoutElem", Field, 0}, + {"ParseError.Message", Field, 0}, + {"ParseError.Value", Field, 0}, + {"ParseError.ValueElem", Field, 0}, + {"ParseInLocation", Func, 1}, + {"RFC1123", Const, 0}, + {"RFC1123Z", Const, 0}, + {"RFC3339", Const, 0}, + {"RFC3339Nano", Const, 0}, + {"RFC822", Const, 0}, + {"RFC822Z", Const, 0}, + {"RFC850", Const, 0}, + {"RubyDate", Const, 0}, + {"Saturday", Const, 0}, + {"Second", Const, 0}, + {"September", Const, 0}, + {"Since", Func, 0}, + {"Sleep", Func, 0}, + {"Stamp", Const, 0}, + {"StampMicro", Const, 0}, + {"StampMilli", Const, 0}, + {"StampNano", Const, 0}, + {"Sunday", 
Const, 0}, + {"Thursday", Const, 0}, + {"Tick", Func, 0}, + {"Ticker", Type, 0}, + {"Ticker.C", Field, 0}, + {"Time", Type, 0}, + {"TimeOnly", Const, 20}, + {"Timer", Type, 0}, + {"Timer.C", Field, 0}, + {"Tuesday", Const, 0}, + {"UTC", Var, 0}, + {"Unix", Func, 0}, + {"UnixDate", Const, 0}, + {"UnixMicro", Func, 17}, + {"UnixMilli", Func, 17}, + {"Until", Func, 8}, + {"Wednesday", Const, 0}, + {"Weekday", Type, 0}, + }, + "unicode": { + {"(SpecialCase).ToLower", Method, 0}, + {"(SpecialCase).ToTitle", Method, 0}, + {"(SpecialCase).ToUpper", Method, 0}, + {"ASCII_Hex_Digit", Var, 0}, + {"Adlam", Var, 7}, + {"Ahom", Var, 5}, + {"Anatolian_Hieroglyphs", Var, 5}, + {"Arabic", Var, 0}, + {"Armenian", Var, 0}, + {"Avestan", Var, 0}, + {"AzeriCase", Var, 0}, + {"Balinese", Var, 0}, + {"Bamum", Var, 0}, + {"Bassa_Vah", Var, 4}, + {"Batak", Var, 0}, + {"Bengali", Var, 0}, + {"Bhaiksuki", Var, 7}, + {"Bidi_Control", Var, 0}, + {"Bopomofo", Var, 0}, + {"Brahmi", Var, 0}, + {"Braille", Var, 0}, + {"Buginese", Var, 0}, + {"Buhid", Var, 0}, + {"C", Var, 0}, + {"Canadian_Aboriginal", Var, 0}, + {"Carian", Var, 0}, + {"CaseRange", Type, 0}, + {"CaseRange.Delta", Field, 0}, + {"CaseRange.Hi", Field, 0}, + {"CaseRange.Lo", Field, 0}, + {"CaseRanges", Var, 0}, + {"Categories", Var, 0}, + {"Caucasian_Albanian", Var, 4}, + {"Cc", Var, 0}, + {"Cf", Var, 0}, + {"Chakma", Var, 1}, + {"Cham", Var, 0}, + {"Cherokee", Var, 0}, + {"Chorasmian", Var, 16}, + {"Co", Var, 0}, + {"Common", Var, 0}, + {"Coptic", Var, 0}, + {"Cs", Var, 0}, + {"Cuneiform", Var, 0}, + {"Cypriot", Var, 0}, + {"Cypro_Minoan", Var, 21}, + {"Cyrillic", Var, 0}, + {"Dash", Var, 0}, + {"Deprecated", Var, 0}, + {"Deseret", Var, 0}, + {"Devanagari", Var, 0}, + {"Diacritic", Var, 0}, + {"Digit", Var, 0}, + {"Dives_Akuru", Var, 16}, + {"Dogra", Var, 13}, + {"Duployan", Var, 4}, + {"Egyptian_Hieroglyphs", Var, 0}, + {"Elbasan", Var, 4}, + {"Elymaic", Var, 14}, + {"Ethiopic", Var, 0}, + {"Extender", Var, 0}, + {"FoldCategory", 
Var, 0}, + {"FoldScript", Var, 0}, + {"Georgian", Var, 0}, + {"Glagolitic", Var, 0}, + {"Gothic", Var, 0}, + {"Grantha", Var, 4}, + {"GraphicRanges", Var, 0}, + {"Greek", Var, 0}, + {"Gujarati", Var, 0}, + {"Gunjala_Gondi", Var, 13}, + {"Gurmukhi", Var, 0}, + {"Han", Var, 0}, + {"Hangul", Var, 0}, + {"Hanifi_Rohingya", Var, 13}, + {"Hanunoo", Var, 0}, + {"Hatran", Var, 5}, + {"Hebrew", Var, 0}, + {"Hex_Digit", Var, 0}, + {"Hiragana", Var, 0}, + {"Hyphen", Var, 0}, + {"IDS_Binary_Operator", Var, 0}, + {"IDS_Trinary_Operator", Var, 0}, + {"Ideographic", Var, 0}, + {"Imperial_Aramaic", Var, 0}, + {"In", Func, 2}, + {"Inherited", Var, 0}, + {"Inscriptional_Pahlavi", Var, 0}, + {"Inscriptional_Parthian", Var, 0}, + {"Is", Func, 0}, + {"IsControl", Func, 0}, + {"IsDigit", Func, 0}, + {"IsGraphic", Func, 0}, + {"IsLetter", Func, 0}, + {"IsLower", Func, 0}, + {"IsMark", Func, 0}, + {"IsNumber", Func, 0}, + {"IsOneOf", Func, 0}, + {"IsPrint", Func, 0}, + {"IsPunct", Func, 0}, + {"IsSpace", Func, 0}, + {"IsSymbol", Func, 0}, + {"IsTitle", Func, 0}, + {"IsUpper", Func, 0}, + {"Javanese", Var, 0}, + {"Join_Control", Var, 0}, + {"Kaithi", Var, 0}, + {"Kannada", Var, 0}, + {"Katakana", Var, 0}, + {"Kawi", Var, 21}, + {"Kayah_Li", Var, 0}, + {"Kharoshthi", Var, 0}, + {"Khitan_Small_Script", Var, 16}, + {"Khmer", Var, 0}, + {"Khojki", Var, 4}, + {"Khudawadi", Var, 4}, + {"L", Var, 0}, + {"Lao", Var, 0}, + {"Latin", Var, 0}, + {"Lepcha", Var, 0}, + {"Letter", Var, 0}, + {"Limbu", Var, 0}, + {"Linear_A", Var, 4}, + {"Linear_B", Var, 0}, + {"Lisu", Var, 0}, + {"Ll", Var, 0}, + {"Lm", Var, 0}, + {"Lo", Var, 0}, + {"Logical_Order_Exception", Var, 0}, + {"Lower", Var, 0}, + {"LowerCase", Const, 0}, + {"Lt", Var, 0}, + {"Lu", Var, 0}, + {"Lycian", Var, 0}, + {"Lydian", Var, 0}, + {"M", Var, 0}, + {"Mahajani", Var, 4}, + {"Makasar", Var, 13}, + {"Malayalam", Var, 0}, + {"Mandaic", Var, 0}, + {"Manichaean", Var, 4}, + {"Marchen", Var, 7}, + {"Mark", Var, 0}, + {"Masaram_Gondi", Var, 10}, + 
{"MaxASCII", Const, 0}, + {"MaxCase", Const, 0}, + {"MaxLatin1", Const, 0}, + {"MaxRune", Const, 0}, + {"Mc", Var, 0}, + {"Me", Var, 0}, + {"Medefaidrin", Var, 13}, + {"Meetei_Mayek", Var, 0}, + {"Mende_Kikakui", Var, 4}, + {"Meroitic_Cursive", Var, 1}, + {"Meroitic_Hieroglyphs", Var, 1}, + {"Miao", Var, 1}, + {"Mn", Var, 0}, + {"Modi", Var, 4}, + {"Mongolian", Var, 0}, + {"Mro", Var, 4}, + {"Multani", Var, 5}, + {"Myanmar", Var, 0}, + {"N", Var, 0}, + {"Nabataean", Var, 4}, + {"Nag_Mundari", Var, 21}, + {"Nandinagari", Var, 14}, + {"Nd", Var, 0}, + {"New_Tai_Lue", Var, 0}, + {"Newa", Var, 7}, + {"Nko", Var, 0}, + {"Nl", Var, 0}, + {"No", Var, 0}, + {"Noncharacter_Code_Point", Var, 0}, + {"Number", Var, 0}, + {"Nushu", Var, 10}, + {"Nyiakeng_Puachue_Hmong", Var, 14}, + {"Ogham", Var, 0}, + {"Ol_Chiki", Var, 0}, + {"Old_Hungarian", Var, 5}, + {"Old_Italic", Var, 0}, + {"Old_North_Arabian", Var, 4}, + {"Old_Permic", Var, 4}, + {"Old_Persian", Var, 0}, + {"Old_Sogdian", Var, 13}, + {"Old_South_Arabian", Var, 0}, + {"Old_Turkic", Var, 0}, + {"Old_Uyghur", Var, 21}, + {"Oriya", Var, 0}, + {"Osage", Var, 7}, + {"Osmanya", Var, 0}, + {"Other", Var, 0}, + {"Other_Alphabetic", Var, 0}, + {"Other_Default_Ignorable_Code_Point", Var, 0}, + {"Other_Grapheme_Extend", Var, 0}, + {"Other_ID_Continue", Var, 0}, + {"Other_ID_Start", Var, 0}, + {"Other_Lowercase", Var, 0}, + {"Other_Math", Var, 0}, + {"Other_Uppercase", Var, 0}, + {"P", Var, 0}, + {"Pahawh_Hmong", Var, 4}, + {"Palmyrene", Var, 4}, + {"Pattern_Syntax", Var, 0}, + {"Pattern_White_Space", Var, 0}, + {"Pau_Cin_Hau", Var, 4}, + {"Pc", Var, 0}, + {"Pd", Var, 0}, + {"Pe", Var, 0}, + {"Pf", Var, 0}, + {"Phags_Pa", Var, 0}, + {"Phoenician", Var, 0}, + {"Pi", Var, 0}, + {"Po", Var, 0}, + {"Prepended_Concatenation_Mark", Var, 7}, + {"PrintRanges", Var, 0}, + {"Properties", Var, 0}, + {"Ps", Var, 0}, + {"Psalter_Pahlavi", Var, 4}, + {"Punct", Var, 0}, + {"Quotation_Mark", Var, 0}, + {"Radical", Var, 0}, + {"Range16", Type, 0}, + 
{"Range16.Hi", Field, 0}, + {"Range16.Lo", Field, 0}, + {"Range16.Stride", Field, 0}, + {"Range32", Type, 0}, + {"Range32.Hi", Field, 0}, + {"Range32.Lo", Field, 0}, + {"Range32.Stride", Field, 0}, + {"RangeTable", Type, 0}, + {"RangeTable.LatinOffset", Field, 1}, + {"RangeTable.R16", Field, 0}, + {"RangeTable.R32", Field, 0}, + {"Regional_Indicator", Var, 10}, + {"Rejang", Var, 0}, + {"ReplacementChar", Const, 0}, + {"Runic", Var, 0}, + {"S", Var, 0}, + {"STerm", Var, 0}, + {"Samaritan", Var, 0}, + {"Saurashtra", Var, 0}, + {"Sc", Var, 0}, + {"Scripts", Var, 0}, + {"Sentence_Terminal", Var, 7}, + {"Sharada", Var, 1}, + {"Shavian", Var, 0}, + {"Siddham", Var, 4}, + {"SignWriting", Var, 5}, + {"SimpleFold", Func, 0}, + {"Sinhala", Var, 0}, + {"Sk", Var, 0}, + {"Sm", Var, 0}, + {"So", Var, 0}, + {"Soft_Dotted", Var, 0}, + {"Sogdian", Var, 13}, + {"Sora_Sompeng", Var, 1}, + {"Soyombo", Var, 10}, + {"Space", Var, 0}, + {"SpecialCase", Type, 0}, + {"Sundanese", Var, 0}, + {"Syloti_Nagri", Var, 0}, + {"Symbol", Var, 0}, + {"Syriac", Var, 0}, + {"Tagalog", Var, 0}, + {"Tagbanwa", Var, 0}, + {"Tai_Le", Var, 0}, + {"Tai_Tham", Var, 0}, + {"Tai_Viet", Var, 0}, + {"Takri", Var, 1}, + {"Tamil", Var, 0}, + {"Tangsa", Var, 21}, + {"Tangut", Var, 7}, + {"Telugu", Var, 0}, + {"Terminal_Punctuation", Var, 0}, + {"Thaana", Var, 0}, + {"Thai", Var, 0}, + {"Tibetan", Var, 0}, + {"Tifinagh", Var, 0}, + {"Tirhuta", Var, 4}, + {"Title", Var, 0}, + {"TitleCase", Const, 0}, + {"To", Func, 0}, + {"ToLower", Func, 0}, + {"ToTitle", Func, 0}, + {"ToUpper", Func, 0}, + {"Toto", Var, 21}, + {"TurkishCase", Var, 0}, + {"Ugaritic", Var, 0}, + {"Unified_Ideograph", Var, 0}, + {"Upper", Var, 0}, + {"UpperCase", Const, 0}, + {"UpperLower", Const, 0}, + {"Vai", Var, 0}, + {"Variation_Selector", Var, 0}, + {"Version", Const, 0}, + {"Vithkuqi", Var, 21}, + {"Wancho", Var, 14}, + {"Warang_Citi", Var, 4}, + {"White_Space", Var, 0}, + {"Yezidi", Var, 16}, + {"Yi", Var, 0}, + {"Z", Var, 0}, + 
{"Zanabazar_Square", Var, 10}, + {"Zl", Var, 0}, + {"Zp", Var, 0}, + {"Zs", Var, 0}, + }, + "unicode/utf16": { + {"AppendRune", Func, 20}, + {"Decode", Func, 0}, + {"DecodeRune", Func, 0}, + {"Encode", Func, 0}, + {"EncodeRune", Func, 0}, + {"IsSurrogate", Func, 0}, + }, + "unicode/utf8": { + {"AppendRune", Func, 18}, + {"DecodeLastRune", Func, 0}, + {"DecodeLastRuneInString", Func, 0}, + {"DecodeRune", Func, 0}, + {"DecodeRuneInString", Func, 0}, + {"EncodeRune", Func, 0}, + {"FullRune", Func, 0}, + {"FullRuneInString", Func, 0}, + {"MaxRune", Const, 0}, + {"RuneCount", Func, 0}, + {"RuneCountInString", Func, 0}, + {"RuneError", Const, 0}, + {"RuneLen", Func, 0}, + {"RuneSelf", Const, 0}, + {"RuneStart", Func, 0}, + {"UTFMax", Const, 0}, + {"Valid", Func, 0}, + {"ValidRune", Func, 1}, + {"ValidString", Func, 0}, + }, + "unsafe": { + {"Add", Func, 0}, + {"Alignof", Func, 0}, + {"Offsetof", Func, 0}, + {"Pointer", Type, 0}, + {"Sizeof", Func, 0}, + {"Slice", Func, 0}, + {"SliceData", Func, 0}, + {"String", Func, 0}, + {"StringData", Func, 0}, + }, +} diff --git a/internal/stdlib/stdlib.go b/internal/stdlib/stdlib.go new file mode 100644 index 00000000000..98904017f2c --- /dev/null +++ b/internal/stdlib/stdlib.go @@ -0,0 +1,97 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate go run generate.go + +// Package stdlib provides a table of all exported symbols in the +// standard library, along with the version at which they first +// appeared. +package stdlib + +import ( + "fmt" + "strings" +) + +type Symbol struct { + Name string + Kind Kind + Version Version // Go version that first included the symbol +} + +// A Kind indicates the kind of a symbol: +// function, variable, constant, type, and so on. 
+type Kind int8 + +const ( + Invalid Kind = iota // Example name: + Type // "Buffer" + Func // "Println" + Var // "EOF" + Const // "Pi" + Field // "Point.X" + Method // "(*Buffer).Grow" +) + +func (kind Kind) String() string { + return [...]string{ + Invalid: "invalid", + Type: "type", + Func: "func", + Var: "var", + Const: "const", + Field: "field", + Method: "method", + }[kind] +} + +// A Version represents a version of Go of the form "go1.%d". +type Version int8 + +// String returns a version string of the form "go1.23", without allocating. +func (v Version) String() string { return versions[v] } + +var versions [30]string // (increase constant as needed) + +func init() { + for i := range versions { + versions[i] = fmt.Sprintf("go1.%d", i) + } +} + +// HasPackage reports whether the specified package path is part of +// the standard library's public API. +func HasPackage(path string) bool { + _, ok := PackageSymbols[path] + return ok +} + +// SplitField splits the field symbol name into type and field +// components. It must be called only on Field symbols. +// +// Example: "File.Package" -> ("File", "Package") +func (sym *Symbol) SplitField() (typename, name string) { + if sym.Kind != Field { + panic("not a field") + } + typename, name, _ = strings.Cut(sym.Name, ".") + return +} + +// SplitMethod splits the method symbol name into pointer, receiver, +// and method components. It must be called only on Method symbols. 
+// +// Example: "(*Buffer).Grow" -> (true, "Buffer", "Grow") +func (sym *Symbol) SplitMethod() (ptr bool, recv, name string) { + if sym.Kind != Method { + panic("not a method") + } + recv, name, _ = strings.Cut(sym.Name, ".") + recv = recv[len("(") : len(recv)-len(")")] + ptr = recv[0] == '*' + if ptr { + recv = recv[len("*"):] + } + return +} diff --git a/internal/testenv/exec.go b/internal/testenv/exec.go index 43aad5899fc..f2ab5f5eb8d 100644 --- a/internal/testenv/exec.go +++ b/internal/testenv/exec.go @@ -92,8 +92,7 @@ func NeedsExec(t testing.TB) { // for an arbitrary grace period before the test's deadline expires, // - if Cmd has the Cancel field, fails the test if the command is canceled // due to the test's deadline, and -// - if supported, sets a Cleanup function that verifies that the test did not -// leak a subprocess. +// - sets a Cleanup function that verifies that the test did not leak a subprocess. func CommandContext(t testing.TB, ctx context.Context, name string, args ...string) *exec.Cmd { t.Helper() NeedsExec(t) @@ -126,8 +125,8 @@ func CommandContext(t testing.TB, ctx context.Context, name string, args ...stri // grace periods to clean up: one for the delay between the first // termination signal being sent (via the Cancel callback when the Context // expires) and the process being forcibly terminated (via the WaitDelay - // field), and a second one for the delay becween the process being - // terminated and and the test logging its output for debugging. + // field), and a second one for the delay between the process being + // terminated and the test logging its output for debugging. // // (We want to ensure that the test process itself has enough time to // log the output before it is also terminated.) 
@@ -173,21 +172,14 @@ func CommandContext(t testing.TB, ctx context.Context, name string, args ...stri rWaitDelay.Set(reflect.ValueOf(gracePeriod)) } - // t.Cleanup was added in Go 1.14; for earlier Go versions, - // we just let the Context leak. - type Cleanupper interface { - Cleanup(func()) - } - if ct, ok := t.(Cleanupper); ok { - ct.Cleanup(func() { - if cancelCtx != nil { - cancelCtx() - } - if cmd.Process != nil && cmd.ProcessState == nil { - t.Errorf("command was started, but test did not wait for it to complete: %v", cmd) - } - }) - } + t.Cleanup(func() { + if cancelCtx != nil { + cancelCtx() + } + if cmd.Process != nil && cmd.ProcessState == nil { + t.Errorf("command was started, but test did not wait for it to complete: %v", cmd) + } + }) return cmd } diff --git a/internal/testenv/testenv.go b/internal/testenv/testenv.go index 88de3da05d2..d4a17ce039a 100644 --- a/internal/testenv/testenv.go +++ b/internal/testenv/testenv.go @@ -11,6 +11,7 @@ import ( "fmt" "go/build" "os" + "os/exec" "path/filepath" "runtime" "runtime/debug" @@ -21,8 +22,6 @@ import ( "golang.org/x/mod/modfile" "golang.org/x/tools/internal/goroot" - - exec "golang.org/x/sys/execabs" ) // packageMainIsDevel reports whether the module containing package main @@ -46,7 +45,10 @@ var checkGoBuild struct { err error } -func hasTool(tool string) error { +// HasTool reports an error if the required tool is not available in PATH. +// +// For certain tools, it checks that the tool executable is correct. +func HasTool(tool string) error { if tool == "cgo" { enabled, err := cgoEnabled(false) if err != nil { @@ -84,8 +86,11 @@ func hasTool(tool string) error { // GOROOT. Otherwise, 'some/path/go test ./...' will test against some // version of the 'go' binary other than 'some/path/go', which is almost // certainly not what the user intended. 
- out, err := exec.Command(tool, "env", "GOROOT").CombinedOutput() + out, err := exec.Command(tool, "env", "GOROOT").Output() if err != nil { + if exit, ok := err.(*exec.ExitError); ok && len(exit.Stderr) > 0 { + err = fmt.Errorf("%w\nstderr:\n%s", err, exit.Stderr) + } checkGoBuild.err = err return } @@ -142,8 +147,11 @@ func cgoEnabled(bypassEnvironment bool) (bool, error) { if bypassEnvironment { cmd.Env = append(append([]string(nil), os.Environ()...), "CGO_ENABLED=") } - out, err := cmd.CombinedOutput() + out, err := cmd.Output() if err != nil { + if exit, ok := err.(*exec.ExitError); ok && len(exit.Stderr) > 0 { + err = fmt.Errorf("%w\nstderr:\n%s", err, exit.Stderr) + } return false, err } enabled := strings.TrimSpace(string(out)) @@ -193,13 +201,19 @@ func allowMissingTool(tool string) bool { // NeedsTool skips t if the named tool is not present in the path. // As a special case, "cgo" means "go" is present and can compile cgo programs. func NeedsTool(t testing.TB, tool string) { - err := hasTool(tool) + err := HasTool(tool) if err == nil { return } t.Helper() if allowMissingTool(tool) { + // TODO(adonovan): if we skip because of (e.g.) + // mismatched go env GOROOT and runtime.GOROOT, don't + // we risk some users not getting the coverage they expect? + // bcmills notes: this shouldn't be a concern as of CL 404134 (Go 1.19). + // We could probably safely get rid of that GOPATH consistency + // check entirely at this point. 
t.Skipf("skipping because %s tool not available: %v", tool, err) } else { t.Fatalf("%s tool not available: %v", tool, err) diff --git a/internal/tokeninternal/tokeninternal.go b/internal/tokeninternal/tokeninternal.go index 7e638ec24fc..ff9437a36cd 100644 --- a/internal/tokeninternal/tokeninternal.go +++ b/internal/tokeninternal/tokeninternal.go @@ -34,30 +34,16 @@ func GetLines(file *token.File) []int { lines []int _ []struct{} } - type tokenFile118 struct { - _ *token.FileSet // deleted in go1.19 - tokenFile119 - } - - type uP = unsafe.Pointer - switch unsafe.Sizeof(*file) { - case unsafe.Sizeof(tokenFile118{}): - var ptr *tokenFile118 - *(*uP)(uP(&ptr)) = uP(file) - ptr.mu.Lock() - defer ptr.mu.Unlock() - return ptr.lines - case unsafe.Sizeof(tokenFile119{}): - var ptr *tokenFile119 - *(*uP)(uP(&ptr)) = uP(file) - ptr.mu.Lock() - defer ptr.mu.Unlock() - return ptr.lines - - default: + if unsafe.Sizeof(*file) != unsafe.Sizeof(tokenFile119{}) { panic("unexpected token.File size") } + var ptr *tokenFile119 + type uP = unsafe.Pointer + *(*uP)(uP(&ptr)) = uP(file) + ptr.mu.Lock() + defer ptr.mu.Unlock() + return ptr.lines } // AddExistingFiles adds the specified files to the FileSet if they diff --git a/internal/typeparams/common.go b/internal/typeparams/common.go index d0d0649fe2a..9771b8c3d69 100644 --- a/internal/typeparams/common.go +++ b/internal/typeparams/common.go @@ -2,20 +2,10 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package typeparams contains common utilities for writing tools that interact -// with generic Go code, as introduced with Go 1.18. -// -// Many of the types and functions in this package are proxies for the new APIs -// introduced in the standard library with Go 1.18. For example, the -// typeparams.Union type is an alias for go/types.Union, and the ForTypeSpec -// function returns the value of the go/ast.TypeSpec.TypeParams field. 
At Go -// versions older than 1.18 these helpers are implemented as stubs, allowing -// users of this package to write code that handles generic constructs inline, -// even if the Go version being used to compile does not support generics. -// -// Additionally, this package contains common utilities for working with the -// new generic constructs, to supplement the standard library APIs. Notably, -// the StructuralTerms API computes a minimal representation of the structural +// Package typeparams contains common utilities for writing tools that +// interact with generic Go code, as introduced with Go 1.18. It +// supplements the standard library APIs. Notably, the StructuralTerms +// API computes a minimal representation of the structural // restrictions on a type parameter. // // An external version of these APIs is available in the @@ -23,10 +13,11 @@ package typeparams import ( - "fmt" "go/ast" "go/token" "go/types" + + "golang.org/x/tools/internal/aliases" ) // UnpackIndexExpr extracts data from AST nodes that represent index @@ -42,7 +33,7 @@ func UnpackIndexExpr(n ast.Node) (x ast.Expr, lbrack token.Pos, indices []ast.Ex switch e := n.(type) { case *ast.IndexExpr: return e.X, e.Lbrack, []ast.Expr{e.Index}, e.Rbrack - case *IndexListExpr: + case *ast.IndexListExpr: return e.X, e.Lbrack, e.Indices, e.Rbrack } return nil, token.NoPos, nil, token.NoPos @@ -63,7 +54,7 @@ func PackIndexExpr(x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack toke Rbrack: rbrack, } default: - return &IndexListExpr{ + return &ast.IndexListExpr{ X: x, Lbrack: lbrack, Indices: indices, @@ -72,68 +63,12 @@ func PackIndexExpr(x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack toke } } -// IsTypeParam reports whether t is a type parameter. +// IsTypeParam reports whether t is a type parameter (or an alias of one). 
func IsTypeParam(t types.Type) bool { - _, ok := t.(*TypeParam) + _, ok := aliases.Unalias(t).(*types.TypeParam) return ok } -// OriginMethod returns the origin method associated with the method fn. -// For methods on a non-generic receiver base type, this is just -// fn. However, for methods with a generic receiver, OriginMethod returns the -// corresponding method in the method set of the origin type. -// -// As a special case, if fn is not a method (has no receiver), OriginMethod -// returns fn. -func OriginMethod(fn *types.Func) *types.Func { - recv := fn.Type().(*types.Signature).Recv() - if recv == nil { - return fn - } - base := recv.Type() - p, isPtr := base.(*types.Pointer) - if isPtr { - base = p.Elem() - } - named, isNamed := base.(*types.Named) - if !isNamed { - // Receiver is a *types.Interface. - return fn - } - if ForNamed(named).Len() == 0 { - // Receiver base has no type parameters, so we can avoid the lookup below. - return fn - } - orig := NamedTypeOrigin(named) - gfn, _, _ := types.LookupFieldOrMethod(orig, true, fn.Pkg(), fn.Name()) - - // This is a fix for a gopls crash (#60628) due to a go/types bug (#60634). In: - // package p - // type T *int - // func (*T) f() {} - // LookupFieldOrMethod(T, true, p, f)=nil, but NewMethodSet(*T)={(*T).f}. - // Here we make them consistent by force. - // (The go/types bug is general, but this workaround is reached only - // for generic T thanks to the early return above.) - if gfn == nil { - mset := types.NewMethodSet(types.NewPointer(orig)) - for i := 0; i < mset.Len(); i++ { - m := mset.At(i) - if m.Obj().Id() == fn.Id() { - gfn = m.Obj() - break - } - } - } - - // In golang/go#61196, we observe another crash, this time inexplicable. 
- if gfn == nil { - panic(fmt.Sprintf("missing origin method for %s.%s; named == origin: %t, named.NumMethods(): %d, origin.NumMethods(): %d", named, fn, named == orig, named.NumMethods(), orig.NumMethods())) - } - - return gfn.(*types.Func) -} - // GenericAssignableTo is a generalization of types.AssignableTo that // implements the following rule for uninstantiated generic types: // @@ -157,7 +92,10 @@ func OriginMethod(fn *types.Func) *types.Func { // // In this case, GenericAssignableTo reports that instantiations of Container // are assignable to the corresponding instantiation of Interface. -func GenericAssignableTo(ctxt *Context, V, T types.Type) bool { +func GenericAssignableTo(ctxt *types.Context, V, T types.Type) bool { + V = aliases.Unalias(V) + T = aliases.Unalias(T) + // If V and T are not both named, or do not have matching non-empty type // parameter lists, fall back on types.AssignableTo. @@ -167,9 +105,9 @@ func GenericAssignableTo(ctxt *Context, V, T types.Type) bool { return types.AssignableTo(V, T) } - vtparams := ForNamed(VN) - ttparams := ForNamed(TN) - if vtparams.Len() == 0 || vtparams.Len() != ttparams.Len() || NamedTypeArgs(VN).Len() != 0 || NamedTypeArgs(TN).Len() != 0 { + vtparams := VN.TypeParams() + ttparams := TN.TypeParams() + if vtparams.Len() == 0 || vtparams.Len() != ttparams.Len() || VN.TypeArgs().Len() != 0 || TN.TypeArgs().Len() != 0 { return types.AssignableTo(V, T) } @@ -182,7 +120,7 @@ func GenericAssignableTo(ctxt *Context, V, T types.Type) bool { // Minor optimization: ensure we share a context across the two // instantiations below. 
if ctxt == nil { - ctxt = NewContext() + ctxt = types.NewContext() } var targs []types.Type @@ -190,12 +128,12 @@ func GenericAssignableTo(ctxt *Context, V, T types.Type) bool { targs = append(targs, vtparams.At(i)) } - vinst, err := Instantiate(ctxt, V, targs, true) + vinst, err := types.Instantiate(ctxt, V, targs, true) if err != nil { panic("type parameters should satisfy their own constraints") } - tinst, err := Instantiate(ctxt, T, targs, true) + tinst, err := types.Instantiate(ctxt, T, targs, true) if err != nil { return false } diff --git a/internal/typeparams/common_test.go b/internal/typeparams/common_test.go index d1f13fa7f53..779a942d59e 100644 --- a/internal/typeparams/common_test.go +++ b/internal/typeparams/common_test.go @@ -11,7 +11,6 @@ import ( "go/types" "testing" - "golang.org/x/tools/internal/testenv" . "golang.org/x/tools/internal/typeparams" ) @@ -19,7 +18,7 @@ func TestGetIndexExprData(t *testing.T) { x := &ast.Ident{} i := &ast.Ident{} - want := &IndexListExpr{X: x, Lbrack: 1, Indices: []ast.Expr{i}, Rbrack: 2} + want := &ast.IndexListExpr{X: x, Lbrack: 1, Indices: []ast.Expr{i}, Rbrack: 2} tests := map[ast.Node]bool{ &ast.IndexExpr{X: x, Lbrack: 1, Index: i, Rbrack: 2}: true, want: true, @@ -40,8 +39,7 @@ func TestGetIndexExprData(t *testing.T) { } } -func TestOriginMethodRecursive(t *testing.T) { - testenv.NeedsGo1Point(t, 18) +func TestFuncOriginRecursive(t *testing.T) { src := `package p type N[A any] int @@ -106,14 +104,13 @@ func (r *N[C]) n() { } } for _, test := range tests { - if got := OriginMethod(test.input); got != test.want { - t.Errorf("OriginMethod(%q) = %v, want %v", test.name, test.input, test.want) + if got := test.input.Origin(); got != test.want { + t.Errorf("Origin(%q) = %v, want %v", test.name, test.input, test.want) } } } -func TestOriginMethodUses(t *testing.T) { - testenv.NeedsGo1Point(t, 18) +func TestFuncOriginUses(t *testing.T) { tests := []string{ `type T interface { m() }; func _(t T) { t.m() }`, @@ -150,7 
+147,7 @@ func TestOriginMethodUses(t *testing.T) { if call, ok := n.(*ast.CallExpr); ok { sel := call.Fun.(*ast.SelectorExpr) use := info.Uses[sel.Sel].(*types.Func) - orig := OriginMethod(use) + orig := use.Origin() if orig != m { t.Errorf("%s:\nUses[%v] = %v, want %v", src, types.ExprString(sel), use, m) } @@ -163,8 +160,8 @@ func TestOriginMethodUses(t *testing.T) { // Issue #60628 was a crash in gopls caused by inconsistency (#60634) between // LookupFieldOrMethod and NewFileSet for methods with an illegal // *T receiver type, where T itself is a pointer. -// This is a regression test for the workaround in OriginMethod. -func TestOriginMethod60628(t *testing.T) { +// This is a regression test for the workaround in the (now deleted) OriginMethod. +func TestFuncOrigin60628(t *testing.T) { const src = `package p; type T[P any] *int; func (r *T[A]) f() {}` fset := token.NewFileSet() f, err := parser.ParseFile(fset, "p.go", src, 0) @@ -202,15 +199,13 @@ func TestOriginMethod60628(t *testing.T) { } // Check the workaround. - if OriginMethod(m) == nil { - t.Errorf("OriginMethod(%v) = nil", m) + if m.Origin() == nil { + t.Errorf("Origin(%v) = nil", m) } } } func TestGenericAssignableTo(t *testing.T) { - testenv.NeedsGo1Point(t, 18) - tests := []struct { src string want bool diff --git a/internal/typeparams/coretype.go b/internal/typeparams/coretype.go index 71248209ee5..24933e43dac 100644 --- a/internal/typeparams/coretype.go +++ b/internal/typeparams/coretype.go @@ -5,7 +5,10 @@ package typeparams import ( + "fmt" "go/types" + + "golang.org/x/tools/internal/aliases" ) // CoreType returns the core type of T or nil if T does not have a core type. @@ -108,15 +111,42 @@ func CoreType(T types.Type) types.Type { // // _NormalTerms makes no guarantees about the order of terms, except that it // is deterministic. 
-func _NormalTerms(typ types.Type) ([]*Term, error) { - switch typ := typ.(type) { - case *TypeParam: +func _NormalTerms(typ types.Type) ([]*types.Term, error) { + switch typ := aliases.Unalias(typ).(type) { + case *types.TypeParam: return StructuralTerms(typ) - case *Union: + case *types.Union: return UnionTermSet(typ) case *types.Interface: return InterfaceTermSet(typ) default: - return []*Term{NewTerm(false, typ)}, nil + return []*types.Term{types.NewTerm(false, typ)}, nil + } +} + +// Deref returns the type of the variable pointed to by t, +// if t's core type is a pointer; otherwise it returns t. +// +// Do not assume that Deref(T)==T implies T is not a pointer: +// consider "type T *T", for example. +// +// TODO(adonovan): ideally this would live in typesinternal, but that +// creates an import cycle. Move there when we melt this package down. +func Deref(t types.Type) types.Type { + if ptr, ok := CoreType(t).(*types.Pointer); ok { + return ptr.Elem() + } + return t +} + +// MustDeref returns the type of the variable pointed to by t. +// It panics if t's core type is not a pointer. +// +// TODO(adonovan): ideally this would live in typesinternal, but that +// creates an import cycle. Move there when we melt this package down. 
+func MustDeref(t types.Type) types.Type { + if ptr, ok := CoreType(t).(*types.Pointer); ok { + return ptr.Elem() } + panic(fmt.Sprintf("%v is not a pointer", t)) } diff --git a/internal/typeparams/coretype_test.go b/internal/typeparams/coretype_test.go index 288439952a7..a9575f9238e 100644 --- a/internal/typeparams/coretype_test.go +++ b/internal/typeparams/coretype_test.go @@ -15,10 +15,6 @@ import ( ) func TestCoreType(t *testing.T) { - if !typeparams.Enabled { - t.Skip("TestCoreType requires type parameters.") - } - const source = ` package P diff --git a/internal/typeparams/enabled_go117.go b/internal/typeparams/enabled_go117.go deleted file mode 100644 index 18212390e19..00000000000 --- a/internal/typeparams/enabled_go117.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.18 -// +build !go1.18 - -package typeparams - -// Enabled reports whether type parameters are enabled in the current build -// environment. -const Enabled = false diff --git a/internal/typeparams/enabled_go118.go b/internal/typeparams/enabled_go118.go deleted file mode 100644 index d67148823c4..00000000000 --- a/internal/typeparams/enabled_go118.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.18 -// +build go1.18 - -package typeparams - -// Note: this constant is in a separate file as this is the only acceptable -// diff between the <1.18 API of this package and the 1.18 API. - -// Enabled reports whether type parameters are enabled in the current build -// environment. 
-const Enabled = true diff --git a/internal/typeparams/free.go b/internal/typeparams/free.go new file mode 100644 index 00000000000..de3496d10b3 --- /dev/null +++ b/internal/typeparams/free.go @@ -0,0 +1,121 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeparams + +import ( + "go/types" + + "golang.org/x/tools/internal/aliases" +) + +// Free is a memoization of the set of free type parameters within a +// type. It makes a sequence of calls to [Free.Has] for overlapping +// types more efficient. The zero value is ready for use. +// +// NOTE: Adapted from go/types/infer.go. If it is later exported, factor. +type Free struct { + seen map[types.Type]bool +} + +// Has reports whether the specified type has a free type parameter. +func (w *Free) Has(typ types.Type) (res bool) { + + // detect cycles + if x, ok := w.seen[typ]; ok { + return x + } + if w.seen == nil { + w.seen = make(map[types.Type]bool) + } + w.seen[typ] = false + defer func() { + w.seen[typ] = res + }() + + switch t := typ.(type) { + case nil, *types.Basic: // TODO(gri) should nil be handled here? + break + + case *aliases.Alias: + return w.Has(aliases.Unalias(t)) + + case *types.Array: + return w.Has(t.Elem()) + + case *types.Slice: + return w.Has(t.Elem()) + + case *types.Struct: + for i, n := 0, t.NumFields(); i < n; i++ { + if w.Has(t.Field(i).Type()) { + return true + } + } + + case *types.Pointer: + return w.Has(t.Elem()) + + case *types.Tuple: + n := t.Len() + for i := 0; i < n; i++ { + if w.Has(t.At(i).Type()) { + return true + } + } + + case *types.Signature: + // t.tparams may not be nil if we are looking at a signature + // of a generic function type (or an interface method) that is + // part of the type we're testing. We don't care about these type + // parameters. 
+ // Similarly, the receiver of a method may declare (rather than + // use) type parameters, we don't care about those either. + // Thus, we only need to look at the input and result parameters. + return w.Has(t.Params()) || w.Has(t.Results()) + + case *types.Interface: + for i, n := 0, t.NumMethods(); i < n; i++ { + if w.Has(t.Method(i).Type()) { + return true + } + } + terms, err := InterfaceTermSet(t) + if err != nil { + panic(err) + } + for _, term := range terms { + if w.Has(term.Type()) { + return true + } + } + + case *types.Map: + return w.Has(t.Key()) || w.Has(t.Elem()) + + case *types.Chan: + return w.Has(t.Elem()) + + case *types.Named: + args := t.TypeArgs() + // TODO(taking): this does not match go/types/infer.go. Check with rfindley. + if params := t.TypeParams(); params.Len() > args.Len() { + return true + } + for i, n := 0, args.Len(); i < n; i++ { + if w.Has(args.At(i)) { + return true + } + } + return w.Has(t.Underlying()) // recurse for types local to parameterized functions + + case *types.TypeParam: + return true + + default: + panic(t) // unreachable + } + + return false +} diff --git a/internal/typeparams/free_test.go b/internal/typeparams/free_test.go new file mode 100644 index 00000000000..b73a8238be3 --- /dev/null +++ b/internal/typeparams/free_test.go @@ -0,0 +1,73 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package typeparams + +import ( + "go/ast" + "go/parser" + "go/token" + "go/types" + "testing" +) + +func TestFree(t *testing.T) { + const source = ` +package P +type A int +func (A) f() +func (*A) g() + +type fer interface { f() } + +func Apply[T fer](x T) T { + x.f() + return x +} + +type V[T any] []T +func (v *V[T]) Push(x T) { *v = append(*v, x) } +` + + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "hello.go", source, 0) + if err != nil { + t.Fatal(err) + } + + var conf types.Config + pkg, err := conf.Check("P", fset, []*ast.File{f}, nil) + if err != nil { + t.Fatal(err) + } + + for _, test := range []struct { + expr string // type expression + want bool // expected value + }{ + {"A", false}, + {"*A", false}, + {"error", false}, + {"*error", false}, + {"struct{A}", false}, + {"*struct{A}", false}, + {"fer", false}, + {"Apply", true}, + {"Apply[A]", false}, + {"V", true}, + {"V[A]", false}, + {"*V[A]", false}, + {"(*V[A]).Push", false}, + } { + tv, err := types.Eval(fset, pkg, 0, test.expr) + if err != nil { + t.Errorf("Eval(%s) failed: %v", test.expr, err) + } + + if got := new(Free).Has(tv.Type); got != test.want { + t.Logf("Eval(%s) returned the type %s", test.expr, tv.Type) + t.Errorf("isParameterized(%s) = %v, want %v", test.expr, got, test.want) + } + } +} diff --git a/internal/typeparams/genericfeatures/features.go b/internal/typeparams/genericfeatures/features.go index 8ceef867451..e7d0e0e6112 100644 --- a/internal/typeparams/genericfeatures/features.go +++ b/internal/typeparams/genericfeatures/features.go @@ -12,7 +12,7 @@ import ( "strings" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/aliases" ) // Features is a set of flags reporting which features of generic Go code a @@ -77,24 +77,23 @@ func ForPackage(inspect *inspector.Inspector, info *types.Info) Features { inspect.Preorder(nodeFilter, func(node ast.Node) { switch n := node.(type) { case *ast.FuncType: - if 
tparams := typeparams.ForFuncType(n); tparams != nil { + if tparams := n.TypeParams; tparams != nil { direct |= GenericFuncDecls } case *ast.InterfaceType: tv := info.Types[n] - if iface, _ := tv.Type.(*types.Interface); iface != nil && !typeparams.IsMethodSet(iface) { + if iface, _ := tv.Type.(*types.Interface); iface != nil && !iface.IsMethodSet() { direct |= EmbeddedTypeSets } case *ast.TypeSpec: - if tparams := typeparams.ForTypeSpec(n); tparams != nil { + if tparams := n.TypeParams; tparams != nil { direct |= GenericTypeDecls } } }) - instances := typeparams.GetInstances(info) - for _, inst := range instances { - switch inst.Type.(type) { + for _, inst := range info.Instances { + switch aliases.Unalias(inst.Type).(type) { case *types.Named: direct |= TypeInstantiation case *types.Signature: diff --git a/internal/typeparams/normalize.go b/internal/typeparams/normalize.go index 9c631b6512d..93c80fdc96c 100644 --- a/internal/typeparams/normalize.go +++ b/internal/typeparams/normalize.go @@ -60,7 +60,7 @@ var ErrEmptyTypeSet = errors.New("empty type set") // // StructuralTerms makes no guarantees about the order of terms, except that it // is deterministic. -func StructuralTerms(tparam *TypeParam) ([]*Term, error) { +func StructuralTerms(tparam *types.TypeParam) ([]*types.Term, error) { constraint := tparam.Constraint() if constraint == nil { return nil, fmt.Errorf("%s has nil constraint", tparam) @@ -78,7 +78,7 @@ func StructuralTerms(tparam *TypeParam) ([]*Term, error) { // // See the documentation of StructuralTerms for more information on // normalization. -func InterfaceTermSet(iface *types.Interface) ([]*Term, error) { +func InterfaceTermSet(iface *types.Interface) ([]*types.Term, error) { return computeTermSet(iface) } @@ -88,11 +88,11 @@ func InterfaceTermSet(iface *types.Interface) ([]*Term, error) { // // See the documentation of StructuralTerms for more information on // normalization. 
-func UnionTermSet(union *Union) ([]*Term, error) { +func UnionTermSet(union *types.Union) ([]*types.Term, error) { return computeTermSet(union) } -func computeTermSet(typ types.Type) ([]*Term, error) { +func computeTermSet(typ types.Type) ([]*types.Term, error) { tset, err := computeTermSetInternal(typ, make(map[types.Type]*termSet), 0) if err != nil { return nil, err @@ -103,9 +103,9 @@ func computeTermSet(typ types.Type) ([]*Term, error) { if tset.terms.isAll() { return nil, nil } - var terms []*Term + var terms []*types.Term for _, term := range tset.terms { - terms = append(terms, NewTerm(term.tilde, term.typ)) + terms = append(terms, types.NewTerm(term.tilde, term.typ)) } return terms, nil } @@ -162,7 +162,7 @@ func computeTermSetInternal(t types.Type, seen map[types.Type]*termSet, depth in tset.terms = allTermlist for i := 0; i < u.NumEmbeddeds(); i++ { embedded := u.EmbeddedType(i) - if _, ok := embedded.Underlying().(*TypeParam); ok { + if _, ok := embedded.Underlying().(*types.TypeParam); ok { return nil, fmt.Errorf("invalid embedded type %T", embedded) } tset2, err := computeTermSetInternal(embedded, seen, depth+1) @@ -171,7 +171,7 @@ func computeTermSetInternal(t types.Type, seen map[types.Type]*termSet, depth in } tset.terms = tset.terms.intersect(tset2.terms) } - case *Union: + case *types.Union: // The term set of a union is the union of term sets of its terms. tset.terms = nil for i := 0; i < u.Len(); i++ { @@ -184,7 +184,7 @@ func computeTermSetInternal(t types.Type, seen map[types.Type]*termSet, depth in return nil, err } terms = tset2.terms - case *TypeParam, *Union: + case *types.TypeParam, *types.Union: // A stand-alone type parameter or union is not permitted as union // term. 
return nil, fmt.Errorf("invalid union term %T", t) @@ -199,7 +199,7 @@ func computeTermSetInternal(t types.Type, seen map[types.Type]*termSet, depth in return nil, fmt.Errorf("exceeded max term count %d", maxTermCount) } } - case *TypeParam: + case *types.TypeParam: panic("unreachable") default: // For all other types, the term set is just a single non-tilde term diff --git a/internal/typeparams/normalize_test.go b/internal/typeparams/normalize_test.go index 769433d701d..d2c678c90ff 100644 --- a/internal/typeparams/normalize_test.go +++ b/internal/typeparams/normalize_test.go @@ -13,15 +13,10 @@ import ( "strings" "testing" - "golang.org/x/tools/internal/typeparams" . "golang.org/x/tools/internal/typeparams" ) func TestStructuralTerms(t *testing.T) { - if !Enabled { - t.Skip("typeparams are not enabled") - } - // In the following tests, src must define a type T with (at least) one type // parameter. We will compute the structural terms of the first type // parameter. @@ -76,7 +71,7 @@ type T[P interface{ A|B; C }] int if obj == nil { t.Fatal("type T not found") } - T := typeparams.ForNamed(obj.Type().(*types.Named)).At(0) + T := obj.Type().(*types.Named).TypeParams().At(0) terms, err := StructuralTerms(T) if test.wantError != "" { if err == nil { @@ -95,7 +90,7 @@ type T[P interface{ A|B; C }] int got = "all" } else { qf := types.RelativeTo(pkg) - got = types.TypeString(NewUnion(terms), qf) + got = types.TypeString(types.NewUnion(terms), qf) } want := regexp.MustCompile(test.want) if !want.MatchString(got) { diff --git a/internal/typeparams/typeparams_go117.go b/internal/typeparams/typeparams_go117.go deleted file mode 100644 index 7ed86e1711b..00000000000 --- a/internal/typeparams/typeparams_go117.go +++ /dev/null @@ -1,197 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build !go1.18 -// +build !go1.18 - -package typeparams - -import ( - "go/ast" - "go/token" - "go/types" -) - -func unsupported() { - panic("type parameters are unsupported at this go version") -} - -// IndexListExpr is a placeholder type, as type parameters are not supported at -// this Go version. Its methods panic on use. -type IndexListExpr struct { - ast.Expr - X ast.Expr // expression - Lbrack token.Pos // position of "[" - Indices []ast.Expr // index expressions - Rbrack token.Pos // position of "]" -} - -// ForTypeSpec returns an empty field list, as type parameters on not supported -// at this Go version. -func ForTypeSpec(*ast.TypeSpec) *ast.FieldList { - return nil -} - -// ForFuncType returns an empty field list, as type parameters are not -// supported at this Go version. -func ForFuncType(*ast.FuncType) *ast.FieldList { - return nil -} - -// TypeParam is a placeholder type, as type parameters are not supported at -// this Go version. Its methods panic on use. -type TypeParam struct{ types.Type } - -func (*TypeParam) Index() int { unsupported(); return 0 } -func (*TypeParam) Constraint() types.Type { unsupported(); return nil } -func (*TypeParam) Obj() *types.TypeName { unsupported(); return nil } - -// TypeParamList is a placeholder for an empty type parameter list. -type TypeParamList struct{} - -func (*TypeParamList) Len() int { return 0 } -func (*TypeParamList) At(int) *TypeParam { unsupported(); return nil } - -// TypeList is a placeholder for an empty type list. -type TypeList struct{} - -func (*TypeList) Len() int { return 0 } -func (*TypeList) At(int) types.Type { unsupported(); return nil } - -// NewTypeParam is unsupported at this Go version, and panics. -func NewTypeParam(name *types.TypeName, constraint types.Type) *TypeParam { - unsupported() - return nil -} - -// SetTypeParamConstraint is unsupported at this Go version, and panics. 
-func SetTypeParamConstraint(tparam *TypeParam, constraint types.Type) { - unsupported() -} - -// NewSignatureType calls types.NewSignature, panicking if recvTypeParams or -// typeParams is non-empty. -func NewSignatureType(recv *types.Var, recvTypeParams, typeParams []*TypeParam, params, results *types.Tuple, variadic bool) *types.Signature { - if len(recvTypeParams) != 0 || len(typeParams) != 0 { - panic("signatures cannot have type parameters at this Go version") - } - return types.NewSignature(recv, params, results, variadic) -} - -// ForSignature returns an empty slice. -func ForSignature(*types.Signature) *TypeParamList { - return nil -} - -// RecvTypeParams returns a nil slice. -func RecvTypeParams(sig *types.Signature) *TypeParamList { - return nil -} - -// IsComparable returns false, as no interfaces are type-restricted at this Go -// version. -func IsComparable(*types.Interface) bool { - return false -} - -// IsMethodSet returns true, as no interfaces are type-restricted at this Go -// version. -func IsMethodSet(*types.Interface) bool { - return true -} - -// IsImplicit returns false, as no interfaces are implicit at this Go version. -func IsImplicit(*types.Interface) bool { - return false -} - -// MarkImplicit does nothing, because this Go version does not have implicit -// interfaces. -func MarkImplicit(*types.Interface) {} - -// ForNamed returns an empty type parameter list, as type parameters are not -// supported at this Go version. -func ForNamed(*types.Named) *TypeParamList { - return nil -} - -// SetForNamed panics if tparams is non-empty. -func SetForNamed(_ *types.Named, tparams []*TypeParam) { - if len(tparams) > 0 { - unsupported() - } -} - -// NamedTypeArgs returns nil. -func NamedTypeArgs(*types.Named) *TypeList { - return nil -} - -// NamedTypeOrigin is the identity method at this Go version. -func NamedTypeOrigin(named *types.Named) *types.Named { - return named -} - -// Term holds information about a structural type restriction. 
-type Term struct { - tilde bool - typ types.Type -} - -func (m *Term) Tilde() bool { return m.tilde } -func (m *Term) Type() types.Type { return m.typ } -func (m *Term) String() string { - pre := "" - if m.tilde { - pre = "~" - } - return pre + m.typ.String() -} - -// NewTerm is unsupported at this Go version, and panics. -func NewTerm(tilde bool, typ types.Type) *Term { - return &Term{tilde, typ} -} - -// Union is a placeholder type, as type parameters are not supported at this Go -// version. Its methods panic on use. -type Union struct{ types.Type } - -func (*Union) Len() int { return 0 } -func (*Union) Term(i int) *Term { unsupported(); return nil } - -// NewUnion is unsupported at this Go version, and panics. -func NewUnion(terms []*Term) *Union { - unsupported() - return nil -} - -// InitInstanceInfo is a noop at this Go version. -func InitInstanceInfo(*types.Info) {} - -// Instance is a placeholder type, as type parameters are not supported at this -// Go version. -type Instance struct { - TypeArgs *TypeList - Type types.Type -} - -// GetInstances returns a nil map, as type parameters are not supported at this -// Go version. -func GetInstances(info *types.Info) map[*ast.Ident]Instance { return nil } - -// Context is a placeholder type, as type parameters are not supported at -// this Go version. -type Context struct{} - -// NewContext returns a placeholder Context instance. -func NewContext() *Context { - return &Context{} -} - -// Instantiate is unsupported on this Go version, and panics. -func Instantiate(ctxt *Context, typ types.Type, targs []types.Type, validate bool) (types.Type, error) { - unsupported() - return nil, nil -} diff --git a/internal/typeparams/typeparams_go118.go b/internal/typeparams/typeparams_go118.go deleted file mode 100644 index cf301af1dbe..00000000000 --- a/internal/typeparams/typeparams_go118.go +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.18 -// +build go1.18 - -package typeparams - -import ( - "go/ast" - "go/types" -) - -// IndexListExpr is an alias for ast.IndexListExpr. -type IndexListExpr = ast.IndexListExpr - -// ForTypeSpec returns n.TypeParams. -func ForTypeSpec(n *ast.TypeSpec) *ast.FieldList { - if n == nil { - return nil - } - return n.TypeParams -} - -// ForFuncType returns n.TypeParams. -func ForFuncType(n *ast.FuncType) *ast.FieldList { - if n == nil { - return nil - } - return n.TypeParams -} - -// TypeParam is an alias for types.TypeParam -type TypeParam = types.TypeParam - -// TypeParamList is an alias for types.TypeParamList -type TypeParamList = types.TypeParamList - -// TypeList is an alias for types.TypeList -type TypeList = types.TypeList - -// NewTypeParam calls types.NewTypeParam. -func NewTypeParam(name *types.TypeName, constraint types.Type) *TypeParam { - return types.NewTypeParam(name, constraint) -} - -// SetTypeParamConstraint calls tparam.SetConstraint(constraint). -func SetTypeParamConstraint(tparam *TypeParam, constraint types.Type) { - tparam.SetConstraint(constraint) -} - -// NewSignatureType calls types.NewSignatureType. -func NewSignatureType(recv *types.Var, recvTypeParams, typeParams []*TypeParam, params, results *types.Tuple, variadic bool) *types.Signature { - return types.NewSignatureType(recv, recvTypeParams, typeParams, params, results, variadic) -} - -// ForSignature returns sig.TypeParams() -func ForSignature(sig *types.Signature) *TypeParamList { - return sig.TypeParams() -} - -// RecvTypeParams returns sig.RecvTypeParams(). -func RecvTypeParams(sig *types.Signature) *TypeParamList { - return sig.RecvTypeParams() -} - -// IsComparable calls iface.IsComparable(). -func IsComparable(iface *types.Interface) bool { - return iface.IsComparable() -} - -// IsMethodSet calls iface.IsMethodSet(). 
-func IsMethodSet(iface *types.Interface) bool { - return iface.IsMethodSet() -} - -// IsImplicit calls iface.IsImplicit(). -func IsImplicit(iface *types.Interface) bool { - return iface.IsImplicit() -} - -// MarkImplicit calls iface.MarkImplicit(). -func MarkImplicit(iface *types.Interface) { - iface.MarkImplicit() -} - -// ForNamed extracts the (possibly empty) type parameter object list from -// named. -func ForNamed(named *types.Named) *TypeParamList { - return named.TypeParams() -} - -// SetForNamed sets the type params tparams on n. Each tparam must be of -// dynamic type *types.TypeParam. -func SetForNamed(n *types.Named, tparams []*TypeParam) { - n.SetTypeParams(tparams) -} - -// NamedTypeArgs returns named.TypeArgs(). -func NamedTypeArgs(named *types.Named) *TypeList { - return named.TypeArgs() -} - -// NamedTypeOrigin returns named.Orig(). -func NamedTypeOrigin(named *types.Named) *types.Named { - return named.Origin() -} - -// Term is an alias for types.Term. -type Term = types.Term - -// NewTerm calls types.NewTerm. -func NewTerm(tilde bool, typ types.Type) *Term { - return types.NewTerm(tilde, typ) -} - -// Union is an alias for types.Union -type Union = types.Union - -// NewUnion calls types.NewUnion. -func NewUnion(terms []*Term) *Union { - return types.NewUnion(terms) -} - -// InitInstanceInfo initializes info to record information about type and -// function instances. -func InitInstanceInfo(info *types.Info) { - info.Instances = make(map[*ast.Ident]types.Instance) -} - -// Instance is an alias for types.Instance. -type Instance = types.Instance - -// GetInstances returns info.Instances. -func GetInstances(info *types.Info) map[*ast.Ident]Instance { - return info.Instances -} - -// Context is an alias for types.Context. -type Context = types.Context - -// NewContext calls types.NewContext. -func NewContext() *Context { - return types.NewContext() -} - -// Instantiate calls types.Instantiate. 
-func Instantiate(ctxt *Context, typ types.Type, targs []types.Type, validate bool) (types.Type, error) { - return types.Instantiate(ctxt, typ, targs, validate) -} diff --git a/internal/typeparams/typeparams_test.go b/internal/typeparams/typeparams_test.go deleted file mode 100644 index cdbcae98f0a..00000000000 --- a/internal/typeparams/typeparams_test.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.18 -// +build go1.18 - -package typeparams_test - -import ( - "go/ast" - "go/importer" - "go/parser" - "go/token" - "go/types" - "strings" - "testing" - - "golang.org/x/tools/internal/apidiff" - "golang.org/x/tools/internal/testenv" -) - -func TestAPIConsistency(t *testing.T) { - testenv.NeedsGoBuild(t) // This is a lie. We actually need the source code. - - // The packages below exclude enabled_*.go, as typeparams.Enabled is - // permitted to change between versions. - old := typeCheck(t, []string{"common.go", "typeparams_go117.go"}) - new := typeCheck(t, []string{"common.go", "typeparams_go118.go"}) - - report := apidiff.Changes(old, new) - - // Temporarily ignore API diff related to Environment, so that we can use a - // transient alias in go/types to allow renaming this type without ever - // breaking the x/tools builder. 
- // TODO(rfindley): remove this - var filteredChanges []apidiff.Change - for _, change := range report.Changes { - if strings.Contains(change.Message, "Environment") { - continue - } - filteredChanges = append(filteredChanges, change) - } - report.Changes = filteredChanges - if len(report.Changes) > 0 { - t.Errorf("API diff:\n%s", report) - } -} - -func typeCheck(t *testing.T, filenames []string) *types.Package { - fset := token.NewFileSet() - var files []*ast.File - for _, name := range filenames { - f, err := parser.ParseFile(fset, name, nil, 0) - if err != nil { - t.Fatal(err) - } - files = append(files, f) - } - conf := types.Config{ - Importer: importer.Default(), - } - pkg, err := conf.Check("", fset, files, nil) - if err != nil { - t.Fatal(err) - } - return pkg -} diff --git a/internal/typesinternal/errorcode.go b/internal/typesinternal/errorcode.go index 07484073a57..e0c27ed251c 100644 --- a/internal/typesinternal/errorcode.go +++ b/internal/typesinternal/errorcode.go @@ -167,7 +167,7 @@ const ( UntypedNilUse // WrongAssignCount occurs when the number of values on the right-hand side - // of an assignment or or initialization expression does not match the number + // of an assignment or initialization expression does not match the number // of variables on the left-hand side. // // Example: diff --git a/internal/typesinternal/recv.go b/internal/typesinternal/recv.go new file mode 100644 index 00000000000..fea7c8b75e8 --- /dev/null +++ b/internal/typesinternal/recv.go @@ -0,0 +1,43 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typesinternal + +import ( + "go/types" + + "golang.org/x/tools/internal/aliases" +) + +// ReceiverNamed returns the named type (if any) associated with the +// type of recv, which may be of the form N or *N, or aliases thereof. +// It also reports whether a Pointer was present. 
+func ReceiverNamed(recv *types.Var) (isPtr bool, named *types.Named) { + t := recv.Type() + if ptr, ok := aliases.Unalias(t).(*types.Pointer); ok { + isPtr = true + t = ptr.Elem() + } + named, _ = aliases.Unalias(t).(*types.Named) + return +} + +// Unpointer returns T given *T or an alias thereof. +// For all other types it is the identity function. +// It does not look at underlying types. +// The result may be an alias. +// +// Use this function to strip off the optional pointer on a receiver +// in a field or method selection, without losing the named type +// (which is needed to compute the method set). +// +// See also [typeparams.MustDeref], which removes one level of +// indirection from the type, regardless of named types (analogous to +// a LOAD instruction). +func Unpointer(t types.Type) types.Type { + if ptr, ok := aliases.Unalias(t).(*types.Pointer); ok { + return ptr.Elem() + } + return t +} diff --git a/internal/typesinternal/toonew.go b/internal/typesinternal/toonew.go new file mode 100644 index 00000000000..cc86487eaa0 --- /dev/null +++ b/internal/typesinternal/toonew.go @@ -0,0 +1,89 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typesinternal + +import ( + "go/types" + + "golang.org/x/tools/internal/stdlib" + "golang.org/x/tools/internal/versions" +) + +// TooNewStdSymbols computes the set of package-level symbols +// exported by pkg that are not available at the specified version. +// The result maps each symbol to its minimum version. +// +// The pkg is allowed to contain type errors. +func TooNewStdSymbols(pkg *types.Package, version string) map[types.Object]string { + disallowed := make(map[types.Object]string) + + // Pass 1: package-level symbols. 
+ symbols := stdlib.PackageSymbols[pkg.Path()] + for _, sym := range symbols { + symver := sym.Version.String() + if versions.Before(version, symver) { + switch sym.Kind { + case stdlib.Func, stdlib.Var, stdlib.Const, stdlib.Type: + disallowed[pkg.Scope().Lookup(sym.Name)] = symver + } + } + } + + // Pass 2: fields and methods. + // + // We allow fields and methods if their associated type is + // disallowed, as otherwise we would report false positives + // for compatibility shims. Consider: + // + // //go:build go1.22 + // type T struct { F std.Real } // correct new API + // + // //go:build !go1.22 + // type T struct { F fake } // shim + // type fake struct { ... } + // func (fake) M () {} + // + // These alternative declarations of T use either the std.Real + // type, introduced in go1.22, or a fake type, for the field + // F. (The fakery could be arbitrarily deep, involving more + // nested fields and methods than are shown here.) Clients + // that use the compatibility shim T will compile with any + // version of go, whether older or newer than go1.22, but only + // the newer version will use the std.Real implementation. + // + // Now consider a reference to method M in new(T).F.M() in a + // module that requires a minimum of go1.21. The analysis may + // occur using a version of Go higher than 1.21, selecting the + // first version of T, so the method M is Real.M. This would + // spuriously cause the analyzer to report a reference to a + // too-new symbol even though this expression compiles just + // fine (with the fake implementation) using go1.21. 
+ for _, sym := range symbols { + symVersion := sym.Version.String() + if !versions.Before(version, symVersion) { + continue // allowed + } + + var obj types.Object + switch sym.Kind { + case stdlib.Field: + typename, name := sym.SplitField() + if t := pkg.Scope().Lookup(typename); t != nil && disallowed[t] == "" { + obj, _, _ = types.LookupFieldOrMethod(t.Type(), false, pkg, name) + } + + case stdlib.Method: + ptr, recvname, name := sym.SplitMethod() + if t := pkg.Scope().Lookup(recvname); t != nil && disallowed[t] == "" { + obj, _, _ = types.LookupFieldOrMethod(t.Type(), ptr, pkg, name) + } + } + if obj != nil { + disallowed[obj] = symVersion + } + } + + return disallowed +} diff --git a/internal/typesinternal/types.go b/internal/typesinternal/types.go index ce7d4351b22..7c77c2fbc03 100644 --- a/internal/typesinternal/types.go +++ b/internal/typesinternal/types.go @@ -48,5 +48,3 @@ func ReadGo116ErrorData(err types.Error) (code ErrorCode, start, end token.Pos, } return ErrorCode(data[0]), token.Pos(data[1]), token.Pos(data[2]), true } - -var SetGoVersion = func(conf *types.Config, version string) bool { return false } diff --git a/internal/typesinternal/types_118.go b/internal/typesinternal/types_118.go deleted file mode 100644 index a42b072a67d..00000000000 --- a/internal/typesinternal/types_118.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.18 -// +build go1.18 - -package typesinternal - -import ( - "go/types" -) - -func init() { - SetGoVersion = func(conf *types.Config, version string) bool { - conf.GoVersion = version - return true - } -} diff --git a/internal/versions/features.go b/internal/versions/features.go new file mode 100644 index 00000000000..b53f1786161 --- /dev/null +++ b/internal/versions/features.go @@ -0,0 +1,43 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package versions + +// This file contains predicates for working with file versions to +// decide when a tool should consider a language feature enabled. + +// GoVersions that features in x/tools can be gated to. +const ( + Go1_18 = "go1.18" + Go1_19 = "go1.19" + Go1_20 = "go1.20" + Go1_21 = "go1.21" + Go1_22 = "go1.22" +) + +// Future is an invalid unknown Go version sometime in the future. +// Do not use directly with Compare. +const Future = "" + +// AtLeast reports whether the file version v comes after a Go release. +// +// Use this predicate to enable a behavior once a certain Go release +// has happened (and stays enabled in the future). +func AtLeast(v, release string) bool { + if v == Future { + return true // an unknown future version is always after y. + } + return Compare(Lang(v), Lang(release)) >= 0 +} + +// Before reports whether the file version v is strictly before a Go release. +// +// Use this predicate to disable a behavior once a certain Go release +// has happened (and stays enabled in the future). +func Before(v, release string) bool { + if v == Future { + return false // an unknown future version happens after y. + } + return Compare(Lang(v), Lang(release)) < 0 +} diff --git a/internal/versions/gover.go b/internal/versions/gover.go new file mode 100644 index 00000000000..bbabcd22e94 --- /dev/null +++ b/internal/versions/gover.go @@ -0,0 +1,172 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This is a fork of internal/gover for use by x/tools until +// go1.21 and earlier are no longer supported by x/tools. 
+ +package versions + +import "strings" + +// A gover is a parsed Go gover: major[.Minor[.Patch]][kind[pre]] +// The numbers are the original decimal strings to avoid integer overflows +// and since there is very little actual math. (Probably overflow doesn't matter in practice, +// but at the time this code was written, there was an existing test that used +// go1.99999999999, which does not fit in an int on 32-bit platforms. +// The "big decimal" representation avoids the problem entirely.) +type gover struct { + major string // decimal + minor string // decimal or "" + patch string // decimal or "" + kind string // "", "alpha", "beta", "rc" + pre string // decimal or "" +} + +// compare returns -1, 0, or +1 depending on whether +// x < y, x == y, or x > y, interpreted as toolchain versions. +// The versions x and y must not begin with a "go" prefix: just "1.21" not "go1.21". +// Malformed versions compare less than well-formed versions and equal to each other. +// The language version "1.21" compares less than the release candidate and eventual releases "1.21rc1" and "1.21.0". +func compare(x, y string) int { + vx := parse(x) + vy := parse(y) + + if c := cmpInt(vx.major, vy.major); c != 0 { + return c + } + if c := cmpInt(vx.minor, vy.minor); c != 0 { + return c + } + if c := cmpInt(vx.patch, vy.patch); c != 0 { + return c + } + if c := strings.Compare(vx.kind, vy.kind); c != 0 { // "" < alpha < beta < rc + return c + } + if c := cmpInt(vx.pre, vy.pre); c != 0 { + return c + } + return 0 +} + +// lang returns the Go language version. For example, lang("1.2.3") == "1.2". +func lang(x string) string { + v := parse(x) + if v.minor == "" || v.major == "1" && v.minor == "0" { + return v.major + } + return v.major + "." + v.minor +} + +// isValid reports whether the version x is valid. +func isValid(x string) bool { + return parse(x) != gover{} +} + +// parse parses the Go version string x into a version. +// It returns the zero version if x is malformed. 
+func parse(x string) gover { + var v gover + + // Parse major version. + var ok bool + v.major, x, ok = cutInt(x) + if !ok { + return gover{} + } + if x == "" { + // Interpret "1" as "1.0.0". + v.minor = "0" + v.patch = "0" + return v + } + + // Parse . before minor version. + if x[0] != '.' { + return gover{} + } + + // Parse minor version. + v.minor, x, ok = cutInt(x[1:]) + if !ok { + return gover{} + } + if x == "" { + // Patch missing is same as "0" for older versions. + // Starting in Go 1.21, patch missing is different from explicit .0. + if cmpInt(v.minor, "21") < 0 { + v.patch = "0" + } + return v + } + + // Parse patch if present. + if x[0] == '.' { + v.patch, x, ok = cutInt(x[1:]) + if !ok || x != "" { + // Note that we are disallowing prereleases (alpha, beta, rc) for patch releases here (x != ""). + // Allowing them would be a bit confusing because we already have: + // 1.21 < 1.21rc1 + // But a prerelease of a patch would have the opposite effect: + // 1.21.3rc1 < 1.21.3 + // We've never needed them before, so let's not start now. + return gover{} + } + return v + } + + // Parse prerelease. + i := 0 + for i < len(x) && (x[i] < '0' || '9' < x[i]) { + if x[i] < 'a' || 'z' < x[i] { + return gover{} + } + i++ + } + if i == 0 { + return gover{} + } + v.kind, x = x[:i], x[i:] + if x == "" { + return v + } + v.pre, x, ok = cutInt(x) + if !ok || x != "" { + return gover{} + } + + return v +} + +// cutInt scans the leading decimal number at the start of x to an integer +// and returns that value and the rest of the string. +func cutInt(x string) (n, rest string, ok bool) { + i := 0 + for i < len(x) && '0' <= x[i] && x[i] <= '9' { + i++ + } + if i == 0 || x[0] == '0' && i != 1 { // no digits or unnecessary leading zero + return "", "", false + } + return x[:i], x[i:], true +} + +// cmpInt returns cmp.Compare(x, y) interpreting x and y as decimal numbers. +// (Copied from golang.org/x/mod/semver's compareInt.) 
+func cmpInt(x, y string) int { + if x == y { + return 0 + } + if len(x) < len(y) { + return -1 + } + if len(x) > len(y) { + return +1 + } + if x < y { + return -1 + } else { + return +1 + } +} diff --git a/internal/versions/toolchain.go b/internal/versions/toolchain.go new file mode 100644 index 00000000000..377bf7a53b4 --- /dev/null +++ b/internal/versions/toolchain.go @@ -0,0 +1,14 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package versions + +// toolchain is maximum version (<1.22) that the go toolchain used +// to build the current tool is known to support. +// +// When a tool is built with >=1.22, the value of toolchain is unused. +// +// x/tools does not support building with go <1.18. So we take this +// as the minimum possible maximum. +var toolchain string = Go1_18 diff --git a/internal/versions/toolchain_go119.go b/internal/versions/toolchain_go119.go new file mode 100644 index 00000000000..f65beed9d83 --- /dev/null +++ b/internal/versions/toolchain_go119.go @@ -0,0 +1,14 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.19 +// +build go1.19 + +package versions + +func init() { + if Compare(toolchain, Go1_19) < 0 { + toolchain = Go1_19 + } +} diff --git a/internal/versions/toolchain_go120.go b/internal/versions/toolchain_go120.go new file mode 100644 index 00000000000..1a9efa126cd --- /dev/null +++ b/internal/versions/toolchain_go120.go @@ -0,0 +1,14 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build go1.20 +// +build go1.20 + +package versions + +func init() { + if Compare(toolchain, Go1_20) < 0 { + toolchain = Go1_20 + } +} diff --git a/internal/versions/toolchain_go121.go b/internal/versions/toolchain_go121.go new file mode 100644 index 00000000000..b7ef216dfec --- /dev/null +++ b/internal/versions/toolchain_go121.go @@ -0,0 +1,14 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.21 +// +build go1.21 + +package versions + +func init() { + if Compare(toolchain, Go1_21) < 0 { + toolchain = Go1_21 + } +} diff --git a/internal/versions/types.go b/internal/versions/types.go new file mode 100644 index 00000000000..562eef21fa2 --- /dev/null +++ b/internal/versions/types.go @@ -0,0 +1,19 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package versions + +import ( + "go/types" +) + +// GoVersion returns the Go version of the type package. +// It returns zero if no version can be determined. +func GoVersion(pkg *types.Package) string { + // TODO(taking): x/tools can call GoVersion() [from 1.21] after 1.25. + if pkg, ok := any(pkg).(interface{ GoVersion() string }); ok { + return pkg.GoVersion() + } + return "" +} diff --git a/internal/versions/types_go121.go b/internal/versions/types_go121.go new file mode 100644 index 00000000000..b4345d3349e --- /dev/null +++ b/internal/versions/types_go121.go @@ -0,0 +1,30 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !go1.22 +// +build !go1.22 + +package versions + +import ( + "go/ast" + "go/types" +) + +// FileVersion returns a language version (<=1.21) derived from runtime.Version() +// or an unknown future version. 
+func FileVersion(info *types.Info, file *ast.File) string {
+	// In x/tools built with Go <= 1.21, we do not have Info.FileVersions
+	// available. We use a go version derived from the toolchain used to
+	// compile the tool by default.
+	// This will be <= go1.21. We take this as the maximum version that
+	// this tool can support.
+	//
+	// There are no features currently in x/tools that need to tell fine grained
+	// differences for versions <1.22.
+	return toolchain
+}
+
+// InitFileVersions is a noop when compiled with this Go version.
+func InitFileVersions(*types.Info) {}
diff --git a/internal/versions/types_go122.go b/internal/versions/types_go122.go
new file mode 100644
index 00000000000..e8180632a52
--- /dev/null
+++ b/internal/versions/types_go122.go
@@ -0,0 +1,41 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.22
+// +build go1.22
+
+package versions
+
+import (
+	"go/ast"
+	"go/types"
+)
+
+// FileVersion returns a file's Go version.
+// The reported version is an unknown Future version if a
+// version cannot be determined.
+func FileVersion(info *types.Info, file *ast.File) string {
+	// In tools built with Go >= 1.22, the Go version of a file
+	// follows a cascade of sources:
+	// 1) types.Info.FileVersion, which follows the cascade:
+	//   1.a) file version (ast.File.GoVersion),
+	//   1.b) the package version (types.Config.GoVersion), or
+	// 2) is some unknown Future version.
+	//
+	// File versions require a valid package version to be provided to types
+	// in Config.GoVersion. Config.GoVersion is either from the package's module
+	// or the toolchain (go run). This value should be provided by go/packages
+	// or unitchecker.Config.GoVersion.
+	if v := info.FileVersions[file]; IsValid(v) {
+		return v
+	}
+	// Note: we could instead return runtime.Version() [if valid]. 
+ // This would act as a max version on what a tool can support. + return Future +} + +// InitFileVersions initializes info to record Go versions for Go files. +func InitFileVersions(info *types.Info) { + info.FileVersions = make(map[*ast.File]string) +} diff --git a/internal/versions/types_test.go b/internal/versions/types_test.go new file mode 100644 index 00000000000..59f6d18b45f --- /dev/null +++ b/internal/versions/types_test.go @@ -0,0 +1,87 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package versions_test + +import ( + "fmt" + "go/ast" + "go/importer" + "go/parser" + "go/token" + "go/types" + "testing" + + "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/internal/versions" +) + +func Test(t *testing.T) { + testenv.NeedsGo1Point(t, 22) + + var contents = map[string]string{ + "gobuild.go": ` + //go:build go1.23 + package p + `, + "noversion.go": ` + package p + `, + } + type fileTest struct { + fname string + want string + } + for _, item := range []struct { + goversion string + pversion string + tests []fileTest + }{ + {"", "", []fileTest{{"noversion.go", ""}, {"gobuild.go", ""}}}, + {"go1.22", "go1.22", []fileTest{{"noversion.go", "go1.22"}, {"gobuild.go", "go1.23"}}}, + } { + name := fmt.Sprintf("types.Config{GoVersion:%q}", item.goversion) + t.Run(name, func(t *testing.T) { + fset := token.NewFileSet() + files := make([]*ast.File, len(item.tests)) + for i, test := range item.tests { + files[i] = parse(t, fset, test.fname, contents[test.fname]) + } + pkg, info := typeCheck(t, fset, files, item.goversion) + if got, want := versions.GoVersion(pkg), item.pversion; versions.Compare(got, want) != 0 { + t.Errorf("GoVersion()=%q. expected %q", got, want) + } + if got := versions.FileVersion(info, nil); got != "" { + t.Errorf(`FileVersions(nil)=%q. 
expected ""`, got)
+			}
+			for i, test := range item.tests {
+				if got, want := versions.FileVersion(info, files[i]), test.want; got != want {
+					t.Errorf("FileVersions(%s)=%q. expected %q", test.fname, got, want)
+				}
+			}
+		})
+	}
+}
+
+func parse(t *testing.T, fset *token.FileSet, name, src string) *ast.File {
+	file, err := parser.ParseFile(fset, name, src, 0)
+	if err != nil {
+		t.Fatal(err)
+	}
+	return file
+}
+
+func typeCheck(t *testing.T, fset *token.FileSet, files []*ast.File, goversion string) (*types.Package, *types.Info) {
+	conf := types.Config{
+		Importer:  importer.Default(),
+		GoVersion: goversion,
+	}
+	info := types.Info{}
+	versions.InitFileVersions(&info)
+	pkg, err := conf.Check("", fset, files, &info)
+	if err != nil {
+		t.Fatal(err)
+	}
+	return pkg, &info
+}
diff --git a/internal/versions/versions.go b/internal/versions/versions.go
new file mode 100644
index 00000000000..8d1f7453dbf
--- /dev/null
+++ b/internal/versions/versions.go
@@ -0,0 +1,57 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package versions
+
+import (
+	"strings"
+)
+
+// Note: If we use build tags to use go/versions when go >=1.22,
+// we run into go.dev/issue/53737. Under some operations users would see an
+// import of "go/versions" even if they would not compile the file.
+// For example, during `go get -u ./...` (go.dev/issue/64490) we do not try to include it.
+// For this reason, this library is just a clone of go/versions for the moment.
+
+// Lang returns the Go language version for version x.
+// If x is not a valid version, Lang returns the empty string. 
+// For example: +// +// Lang("go1.21rc2") = "go1.21" +// Lang("go1.21.2") = "go1.21" +// Lang("go1.21") = "go1.21" +// Lang("go1") = "go1" +// Lang("bad") = "" +// Lang("1.21") = "" +func Lang(x string) string { + v := lang(stripGo(x)) + if v == "" { + return "" + } + return x[:2+len(v)] // "go"+v without allocation +} + +// Compare returns -1, 0, or +1 depending on whether +// x < y, x == y, or x > y, interpreted as Go versions. +// The versions x and y must begin with a "go" prefix: "go1.21" not "1.21". +// Invalid versions, including the empty string, compare less than +// valid versions and equal to each other. +// The language version "go1.21" compares less than the +// release candidate and eventual releases "go1.21rc1" and "go1.21.0". +// Custom toolchain suffixes are ignored during comparison: +// "go1.21.0" and "go1.21.0-bigcorp" are equal. +func Compare(x, y string) int { return compare(stripGo(x), stripGo(y)) } + +// IsValid reports whether the version x is valid. +func IsValid(x string) bool { return isValid(stripGo(x)) } + +// stripGo converts from a "go1.21" version to a "1.21" version. +// If v does not start with "go", stripGo returns the empty string (a known invalid version). +func stripGo(v string) string { + v, _, _ = strings.Cut(v, "-") // strip -bigcorp suffix. + if len(v) < 2 || v[:2] != "go" { + return "" + } + return v[2:] +} diff --git a/internal/versions/versions_test.go b/internal/versions/versions_test.go new file mode 100644 index 00000000000..dbc1c555d22 --- /dev/null +++ b/internal/versions/versions_test.go @@ -0,0 +1,256 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package versions_test + +import ( + "go/ast" + "go/parser" + "go/token" + "go/types" + "testing" + + "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/internal/versions" +) + +func TestIsValid(t *testing.T) { + // valid versions + for _, x := range []string{ + "go1.21", + "go1.21.2", + "go1.21rc", + "go1.21rc2", + "go0.0", // ?? + "go1", + "go2", + "go1.20.0-bigcorp", + } { + if !versions.IsValid(x) { + t.Errorf("expected versions.IsValid(%q) to hold", x) + } + } + + // invalid versions + for _, x := range []string{ + "", + "bad", + "1.21", + "v1.21", + "go", + "goAA", + "go2_3", + "go1.BB", + "go1.21.", + "go1.21.2_2", + "go1.21rc_2", + "go1.21rc2_", + "go1.600+auto", + } { + if versions.IsValid(x) { + t.Errorf("expected versions.IsValid(%q) to not hold", x) + } + } +} + +func TestVersionComparisons(t *testing.T) { + for _, item := range []struct { + x, y string + want int + }{ + // All comparisons of go2, go1.21.2, go1.21rc2, go1.21rc2, go1, go0.0, "", bad + {"go2", "go2", 0}, + {"go2", "go1.21.2", +1}, + {"go2", "go1.21rc2", +1}, + {"go2", "go1.21rc", +1}, + {"go2", "go1.21", +1}, + {"go2", "go1", +1}, + {"go2", "go0.0", +1}, + {"go2", "", +1}, + {"go2", "bad", +1}, + {"go1.21.2", "go1.21.2", 0}, + {"go1.21.2", "go1.21rc2", +1}, + {"go1.21.2", "go1.21rc", +1}, + {"go1.21.2", "go1.21", +1}, + {"go1.21.2", "go1", +1}, + {"go1.21.2", "go0.0", +1}, + {"go1.21.2", "", +1}, + {"go1.21.2", "bad", +1}, + {"go1.21rc2", "go1.21rc2", 0}, + {"go1.21rc2", "go1.21rc", +1}, + {"go1.21rc2", "go1.21", +1}, + {"go1.21rc2", "go1", +1}, + {"go1.21rc2", "go0.0", +1}, + {"go1.21rc2", "", +1}, + {"go1.21rc2", "bad", +1}, + {"go1.21rc", "go1.21rc", 0}, + {"go1.21rc", "go1.21", +1}, + {"go1.21rc", "go1", +1}, + {"go1.21rc", "go0.0", +1}, + {"go1.21rc", "", +1}, + {"go1.21rc", "bad", +1}, + {"go1.21", "go1.21", 0}, + {"go1.21", "go1", +1}, + {"go1.21", "go0.0", +1}, + {"go1.21", "", +1}, + {"go1.21", "bad", +1}, + {"go1", "go1", 0}, + {"go1", "go0.0", +1}, + {"go1", "", +1}, 
+ {"go1", "bad", +1}, + {"go0.0", "go0.0", 0}, + {"go0.0", "", +1}, + {"go0.0", "bad", +1}, + {"", "", 0}, + {"", "bad", 0}, + {"bad", "bad", 0}, + // Other tests. + {"go1.20", "go1.20.0-bigcorp", 0}, + {"go1.21", "go1.21.0-bigcorp", -1}, // Starting in Go 1.21, patch missing is different from explicit .0. + {"go1.21.0", "go1.21.0-bigcorp", 0}, // Starting in Go 1.21, patch missing is different from explicit .0. + {"go1.19rc1", "go1.19", -1}, + } { + got := versions.Compare(item.x, item.y) + if got != item.want { + t.Errorf("versions.Compare(%q, %q)=%d. expected %d", item.x, item.y, got, item.want) + } + reverse := versions.Compare(item.y, item.x) + if reverse != -got { + t.Errorf("versions.Compare(%q, %q)=%d. expected %d", item.y, item.x, reverse, -got) + } + } +} + +func TestLang(t *testing.T) { + for _, item := range []struct { + x string + want string + }{ + // valid + {"go1.21rc2", "go1.21"}, + {"go1.21.2", "go1.21"}, + {"go1.21", "go1.21"}, + {"go1", "go1"}, + // invalid + {"bad", ""}, + {"1.21", ""}, + } { + if got := versions.Lang(item.x); got != item.want { + t.Errorf("versions.Lang(%q)=%q. expected %q", item.x, got, item.want) + } + } + +} + +func TestKnown(t *testing.T) { + for _, v := range [...]string{ + versions.Go1_18, + versions.Go1_19, + versions.Go1_20, + versions.Go1_21, + versions.Go1_22, + } { + if !versions.IsValid(v) { + t.Errorf("Expected known version %q to be valid.", v) + } + if v != versions.Lang(v) { + t.Errorf("Expected known version %q == Lang(%q).", v, versions.Lang(v)) + } + } +} + +func TestAtLeast(t *testing.T) { + for _, item := range [...]struct { + v, release string + want bool + }{ + {versions.Future, versions.Go1_22, true}, + {versions.Go1_22, versions.Go1_22, true}, + {"go1.21", versions.Go1_22, false}, + {"invalid", versions.Go1_22, false}, + } { + if got := versions.AtLeast(item.v, item.release); got != item.want { + t.Errorf("AtLeast(%q, %q)=%v. 
wanted %v", item.v, item.release, got, item.want)
+		}
+	}
+}
+
+func TestBefore(t *testing.T) {
+	for _, item := range [...]struct {
+		v, release string
+		want       bool
+	}{
+		{versions.Future, versions.Go1_22, false},
+		{versions.Go1_22, versions.Go1_22, false},
+		{"go1.21", versions.Go1_22, true},
+		{"invalid", versions.Go1_22, true}, // invalid < Go1_22
+	} {
+		if got := versions.Before(item.v, item.release); got != item.want {
+			t.Errorf("Before(%q, %q)=%v. wanted %v", item.v, item.release, got, item.want)
+		}
+	}
+}
+
+func TestFileVersions122(t *testing.T) {
+	testenv.NeedsGo1Point(t, 22)
+
+	const source = `
+	package P
+	`
+	fset := token.NewFileSet()
+	f, err := parser.ParseFile(fset, "hello.go", source, 0)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	for _, conf := range []types.Config{
+		{GoVersion: versions.Go1_22},
+		{}, // GoVersion is unset.
+	} {
+		info := &types.Info{}
+		versions.InitFileVersions(info)
+
+		_, err = conf.Check("P", fset, []*ast.File{f}, info)
+		if err != nil {
+			t.Fatal(err)
+		}
+
+		v := versions.FileVersion(info, f)
+		if !versions.AtLeast(v, versions.Go1_22) {
+			t.Errorf("versions.AtLeast(%q, %q) expected to hold", v, versions.Go1_22)
+		}
+
+		if versions.Before(v, versions.Go1_22) {
+			t.Errorf("versions.Before(%q, %q) expected to be false", v, versions.Go1_22)
+		}
+
+		if conf.GoVersion == "" && v != versions.Future {
+			t.Error("Expected the FileVersion to be the Future when conf.GoVersion is unset")
+		}
+	}
+}
+
+func TestFileVersions121(t *testing.T) {
+	testenv.SkipAfterGo1Point(t, 21)
+
+	// If <1.22, info and file are ignored. 
+ v := versions.FileVersion(nil, nil) + oneof := map[string]bool{ + versions.Go1_18: true, + versions.Go1_19: true, + versions.Go1_20: true, + versions.Go1_21: true, + } + if !oneof[v] { + t.Errorf("FileVersion(...)=%q expected to be a known go version <1.22", v) + } + + if versions.AtLeast(v, versions.Go1_22) { + t.Errorf("versions.AtLeast(%q, %q) expected to be false", v, versions.Go1_22) + } + + if !versions.Before(v, versions.Go1_22) { + t.Errorf("versions.Before(%q, %q) expected to hold", v, versions.Go1_22) + } +} diff --git a/playground/socket/socket.go b/playground/socket/socket.go index c396aac5196..797dcc6dd4c 100644 --- a/playground/socket/socket.go +++ b/playground/socket/socket.go @@ -20,13 +20,13 @@ import ( "errors" "go/parser" "go/token" - exec "golang.org/x/sys/execabs" "io" "log" "net" "net/http" "net/url" "os" + "os/exec" "path/filepath" "runtime" "strings" diff --git a/present/parse_test.go b/present/parse_test.go index 0e59857a3a0..dad57ea77ca 100644 --- a/present/parse_test.go +++ b/present/parse_test.go @@ -6,6 +6,7 @@ package present import ( "bytes" + "fmt" "html/template" "os" "os/exec" @@ -79,11 +80,13 @@ func diff(prefix string, name1 string, b1 []byte, name2 string, b2 []byte) ([]by cmd = "/bin/ape/diff" } - data, err := exec.Command(cmd, "-u", f1, f2).CombinedOutput() + data, err := exec.Command(cmd, "-u", f1, f2).Output() if len(data) > 0 { // diff exits with a non-zero status when the files don't match. // Ignore that failure as long as we get output. 
err = nil
+	} else if exit, ok := err.(*exec.ExitError); ok && len(exit.Stderr) > 0 {
+		err = fmt.Errorf("%w\nstderr:\n%s", err, exit.Stderr)
 	}
 
 	data = bytes.Replace(data, []byte(f1), []byte(name1), -1)
diff --git a/refactor/eg/eg_test.go b/refactor/eg/eg_test.go
index 4154e9a8f4e..36fd3add8a7 100644
--- a/refactor/eg/eg_test.go
+++ b/refactor/eg/eg_test.go
@@ -12,6 +12,7 @@ package eg_test
 import (
 	"bytes"
 	"flag"
+	"go/build"
 	"go/constant"
 	"go/parser"
 	"go/token"
@@ -47,9 +48,12 @@ func Test(t *testing.T) {
 		t.Skipf("skipping test on %q (no /usr/bin/diff)", runtime.GOOS)
 	}
 
+	ctx := build.Default // copy
+	ctx.CgoEnabled = false // don't use cgo
 	conf := loader.Config{
 		Fset:       token.NewFileSet(),
 		ParserMode: parser.ParseComments,
+		Build:      &ctx,
 	}
 
 	// Each entry is a single-file package.
diff --git a/refactor/rename/check.go b/refactor/rename/check.go
index 9f29b98a0a4..8350ad7bc32 100644
--- a/refactor/rename/check.go
+++ b/refactor/rename/check.go
@@ -13,6 +13,8 @@ import (
 	"go/types"
 
 	"golang.org/x/tools/go/loader"
+	"golang.org/x/tools/internal/typeparams"
+	"golang.org/x/tools/internal/typesinternal"
 	"golang.org/x/tools/refactor/satisfy"
 )
 
@@ -343,7 +345,7 @@ func forEachLexicalRef(info *loader.PackageInfo, obj types.Object, fn func(id *a
 			// Handle recursion ourselves for struct literals
 			// so we don't visit field identifiers.
 			tv := info.Types[n]
-			if _, ok := deref(tv.Type).Underlying().(*types.Struct); ok {
+			if is[*types.Struct](typeparams.CoreType(typeparams.Deref(tv.Type))) {
 				if n.Type != nil {
 					ast.Inspect(n.Type, visit)
 				}
@@ -435,8 +437,8 @@ func (r *renamer) checkStructField(from *types.Var) {
 		}
 		i++
 	}
-	if spec, ok := path[i].(*ast.TypeSpec); ok {
-		// This struct is also a named type.
+	if spec, ok := path[i].(*ast.TypeSpec); ok && !spec.Assign.IsValid() {
+		// This struct is also a defined type.
 		// We must check for direct (non-promoted) field/field
 		// and method/field conflicts. 
named := info.Defs[spec.Name].Type() @@ -449,7 +451,7 @@ func (r *renamer) checkStructField(from *types.Var) { return // skip checkSelections to avoid redundant errors } } else { - // This struct is not a named type. + // This struct is not a defined type. (It may be an alias.) // We need only check for direct (non-promoted) field/field conflicts. T := info.Types[tStruct].Type.Underlying().(*types.Struct) for i := 0; i < T.NumFields(); i++ { @@ -462,15 +464,14 @@ func (r *renamer) checkStructField(from *types.Var) { } } - // Renaming an anonymous field requires renaming the type too. e.g. + // Renaming an anonymous field requires renaming the TypeName too. e.g. // print(s.T) // if we rename T to U, // type T int // this and // var s struct {T} // this must change too. if from.Anonymous() { - if named, ok := from.Type().(*types.Named); ok { - r.check(named.Obj()) - } else if named, ok := deref(from.Type()).(*types.Named); ok { - r.check(named.Obj()) + // A TypeParam cannot appear as an anonymous field. + if t, ok := typesinternal.Unpointer(from.Type()).(hasTypeName); ok { + r.check(t.Obj()) } } @@ -478,6 +479,9 @@ func (r *renamer) checkStructField(from *types.Var) { r.checkSelections(from) } +// hasTypeName abstracts the named types, *types.{Named,Alias,TypeParam}. +type hasTypeName interface{ Obj() *types.TypeName } + // checkSelections checks that all uses and selections that resolve to // the specified object would continue to do so after the renaming. func (r *renamer) checkSelections(from types.Object) { @@ -591,7 +595,7 @@ func (r *renamer) checkMethod(from *types.Func) { // Check for conflict at point of declaration. // Check to ensure preservation of assignability requirements. 
R := recv(from).Type() - if isInterface(R) { + if types.IsInterface(R) { // Abstract method // declaration @@ -608,7 +612,7 @@ func (r *renamer) checkMethod(from *types.Func) { for _, info := range r.packages { // Start with named interface types (better errors) for _, obj := range info.Defs { - if obj, ok := obj.(*types.TypeName); ok && isInterface(obj.Type()) { + if obj, ok := obj.(*types.TypeName); ok && types.IsInterface(obj.Type()) { f, _, _ := types.LookupFieldOrMethod( obj.Type(), false, from.Pkg(), from.Name()) if f == nil { @@ -680,7 +684,7 @@ func (r *renamer) checkMethod(from *types.Func) { // yields abstract method I.f. This can make error // messages less than obvious. // - if !isInterface(key.RHS) { + if !types.IsInterface(key.RHS) { // The logic below was derived from checkSelections. rtosel := rmethods.Lookup(from.Pkg(), r.to) @@ -755,7 +759,7 @@ func (r *renamer) checkMethod(from *types.Func) { // for key := range r.satisfy() { // key = (lhs, rhs) where lhs is always an interface. 
- if isInterface(key.RHS) { + if types.IsInterface(key.RHS) { continue } rsel := r.msets.MethodSet(key.RHS).Lookup(from.Pkg(), from.Name()) @@ -777,7 +781,7 @@ func (r *renamer) checkMethod(from *types.Func) { var iface string I := recv(imeth).Type() - if named, ok := I.(*types.Named); ok { + if named, ok := I.(hasTypeName); ok { pos = named.Obj().Pos() iface = "interface " + named.Obj().Name() } else { @@ -845,14 +849,3 @@ func someUse(info *loader.PackageInfo, obj types.Object) *ast.Ident { } return nil } - -// -- Plundered from golang.org/x/tools/go/ssa ----------------- - -func isInterface(T types.Type) bool { return types.IsInterface(T) } - -func deref(typ types.Type) types.Type { - if p, _ := typ.(*types.Pointer); p != nil { - return p.Elem() - } - return typ -} diff --git a/refactor/rename/mvpkg.go b/refactor/rename/mvpkg.go index 58fad6be345..78897112278 100644 --- a/refactor/rename/mvpkg.go +++ b/refactor/rename/mvpkg.go @@ -18,9 +18,9 @@ import ( "go/build" "go/format" "go/token" - exec "golang.org/x/sys/execabs" "log" "os" + "os/exec" "path" "path/filepath" "regexp" diff --git a/refactor/rename/rename.go b/refactor/rename/rename.go index a80381c84b1..a5a59e97488 100644 --- a/refactor/rename/rename.go +++ b/refactor/rename/rename.go @@ -17,10 +17,10 @@ import ( "go/parser" "go/token" "go/types" - exec "golang.org/x/sys/execabs" "io" "log" "os" + "os/exec" "path" "regexp" "sort" @@ -323,7 +323,7 @@ func Main(ctxt *build.Context, offsetFlag, fromFlag, to string) error { for _, obj := range fromObjects { if obj, ok := obj.(*types.Func); ok { recv := obj.Type().(*types.Signature).Recv() - if recv != nil && isInterface(recv.Type().Underlying()) { + if recv != nil && types.IsInterface(recv.Type()) { r.changeMethods = true break } @@ -588,7 +588,7 @@ func diff(filename string, content []byte) error { } defer os.Remove(renamed) - diff, err := exec.Command(DiffCmd, "-u", filename, renamed).CombinedOutput() + diff, err := exec.Command(DiffCmd, "-u", filename, 
renamed).Output() if len(diff) > 0 { // diff exits with a non-zero status when the files don't match. // Ignore that failure as long as we get output. @@ -596,6 +596,9 @@ func diff(filename string, content []byte) error { return nil } if err != nil { + if exit, ok := err.(*exec.ExitError); ok && len(exit.Stderr) > 0 { + err = fmt.Errorf("%w\nstderr:\n%s", err, exit.Stderr) + } return fmt.Errorf("computing diff: %v", err) } return nil diff --git a/refactor/rename/rename_test.go b/refactor/rename/rename_test.go index 38c59c9d448..58b4f5eeb0c 100644 --- a/refactor/rename/rename_test.go +++ b/refactor/rename/rename_test.go @@ -467,6 +467,7 @@ func TestRewrites(t *testing.T) { ctxt *build.Context // nil => use previous offset, from, to string // values of the -from/-offset and -to flags want map[string]string // contents of updated files + alias bool // requires materialized aliases }{ // Elimination of renaming import. { @@ -764,6 +765,78 @@ type T2 int type U struct{ *T2 } var _ = U{}.T2 +`, + }, + }, + // Renaming of embedded field alias. + { + alias: true, + ctxt: main(`package main + +type T int +type A = T +type U struct{ A } + +var _ = U{}.A +var a A +`), + offset: "/go/src/main/0.go:#68", to: "A2", // A in "U{}.A" + want: map[string]string{ + "/go/src/main/0.go": `package main + +type T int +type A2 = T +type U struct{ A2 } + +var _ = U{}.A2 +var a A2 +`, + }, + }, + // Renaming of embedded field pointer to alias. 
+ { + alias: true, + ctxt: main(`package main + +type T int +type A = T +type U struct{ *A } + +var _ = U{}.A +var a A +`), + offset: "/go/src/main/0.go:#69", to: "A2", // A in "U{}.A" + want: map[string]string{ + "/go/src/main/0.go": `package main + +type T int +type A2 = T +type U struct{ *A2 } + +var _ = U{}.A2 +var a A2 +`, + }, + }, + // Renaming of alias + { + ctxt: main(`package main + +type A = int + +func _() A { + return A(0) +} +`), + offset: "/go/src/main/0.go:#49", to: "A2", // A in "A(0)" + want: map[string]string{ + "/go/src/main/0.go": `package main + +type A2 = int + +func _() A2 { + return A2(0) +} `, }, }, @@ -1247,6 +1320,14 @@ func main() { return nil } + if !test.alias { + t.Setenv("GODEBUG", "gotypesalias=0") + } else if !strings.Contains(fmt.Sprint(build.Default.ReleaseTags), " go1.22") { + t.Skip("skipping test that requires materialized type aliases") + } else { + t.Setenv("GODEBUG", "gotypesalias=1") + } + err := Main(ctxt, test.offset, test.from, test.to) var prefix string if test.offset == "" { diff --git a/refactor/rename/spec.go b/refactor/rename/spec.go index 22a268a7942..ab7dbc3a92d 100644 --- a/refactor/rename/spec.go +++ b/refactor/rename/spec.go @@ -25,6 +25,7 @@ import ( "golang.org/x/tools/go/buildutil" "golang.org/x/tools/go/loader" + "golang.org/x/tools/internal/typesinternal" ) // A spec specifies an entity to rename. @@ -458,17 +459,14 @@ func findObjects(info *loader.PackageInfo, spec *spec) ([]types.Object, error) { // search within named type. obj, _, _ := types.LookupFieldOrMethod(tName.Type(), true, info.Pkg, spec.typeMember) if obj == nil { - return nil, fmt.Errorf("cannot find field or method %q of %s %s.%s", - spec.typeMember, typeKind(tName.Type()), info.Pkg.Path(), tName.Name()) + return nil, fmt.Errorf("cannot find field or method %q of %s.%s", + spec.typeMember, info.Pkg.Path(), tName.Name()) } if spec.searchFor == "" { // If it is an embedded field, return the type of the field. 
if v, ok := obj.(*types.Var); ok && v.Anonymous() { - switch t := v.Type().(type) { - case *types.Pointer: - return []types.Object{t.Elem().(*types.Named).Obj()}, nil - case *types.Named: + if t, ok := typesinternal.Unpointer(v.Type()).(hasTypeName); ok { return []types.Object{t.Obj()}, nil } } @@ -481,7 +479,7 @@ func findObjects(info *loader.PackageInfo, spec *spec) ([]types.Object, error) { spec.searchFor, objectKind(obj), info.Pkg.Path(), tName.Name(), obj.Name()) } - if isInterface(tName.Type()) { + if types.IsInterface(tName.Type()) { return nil, fmt.Errorf("cannot search for local name %q within abstract method (%s.%s).%s", spec.searchFor, info.Pkg.Path(), tName.Name(), searchFunc.Name()) } diff --git a/refactor/rename/util.go b/refactor/rename/util.go index 258ba786cd7..bc6dc10cac9 100644 --- a/refactor/rename/util.go +++ b/refactor/rename/util.go @@ -22,7 +22,7 @@ func objectKind(obj types.Object) string { switch obj := obj.(type) { case *types.PkgName: return "imported package name" - case *types.TypeName: + case *types.TypeName: // defined type | alias | type parameter return "type" case *types.Var: if obj.IsField() { @@ -37,10 +37,6 @@ func objectKind(obj types.Object) string { return strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types.")) } -func typeKind(T types.Type) string { - return strings.ToLower(strings.TrimPrefix(reflect.TypeOf(T.Underlying()).String(), "*types.")) -} - // NB: for renamings, blank is not considered valid. 
func isValidIdentifier(id string) bool { if id == "" || id == "_" { @@ -102,3 +98,8 @@ func sameFile(x, y string) bool { } func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } + +func is[T any](x any) bool { + _, ok := x.(T) + return ok +} diff --git a/refactor/satisfy/find.go b/refactor/satisfy/find.go index 9e60af3b618..bab0e3cfd3f 100644 --- a/refactor/satisfy/find.go +++ b/refactor/satisfy/find.go @@ -355,7 +355,7 @@ func (f *Finder) expr(e ast.Expr) types.Type { f.sig = saved case *ast.CompositeLit: - switch T := coreType(deref(tv.Type)).(type) { + switch T := coreType(typeparams.Deref(tv.Type)).(type) { case *types.Struct: for i, elem := range e.Elts { if kv, ok := elem.(*ast.KeyValueExpr); ok { @@ -411,7 +411,7 @@ func (f *Finder) expr(e ast.Expr) types.Type { } } - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: // f[X, Y] -- generic instantiation case *ast.SliceExpr: @@ -690,7 +690,7 @@ func (f *Finder) stmt(s ast.Stmt) { case *types.Map: xelem = ux.Elem() case *types.Pointer: // *array - xelem = coreType(deref(ux)).(*types.Array).Elem() + xelem = coreType(typeparams.Deref(ux)).(*types.Array).Elem() case *types.Slice: xelem = ux.Elem() } @@ -708,14 +708,6 @@ func (f *Finder) stmt(s ast.Stmt) { // -- Plundered from golang.org/x/tools/go/ssa ----------------- -// deref returns a pointer's element type; otherwise it returns typ. 
-func deref(typ types.Type) types.Type { - if p, ok := coreType(typ).(*types.Pointer); ok { - return p.Elem() - } - return typ -} - func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } func isInterface(T types.Type) bool { return types.IsInterface(T) } @@ -732,6 +724,6 @@ func instance(info *types.Info, expr ast.Expr) bool { default: return false } - _, ok := typeparams.GetInstances(info)[id] + _, ok := info.Instances[id] return ok } diff --git a/refactor/satisfy/find_test.go b/refactor/satisfy/find_test.go index 2cbd8c15ca3..daa8b219ef2 100644 --- a/refactor/satisfy/find_test.go +++ b/refactor/satisfy/find_test.go @@ -15,17 +15,13 @@ import ( "sort" "testing" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/versions" "golang.org/x/tools/refactor/satisfy" ) // This test exercises various operations on core types of type parameters. // (It also provides pretty decent coverage of the non-generic operations.) func TestGenericCoreOperations(t *testing.T) { - if !typeparams.Enabled { - t.Skip("!typeparams.Enabled") - } - const src = `package foo import "unsafe" @@ -224,10 +220,11 @@ func constraints(t *testing.T, src string) []string { Defs: make(map[*ast.Ident]types.Object), Uses: make(map[*ast.Ident]types.Object), Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), Scopes: make(map[ast.Node]*types.Scope), Selections: make(map[*ast.SelectorExpr]*types.Selection), } - typeparams.InitInstanceInfo(info) + versions.InitFileVersions(info) conf := types.Config{ Importer: importer.Default(), }