Update golang.org/x/lint commit hash to 738671d (#223)

Update golang.org/x/lint commit hash to 738671d

Reviewed-on: https://kolaente.dev/vikunja/api/pulls/223

Authored by renovate on 2020-04-07 07:29:05 +00:00, committed by konrad
parent 1448d9de98
commit 424cf80e5c
33 changed files with 983 additions and 1941 deletions

go.mod (3 changed lines)

@@ -70,10 +70,9 @@ require (
 	github.com/ulule/limiter/v3 v3.3.0
 	github.com/urfave/cli v1.22.2 // indirect
 	golang.org/x/crypto v0.0.0-20200208060501-ecb85df21340
-	golang.org/x/lint v0.0.0-20190409202823-959b441ac422
+	golang.org/x/lint v0.0.0-20200302205851-738671d3881b
 	golang.org/x/net v0.0.0-20200202094626-16171245cfb2 // indirect
 	golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5 // indirect
-	golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d // indirect
 	gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
 	gopkg.in/d4l3k/messagediff.v1 v1.2.1
 	gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df

go.sum (7 changed lines)

@@ -345,6 +345,7 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U
 golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20190621222207-cc06ce4a13d4 h1:ydJNl0ENAG67pFbB+9tfhiL2pYqLhfoaZFw/cjLhY4A=
 golang.org/x/crypto v0.0.0-20190621222207-cc06ce4a13d4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
 golang.org/x/crypto v0.0.0-20200208060501-ecb85df21340 h1:KOcEaR10tFr7gdJV2GCKw8Os5yED1u1aOqHjOAb6d2Y=
 golang.org/x/crypto v0.0.0-20200208060501-ecb85df21340/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
@@ -354,6 +355,9 @@ golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvx
 golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
 golang.org/x/lint v0.0.0-20190409202823-959b441ac422 h1:QzoH/1pFpZguR8NrRHLcO6jKqfv2zpuSqZLgdm7ZmjI=
 golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20200302205851-738671d3881b h1:Wh+f8QHJXR411sJR8/vRBTZ7YapZaRvUcLFFJhusH0k=
+golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -431,7 +435,10 @@ golang.org/x/tools v0.0.0-20190628034336-212fb13d595e h1:ZlQjfVdpDxeqxRfmO30CdqW
 golang.org/x/tools v0.0.0-20190628034336-212fb13d595e/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
 golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d h1:/iIZNFGxc/a7C3yWjGcnboV+Tkc7mxr+p6fDztwoxuM=
 golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7 h1:EBZoQjiKKPaLbPrbpssUfuHtwM6KV/vb4U85g/cigFY=
+golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
 google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
 google.golang.org/appengine v1.3.0 h1:FBSsiFRMz3LBeXIomRnVzrQwSDj4ibvcRexLG0LZGQk=

vendor/golang.org/x/lint/README.md (generated, vendored; 2 changed lines)

@@ -78,7 +78,7 @@ Optionally, add this to your `~/.vimrc` to automatically run `golint` on `:w`
 Add this to your `.emacs` file:
-(add-to-list 'load-path (concat (getenv "GOPATH") "/src/github.com/golang/lint/misc/emacs"))
+(add-to-list 'load-path (concat (getenv "GOPATH") "/src/golang.org/x/lint/misc/emacs/"))
 (require 'golint)
 If you have multiple entries in your GOPATH, replace `$GOPATH` with the right value.

vendor/golang.org/x/lint/go.mod (generated, vendored; 4 changed lines)

@@ -1,3 +1,5 @@
 module golang.org/x/lint
 
-require golang.org/x/tools v0.0.0-20190311212946-11955173bddd
+go 1.11
+
+require golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7

vendor/golang.org/x/lint/go.sum (generated, vendored; 12 changed lines)

@@ -1,6 +1,12 @@
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/tools v0.0.0-20190311212946-11955173bddd h1:/e+gpKk9r3dJobndpTytxS2gOy6m5uvpg+ISQoEcusQ=
-golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7 h1:EBZoQjiKKPaLbPrbpssUfuHtwM6KV/vb4U85g/cigFY=
+golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

vendor/golang.org/x/lint/lint.go (generated, vendored; 80 changed lines)

@@ -198,7 +198,6 @@ func (f *file) lint() {
 	f.lintBlankImports()
 	f.lintExported()
 	f.lintNames()
-	f.lintVarDecls()
 	f.lintElses()
 	f.lintRanges()
 	f.lintErrorf()
@@ -840,6 +839,7 @@ var commonMethods = map[string]bool{
 	"ServeHTTP": true,
 	"String": true,
 	"Write": true,
+	"Unwrap": true,
 }
 
 // lintFuncDoc examines doc comments on functions and methods.
@@ -977,84 +977,6 @@ var zeroLiteral = map[string]bool{
 	"0i": true,
 }
 
-// lintVarDecls examines variable declarations. It complains about declarations with
-// redundant LHS types that can be inferred from the RHS.
-func (f *file) lintVarDecls() {
-	var lastGen *ast.GenDecl // last GenDecl entered.
-
-	f.walk(func(node ast.Node) bool {
-		switch v := node.(type) {
-		case *ast.GenDecl:
-			if v.Tok != token.CONST && v.Tok != token.VAR {
-				return false
-			}
-			lastGen = v
-			return true
-		case *ast.ValueSpec:
-			if lastGen.Tok == token.CONST {
-				return false
-			}
-			if len(v.Names) > 1 || v.Type == nil || len(v.Values) == 0 {
-				return false
-			}
-			rhs := v.Values[0]
-			// An underscore var appears in a common idiom for compile-time interface satisfaction,
-			// as in "var _ Interface = (*Concrete)(nil)".
-			if isIdent(v.Names[0], "_") {
-				return false
-			}
-			// If the RHS is a zero value, suggest dropping it.
-			zero := false
-			if lit, ok := rhs.(*ast.BasicLit); ok {
-				zero = zeroLiteral[lit.Value]
-			} else if isIdent(rhs, "nil") {
-				zero = true
-			}
-			if zero {
-				f.errorf(rhs, 0.9, category("zero-value"), "should drop = %s from declaration of var %s; it is the zero value", f.render(rhs), v.Names[0])
-				return false
-			}
-			lhsTyp := f.pkg.typeOf(v.Type)
-			rhsTyp := f.pkg.typeOf(rhs)
-
-			if !validType(lhsTyp) || !validType(rhsTyp) {
-				// Type checking failed (often due to missing imports).
-				return false
-			}
-
-			if !types.Identical(lhsTyp, rhsTyp) {
-				// Assignment to a different type is not redundant.
-				return false
-			}
-
-			// The next three conditions are for suppressing the warning in situations
-			// where we were unable to typecheck.
-
-			// If the LHS type is an interface, don't warn, since it is probably a
-			// concrete type on the RHS. Note that our feeble lexical check here
-			// will only pick up interface{} and other literal interface types;
-			// that covers most of the cases we care to exclude right now.
-			if _, ok := v.Type.(*ast.InterfaceType); ok {
-				return false
-			}
-			// If the RHS is an untyped const, only warn if the LHS type is its default type.
-			if defType, ok := f.isUntypedConst(rhs); ok && !isIdent(v.Type, defType) {
-				return false
-			}
-
-			f.errorf(v.Type, 0.8, category("type-inference"), "should omit type %s from declaration of var %s; it will be inferred from the right-hand side", f.render(v.Type), v.Names[0])
-			return false
-		}
-		return true
-	})
-}
-
-func validType(T types.Type) bool {
-	return T != nil &&
-		T != types.Typ[types.Invalid] &&
-		!strings.Contains(T.String(), "invalid type") // good but not foolproof
-}
-
 // lintElses examines else blocks. It complains about any else block whose if block ends in a return.
 func (f *file) lintElses() {
 	// We don't want to flag if { } else if { } else { } constructions.
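Context for the "Unwrap" addition above: commonMethods lists well-known method names that golint exempts from its doc-comment check on exported methods. A minimal sketch of the kind of method this now covers; the QueryError type below is hypothetical and not taken from this repository or from x/lint:

package example

// QueryError wraps an underlying error with the query that caused it.
// (Hypothetical type, used only for illustration.)
type QueryError struct {
	Query string
	Err   error
}

func (e *QueryError) Error() string { return "query " + e.Query + ": " + e.Err.Error() }

// With "Unwrap" in commonMethods, golint no longer reports
// "exported method QueryError.Unwrap should have comment or be unexported"
// for error-wrapping methods like this one, just as it already skipped
// Error, String, Read, Write and ServeHTTP.
func (e *QueryError) Unwrap() error { return e.Err }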

(another changed file)

@@ -275,9 +275,10 @@ func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (del
 			// We deleted an entry but now there may be
 			// a blank line-sized hole where the import was.
-			if line-lastLine > 1 {
+			if line-lastLine > 1 || !gen.Rparen.IsValid() {
 				// There was a blank line immediately preceding the deleted import,
-				// so there's no need to close the hole.
+				// so there's no need to close the hole. The right parenthesis is
+				// invalid after AddImport to an import statement without parenthesis.
 				// Do nothing.
 			} else if line != fset.File(gen.Rparen).LineCount() {
 				// There was no blank line. Close the hole.
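The new `|| !gen.Rparen.IsValid()` condition guards exactly the situation the added comment describes: after AddImport has appended to a file whose import declaration has no parentheses, the declaration's Rparen position can be invalid. A small, hedged sketch of that sequence using the public astutil API (file name and source text are invented for illustration):

package main

import (
	"go/format"
	"go/parser"
	"go/token"
	"os"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	// A file with a single, non-parenthesized import declaration.
	src := "package demo\n\nimport \"fmt\"\n\nfunc demo() { fmt.Println(\"hi\") }\n"

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// Per the comment added in the diff above, after AddImport to an import
	// statement without parentheses the declaration's Rparen can stay invalid;
	// the new condition keeps the hole-closing logic in DeleteNamedImport
	// (which DeleteImport delegates to) from using that invalid position.
	astutil.AddImport(fset, f, "strings")
	astutil.DeleteImport(fset, f, "strings")

	if err := format.Node(os.Stdout, fset, f); err != nil {
		panic(err)
	}
}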

(another changed file)

@@ -90,7 +90,7 @@ func (in *Inspector) Preorder(types []ast.Node, f func(ast.Node)) {
 // The types argument, if non-empty, enables type-based filtering of
 // events. The function f if is called only for nodes whose type
 // matches an element of the types slice.
-func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (prune bool)) {
+func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (proceed bool)) {
 	mask := maskOf(types)
 	for i := 0; i < len(in.events); {
 		ev := in.events[i]
@@ -114,7 +114,7 @@ func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (prun
 // supplies each call to f an additional argument, the current
 // traversal stack. The stack's first element is the outermost node,
 // an *ast.File; its last is the innermost, n.
-func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, stack []ast.Node) (prune bool)) {
+func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, stack []ast.Node) (proceed bool)) {
 	mask := maskOf(types)
 	var stack []ast.Node
 	for i := 0; i < len(in.events); {
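The two signature changes above only rename the documented result from prune to proceed; the traversal behavior is unchanged: returning true from the callback descends into the node's children, returning false skips them. A short, self-contained sketch of Inspector.Nodes (the demo source string is invented for illustration):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/ast/inspector"
)

func main() {
	src := "package demo\n\nfunc add(a, b int) int { return a + b }\n\nfunc sub(a, b int) int { return a - b }\n"

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}

	in := inspector.New([]*ast.File{f})

	// The boolean result is the renamed "proceed" value: return true to
	// descend into the node's children, false to skip them.
	in.Nodes([]ast.Node{(*ast.FuncDecl)(nil)}, func(n ast.Node, push bool) bool {
		if push {
			fmt.Println("func:", n.(*ast.FuncDecl).Name.Name)
		}
		return true
	})
}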

(another changed file)

@@ -2,9 +2,7 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-package cgo
-
-// This file handles cgo preprocessing of files containing `import "C"`.
+// Package cgo handles cgo preprocessing of files containing `import "C"`.
 //
 // DESIGN
 //
@@ -51,6 +49,8 @@ package cgo
 // its handling of function calls, analogous to the treatment of map
 // lookups in which y=m[k] and y,ok=m[k] are both legal.
 
+package cgo
+
 import (
 	"fmt"
 	"go/ast"

(another changed file)

@@ -344,7 +344,7 @@ func (p *parser) expectKeyword(keyword string) {
 // PackageId = string_lit .
 //
-func (p *parser) parsePackageId() string {
+func (p *parser) parsePackageID() string {
 	id, err := strconv.Unquote(p.expect(scanner.String))
 	if err != nil {
 		p.error(err)
@@ -384,7 +384,7 @@ func (p *parser) parseDotIdent() string {
 //
 func (p *parser) parseQualifiedName() (id, name string) {
 	p.expect('@')
-	id = p.parsePackageId()
+	id = p.parsePackageID()
 	p.expect('.')
 	// Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields.
 	if p.tok == '?' {
@@ -696,7 +696,7 @@ func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
 	// Complete requires the type's embedded interfaces to be fully defined,
 	// but we do not define any
-	return types.NewInterface(methods, nil).Complete()
+	return newInterface(methods, nil).Complete()
 }
 
 // ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
@@ -785,7 +785,7 @@ func (p *parser) parseType(parent *types.Package) types.Type {
 func (p *parser) parseImportDecl() {
 	p.expectKeyword("import")
 	name := p.parsePackageName()
-	p.getPkg(p.parsePackageId(), name)
+	p.getPkg(p.parsePackageID(), name)
 }
 
 // int_lit = [ "+" | "-" ] { "0" ... "9" } .

(another changed file)

@@ -60,8 +60,7 @@ causes Load to run in LoadFiles mode, collecting minimal information.
 See the documentation for type Config for details.
 
 As noted earlier, the Config.Mode controls the amount of detail
-reported about the loaded packages, with each mode returning all the data of the
-previous mode with some extra added. See the documentation for type LoadMode
+reported about the loaded packages. See the documentation for type LoadMode
 for details.
 
 Most tools should pass their command-line arguments (after any flags)
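For context, LoadMode is a bit set, and after this doc change it no longer claims that each mode includes all the data of the previous one; callers combine exactly the Need* bits they want. A minimal, hypothetical use of the go/packages API with explicit mode bits:

package main

import (
	"fmt"

	"golang.org/x/tools/go/packages"
)

func main() {
	// Request only the detail this hypothetical tool needs: package names,
	// file lists and the import graph.
	cfg := &packages.Config{
		Mode: packages.NeedName | packages.NeedFiles | packages.NeedImports,
	}
	pkgs, err := packages.Load(cfg, "./...")
	if err != nil {
		panic(err)
	}
	for _, p := range pkgs {
		fmt.Println(p.PkgPath, len(p.GoFiles), "Go files")
	}
}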

(another changed file)

@@ -84,13 +84,14 @@ func findExternalDriver(cfg *Config) driver {
 		cmd.Stdin = bytes.NewReader(req)
 		cmd.Stdout = buf
 		cmd.Stderr = stderr
-		if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTDRIVERERRORS") != "" {
-			fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(cmd, words...), stderr)
-		}
+
 		if err := cmd.Run(); err != nil {
 			return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr)
 		}
+		if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTDRIVERERRORS") != "" {
+			fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(cmd, words...), stderr)
+		}
 
 		var response driverResponse
 		if err := json.Unmarshal(buf.Bytes(), &response); err != nil {
 			return nil, err

(another changed file)

@@ -6,17 +6,16 @@ package packages
 
 import (
 	"bytes"
+	"context"
 	"encoding/json"
 	"fmt"
 	"go/types"
-	"io/ioutil"
 	"log"
 	"os"
 	"os/exec"
 	"path"
 	"path/filepath"
 	"reflect"
-	"regexp"
 	"strconv"
 	"strings"
 	"sync"
@@ -24,9 +23,6 @@ import (
 	"unicode"
 
 	"golang.org/x/tools/go/internal/packagesdriver"
-	"golang.org/x/tools/internal/gopathwalk"
-	"golang.org/x/tools/internal/semver"
-	"golang.org/x/tools/internal/span"
 )
 
 // debug controls verbose logging.
@@ -45,16 +41,21 @@ type responseDeduper struct {
dr *driverResponse dr *driverResponse
} }
// init fills in r with a driverResponse. func newDeduper() *responseDeduper {
func (r *responseDeduper) init(dr *driverResponse) { return &responseDeduper{
r.dr = dr dr: &driverResponse{},
r.seenRoots = map[string]bool{} seenRoots: map[string]bool{},
r.seenPackages = map[string]*Package{} seenPackages: map[string]*Package{},
}
}
// addAll fills in r with a driverResponse.
func (r *responseDeduper) addAll(dr *driverResponse) {
for _, pkg := range dr.Packages { for _, pkg := range dr.Packages {
r.seenPackages[pkg.ID] = pkg r.addPackage(pkg)
} }
for _, root := range dr.Roots { for _, root := range dr.Roots {
r.seenRoots[root] = true r.addRoot(root)
} }
} }
@@ -74,25 +75,47 @@ func (r *responseDeduper) addRoot(id string) {
r.dr.Roots = append(r.dr.Roots, id) r.dr.Roots = append(r.dr.Roots, id)
} }
// goInfo contains global information from the go tool. type golistState struct {
type goInfo struct { cfg *Config
ctx context.Context
envOnce sync.Once
goEnvError error
goEnv map[string]string
rootsOnce sync.Once
rootDirsError error
rootDirs map[string]string rootDirs map[string]string
env goEnv
// vendorDirs caches the (non)existence of vendor directories.
vendorDirs map[string]bool
} }
type goEnv struct { // getEnv returns Go environment variables. Only specific variables are
modulesOn bool // populated -- computing all of them is slow.
} func (state *golistState) getEnv() (map[string]string, error) {
state.envOnce.Do(func() {
func determineEnv(cfg *Config) goEnv { var b *bytes.Buffer
buf, err := invokeGo(cfg, "env", "GOMOD") b, state.goEnvError = state.invokeGo("env", "-json", "GOMOD", "GOPATH")
if err != nil { if state.goEnvError != nil {
return goEnv{} return
} }
gomod := bytes.TrimSpace(buf.Bytes())
env := goEnv{} state.goEnv = make(map[string]string)
env.modulesOn = len(gomod) > 0 decoder := json.NewDecoder(b)
if state.goEnvError = decoder.Decode(&state.goEnv); state.goEnvError != nil {
return
}
})
return state.goEnv, state.goEnvError
}
// mustGetEnv is a convenience function that can be used if getEnv has already succeeded.
func (state *golistState) mustGetEnv() map[string]string {
env, err := state.getEnv()
if err != nil {
panic(fmt.Sprintf("mustGetEnv: %v", err))
}
return env return env
} }
@@ -100,47 +123,38 @@ func determineEnv(cfg *Config) goEnv {
// the build system package structure. // the build system package structure.
// See driver for more details. // See driver for more details.
func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) { func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) {
var sizes types.Sizes // Make sure that any asynchronous go commands are killed when we return.
parentCtx := cfg.Context
if parentCtx == nil {
parentCtx = context.Background()
}
ctx, cancel := context.WithCancel(parentCtx)
defer cancel()
response := newDeduper()
// Fill in response.Sizes asynchronously if necessary.
var sizeserr error var sizeserr error
var sizeswg sync.WaitGroup var sizeswg sync.WaitGroup
if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 { if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 {
sizeswg.Add(1) sizeswg.Add(1)
go func() { go func() {
sizes, sizeserr = getSizes(cfg) var sizes types.Sizes
sizes, sizeserr = packagesdriver.GetSizesGolist(ctx, cfg.BuildFlags, cfg.Env, cfg.Dir, usesExportData(cfg))
// types.SizesFor always returns nil or a *types.StdSizes.
response.dr.Sizes, _ = sizes.(*types.StdSizes)
sizeswg.Done() sizeswg.Done()
}() }()
} }
defer sizeswg.Wait()
// start fetching rootDirs state := &golistState{
var info goInfo cfg: cfg,
var rootDirsReady, envReady = make(chan struct{}), make(chan struct{}) ctx: ctx,
go func() { vendorDirs: map[string]bool{},
info.rootDirs = determineRootDirs(cfg)
close(rootDirsReady)
}()
go func() {
info.env = determineEnv(cfg)
close(envReady)
}()
getGoInfo := func() *goInfo {
<-rootDirsReady
<-envReady
return &info
}
// Ensure that we don't leak goroutines: Load is synchronous, so callers will
// not expect it to access the fields of cfg after the call returns.
defer getGoInfo()
// always pass getGoInfo to golistDriver
golistDriver := func(cfg *Config, patterns ...string) (*driverResponse, error) {
return golistDriver(cfg, getGoInfo, patterns...)
} }
// Determine files requested in contains patterns // Determine files requested in contains patterns
var containFiles []string var containFiles []string
var packagesNamed []string
restPatterns := make([]string, 0, len(patterns)) restPatterns := make([]string, 0, len(patterns))
// Extract file= and other [querytype]= patterns. Report an error if querytype // Extract file= and other [querytype]= patterns. Report an error if querytype
// doesn't exist. // doesn't exist.
@@ -156,8 +170,6 @@ extractQueries:
 			containFiles = append(containFiles, value)
 		case "pattern":
 			restPatterns = append(restPatterns, value)
-		case "iamashamedtousethedisabledqueryname":
-			packagesNamed = append(packagesNamed, value)
 		case "": // not a reserved query
 			restPatterns = append(restPatterns, pattern)
 		default:
@@ -173,52 +185,34 @@ extractQueries:
} }
} }
response := &responseDeduper{}
var err error
// See if we have any patterns to pass through to go list. Zero initial // See if we have any patterns to pass through to go list. Zero initial
// patterns also requires a go list call, since it's the equivalent of // patterns also requires a go list call, since it's the equivalent of
// ".". // ".".
if len(restPatterns) > 0 || len(patterns) == 0 { if len(restPatterns) > 0 || len(patterns) == 0 {
dr, err := golistDriver(cfg, restPatterns...) dr, err := state.createDriverResponse(restPatterns...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
response.init(dr) response.addAll(dr)
} else {
response.init(&driverResponse{})
} }
sizeswg.Wait()
if sizeserr != nil {
return nil, sizeserr
}
// types.SizesFor always returns nil or a *types.StdSizes
response.dr.Sizes, _ = sizes.(*types.StdSizes)
var containsCandidates []string
if len(containFiles) != 0 { if len(containFiles) != 0 {
if err := runContainsQueries(cfg, golistDriver, response, containFiles, getGoInfo); err != nil { if err := state.runContainsQueries(response, containFiles); err != nil {
return nil, err return nil, err
} }
} }
if len(packagesNamed) != 0 { modifiedPkgs, needPkgs, err := state.processGolistOverlay(response)
if err := runNamedQueries(cfg, golistDriver, response, packagesNamed); err != nil {
return nil, err
}
}
modifiedPkgs, needPkgs, err := processGolistOverlay(cfg, response, getGoInfo)
if err != nil { if err != nil {
return nil, err return nil, err
} }
var containsCandidates []string
if len(containFiles) > 0 { if len(containFiles) > 0 {
containsCandidates = append(containsCandidates, modifiedPkgs...) containsCandidates = append(containsCandidates, modifiedPkgs...)
containsCandidates = append(containsCandidates, needPkgs...) containsCandidates = append(containsCandidates, needPkgs...)
} }
if err := addNeededOverlayPackages(cfg, golistDriver, response, needPkgs, getGoInfo); err != nil { if err := state.addNeededOverlayPackages(response, needPkgs); err != nil {
return nil, err return nil, err
} }
// Check candidate packages for containFiles. // Check candidate packages for containFiles.
@@ -247,33 +241,32 @@ extractQueries:
} }
} }
sizeswg.Wait()
if sizeserr != nil {
return nil, sizeserr
}
return response.dr, nil return response.dr, nil
} }
func addNeededOverlayPackages(cfg *Config, driver driver, response *responseDeduper, pkgs []string, getGoInfo func() *goInfo) error { func (state *golistState) addNeededOverlayPackages(response *responseDeduper, pkgs []string) error {
if len(pkgs) == 0 { if len(pkgs) == 0 {
return nil return nil
} }
drivercfg := *cfg dr, err := state.createDriverResponse(pkgs...)
if getGoInfo().env.modulesOn {
drivercfg.BuildFlags = append(drivercfg.BuildFlags, "-mod=readonly")
}
dr, err := driver(&drivercfg, pkgs...)
if err != nil { if err != nil {
return err return err
} }
for _, pkg := range dr.Packages { for _, pkg := range dr.Packages {
response.addPackage(pkg) response.addPackage(pkg)
} }
_, needPkgs, err := processGolistOverlay(cfg, response, getGoInfo) _, needPkgs, err := state.processGolistOverlay(response)
if err != nil { if err != nil {
return err return err
} }
return addNeededOverlayPackages(cfg, driver, response, needPkgs, getGoInfo) return state.addNeededOverlayPackages(response, needPkgs)
} }
func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, queries []string, goInfo func() *goInfo) error { func (state *golistState) runContainsQueries(response *responseDeduper, queries []string) error {
for _, query := range queries { for _, query := range queries {
// TODO(matloob): Do only one query per directory. // TODO(matloob): Do only one query per directory.
fdir := filepath.Dir(query) fdir := filepath.Dir(query)
@@ -283,42 +276,16 @@ func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, q
if err != nil { if err != nil {
return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err) return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err)
} }
dirResponse, err := driver(cfg, pattern) dirResponse, err := state.createDriverResponse(pattern)
if err != nil {
// If there was an error loading the package, or the package is returned
// with errors, try to load the file as an ad-hoc package.
// Usually the error will appear in a returned package, but may not if we're
// in module mode and the ad-hoc is located outside a module.
if err != nil || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 &&
len(dirResponse.Packages[0].Errors) == 1 {
var queryErr error var queryErr error
if dirResponse, queryErr = adHocPackage(cfg, driver, pattern, query); queryErr != nil { if dirResponse, queryErr = state.adhocPackage(pattern, query); queryErr != nil {
return err // return the original error
}
}
// `go list` can report errors for files that are not listed as part of a package's GoFiles.
// In the case of an invalid Go file, we should assume that it is part of package if only
// one package is in the response. The file may have valid contents in an overlay.
if len(dirResponse.Packages) == 1 {
pkg := dirResponse.Packages[0]
for i, err := range pkg.Errors {
s := errorSpan(err)
if !s.IsValid() {
break
}
if len(pkg.CompiledGoFiles) == 0 {
break
}
dir := filepath.Dir(pkg.CompiledGoFiles[0])
filename := filepath.Join(dir, filepath.Base(s.URI().Filename()))
if info, err := os.Stat(filename); err != nil || info.IsDir() {
break
}
if !contains(pkg.CompiledGoFiles, filename) {
pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, filename)
pkg.GoFiles = append(pkg.GoFiles, filename)
pkg.Errors = append(pkg.Errors[:i], pkg.Errors[i+1:]...)
}
}
}
// A final attempt to construct an ad-hoc package.
if len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].Errors) == 1 {
var queryErr error
if dirResponse, queryErr = adHocPackage(cfg, driver, pattern, query); queryErr != nil {
return err // return the original error return err // return the original error
} }
} }
@@ -347,345 +314,47 @@ func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, q
return nil return nil
} }
// adHocPackage attempts to construct an ad-hoc package given a query that failed. // adhocPackage attempts to load or construct an ad-hoc package for a given
func adHocPackage(cfg *Config, driver driver, pattern, query string) (*driverResponse, error) { // query, if the original call to the driver produced inadequate results.
// There was an error loading the package. Try to load the file as an ad-hoc package. func (state *golistState) adhocPackage(pattern, query string) (*driverResponse, error) {
// Usually the error will appear in a returned package, but may not if we're in modules mode response, err := state.createDriverResponse(query)
// and the ad-hoc is located outside a module.
dirResponse, err := driver(cfg, query)
if err != nil { if err != nil {
return nil, err return nil, err
} }
// If we get nothing back from `go list`, try to make this file into its own ad-hoc package. // If we get nothing back from `go list`,
if len(dirResponse.Packages) == 0 && err == nil { // try to make this file into its own ad-hoc package.
dirResponse.Packages = append(dirResponse.Packages, &Package{ // TODO(rstambler): Should this check against the original response?
if len(response.Packages) == 0 {
response.Packages = append(response.Packages, &Package{
ID: "command-line-arguments", ID: "command-line-arguments",
PkgPath: query, PkgPath: query,
GoFiles: []string{query}, GoFiles: []string{query},
CompiledGoFiles: []string{query}, CompiledGoFiles: []string{query},
Imports: make(map[string]*Package), Imports: make(map[string]*Package),
}) })
dirResponse.Roots = append(dirResponse.Roots, "command-line-arguments") response.Roots = append(response.Roots, "command-line-arguments")
} }
// Special case to handle issue #33482: // Handle special cases.
// If this is a file= query for ad-hoc packages where the file only exists on an overlay, if len(response.Packages) == 1 {
// and exists outside of a module, add the file in for the package. // golang/go#33482: If this is a file= query for ad-hoc packages where
if len(dirResponse.Packages) == 1 && (dirResponse.Packages[0].ID == "command-line-arguments" || // the file only exists on an overlay, and exists outside of a module,
filepath.ToSlash(dirResponse.Packages[0].PkgPath) == filepath.ToSlash(query)) { // add the file to the package and remove the errors.
if len(dirResponse.Packages[0].GoFiles) == 0 { if response.Packages[0].ID == "command-line-arguments" ||
filepath.ToSlash(response.Packages[0].PkgPath) == filepath.ToSlash(query) {
if len(response.Packages[0].GoFiles) == 0 {
filename := filepath.Join(pattern, filepath.Base(query)) // avoid recomputing abspath filename := filepath.Join(pattern, filepath.Base(query)) // avoid recomputing abspath
// TODO(matloob): check if the file is outside of a root dir? // TODO(matloob): check if the file is outside of a root dir?
for path := range cfg.Overlay { for path := range state.cfg.Overlay {
if path == filename { if path == filename {
dirResponse.Packages[0].Errors = nil response.Packages[0].Errors = nil
dirResponse.Packages[0].GoFiles = []string{path} response.Packages[0].GoFiles = []string{path}
dirResponse.Packages[0].CompiledGoFiles = []string{path} response.Packages[0].CompiledGoFiles = []string{path}
} }
} }
} }
} }
return dirResponse, nil
}
func contains(files []string, filename string) bool {
for _, f := range files {
if f == filename {
return true
} }
} return response, nil
return false
}
// errorSpan attempts to parse a standard `go list` error message
// by stripping off the trailing error message.
//
// It works only on errors whose message is prefixed by colon,
// followed by a space (": "). For example:
//
// attributes.go:13:1: expected 'package', found 'type'
//
func errorSpan(err Error) span.Span {
if err.Pos == "" {
input := strings.TrimSpace(err.Msg)
msgIndex := strings.Index(input, ": ")
if msgIndex < 0 {
return span.Parse(input)
}
return span.Parse(input[:msgIndex])
}
return span.Parse(err.Pos)
}
// modCacheRegexp splits a path in a module cache into module, module version, and package.
var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`)
func runNamedQueries(cfg *Config, driver driver, response *responseDeduper, queries []string) error {
// calling `go env` isn't free; bail out if there's nothing to do.
if len(queries) == 0 {
return nil
}
// Determine which directories are relevant to scan.
roots, modRoot, err := roots(cfg)
if err != nil {
return err
}
// Scan the selected directories. Simple matches, from GOPATH/GOROOT
// or the local module, can simply be "go list"ed. Matches from the
// module cache need special treatment.
var matchesMu sync.Mutex
var simpleMatches, modCacheMatches []string
add := func(root gopathwalk.Root, dir string) {
// Walk calls this concurrently; protect the result slices.
matchesMu.Lock()
defer matchesMu.Unlock()
path := dir
if dir != root.Path {
path = dir[len(root.Path)+1:]
}
if pathMatchesQueries(path, queries) {
switch root.Type {
case gopathwalk.RootModuleCache:
modCacheMatches = append(modCacheMatches, path)
case gopathwalk.RootCurrentModule:
// We'd need to read go.mod to find the full
// import path. Relative's easier.
rel, err := filepath.Rel(cfg.Dir, dir)
if err != nil {
// This ought to be impossible, since
// we found dir in the current module.
panic(err)
}
simpleMatches = append(simpleMatches, "./"+rel)
case gopathwalk.RootGOPATH, gopathwalk.RootGOROOT:
simpleMatches = append(simpleMatches, path)
}
}
}
startWalk := time.Now()
gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: modRoot != "", Debug: debug})
cfg.Logf("%v for walk", time.Since(startWalk))
// Weird special case: the top-level package in a module will be in
// whatever directory the user checked the repository out into. It's
// more reasonable for that to not match the package name. So, if there
// are any Go files in the mod root, query it just to be safe.
if modRoot != "" {
rel, err := filepath.Rel(cfg.Dir, modRoot)
if err != nil {
panic(err) // See above.
}
files, err := ioutil.ReadDir(modRoot)
if err != nil {
panic(err) // See above.
}
for _, f := range files {
if strings.HasSuffix(f.Name(), ".go") {
simpleMatches = append(simpleMatches, rel)
break
}
}
}
addResponse := func(r *driverResponse) {
for _, pkg := range r.Packages {
response.addPackage(pkg)
for _, name := range queries {
if pkg.Name == name {
response.addRoot(pkg.ID)
break
}
}
}
}
if len(simpleMatches) != 0 {
resp, err := driver(cfg, simpleMatches...)
if err != nil {
return err
}
addResponse(resp)
}
// Module cache matches are tricky. We want to avoid downloading new
// versions of things, so we need to use the ones present in the cache.
// go list doesn't accept version specifiers, so we have to write out a
// temporary module, and do the list in that module.
if len(modCacheMatches) != 0 {
// Collect all the matches, deduplicating by major version
// and preferring the newest.
type modInfo struct {
mod string
major string
}
mods := make(map[modInfo]string)
var imports []string
for _, modPath := range modCacheMatches {
matches := modCacheRegexp.FindStringSubmatch(modPath)
mod, ver := filepath.ToSlash(matches[1]), matches[2]
importPath := filepath.ToSlash(filepath.Join(matches[1], matches[3]))
major := semver.Major(ver)
if prevVer, ok := mods[modInfo{mod, major}]; !ok || semver.Compare(ver, prevVer) > 0 {
mods[modInfo{mod, major}] = ver
}
imports = append(imports, importPath)
}
// Build the temporary module.
var gomod bytes.Buffer
gomod.WriteString("module modquery\nrequire (\n")
for mod, version := range mods {
gomod.WriteString("\t" + mod.mod + " " + version + "\n")
}
gomod.WriteString(")\n")
tmpCfg := *cfg
// We're only trying to look at stuff in the module cache, so
// disable the network. This should speed things up, and has
// prevented errors in at least one case, #28518.
tmpCfg.Env = append([]string{"GOPROXY=off"}, cfg.Env...)
var err error
tmpCfg.Dir, err = ioutil.TempDir("", "gopackages-modquery")
if err != nil {
return err
}
defer os.RemoveAll(tmpCfg.Dir)
if err := ioutil.WriteFile(filepath.Join(tmpCfg.Dir, "go.mod"), gomod.Bytes(), 0777); err != nil {
return fmt.Errorf("writing go.mod for module cache query: %v", err)
}
// Run the query, using the import paths calculated from the matches above.
resp, err := driver(&tmpCfg, imports...)
if err != nil {
return fmt.Errorf("querying module cache matches: %v", err)
}
addResponse(resp)
}
return nil
}
func getSizes(cfg *Config) (types.Sizes, error) {
return packagesdriver.GetSizesGolist(cfg.Context, cfg.BuildFlags, cfg.Env, cfg.Dir, usesExportData(cfg))
}
// roots selects the appropriate paths to walk based on the passed-in configuration,
// particularly the environment and the presence of a go.mod in cfg.Dir's parents.
func roots(cfg *Config) ([]gopathwalk.Root, string, error) {
stdout, err := invokeGo(cfg, "env", "GOROOT", "GOPATH", "GOMOD")
if err != nil {
return nil, "", err
}
fields := strings.Split(stdout.String(), "\n")
if len(fields) != 4 || len(fields[3]) != 0 {
return nil, "", fmt.Errorf("go env returned unexpected output: %q", stdout.String())
}
goroot, gopath, gomod := fields[0], filepath.SplitList(fields[1]), fields[2]
var modDir string
if gomod != "" {
modDir = filepath.Dir(gomod)
}
var roots []gopathwalk.Root
// Always add GOROOT.
roots = append(roots, gopathwalk.Root{
Path: filepath.Join(goroot, "/src"),
Type: gopathwalk.RootGOROOT,
})
// If modules are enabled, scan the module dir.
if modDir != "" {
roots = append(roots, gopathwalk.Root{
Path: modDir,
Type: gopathwalk.RootCurrentModule,
})
}
// Add either GOPATH/src or GOPATH/pkg/mod, depending on module mode.
for _, p := range gopath {
if modDir != "" {
roots = append(roots, gopathwalk.Root{
Path: filepath.Join(p, "/pkg/mod"),
Type: gopathwalk.RootModuleCache,
})
} else {
roots = append(roots, gopathwalk.Root{
Path: filepath.Join(p, "/src"),
Type: gopathwalk.RootGOPATH,
})
}
}
return roots, modDir, nil
}
// These functions were copied from goimports. See further documentation there.
// pathMatchesQueries is adapted from pkgIsCandidate.
// TODO: is it reasonable to do Contains here, rather than an exact match on a path component?
func pathMatchesQueries(path string, queries []string) bool {
lastTwo := lastTwoComponents(path)
for _, query := range queries {
if strings.Contains(lastTwo, query) {
return true
}
if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(query) {
lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
if strings.Contains(lastTwo, query) {
return true
}
}
}
return false
}
// lastTwoComponents returns at most the last two path components
// of v, using either / or \ as the path separator.
func lastTwoComponents(v string) string {
nslash := 0
for i := len(v) - 1; i >= 0; i-- {
if v[i] == '/' || v[i] == '\\' {
nslash++
if nslash == 2 {
return v[i:]
}
}
}
return v
}
func hasHyphenOrUpperASCII(s string) bool {
for i := 0; i < len(s); i++ {
b := s[i]
if b == '-' || ('A' <= b && b <= 'Z') {
return true
}
}
return false
}
func lowerASCIIAndRemoveHyphen(s string) (ret string) {
buf := make([]byte, 0, len(s))
for i := 0; i < len(s); i++ {
b := s[i]
switch {
case b == '-':
continue
case 'A' <= b && b <= 'Z':
buf = append(buf, b+('a'-'A'))
default:
buf = append(buf, b)
}
}
return string(buf)
} }
// Fields must match go list; // Fields must match go list;
@@ -730,10 +399,9 @@ func otherFiles(p *jsonPackage) [][]string {
 	return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles}
 }
 
-// golistDriver uses the "go list" command to expand the pattern
-// words and return metadata for the specified packages. dir may be
-// "" and env may be nil, as per os/exec.Command.
-func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driverResponse, error) {
+// createDriverResponse uses the "go list" command to expand the pattern
+// words and return a response for the specified packages.
+func (state *golistState) createDriverResponse(words ...string) (*driverResponse, error) {
 	// go list uses the following identifiers in ImportPath and Imports:
 	//
 	// "p" -- importable package or main (command)
@@ -747,7 +415,7 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
 	// Run "go list" for complete
 	// information on the specified packages.
-	buf, err := invokeGo(cfg, golistargs(cfg, words)...)
+	buf, err := state.invokeGo("list", golistargs(state.cfg, words)...)
 	if err != nil {
 		return nil, err
 	}
@@ -782,7 +450,10 @@
 		// contained in a known module or GOPATH entry. This will allow the package to be
 		// properly "reclaimed" when overlays are processed.
 		if filepath.IsAbs(p.ImportPath) && p.Error != nil {
-			pkgPath, ok := getPkgPath(cfg, p.ImportPath, rootsDirs)
+			pkgPath, ok, err := state.getPkgPath(p.ImportPath)
+			if err != nil {
+				return nil, err
+			}
 			if ok {
 				p.ImportPath = pkgPath
 			}
@@ -803,6 +474,7 @@
 			GoFiles:         absJoin(p.Dir, p.GoFiles, p.CgoFiles),
 			CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
 			OtherFiles:      absJoin(p.Dir, otherFiles(p)...),
+			forTest:         p.ForTest,
 		}
 
 		// Work around https://golang.org/issue/28749:
@@ -879,9 +551,15 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
} }
if p.Error != nil { if p.Error != nil {
msg := strings.TrimSpace(p.Error.Err) // Trim to work around golang.org/issue/32363.
// Address golang.org/issue/35964 by appending import stack to error message.
if msg == "import cycle not allowed" && len(p.Error.ImportStack) != 0 {
msg += fmt.Sprintf(": import stack: %v", p.Error.ImportStack)
}
pkg.Errors = append(pkg.Errors, Error{ pkg.Errors = append(pkg.Errors, Error{
Pos: p.Error.Pos, Pos: p.Error.Pos,
Msg: strings.TrimSpace(p.Error.Err), // Trim to work around golang.org/issue/32363. Msg: msg,
Kind: ListError,
}) })
} }
@@ -892,22 +570,20 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
} }
// getPkgPath finds the package path of a directory if it's relative to a root directory. // getPkgPath finds the package path of a directory if it's relative to a root directory.
func getPkgPath(cfg *Config, dir string, goInfo func() *goInfo) (string, bool) { func (state *golistState) getPkgPath(dir string) (string, bool, error) {
absDir, err := filepath.Abs(dir) absDir, err := filepath.Abs(dir)
if err != nil { if err != nil {
cfg.Logf("error getting absolute path of %s: %v", dir, err) return "", false, err
return "", false
} }
for rdir, rpath := range goInfo().rootDirs { roots, err := state.determineRootDirs()
absRdir, err := filepath.Abs(rdir)
if err != nil { if err != nil {
cfg.Logf("error getting absolute path of %s: %v", rdir, err) return "", false, err
continue
} }
for rdir, rpath := range roots {
// Make sure that the directory is in the module, // Make sure that the directory is in the module,
// to avoid creating a path relative to another module. // to avoid creating a path relative to another module.
if !strings.HasPrefix(absDir, absRdir) { if !strings.HasPrefix(absDir, rdir) {
cfg.Logf("%s does not have prefix %s", absDir, absRdir)
continue continue
} }
// TODO(matloob): This doesn't properly handle symlinks. // TODO(matloob): This doesn't properly handle symlinks.
@@ -922,11 +598,11 @@ func getPkgPath(cfg *Config, dir string, goInfo func() *goInfo) (string, bool) {
// Once the file is saved, gopls, or the next invocation of the tool will get the correct // Once the file is saved, gopls, or the next invocation of the tool will get the correct
// result straight from golist. // result straight from golist.
// TODO(matloob): Implement module tiebreaking? // TODO(matloob): Implement module tiebreaking?
return path.Join(rpath, filepath.ToSlash(r)), true return path.Join(rpath, filepath.ToSlash(r)), true, nil
} }
return filepath.ToSlash(r), true return filepath.ToSlash(r), true, nil
} }
return "", false return "", false, nil
} }
// absJoin absolutizes and flattens the lists of files. // absJoin absolutizes and flattens the lists of files.
@@ -945,8 +621,8 @@ func absJoin(dir string, fileses ...[]string) (res []string) {
func golistargs(cfg *Config, words []string) []string { func golistargs(cfg *Config, words []string) []string {
const findFlags = NeedImports | NeedTypes | NeedSyntax | NeedTypesInfo const findFlags = NeedImports | NeedTypes | NeedSyntax | NeedTypesInfo
fullargs := []string{ fullargs := []string{
"list", "-e", "-json", "-e", "-json",
fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypesInfo|NeedTypesSizes) != 0), fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypes|NeedTypesInfo|NeedTypesSizes) != 0),
fmt.Sprintf("-test=%t", cfg.Tests), fmt.Sprintf("-test=%t", cfg.Tests),
fmt.Sprintf("-export=%t", usesExportData(cfg)), fmt.Sprintf("-export=%t", usesExportData(cfg)),
fmt.Sprintf("-deps=%t", cfg.Mode&NeedImports != 0), fmt.Sprintf("-deps=%t", cfg.Mode&NeedImports != 0),
@@ -961,10 +637,17 @@ func golistargs(cfg *Config, words []string) []string {
} }
// invokeGo returns the stdout of a go command invocation. // invokeGo returns the stdout of a go command invocation.
func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) { func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer, error) {
cfg := state.cfg
stdout := new(bytes.Buffer) stdout := new(bytes.Buffer)
stderr := new(bytes.Buffer) stderr := new(bytes.Buffer)
cmd := exec.CommandContext(cfg.Context, "go", args...) goArgs := []string{verb}
if verb != "env" {
goArgs = append(goArgs, cfg.BuildFlags...)
}
goArgs = append(goArgs, args...)
cmd := exec.CommandContext(state.ctx, "go", goArgs...)
// On darwin the cwd gets resolved to the real path, which breaks anything that // On darwin the cwd gets resolved to the real path, which breaks anything that
// expects the working directory to keep the original path, including the // expects the working directory to keep the original path, including the
// go command when dealing with modules. // go command when dealing with modules.
@@ -976,7 +659,7 @@ func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
cmd.Stdout = stdout cmd.Stdout = stdout
cmd.Stderr = stderr cmd.Stderr = stderr
defer func(start time.Time) { defer func(start time.Time) {
cfg.Logf("%s for %v, stderr: <<%s>> stdout: <<%s>>\n", time.Since(start), cmdDebugStr(cmd, args...), stderr, stdout) cfg.Logf("%s for %v, stderr: <<%s>> stdout: <<%s>>\n", time.Since(start), cmdDebugStr(cmd, goArgs...), stderr, stdout)
}(time.Now()) }(time.Now())
if err := cmd.Run(); err != nil { if err := cmd.Run(); err != nil {

(another changed file)

@@ -1,12 +1,13 @@
 package packages
 
 import (
-	"bytes"
 	"encoding/json"
 	"fmt"
 	"go/parser"
 	"go/token"
+	"os"
 	"path/filepath"
+	"sort"
 	"strconv"
 	"strings"
 )
@@ -16,7 +17,7 @@ import (
// sometimes incorrect. // sometimes incorrect.
// TODO(matloob): Handle unsupported cases, including the following: // TODO(matloob): Handle unsupported cases, including the following:
// - determining the correct package to add given a new import path // - determining the correct package to add given a new import path
func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func() *goInfo) (modifiedPkgs, needPkgs []string, err error) { func (state *golistState) processGolistOverlay(response *responseDeduper) (modifiedPkgs, needPkgs []string, err error) {
havePkgs := make(map[string]string) // importPath -> non-test package ID havePkgs := make(map[string]string) // importPath -> non-test package ID
needPkgsSet := make(map[string]bool) needPkgsSet := make(map[string]bool)
modifiedPkgsSet := make(map[string]bool) modifiedPkgsSet := make(map[string]bool)
@@ -34,7 +35,23 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
// potentially modifying the transitive set of dependencies). // potentially modifying the transitive set of dependencies).
var overlayAddsImports bool var overlayAddsImports bool
for opath, contents := range cfg.Overlay { // If both a package and its test package are created by the overlay, we
// need the real package first. Process all non-test files before test
// files, and make the whole process deterministic while we're at it.
var overlayFiles []string
for opath := range state.cfg.Overlay {
overlayFiles = append(overlayFiles, opath)
}
sort.Slice(overlayFiles, func(i, j int) bool {
iTest := strings.HasSuffix(overlayFiles[i], "_test.go")
jTest := strings.HasSuffix(overlayFiles[j], "_test.go")
if iTest != jTest {
return !iTest // non-tests are before tests.
}
return overlayFiles[i] < overlayFiles[j]
})
for _, opath := range overlayFiles {
contents := state.cfg.Overlay[opath]
base := filepath.Base(opath) base := filepath.Base(opath)
dir := filepath.Dir(opath) dir := filepath.Dir(opath)
var pkg *Package // if opath belongs to both a package and its test variant, this will be the test variant var pkg *Package // if opath belongs to both a package and its test variant, this will be the test variant
@@ -64,14 +81,8 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
testVariantOf = p testVariantOf = p
continue nextPackage continue nextPackage
} }
// We must have already seen the package of which this is a test variant.
if pkg != nil && p != pkg && pkg.PkgPath == p.PkgPath { if pkg != nil && p != pkg && pkg.PkgPath == p.PkgPath {
// If we've already seen the test variant,
// make sure to label which package it is a test variant of.
if hasTestFiles(pkg) {
testVariantOf = p
continue nextPackage
}
// If we have already seen the package of which this is a test variant.
if hasTestFiles(p) { if hasTestFiles(p) {
testVariantOf = pkg testVariantOf = pkg
} }
@@ -86,7 +97,10 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
if pkg == nil { if pkg == nil {
// Try to find the module or gopath dir the file is contained in. // Try to find the module or gopath dir the file is contained in.
// Then for modules, add the module opath to the beginning. // Then for modules, add the module opath to the beginning.
pkgPath, ok := getPkgPath(cfg, dir, rootDirs) pkgPath, ok, err := state.getPkgPath(dir)
if err != nil {
return nil, nil, err
}
if !ok { if !ok {
break break
} }
@@ -114,6 +128,11 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
if isTestFile && !isXTest && testVariantOf != nil { if isTestFile && !isXTest && testVariantOf != nil {
pkg.GoFiles = append(pkg.GoFiles, testVariantOf.GoFiles...) pkg.GoFiles = append(pkg.GoFiles, testVariantOf.GoFiles...)
pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, testVariantOf.CompiledGoFiles...) pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, testVariantOf.CompiledGoFiles...)
// Add the package under test and its imports to the test variant.
pkg.forTest = testVariantOf.PkgPath
for k, v := range testVariantOf.Imports {
pkg.Imports[k] = &Package{ID: v.ID}
}
} }
} }
} }
@@ -130,42 +149,45 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
continue continue
} }
for _, imp := range imports { for _, imp := range imports {
_, found := pkg.Imports[imp] if _, found := pkg.Imports[imp]; found {
if !found { continue
}
overlayAddsImports = true overlayAddsImports = true
// TODO(matloob): Handle cases when the following block isn't correct.
// These include imports of vendored packages, etc.
id, ok := havePkgs[imp] id, ok := havePkgs[imp]
if !ok { if !ok {
id = imp var err error
id, err = state.resolveImport(dir, imp)
if err != nil {
return nil, nil, err
}
} }
pkg.Imports[imp] = &Package{ID: id} pkg.Imports[imp] = &Package{ID: id}
// Add dependencies to the non-test variant version of this package as wel. // Add dependencies to the non-test variant version of this package as well.
if testVariantOf != nil { if testVariantOf != nil {
testVariantOf.Imports[imp] = &Package{ID: id} testVariantOf.Imports[imp] = &Package{ID: id}
} }
} }
} }
continue
// toPkgPath guesses the package path given the id.
toPkgPath := func(sourceDir, id string) (string, error) {
if i := strings.IndexByte(id, ' '); i >= 0 {
return state.resolveImport(sourceDir, id[:i])
}
return state.resolveImport(sourceDir, id)
} }
// toPkgPath tries to guess the package path given the id. // Now that new packages have been created, do another pass to determine
// This isn't always correct -- it's certainly wrong for // the new set of missing packages.
// vendored packages' paths.
toPkgPath := func(id string) string {
// TODO(matloob): Handle vendor paths.
i := strings.IndexByte(id, ' ')
if i >= 0 {
return id[:i]
}
return id
}
// Do another pass now that new packages have been created to determine the
// set of missing packages.
for _, pkg := range response.dr.Packages { for _, pkg := range response.dr.Packages {
for _, imp := range pkg.Imports { for _, imp := range pkg.Imports {
pkgPath := toPkgPath(imp.ID) if len(pkg.GoFiles) == 0 {
return nil, nil, fmt.Errorf("cannot resolve imports for package %q with no Go files", pkg.PkgPath)
}
pkgPath, err := toPkgPath(filepath.Dir(pkg.GoFiles[0]), imp.ID)
if err != nil {
return nil, nil, err
}
if _, ok := havePkgs[pkgPath]; !ok { if _, ok := havePkgs[pkgPath]; !ok {
needPkgsSet[pkgPath] = true needPkgsSet[pkgPath] = true
} }
@ -185,6 +207,52 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
return modifiedPkgs, needPkgs, err return modifiedPkgs, needPkgs, err
} }
// resolveImport finds the ID of a package given its import path.
// In particular, it will find the right vendored copy when in GOPATH mode.
func (state *golistState) resolveImport(sourceDir, importPath string) (string, error) {
env, err := state.getEnv()
if err != nil {
return "", err
}
if env["GOMOD"] != "" {
return importPath, nil
}
searchDir := sourceDir
for {
vendorDir := filepath.Join(searchDir, "vendor")
exists, ok := state.vendorDirs[vendorDir]
if !ok {
info, err := os.Stat(vendorDir)
exists = err == nil && info.IsDir()
state.vendorDirs[vendorDir] = exists
}
if exists {
vendoredPath := filepath.Join(vendorDir, importPath)
if info, err := os.Stat(vendoredPath); err == nil && info.IsDir() {
// We should probably check for .go files here, but shame on anyone who fools us.
path, ok, err := state.getPkgPath(vendoredPath)
if err != nil {
return "", err
}
if ok {
return path, nil
}
}
}
// We know we've hit the top of the filesystem when we Dir / and get /,
// or C:\ and get C:\, etc.
next := filepath.Dir(searchDir)
if next == searchDir {
break
}
searchDir = next
}
return importPath, nil
}
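
In GOPATH mode, resolveImport climbs parent directories looking for a vendor/ tree that contains the import path. A minimal, self-contained sketch of that upward walk (leaving out golistState and its vendorDirs cache, which this illustration does not model) might look like:

package main

import (
    "fmt"
    "os"
    "path/filepath"
)

// findVendoredDir walks from sourceDir up toward the filesystem root and
// returns the first vendor/<importPath> directory that exists, or "" if none.
func findVendoredDir(sourceDir, importPath string) string {
    dir := sourceDir
    for {
        candidate := filepath.Join(dir, "vendor", importPath)
        if info, err := os.Stat(candidate); err == nil && info.IsDir() {
            return candidate
        }
        next := filepath.Dir(dir)
        if next == dir { // reached "/" or a drive root such as C:\
            return ""
        }
        dir = next
    }
}

func main() {
    fmt.Println(findVendoredDir(".", "github.com/pkg/errors"))
}
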
func hasTestFiles(p *Package) bool { func hasTestFiles(p *Package) bool {
for _, f := range p.GoFiles { for _, f := range p.GoFiles {
if strings.HasSuffix(f, "_test.go") { if strings.HasSuffix(f, "_test.go") {
@ -194,44 +262,59 @@ func hasTestFiles(p *Package) bool {
return false return false
} }
// determineRootDirs returns a mapping from directories code can be contained in to the // determineRootDirs returns a mapping from absolute directories that could
// corresponding import path prefixes of those directories. // contain code to their corresponding import path prefixes.
// Its result is used to try to determine the import path for a package containing func (state *golistState) determineRootDirs() (map[string]string, error) {
// an overlay file. env, err := state.getEnv()
func determineRootDirs(cfg *Config) map[string]string {
// Assume modules first:
out, err := invokeGo(cfg, "list", "-m", "-json", "all")
if err != nil { if err != nil {
return determineRootDirsGOPATH(cfg) return nil, err
}
if env["GOMOD"] != "" {
state.rootsOnce.Do(func() {
state.rootDirs, state.rootDirsError = state.determineRootDirsModules()
})
} else {
state.rootsOnce.Do(func() {
state.rootDirs, state.rootDirsError = state.determineRootDirsGOPATH()
})
}
return state.rootDirs, state.rootDirsError
}
func (state *golistState) determineRootDirsModules() (map[string]string, error) {
out, err := state.invokeGo("list", "-m", "-json", "all")
if err != nil {
return nil, err
} }
m := map[string]string{} m := map[string]string{}
type jsonMod struct{ Path, Dir string } type jsonMod struct{ Path, Dir string }
for dec := json.NewDecoder(out); dec.More(); { for dec := json.NewDecoder(out); dec.More(); {
mod := new(jsonMod) mod := new(jsonMod)
if err := dec.Decode(mod); err != nil { if err := dec.Decode(mod); err != nil {
return m // Give up and return an empty map. Package won't be found for overlay. return nil, err
} }
if mod.Dir != "" && mod.Path != "" { if mod.Dir != "" && mod.Path != "" {
// This is a valid module; add it to the map. // This is a valid module; add it to the map.
m[mod.Dir] = mod.Path absDir, err := filepath.Abs(mod.Dir)
if err != nil {
return nil, err
}
m[absDir] = mod.Path
} }
} }
return m return m, nil
} }
func determineRootDirsGOPATH(cfg *Config) map[string]string { func (state *golistState) determineRootDirsGOPATH() (map[string]string, error) {
m := map[string]string{} m := map[string]string{}
out, err := invokeGo(cfg, "env", "GOPATH") for _, dir := range filepath.SplitList(state.mustGetEnv()["GOPATH"]) {
absDir, err := filepath.Abs(dir)
if err != nil { if err != nil {
// Could not determine root dir mapping. Everything is best-effort, so just return an empty map. return nil, err
// When we try to find the import path for a directory, there will be no root-dir match and
// we'll give up.
return m
} }
for _, p := range filepath.SplitList(string(bytes.TrimSpace(out.Bytes()))) { m[filepath.Join(absDir, "src")] = ""
m[filepath.Join(p, "src")] = ""
} }
return m return m, nil
} }
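
determineRootDirsModules consumes the stream of JSON objects printed by `go list -m -json all`; json.Decoder with More() is what lets it read them one at a time. A hedged, standalone sketch of the same decoding loop, shelling out to the go tool directly rather than through invokeGo:

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "os/exec"
    "path/filepath"
)

// listModuleDirs maps each downloaded module's absolute directory to its
// module path, mirroring the map built above.
func listModuleDirs() (map[string]string, error) {
    out, err := exec.Command("go", "list", "-m", "-json", "all").Output()
    if err != nil {
        return nil, err
    }
    type jsonMod struct{ Path, Dir string }
    m := map[string]string{}
    for dec := json.NewDecoder(bytes.NewReader(out)); dec.More(); {
        var mod jsonMod
        if err := dec.Decode(&mod); err != nil {
            return nil, err
        }
        if mod.Dir == "" || mod.Path == "" {
            continue // module not downloaded locally
        }
        abs, err := filepath.Abs(mod.Dir)
        if err != nil {
            return nil, err
        }
        m[abs] = mod.Path
    }
    return m, nil
}

func main() {
    dirs, err := listModuleDirs()
    fmt.Println(dirs, err)
}
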
func extractImports(filename string, contents []byte) ([]string, error) { func extractImports(filename string, contents []byte) ([]string, error) {

@ -23,6 +23,7 @@ import (
"sync" "sync"
"golang.org/x/tools/go/gcexportdata" "golang.org/x/tools/go/gcexportdata"
"golang.org/x/tools/internal/packagesinternal"
) )
// A LoadMode controls the amount of detail to return when loading. // A LoadMode controls the amount of detail to return when loading.
@ -34,6 +35,9 @@ import (
// Load may return more information than requested. // Load may return more information than requested.
type LoadMode int type LoadMode int
// TODO(matloob): When a V2 of go/packages is released, rename NeedExportsFile to
// NeedExportFile to make it consistent with the Package field it's adding.
const ( const (
// NeedName adds Name and PkgPath. // NeedName adds Name and PkgPath.
NeedName LoadMode = 1 << iota NeedName LoadMode = 1 << iota
@ -51,7 +55,7 @@ const (
// NeedDeps adds the fields requested by the LoadMode in the packages in Imports. // NeedDeps adds the fields requested by the LoadMode in the packages in Imports.
NeedDeps NeedDeps
// NeedExportsFile adds ExportsFile. // NeedExportsFile adds ExportFile.
NeedExportsFile NeedExportsFile
// NeedTypes adds Types, Fset, and IllTyped. // NeedTypes adds Types, Fset, and IllTyped.
@ -292,6 +296,15 @@ type Package struct {
// TypesSizes provides the effective size function for types in TypesInfo. // TypesSizes provides the effective size function for types in TypesInfo.
TypesSizes types.Sizes TypesSizes types.Sizes
// forTest is the package under test, if any.
forTest string
}
func init() {
packagesinternal.GetForTest = func(p interface{}) string {
return p.(*Package).forTest
}
} }
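
The forTest field stays unexported; the init above publishes an accessor through golang.org/x/tools/internal/packagesinternal so only sibling internal packages can read it. The sketch below compresses that hook pattern into one file; the names are illustrative stand-ins, not the real packagesinternal API:

package main

import "fmt"

// Package has an unexported field it wants to expose only to trusted callers.
type Package struct {
    ID      string
    forTest string // not part of the public API
}

// GetForTest stands in for the hook variable that would normally live in a
// separate internal package. It is a no-op until the owning package's init
// rebinds it.
var GetForTest = func(p interface{}) string { return "" }

func init() {
    // The owning package installs the real accessor at init time, so only
    // code that can reach the internal hook package gains access.
    GetForTest = func(p interface{}) string { return p.(*Package).forTest }
}

func main() {
    p := &Package{ID: "example.com/foo [example.com/foo.test]", forTest: "example.com/foo"}
    fmt.Println(GetForTest(p))
}
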
// An Error describes a problem with a package's metadata, syntax, or types. // An Error describes a problem with a package's metadata, syntax, or types.
@ -500,12 +513,23 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
if i, found := rootMap[pkg.ID]; found { if i, found := rootMap[pkg.ID]; found {
rootIndex = i rootIndex = i
} }
// Overlays can invalidate export data.
// TODO(matloob): make this check fine-grained based on dependencies on overlaid files
exportDataInvalid := len(ld.Overlay) > 0 || pkg.ExportFile == "" && pkg.PkgPath != "unsafe"
// This package needs type information if the caller requested types and the package is
// either a root, or it's a non-root and the user requested dependencies ...
needtypes := (ld.Mode&NeedTypes|NeedTypesInfo != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0))
// This package needs source if the call requested source (or types info, which implies source)
// and the package is either a root, or it's a non-root and the user requested dependencies...
needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) ||
// ... or if we need types and the exportData is invalid. We fall back to (incompletely)
// typechecking packages from source if they fail to compile.
(ld.Mode&NeedTypes|NeedTypesInfo != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe"
lpkg := &loaderPackage{ lpkg := &loaderPackage{
Package: pkg, Package: pkg,
needtypes: (ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && ld.Mode&NeedDeps != 0 && rootIndex < 0) || rootIndex >= 0, needtypes: needtypes,
needsrc: (ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && ld.Mode&NeedDeps != 0 && rootIndex < 0) || rootIndex >= 0 || needsrc: needsrc,
len(ld.Overlay) > 0 || // Overlays can invalidate export data. TODO(matloob): make this check fine-grained based on dependencies on overlaid files
pkg.ExportFile == "" && pkg.PkgPath != "unsafe",
} }
ld.pkgs[lpkg.ID] = lpkg ld.pkgs[lpkg.ID] = lpkg
if rootIndex >= 0 { if rootIndex >= 0 {
@ -713,7 +737,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
// which would then require that such created packages be explicitly // which would then require that such created packages be explicitly
// inserted back into the Import graph as a final step after export data loading. // inserted back into the Import graph as a final step after export data loading.
// The Diamond test exercises this case. // The Diamond test exercises this case.
if !lpkg.needtypes { if !lpkg.needtypes && !lpkg.needsrc {
return return
} }
if !lpkg.needsrc { if !lpkg.needsrc {
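
The needtypes/needsrc computation works off the LoadMode bitmask. From the public side of go/packages, a caller selects the same bits when loading; a small usage sketch (assuming it is run inside a module, and "./..." is just an example pattern):

package main

import (
    "fmt"
    "log"

    "golang.org/x/tools/go/packages"
)

func main() {
    cfg := &packages.Config{
        // Request names, files, syntax and type information; NeedDeps extends
        // the requested detail to the packages listed in Imports as well.
        Mode: packages.NeedName | packages.NeedFiles | packages.NeedSyntax |
            packages.NeedTypes | packages.NeedTypesInfo | packages.NeedDeps,
    }
    pkgs, err := packages.Load(cfg, "./...")
    if err != nil {
        log.Fatal(err)
    }
    for _, p := range pkgs {
        fmt.Println(p.ID, p.PkgPath, len(p.Syntax), "files parsed")
    }
}
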

@ -4,6 +4,7 @@ package imports // import "golang.org/x/tools/imports"
import ( import (
"go/build" "go/build"
"os"
intimp "golang.org/x/tools/internal/imports" intimp "golang.org/x/tools/internal/imports"
) )
@ -42,6 +43,10 @@ func Process(filename string, src []byte, opt *Options) ([]byte, error) {
Env: &intimp.ProcessEnv{ Env: &intimp.ProcessEnv{
GOPATH: build.Default.GOPATH, GOPATH: build.Default.GOPATH,
GOROOT: build.Default.GOROOT, GOROOT: build.Default.GOROOT,
GOFLAGS: os.Getenv("GOFLAGS"),
GO111MODULE: os.Getenv("GO111MODULE"),
GOPROXY: os.Getenv("GOPROXY"),
GOSUMDB: os.Getenv("GOSUMDB"),
Debug: Debug, Debug: Debug,
LocalPrefix: LocalPrefix, LocalPrefix: LocalPrefix,
}, },
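
With GOFLAGS, GO111MODULE, GOPROXY and GOSUMDB now copied from the caller's environment, the public imports.Process wrapper behaves consistently in module mode. A minimal usage sketch (assuming a normal Go installation so the missing stdlib import can be resolved):

package main

import (
    "fmt"
    "log"

    "golang.org/x/tools/imports"
)

func main() {
    src := []byte("package demo\n\nfunc Sum() int { return rand.Intn(10) }\n")
    // Process adds the missing "math/rand" import; module-related behaviour
    // follows the environment variables forwarded in the hunk above.
    out, err := imports.Process("demo.go", src, &imports.Options{Comments: true})
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("%s", out)
}
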

@ -14,14 +14,14 @@ import (
"sync" "sync"
) )
// TraverseLink is used as a return value from WalkFuncs to indicate that the // ErrTraverseLink is used as a return value from WalkFuncs to indicate that the
// symlink named in the call may be traversed. // symlink named in the call may be traversed.
var TraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory") var ErrTraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory")
// SkipFiles is a used as a return value from WalkFuncs to indicate that the // ErrSkipFiles is used as a return value from WalkFuncs to indicate that the
// callback should not be called for any other files in the current directory. // callback should not be called for any other files in the current directory.
// Child directories will still be traversed. // Child directories will still be traversed.
var SkipFiles = errors.New("fastwalk: skip remaining files in directory") var ErrSkipFiles = errors.New("fastwalk: skip remaining files in directory")
// Walk is a faster implementation of filepath.Walk. // Walk is a faster implementation of filepath.Walk.
// //
@ -167,7 +167,7 @@ func (w *walker) onDirEnt(dirName, baseName string, typ os.FileMode) error {
err := w.fn(joined, typ) err := w.fn(joined, typ)
if typ == os.ModeSymlink { if typ == os.ModeSymlink {
if err == TraverseLink { if err == ErrTraverseLink {
// Set callbackDone so we don't call it twice for both the // Set callbackDone so we don't call it twice for both the
// symlink-as-symlink and the symlink-as-directory later: // symlink-as-symlink and the symlink-as-directory later:
w.enqueue(walkItem{dir: joined, callbackDone: true}) w.enqueue(walkItem{dir: joined, callbackDone: true})
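
The rename from TraverseLink/SkipFiles to ErrTraverseLink/ErrSkipFiles follows the convention that exported error variables carry an Err prefix; callers still compare against the sentinels with ==. A self-contained sketch of that pattern, deliberately not importing the internal fastwalk package:

package main

import (
    "errors"
    "fmt"
    "os"
    "path/filepath"
)

// Sentinel errors, named with the Err prefix the rename above adopts.
var (
    ErrTraverseLink = errors.New("walk: traverse symlink, assuming target is a directory")
    ErrSkipFiles    = errors.New("walk: skip remaining files in directory")
)

// walkFn mimics a fastwalk-style callback: it may return one of the
// sentinels to steer the walker.
func walkFn(path string, typ os.FileMode) error {
    if typ.IsRegular() && filepath.Ext(path) != ".go" {
        return ErrSkipFiles // not a Go file: nothing else in this dir matters
    }
    if typ == os.ModeSymlink {
        return ErrTraverseLink // ask the walker to follow the link
    }
    return nil
}

func main() {
    // A caller separates control-flow sentinels from real failures with ==.
    switch err := walkFn("main.go", 0); err {
    case nil, ErrSkipFiles, ErrTraverseLink:
        fmt.Println("control-flow result:", err)
    default:
        fmt.Println("real error:", err)
    }
}
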

@ -26,7 +26,7 @@ func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) e
continue continue
} }
if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil { if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil {
if err == SkipFiles { if err == ErrSkipFiles {
skipFiles = true skipFiles = true
continue continue
} }

@ -66,7 +66,7 @@ func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) e
continue continue
} }
if err := fn(dirName, name, typ); err != nil { if err := fn(dirName, name, typ); err != nil {
if err == SkipFiles { if err == ErrSkipFiles {
skipFiles = true skipFiles = true
continue continue
} }

@ -77,6 +77,7 @@ func WalkSkip(roots []Root, add func(root Root, dir string), skip func(root Root
} }
} }
// walkDir creates a walker and starts fastwalk with this walker.
func walkDir(root Root, add func(Root, string), skip func(root Root, dir string) bool, opts Options) { func walkDir(root Root, add func(Root, string), skip func(root Root, dir string) bool, opts Options) {
if _, err := os.Stat(root.Path); os.IsNotExist(err) { if _, err := os.Stat(root.Path); os.IsNotExist(err) {
if opts.Debug { if opts.Debug {
@ -114,7 +115,7 @@ type walker struct {
ignoredDirs []os.FileInfo // The ignored directories, loaded from .goimportsignore files. ignoredDirs []os.FileInfo // The ignored directories, loaded from .goimportsignore files.
} }
// init initializes the walker based on its Options. // init initializes the walker based on its Options
func (w *walker) init() { func (w *walker) init() {
var ignoredPaths []string var ignoredPaths []string
if w.root.Type == RootModuleCache { if w.root.Type == RootModuleCache {
@ -167,6 +168,7 @@ func (w *walker) getIgnoredDirs(path string) []string {
return ignoredDirs return ignoredDirs
} }
// shouldSkipDir reports whether the file should be skipped or not.
func (w *walker) shouldSkipDir(fi os.FileInfo, dir string) bool { func (w *walker) shouldSkipDir(fi os.FileInfo, dir string) bool {
for _, ignoredDir := range w.ignoredDirs { for _, ignoredDir := range w.ignoredDirs {
if os.SameFile(fi, ignoredDir) { if os.SameFile(fi, ignoredDir) {
@ -180,20 +182,21 @@ func (w *walker) shouldSkipDir(fi os.FileInfo, dir string) bool {
return false return false
} }
// walk walks through the given path.
func (w *walker) walk(path string, typ os.FileMode) error { func (w *walker) walk(path string, typ os.FileMode) error {
dir := filepath.Dir(path) dir := filepath.Dir(path)
if typ.IsRegular() { if typ.IsRegular() {
if dir == w.root.Path && (w.root.Type == RootGOROOT || w.root.Type == RootGOPATH) { if dir == w.root.Path && (w.root.Type == RootGOROOT || w.root.Type == RootGOPATH) {
// Doesn't make sense to have regular files // Doesn't make sense to have regular files
// directly in your $GOPATH/src or $GOROOT/src. // directly in your $GOPATH/src or $GOROOT/src.
return fastwalk.SkipFiles return fastwalk.ErrSkipFiles
} }
if !strings.HasSuffix(path, ".go") { if !strings.HasSuffix(path, ".go") {
return nil return nil
} }
w.add(w.root, dir) w.add(w.root, dir)
return fastwalk.SkipFiles return fastwalk.ErrSkipFiles
} }
if typ == os.ModeDir { if typ == os.ModeDir {
base := filepath.Base(path) base := filepath.Base(path)
@ -221,7 +224,7 @@ func (w *walker) walk(path string, typ os.FileMode) error {
return nil return nil
} }
if w.shouldTraverse(dir, fi) { if w.shouldTraverse(dir, fi) {
return fastwalk.TraverseLink return fastwalk.ErrTraverseLink
} }
} }
return nil return nil

@ -27,7 +27,6 @@ import (
"unicode/utf8" "unicode/utf8"
"golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/go/packages"
"golang.org/x/tools/internal/gopathwalk" "golang.org/x/tools/internal/gopathwalk"
) )
@ -83,6 +82,7 @@ type ImportFix struct {
IdentName string IdentName string
// FixType is the type of fix this is (AddImport, DeleteImport, SetImportName). // FixType is the type of fix this is (AddImport, DeleteImport, SetImportName).
FixType ImportFixType FixType ImportFixType
Relevance int // see pkg
} }
// An ImportInfo represents a single import statement. // An ImportInfo represents a single import statement.
@ -585,62 +585,86 @@ func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv
return fixes, nil return fixes, nil
} }
// getCandidatePkgs returns the list of pkgs that are accessible from filename, // Highest relevance, used for the standard library. Chosen arbitrarily to
// optionall filtered to only packages named pkgName. // match pre-existing gopls code.
func getCandidatePkgs(pkgName, filename string, env *ProcessEnv) ([]*pkg, error) { const MaxRelevance = 7
// TODO(heschi): filter out current package. (Don't forget x_test can import x.)
var result []*pkg // getCandidatePkgs works with the passed callback to find all acceptable packages.
// Start off with the standard library. // It deduplicates by import path, and uses a cached stdlib rather than reading
for importPath := range stdlib { // from disk.
if pkgName != "" && path.Base(importPath) != pkgName { func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filename, filePkg string, env *ProcessEnv) error {
continue notSelf := func(p *pkg) bool {
return p.packageName != filePkg || p.dir != filepath.Dir(filename)
} }
result = append(result, &pkg{ // Start off with the standard library.
for importPath, exports := range stdlib {
p := &pkg{
dir: filepath.Join(env.GOROOT, "src", importPath), dir: filepath.Join(env.GOROOT, "src", importPath),
importPathShort: importPath, importPathShort: importPath,
packageName: path.Base(importPath), packageName: path.Base(importPath),
relevance: 0, relevance: MaxRelevance,
}) }
if notSelf(p) && wrappedCallback.packageNameLoaded(p) {
wrappedCallback.exportsLoaded(p, exports)
}
} }
var mu sync.Mutex
dupCheck := map[string]struct{}{}
scanFilter := &scanCallback{
rootFound: func(root gopathwalk.Root) bool {
// Exclude goroot results -- getting them is relatively expensive, not cached, // Exclude goroot results -- getting them is relatively expensive, not cached,
// and generally redundant with the in-memory version. // and generally redundant with the in-memory version.
exclude := []gopathwalk.RootType{gopathwalk.RootGOROOT} return root.Type != gopathwalk.RootGOROOT && wrappedCallback.rootFound(root)
// Only the go/packages resolver uses the first argument, and nobody uses that resolver. },
scannedPkgs, err := env.GetResolver().scan(nil, true, exclude) dirFound: wrappedCallback.dirFound,
if err != nil { packageNameLoaded: func(pkg *pkg) bool {
return nil, err mu.Lock()
} defer mu.Unlock()
dupCheck := map[string]struct{}{}
for _, pkg := range scannedPkgs {
if pkgName != "" && pkg.packageName != pkgName {
continue
}
if !canUse(filename, pkg.dir) {
continue
}
if _, ok := dupCheck[pkg.importPathShort]; ok { if _, ok := dupCheck[pkg.importPathShort]; ok {
continue return false
} }
dupCheck[pkg.importPathShort] = struct{}{} dupCheck[pkg.importPathShort] = struct{}{}
result = append(result, pkg) return notSelf(pkg) && wrappedCallback.packageNameLoaded(pkg)
},
exportsLoaded: func(pkg *pkg, exports []string) {
// If we're an x_test, load the package under test's test variant.
if strings.HasSuffix(filePkg, "_test") && pkg.dir == filepath.Dir(filename) {
var err error
_, exports, err = loadExportsFromFiles(ctx, env, pkg.dir, true)
if err != nil {
return
} }
}
wrappedCallback.exportsLoaded(pkg, exports)
},
}
return env.GetResolver().scan(ctx, scanFilter)
}
// Sort first by relevance, then by package name, with import path as a tiebreaker. func ScoreImportPaths(ctx context.Context, env *ProcessEnv, paths []string) map[string]int {
sort.Slice(result, func(i, j int) bool { result := make(map[string]int)
pi, pj := result[i], result[j] for _, path := range paths {
if pi.relevance != pj.relevance { result[path] = env.GetResolver().scoreImportPath(ctx, path)
return pi.relevance < pj.relevance
} }
if pi.packageName != pj.packageName { return result
return pi.packageName < pj.packageName }
}
return pi.importPathShort < pj.importPathShort
})
return result, nil func PrimeCache(ctx context.Context, env *ProcessEnv) error {
// Fully scan the disk for directories, but don't actually read any Go files.
callback := &scanCallback{
rootFound: func(gopathwalk.Root) bool {
return true
},
dirFound: func(pkg *pkg) bool {
return false
},
packageNameLoaded: func(pkg *pkg) bool {
return false
},
}
return getCandidatePkgs(ctx, callback, "", "", env)
} }
func candidateImportName(pkg *pkg) string { func candidateImportName(pkg *pkg) string {
@ -651,23 +675,37 @@ func candidateImportName(pkg *pkg) string {
} }
// getAllCandidates gets all of the candidates to be imported, regardless of if they are needed. // getAllCandidates gets all of the candidates to be imported, regardless of if they are needed.
func getAllCandidates(filename string, env *ProcessEnv) ([]ImportFix, error) { func getAllCandidates(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error {
pkgs, err := getCandidatePkgs("", filename, env) callback := &scanCallback{
if err != nil { rootFound: func(gopathwalk.Root) bool {
return nil, err return true
},
dirFound: func(pkg *pkg) bool {
if !canUse(filename, pkg.dir) {
return false
} }
result := make([]ImportFix, 0, len(pkgs)) // Try the assumed package name first, then a simpler path match
for _, pkg := range pkgs { // in case of packages named vN, which are not uncommon.
result = append(result, ImportFix{ return strings.HasPrefix(ImportPathToAssumedName(pkg.importPathShort), searchPrefix) ||
strings.HasPrefix(path.Base(pkg.importPathShort), searchPrefix)
},
packageNameLoaded: func(pkg *pkg) bool {
if !strings.HasPrefix(pkg.packageName, searchPrefix) {
return false
}
wrapped(ImportFix{
StmtInfo: ImportInfo{ StmtInfo: ImportInfo{
ImportPath: pkg.importPathShort, ImportPath: pkg.importPathShort,
Name: candidateImportName(pkg), Name: candidateImportName(pkg),
}, },
IdentName: pkg.packageName, IdentName: pkg.packageName,
FixType: AddImport, FixType: AddImport,
Relevance: pkg.relevance,
}) })
return false
},
} }
return result, nil return getCandidatePkgs(ctx, callback, filename, filePkg, env)
} }
// A PackageExport is a package and its exports. // A PackageExport is a package and its exports.
@ -676,42 +714,34 @@ type PackageExport struct {
Exports []string Exports []string
} }
func getPackageExports(completePackage, filename string, env *ProcessEnv) ([]PackageExport, error) { func getPackageExports(ctx context.Context, wrapped func(PackageExport), searchPkg, filename, filePkg string, env *ProcessEnv) error {
pkgs, err := getCandidatePkgs(completePackage, filename, env) callback := &scanCallback{
if err != nil { rootFound: func(gopathwalk.Root) bool {
return nil, err return true
} },
dirFound: func(pkg *pkg) bool {
results := make([]PackageExport, 0, len(pkgs)) return pkgIsCandidate(filename, references{searchPkg: nil}, pkg)
for _, pkg := range pkgs { },
fix := &ImportFix{ packageNameLoaded: func(pkg *pkg) bool {
return pkg.packageName == searchPkg
},
exportsLoaded: func(pkg *pkg, exports []string) {
sort.Strings(exports)
wrapped(PackageExport{
Fix: &ImportFix{
StmtInfo: ImportInfo{ StmtInfo: ImportInfo{
ImportPath: pkg.importPathShort, ImportPath: pkg.importPathShort,
Name: candidateImportName(pkg), Name: candidateImportName(pkg),
}, },
IdentName: pkg.packageName, IdentName: pkg.packageName,
FixType: AddImport, FixType: AddImport,
} Relevance: pkg.relevance,
var exports []string },
if e, ok := stdlib[pkg.importPathShort]; ok {
exports = e
} else {
exports, err = loadExportsForPackage(context.Background(), env, completePackage, pkg)
if err != nil {
if env.Debug {
env.Logf("while completing %q, error loading exports from %q: %v", completePackage, pkg.importPathShort, err)
}
continue
}
}
sort.Strings(exports)
results = append(results, PackageExport{
Fix: fix,
Exports: exports, Exports: exports,
}) })
},
} }
return getCandidatePkgs(ctx, callback, filename, filePkg, env)
return results, nil
} }
// ProcessEnv contains environment variables and settings that affect the use of // ProcessEnv contains environment variables and settings that affect the use of
@ -725,15 +755,19 @@ type ProcessEnv struct {
GOPATH, GOROOT, GO111MODULE, GOPROXY, GOFLAGS, GOSUMDB string GOPATH, GOROOT, GO111MODULE, GOPROXY, GOFLAGS, GOSUMDB string
WorkingDir string WorkingDir string
// If true, use go/packages regardless of the environment.
ForceGoPackages bool
// Logf is the default logger for the ProcessEnv. // Logf is the default logger for the ProcessEnv.
Logf func(format string, args ...interface{}) Logf func(format string, args ...interface{})
resolver Resolver resolver Resolver
} }
// CopyConfig copies the env's configuration into a new env.
func (e *ProcessEnv) CopyConfig() *ProcessEnv {
copy := *e
copy.resolver = nil
return &copy
}
func (e *ProcessEnv) env() []string { func (e *ProcessEnv) env() []string {
env := os.Environ() env := os.Environ()
add := func(k, v string) { add := func(k, v string) {
@ -757,39 +791,34 @@ func (e *ProcessEnv) GetResolver() Resolver {
if e.resolver != nil { if e.resolver != nil {
return e.resolver return e.resolver
} }
if e.ForceGoPackages {
e.resolver = &goPackagesResolver{env: e}
return e.resolver
}
out, err := e.invokeGo("env", "GOMOD") out, err := e.invokeGo("env", "GOMOD")
if err != nil || len(bytes.TrimSpace(out.Bytes())) == 0 { if err != nil || len(bytes.TrimSpace(out.Bytes())) == 0 {
e.resolver = &gopathResolver{env: e} e.resolver = newGopathResolver(e)
return e.resolver return e.resolver
} }
e.resolver = &ModuleResolver{env: e} e.resolver = newModuleResolver(e)
return e.resolver return e.resolver
} }
func (e *ProcessEnv) newPackagesConfig(mode packages.LoadMode) *packages.Config {
return &packages.Config{
Mode: mode,
Dir: e.WorkingDir,
Env: e.env(),
}
}
func (e *ProcessEnv) buildContext() *build.Context { func (e *ProcessEnv) buildContext() *build.Context {
ctx := build.Default ctx := build.Default
ctx.GOROOT = e.GOROOT ctx.GOROOT = e.GOROOT
ctx.GOPATH = e.GOPATH ctx.GOPATH = e.GOPATH
// As of Go 1.14, build.Context has a WorkingDir field // As of Go 1.14, build.Context has a Dir field
// (see golang.org/issue/34860). // (see golang.org/issue/34860).
// Populate it only if present. // Populate it only if present.
if wd := reflect.ValueOf(&ctx).Elem().FieldByName("WorkingDir"); wd.IsValid() && wd.Kind() == reflect.String { rc := reflect.ValueOf(&ctx).Elem()
wd.SetString(e.WorkingDir) dir := rc.FieldByName("Dir")
if !dir.IsValid() {
// Working drafts of Go 1.14 named the field "WorkingDir" instead.
// TODO(bcmills): Remove this case after the Go 1.14 beta has been released.
dir = rc.FieldByName("WorkingDir")
} }
if dir.IsValid() && dir.Kind() == reflect.String {
dir.SetString(e.WorkingDir)
}
return &ctx return &ctx
} }
@ -848,94 +877,65 @@ func addStdlibCandidates(pass *pass, refs references) {
type Resolver interface { type Resolver interface {
// loadPackageNames loads the package names in importPaths. // loadPackageNames loads the package names in importPaths.
loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error)
// scan finds (at least) the packages satisfying refs. If loadNames is true, // scan works with callback to search for packages. See scanCallback for details.
// package names will be set on the results, and dirs whose package name scan(ctx context.Context, callback *scanCallback) error
// could not be determined will be excluded.
scan(refs references, loadNames bool, exclude []gopathwalk.RootType) ([]*pkg, error)
// loadExports returns the set of exported symbols in the package at dir. // loadExports returns the set of exported symbols in the package at dir.
// loadExports may be called concurrently. // loadExports may be called concurrently.
loadExports(ctx context.Context, pkg *pkg) (string, []string, error) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error)
// scoreImportPath returns the relevance for an import path.
scoreImportPath(ctx context.Context, path string) int
ClearForNewScan() ClearForNewScan()
} }
// gopackagesResolver implements resolver for GOPATH and module workspaces using go/packages. // A scanCallback controls a call to scan and receives its results.
type goPackagesResolver struct { // In general, minor errors will be silently discarded; a user should not
env *ProcessEnv // expect to receive a full series of calls for everything.
} type scanCallback struct {
// rootFound is called before scanning a new root dir. If it returns true,
func (r *goPackagesResolver) ClearForNewScan() {} // the root will be scanned. Returning false will not necessarily prevent
// directories from that root making it to dirFound.
func (r *goPackagesResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { rootFound func(gopathwalk.Root) bool
if len(importPaths) == 0 { // dirFound is called when a directory is found that is possibly a Go package.
return nil, nil // pkg will be populated with everything except packageName.
} // If it returns true, the package's name will be loaded.
cfg := r.env.newPackagesConfig(packages.LoadFiles) dirFound func(pkg *pkg) bool
pkgs, err := packages.Load(cfg, importPaths...) // packageNameLoaded is called when a package is found and its name is loaded.
if err != nil { // If it returns true, the package's exports will be loaded.
return nil, err packageNameLoaded func(pkg *pkg) bool
} // exportsLoaded is called when a package's exports have been loaded.
names := map[string]string{} exportsLoaded func(pkg *pkg, exports []string)
for _, pkg := range pkgs {
names[VendorlessPath(pkg.PkgPath)] = pkg.Name
}
// We may not have found all the packages. Guess the rest.
for _, path := range importPaths {
if _, ok := names[path]; ok {
continue
}
names[path] = ImportPathToAssumedName(path)
}
return names, nil
}
func (r *goPackagesResolver) scan(refs references, _ bool, _ []gopathwalk.RootType) ([]*pkg, error) {
var loadQueries []string
for pkgName := range refs {
loadQueries = append(loadQueries, "iamashamedtousethedisabledqueryname="+pkgName)
}
sort.Strings(loadQueries)
cfg := r.env.newPackagesConfig(packages.LoadFiles)
goPackages, err := packages.Load(cfg, loadQueries...)
if err != nil {
return nil, err
}
var scan []*pkg
for _, goPackage := range goPackages {
scan = append(scan, &pkg{
dir: filepath.Dir(goPackage.CompiledGoFiles[0]),
importPathShort: VendorlessPath(goPackage.PkgPath),
goPackage: goPackage,
packageName: goPackage.Name,
})
}
return scan, nil
}
func (r *goPackagesResolver) loadExports(ctx context.Context, pkg *pkg) (string, []string, error) {
if pkg.goPackage == nil {
return "", nil, fmt.Errorf("goPackage not set")
}
var exports []string
fset := token.NewFileSet()
for _, fname := range pkg.goPackage.CompiledGoFiles {
f, err := parser.ParseFile(fset, fname, nil, 0)
if err != nil {
return "", nil, fmt.Errorf("parsing %s: %v", fname, err)
}
for name := range f.Scope.Objects {
if ast.IsExported(name) {
exports = append(exports, name)
}
}
}
return pkg.goPackage.Name, exports, nil
} }
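
scan now takes a scanCallback instead of returning a slice: each hook that returns true asks the resolver to pay for the next, more expensive stage (name loading, then export loading). A standalone sketch of that filter chain with stand-in types (rootFound omitted for brevity; nothing here is the real resolver):

package main

import "fmt"

// Minimal stand-ins; the field names mirror scanCallback above, everything
// else is illustrative only.
type pkg struct {
    dir, importPathShort, packageName string
}

type scanCallback struct {
    dirFound          func(*pkg) bool      // cheap: directory looks like a package
    packageNameLoaded func(*pkg) bool      // medium: name parsed from source
    exportsLoaded     func(*pkg, []string) // expensive: exported symbols read
}

// scan drives the chain: each stage that returns true unlocks the next one.
func scan(dirs []*pkg, cb *scanCallback) {
    for _, p := range dirs {
        if !cb.dirFound(p) {
            continue
        }
        p.packageName = "demo" // pretend we parsed a file to learn the name
        if !cb.packageNameLoaded(p) {
            continue
        }
        cb.exportsLoaded(p, []string{"Do", "Thing"})
    }
}

func main() {
    cb := &scanCallback{
        dirFound:          func(p *pkg) bool { return p.importPathShort != "" },
        packageNameLoaded: func(p *pkg) bool { return p.packageName == "demo" },
        exportsLoaded:     func(p *pkg, exports []string) { fmt.Println(p.importPathShort, exports) },
    }
    scan([]*pkg{{dir: "/tmp/demo", importPathShort: "example.com/demo"}}, cb)
}
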
func addExternalCandidates(pass *pass, refs references, filename string) error { func addExternalCandidates(pass *pass, refs references, filename string) error {
dirScan, err := pass.env.GetResolver().scan(refs, false, nil) var mu sync.Mutex
found := make(map[string][]pkgDistance)
callback := &scanCallback{
rootFound: func(gopathwalk.Root) bool {
return true // We want everything.
},
dirFound: func(pkg *pkg) bool {
return pkgIsCandidate(filename, refs, pkg)
},
packageNameLoaded: func(pkg *pkg) bool {
if _, want := refs[pkg.packageName]; !want {
return false
}
if pkg.dir == pass.srcDir && pass.f.Name.Name == pkg.packageName {
// The candidate is in the same directory and has the
// same package name. Don't try to import ourselves.
return false
}
if !canUse(filename, pkg.dir) {
return false
}
mu.Lock()
defer mu.Unlock()
found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(pass.srcDir, pkg.dir)})
return false // We'll do our own loading after we sort.
},
}
err := pass.env.GetResolver().scan(context.Background(), callback)
if err != nil { if err != nil {
return err return err
} }
@ -962,7 +962,7 @@ func addExternalCandidates(pass *pass, refs references, filename string) error {
go func(pkgName string, symbols map[string]bool) { go func(pkgName string, symbols map[string]bool) {
defer wg.Done() defer wg.Done()
found, err := findImport(ctx, pass, dirScan, pkgName, symbols, filename) found, err := findImport(ctx, pass, found[pkgName], pkgName, symbols, filename)
if err != nil { if err != nil {
firstErrOnce.Do(func() { firstErrOnce.Do(func() {
@ -1034,23 +1034,35 @@ func ImportPathToAssumedName(importPath string) string {
// gopathResolver implements resolver for GOPATH workspaces. // gopathResolver implements resolver for GOPATH workspaces.
type gopathResolver struct { type gopathResolver struct {
env *ProcessEnv env *ProcessEnv
walked bool
cache *dirInfoCache cache *dirInfoCache
scanSema chan struct{} // scanSema prevents concurrent scans.
} }
func (r *gopathResolver) init() { func newGopathResolver(env *ProcessEnv) *gopathResolver {
if r.cache == nil { r := &gopathResolver{
r.cache = &dirInfoCache{ env: env,
cache: &dirInfoCache{
dirs: map[string]*directoryPackageInfo{}, dirs: map[string]*directoryPackageInfo{},
listeners: map[*int]cacheListener{},
},
scanSema: make(chan struct{}, 1),
} }
} r.scanSema <- struct{}{}
return r
} }
func (r *gopathResolver) ClearForNewScan() { func (r *gopathResolver) ClearForNewScan() {
r.cache = nil <-r.scanSema
r.cache = &dirInfoCache{
dirs: map[string]*directoryPackageInfo{},
listeners: map[*int]cacheListener{},
}
r.walked = false
r.scanSema <- struct{}{}
} }
func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
r.init()
names := map[string]string{} names := map[string]string{}
for _, path := range importPaths { for _, path := range importPaths {
names[path] = importPathToName(r.env, path, srcDir) names[path] = importPathToName(r.env, path, srcDir)
@ -1130,7 +1142,6 @@ func packageDirToName(dir string) (packageName string, err error) {
} }
type pkg struct { type pkg struct {
goPackage *packages.Package
dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http") dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http")
importPathShort string // vendorless import path ("net/http", "a/b") importPathShort string // vendorless import path ("net/http", "a/b")
packageName string // package name loaded from source if requested packageName string // package name loaded from source if requested
@ -1178,8 +1189,7 @@ func distance(basepath, targetpath string) int {
return strings.Count(p, string(filepath.Separator)) + 1 return strings.Count(p, string(filepath.Separator)) + 1
} }
func (r *gopathResolver) scan(_ references, loadNames bool, exclude []gopathwalk.RootType) ([]*pkg, error) { func (r *gopathResolver) scan(ctx context.Context, callback *scanCallback) error {
r.init()
add := func(root gopathwalk.Root, dir string) { add := func(root gopathwalk.Root, dir string) {
// We assume cached directories have not changed. We can skip them and their // We assume cached directories have not changed. We can skip them and their
// children. // children.
@ -1196,56 +1206,84 @@ func (r *gopathResolver) scan(_ references, loadNames bool, exclude []gopathwalk
} }
r.cache.Store(dir, info) r.cache.Store(dir, info)
} }
roots := filterRoots(gopathwalk.SrcDirsRoots(r.env.buildContext()), exclude) processDir := func(info directoryPackageInfo) {
gopathwalk.Walk(roots, add, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: false}) // Skip this directory if we were not able to get the package information successfully.
var result []*pkg if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
for _, dir := range r.cache.Keys() { return
info, ok := r.cache.Load(dir)
if !ok {
continue
}
if loadNames {
var err error
info, err = r.cache.CachePackageName(info)
if err != nil {
continue
}
} }
p := &pkg{ p := &pkg{
importPathShort: info.nonCanonicalImportPath, importPathShort: info.nonCanonicalImportPath,
dir: dir, dir: info.dir,
relevance: 1, relevance: MaxRelevance - 1,
packageName: info.packageName,
} }
if info.rootType == gopathwalk.RootGOROOT { if info.rootType == gopathwalk.RootGOROOT {
p.relevance = 0 p.relevance = MaxRelevance
} }
result = append(result, p)
if !callback.dirFound(p) {
return
} }
return result, nil var err error
p.packageName, err = r.cache.CachePackageName(info)
if err != nil {
return
}
if !callback.packageNameLoaded(p) {
return
}
if _, exports, err := r.loadExports(ctx, p, false); err == nil {
callback.exportsLoaded(p, exports)
}
}
stop := r.cache.ScanAndListen(ctx, processDir)
defer stop()
// The callback is not necessarily safe to use in the goroutine below. Process roots eagerly.
roots := filterRoots(gopathwalk.SrcDirsRoots(r.env.buildContext()), callback.rootFound)
// We can't cancel walks, because we need them to finish to have a usable
// cache. Instead, run them in a separate goroutine and detach.
scanDone := make(chan struct{})
go func() {
select {
case <-ctx.Done():
return
case <-r.scanSema:
}
defer func() { r.scanSema <- struct{}{} }()
gopathwalk.Walk(roots, add, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: false})
close(scanDone)
}()
select {
case <-ctx.Done():
case <-scanDone:
}
return nil
} }
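
The walk above cannot be cancelled halfway without leaving the cache unusable, so it runs detached behind the scanSema token and the caller merely stops waiting once its context is done. A compact sketch of that detach-and-wait shape:

package main

import (
    "context"
    "fmt"
    "time"
)

// runDetached starts work() under a one-slot semaphore and returns as soon as
// either the work finishes or ctx is cancelled; the work keeps running so the
// state it is filling ends up complete.
func runDetached(ctx context.Context, sema chan struct{}, work func()) {
    done := make(chan struct{})
    go func() {
        select {
        case <-ctx.Done():
            return // never acquired the semaphore; give up without starting
        case <-sema:
        }
        defer func() { sema <- struct{}{} }()
        work()
        close(done)
    }()
    select {
    case <-ctx.Done():
    case <-done:
    }
}

func main() {
    sema := make(chan struct{}, 1)
    sema <- struct{}{} // the semaphore starts out available
    ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
    defer cancel()
    runDetached(ctx, sema, func() {
        time.Sleep(200 * time.Millisecond)
        fmt.Println("detached walk finished")
    })
    fmt.Println("caller stopped waiting")
    time.Sleep(300 * time.Millisecond) // let the detached walk complete before exiting
}
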
func filterRoots(roots []gopathwalk.Root, exclude []gopathwalk.RootType) []gopathwalk.Root { func (r *gopathResolver) scoreImportPath(ctx context.Context, path string) int {
var result []gopathwalk.Root if _, ok := stdlib[path]; ok {
outer: return MaxRelevance
for _, root := range roots {
for _, i := range exclude {
if i == root.Type {
continue outer
} }
return MaxRelevance - 1
}
func filterRoots(roots []gopathwalk.Root, include func(gopathwalk.Root) bool) []gopathwalk.Root {
var result []gopathwalk.Root
for _, root := range roots {
if !include(root) {
continue
} }
result = append(result, root) result = append(result, root)
} }
return result return result
} }
func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg) (string, []string, error) { func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
r.init() if info, ok := r.cache.Load(pkg.dir); ok && !includeTest {
if info, ok := r.cache.Load(pkg.dir); ok {
return r.cache.CacheExports(ctx, r.env, info) return r.cache.CacheExports(ctx, r.env, info)
} }
return loadExportsFromFiles(ctx, r.env, pkg.dir) return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
} }
// VendorlessPath returns the devendorized version of the import path ipath. // VendorlessPath returns the devendorized version of the import path ipath.
@ -1261,7 +1299,7 @@ func VendorlessPath(ipath string) string {
return ipath return ipath
} }
func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string) (string, []string, error) { func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, includeTest bool) (string, []string, error) {
var exports []string var exports []string
// Look for non-test, buildable .go files which could provide exports. // Look for non-test, buildable .go files which could provide exports.
@ -1272,7 +1310,7 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string) (str
var files []os.FileInfo var files []os.FileInfo
for _, fi := range all { for _, fi := range all {
name := fi.Name() name := fi.Name()
if !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") { if !strings.HasSuffix(name, ".go") || (!includeTest && strings.HasSuffix(name, "_test.go")) {
continue continue
} }
match, err := env.buildContext().MatchFile(dir, fi.Name()) match, err := env.buildContext().MatchFile(dir, fi.Name())
@ -1305,6 +1343,10 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string) (str
// handled by MatchFile above. // handled by MatchFile above.
continue continue
} }
if includeTest && strings.HasSuffix(f.Name.Name, "_test") {
// x_test package. We want internal test files only.
continue
}
pkgName = f.Name.Name pkgName = f.Name.Name
for name := range f.Scope.Objects { for name := range f.Scope.Objects {
if ast.IsExported(name) { if ast.IsExported(name) {
@ -1323,29 +1365,7 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string) (str
// findImport searches for a package with the given symbols. // findImport searches for a package with the given symbols.
// If no package is found, findImport returns ("", false, nil) // If no package is found, findImport returns ("", false, nil)
func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string, symbols map[string]bool, filename string) (*pkg, error) { func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgName string, symbols map[string]bool, filename string) (*pkg, error) {
pkgDir, err := filepath.Abs(filename)
if err != nil {
return nil, err
}
pkgDir = filepath.Dir(pkgDir)
// Find candidate packages, looking only at their directory names first.
var candidates []pkgDistance
for _, pkg := range dirScan {
if pkg.dir == pkgDir && pass.f.Name.Name == pkgName {
// The candidate is in the same directory and has the
// same package name. Don't try to import ourselves.
continue
}
if pkgIsCandidate(filename, pkgName, pkg) {
candidates = append(candidates, pkgDistance{
pkg: pkg,
distance: distance(pkgDir, pkg.dir),
})
}
}
// Sort the candidates by their import package length, // Sort the candidates by their import package length,
// assuming that shorter package names are better than long // assuming that shorter package names are better than long
// ones. Note that this sorts by the de-vendored name, so // ones. Note that this sorts by the de-vendored name, so
@ -1358,7 +1378,6 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
} }
// Collect exports for packages with matching names. // Collect exports for packages with matching names.
rescv := make([]chan *pkg, len(candidates)) rescv := make([]chan *pkg, len(candidates))
for i := range candidates { for i := range candidates {
rescv[i] = make(chan *pkg, 1) rescv[i] = make(chan *pkg, 1)
@ -1393,7 +1412,9 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
if pass.env.Debug { if pass.env.Debug {
pass.env.Logf("loading exports in dir %s (seeking package %s)", c.pkg.dir, pkgName) pass.env.Logf("loading exports in dir %s (seeking package %s)", c.pkg.dir, pkgName)
} }
exports, err := loadExportsForPackage(ctx, pass.env, pkgName, c.pkg) // If we're an x_test, load the package under test's test variant.
includeTest := strings.HasSuffix(pass.f.Name.Name, "_test") && c.pkg.dir == pass.srcDir
_, exports, err := pass.env.GetResolver().loadExports(ctx, c.pkg, includeTest)
if err != nil { if err != nil {
if pass.env.Debug { if pass.env.Debug {
pass.env.Logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err) pass.env.Logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err)
@ -1430,17 +1451,6 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
return nil, nil return nil, nil
} }
func loadExportsForPackage(ctx context.Context, env *ProcessEnv, expectPkg string, pkg *pkg) ([]string, error) {
pkgName, exports, err := env.GetResolver().loadExports(ctx, pkg)
if err != nil {
return nil, err
}
if expectPkg != pkgName {
return nil, fmt.Errorf("dir %v is package %v, wanted %v", pkg.dir, pkgName, expectPkg)
}
return exports, err
}
// pkgIsCandidate reports whether pkg is a candidate for satisfying the // pkgIsCandidate reports whether pkg is a candidate for satisfying the
// finding which package pkgIdent in the file named by filename is trying // finding which package pkgIdent in the file named by filename is trying
// to refer to. // to refer to.
@ -1453,7 +1463,7 @@ func loadExportsForPackage(ctx context.Context, env *ProcessEnv, expectPkg strin
// filename is the file being formatted. // filename is the file being formatted.
// pkgIdent is the package being searched for, like "client" (if // pkgIdent is the package being searched for, like "client" (if
// searching for "client.New") // searching for "client.New")
func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool { func pkgIsCandidate(filename string, refs references, pkg *pkg) bool {
// Check "internal" and "vendor" visibility: // Check "internal" and "vendor" visibility:
if !canUse(filename, pkg.dir) { if !canUse(filename, pkg.dir) {
return false return false
@ -1471,6 +1481,7 @@ func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
// "bar", which is strongly discouraged // "bar", which is strongly discouraged
// anyway. There's no reason goimports needs // anyway. There's no reason goimports needs
// to be slow just to accommodate that. // to be slow just to accommodate that.
for pkgIdent := range refs {
lastTwo := lastTwoComponents(pkg.importPathShort) lastTwo := lastTwoComponents(pkg.importPathShort)
if strings.Contains(lastTwo, pkgIdent) { if strings.Contains(lastTwo, pkgIdent) {
return true return true
@ -1481,7 +1492,7 @@ func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
return true return true
} }
} }
}
return false return false
} }

View file

@ -11,6 +11,7 @@ package imports
import ( import (
"bufio" "bufio"
"bytes" "bytes"
"context"
"fmt" "fmt"
"go/ast" "go/ast"
"go/build" "go/build"
@ -21,6 +22,7 @@ import (
"io" "io"
"io/ioutil" "io/ioutil"
"log" "log"
"os"
"regexp" "regexp"
"strconv" "strconv"
"strings" "strings"
@ -83,42 +85,54 @@ func FixImports(filename string, src []byte, opt *Options) (fixes []*ImportFix,
return getFixes(fileSet, file, filename, opt.Env) return getFixes(fileSet, file, filename, opt.Env)
} }
// ApplyFix will apply all of the fixes to the file and format it. // ApplyFixes applies all of the fixes to the file and formats it. extraMode
func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options) (formatted []byte, err error) { // is added in when parsing the file.
func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, extraMode parser.Mode) (formatted []byte, err error) {
src, opt, err = initialize(filename, src, opt) src, opt, err = initialize(filename, src, opt)
if err != nil { if err != nil {
return nil, err return nil, err
} }
// Don't use parse() -- we don't care about fragments or statement lists
// here, and we need to work with unparseable files.
fileSet := token.NewFileSet() fileSet := token.NewFileSet()
file, adjust, err := parse(fileSet, filename, src, opt) parserMode := parser.Mode(0)
if err != nil { if opt.Comments {
parserMode |= parser.ParseComments
}
if opt.AllErrors {
parserMode |= parser.AllErrors
}
parserMode |= extraMode
file, err := parser.ParseFile(fileSet, filename, src, parserMode)
if file == nil {
return nil, err return nil, err
} }
// Apply the fixes to the file. // Apply the fixes to the file.
apply(fileSet, file, fixes) apply(fileSet, file, fixes)
return formatFile(fileSet, file, src, adjust, opt) return formatFile(fileSet, file, src, nil, opt)
} }
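
ApplyFixes now parses with parser.ParseFile directly, OR-ing ParseComments, AllErrors and the caller's extraMode, and only requires a non-nil *ast.File rather than a clean parse. The go/parser side of that looks roughly like:

package main

import (
    "fmt"
    "go/parser"
    "go/token"
)

func main() {
    src := "package demo\n\nimport \"fmt\"\n\nfunc main() { fmt.Println(broken\n"
    // Combine modes the same way ApplyFixes does: comments, keep going on
    // errors, plus whatever extra mode the caller passes in.
    mode := parser.ParseComments | parser.AllErrors
    fset := token.NewFileSet()
    file, err := parser.ParseFile(fset, "demo.go", src, mode)
    // With AllErrors the returned *ast.File is usually non-nil even when err
    // is not, which is exactly the case ApplyFixes checks for above.
    fmt.Println("file parsed:", file != nil, "err:", err != nil)
}
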
// GetAllCandidates gets all of the standard library candidate packages to import in // GetAllCandidates gets all of the packages starting with prefix that can be
// sorted order on import path. // imported by filename, sorted by import path.
func GetAllCandidates(filename string, opt *Options) (pkgs []ImportFix, err error) { func GetAllCandidates(ctx context.Context, callback func(ImportFix), searchPrefix, filename, filePkg string, opt *Options) error {
_, opt, err = initialize(filename, nil, opt) _, opt, err := initialize(filename, []byte{}, opt)
if err != nil { if err != nil {
return nil, err return err
} }
return getAllCandidates(filename, opt.Env) return getAllCandidates(ctx, callback, searchPrefix, filename, filePkg, opt.Env)
} }
// GetPackageExports returns all known packages with name pkg and their exports. // GetPackageExports returns all known packages with name pkg and their exports.
func GetPackageExports(pkg, filename string, opt *Options) (exports []PackageExport, err error) { func GetPackageExports(ctx context.Context, callback func(PackageExport), searchPkg, filename, filePkg string, opt *Options) error {
_, opt, err = initialize(filename, nil, opt) _, opt, err := initialize(filename, []byte{}, opt)
if err != nil { if err != nil {
return nil, err return err
} }
return getPackageExports(pkg, filename, opt.Env) return getPackageExports(ctx, callback, searchPkg, filename, filePkg, opt.Env)
} }
// initialize sets the values for opt and src. // initialize sets the values for opt and src.
@ -135,6 +149,10 @@ func initialize(filename string, src []byte, opt *Options) ([]byte, *Options, er
opt.Env = &ProcessEnv{ opt.Env = &ProcessEnv{
GOPATH: build.Default.GOPATH, GOPATH: build.Default.GOPATH,
GOROOT: build.Default.GOROOT, GOROOT: build.Default.GOROOT,
GOFLAGS: os.Getenv("GOFLAGS"),
GO111MODULE: os.Getenv("GO111MODULE"),
GOPROXY: os.Getenv("GOPROXY"),
GOSUMDB: os.Getenv("GOSUMDB"),
} }
} }

View file

@ -13,7 +13,6 @@ import (
"sort" "sort"
"strconv" "strconv"
"strings" "strings"
"sync"
"golang.org/x/tools/internal/gopathwalk" "golang.org/x/tools/internal/gopathwalk"
"golang.org/x/tools/internal/module" "golang.org/x/tools/internal/module"
@ -26,11 +25,14 @@ type ModuleResolver struct {
env *ProcessEnv env *ProcessEnv
moduleCacheDir string moduleCacheDir string
dummyVendorMod *ModuleJSON // If vendoring is enabled, the pseudo-module that represents the /vendor directory. dummyVendorMod *ModuleJSON // If vendoring is enabled, the pseudo-module that represents the /vendor directory.
roots []gopathwalk.Root
scanSema chan struct{} // scanSema prevents concurrent scans and guards scannedRoots.
scannedRoots map[gopathwalk.Root]bool
Initialized bool initialized bool
Main *ModuleJSON main *ModuleJSON
ModsByModPath []*ModuleJSON // All modules, ordered by # of path components in module Path... modsByModPath []*ModuleJSON // All modules, ordered by # of path components in module Path...
ModsByDir []*ModuleJSON // ...or Dir. modsByDir []*ModuleJSON // ...or Dir.
// moduleCacheCache stores information about the module cache. // moduleCacheCache stores information about the module cache.
moduleCacheCache *dirInfoCache moduleCacheCache *dirInfoCache
@ -41,13 +43,23 @@ type ModuleJSON struct {
Path string // module path Path string // module path
Replace *ModuleJSON // replaced by this module Replace *ModuleJSON // replaced by this module
Main bool // is this the main module? Main bool // is this the main module?
Indirect bool // is this module only an indirect dependency of main module?
Dir string // directory holding files for this module, if any Dir string // directory holding files for this module, if any
GoMod string // path to go.mod file for this module, if any GoMod string // path to go.mod file for this module, if any
GoVersion string // go version used in module GoVersion string // go version used in module
} }
func newModuleResolver(e *ProcessEnv) *ModuleResolver {
r := &ModuleResolver{
env: e,
scanSema: make(chan struct{}, 1),
}
r.scanSema <- struct{}{}
return r
}
func (r *ModuleResolver) init() error { func (r *ModuleResolver) init() error {
if r.Initialized { if r.initialized {
return nil return nil
} }
mainMod, vendorEnabled, err := vendorEnabled(r.env) mainMod, vendorEnabled, err := vendorEnabled(r.env)
@ -58,13 +70,13 @@ func (r *ModuleResolver) init() error {
if mainMod != nil && vendorEnabled { if mainMod != nil && vendorEnabled {
// Vendor mode is on, so all the non-Main modules are irrelevant, // Vendor mode is on, so all the non-Main modules are irrelevant,
// and we need to search /vendor for everything. // and we need to search /vendor for everything.
r.Main = mainMod r.main = mainMod
r.dummyVendorMod = &ModuleJSON{ r.dummyVendorMod = &ModuleJSON{
Path: "", Path: "",
Dir: filepath.Join(mainMod.Dir, "vendor"), Dir: filepath.Join(mainMod.Dir, "vendor"),
} }
r.ModsByModPath = []*ModuleJSON{mainMod, r.dummyVendorMod} r.modsByModPath = []*ModuleJSON{mainMod, r.dummyVendorMod}
r.ModsByDir = []*ModuleJSON{mainMod, r.dummyVendorMod} r.modsByDir = []*ModuleJSON{mainMod, r.dummyVendorMod}
} else { } else {
// Vendor mode is off, so run go list -m ... to find everything. // Vendor mode is off, so run go list -m ... to find everything.
r.initAllMods() r.initAllMods()
@ -72,30 +84,64 @@ func (r *ModuleResolver) init() error {
r.moduleCacheDir = filepath.Join(filepath.SplitList(r.env.GOPATH)[0], "/pkg/mod") r.moduleCacheDir = filepath.Join(filepath.SplitList(r.env.GOPATH)[0], "/pkg/mod")
sort.Slice(r.ModsByModPath, func(i, j int) bool { sort.Slice(r.modsByModPath, func(i, j int) bool {
count := func(x int) int { count := func(x int) int {
return strings.Count(r.ModsByModPath[x].Path, "/") return strings.Count(r.modsByModPath[x].Path, "/")
} }
return count(j) < count(i) // descending order return count(j) < count(i) // descending order
}) })
sort.Slice(r.ModsByDir, func(i, j int) bool { sort.Slice(r.modsByDir, func(i, j int) bool {
count := func(x int) int { count := func(x int) int {
return strings.Count(r.ModsByDir[x].Dir, "/") return strings.Count(r.modsByDir[x].Dir, "/")
} }
return count(j) < count(i) // descending order return count(j) < count(i) // descending order
}) })
r.roots = []gopathwalk.Root{
{filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT},
}
if r.main != nil {
r.roots = append(r.roots, gopathwalk.Root{r.main.Dir, gopathwalk.RootCurrentModule})
}
if vendorEnabled {
r.roots = append(r.roots, gopathwalk.Root{r.dummyVendorMod.Dir, gopathwalk.RootOther})
} else {
addDep := func(mod *ModuleJSON) {
if mod.Replace == nil {
// This is redundant with the cache, but we'll skip it cheaply enough.
r.roots = append(r.roots, gopathwalk.Root{mod.Dir, gopathwalk.RootModuleCache})
} else {
r.roots = append(r.roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther})
}
}
// Walk dependent modules before scanning the full mod cache, direct deps first.
for _, mod := range r.modsByModPath {
if !mod.Indirect && !mod.Main {
addDep(mod)
}
}
for _, mod := range r.modsByModPath {
if mod.Indirect && !mod.Main {
addDep(mod)
}
}
r.roots = append(r.roots, gopathwalk.Root{r.moduleCacheDir, gopathwalk.RootModuleCache})
}
r.scannedRoots = map[gopathwalk.Root]bool{}
if r.moduleCacheCache == nil { if r.moduleCacheCache == nil {
r.moduleCacheCache = &dirInfoCache{ r.moduleCacheCache = &dirInfoCache{
dirs: map[string]*directoryPackageInfo{}, dirs: map[string]*directoryPackageInfo{},
listeners: map[*int]cacheListener{},
} }
} }
if r.otherCache == nil { if r.otherCache == nil {
r.otherCache = &dirInfoCache{ r.otherCache = &dirInfoCache{
dirs: map[string]*directoryPackageInfo{}, dirs: map[string]*directoryPackageInfo{},
listeners: map[*int]cacheListener{},
} }
} }
r.Initialized = true r.initialized = true
return nil return nil
} }
@ -116,27 +162,35 @@ func (r *ModuleResolver) initAllMods() error {
// Can't do anything with a module that's not downloaded. // Can't do anything with a module that's not downloaded.
continue continue
} }
r.ModsByModPath = append(r.ModsByModPath, mod) r.modsByModPath = append(r.modsByModPath, mod)
r.ModsByDir = append(r.ModsByDir, mod) r.modsByDir = append(r.modsByDir, mod)
if mod.Main { if mod.Main {
r.Main = mod r.main = mod
} }
} }
return nil return nil
} }
func (r *ModuleResolver) ClearForNewScan() { func (r *ModuleResolver) ClearForNewScan() {
<-r.scanSema
r.scannedRoots = map[gopathwalk.Root]bool{}
r.otherCache = &dirInfoCache{ r.otherCache = &dirInfoCache{
dirs: map[string]*directoryPackageInfo{}, dirs: map[string]*directoryPackageInfo{},
listeners: map[*int]cacheListener{},
} }
r.scanSema <- struct{}{}
} }
func (r *ModuleResolver) ClearForNewMod() { func (r *ModuleResolver) ClearForNewMod() {
env := r.env <-r.scanSema
*r = ModuleResolver{ *r = ModuleResolver{
env: env, env: r.env,
moduleCacheCache: r.moduleCacheCache,
otherCache: r.otherCache,
scanSema: r.scanSema,
} }
r.init() r.init()
r.scanSema <- struct{}{}
} }
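
ClearForNewScan and ClearForNewMod drain scanSema before touching shared state and refill it afterwards; a one-slot buffered channel works as a lock whose token can also be handed back by the detached scan goroutine, which a sync.Mutex cannot do. A minimal sketch of that idiom:

package main

import (
    "fmt"
    "sync"
)

// cacheGuard shows the buffered-channel-as-mutex idiom used by scanSema:
// receiving takes the lock, sending gives it back.
type cacheGuard struct {
    sema chan struct{}
    data map[string]string
}

func newCacheGuard() *cacheGuard {
    g := &cacheGuard{sema: make(chan struct{}, 1), data: map[string]string{}}
    g.sema <- struct{}{} // start unlocked
    return g
}

func (g *cacheGuard) clearForNewScan() {
    <-g.sema                     // wait for any in-flight scan to finish
    g.data = map[string]string{} // safe to reset shared state here
    g.sema <- struct{}{}         // release
}

func main() {
    g := newCacheGuard()
    var wg sync.WaitGroup
    for i := 0; i < 3; i++ {
        wg.Add(1)
        go func() { defer wg.Done(); g.clearForNewScan() }()
    }
    wg.Wait()
    fmt.Println("cache entries after reset:", len(g.data))
}
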
// findPackage returns the module and directory that contains the package at // findPackage returns the module and directory that contains the package at
@ -144,7 +198,7 @@ func (r *ModuleResolver) ClearForNewMod() {
func (r *ModuleResolver) findPackage(importPath string) (*ModuleJSON, string) { func (r *ModuleResolver) findPackage(importPath string) (*ModuleJSON, string) {
// This can't find packages in the stdlib, but that's harmless for all // This can't find packages in the stdlib, but that's harmless for all
// the existing code paths. // the existing code paths.
for _, m := range r.ModsByModPath { for _, m := range r.modsByModPath {
if !strings.HasPrefix(importPath, m.Path) { if !strings.HasPrefix(importPath, m.Path) {
continue continue
} }
@ -211,7 +265,7 @@ func (r *ModuleResolver) cacheKeys() []string {
} }
// cachePackageName caches the package name for a dir already in the cache. // cachePackageName caches the package name for a dir already in the cache.
func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (directoryPackageInfo, error) { func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (string, error) {
if info.rootType == gopathwalk.RootModuleCache { if info.rootType == gopathwalk.RootModuleCache {
return r.moduleCacheCache.CachePackageName(info) return r.moduleCacheCache.CachePackageName(info)
} }
@ -238,7 +292,7 @@ func (r *ModuleResolver) findModuleByDir(dir string) *ModuleJSON {
// - in /vendor/ in -mod=vendor mode. // - in /vendor/ in -mod=vendor mode.
// - nested module? Dunno. // - nested module? Dunno.
// Rumor has it that replace targets cannot contain other replace targets. // Rumor has it that replace targets cannot contain other replace targets.
for _, m := range r.ModsByDir { for _, m := range r.modsByDir {
if !strings.HasPrefix(dir, m.Dir) { if !strings.HasPrefix(dir, m.Dir) {
continue continue
} }
@ -333,41 +387,49 @@ func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) (
return names, nil return names, nil
} }
func (r *ModuleResolver) scan(_ references, loadNames bool, exclude []gopathwalk.RootType) ([]*pkg, error) { func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error {
if err := r.init(); err != nil { if err := r.init(); err != nil {
return nil, err return err
} }
// Walk GOROOT, GOPATH/pkg/mod, and the main module. processDir := func(info directoryPackageInfo) {
roots := []gopathwalk.Root{ // Skip this directory if we were not able to get the package information successfully.
{filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT}, if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
} return
if r.Main != nil {
roots = append(roots, gopathwalk.Root{r.Main.Dir, gopathwalk.RootCurrentModule})
}
if r.dummyVendorMod != nil {
roots = append(roots, gopathwalk.Root{r.dummyVendorMod.Dir, gopathwalk.RootOther})
} else {
roots = append(roots, gopathwalk.Root{r.moduleCacheDir, gopathwalk.RootModuleCache})
// Walk replace targets, just in case they're not in any of the above.
for _, mod := range r.ModsByModPath {
if mod.Replace != nil {
roots = append(roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther})
}
} }
pkg, err := r.canonicalize(info)
if err != nil {
return
} }
roots = filterRoots(roots, exclude) if !callback.dirFound(pkg) {
return
}
pkg.packageName, err = r.cachePackageName(info)
if err != nil {
return
}
var result []*pkg if !callback.packageNameLoaded(pkg) {
var mu sync.Mutex return
}
_, exports, err := r.loadExports(ctx, pkg, false)
if err != nil {
return
}
callback.exportsLoaded(pkg, exports)
}
// We assume cached directories have not changed. We can skip them and their // Start processing everything in the cache, and listen for the new stuff
// children. // we discover in the walk below.
stop1 := r.moduleCacheCache.ScanAndListen(ctx, processDir)
defer stop1()
stop2 := r.otherCache.ScanAndListen(ctx, processDir)
defer stop2()
// We assume cached directories are fully cached, including all their
// children, and have not changed. We can skip them.
skip := func(root gopathwalk.Root, dir string) bool { skip := func(root gopathwalk.Root, dir string) bool {
mu.Lock()
defer mu.Unlock()
info, ok := r.cacheLoad(dir) info, ok := r.cacheLoad(dir)
if !ok { if !ok {
return false return false
@ -379,44 +441,64 @@ func (r *ModuleResolver) scan(_ references, loadNames bool, exclude []gopathwalk
return packageScanned return packageScanned
} }
// Add anything new to the cache. We'll process everything in it below. // Add anything new to the cache, and process it if we're still listening.
add := func(root gopathwalk.Root, dir string) { add := func(root gopathwalk.Root, dir string) {
mu.Lock()
defer mu.Unlock()
r.cacheStore(r.scanDirForPackage(root, dir)) r.cacheStore(r.scanDirForPackage(root, dir))
} }
gopathwalk.WalkSkip(roots, add, skip, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: true}) // r.roots and the callback are not necessarily safe to use in the
// goroutine below. Process them eagerly.
roots := filterRoots(r.roots, callback.rootFound)
// We can't cancel walks, because we need them to finish to have a usable
// cache. Instead, run them in a separate goroutine and detach.
scanDone := make(chan struct{})
go func() {
select {
case <-ctx.Done():
return
case <-r.scanSema:
}
defer func() { r.scanSema <- struct{}{} }()
// We have the lock on r.scannedRoots, and no other scans can run.
for _, root := range roots {
if ctx.Err() != nil {
return
}
// Everything we already had, and everything new, is now in the cache. if r.scannedRoots[root] {
for _, dir := range r.cacheKeys() {
info, ok := r.cacheLoad(dir)
if !ok {
continue continue
} }
gopathwalk.WalkSkip([]gopathwalk.Root{root}, add, skip, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: true})
r.scannedRoots[root] = true
}
close(scanDone)
}()
select {
case <-ctx.Done():
case <-scanDone:
}
return nil
}
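The new scan above cannot cancel a walk midway, so it detaches the walk into a goroutine and lets the caller return on context cancellation while the walk keeps populating the cache. A minimal standalone sketch of that pattern, with illustrative names and nothing beyond the standard library:

package main

import (
	"context"
	"fmt"
	"time"
)

func scanDetached(ctx context.Context, walk func()) {
	done := make(chan struct{})
	go func() {
		walk() // not cancelled: results still land in the cache
		close(done)
	}()
	select {
	case <-ctx.Done():
		fmt.Println("caller gave up early:", ctx.Err())
	case <-done:
		fmt.Println("walk finished before the deadline")
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Millisecond)
	defer cancel()
	scanDetached(ctx, func() { time.Sleep(50 * time.Millisecond) })
	time.Sleep(60 * time.Millisecond) // let the detached walk finish in this demo
}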
// Skip this directory if we were not able to get the package information successfully. func (r *ModuleResolver) scoreImportPath(ctx context.Context, path string) int {
if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil { if _, ok := stdlib[path]; ok {
continue return MaxRelevance
} }
mod, _ := r.findPackage(path)
return modRelevance(mod)
}
// If we want package names, make sure the cache has them. func modRelevance(mod *ModuleJSON) int {
if loadNames { switch {
var err error case mod == nil: // out of scope
if info, err = r.cachePackageName(info); err != nil { return MaxRelevance - 4
continue case mod.Indirect:
return MaxRelevance - 3
case !mod.Main:
return MaxRelevance - 2
default:
return MaxRelevance - 1 // main module ties with stdlib
} }
}
res, err := r.canonicalize(info)
if err != nil {
continue
}
result = append(result, res)
}
return result, nil
} }
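The relevance ladder added here ranks candidates by how "in scope" their module is: stdlib and the main module score highest, then direct dependencies, then indirect ones, then out-of-scope modules. A small self-contained sketch of the same switch; MaxRelevance, moduleJSON and the sample values are stand-ins rather than the vendored types:

package main

import "fmt"

const MaxRelevance = 7 // stand-in constant for this sketch

type moduleJSON struct {
	Main     bool
	Indirect bool
}

func modRelevance(mod *moduleJSON) int {
	switch {
	case mod == nil: // out of scope
		return MaxRelevance - 4
	case mod.Indirect:
		return MaxRelevance - 3
	case !mod.Main:
		return MaxRelevance - 2
	default:
		return MaxRelevance - 1 // main module ties with stdlib
	}
}

func main() {
	fmt.Println(modRelevance(&moduleJSON{Main: true}))     // 6: main module
	fmt.Println(modRelevance(&moduleJSON{}))               // 5: direct dependency
	fmt.Println(modRelevance(&moduleJSON{Indirect: true})) // 4: indirect dependency
	fmt.Println(modRelevance(nil))                         // 3: not in go.mod at all
}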
// canonicalize gets the result of canonicalizing the packages using the results // canonicalize gets the result of canonicalizing the packages using the results
@ -428,15 +510,14 @@ func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
importPathShort: info.nonCanonicalImportPath, importPathShort: info.nonCanonicalImportPath,
dir: info.dir, dir: info.dir,
packageName: path.Base(info.nonCanonicalImportPath), packageName: path.Base(info.nonCanonicalImportPath),
relevance: 0, relevance: MaxRelevance,
}, nil }, nil
} }
importPath := info.nonCanonicalImportPath importPath := info.nonCanonicalImportPath
relevance := 2 mod := r.findModuleByDir(info.dir)
// Check if the directory is underneath a module that's in scope. // Check if the directory is underneath a module that's in scope.
if mod := r.findModuleByDir(info.dir); mod != nil { if mod != nil {
relevance = 1
// It is. If dir is the target of a replace directive, // It is. If dir is the target of a replace directive,
// our guessed import path is wrong. Use the real one. // our guessed import path is wrong. Use the real one.
if mod.Dir == info.dir { if mod.Dir == info.dir {
@ -445,15 +526,16 @@ func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
dirInMod := info.dir[len(mod.Dir)+len("/"):] dirInMod := info.dir[len(mod.Dir)+len("/"):]
importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod)) importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod))
} }
} else if info.needsReplace { } else if !strings.HasPrefix(importPath, info.moduleName) {
// The module's name doesn't match the package's import path. It
// probably needs a replace directive we don't have.
return nil, fmt.Errorf("package in %q is not valid without a replace statement", info.dir) return nil, fmt.Errorf("package in %q is not valid without a replace statement", info.dir)
} }
res := &pkg{ res := &pkg{
importPathShort: importPath, importPathShort: importPath,
dir: info.dir, dir: info.dir,
packageName: info.packageName, // may not be populated if the caller didn't ask for it relevance: modRelevance(mod),
relevance: relevance,
} }
// We may have discovered a package that has a different version // We may have discovered a package that has a different version
// in scope already. Canonicalize to that one if possible. // in scope already. Canonicalize to that one if possible.
@ -463,14 +545,14 @@ func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
return res, nil return res, nil
} }
func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg) (string, []string, error) { func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
if err := r.init(); err != nil { if err := r.init(); err != nil {
return "", nil, err return "", nil, err
} }
if info, ok := r.cacheLoad(pkg.dir); ok { if info, ok := r.cacheLoad(pkg.dir); ok && !includeTest {
return r.cacheExports(ctx, r.env, info) return r.cacheExports(ctx, r.env, info)
} }
return loadExportsFromFiles(ctx, r.env, pkg.dir) return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
} }
func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) directoryPackageInfo { func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) directoryPackageInfo {
@ -488,7 +570,7 @@ func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) dir
} }
switch root.Type { switch root.Type {
case gopathwalk.RootCurrentModule: case gopathwalk.RootCurrentModule:
importPath = path.Join(r.Main.Path, filepath.ToSlash(subdir)) importPath = path.Join(r.main.Path, filepath.ToSlash(subdir))
case gopathwalk.RootModuleCache: case gopathwalk.RootModuleCache:
matches := modCacheRegexp.FindStringSubmatch(subdir) matches := modCacheRegexp.FindStringSubmatch(subdir)
if len(matches) == 0 { if len(matches) == 0 {
@ -516,7 +598,6 @@ func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) dir
dir: dir, dir: dir,
rootType: root.Type, rootType: root.Type,
nonCanonicalImportPath: importPath, nonCanonicalImportPath: importPath,
needsReplace: false,
moduleDir: modDir, moduleDir: modDir,
moduleName: modName, moduleName: modName,
} }
@ -524,14 +605,6 @@ func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) dir
// stdlib packages are always in scope, despite the confusing go.mod // stdlib packages are always in scope, despite the confusing go.mod
return result return result
} }
// Check that this package is not obviously impossible to import.
if !strings.HasPrefix(importPath, modName) {
// The module's declared path does not match
// its expected path. It probably needs a
// replace directive we don't have.
result.needsReplace = true
}
return result return result
} }


@ -49,10 +49,6 @@ type directoryPackageInfo struct {
// nonCanonicalImportPath is the package's expected import path. It may // nonCanonicalImportPath is the package's expected import path. It may
// not actually be importable at that path. // not actually be importable at that path.
nonCanonicalImportPath string nonCanonicalImportPath string
// needsReplace is true if the nonCanonicalImportPath does not match the
// module's declared path, making it impossible to import without a
// replace directive.
needsReplace bool
// Module-related information. // Module-related information.
moduleDir string // The directory that is the module root of this dir. moduleDir string // The directory that is the module root of this dir.
@ -98,14 +94,84 @@ type dirInfoCache struct {
mu sync.Mutex mu sync.Mutex
// dirs stores information about packages in directories, keyed by absolute path. // dirs stores information about packages in directories, keyed by absolute path.
dirs map[string]*directoryPackageInfo dirs map[string]*directoryPackageInfo
listeners map[*int]cacheListener
}
type cacheListener func(directoryPackageInfo)
// ScanAndListen calls listener on all the items in the cache, and on anything
// newly added. The returned stop function waits for all in-flight callbacks to
// finish and blocks new ones.
func (d *dirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener) func() {
ctx, cancel := context.WithCancel(ctx)
// Flushing out all the callbacks is tricky without knowing how many there
// are going to be. Setting an arbitrary limit makes it much easier.
const maxInFlight = 10
sema := make(chan struct{}, maxInFlight)
for i := 0; i < maxInFlight; i++ {
sema <- struct{}{}
}
cookie := new(int) // A unique ID we can use for the listener.
// We can't hold mu while calling the listener.
d.mu.Lock()
var keys []string
for key := range d.dirs {
keys = append(keys, key)
}
d.listeners[cookie] = func(info directoryPackageInfo) {
select {
case <-ctx.Done():
return
case <-sema:
}
listener(info)
sema <- struct{}{}
}
d.mu.Unlock()
// Process the pre-existing keys.
for _, k := range keys {
select {
case <-ctx.Done():
cancel()
return func() {}
default:
}
if v, ok := d.Load(k); ok {
listener(v)
}
}
return func() {
cancel()
d.mu.Lock()
delete(d.listeners, cookie)
d.mu.Unlock()
for i := 0; i < maxInFlight; i++ {
<-sema
}
}
} }
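ScanAndListen combines a replay of the existing cache with a live listener for new entries, plus a stop function. A much-simplified standalone sketch of that idea follows; error statuses, contexts and the in-flight semaphore are omitted, and all names are illustrative:

package main

import (
	"fmt"
	"sync"
)

type cache struct {
	mu       sync.Mutex
	entries  map[string]string
	listener func(k, v string)
}

func (c *cache) ScanAndListen(listener func(k, v string)) (stop func()) {
	c.mu.Lock()
	for k, v := range c.entries { // replay pre-existing entries
		listener(k, v)
	}
	c.listener = listener // then listen for new ones
	c.mu.Unlock()
	return func() {
		c.mu.Lock()
		c.listener = nil
		c.mu.Unlock()
	}
}

func (c *cache) Store(k, v string) {
	c.mu.Lock()
	_, existed := c.entries[k]
	c.entries[k] = v
	l := c.listener
	c.mu.Unlock()
	if !existed && l != nil {
		l(k, v) // notify outside the lock, like dirInfoCache.Store above
	}
}

func main() {
	c := &cache{entries: map[string]string{"a": "1"}}
	stop := c.ScanAndListen(func(k, v string) { fmt.Println("saw", k, v) })
	c.Store("b", "2")
	stop()
	c.Store("c", "3") // no longer reported
}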
// Store stores the package info for dir. // Store stores the package info for dir.
func (d *dirInfoCache) Store(dir string, info directoryPackageInfo) { func (d *dirInfoCache) Store(dir string, info directoryPackageInfo) {
d.mu.Lock() d.mu.Lock()
defer d.mu.Unlock() _, old := d.dirs[dir]
stored := info // defensive copy d.dirs[dir] = &info
d.dirs[dir] = &stored var listeners []cacheListener
for _, l := range d.listeners {
listeners = append(listeners, l)
}
d.mu.Unlock()
if !old {
for _, l := range listeners {
l(info)
}
}
} }
// Load returns a copy of the directoryPackageInfo for absolute directory dir. // Load returns a copy of the directoryPackageInfo for absolute directory dir.
@ -129,17 +195,17 @@ func (d *dirInfoCache) Keys() (keys []string) {
return keys return keys
} }
func (d *dirInfoCache) CachePackageName(info directoryPackageInfo) (directoryPackageInfo, error) { func (d *dirInfoCache) CachePackageName(info directoryPackageInfo) (string, error) {
if loaded, err := info.reachedStatus(nameLoaded); loaded { if loaded, err := info.reachedStatus(nameLoaded); loaded {
return info, err return info.packageName, err
} }
if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil { if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
return info, fmt.Errorf("cannot read package name, scan error: %v", err) return "", fmt.Errorf("cannot read package name, scan error: %v", err)
} }
info.packageName, info.err = packageDirToName(info.dir) info.packageName, info.err = packageDirToName(info.dir)
info.status = nameLoaded info.status = nameLoaded
d.Store(info.dir, info) d.Store(info.dir, info)
return info, info.err return info.packageName, info.err
} }
func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) { func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) {
@ -149,8 +215,8 @@ func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info d
if reached, err := info.reachedStatus(nameLoaded); reached && err != nil { if reached, err := info.reachedStatus(nameLoaded); reached && err != nil {
return "", nil, err return "", nil, err
} }
info.packageName, info.exports, info.err = loadExportsFromFiles(ctx, env, info.dir) info.packageName, info.exports, info.err = loadExportsFromFiles(ctx, env, info.dir, false)
if info.err == context.Canceled { if info.err == context.Canceled || info.err == context.DeadlineExceeded {
return info.packageName, info.exports, info.err return info.packageName, info.exports, info.err
} }
// The cache structure wants things to proceed linearly. We can skip a // The cache structure wants things to proceed linearly. We can skip a


@ -0,0 +1,4 @@
// Package packagesinternal exposes internal-only fields from go/packages.
package packagesinternal
var GetForTest = func(p interface{}) string { return "" }


@ -1,100 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package span
import (
"strconv"
"strings"
"unicode/utf8"
)
// Parse returns the location represented by the input.
// All inputs are valid locations, as they can always be a pure filename.
// The returned span will be normalized, and thus if printed may produce a
// different string.
func Parse(input string) Span {
// :0:0#0-0:0#0
valid := input
var hold, offset int
hadCol := false
suf := rstripSuffix(input)
if suf.sep == "#" {
offset = suf.num
suf = rstripSuffix(suf.remains)
}
if suf.sep == ":" {
valid = suf.remains
hold = suf.num
hadCol = true
suf = rstripSuffix(suf.remains)
}
switch {
case suf.sep == ":":
return New(NewURI(suf.remains), NewPoint(suf.num, hold, offset), Point{})
case suf.sep == "-":
// we have a span, fall out of the case to continue
default:
// separator not valid, rewind to either the : or the start
return New(NewURI(valid), NewPoint(hold, 0, offset), Point{})
}
// only the span form can get here
// at this point we still don't know what the numbers we have mean
// if have not yet seen a : then we might have either a line or a column depending
// on whether start has a column or not
// we build an end point and will fix it later if needed
end := NewPoint(suf.num, hold, offset)
hold, offset = 0, 0
suf = rstripSuffix(suf.remains)
if suf.sep == "#" {
offset = suf.num
suf = rstripSuffix(suf.remains)
}
if suf.sep != ":" {
// turns out we don't have a span after all, rewind
return New(NewURI(valid), end, Point{})
}
valid = suf.remains
hold = suf.num
suf = rstripSuffix(suf.remains)
if suf.sep != ":" {
// line#offset only
return New(NewURI(valid), NewPoint(hold, 0, offset), end)
}
// we have a column, so if end only had one number, it is also the column
if !hadCol {
end = NewPoint(suf.num, end.v.Line, end.v.Offset)
}
return New(NewURI(suf.remains), NewPoint(suf.num, hold, offset), end)
}
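For context, the span strings this deleted parser handled look like "file", "file:line:col" or "file:line:col-line:col", optionally with "#offset" parts. The toy parser below only covers the plain file:line:col case and is an illustration, not the removed implementation:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

func parseSimple(input string) (file string, line, col int) {
	parts := strings.Split(input, ":")
	file = parts[0]
	if len(parts) > 1 {
		line, _ = strconv.Atoi(parts[1])
	}
	if len(parts) > 2 {
		col, _ = strconv.Atoi(parts[2])
	}
	return file, line, col
}

func main() {
	f, l, c := parseSimple("main.go:10:4")
	fmt.Printf("file=%s line=%d col=%d\n", f, l, c) // file=main.go line=10 col=4
}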
type suffix struct {
remains string
sep string
num int
}
func rstripSuffix(input string) suffix {
if len(input) == 0 {
return suffix{"", "", -1}
}
remains := input
num := -1
// first see if we have a number at the end
last := strings.LastIndexFunc(remains, func(r rune) bool { return r < '0' || r > '9' })
if last >= 0 && last < len(remains)-1 {
number, err := strconv.ParseInt(remains[last+1:], 10, 64)
if err == nil {
num = int(number)
remains = remains[:last+1]
}
}
// now see if we have a trailing separator
r, w := utf8.DecodeLastRuneInString(remains)
if r != ':' && r != '#' && r == '#' {
return suffix{input, "", -1}
}
remains = remains[:len(remains)-w]
return suffix{remains, string(r), num}
}


@ -1,285 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package span contains support for representing with positions and ranges in
// text files.
package span
import (
"encoding/json"
"fmt"
"path"
)
// Span represents a source code range in standardized form.
type Span struct {
v span
}
// Point represents a single point within a file.
// In general this should only be used as part of a Span, as on its own it
// does not carry enough information.
type Point struct {
v point
}
type span struct {
URI URI `json:"uri"`
Start point `json:"start"`
End point `json:"end"`
}
type point struct {
Line int `json:"line"`
Column int `json:"column"`
Offset int `json:"offset"`
}
// Invalid is a span that reports false from IsValid
var Invalid = Span{v: span{Start: invalidPoint.v, End: invalidPoint.v}}
var invalidPoint = Point{v: point{Line: 0, Column: 0, Offset: -1}}
// Converter is the interface to an object that can convert between line:column
// and offset forms for a single file.
type Converter interface {
//ToPosition converts from an offset to a line:column pair.
ToPosition(offset int) (int, int, error)
//ToOffset converts from a line:column pair to an offset.
ToOffset(line, col int) (int, error)
}
func New(uri URI, start Point, end Point) Span {
s := Span{v: span{URI: uri, Start: start.v, End: end.v}}
s.v.clean()
return s
}
func NewPoint(line, col, offset int) Point {
p := Point{v: point{Line: line, Column: col, Offset: offset}}
p.v.clean()
return p
}
func Compare(a, b Span) int {
if r := CompareURI(a.URI(), b.URI()); r != 0 {
return r
}
if r := comparePoint(a.v.Start, b.v.Start); r != 0 {
return r
}
return comparePoint(a.v.End, b.v.End)
}
func ComparePoint(a, b Point) int {
return comparePoint(a.v, b.v)
}
func comparePoint(a, b point) int {
if !a.hasPosition() {
if a.Offset < b.Offset {
return -1
}
if a.Offset > b.Offset {
return 1
}
return 0
}
if a.Line < b.Line {
return -1
}
if a.Line > b.Line {
return 1
}
if a.Column < b.Column {
return -1
}
if a.Column > b.Column {
return 1
}
return 0
}
func (s Span) HasPosition() bool { return s.v.Start.hasPosition() }
func (s Span) HasOffset() bool { return s.v.Start.hasOffset() }
func (s Span) IsValid() bool { return s.v.Start.isValid() }
func (s Span) IsPoint() bool { return s.v.Start == s.v.End }
func (s Span) URI() URI { return s.v.URI }
func (s Span) Start() Point { return Point{s.v.Start} }
func (s Span) End() Point { return Point{s.v.End} }
func (s *Span) MarshalJSON() ([]byte, error) { return json.Marshal(&s.v) }
func (s *Span) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &s.v) }
func (p Point) HasPosition() bool { return p.v.hasPosition() }
func (p Point) HasOffset() bool { return p.v.hasOffset() }
func (p Point) IsValid() bool { return p.v.isValid() }
func (p *Point) MarshalJSON() ([]byte, error) { return json.Marshal(&p.v) }
func (p *Point) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &p.v) }
func (p Point) Line() int {
if !p.v.hasPosition() {
panic(fmt.Errorf("position not set in %v", p.v))
}
return p.v.Line
}
func (p Point) Column() int {
if !p.v.hasPosition() {
panic(fmt.Errorf("position not set in %v", p.v))
}
return p.v.Column
}
func (p Point) Offset() int {
if !p.v.hasOffset() {
panic(fmt.Errorf("offset not set in %v", p.v))
}
return p.v.Offset
}
func (p point) hasPosition() bool { return p.Line > 0 }
func (p point) hasOffset() bool { return p.Offset >= 0 }
func (p point) isValid() bool { return p.hasPosition() || p.hasOffset() }
func (p point) isZero() bool {
return (p.Line == 1 && p.Column == 1) || (!p.hasPosition() && p.Offset == 0)
}
func (s *span) clean() {
//this presumes the points are already clean
if !s.End.isValid() || (s.End == point{}) {
s.End = s.Start
}
}
func (p *point) clean() {
if p.Line < 0 {
p.Line = 0
}
if p.Column <= 0 {
if p.Line > 0 {
p.Column = 1
} else {
p.Column = 0
}
}
if p.Offset == 0 && (p.Line > 1 || p.Column > 1) {
p.Offset = -1
}
}
// Format implements fmt.Formatter to print the Location in a standard form.
// The format produced is one that can be read back in using Parse.
func (s Span) Format(f fmt.State, c rune) {
fullForm := f.Flag('+')
preferOffset := f.Flag('#')
// we should always have a uri, simplify if it is file format
//TODO: make sure the end of the uri is unambiguous
uri := string(s.v.URI)
if c == 'f' {
uri = path.Base(uri)
} else if !fullForm {
uri = s.v.URI.Filename()
}
fmt.Fprint(f, uri)
if !s.IsValid() || (!fullForm && s.v.Start.isZero() && s.v.End.isZero()) {
return
}
// see which bits of start to write
printOffset := s.HasOffset() && (fullForm || preferOffset || !s.HasPosition())
printLine := s.HasPosition() && (fullForm || !printOffset)
printColumn := printLine && (fullForm || (s.v.Start.Column > 1 || s.v.End.Column > 1))
fmt.Fprint(f, ":")
if printLine {
fmt.Fprintf(f, "%d", s.v.Start.Line)
}
if printColumn {
fmt.Fprintf(f, ":%d", s.v.Start.Column)
}
if printOffset {
fmt.Fprintf(f, "#%d", s.v.Start.Offset)
}
// start is written, do we need end?
if s.IsPoint() {
return
}
// we don't print the line if it did not change
printLine = fullForm || (printLine && s.v.End.Line > s.v.Start.Line)
fmt.Fprint(f, "-")
if printLine {
fmt.Fprintf(f, "%d", s.v.End.Line)
}
if printColumn {
if printLine {
fmt.Fprint(f, ":")
}
fmt.Fprintf(f, "%d", s.v.End.Column)
}
if printOffset {
fmt.Fprintf(f, "#%d", s.v.End.Offset)
}
}
func (s Span) WithPosition(c Converter) (Span, error) {
if err := s.update(c, true, false); err != nil {
return Span{}, err
}
return s, nil
}
func (s Span) WithOffset(c Converter) (Span, error) {
if err := s.update(c, false, true); err != nil {
return Span{}, err
}
return s, nil
}
func (s Span) WithAll(c Converter) (Span, error) {
if err := s.update(c, true, true); err != nil {
return Span{}, err
}
return s, nil
}
func (s *Span) update(c Converter, withPos, withOffset bool) error {
if !s.IsValid() {
return fmt.Errorf("cannot add information to an invalid span")
}
if withPos && !s.HasPosition() {
if err := s.v.Start.updatePosition(c); err != nil {
return err
}
if s.v.End.Offset == s.v.Start.Offset {
s.v.End = s.v.Start
} else if err := s.v.End.updatePosition(c); err != nil {
return err
}
}
if withOffset && (!s.HasOffset() || (s.v.End.hasPosition() && !s.v.End.hasOffset())) {
if err := s.v.Start.updateOffset(c); err != nil {
return err
}
if s.v.End.Line == s.v.Start.Line && s.v.End.Column == s.v.Start.Column {
s.v.End.Offset = s.v.Start.Offset
} else if err := s.v.End.updateOffset(c); err != nil {
return err
}
}
return nil
}
func (p *point) updatePosition(c Converter) error {
line, col, err := c.ToPosition(p.Offset)
if err != nil {
return err
}
p.Line = line
p.Column = col
return nil
}
func (p *point) updateOffset(c Converter) error {
offset, err := c.ToOffset(p.Line, p.Column)
if err != nil {
return err
}
p.Offset = offset
return nil
}


@ -1,179 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package span
import (
"fmt"
"go/token"
)
// Range represents a source code range in token.Pos form.
// It also carries the FileSet that produced the positions, so that it is
// self contained.
type Range struct {
FileSet *token.FileSet
Start token.Pos
End token.Pos
Converter Converter
}
// TokenConverter is a Converter backed by a token file set and file.
// It uses the file set methods to work out the conversions, which
// makes it fast and does not require the file contents.
type TokenConverter struct {
fset *token.FileSet
file *token.File
}
// NewRange creates a new Range from a FileSet and two positions.
// To represent a point pass a 0 as the end pos.
func NewRange(fset *token.FileSet, start, end token.Pos) Range {
return Range{
FileSet: fset,
Start: start,
End: end,
}
}
// NewTokenConverter returns an implementation of Converter backed by a
// token.File.
func NewTokenConverter(fset *token.FileSet, f *token.File) *TokenConverter {
return &TokenConverter{fset: fset, file: f}
}
// NewContentConverter returns an implementation of Converter for the
// given file content.
func NewContentConverter(filename string, content []byte) *TokenConverter {
fset := token.NewFileSet()
f := fset.AddFile(filename, -1, len(content))
f.SetLinesForContent(content)
return &TokenConverter{fset: fset, file: f}
}
// IsPoint returns true if the range represents a single point.
func (r Range) IsPoint() bool {
return r.Start == r.End
}
// Span converts a Range to a Span that represents the Range.
// It will fill in all the members of the Span, calculating the line and column
// information.
func (r Range) Span() (Span, error) {
f := r.FileSet.File(r.Start)
if f == nil {
return Span{}, fmt.Errorf("file not found in FileSet")
}
var s Span
var err error
var startFilename string
startFilename, s.v.Start.Line, s.v.Start.Column, err = position(f, r.Start)
if err != nil {
return Span{}, err
}
s.v.URI = FileURI(startFilename)
if r.End.IsValid() {
var endFilename string
endFilename, s.v.End.Line, s.v.End.Column, err = position(f, r.End)
if err != nil {
return Span{}, err
}
// In the presence of line directives, a single File can have sections from
// multiple file names.
if endFilename != startFilename {
return Span{}, fmt.Errorf("span begins in file %q but ends in %q", startFilename, endFilename)
}
}
s.v.Start.clean()
s.v.End.clean()
s.v.clean()
if r.Converter != nil {
return s.WithOffset(r.Converter)
}
if startFilename != f.Name() {
return Span{}, fmt.Errorf("must supply Converter for file %q containing lines from %q", f.Name(), startFilename)
}
return s.WithOffset(NewTokenConverter(r.FileSet, f))
}
func position(f *token.File, pos token.Pos) (string, int, int, error) {
off, err := offset(f, pos)
if err != nil {
return "", 0, 0, err
}
return positionFromOffset(f, off)
}
func positionFromOffset(f *token.File, offset int) (string, int, int, error) {
if offset > f.Size() {
return "", 0, 0, fmt.Errorf("offset %v is past the end of the file %v", offset, f.Size())
}
pos := f.Pos(offset)
p := f.Position(pos)
if offset == f.Size() {
return p.Filename, p.Line + 1, 1, nil
}
return p.Filename, p.Line, p.Column, nil
}
// offset is a copy of the Offset function in go/token, but with the adjustment
// that it does not panic on invalid positions.
func offset(f *token.File, pos token.Pos) (int, error) {
if int(pos) < f.Base() || int(pos) > f.Base()+f.Size() {
return 0, fmt.Errorf("invalid pos")
}
return int(pos) - f.Base(), nil
}
// Range converts a Span to a Range that represents the Span for the supplied
// File.
func (s Span) Range(converter *TokenConverter) (Range, error) {
s, err := s.WithOffset(converter)
if err != nil {
return Range{}, err
}
// go/token will panic if the offset is larger than the file's size,
// so check here to avoid panicking.
if s.Start().Offset() > converter.file.Size() {
return Range{}, fmt.Errorf("start offset %v is past the end of the file %v", s.Start(), converter.file.Size())
}
if s.End().Offset() > converter.file.Size() {
return Range{}, fmt.Errorf("end offset %v is past the end of the file %v", s.End(), converter.file.Size())
}
return Range{
FileSet: converter.fset,
Start: converter.file.Pos(s.Start().Offset()),
End: converter.file.Pos(s.End().Offset()),
Converter: converter,
}, nil
}
func (l *TokenConverter) ToPosition(offset int) (int, int, error) {
_, line, col, err := positionFromOffset(l.file, offset)
return line, col, err
}
func (l *TokenConverter) ToOffset(line, col int) (int, error) {
if line < 0 {
return -1, fmt.Errorf("line is not valid")
}
lineMax := l.file.LineCount() + 1
if line > lineMax {
return -1, fmt.Errorf("line is beyond end of file %v", lineMax)
} else if line == lineMax {
if col > 1 {
return -1, fmt.Errorf("column is beyond end of file")
}
// at the end of the file, allowing for a trailing eol
return l.file.Size(), nil
}
pos := lineStart(l.file, line)
if !pos.IsValid() {
return -1, fmt.Errorf("line is not in file")
}
// we assume that column is in bytes here, and that the first byte of a
// line is at column 1
pos += token.Pos(col - 1)
return offset(l.file, pos)
}
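The deleted TokenConverter is essentially a thin wrapper over go/token. The standalone example below shows the same offset-to-position conversion using only the standard library; the file name and contents are made up:

package main

import (
	"fmt"
	"go/token"
)

func main() {
	content := []byte("package main\n\nfunc main() {}\n")
	fset := token.NewFileSet()
	f := fset.AddFile("main.go", -1, len(content))
	f.SetLinesForContent(content) // compute line starts, as NewContentConverter did

	offset := 14 // byte offset of the 'f' in "func"
	pos := f.Pos(offset)
	p := fset.Position(pos)
	fmt.Printf("offset %d -> %d:%d\n", offset, p.Line, p.Column) // offset 14 -> 3:1
}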


@ -1,39 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !go1.12
package span
import (
"go/token"
)
// lineStart is the pre-Go 1.12 version of (*token.File).LineStart. For Go
// versions <= 1.11, we borrow logic from the analysisutil package.
// TODO(rstambler): Delete this file when we no longer support Go 1.11.
func lineStart(f *token.File, line int) token.Pos {
// Use binary search to find the start offset of this line.
min := 0 // inclusive
max := f.Size() // exclusive
for {
offset := (min + max) / 2
pos := f.Pos(offset)
posn := f.Position(pos)
if posn.Line == line {
return pos - (token.Pos(posn.Column) - 1)
}
if min+1 >= max {
return token.NoPos
}
if posn.Line < line {
min = offset
} else {
max = offset
}
}
}


@ -1,16 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build go1.12
package span
import (
"go/token"
)
// TODO(rstambler): Delete this file when we no longer support Go 1.11.
func lineStart(f *token.File, line int) token.Pos {
return f.LineStart(line)
}


@ -1,152 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package span
import (
"fmt"
"net/url"
"os"
"path"
"path/filepath"
"runtime"
"strings"
"unicode"
)
const fileScheme = "file"
// URI represents the full URI for a file.
type URI string
// Filename returns the file path for the given URI.
// It is an error to call this on a URI that is not a valid filename.
func (uri URI) Filename() string {
filename, err := filename(uri)
if err != nil {
panic(err)
}
return filepath.FromSlash(filename)
}
func filename(uri URI) (string, error) {
if uri == "" {
return "", nil
}
u, err := url.ParseRequestURI(string(uri))
if err != nil {
return "", err
}
if u.Scheme != fileScheme {
return "", fmt.Errorf("only file URIs are supported, got %q from %q", u.Scheme, uri)
}
if isWindowsDriveURI(u.Path) {
u.Path = u.Path[1:]
}
return u.Path, nil
}
// NewURI returns a span URI for the string.
// It will attempt to detect if the string is a file path or uri.
func NewURI(s string) URI {
if u, err := url.PathUnescape(s); err == nil {
s = u
}
if strings.HasPrefix(s, fileScheme+"://") {
return URI(s)
}
return FileURI(s)
}
func CompareURI(a, b URI) int {
if equalURI(a, b) {
return 0
}
if a < b {
return -1
}
return 1
}
func equalURI(a, b URI) bool {
if a == b {
return true
}
// If we have the same URI basename, we may still have the same file URIs.
if !strings.EqualFold(path.Base(string(a)), path.Base(string(b))) {
return false
}
fa, err := filename(a)
if err != nil {
return false
}
fb, err := filename(b)
if err != nil {
return false
}
// Stat the files to check if they are equal.
infoa, err := os.Stat(filepath.FromSlash(fa))
if err != nil {
return false
}
infob, err := os.Stat(filepath.FromSlash(fb))
if err != nil {
return false
}
return os.SameFile(infoa, infob)
}
// FileURI returns a span URI for the supplied file path.
// It will always have the file scheme.
func FileURI(path string) URI {
if path == "" {
return ""
}
// Handle standard library paths that contain the literal "$GOROOT".
// TODO(rstambler): The go/packages API should allow one to determine a user's $GOROOT.
const prefix = "$GOROOT"
if len(path) >= len(prefix) && strings.EqualFold(prefix, path[:len(prefix)]) {
suffix := path[len(prefix):]
path = runtime.GOROOT() + suffix
}
if !isWindowsDrivePath(path) {
if abs, err := filepath.Abs(path); err == nil {
path = abs
}
}
// Check the file path again, in case it became absolute.
if isWindowsDrivePath(path) {
path = "/" + path
}
path = filepath.ToSlash(path)
u := url.URL{
Scheme: fileScheme,
Path: path,
}
uri := u.String()
if unescaped, err := url.PathUnescape(uri); err == nil {
uri = unescaped
}
return URI(uri)
}
// isWindowsDrivePath returns true if the file path is of the form used by
// Windows. We check if the path begins with a drive letter, followed by a ":".
func isWindowsDrivePath(path string) bool {
if len(path) < 4 {
return false
}
return unicode.IsLetter(rune(path[0])) && path[1] == ':'
}
// isWindowsDriveURI returns true if the file URI is of the format used by
// Windows URIs. The url.Parse package does not specially handle Windows paths
// (see https://golang.org/issue/6027). We check if the URI path has
// a drive prefix (e.g. "/C:"). If so, we trim the leading "/".
func isWindowsDriveURI(uri string) bool {
if len(uri) < 4 {
return false
}
return uri[0] == '/' && unicode.IsLetter(rune(uri[1])) && uri[2] == ':'
}
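The Windows handling the comments above describe can be seen with a standalone example: url.Parse leaves a leading "/" in front of the drive letter, which the deleted filename helper stripped. The helper is re-implemented here purely for illustration:

package main

import (
	"fmt"
	"net/url"
	"unicode"
)

// isWindowsDriveURI reports whether a URI path looks like "/C:...".
func isWindowsDriveURI(path string) bool {
	if len(path) < 4 {
		return false
	}
	return path[0] == '/' && unicode.IsLetter(rune(path[1])) && path[2] == ':'
}

func main() {
	u, err := url.Parse("file:///C:/Users/me/main.go")
	if err != nil {
		panic(err)
	}
	p := u.Path // "/C:/Users/me/main.go"
	if isWindowsDriveURI(p) {
		p = p[1:] // drop the leading slash
	}
	fmt.Println(p) // C:/Users/me/main.go
}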


@ -1,94 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package span
import (
"fmt"
"unicode/utf16"
"unicode/utf8"
)
// ToUTF16Column calculates the utf16 column expressed by the point given the
// supplied file contents.
// This is used to convert from the native (always in bytes) column
// representation and the utf16 counts used by some editors.
func ToUTF16Column(p Point, content []byte) (int, error) {
if content == nil {
return -1, fmt.Errorf("ToUTF16Column: missing content")
}
if !p.HasPosition() {
return -1, fmt.Errorf("ToUTF16Column: point is missing position")
}
if !p.HasOffset() {
return -1, fmt.Errorf("ToUTF16Column: point is missing offset")
}
offset := p.Offset() // 0-based
colZero := p.Column() - 1 // 0-based
if colZero == 0 {
// 0-based column 0, so it must be chr 1
return 1, nil
} else if colZero < 0 {
return -1, fmt.Errorf("ToUTF16Column: column is invalid (%v)", colZero)
}
// work out the offset at the start of the line using the column
lineOffset := offset - colZero
if lineOffset < 0 || offset > len(content) {
return -1, fmt.Errorf("ToUTF16Column: offsets %v-%v outside file contents (%v)", lineOffset, offset, len(content))
}
// Use the offset to pick out the line start.
// This cannot panic: offset > len(content) and lineOffset < offset.
start := content[lineOffset:]
// Now, truncate down to the supplied column.
start = start[:colZero]
// and count the number of utf16 characters
// in theory we could do this by hand more efficiently...
return len(utf16.Encode([]rune(string(start)))) + 1, nil
}
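A worked, standalone example of the byte-column to UTF-16-column conversion ToUTF16Column performed; the internal span package is not importable, so the same arithmetic is repeated inline here:

package main

import (
	"fmt"
	"unicode/utf16"
)

func main() {
	line := "αβx" // 'α' and 'β' are 2 bytes each in UTF-8, 1 code unit each in UTF-16
	byteCol := 5  // 1-based byte column of 'x' (it follows the 4 bytes of "αβ")
	prefix := line[:byteCol-1]
	utf16Col := len(utf16.Encode([]rune(prefix))) + 1
	fmt.Println(utf16Col) // 3: 'x' is the third UTF-16 code unit on the line
}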
// FromUTF16Column advances the point by the utf16 character offset given the
// supplied line contents.
// This is used to convert from the utf16 counts used by some editors to the
// native (always in bytes) column representation.
func FromUTF16Column(p Point, chr int, content []byte) (Point, error) {
if !p.HasOffset() {
return Point{}, fmt.Errorf("FromUTF16Column: point is missing offset")
}
// if chr is 1 then no adjustment needed
if chr <= 1 {
return p, nil
}
if p.Offset() >= len(content) {
return p, fmt.Errorf("FromUTF16Column: offset (%v) greater than length of content (%v)", p.Offset(), len(content))
}
remains := content[p.Offset():]
// scan forward the specified number of characters
for count := 1; count < chr; count++ {
if len(remains) <= 0 {
return Point{}, fmt.Errorf("FromUTF16Column: chr goes beyond the content")
}
r, w := utf8.DecodeRune(remains)
if r == '\n' {
// Per the LSP spec:
//
// > If the character value is greater than the line length it
// > defaults back to the line length.
break
}
remains = remains[w:]
if r >= 0x10000 {
// a two point rune
count++
// if we finished in a two point rune, do not advance past the first
if count >= chr {
break
}
}
p.v.Column += w
p.v.Offset += w
}
return p, nil
}

vendor/modules.txt vendored

@ -208,7 +208,7 @@ golang.org/x/crypto/acme
golang.org/x/crypto/acme/autocert golang.org/x/crypto/acme/autocert
golang.org/x/crypto/bcrypt golang.org/x/crypto/bcrypt
golang.org/x/crypto/blowfish golang.org/x/crypto/blowfish
# golang.org/x/lint v0.0.0-20190409202823-959b441ac422 # golang.org/x/lint v0.0.0-20200302205851-738671d3881b
golang.org/x/lint golang.org/x/lint
golang.org/x/lint/golint golang.org/x/lint/golint
# golang.org/x/net v0.0.0-20200202094626-16171245cfb2 # golang.org/x/net v0.0.0-20200202094626-16171245cfb2
@ -221,7 +221,7 @@ golang.org/x/text/transform
golang.org/x/text/unicode/bidi golang.org/x/text/unicode/bidi
golang.org/x/text/unicode/norm golang.org/x/text/unicode/norm
golang.org/x/text/width golang.org/x/text/width
# golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d # golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7
golang.org/x/tools/go/ast/astutil golang.org/x/tools/go/ast/astutil
golang.org/x/tools/go/ast/inspector golang.org/x/tools/go/ast/inspector
golang.org/x/tools/go/buildutil golang.org/x/tools/go/buildutil
@ -237,8 +237,8 @@ golang.org/x/tools/internal/fastwalk
golang.org/x/tools/internal/gopathwalk golang.org/x/tools/internal/gopathwalk
golang.org/x/tools/internal/imports golang.org/x/tools/internal/imports
golang.org/x/tools/internal/module golang.org/x/tools/internal/module
golang.org/x/tools/internal/packagesinternal
golang.org/x/tools/internal/semver golang.org/x/tools/internal/semver
golang.org/x/tools/internal/span
# gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc # gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc
gopkg.in/alexcesaro/quotedprintable.v3 gopkg.in/alexcesaro/quotedprintable.v3
# gopkg.in/d4l3k/messagediff.v1 v1.2.1 # gopkg.in/d4l3k/messagediff.v1 v1.2.1