diff --git a/go.mod b/go.mod index b2800c3a..cb7c25f0 100644 --- a/go.mod +++ b/go.mod @@ -73,6 +73,6 @@ require ( gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df gopkg.in/testfixtures.v2 v2.5.3 gopkg.in/yaml.v2 v2.2.2 // indirect - honnef.co/go/tools v0.0.0-20190215041234-466a0476246c + honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a src.techknowlogick.com/xormigrate v0.0.0-20190321151057-24497c23c09c ) diff --git a/go.sum b/go.sum index a7025e0f..f8bda70b 100644 --- a/go.sum +++ b/go.sum @@ -31,6 +31,7 @@ github.com/client9/misspell v0.3.4 h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/cweill/gotests v1.5.2 h1:kKqmKmS2wCV3tuLnfpbiuN8OlkosQZTpCfiqmiuNAsA= +github.com/cweill/gotests v1.5.3 h1:k3t4wW/x/YNixWZJhUIn+mivmK5iV1tJVOwVYkx0UcU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -266,5 +267,7 @@ honnef.co/go/tools v0.0.0-20190128043916-71123fcbb8fe h1:/GZ/onp6W295MEgrIwtlbnx honnef.co/go/tools v0.0.0-20190128043916-71123fcbb8fe/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190215041234-466a0476246c h1:z+UFwlQ7KVwdlQTE5JjvDvfZmyyAVrEiiwau20b7X8k= honnef.co/go/tools v0.0.0-20190215041234-466a0476246c/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a h1:LJwr7TCTghdatWv40WobzlKXc9c4s8oGa7QKJUtHhWA= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= src.techknowlogick.com/xormigrate v0.0.0-20190321151057-24497c23c09c h1:fTwL7EZ3ouk3xeiPiRBYEjSPWTREb9T57bjzpRBNOpQ= src.techknowlogick.com/xormigrate v0.0.0-20190321151057-24497c23c09c/go.mod h1:B2NutmcRaDDw4EGe7DoCwyWCELA8W+KxXPhLtgqFUaU= diff --git a/vendor/golang.org/x/tools/go/ast/inspector/inspector.go b/vendor/golang.org/x/tools/go/ast/inspector/inspector.go new file mode 100644 index 00000000..db88a951 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/inspector/inspector.go @@ -0,0 +1,182 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package inspector provides helper functions for traversal over the +// syntax trees of a package, including node filtering by type, and +// materialization of the traversal stack. +// +// During construction, the inspector does a complete traversal and +// builds a list of push/pop events and their node type. Subsequent +// method calls that request a traversal scan this list, rather than walk +// the AST, and perform type filtering using efficient bit sets. +// +// Experiments suggest the inspector's traversals are about 2.5x faster +// than ast.Inspect, but it may take around 5 traversals for this +// benefit to amortize the inspector's construction cost. +// If efficiency is the primary concern, do not use use Inspector for +// one-off traversals. 
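+//
+// A minimal usage sketch (the files slice and the *ast.CallExpr filter
+// are illustrative, not part of the API):
+//
+//	in := inspector.New(files) // files is the package's []*ast.File
+//	in.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, func(n ast.Node) {
+//		call := n.(*ast.CallExpr) // only *ast.CallExpr nodes are delivered
+//		_ = call
+//	})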
+package inspector

+
+// There are four orthogonal features in a traversal:
+//  1 type filtering
+//  2 pruning
+//  3 postorder calls to f
+//  4 stack
+// Rather than offer all of them in the API,
+// only a few combinations are exposed:
+// - Preorder is the fastest and has fewest features,
+//   but is the most commonly needed traversal.
+// - Nodes and WithStack both provide pruning and postorder calls,
+//   even though few clients need it, because supporting two versions
+//   is not justified.
+// More combinations could be supported by expressing them as
+// wrappers around a more generic traversal, but this was measured
+// and found to degrade performance significantly (30%).
+
+import (
+	"go/ast"
+)
+
+// An Inspector provides methods for inspecting
+// (traversing) the syntax trees of a package.
+type Inspector struct {
+	events []event
+}
+
+// New returns an Inspector for the specified syntax trees.
+func New(files []*ast.File) *Inspector {
+	return &Inspector{traverse(files)}
+}
+
+// An event represents a push or a pop
+// of an ast.Node during a traversal.
+type event struct {
+	node  ast.Node
+	typ   uint64 // typeOf(node)
+	index int    // 1 + index of corresponding pop event, or 0 if this is a pop
+}
+
+// Preorder visits all the nodes of the files supplied to New in
+// depth-first order. It calls f(n) for each node n before it visits
+// n's children.
+//
+// The types argument, if non-empty, enables type-based filtering of
+// events. The function f is called only for nodes whose type
+// matches an element of the types slice.
+func (in *Inspector) Preorder(types []ast.Node, f func(ast.Node)) {
+	// Because it avoids postorder calls to f, and the pruning
+	// check, Preorder is almost twice as fast as Nodes. The two
+	// features seem to contribute similar slowdowns (~1.4x each).
+
+	mask := maskOf(types)
+	for i := 0; i < len(in.events); {
+		ev := in.events[i]
+		if ev.typ&mask != 0 {
+			if ev.index > 0 {
+				f(ev.node)
+			}
+		}
+		i++
+	}
+}
+
+// Nodes visits the nodes of the files supplied to New in depth-first
+// order. It calls f(n, true) for each node n before it visits n's
+// children. If f returns true, Nodes invokes f recursively for each
+// of the non-nil children of the node, followed by a call of
+// f(n, false).
+//
+// The types argument, if non-empty, enables type-based filtering of
+// events. The function f is called only for nodes whose type
+// matches an element of the types slice.
+func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (prune bool)) {
+	mask := maskOf(types)
+	for i := 0; i < len(in.events); {
+		ev := in.events[i]
+		if ev.typ&mask != 0 {
+			if ev.index > 0 {
+				// push
+				if !f(ev.node, true) {
+					i = ev.index // jump to corresponding pop + 1
+					continue
+				}
+			} else {
+				// pop
+				f(ev.node, false)
+			}
+		}
+		i++
+	}
+}
+
+// WithStack visits nodes in a similar manner to Nodes, but it
+// supplies each call to f an additional argument, the current
+// traversal stack. The stack's first element is the outermost node,
+// an *ast.File; its last is the innermost, n.
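+//
+// A sketch of a typical call, given an Inspector in (the node filter and
+// the callback body are illustrative):
+//
+//	in.WithStack([]ast.Node{(*ast.ReturnStmt)(nil)}, func(n ast.Node, push bool, stack []ast.Node) bool {
+//		if push {
+//			file := stack[0].(*ast.File) // outermost element of the stack
+//			_ = file
+//		}
+//		return true // returning false would skip n's children and the pop call
+//	})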
+func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, stack []ast.Node) (prune bool)) { + mask := maskOf(types) + var stack []ast.Node + for i := 0; i < len(in.events); { + ev := in.events[i] + if ev.index > 0 { + // push + stack = append(stack, ev.node) + if ev.typ&mask != 0 { + if !f(ev.node, true, stack) { + i = ev.index + stack = stack[:len(stack)-1] + continue + } + } + } else { + // pop + if ev.typ&mask != 0 { + f(ev.node, false, stack) + } + stack = stack[:len(stack)-1] + } + i++ + } +} + +// traverse builds the table of events representing a traversal. +func traverse(files []*ast.File) []event { + // Preallocate approximate number of events + // based on source file extent. + // This makes traverse faster by 4x (!). + var extent int + for _, f := range files { + extent += int(f.End() - f.Pos()) + } + // This estimate is based on the net/http package. + events := make([]event, 0, extent*33/100) + + var stack []event + for _, f := range files { + ast.Inspect(f, func(n ast.Node) bool { + if n != nil { + // push + ev := event{ + node: n, + typ: typeOf(n), + index: len(events), // push event temporarily holds own index + } + stack = append(stack, ev) + events = append(events, ev) + } else { + // pop + ev := stack[len(stack)-1] + stack = stack[:len(stack)-1] + + events[ev.index].index = len(events) + 1 // make push refer to pop + + ev.index = 0 // turn ev into a pop event + events = append(events, ev) + } + return true + }) + } + + return events +} diff --git a/vendor/golang.org/x/tools/go/ast/inspector/typeof.go b/vendor/golang.org/x/tools/go/ast/inspector/typeof.go new file mode 100644 index 00000000..d61301b1 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/inspector/typeof.go @@ -0,0 +1,216 @@ +package inspector + +// This file defines func typeOf(ast.Node) uint64. +// +// The initial map-based implementation was too slow; +// see https://go-review.googlesource.com/c/tools/+/135655/1/go/ast/inspector/inspector.go#196 + +import "go/ast" + +const ( + nArrayType = iota + nAssignStmt + nBadDecl + nBadExpr + nBadStmt + nBasicLit + nBinaryExpr + nBlockStmt + nBranchStmt + nCallExpr + nCaseClause + nChanType + nCommClause + nComment + nCommentGroup + nCompositeLit + nDeclStmt + nDeferStmt + nEllipsis + nEmptyStmt + nExprStmt + nField + nFieldList + nFile + nForStmt + nFuncDecl + nFuncLit + nFuncType + nGenDecl + nGoStmt + nIdent + nIfStmt + nImportSpec + nIncDecStmt + nIndexExpr + nInterfaceType + nKeyValueExpr + nLabeledStmt + nMapType + nPackage + nParenExpr + nRangeStmt + nReturnStmt + nSelectStmt + nSelectorExpr + nSendStmt + nSliceExpr + nStarExpr + nStructType + nSwitchStmt + nTypeAssertExpr + nTypeSpec + nTypeSwitchStmt + nUnaryExpr + nValueSpec +) + +// typeOf returns a distinct single-bit value that represents the type of n. +// +// Various implementations were benchmarked with BenchmarkNewInspector: +// GOGC=off +// - type switch 4.9-5.5ms 2.1ms +// - binary search over a sorted list of types 5.5-5.9ms 2.5ms +// - linear scan, frequency-ordered list 5.9-6.1ms 2.7ms +// - linear scan, unordered list 6.4ms 2.7ms +// - hash table 6.5ms 3.1ms +// A perfect hash seemed like overkill. +// +// The compiler's switch statement is the clear winner +// as it produces a binary tree in code, +// with constant conditions and good branch prediction. +// (Sadly it is the most verbose in source code.) +// Binary search suffered from poor branch prediction. +// +func typeOf(n ast.Node) uint64 { + // Fast path: nearly half of all nodes are identifiers. 
+ if _, ok := n.(*ast.Ident); ok { + return 1 << nIdent + } + + // These cases include all nodes encountered by ast.Inspect. + switch n.(type) { + case *ast.ArrayType: + return 1 << nArrayType + case *ast.AssignStmt: + return 1 << nAssignStmt + case *ast.BadDecl: + return 1 << nBadDecl + case *ast.BadExpr: + return 1 << nBadExpr + case *ast.BadStmt: + return 1 << nBadStmt + case *ast.BasicLit: + return 1 << nBasicLit + case *ast.BinaryExpr: + return 1 << nBinaryExpr + case *ast.BlockStmt: + return 1 << nBlockStmt + case *ast.BranchStmt: + return 1 << nBranchStmt + case *ast.CallExpr: + return 1 << nCallExpr + case *ast.CaseClause: + return 1 << nCaseClause + case *ast.ChanType: + return 1 << nChanType + case *ast.CommClause: + return 1 << nCommClause + case *ast.Comment: + return 1 << nComment + case *ast.CommentGroup: + return 1 << nCommentGroup + case *ast.CompositeLit: + return 1 << nCompositeLit + case *ast.DeclStmt: + return 1 << nDeclStmt + case *ast.DeferStmt: + return 1 << nDeferStmt + case *ast.Ellipsis: + return 1 << nEllipsis + case *ast.EmptyStmt: + return 1 << nEmptyStmt + case *ast.ExprStmt: + return 1 << nExprStmt + case *ast.Field: + return 1 << nField + case *ast.FieldList: + return 1 << nFieldList + case *ast.File: + return 1 << nFile + case *ast.ForStmt: + return 1 << nForStmt + case *ast.FuncDecl: + return 1 << nFuncDecl + case *ast.FuncLit: + return 1 << nFuncLit + case *ast.FuncType: + return 1 << nFuncType + case *ast.GenDecl: + return 1 << nGenDecl + case *ast.GoStmt: + return 1 << nGoStmt + case *ast.Ident: + return 1 << nIdent + case *ast.IfStmt: + return 1 << nIfStmt + case *ast.ImportSpec: + return 1 << nImportSpec + case *ast.IncDecStmt: + return 1 << nIncDecStmt + case *ast.IndexExpr: + return 1 << nIndexExpr + case *ast.InterfaceType: + return 1 << nInterfaceType + case *ast.KeyValueExpr: + return 1 << nKeyValueExpr + case *ast.LabeledStmt: + return 1 << nLabeledStmt + case *ast.MapType: + return 1 << nMapType + case *ast.Package: + return 1 << nPackage + case *ast.ParenExpr: + return 1 << nParenExpr + case *ast.RangeStmt: + return 1 << nRangeStmt + case *ast.ReturnStmt: + return 1 << nReturnStmt + case *ast.SelectStmt: + return 1 << nSelectStmt + case *ast.SelectorExpr: + return 1 << nSelectorExpr + case *ast.SendStmt: + return 1 << nSendStmt + case *ast.SliceExpr: + return 1 << nSliceExpr + case *ast.StarExpr: + return 1 << nStarExpr + case *ast.StructType: + return 1 << nStructType + case *ast.SwitchStmt: + return 1 << nSwitchStmt + case *ast.TypeAssertExpr: + return 1 << nTypeAssertExpr + case *ast.TypeSpec: + return 1 << nTypeSpec + case *ast.TypeSwitchStmt: + return 1 << nTypeSwitchStmt + case *ast.UnaryExpr: + return 1 << nUnaryExpr + case *ast.ValueSpec: + return 1 << nValueSpec + } + return 0 +} + +func maskOf(nodes []ast.Node) uint64 { + if nodes == nil { + return 1<<64 - 1 // match all node types + } + var mask uint64 + for _, n := range nodes { + mask |= typeOf(n) + } + return mask +} diff --git a/vendor/honnef.co/go/tools/cmd/staticcheck/staticcheck.go b/vendor/honnef.co/go/tools/cmd/staticcheck/staticcheck.go index 3c8d9647..6f381850 100644 --- a/vendor/honnef.co/go/tools/cmd/staticcheck/staticcheck.go +++ b/vendor/honnef.co/go/tools/cmd/staticcheck/staticcheck.go @@ -20,11 +20,8 @@ func main() { simple.NewChecker(), staticcheck.NewChecker(), stylecheck.NewChecker(), + &unused.Checker{}, } - uc := unused.NewChecker(unused.CheckAll) - uc.ConsiderReflection = true - checkers = append(checkers, unused.NewLintChecker(uc)) - 
lintutil.ProcessFlagSet(checkers, fs) } diff --git a/vendor/honnef.co/go/tools/config/config.go b/vendor/honnef.co/go/tools/config/config.go index 112980b4..cfde5d51 100644 --- a/vendor/honnef.co/go/tools/config/config.go +++ b/vendor/honnef.co/go/tools/config/config.go @@ -82,7 +82,7 @@ var defaultConfig = Config{ "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", "UDP", "UI", "GID", "UID", "UUID", "URI", "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", - "XSS", + "XSS", "SIP", "RTP", }, DotImportWhitelist: []string{}, HTTPStatusCodeWhitelist: []string{"200", "400", "404", "500"}, diff --git a/vendor/honnef.co/go/tools/config/example.conf b/vendor/honnef.co/go/tools/config/example.conf index 5ffc597f..a715a24d 100644 --- a/vendor/honnef.co/go/tools/config/example.conf +++ b/vendor/honnef.co/go/tools/config/example.conf @@ -5,6 +5,6 @@ initialisms = ["ACL", "API", "ASCII", "CPU", "CSS", "DNS", "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", "UDP", "UI", "GID", "UID", "UUID", "URI", "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", - "XSS"] + "XSS", "SIP", "RTP"] dot_import_whitelist = [] http_status_code_whitelist = ["200", "400", "404", "500"] diff --git a/vendor/honnef.co/go/tools/deprecated/stdlib.go b/vendor/honnef.co/go/tools/deprecated/stdlib.go index b6b217c3..5d8ce186 100644 --- a/vendor/honnef.co/go/tools/deprecated/stdlib.go +++ b/vendor/honnef.co/go/tools/deprecated/stdlib.go @@ -9,46 +9,104 @@ var Stdlib = map[string]Deprecation{ "image/jpeg.Reader": {4, 0}, // FIXME(dh): AllowBinary isn't being detected as deprecated // because the comment has a newline right after "Deprecated:" - "go/build.AllowBinary": {7, 7}, - "(archive/zip.FileHeader).CompressedSize": {1, 1}, - "(archive/zip.FileHeader).UncompressedSize": {1, 1}, - "(go/doc.Package).Bugs": {1, 1}, - "os.SEEK_SET": {7, 7}, - "os.SEEK_CUR": {7, 7}, - "os.SEEK_END": {7, 7}, - "(net.Dialer).Cancel": {7, 7}, - "runtime.CPUProfile": {9, 0}, - "compress/flate.ReadError": {6, 6}, - "compress/flate.WriteError": {6, 6}, - "path/filepath.HasPrefix": {0, 0}, - "(net/http.Transport).Dial": {7, 7}, - "(*net/http.Transport).CancelRequest": {6, 5}, - "net/http.ErrWriteAfterFlush": {7, 0}, - "net/http.ErrHeaderTooLong": {8, 0}, - "net/http.ErrShortBody": {8, 0}, - "net/http.ErrMissingContentLength": {8, 0}, - "net/http/httputil.ErrPersistEOF": {0, 0}, - "net/http/httputil.ErrClosed": {0, 0}, - "net/http/httputil.ErrPipeline": {0, 0}, - "net/http/httputil.ServerConn": {0, 0}, - "net/http/httputil.NewServerConn": {0, 0}, - "net/http/httputil.ClientConn": {0, 0}, - "net/http/httputil.NewClientConn": {0, 0}, - "net/http/httputil.NewProxyClientConn": {0, 0}, - "(net/http.Request).Cancel": {7, 7}, - "(text/template/parse.PipeNode).Line": {1, 1}, - "(text/template/parse.ActionNode).Line": {1, 1}, - "(text/template/parse.BranchNode).Line": {1, 1}, - "(text/template/parse.TemplateNode).Line": {1, 1}, - "database/sql/driver.ColumnConverter": {9, 9}, - "database/sql/driver.Execer": {8, 8}, - "database/sql/driver.Queryer": {8, 8}, - "(database/sql/driver.Conn).Begin": {8, 8}, - "(database/sql/driver.Stmt).Exec": {8, 8}, - "(database/sql/driver.Stmt).Query": {8, 8}, - "syscall.StringByteSlice": {1, 1}, - "syscall.StringBytePtr": {1, 1}, - "syscall.StringSlicePtr": {1, 1}, - "syscall.StringToUTF16": {1, 1}, - "syscall.StringToUTF16Ptr": {1, 1}, + "go/build.AllowBinary": {7, 7}, + "(archive/zip.FileHeader).CompressedSize": {1, 1}, + "(archive/zip.FileHeader).UncompressedSize": {1, 1}, + "(archive/zip.FileHeader).ModifiedTime": {10, 10}, + 
"(archive/zip.FileHeader).ModifiedDate": {10, 10}, + "(*archive/zip.FileHeader).ModTime": {10, 10}, + "(*archive/zip.FileHeader).SetModTime": {10, 10}, + "(go/doc.Package).Bugs": {1, 1}, + "os.SEEK_SET": {7, 7}, + "os.SEEK_CUR": {7, 7}, + "os.SEEK_END": {7, 7}, + "(net.Dialer).Cancel": {7, 7}, + "runtime.CPUProfile": {9, 0}, + "compress/flate.ReadError": {6, 6}, + "compress/flate.WriteError": {6, 6}, + "path/filepath.HasPrefix": {0, 0}, + "(net/http.Transport).Dial": {7, 7}, + "(*net/http.Transport).CancelRequest": {6, 5}, + "net/http.ErrWriteAfterFlush": {7, 0}, + "net/http.ErrHeaderTooLong": {8, 0}, + "net/http.ErrShortBody": {8, 0}, + "net/http.ErrMissingContentLength": {8, 0}, + "net/http/httputil.ErrPersistEOF": {0, 0}, + "net/http/httputil.ErrClosed": {0, 0}, + "net/http/httputil.ErrPipeline": {0, 0}, + "net/http/httputil.ServerConn": {0, 0}, + "net/http/httputil.NewServerConn": {0, 0}, + "net/http/httputil.ClientConn": {0, 0}, + "net/http/httputil.NewClientConn": {0, 0}, + "net/http/httputil.NewProxyClientConn": {0, 0}, + "(net/http.Request).Cancel": {7, 7}, + "(text/template/parse.PipeNode).Line": {1, 1}, + "(text/template/parse.ActionNode).Line": {1, 1}, + "(text/template/parse.BranchNode).Line": {1, 1}, + "(text/template/parse.TemplateNode).Line": {1, 1}, + "database/sql/driver.ColumnConverter": {9, 9}, + "database/sql/driver.Execer": {8, 8}, + "database/sql/driver.Queryer": {8, 8}, + "(database/sql/driver.Conn).Begin": {8, 8}, + "(database/sql/driver.Stmt).Exec": {8, 8}, + "(database/sql/driver.Stmt).Query": {8, 8}, + "syscall.StringByteSlice": {1, 1}, + "syscall.StringBytePtr": {1, 1}, + "syscall.StringSlicePtr": {1, 1}, + "syscall.StringToUTF16": {1, 1}, + "syscall.StringToUTF16Ptr": {1, 1}, + "(*regexp.Regexp).Copy": {12, 12}, + "(archive/tar.Header).Xattrs": {10, 10}, + "archive/tar.TypeRegA": {11, 1}, + "go/types.NewInterface": {11, 11}, + "(*go/types.Interface).Embedded": {11, 11}, + "go/importer.For": {12, 12}, + "encoding/json.InvalidUTF8Error": {2, 2}, + "encoding/json.UnmarshalFieldError": {2, 2}, + "encoding/csv.ErrTrailingComma": {2, 2}, + "(encoding/csv.Reader).TrailingComma": {2, 2}, + "(net.Dialer).DualStack": {12, 12}, + "net/http.ErrUnexpectedTrailer": {12, 12}, + "net/http.CloseNotifier": {11, 7}, + "net/http.ProtocolError": {8, 8}, + "(crypto/x509.CertificateRequest).Attributes": {5, 3}, + // This function has no alternative, but also no purpose. 
+ "(*crypto/rc4.Cipher).Reset": {12, 0}, + "(net/http/httptest.ResponseRecorder).HeaderMap": {11, 7}, + + // All of these have been deprecated in favour of external libraries + "syscall.AttachLsf": {7, 0}, + "syscall.DetachLsf": {7, 0}, + "syscall.LsfSocket": {7, 0}, + "syscall.SetLsfPromisc": {7, 0}, + "syscall.LsfJump": {7, 0}, + "syscall.LsfStmt": {7, 0}, + "syscall.BpfStmt": {7, 0}, + "syscall.BpfJump": {7, 0}, + "syscall.BpfBuflen": {7, 0}, + "syscall.SetBpfBuflen": {7, 0}, + "syscall.BpfDatalink": {7, 0}, + "syscall.SetBpfDatalink": {7, 0}, + "syscall.SetBpfPromisc": {7, 0}, + "syscall.FlushBpf": {7, 0}, + "syscall.BpfInterface": {7, 0}, + "syscall.SetBpfInterface": {7, 0}, + "syscall.BpfTimeout": {7, 0}, + "syscall.SetBpfTimeout": {7, 0}, + "syscall.BpfStats": {7, 0}, + "syscall.SetBpfImmediate": {7, 0}, + "syscall.SetBpf": {7, 0}, + "syscall.CheckBpfVersion": {7, 0}, + "syscall.BpfHeadercmpl": {7, 0}, + "syscall.SetBpfHeadercmpl": {7, 0}, + "syscall.RouteRIB": {8, 0}, + "syscall.RoutingMessage": {8, 0}, + "syscall.RouteMessage": {8, 0}, + "syscall.InterfaceMessage": {8, 0}, + "syscall.InterfaceAddrMessage": {8, 0}, + "syscall.ParseRoutingMessage": {8, 0}, + "syscall.ParseRoutingSockaddr": {8, 0}, + "InterfaceAnnounceMessage": {7, 0}, + "InterfaceMulticastAddrMessage": {7, 0}, + "syscall.FormatMessage": {5, 0}, } diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/callee.go b/vendor/honnef.co/go/tools/go/types/typeutil/callee.go new file mode 100644 index 00000000..38f596da --- /dev/null +++ b/vendor/honnef.co/go/tools/go/types/typeutil/callee.go @@ -0,0 +1,46 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/ast/astutil" +) + +// Callee returns the named target of a function call, if any: +// a function, method, builtin, or variable. +func Callee(info *types.Info, call *ast.CallExpr) types.Object { + var obj types.Object + switch fun := astutil.Unparen(call.Fun).(type) { + case *ast.Ident: + obj = info.Uses[fun] // type, var, builtin, or declared func + case *ast.SelectorExpr: + if sel, ok := info.Selections[fun]; ok { + obj = sel.Obj() // method or field + } else { + obj = info.Uses[fun.Sel] // qualified identifier? + } + } + if _, ok := obj.(*types.TypeName); ok { + return nil // T(x) is a conversion, not a call + } + return obj +} + +// StaticCallee returns the target (function or method) of a static +// function call, if any. It returns nil for calls to builtins. +func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func { + if f, ok := Callee(info, call).(*types.Func); ok && !interfaceMethod(f) { + return f + } + return nil +} + +func interfaceMethod(f *types.Func) bool { + recv := f.Type().(*types.Signature).Recv() + return recv != nil && types.IsInterface(recv.Type()) +} diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/identical.go b/vendor/honnef.co/go/tools/go/types/typeutil/identical.go new file mode 100644 index 00000000..7eda2946 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/types/typeutil/identical.go @@ -0,0 +1,29 @@ +package typeutil + +import ( + "go/types" +) + +// Identical reports whether x and y are identical types. +// Unlike types.Identical, receivers of Signature types are not ignored. 
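+//
+// For example, two methods with identical parameter and result types but
+// different receiver types (say (T).M and (U).M, where T, U and M are
+// placeholder names) are identical according to types.Identical, but not
+// according to this function.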
+func Identical(x, y types.Type) (ret bool) { + if !types.Identical(x, y) { + return false + } + sigX, ok := x.(*types.Signature) + if !ok { + return true + } + sigY, ok := y.(*types.Signature) + if !ok { + // should be impossible + return true + } + if sigX.Recv() == sigY.Recv() { + return true + } + if sigX.Recv() == nil || sigY.Recv() == nil { + return false + } + return Identical(sigX.Recv().Type(), sigY.Recv().Type()) +} diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/imports.go b/vendor/honnef.co/go/tools/go/types/typeutil/imports.go new file mode 100644 index 00000000..9c441dba --- /dev/null +++ b/vendor/honnef.co/go/tools/go/types/typeutil/imports.go @@ -0,0 +1,31 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil + +import "go/types" + +// Dependencies returns all dependencies of the specified packages. +// +// Dependent packages appear in topological order: if package P imports +// package Q, Q appears earlier than P in the result. +// The algorithm follows import statements in the order they +// appear in the source code, so the result is a total order. +// +func Dependencies(pkgs ...*types.Package) []*types.Package { + var result []*types.Package + seen := make(map[*types.Package]bool) + var visit func(pkgs []*types.Package) + visit = func(pkgs []*types.Package) { + for _, p := range pkgs { + if !seen[p] { + seen[p] = true + visit(p.Imports()) + result = append(result, p) + } + } + } + visit(pkgs) + return result +} diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/map.go b/vendor/honnef.co/go/tools/go/types/typeutil/map.go new file mode 100644 index 00000000..db0b3bce --- /dev/null +++ b/vendor/honnef.co/go/tools/go/types/typeutil/map.go @@ -0,0 +1,315 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package typeutil defines various utilities for types, such as Map, +// a mapping from types.Type to interface{} values. +package typeutil + +import ( + "bytes" + "fmt" + "go/types" + "reflect" +) + +// Map is a hash-table-based mapping from types (types.Type) to +// arbitrary interface{} values. The concrete types that implement +// the Type interface are pointers. Since they are not canonicalized, +// == cannot be used to check for equivalence, and thus we cannot +// simply use a Go map. +// +// Just as with map[K]V, a nil *Map is a valid empty map. +// +// Not thread-safe. +// +// This fork handles Signatures correctly, respecting method receivers. +// +type Map struct { + hasher Hasher // shared by many Maps + table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused + length int // number of map entries +} + +// entry is an entry (key/value association) in a hash bucket. +type entry struct { + key types.Type + value interface{} +} + +// SetHasher sets the hasher used by Map. +// +// All Hashers are functionally equivalent but contain internal state +// used to cache the results of hashing previously seen types. +// +// A single Hasher created by MakeHasher() may be shared among many +// Maps. This is recommended if the instances have many keys in +// common, as it will amortize the cost of hash computation. +// +// A Hasher may grow without bound as new types are seen. 
Even when a +// type is deleted from the map, the Hasher never shrinks, since other +// types in the map may reference the deleted type indirectly. +// +// Hashers are not thread-safe, and read-only operations such as +// Map.Lookup require updates to the hasher, so a full Mutex lock (not a +// read-lock) is require around all Map operations if a shared +// hasher is accessed from multiple threads. +// +// If SetHasher is not called, the Map will create a private hasher at +// the first call to Insert. +// +func (m *Map) SetHasher(hasher Hasher) { + m.hasher = hasher +} + +// Delete removes the entry with the given key, if any. +// It returns true if the entry was found. +// +func (m *Map) Delete(key types.Type) bool { + if m != nil && m.table != nil { + hash := m.hasher.Hash(key) + bucket := m.table[hash] + for i, e := range bucket { + if e.key != nil && Identical(key, e.key) { + // We can't compact the bucket as it + // would disturb iterators. + bucket[i] = entry{} + m.length-- + return true + } + } + } + return false +} + +// At returns the map entry for the given key. +// The result is nil if the entry is not present. +// +func (m *Map) At(key types.Type) interface{} { + if m != nil && m.table != nil { + for _, e := range m.table[m.hasher.Hash(key)] { + if e.key != nil && Identical(key, e.key) { + return e.value + } + } + } + return nil +} + +// Set sets the map entry for key to val, +// and returns the previous entry, if any. +func (m *Map) Set(key types.Type, value interface{}) (prev interface{}) { + if m.table != nil { + hash := m.hasher.Hash(key) + bucket := m.table[hash] + var hole *entry + for i, e := range bucket { + if e.key == nil { + hole = &bucket[i] + } else if Identical(key, e.key) { + prev = e.value + bucket[i].value = value + return + } + } + + if hole != nil { + *hole = entry{key, value} // overwrite deleted entry + } else { + m.table[hash] = append(bucket, entry{key, value}) + } + } else { + if m.hasher.memo == nil { + m.hasher = MakeHasher() + } + hash := m.hasher.Hash(key) + m.table = map[uint32][]entry{hash: {entry{key, value}}} + } + + m.length++ + return +} + +// Len returns the number of map entries. +func (m *Map) Len() int { + if m != nil { + return m.length + } + return 0 +} + +// Iterate calls function f on each entry in the map in unspecified order. +// +// If f should mutate the map, Iterate provides the same guarantees as +// Go maps: if f deletes a map entry that Iterate has not yet reached, +// f will not be invoked for it, but if f inserts a map entry that +// Iterate has not yet reached, whether or not f will be invoked for +// it is unspecified. +// +func (m *Map) Iterate(f func(key types.Type, value interface{})) { + if m != nil { + for _, bucket := range m.table { + for _, e := range bucket { + if e.key != nil { + f(e.key, e.value) + } + } + } + } +} + +// Keys returns a new slice containing the set of map keys. +// The order is unspecified. +func (m *Map) Keys() []types.Type { + keys := make([]types.Type, 0, m.Len()) + m.Iterate(func(key types.Type, _ interface{}) { + keys = append(keys, key) + }) + return keys +} + +func (m *Map) toString(values bool) string { + if m == nil { + return "{}" + } + var buf bytes.Buffer + fmt.Fprint(&buf, "{") + sep := "" + m.Iterate(func(key types.Type, value interface{}) { + fmt.Fprint(&buf, sep) + sep = ", " + fmt.Fprint(&buf, key) + if values { + fmt.Fprintf(&buf, ": %q", value) + } + }) + fmt.Fprint(&buf, "}") + return buf.String() +} + +// String returns a string representation of the map's entries. 
+// Values are printed using fmt.Sprintf("%v", v). +// Order is unspecified. +// +func (m *Map) String() string { + return m.toString(true) +} + +// KeysString returns a string representation of the map's key set. +// Order is unspecified. +// +func (m *Map) KeysString() string { + return m.toString(false) +} + +//////////////////////////////////////////////////////////////////////// +// Hasher + +// A Hasher maps each type to its hash value. +// For efficiency, a hasher uses memoization; thus its memory +// footprint grows monotonically over time. +// Hashers are not thread-safe. +// Hashers have reference semantics. +// Call MakeHasher to create a Hasher. +type Hasher struct { + memo map[types.Type]uint32 +} + +// MakeHasher returns a new Hasher instance. +func MakeHasher() Hasher { + return Hasher{make(map[types.Type]uint32)} +} + +// Hash computes a hash value for the given type t such that +// Identical(t, t') => Hash(t) == Hash(t'). +func (h Hasher) Hash(t types.Type) uint32 { + hash, ok := h.memo[t] + if !ok { + hash = h.hashFor(t) + h.memo[t] = hash + } + return hash +} + +// hashString computes the Fowler–Noll–Vo hash of s. +func hashString(s string) uint32 { + var h uint32 + for i := 0; i < len(s); i++ { + h ^= uint32(s[i]) + h *= 16777619 + } + return h +} + +// hashFor computes the hash of t. +func (h Hasher) hashFor(t types.Type) uint32 { + // See Identical for rationale. + switch t := t.(type) { + case *types.Basic: + return uint32(t.Kind()) + + case *types.Array: + return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem()) + + case *types.Slice: + return 9049 + 2*h.Hash(t.Elem()) + + case *types.Struct: + var hash uint32 = 9059 + for i, n := 0, t.NumFields(); i < n; i++ { + f := t.Field(i) + if f.Anonymous() { + hash += 8861 + } + hash += hashString(t.Tag(i)) + hash += hashString(f.Name()) // (ignore f.Pkg) + hash += h.Hash(f.Type()) + } + return hash + + case *types.Pointer: + return 9067 + 2*h.Hash(t.Elem()) + + case *types.Signature: + var hash uint32 = 9091 + if t.Variadic() { + hash *= 8863 + } + return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results()) + + case *types.Interface: + var hash uint32 = 9103 + for i, n := 0, t.NumMethods(); i < n; i++ { + // See go/types.identicalMethods for rationale. + // Method order is not significant. + // Ignore m.Pkg(). + m := t.Method(i) + hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type()) + } + return hash + + case *types.Map: + return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem()) + + case *types.Chan: + return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem()) + + case *types.Named: + // Not safe with a copying GC; objects may move. + return uint32(reflect.ValueOf(t.Obj()).Pointer()) + + case *types.Tuple: + return h.hashTuple(t) + } + panic(t) +} + +func (h Hasher) hashTuple(tuple *types.Tuple) uint32 { + // See go/types.identicalTypes for rationale. + n := tuple.Len() + var hash uint32 = 9137 + 2*uint32(n) + for i := 0; i < n; i++ { + hash += 3 * h.Hash(tuple.At(i).Type()) + } + return hash +} diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/methodsetcache.go b/vendor/honnef.co/go/tools/go/types/typeutil/methodsetcache.go new file mode 100644 index 00000000..32084610 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/types/typeutil/methodsetcache.go @@ -0,0 +1,72 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file implements a cache of method sets. 
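+//
+// A rough usage sketch from a client package (T stands for any types.Type;
+// the zero value of MethodSetCache is ready to use and, being thread-safe,
+// may be shared across checks):
+//
+//	var cache typeutil.MethodSetCache
+//	mset := cache.MethodSet(T)
+//	_ = mset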
+ +package typeutil + +import ( + "go/types" + "sync" +) + +// A MethodSetCache records the method set of each type T for which +// MethodSet(T) is called so that repeat queries are fast. +// The zero value is a ready-to-use cache instance. +type MethodSetCache struct { + mu sync.Mutex + named map[*types.Named]struct{ value, pointer *types.MethodSet } // method sets for named N and *N + others map[types.Type]*types.MethodSet // all other types +} + +// MethodSet returns the method set of type T. It is thread-safe. +// +// If cache is nil, this function is equivalent to types.NewMethodSet(T). +// Utility functions can thus expose an optional *MethodSetCache +// parameter to clients that care about performance. +// +func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet { + if cache == nil { + return types.NewMethodSet(T) + } + cache.mu.Lock() + defer cache.mu.Unlock() + + switch T := T.(type) { + case *types.Named: + return cache.lookupNamed(T).value + + case *types.Pointer: + if N, ok := T.Elem().(*types.Named); ok { + return cache.lookupNamed(N).pointer + } + } + + // all other types + // (The map uses pointer equivalence, not type identity.) + mset := cache.others[T] + if mset == nil { + mset = types.NewMethodSet(T) + if cache.others == nil { + cache.others = make(map[types.Type]*types.MethodSet) + } + cache.others[T] = mset + } + return mset +} + +func (cache *MethodSetCache) lookupNamed(named *types.Named) struct{ value, pointer *types.MethodSet } { + if cache.named == nil { + cache.named = make(map[*types.Named]struct{ value, pointer *types.MethodSet }) + } + // Avoid recomputing mset(*T) for each distinct Pointer + // instance whose underlying type is a named type. + msets, ok := cache.named[named] + if !ok { + msets.value = types.NewMethodSet(named) + msets.pointer = types.NewMethodSet(types.NewPointer(named)) + cache.named[named] = msets + } + return msets +} diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/ui.go b/vendor/honnef.co/go/tools/go/types/typeutil/ui.go new file mode 100644 index 00000000..9849c24c --- /dev/null +++ b/vendor/honnef.co/go/tools/go/types/typeutil/ui.go @@ -0,0 +1,52 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil + +// This file defines utilities for user interfaces that display types. + +import "go/types" + +// IntuitiveMethodSet returns the intuitive method set of a type T, +// which is the set of methods you can call on an addressable value of +// that type. +// +// The result always contains MethodSet(T), and is exactly MethodSet(T) +// for interface types and for pointer-to-concrete types. +// For all other concrete types T, the result additionally +// contains each method belonging to *T if there is no identically +// named method on T itself. +// +// This corresponds to user intuition about method sets; +// this function is intended only for user interfaces. +// +// The order of the result is as for types.MethodSet(T). +// +func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection { + isPointerToConcrete := func(T types.Type) bool { + ptr, ok := T.(*types.Pointer) + return ok && !types.IsInterface(ptr.Elem()) + } + + var result []*types.Selection + mset := msets.MethodSet(T) + if types.IsInterface(T) || isPointerToConcrete(T) { + for i, n := 0, mset.Len(); i < n; i++ { + result = append(result, mset.At(i)) + } + } else { + // T is some other concrete type. 
+ // Report methods of T and *T, preferring those of T. + pmset := msets.MethodSet(types.NewPointer(T)) + for i, n := 0, pmset.Len(); i < n; i++ { + meth := pmset.At(i) + if m := mset.Lookup(meth.Obj().Pkg(), meth.Obj().Name()); m != nil { + meth = m + } + result = append(result, meth) + } + + } + return result +} diff --git a/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go index cbbafbcd..cf797fb1 100644 --- a/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go +++ b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go @@ -10,7 +10,7 @@ import ( ) func CheckRangeStringRunes(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { fn := func(node ast.Node) bool { rng, ok := node.(*ast.RangeStmt) if !ok || !IsBlank(rng.Key) { diff --git a/vendor/honnef.co/go/tools/lint/generated.go b/vendor/honnef.co/go/tools/lint/generated.go index 58b23f68..65532829 100644 --- a/vendor/honnef.co/go/tools/lint/generated.go +++ b/vendor/honnef.co/go/tools/lint/generated.go @@ -4,6 +4,7 @@ import ( "bufio" "bytes" "io" + "os" ) var ( @@ -15,8 +16,13 @@ var ( crnl = []byte("\r\n") ) -func isGenerated(r io.Reader) bool { - br := bufio.NewReader(r) +func isGenerated(path string) bool { + f, err := os.Open(path) + if err != nil { + return false + } + defer f.Close() + br := bufio.NewReader(f) for { s, err := br.ReadBytes('\n') if err != nil && err != io.EOF { diff --git a/vendor/honnef.co/go/tools/lint/lint.go b/vendor/honnef.co/go/tools/lint/lint.go index c81f6e82..c4d9ff67 100644 --- a/vendor/honnef.co/go/tools/lint/lint.go +++ b/vendor/honnef.co/go/tools/lint/lint.go @@ -2,6 +2,7 @@ package lint // import "honnef.co/go/tools/lint" import ( + "bytes" "fmt" "go/ast" "go/token" @@ -9,12 +10,14 @@ import ( "io" "os" "path/filepath" + "runtime" "sort" "strings" "sync" "time" "unicode" + "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/packages" "honnef.co/go/tools/config" "honnef.co/go/tools/ssa" @@ -22,9 +25,9 @@ import ( ) type Job struct { - Program *Program + Pkg *Pkg + GoVersion int - checker string check Check problems []Problem @@ -106,20 +109,10 @@ func (gi *GlobIgnore) Match(p Problem) bool { } type Program struct { - SSA *ssa.Program - InitialPackages []*Pkg - InitialFunctions []*ssa.Function - AllPackages []*packages.Package - AllFunctions []*ssa.Function - Files []*ast.File - GoVersion int - - tokenFileMap map[*token.File]*ast.File - astFileMap map[*ast.File]*Pkg - packagesMap map[string]*packages.Package - - genMu sync.RWMutex - generatedMap map[string]bool + SSA *ssa.Program + InitialPackages []*Pkg + AllPackages []*packages.Package + AllFunctions []*ssa.Function } func (prog *Program) Fset() *token.FileSet { @@ -141,7 +134,6 @@ type Problem struct { Position token.Position // position in source file Text string // the prose that describes the problem Check string - Checker string Package *Pkg Severity Severity } @@ -164,6 +156,7 @@ type Check struct { Fn Func ID string FilterGenerated bool + Doc string } // A Linter lints Go source code. 
@@ -205,12 +198,8 @@ func (l *Linter) ignore(p Problem) bool { return false } -func (prog *Program) File(node Positioner) *ast.File { - return prog.tokenFileMap[prog.SSA.Fset.File(node.Pos())] -} - func (j *Job) File(node Positioner) *ast.File { - return j.Program.File(node) + return j.Pkg.tokenFileMap[j.Pkg.Fset.File(node.Pos())] } func parseDirective(s string) (cmd string, args []string) { @@ -266,6 +255,7 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { if stats != nil { stats.SSABuild = time.Since(t) } + runtime.GC() t = time.Now() pkgMap := map[*ssa.Package]*Pkg{} @@ -291,9 +281,19 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { } pkg := &Pkg{ - SSA: ssapkg, - Package: pkg, - Config: cfg, + SSA: ssapkg, + Package: pkg, + Config: cfg, + Generated: map[string]bool{}, + tokenFileMap: map[*token.File]*ast.File{}, + } + pkg.Inspector = inspector.New(pkg.Syntax) + for _, f := range pkg.Syntax { + tf := pkg.Fset.File(f.Pos()) + pkg.tokenFileMap[tf] = f + + path := DisplayPosition(pkg.Fset, f.Pos()).Filename + pkg.Generated[path] = isGenerated(path) } pkgMap[ssapkg] = pkg pkgs = append(pkgs, pkg) @@ -303,42 +303,15 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { SSA: ssaprog, InitialPackages: pkgs, AllPackages: allPkgs, - GoVersion: l.GoVersion, - tokenFileMap: map[*token.File]*ast.File{}, - astFileMap: map[*ast.File]*Pkg{}, - generatedMap: map[string]bool{}, - } - prog.packagesMap = map[string]*packages.Package{} - for _, pkg := range allPkgs { - prog.packagesMap[pkg.Types.Path()] = pkg } - isInitial := map[*types.Package]struct{}{} - for _, pkg := range pkgs { - isInitial[pkg.Types] = struct{}{} - } for fn := range ssautil.AllFunctions(ssaprog) { + prog.AllFunctions = append(prog.AllFunctions, fn) if fn.Pkg == nil { continue } - prog.AllFunctions = append(prog.AllFunctions, fn) - if _, ok := isInitial[fn.Pkg.Pkg]; ok { - prog.InitialFunctions = append(prog.InitialFunctions, fn) - } - } - for _, pkg := range pkgs { - prog.Files = append(prog.Files, pkg.Syntax...) 
- - ssapkg := ssaprog.Package(pkg.Types) - for _, f := range pkg.Syntax { - prog.astFileMap[f] = pkgMap[ssapkg] - } - } - - for _, pkg := range allPkgs { - for _, f := range pkg.Syntax { - tf := pkg.Fset.File(f.Pos()) - prog.tokenFileMap[tf] = f + if pkg, ok := pkgMap[fn.Pkg]; ok { + pkg.InitialFunctions = append(pkg.InitialFunctions, fn) } } @@ -346,6 +319,19 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { l.automaticIgnores = nil for _, pkg := range initial { for _, f := range pkg.Syntax { + found := false + commentLoop: + for _, cg := range f.Comments { + for _, c := range cg.List { + if strings.Contains(c.Text, "//lint:") { + found = true + break commentLoop + } + } + } + if !found { + continue + } cm := ast.NewCommentMap(pkg.Fset, f, f.Comments) for node, cgs := range cm { for _, cg := range cgs { @@ -359,10 +345,9 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { if len(args) < 2 { // FIXME(dh): this causes duplicated warnings when using megacheck p := Problem{ - Position: prog.DisplayPosition(c.Pos()), + Position: DisplayPosition(prog.Fset(), c.Pos()), Text: "malformed linter directive; missing the required reason field?", Check: "", - Checker: "lint", Package: nil, } out = append(out, p) @@ -373,7 +358,7 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { continue } checks := strings.Split(args[0], ",") - pos := prog.DisplayPosition(node.Pos()) + pos := DisplayPosition(prog.Fset(), node.Pos()) var ig Ignore switch cmd { case "ignore": @@ -396,23 +381,6 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { } } - sizes := struct { - types int - defs int - uses int - implicits int - selections int - scopes int - }{} - for _, pkg := range pkgs { - sizes.types += len(pkg.TypesInfo.Types) - sizes.defs += len(pkg.TypesInfo.Defs) - sizes.uses += len(pkg.TypesInfo.Uses) - sizes.implicits += len(pkg.TypesInfo.Implicits) - sizes.selections += len(pkg.TypesInfo.Selections) - sizes.scopes += len(pkg.TypesInfo.Scopes) - } - if stats != nil { stats.OtherInitWork = time.Since(t) } @@ -428,41 +396,31 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { var jobs []*Job var allChecks []string + var wg sync.WaitGroup for _, checker := range l.Checkers { - checks := checker.Checks() - for _, check := range checks { + for _, check := range checker.Checks() { allChecks = append(allChecks, check.ID) - j := &Job{ - Program: prog, - checker: checker.Name(), - check: check, + if check.Fn == nil { + continue + } + for _, pkg := range pkgs { + j := &Job{ + Pkg: pkg, + check: check, + GoVersion: l.GoVersion, + } + jobs = append(jobs, j) + wg.Add(1) + go func(check Check, j *Job) { + t := time.Now() + check.Fn(j) + j.duration = time.Since(t) + wg.Done() + }(check, j) } - jobs = append(jobs, j) } } - max := len(jobs) - if l.MaxConcurrentJobs > 0 { - max = l.MaxConcurrentJobs - } - - sem := make(chan struct{}, max) - wg := &sync.WaitGroup{} - for _, j := range jobs { - wg.Add(1) - go func(j *Job) { - defer wg.Done() - sem <- struct{}{} - defer func() { <-sem }() - fn := j.check.Fn - if fn == nil { - return - } - t := time.Now() - fn(j) - j.duration = time.Since(t) - }(j) - } wg.Wait() for _, j := range jobs { @@ -470,6 +428,9 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { stats.Jobs = append(stats.Jobs, JobStat{j.check.ID, j.duration}) } for _, p := range j.problems { + if p.Package == nil { + 
panic(fmt.Sprintf("internal error: problem at position %s has nil package", p.Position)) + } allowedChecks := FilterChecks(allChecks, p.Package.Config.Checks) if l.ignore(p) { @@ -498,19 +459,21 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { } couldveMatched := false - for f, pkg := range prog.astFileMap { - if prog.Fset().Position(f.Pos()).Filename != ig.File { - continue - } - allowedChecks := FilterChecks(allChecks, pkg.Config.Checks) - for _, c := range ig.Checks { - if !allowedChecks[c] { + for _, pkg := range pkgs { + for _, f := range pkg.tokenFileMap { + if prog.Fset().Position(f.Pos()).Filename != ig.File { continue } - couldveMatched = true + allowedChecks := FilterChecks(allChecks, pkg.Config.Checks) + for _, c := range ig.Checks { + if !allowedChecks[c] { + continue + } + couldveMatched = true + break + } break } - break } if !couldveMatched { @@ -519,10 +482,9 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { continue } p := Problem{ - Position: prog.DisplayPosition(ig.pos), + Position: DisplayPosition(prog.Fset(), ig.pos), Text: "this linter directive didn't match anything; should it be removed?", Check: "", - Checker: "lint", Package: nil, } out = append(out, p) @@ -609,28 +571,30 @@ func FilterChecks(allChecks []string, checks []string) map[string]bool { return allowedChecks } -func (prog *Program) Package(path string) *packages.Package { - return prog.packagesMap[path] -} - // Pkg represents a package being linted. type Pkg struct { - SSA *ssa.Package + SSA *ssa.Package + InitialFunctions []*ssa.Function *packages.Package - Config config.Config + Config config.Config + Inspector *inspector.Inspector + // TODO(dh): this map should probably map from *ast.File, not string + Generated map[string]bool + + tokenFileMap map[*token.File]*ast.File } type Positioner interface { Pos() token.Pos } -func (prog *Program) DisplayPosition(p token.Pos) token.Position { +func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position { // Only use the adjusted position if it points to another Go file. // This means we'll point to the original file for cgo files, but // we won't point to a YACC grammar file. - pos := prog.Fset().PositionFor(p, false) - adjPos := prog.Fset().PositionFor(p, true) + pos := fset.PositionFor(p, false) + adjPos := fset.PositionFor(p, true) if filepath.Ext(adjPos.Filename) == ".go" { return adjPos @@ -638,60 +602,21 @@ func (prog *Program) DisplayPosition(p token.Pos) token.Position { return pos } -func (prog *Program) isGenerated(path string) bool { - // This function isn't very efficient in terms of lock contention - // and lack of parallelism, but it really shouldn't matter. - // Projects consists of thousands of files, and have hundreds of - // errors. That's not a lot of calls to isGenerated. 
- - prog.genMu.RLock() - if b, ok := prog.generatedMap[path]; ok { - prog.genMu.RUnlock() - return b - } - prog.genMu.RUnlock() - prog.genMu.Lock() - defer prog.genMu.Unlock() - // recheck to avoid doing extra work in case of race - if b, ok := prog.generatedMap[path]; ok { - return b - } - - f, err := os.Open(path) - if err != nil { - return false - } - defer f.Close() - b := isGenerated(f) - prog.generatedMap[path] = b - return b -} - func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem { - tf := j.Program.SSA.Fset.File(n.Pos()) - f := j.Program.tokenFileMap[tf] - pkg := j.Program.astFileMap[f] - - pos := j.Program.DisplayPosition(n.Pos()) - if j.Program.isGenerated(pos.Filename) && j.check.FilterGenerated { + pos := DisplayPosition(j.Pkg.Fset, n.Pos()) + if j.Pkg.Generated[pos.Filename] && j.check.FilterGenerated { return nil } problem := Problem{ Position: pos, Text: fmt.Sprintf(format, args...), Check: j.check.ID, - Checker: j.checker, - Package: pkg, + Package: j.Pkg, } j.problems = append(j.problems, problem) return &j.problems[len(j.problems)-1] } -func (j *Job) NodePackage(node Positioner) *Pkg { - f := j.File(node) - return j.Program.astFileMap[f] -} - func allPackages(pkgs []*packages.Package) []*packages.Package { var out []*packages.Package packages.Visit( @@ -704,3 +629,51 @@ func allPackages(pkgs []*packages.Package) []*packages.Package { ) return out } + +var bufferPool = &sync.Pool{ + New: func() interface{} { + buf := bytes.NewBuffer(nil) + buf.Grow(64) + return buf + }, +} + +func FuncName(f *types.Func) string { + buf := bufferPool.Get().(*bytes.Buffer) + buf.Reset() + if f.Type() != nil { + sig := f.Type().(*types.Signature) + if recv := sig.Recv(); recv != nil { + buf.WriteByte('(') + if _, ok := recv.Type().(*types.Interface); ok { + // gcimporter creates abstract methods of + // named interfaces using the interface type + // (not the named type) as the receiver. + // Don't print it in full. 
+ buf.WriteString("interface") + } else { + types.WriteType(buf, recv.Type(), nil) + } + buf.WriteByte(')') + buf.WriteByte('.') + } else if f.Pkg() != nil { + writePackage(buf, f.Pkg()) + } + } + buf.WriteString(f.Name()) + s := buf.String() + bufferPool.Put(buf) + return s +} + +func writePackage(buf *bytes.Buffer, pkg *types.Package) { + if pkg == nil { + return + } + var s string + s = pkg.Path() + if s != "" { + buf.WriteString(s) + buf.WriteByte('.') + } +} diff --git a/vendor/honnef.co/go/tools/lint/lintdsl/lintdsl.go b/vendor/honnef.co/go/tools/lint/lintdsl/lintdsl.go index 2f614c9b..ab6800ab 100644 --- a/vendor/honnef.co/go/tools/lint/lintdsl/lintdsl.go +++ b/vendor/honnef.co/go/tools/lint/lintdsl/lintdsl.go @@ -30,7 +30,7 @@ func CallName(call *ssa.CallCommon) string { if !ok { return "" } - return fn.FullName() + return lint.FuncName(fn) case *ssa.Builtin: return v.Name() } @@ -63,7 +63,7 @@ func IsExample(fn *ssa.Function) bool { func IsPointerLike(T types.Type) bool { switch T := T.Underlying().(type) { - case *types.Interface, *types.Chan, *types.Map, *types.Pointer: + case *types.Interface, *types.Chan, *types.Map, *types.Signature, *types.Pointer: return true case *types.Basic: return T.Kind() == types.UnsafePointer @@ -103,26 +103,14 @@ func IsZero(expr ast.Expr) bool { return IsIntLiteral(expr, "0") } -func TypeOf(j *lint.Job, expr ast.Expr) types.Type { - if expr == nil { - return nil - } - return j.NodePackage(expr).TypesInfo.TypeOf(expr) -} - -func IsOfType(j *lint.Job, expr ast.Expr, name string) bool { return IsType(TypeOf(j, expr), name) } - -func ObjectOf(j *lint.Job, ident *ast.Ident) types.Object { - if ident == nil { - return nil - } - return j.NodePackage(ident).TypesInfo.ObjectOf(ident) +func IsOfType(j *lint.Job, expr ast.Expr, name string) bool { + return IsType(j.Pkg.TypesInfo.TypeOf(expr), name) } func IsInTest(j *lint.Job, node lint.Positioner) bool { // FIXME(dh): this doesn't work for global variables with // initializers - f := j.Program.SSA.Fset.File(node.Pos()) + f := j.Pkg.Fset.File(node.Pos()) return f != nil && strings.HasSuffix(f.Name(), "_test.go") } @@ -130,15 +118,11 @@ func IsInMain(j *lint.Job, node lint.Positioner) bool { if node, ok := node.(packager); ok { return node.Package().Pkg.Name() == "main" } - pkg := j.NodePackage(node) - if pkg == nil { - return false - } - return pkg.Types.Name() == "main" + return j.Pkg.Types.Name() == "main" } func SelectorName(j *lint.Job, expr *ast.SelectorExpr) string { - info := j.NodePackage(expr).TypesInfo + info := j.Pkg.TypesInfo sel := info.Selections[expr] if sel == nil { if x, ok := expr.X.(*ast.Ident); ok { @@ -155,11 +139,11 @@ func SelectorName(j *lint.Job, expr *ast.SelectorExpr) string { } func IsNil(j *lint.Job, expr ast.Expr) bool { - return j.NodePackage(expr).TypesInfo.Types[expr].IsNil() + return j.Pkg.TypesInfo.Types[expr].IsNil() } func BoolConst(j *lint.Job, expr ast.Expr) bool { - val := j.NodePackage(expr).TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() + val := j.Pkg.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() return constant.BoolVal(val) } @@ -172,7 +156,7 @@ func IsBoolConst(j *lint.Job, expr ast.Expr) bool { if !ok { return false } - obj := j.NodePackage(expr).TypesInfo.ObjectOf(ident) + obj := j.Pkg.TypesInfo.ObjectOf(ident) c, ok := obj.(*types.Const) if !ok { return false @@ -188,7 +172,7 @@ func IsBoolConst(j *lint.Job, expr ast.Expr) bool { } func ExprToInt(j *lint.Job, expr ast.Expr) (int64, bool) { - tv := j.NodePackage(expr).TypesInfo.Types[expr] 
+ tv := j.Pkg.TypesInfo.Types[expr] if tv.Value == nil { return 0, false } @@ -199,7 +183,7 @@ func ExprToInt(j *lint.Job, expr ast.Expr) (int64, bool) { } func ExprToString(j *lint.Job, expr ast.Expr) (string, bool) { - val := j.NodePackage(expr).TypesInfo.Types[expr].Value + val := j.Pkg.TypesInfo.Types[expr].Value if val == nil { return "", false } @@ -229,22 +213,22 @@ func DereferenceR(T types.Type) types.Type { } func IsGoVersion(j *lint.Job, minor int) bool { - return j.Program.GoVersion >= minor + return j.GoVersion >= minor } func CallNameAST(j *lint.Job, call *ast.CallExpr) string { switch fun := call.Fun.(type) { case *ast.SelectorExpr: - fn, ok := ObjectOf(j, fun.Sel).(*types.Func) + fn, ok := j.Pkg.TypesInfo.ObjectOf(fun.Sel).(*types.Func) if !ok { return "" } - return fn.FullName() + return lint.FuncName(fn) case *ast.Ident: - obj := ObjectOf(j, fun) + obj := j.Pkg.TypesInfo.ObjectOf(fun) switch obj := obj.(type) { case *types.Func: - return obj.FullName() + return lint.FuncName(obj) case *types.Builtin: return obj.Name() default: @@ -273,9 +257,8 @@ func IsCallToAnyAST(j *lint.Job, node ast.Node, names ...string) bool { } func Render(j *lint.Job, x interface{}) string { - fset := j.Program.SSA.Fset var buf bytes.Buffer - if err := printer.Fprint(&buf, fset, x); err != nil { + if err := printer.Fprint(&buf, j.Pkg.Fset, x); err != nil { panic(err) } return buf.String() @@ -311,11 +294,10 @@ func Inspect(node ast.Node, fn func(node ast.Node) bool) { ast.Inspect(node, fn) } -func GroupSpecs(j *lint.Job, specs []ast.Spec) [][]ast.Spec { +func GroupSpecs(fset *token.FileSet, specs []ast.Spec) [][]ast.Spec { if len(specs) == 0 { return nil } - fset := j.Program.SSA.Fset groups := make([][]ast.Spec, 1) groups[0] = append(groups[0], specs[0]) diff --git a/vendor/honnef.co/go/tools/lint/lintutil/util.go b/vendor/honnef.co/go/tools/lint/lintutil/util.go index 1142aa04..70171183 100644 --- a/vendor/honnef.co/go/tools/lint/lintutil/util.go +++ b/vendor/honnef.co/go/tools/lint/lintutil/util.go @@ -17,6 +17,7 @@ import ( "os" "regexp" "runtime" + "runtime/debug" "runtime/pprof" "strconv" "strings" @@ -109,6 +110,7 @@ func FlagSet(name string) *flag.FlagSet { flags.Bool("version", false, "Print version and exit") flags.Bool("show-ignored", false, "Don't filter ignored problems") flags.String("f", "text", "Output `format` (valid choices are 'stylish', 'text' and 'json')") + flags.String("explain", "", "Print description of `check`") flags.Int("debug.max-concurrent-jobs", 0, "Number of jobs to run concurrently") flags.Bool("debug.print-stats", false, "Print debug statistics") @@ -131,7 +133,22 @@ func FlagSet(name string) *flag.FlagSet { return flags } +func findCheck(cs []lint.Checker, check string) (lint.Check, bool) { + for _, c := range cs { + for _, cc := range c.Checks() { + if cc.ID == check { + return cc, true + } + } + } + return lint.Check{}, false +} + func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { + if _, ok := os.LookupEnv("GOGC"); !ok { + debug.SetGCPercent(50) + } + tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string) ignore := fs.Lookup("ignore").Value.(flag.Getter).Get().(string) tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool) @@ -139,6 +156,7 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { formatter := fs.Lookup("f").Value.(flag.Getter).Get().(string) printVersion := fs.Lookup("version").Value.(flag.Getter).Get().(bool) showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool) + explain := 
fs.Lookup("explain").Value.(flag.Getter).Get().(string) maxConcurrentJobs := fs.Lookup("debug.max-concurrent-jobs").Value.(flag.Getter).Get().(int) printStats := fs.Lookup("debug.print-stats").Value.(flag.Getter).Get().(bool) @@ -175,6 +193,20 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { exit(0) } + if explain != "" { + check, ok := findCheck(cs, explain) + if !ok { + fmt.Fprintln(os.Stderr, "Couldn't find check", explain) + exit(1) + } + if check.Doc == "" { + fmt.Fprintln(os.Stderr, explain, "has no documentation") + exit(1) + } + fmt.Println(check.Doc) + exit(0) + } + ps, err := Lint(cs, fs.Args(), &Options{ Tags: strings.Fields(tags), LintTests: tests, @@ -279,6 +311,7 @@ func Lint(cs []lint.Checker, paths []string, opt *Options) ([]lint.Problem, erro return nil, err } stats.PackageLoading = time.Since(t) + runtime.GC() var problems []lint.Problem workingPkgs := make([]*packages.Package, 0, len(pkgs)) @@ -346,7 +379,6 @@ func compileErrors(pkg *packages.Package) []lint.Problem { p := lint.Problem{ Position: parsePos(err.Pos), Text: err.Msg, - Checker: "compiler", Check: "compile", } ps = append(ps, p) diff --git a/vendor/honnef.co/go/tools/printf/fuzz.go b/vendor/honnef.co/go/tools/printf/fuzz.go new file mode 100644 index 00000000..8ebf357f --- /dev/null +++ b/vendor/honnef.co/go/tools/printf/fuzz.go @@ -0,0 +1,11 @@ +// +build gofuzz + +package printf + +func Fuzz(data []byte) int { + _, err := Parse(string(data)) + if err == nil { + return 1 + } + return 0 +} diff --git a/vendor/honnef.co/go/tools/printf/printf.go b/vendor/honnef.co/go/tools/printf/printf.go new file mode 100644 index 00000000..754db9b1 --- /dev/null +++ b/vendor/honnef.co/go/tools/printf/printf.go @@ -0,0 +1,197 @@ +// Package printf implements a parser for fmt.Printf-style format +// strings. +// +// It parses verbs according to the following syntax: +// Numeric -> '0'-'9' +// Letter -> 'a'-'z' | 'A'-'Z' +// Index -> '[' Numeric+ ']' +// Star -> '*' +// Star -> Index '*' +// +// Precision -> Numeric+ | Star +// Width -> Numeric+ | Star +// +// WidthAndPrecision -> Width '.' Precision +// WidthAndPrecision -> Width '.' +// WidthAndPrecision -> Width +// WidthAndPrecision -> '.' Precision +// WidthAndPrecision -> '.' +// +// Flag -> '+' | '-' | '#' | ' ' | '0' +// Verb -> Letter | '%' +// +// Input -> '%' [ Flag+ ] [ WidthAndPrecision ] [ Index ] Verb +package printf + +import ( + "errors" + "regexp" + "strconv" + "strings" +) + +// ErrInvalid is returned for invalid format strings or verbs. +var ErrInvalid = errors.New("invalid format string") + +type Verb struct { + Letter rune + Flags string + + Width Argument + Precision Argument + // Which value in the argument list the verb uses. + // -1 denotes the next argument, + // values > 0 denote explicit arguments. + // The value 0 denotes that no argument is consumed. This is the case for %%. + Value int + + Raw string +} + +// Argument is an implicit or explicit width or precision. +type Argument interface { + isArgument() +} + +// The Default value, when no width or precision is provided. +type Default struct{} + +// Zero is the implicit zero value. +// This value may only appear for precisions in format strings like %6.f +type Zero struct{} + +// Star is a * value, which may either refer to the next argument (Index == -1) or an explicit argument. +type Star struct{ Index int } + +// A Literal value, such as 6 in %6d. 
+type Literal int + +func (Default) isArgument() {} +func (Zero) isArgument() {} +func (Star) isArgument() {} +func (Literal) isArgument() {} + +// Parse parses f and returns a list of actions. +// An action may either be a literal string, or a Verb. +func Parse(f string) ([]interface{}, error) { + var out []interface{} + for len(f) > 0 { + if f[0] == '%' { + v, n, err := ParseVerb(f) + if err != nil { + return nil, err + } + f = f[n:] + out = append(out, v) + } else { + n := strings.IndexByte(f, '%') + if n > -1 { + out = append(out, f[:n]) + f = f[n:] + } else { + out = append(out, f) + f = "" + } + } + } + + return out, nil +} + +func atoi(s string) int { + n, _ := strconv.Atoi(s) + return n +} + +// ParseVerb parses the verb at the beginning of f. +// It returns the verb, how much of the input was consumed, and an error, if any. +func ParseVerb(f string) (Verb, int, error) { + if len(f) < 2 { + return Verb{}, 0, ErrInvalid + } + const ( + flags = 1 + + width = 2 + widthStar = 3 + widthIndex = 5 + + dot = 6 + prec = 7 + precStar = 8 + precIndex = 10 + + verbIndex = 11 + verb = 12 + ) + + m := re.FindStringSubmatch(f) + if m == nil { + return Verb{}, 0, ErrInvalid + } + + v := Verb{ + Letter: []rune(m[verb])[0], + Flags: m[flags], + Raw: m[0], + } + + if m[width] != "" { + // Literal width + v.Width = Literal(atoi(m[width])) + } else if m[widthStar] != "" { + // Star width + if m[widthIndex] != "" { + v.Width = Star{atoi(m[widthIndex])} + } else { + v.Width = Star{-1} + } + } else { + // Default width + v.Width = Default{} + } + + if m[dot] == "" { + // default precision + v.Precision = Default{} + } else { + if m[prec] != "" { + // Literal precision + v.Precision = Literal(atoi(m[prec])) + } else if m[precStar] != "" { + // Star precision + if m[precIndex] != "" { + v.Precision = Star{atoi(m[precIndex])} + } else { + v.Precision = Star{-1} + } + } else { + // Zero precision + v.Precision = Zero{} + } + } + + if m[verb] == "%" { + v.Value = 0 + } else if m[verbIndex] != "" { + v.Value = atoi(m[verbIndex]) + } else { + v.Value = -1 + } + + return v, len(m[0]), nil +} + +const ( + flags = `([+#0 -]*)` + verb = `([a-zA-Z%])` + index = `(?:\[([0-9]+)\])` + star = `((` + index + `)?\*)` + width1 = `([0-9]+)` + width2 = star + width = `(?:` + width1 + `|` + width2 + `)` + precision = width + widthAndPrecision = `(?:(?:` + width + `)?(?:(\.)(?:` + precision + `)?)?)` +) + +var re = regexp.MustCompile(`^%` + flags + widthAndPrecision + `?` + index + `?` + verb) diff --git a/vendor/honnef.co/go/tools/simple/doc.go b/vendor/honnef.co/go/tools/simple/doc.go new file mode 100644 index 00000000..ea437da5 --- /dev/null +++ b/vendor/honnef.co/go/tools/simple/doc.go @@ -0,0 +1,426 @@ +package simple + +var docS1000 = `Use plain channel send or receive + +Select statements with a single case can be replaced with a simple send or receive. + +Before: + +select { +case x := <-ch: + fmt.Println(x) +} + +After: + +x := <-ch +fmt.Println(x) + +Available since + 2017.1 +` + +var docS1001 = `Replace with copy() + +Use copy() for copying elements from one slice to another. 
+ +Before: + +for i, x := range src { + dst[i] = x +} + +After: + +copy(dst, src) + +Available since + 2017.1 +` + +var docS1002 = `Omit comparison with boolean constant + +Before: + +if x == true {} + +After: + +if x {} + +Available since + 2017.1 +` + +var docS1003 = `Replace with strings.Contains + +Before: + +if strings.Index(x, y) != -1 {} + +After: + +if strings.Contains(x, y) {} + +Available since + 2017.1 +` + +var docS1004 = `Replace with bytes.Equal + +Before: + +if bytes.Compare(x, y) == 0 {} + +After: + +if bytes.Equal(x, y) {} + +Available since + 2017.1 +` + +var docS1005 = `Drop unnecessary use of the blank identifier + +In many cases, assigning to the blank identifier is unnecessary. + +Before: + +for _ = range s {} +x, _ = someMap[key] +_ = <-ch + +After: + +for range s{} +x = someMap[key] +<-ch + +Available since + 2017.1 +` + +var docS1006 = `Replace with for { ... } + +For infinite loops, using for { ... } is the most idiomatic choice. + +Available since + 2017.1 +` + +var docS1007 = `Simplify regular expression by using raw string literal + +Raw string literals use ` + "`" + ` instead of " and do not support any escape sequences. This means that the backslash (\) can be used freely, without the need of escaping. + +Since regular expressions have their own escape sequences, raw strings can improve their readability. + +Before: + +regexp.Compile("\\A(\\w+) profile: total \\d+\\n\\z") + +After: + +regexp.Compile(` + "`" + `\A(\w+) profile: total \d+\n\z` + "`" + `) + +Available since + 2017.1 +` + +var docS1008 = `Simplify returning boolean expression + +Before: + +if { + return true +} +return false + +After: + +return + +Available since + 2017.1 +` + +var docS1009 = `Omit redundant nil check on slices + +The len function is defined for all slices, even nil ones, which have a length of zero. It is not necessary to check if a slice is not nil before checking that its length is not zero. + +Before: + +if x != nil && len(x) != 0 {} + +After: + +if len(x) != 0 {} + +Available since + 2017.1 +` + +var docS1010 = `Omit default slice index + +When slicing, the second index defaults to the length of the value, making s[n:len(s)] and s[n:] equivalent. + +Available since + 2017.1 +` + +var docS1011 = `Use a single append to concatenate two slices + +Before: + +for _, e := range y { + x = append(x, e) +} + +After: + +x = append(x, y...) + +Available since + 2017.1 +` + +var docS1012 = `Replace with time.Since(x) + +The time.Since helper has the same effect as using time.Now().Sub(x) but is easier to read. + +Before: + +time.Now().Sub(x) + +After: + +time.Since(x) + +Available since + 2017.1 +` + +var docS1016 = `Use a type conversion + +Two struct types with identical fields can be converted between each other. In older versions of Go, the fields had to have identical struct tags. Since Go 1.8, however, struct tags are ignored during conversions. It is thus not necessary to manually copy every field individually. + +Before: + +var x T1 +y := T2{ + Field1: x.Field1, + Field2: x.Field2, +} + +After: + +var x T1 +y := T2(x) + +Available since + 2017.1 +` + +var docS1017 = `Replace with strings.TrimPrefix + +Instead of using strings.HasPrefix and manual slicing, use the strings.TrimPrefix function. If the string doesn't start with the prefix, the original string will be returned. Using strings.TrimPrefix reduces complexity, and avoids common bugs, such as off-by-one mistakes. 
+ +Before: + +if strings.HasPrefix(str, prefix) { + str = str[len(prefix):] +} + +After: + +str = strings.TrimPrefix(str, prefix) + +Available since + 2017.1 +` + +var docS1018 = `Replace with copy() + +copy() permits using the same source and destination slice, even with overlapping ranges. This makes it ideal for sliding elements in a slice. + +Before: + +for i := 0; i < n; i++ { + bs[i] = bs[offset+i] +} + +After: + +copy(bs[:n], bs[offset:]) + +Available since + 2017.1 +` + +var docS1019 = `Simplify make call + +The make function has default values for the length and capacity arguments. For channels and maps, the length defaults to zero. Additionally, for slices the capacity defaults to the length. + +Available since + 2017.1 +` + +var docS1020 = `Omit redundant nil check in type assertion + +Before: + +if _, ok := i.(T); ok && i != nil {} + +After: + +if _, ok := i.(T); ok {} + +Available since + 2017.1 +` + +var docS1021 = `Merge variable declaration and assignment + +Before: + +var x uint +x = 1 + +After: + +var x uint = 1 + +Available since + 2017.1 +` +var docS1023 = `Omit redundant control flow + +Functions that have no return value do not need a return statement as the final statement of the function. + +Switches in Go do not have automatic fallthrough, unlike languages like C. It is not necessary to have a break statement as the final statement in a case block. + +Available since + 2017.1 +` + +var docS1024 = `Replace with time.Until(x) + +The time.Until helper has the same effect as using x.Sub(time.Now()) but is easier to read. + +Before: + +x.Sub(time.Now()) + +After: + +time.Until(x) + +Available since + 2017.1 +` + +var docS1025 = `Don't use fmt.Sprintf("%s", x) unnecessarily + +In many instances, there are easier and more efficient ways of getting a value's string representation. Whenever a value's underlying type is a string already, or the type has a String method, they should be used directly. + +Given the following shared definitions + +type T1 string +type T2 int + +func (T2) String() string { return "Hello, world" } + +var x string +var y T1 +var z T2 + +we can simplify the following + +fmt.Sprintf("%s", x) +fmt.Sprintf("%s", y) +fmt.Sprintf("%s", z) + +to + +x +string(y) +z.String() + +Available since + 2017.1 +` + +var docS1028 = `replace with fmt.Errorf + +Before: + +errors.New(fmt.Sprintf(...)) + +After: + +fmt.Errorf(...) + +Available since + 2017.1 +` + +var docS1029 = `Range over the string + +Ranging over a string will yield byte offsets and runes. If the offset isn't used, this is functionally equivalent to converting the string to a slice of runes and ranging over that. Ranging directly over the string will be more performant, however, as it avoids allocating a new slice, the size of which depends on the length of the string. + +Before: + +for _, r := range []rune(s) {} + +After: + +for _, r := range s {} + +Available since + 2017.1 +` + +var docS1030 = `Use bytes.Buffer.String or bytes.Buffer.Bytes + +bytes.Buffer has both a String and a Bytes method. It is never necessary to use string(buf.Bytes()) or []byte(buf.String()) – simply use the other method. + +Available since + 2017.1 +` + +var docS1031 = `Omit redundant nil check around loop + +You can use range on nil slices and maps, the loop will simply never execute. This makes an additional nil check around the loop unnecessary. + +Before: + +if s != nil { + for _, x := range s { + ... + } +} + +After: + +for _, x := range s { + ... 
+} + +Available since + 2017.1 +` + +var docS1032 = `Replace with sort.Ints(x), sort.Float64s(x), sort.Strings(x) + +The sort.Ints, sort.Float64s and sort.Strings functions are easier to read than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x)) and sort.Sort(sort.StringSlice(x)). + +Before: + +sort.Sort(sort.StringSlice(x)) + +After: + +sort.Strings(x) + +Available since + 2019.1 +` diff --git a/vendor/honnef.co/go/tools/simple/lint.go b/vendor/honnef.co/go/tools/simple/lint.go index 1d96713f..db805770 100644 --- a/vendor/honnef.co/go/tools/simple/lint.go +++ b/vendor/honnef.co/go/tools/simple/lint.go @@ -2,6 +2,7 @@ package simple // import "honnef.co/go/tools/simple" import ( + "fmt" "go/ast" "go/constant" "go/token" @@ -35,35 +36,35 @@ func (c *Checker) Init(prog *lint.Program) {} func (c *Checker) Checks() []lint.Check { return []lint.Check{ - {ID: "S1000", FilterGenerated: true, Fn: c.LintSingleCaseSelect}, - {ID: "S1001", FilterGenerated: true, Fn: c.LintLoopCopy}, - {ID: "S1002", FilterGenerated: true, Fn: c.LintIfBoolCmp}, - {ID: "S1003", FilterGenerated: true, Fn: c.LintStringsContains}, - {ID: "S1004", FilterGenerated: true, Fn: c.LintBytesCompare}, - {ID: "S1005", FilterGenerated: true, Fn: c.LintUnnecessaryBlank}, - {ID: "S1006", FilterGenerated: true, Fn: c.LintForTrue}, - {ID: "S1007", FilterGenerated: true, Fn: c.LintRegexpRaw}, - {ID: "S1008", FilterGenerated: true, Fn: c.LintIfReturn}, - {ID: "S1009", FilterGenerated: true, Fn: c.LintRedundantNilCheckWithLen}, - {ID: "S1010", FilterGenerated: true, Fn: c.LintSlicing}, - {ID: "S1011", FilterGenerated: true, Fn: c.LintLoopAppend}, - {ID: "S1012", FilterGenerated: true, Fn: c.LintTimeSince}, - {ID: "S1016", FilterGenerated: true, Fn: c.LintSimplerStructConversion}, - {ID: "S1017", FilterGenerated: true, Fn: c.LintTrim}, - {ID: "S1018", FilterGenerated: true, Fn: c.LintLoopSlide}, - {ID: "S1019", FilterGenerated: true, Fn: c.LintMakeLenCap}, - {ID: "S1020", FilterGenerated: true, Fn: c.LintAssertNotNil}, - {ID: "S1021", FilterGenerated: true, Fn: c.LintDeclareAssign}, - {ID: "S1023", FilterGenerated: true, Fn: c.LintRedundantBreak}, - {ID: "S1024", FilterGenerated: true, Fn: c.LintTimeUntil}, - {ID: "S1025", FilterGenerated: true, Fn: c.LintRedundantSprintf}, - {ID: "S1028", FilterGenerated: true, Fn: c.LintErrorsNewSprintf}, - {ID: "S1029", FilterGenerated: false, Fn: c.LintRangeStringRunes}, - {ID: "S1030", FilterGenerated: true, Fn: c.LintBytesBufferConversions}, - {ID: "S1031", FilterGenerated: true, Fn: c.LintNilCheckAroundRange}, - {ID: "S1032", FilterGenerated: true, Fn: c.LintSortHelpers}, - {ID: "S1033", FilterGenerated: true, Fn: c.LintGuardedDelete}, - {ID: "S1034", FilterGenerated: true, Fn: c.LintSimplifyTypeSwitch}, + {ID: "S1000", FilterGenerated: true, Fn: c.LintSingleCaseSelect, Doc: docS1000}, + {ID: "S1001", FilterGenerated: true, Fn: c.LintLoopCopy, Doc: docS1001}, + {ID: "S1002", FilterGenerated: true, Fn: c.LintIfBoolCmp, Doc: docS1002}, + {ID: "S1003", FilterGenerated: true, Fn: c.LintStringsContains, Doc: docS1003}, + {ID: "S1004", FilterGenerated: true, Fn: c.LintBytesCompare, Doc: docS1004}, + {ID: "S1005", FilterGenerated: true, Fn: c.LintUnnecessaryBlank, Doc: docS1005}, + {ID: "S1006", FilterGenerated: true, Fn: c.LintForTrue, Doc: docS1006}, + {ID: "S1007", FilterGenerated: true, Fn: c.LintRegexpRaw, Doc: docS1007}, + {ID: "S1008", FilterGenerated: true, Fn: c.LintIfReturn, Doc: docS1008}, + {ID: "S1009", FilterGenerated: true, Fn: c.LintRedundantNilCheckWithLen, Doc: docS1009}, + 
{ID: "S1010", FilterGenerated: true, Fn: c.LintSlicing, Doc: docS1010}, + {ID: "S1011", FilterGenerated: true, Fn: c.LintLoopAppend, Doc: docS1011}, + {ID: "S1012", FilterGenerated: true, Fn: c.LintTimeSince, Doc: docS1012}, + {ID: "S1016", FilterGenerated: true, Fn: c.LintSimplerStructConversion, Doc: docS1016}, + {ID: "S1017", FilterGenerated: true, Fn: c.LintTrim, Doc: docS1017}, + {ID: "S1018", FilterGenerated: true, Fn: c.LintLoopSlide, Doc: docS1018}, + {ID: "S1019", FilterGenerated: true, Fn: c.LintMakeLenCap, Doc: docS1019}, + {ID: "S1020", FilterGenerated: true, Fn: c.LintAssertNotNil, Doc: docS1020}, + {ID: "S1021", FilterGenerated: true, Fn: c.LintDeclareAssign, Doc: docS1021}, + {ID: "S1023", FilterGenerated: true, Fn: c.LintRedundantBreak, Doc: docS1023}, + {ID: "S1024", FilterGenerated: true, Fn: c.LintTimeUntil, Doc: docS1024}, + {ID: "S1025", FilterGenerated: true, Fn: c.LintRedundantSprintf, Doc: docS1025}, + {ID: "S1028", FilterGenerated: true, Fn: c.LintErrorsNewSprintf, Doc: docS1028}, + {ID: "S1029", FilterGenerated: false, Fn: c.LintRangeStringRunes, Doc: docS1029}, + {ID: "S1030", FilterGenerated: true, Fn: c.LintBytesBufferConversions, Doc: docS1030}, + {ID: "S1031", FilterGenerated: true, Fn: c.LintNilCheckAroundRange, Doc: docS1031}, + {ID: "S1032", FilterGenerated: true, Fn: c.LintSortHelpers, Doc: docS1032}, + {ID: "S1033", FilterGenerated: true, Fn: c.LintGuardedDelete, Doc: ``}, + {ID: "S1034", FilterGenerated: true, Fn: c.LintSimplifyTypeSwitch, Doc: ``}, } } @@ -77,129 +78,119 @@ func (c *Checker) LintSingleCaseSelect(j *lint.Job) { } seen := map[ast.Node]struct{}{} - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { switch v := node.(type) { case *ast.ForStmt: if len(v.Body.List) != 1 { - return true + return } if !isSingleSelect(v.Body.List[0]) { - return true + return } if _, ok := v.Body.List[0].(*ast.SelectStmt).Body.List[0].(*ast.CommClause).Comm.(*ast.SendStmt); ok { // Don't suggest using range for channel sends - return true + return } seen[v.Body.List[0]] = struct{}{} j.Errorf(node, "should use for range instead of for { select {} }") case *ast.SelectStmt: if _, ok := seen[v]; ok { - return true + return } if !isSingleSelect(v) { - return true + return } j.Errorf(node, "should use a simple channel send/receive instead of select with a single case") - return true } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.SelectStmt)(nil)}, fn) } func (c *Checker) LintLoopCopy(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.RangeStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + loop := node.(*ast.RangeStmt) if loop.Key == nil { - return true + return } if len(loop.Body.List) != 1 { - return true + return } stmt, ok := loop.Body.List[0].(*ast.AssignStmt) if !ok { - return true + return } if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 { - return true + return } lhs, ok := stmt.Lhs[0].(*ast.IndexExpr) if !ok { - return true + return } - if _, ok := TypeOf(j, lhs.X).(*types.Slice); !ok { - return true + if _, ok := j.Pkg.TypesInfo.TypeOf(lhs.X).(*types.Slice); !ok { + return } lidx, ok := lhs.Index.(*ast.Ident) if !ok { - return true + return } key, ok := loop.Key.(*ast.Ident) if !ok { - return true + return } - if TypeOf(j, lhs) == nil || TypeOf(j, stmt.Rhs[0]) == nil { - return true + if j.Pkg.TypesInfo.TypeOf(lhs) == nil || j.Pkg.TypesInfo.TypeOf(stmt.Rhs[0]) == nil { + return } - if 
ObjectOf(j, lidx) != ObjectOf(j, key) { - return true + if j.Pkg.TypesInfo.ObjectOf(lidx) != j.Pkg.TypesInfo.ObjectOf(key) { + return } - if !types.Identical(TypeOf(j, lhs), TypeOf(j, stmt.Rhs[0])) { - return true + if !types.Identical(j.Pkg.TypesInfo.TypeOf(lhs), j.Pkg.TypesInfo.TypeOf(stmt.Rhs[0])) { + return } - if _, ok := TypeOf(j, loop.X).(*types.Slice); !ok { - return true + if _, ok := j.Pkg.TypesInfo.TypeOf(loop.X).(*types.Slice); !ok { + return } if rhs, ok := stmt.Rhs[0].(*ast.IndexExpr); ok { rx, ok := rhs.X.(*ast.Ident) _ = rx if !ok { - return true + return } ridx, ok := rhs.Index.(*ast.Ident) if !ok { - return true + return } - if ObjectOf(j, ridx) != ObjectOf(j, key) { - return true + if j.Pkg.TypesInfo.ObjectOf(ridx) != j.Pkg.TypesInfo.ObjectOf(key) { + return } } else if rhs, ok := stmt.Rhs[0].(*ast.Ident); ok { value, ok := loop.Value.(*ast.Ident) if !ok { - return true + return } - if ObjectOf(j, rhs) != ObjectOf(j, value) { - return true + if j.Pkg.TypesInfo.ObjectOf(rhs) != j.Pkg.TypesInfo.ObjectOf(value) { + return } } else { - return true + return } j.Errorf(loop, "should use copy() instead of a loop") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) } func (c *Checker) LintIfBoolCmp(j *lint.Job) { - fn := func(node ast.Node) bool { - expr, ok := node.(*ast.BinaryExpr) - if !ok || (expr.Op != token.EQL && expr.Op != token.NEQ) { - return true + fn := func(node ast.Node) { + expr := node.(*ast.BinaryExpr) + if expr.Op != token.EQL && expr.Op != token.NEQ { + return } x := IsBoolConst(j, expr.X) y := IsBoolConst(j, expr.Y) if !x && !y { - return true + return } var other ast.Expr var val bool @@ -210,9 +201,9 @@ func (c *Checker) LintIfBoolCmp(j *lint.Job) { val = BoolConst(j, expr.Y) other = expr.X } - basic, ok := TypeOf(j, other).Underlying().(*types.Basic) + basic, ok := j.Pkg.TypesInfo.TypeOf(other).Underlying().(*types.Basic) if !ok || basic.Kind() != types.Bool { - return true + return } op := "" if (expr.Op == token.EQL && !val) || (expr.Op == token.NEQ && val) { @@ -225,44 +216,38 @@ func (c *Checker) LintIfBoolCmp(j *lint.Job) { r = "!" 
+ r } if IsInTest(j, node) { - return true + return } j.Errorf(expr, "should omit comparison to bool constant, can be simplified to %s", r) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintBytesBufferConversions(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok || len(call.Args) != 1 { - return true + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) + if len(call.Args) != 1 { + return } argCall, ok := call.Args[0].(*ast.CallExpr) if !ok { - return true + return } sel, ok := argCall.Fun.(*ast.SelectorExpr) if !ok { - return true + return } - typ := TypeOf(j, call.Fun) + typ := j.Pkg.TypesInfo.TypeOf(call.Fun) if typ == types.Universe.Lookup("string").Type() && IsCallToAST(j, call.Args[0], "(*bytes.Buffer).Bytes") { j.Errorf(call, "should use %v.String() instead of %v", Render(j, sel.X), Render(j, call)) } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && IsCallToAST(j, call.Args[0], "(*bytes.Buffer).String") { j.Errorf(call, "should use %v.Bytes() instead of %v", Render(j, sel.X), Render(j, call)) } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintStringsContains(j *lint.Job) { @@ -271,46 +256,43 @@ func (c *Checker) LintStringsContains(j *lint.Job) { -1: {token.GTR: true, token.NEQ: true, token.EQL: false}, 0: {token.GEQ: true, token.LSS: false}, } - fn := func(node ast.Node) bool { - expr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + expr := node.(*ast.BinaryExpr) switch expr.Op { case token.GEQ, token.GTR, token.NEQ, token.LSS, token.EQL: default: - return true + return } value, ok := ExprToInt(j, expr.Y) if !ok { - return true + return } allowedOps, ok := allowed[value] if !ok { - return true + return } b, ok := allowedOps[expr.Op] if !ok { - return true + return } call, ok := expr.X.(*ast.CallExpr) if !ok { - return true + return } sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { - return true + return } pkgIdent, ok := sel.X.(*ast.Ident) if !ok { - return true + return } funIdent := sel.Sel if pkgIdent.Name != "strings" && pkgIdent.Name != "bytes" { - return true + return } newFunc := "" switch funIdent.Name { @@ -321,7 +303,7 @@ func (c *Checker) LintStringsContains(j *lint.Job) { case "Index": newFunc = "Contains" default: - return true + return } prefix := "" @@ -329,33 +311,26 @@ func (c *Checker) LintStringsContains(j *lint.Job) { prefix = "!" 
} j.Errorf(node, "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, RenderArgs(j, call.Args)) - - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintBytesCompare(j *lint.Job) { - fn := func(node ast.Node) bool { - expr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + expr := node.(*ast.BinaryExpr) if expr.Op != token.NEQ && expr.Op != token.EQL { - return true + return } call, ok := expr.X.(*ast.CallExpr) if !ok { - return true + return } if !IsCallToAST(j, call, "bytes.Compare") { - return true + return } value, ok := ExprToInt(j, expr.Y) if !ok || value != 0 { - return true + return } args := RenderArgs(j, call.Args) prefix := "" @@ -363,70 +338,58 @@ func (c *Checker) LintBytesCompare(j *lint.Job) { prefix = "!" } j.Errorf(node, "should use %sbytes.Equal(%s) instead", prefix, args) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintForTrue(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.ForStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + loop := node.(*ast.ForStmt) if loop.Init != nil || loop.Post != nil { - return true + return } if !IsBoolConst(j, loop.Cond) || !BoolConst(j, loop.Cond) { - return true + return } j.Errorf(loop, "should use for {} instead of for true {}") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) LintRegexpRaw(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if !IsCallToAST(j, call, "regexp.MustCompile") && !IsCallToAST(j, call, "regexp.Compile") { - return true + return } sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { - return true + return } if len(call.Args) != 1 { // invalid function call - return true + return } lit, ok := call.Args[Arg("regexp.Compile.expr")].(*ast.BasicLit) if !ok { // TODO(dominikh): support string concat, maybe support constants - return true + return } if lit.Kind != token.STRING { // invalid function call - return true + return } if lit.Value[0] != '"' { // already a raw string - return true + return } val := lit.Value if !strings.Contains(val, `\\`) { - return true + return } if strings.Contains(val, "`") { - return true + return } bs := false @@ -441,34 +404,28 @@ func (c *Checker) LintRegexpRaw(j *lint.Job) { } if bs { // backslash followed by non-backslash -> escape sequence - return true + return } } j.Errorf(call, "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintIfReturn(j *lint.Job) { - fn := func(node ast.Node) bool { - block, ok := node.(*ast.BlockStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + block := node.(*ast.BlockStmt) l := len(block.List) if l < 2 { - return true + return } n1, n2 := block.List[l-2], block.List[l-1] if len(block.List) >= 3 { if _, ok := block.List[l-3].(*ast.IfStmt); ok { // Do not flag a series of if statements - return true + return } } // if statement with no init, no else, a single condition @@ -476,48 
+433,45 @@ func (c *Checker) LintIfReturn(j *lint.Job) { // statement in the body, that returns a boolean constant ifs, ok := n1.(*ast.IfStmt) if !ok { - return true + return } if ifs.Else != nil || ifs.Init != nil { - return true + return } if len(ifs.Body.List) != 1 { - return true + return } if op, ok := ifs.Cond.(*ast.BinaryExpr); ok { switch op.Op { case token.EQL, token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ: default: - return true + return } } ret1, ok := ifs.Body.List[0].(*ast.ReturnStmt) if !ok { - return true + return } if len(ret1.Results) != 1 { - return true + return } if !IsBoolConst(j, ret1.Results[0]) { - return true + return } ret2, ok := n2.(*ast.ReturnStmt) if !ok { - return true + return } if len(ret2.Results) != 1 { - return true + return } if !IsBoolConst(j, ret2.Results[0]) { - return true + return } j.Errorf(n1, "should use 'return ' instead of 'if { return }; return '") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) } // LintRedundantNilCheckWithLen checks for the following reduntant nil-checks: @@ -538,103 +492,100 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { if !ok { return false, false } - c, ok := ObjectOf(j, id).(*types.Const) + c, ok := j.Pkg.TypesInfo.ObjectOf(id).(*types.Const) if !ok { return false, false } return true, c.Val().Kind() == constant.Int && c.Val().String() == "0" } - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { // check that expr is "x || y" or "x && y" - expr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } + expr := node.(*ast.BinaryExpr) if expr.Op != token.LOR && expr.Op != token.LAND { - return true + return } eqNil := expr.Op == token.LOR // check that x is "xx == nil" or "xx != nil" x, ok := expr.X.(*ast.BinaryExpr) if !ok { - return true + return } if eqNil && x.Op != token.EQL { - return true + return } if !eqNil && x.Op != token.NEQ { - return true + return } xx, ok := x.X.(*ast.Ident) if !ok { - return true + return } if !IsNil(j, x.Y) { - return true + return } // check that y is "len(xx) == 0" or "len(xx) ... 
" y, ok := expr.Y.(*ast.BinaryExpr) if !ok { - return true + return } if eqNil && y.Op != token.EQL { // must be len(xx) *==* 0 - return false + return } yx, ok := y.X.(*ast.CallExpr) if !ok { - return true + return } yxFun, ok := yx.Fun.(*ast.Ident) if !ok || yxFun.Name != "len" || len(yx.Args) != 1 { - return true + return } yxArg, ok := yx.Args[Arg("len.v")].(*ast.Ident) if !ok { - return true + return } if yxArg.Name != xx.Name { - return true + return } if eqNil && !IsZero(y.Y) { // must be len(x) == *0* - return true + return } if !eqNil { isConst, isZero := isConstZero(y.Y) if !isConst { - return true + return } switch y.Op { case token.EQL: // avoid false positive for "xx != nil && len(xx) == 0" if isZero { - return true + return } case token.GEQ: // avoid false positive for "xx != nil && len(xx) >= 0" if isZero { - return true + return } case token.NEQ: // avoid false positive for "xx != nil && len(xx) != " if !isZero { - return true + return } case token.GTR: // ok default: - return true + return } } // finally check that xx type is one of array, slice, map or chan // this is to prevent false positive in case if xx is a pointer to an array var nilType string - switch TypeOf(j, xx).(type) { + switch j.Pkg.TypesInfo.TypeOf(xx).(type) { case *types.Slice: nilType = "nil slices" case *types.Map: @@ -642,50 +593,41 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { case *types.Chan: nilType = "nil channels" default: - return true + return } j.Errorf(expr, "should omit nil check; len() for %s is defined as zero", nilType) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintSlicing(j *lint.Job) { - fn := func(node ast.Node) bool { - n, ok := node.(*ast.SliceExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + n := node.(*ast.SliceExpr) if n.Max != nil { - return true + return } s, ok := n.X.(*ast.Ident) if !ok || s.Obj == nil { - return true + return } call, ok := n.High.(*ast.CallExpr) if !ok || len(call.Args) != 1 || call.Ellipsis.IsValid() { - return true + return } fun, ok := call.Fun.(*ast.Ident) if !ok || fun.Name != "len" { - return true + return } - if _, ok := ObjectOf(j, fun).(*types.Builtin); !ok { - return true + if _, ok := j.Pkg.TypesInfo.ObjectOf(fun).(*types.Builtin); !ok { + return } arg, ok := call.Args[Arg("len.v")].(*ast.Ident) if !ok || arg.Obj != s.Obj { - return true + return } j.Errorf(n, "should omit second index in slice, s[a:len(s)] is identical to s[a:]") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.SliceExpr)(nil)}, fn) } func refersTo(j *lint.Job, expr ast.Expr, ident *ast.Ident) bool { @@ -695,7 +637,7 @@ func refersTo(j *lint.Job, expr ast.Expr, ident *ast.Ident) bool { if !ok { return true } - if ObjectOf(j, ident) == ObjectOf(j, ident2) { + if j.Pkg.TypesInfo.ObjectOf(ident) == j.Pkg.TypesInfo.ObjectOf(ident2) { found = true return false } @@ -706,132 +648,111 @@ func refersTo(j *lint.Job, expr ast.Expr, ident *ast.Ident) bool { } func (c *Checker) LintLoopAppend(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.RangeStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + loop := node.(*ast.RangeStmt) if !IsBlank(loop.Key) { - return true + return } val, ok := loop.Value.(*ast.Ident) if !ok { - return true + return } if len(loop.Body.List) != 1 { - return true + return } stmt, ok := 
loop.Body.List[0].(*ast.AssignStmt) if !ok { - return true + return } if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 { - return true + return } if refersTo(j, stmt.Lhs[0], val) { - return true + return } call, ok := stmt.Rhs[0].(*ast.CallExpr) if !ok { - return true + return } if len(call.Args) != 2 || call.Ellipsis.IsValid() { - return true + return } fun, ok := call.Fun.(*ast.Ident) if !ok { - return true + return } - obj := ObjectOf(j, fun) + obj := j.Pkg.TypesInfo.ObjectOf(fun) fn, ok := obj.(*types.Builtin) if !ok || fn.Name() != "append" { - return true + return } - src := TypeOf(j, loop.X) - dst := TypeOf(j, call.Args[Arg("append.slice")]) + src := j.Pkg.TypesInfo.TypeOf(loop.X) + dst := j.Pkg.TypesInfo.TypeOf(call.Args[Arg("append.slice")]) // TODO(dominikh) remove nil check once Go issue #15173 has // been fixed if src == nil { - return true + return } if !types.Identical(src, dst) { - return true + return } if Render(j, stmt.Lhs[0]) != Render(j, call.Args[Arg("append.slice")]) { - return true + return } el, ok := call.Args[Arg("append.elems")].(*ast.Ident) if !ok { - return true + return } - if ObjectOf(j, val) != ObjectOf(j, el) { - return true + if j.Pkg.TypesInfo.ObjectOf(val) != j.Pkg.TypesInfo.ObjectOf(el) { + return } j.Errorf(loop, "should replace loop with %s = append(%s, %s...)", Render(j, stmt.Lhs[0]), Render(j, call.Args[Arg("append.slice")]), Render(j, loop.X)) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) } func (c *Checker) LintTimeSince(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { - return true + return } if !IsCallToAST(j, sel.X, "time.Now") { - return true + return } if sel.Sel.Name != "Sub" { - return true + return } j.Errorf(call, "should use time.Since instead of time.Now().Sub") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintTimeUntil(j *lint.Job) { if !IsGoVersion(j, 8) { return } - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if !IsCallToAST(j, call, "(time.Time).Sub") { - return true + return } if !IsCallToAST(j, call.Args[Arg("(time.Time).Sub.u")], "time.Now") { - return true + return } j.Errorf(call, "should use time.Until instead of t.Sub(time.Now())") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { fn1 := func(node ast.Node) { - assign, ok := node.(*ast.AssignStmt) - if !ok { - return - } + assign := node.(*ast.AssignStmt) if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 { return } @@ -842,7 +763,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { case *ast.IndexExpr: // The type-checker should make sure that it's a map, but // let's be safe. 
- if _, ok := TypeOf(j, rhs.X).Underlying().(*types.Map); !ok { + if _, ok := j.Pkg.TypesInfo.TypeOf(rhs.X).Underlying().(*types.Map); !ok { return } case *ast.UnaryExpr: @@ -858,10 +779,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { } fn2 := func(node ast.Node) { - stmt, ok := node.(*ast.AssignStmt) - if !ok { - return - } + stmt := node.(*ast.AssignStmt) if len(stmt.Lhs) != len(stmt.Rhs) { return } @@ -882,10 +800,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { } fn3 := func(node ast.Node) { - rs, ok := node.(*ast.RangeStmt) - if !ok { - return - } + rs := node.(*ast.RangeStmt) // for x, _ if !IsBlank(rs.Key) && IsBlank(rs.Value) { @@ -897,45 +812,39 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { } } - fn := func(node ast.Node) bool { - fn1(node) - fn2(node) - if IsGoVersion(j, 4) { - fn3(node) - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn1) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn2) + if IsGoVersion(j, 4) { + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn3) } } func (c *Checker) LintSimplerStructConversion(j *lint.Job) { var skip ast.Node - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { // Do not suggest type conversion between pointers if unary, ok := node.(*ast.UnaryExpr); ok && unary.Op == token.AND { if lit, ok := unary.X.(*ast.CompositeLit); ok { skip = lit } - return true + return } if node == skip { - return true + return } lit, ok := node.(*ast.CompositeLit) if !ok { - return true + return } - typ1, _ := TypeOf(j, lit.Type).(*types.Named) + typ1, _ := j.Pkg.TypesInfo.TypeOf(lit.Type).(*types.Named) if typ1 == nil { - return true + return } s1, ok := typ1.Underlying().(*types.Struct) if !ok { - return true + return } var typ2 *types.Named @@ -949,14 +858,14 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { if !ok { return nil, nil, false } - typ := TypeOf(j, sel.X) + typ := j.Pkg.TypesInfo.TypeOf(sel.X) return typ, ident, typ != nil } if len(lit.Elts) == 0 { - return true + return } if s1.NumFields() != len(lit.Elts) { - return true + return } for i, elt := range lit.Elts { var t types.Type @@ -966,39 +875,39 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { case *ast.SelectorExpr: t, id, ok = getSelType(elt) if !ok { - return true + return } if i >= s1.NumFields() || s1.Field(i).Name() != elt.Sel.Name { - return true + return } case *ast.KeyValueExpr: var sel *ast.SelectorExpr sel, ok = elt.Value.(*ast.SelectorExpr) if !ok { - return true + return } if elt.Key.(*ast.Ident).Name != sel.Sel.Name { - return true + return } t, id, ok = getSelType(elt.Value) } if !ok { - return true + return } // All fields must be initialized from the same object if ident != nil && ident.Obj != id.Obj { - return true + return } typ2, _ = t.(*types.Named) if typ2 == nil { - return true + return } ident = id } if typ2 == nil { - return true + return } if typ1.Obj().Pkg() != typ2.Obj().Pkg() { @@ -1007,32 +916,29 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { // by coincidence. Furthermore, if the dependency ever // adds more fields to its type, it could break the code // that relies on the type conversion to work. 
- return true + return } s2, ok := typ2.Underlying().(*types.Struct) if !ok { - return true + return } if typ1 == typ2 { - return true + return } if IsGoVersion(j, 8) { if !types.IdenticalIgnoreTags(s1, s2) { - return true + return } } else { if !types.Identical(s1, s2) { - return true + return } } j.Errorf(node, "should convert %s (type %s) to %s instead of using struct literal", ident.Name, typ2.Obj().Name(), typ1.Obj().Name()) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil)}, fn) } func (c *Checker) LintTrim(j *lint.Job) { @@ -1066,26 +972,23 @@ func (c *Checker) LintTrim(j *lint.Job) { return sameNonDynamic(call.Args[Arg("len.v")], ident) } - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { var pkg string var fun string - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } + ifstmt := node.(*ast.IfStmt) if ifstmt.Init != nil { - return true + return } if ifstmt.Else != nil { - return true + return } if len(ifstmt.Body.List) != 1 { - return true + return } condCall, ok := ifstmt.Cond.(*ast.CallExpr) if !ok { - return true + return } switch { case IsCallToAST(j, condCall, "strings.HasPrefix"): @@ -1107,27 +1010,27 @@ func (c *Checker) LintTrim(j *lint.Job) { pkg = "bytes" fun = "Contains" default: - return true + return } assign, ok := ifstmt.Body.List[0].(*ast.AssignStmt) if !ok { - return true + return } if assign.Tok != token.ASSIGN { - return true + return } if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { - return true + return } if !sameNonDynamic(condCall.Args[0], assign.Lhs[0]) { - return true + return } switch rhs := assign.Rhs[0].(type) { case *ast.CallExpr: if len(rhs.Args) < 2 || !sameNonDynamic(condCall.Args[0], rhs.Args[0]) || !sameNonDynamic(condCall.Args[1], rhs.Args[1]) { - return true + return } if IsCallToAST(j, condCall, "strings.HasPrefix") && IsCallToAST(j, rhs, "strings.TrimPrefix") || IsCallToAST(j, condCall, "strings.HasSuffix") && IsCallToAST(j, rhs, "strings.TrimSuffix") || @@ -1137,17 +1040,17 @@ func (c *Checker) LintTrim(j *lint.Job) { IsCallToAST(j, condCall, "bytes.Contains") && IsCallToAST(j, rhs, "bytes.Replace") { j.Errorf(ifstmt, "should replace this if statement with an unconditional %s", CallNameAST(j, rhs)) } - return true + return case *ast.SliceExpr: slice := rhs if !ok { - return true + return } if slice.Slice3 { - return true + return } if !sameNonDynamic(slice.X, condCall.Args[0]) { - return true + return } var index ast.Expr switch fun { @@ -1155,14 +1058,14 @@ func (c *Checker) LintTrim(j *lint.Job) { // TODO(dh) We could detect a High that is len(s), but another // rule will already flag that, anyway. 
if slice.High != nil { - return true + return } index = slice.Low case "HasSuffix": if slice.Low != nil { n, ok := ExprToInt(j, slice.Low) if !ok || n != 0 { - return true + return } } index = slice.High @@ -1171,59 +1074,59 @@ func (c *Checker) LintTrim(j *lint.Job) { switch index := index.(type) { case *ast.CallExpr: if fun != "HasPrefix" { - return true + return } if fn, ok := index.Fun.(*ast.Ident); !ok || fn.Name != "len" { - return true + return } if len(index.Args) != 1 { - return true + return } id3 := index.Args[Arg("len.v")] switch oid3 := condCall.Args[1].(type) { case *ast.BasicLit: if pkg != "strings" { - return false + return } lit, ok := id3.(*ast.BasicLit) if !ok { - return true + return } s1, ok1 := ExprToString(j, lit) s2, ok2 := ExprToString(j, condCall.Args[1]) if !ok1 || !ok2 || s1 != s2 { - return true + return } default: if !sameNonDynamic(id3, oid3) { - return true + return } } case *ast.BasicLit, *ast.Ident: if fun != "HasPrefix" { - return true + return } if pkg != "strings" { - return true + return } string, ok1 := ExprToString(j, condCall.Args[1]) int, ok2 := ExprToInt(j, slice.Low) if !ok1 || !ok2 || int != int64(len(string)) { - return true + return } case *ast.BinaryExpr: if fun != "HasSuffix" { - return true + return } if index.Op != token.SUB { - return true + return } if !isLenOnIdent(index.X, condCall.Args[0]) || !isLenOnIdent(index.Y, condCall.Args[1]) { - return true + return } default: - return true + return } var replacement string @@ -1234,14 +1137,9 @@ func (c *Checker) LintTrim(j *lint.Job) { replacement = "TrimSuffix" } j.Errorf(ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement) - return true - default: - return true } } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) } func (c *Checker) LintLoopSlide(j *lint.Job) { @@ -1250,7 +1148,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { // TODO(dh): detect length that is an expression, not a variable name // TODO(dh): support sliding to a different offset than the beginning of the slice - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { /* for i := 0; i < n; i++ { bs[i] = bs[offset+i] @@ -1261,108 +1159,102 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { copy(bs[:n], bs[offset:offset+n]) */ - loop, ok := node.(*ast.ForStmt) - if !ok || len(loop.Body.List) != 1 || loop.Init == nil || loop.Cond == nil || loop.Post == nil { - return true + loop := node.(*ast.ForStmt) + if len(loop.Body.List) != 1 || loop.Init == nil || loop.Cond == nil || loop.Post == nil { + return } assign, ok := loop.Init.(*ast.AssignStmt) if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || !IsZero(assign.Rhs[0]) { - return true + return } initvar, ok := assign.Lhs[0].(*ast.Ident) if !ok { - return true + return } post, ok := loop.Post.(*ast.IncDecStmt) if !ok || post.Tok != token.INC { - return true + return } postvar, ok := post.X.(*ast.Ident) - if !ok || ObjectOf(j, postvar) != ObjectOf(j, initvar) { - return true + if !ok || j.Pkg.TypesInfo.ObjectOf(postvar) != j.Pkg.TypesInfo.ObjectOf(initvar) { + return } bin, ok := loop.Cond.(*ast.BinaryExpr) if !ok || bin.Op != token.LSS { - return true + return } binx, ok := bin.X.(*ast.Ident) - if !ok || ObjectOf(j, binx) != ObjectOf(j, initvar) { - return true + if !ok || j.Pkg.TypesInfo.ObjectOf(binx) != j.Pkg.TypesInfo.ObjectOf(initvar) { + return } biny, ok := bin.Y.(*ast.Ident) if !ok { - return true + return } assign, ok = 
loop.Body.List[0].(*ast.AssignStmt) if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || assign.Tok != token.ASSIGN { - return true + return } lhs, ok := assign.Lhs[0].(*ast.IndexExpr) if !ok { - return true + return } rhs, ok := assign.Rhs[0].(*ast.IndexExpr) if !ok { - return true + return } bs1, ok := lhs.X.(*ast.Ident) if !ok { - return true + return } bs2, ok := rhs.X.(*ast.Ident) if !ok { - return true + return } - obj1 := ObjectOf(j, bs1) - obj2 := ObjectOf(j, bs2) + obj1 := j.Pkg.TypesInfo.ObjectOf(bs1) + obj2 := j.Pkg.TypesInfo.ObjectOf(bs2) if obj1 != obj2 { - return true + return } if _, ok := obj1.Type().Underlying().(*types.Slice); !ok { - return true + return } index1, ok := lhs.Index.(*ast.Ident) - if !ok || ObjectOf(j, index1) != ObjectOf(j, initvar) { - return true + if !ok || j.Pkg.TypesInfo.ObjectOf(index1) != j.Pkg.TypesInfo.ObjectOf(initvar) { + return } index2, ok := rhs.Index.(*ast.BinaryExpr) if !ok || index2.Op != token.ADD { - return true + return } add1, ok := index2.X.(*ast.Ident) if !ok { - return true + return } add2, ok := index2.Y.(*ast.Ident) - if !ok || ObjectOf(j, add2) != ObjectOf(j, initvar) { - return true + if !ok || j.Pkg.TypesInfo.ObjectOf(add2) != j.Pkg.TypesInfo.ObjectOf(initvar) { + return } j.Errorf(loop, "should use copy(%s[:%s], %s[%s:]) instead", Render(j, bs1), Render(j, biny), Render(j, bs1), Render(j, add1)) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) LintMakeLenCap(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if fn, ok := call.Fun.(*ast.Ident); !ok || fn.Name != "make" { // FIXME check whether make is indeed the built-in function - return true + return } switch len(call.Args) { case 2: // make(T, len) - if _, ok := TypeOf(j, call.Args[Arg("make.t")]).Underlying().(*types.Slice); ok { + if _, ok := j.Pkg.TypesInfo.TypeOf(call.Args[Arg("make.t")]).Underlying().(*types.Slice); ok { break } if IsZero(call.Args[Arg("make.size[0]")]) { @@ -1376,11 +1268,8 @@ func (c *Checker) LintMakeLenCap(j *lint.Job) { Render(j, call.Args[Arg("make.t")]), Render(j, call.Args[Arg("make.size[0]")])) } } - return false - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintAssertNotNil(j *lint.Job) { @@ -1405,116 +1294,121 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { } return true } - fn1 := func(node ast.Node) bool { - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } + fn1 := func(node ast.Node) { + ifstmt := node.(*ast.IfStmt) assign, ok := ifstmt.Init.(*ast.AssignStmt) if !ok || len(assign.Lhs) != 2 || len(assign.Rhs) != 1 || !IsBlank(assign.Lhs[0]) { - return true + return } assert, ok := assign.Rhs[0].(*ast.TypeAssertExpr) if !ok { - return true + return } binop, ok := ifstmt.Cond.(*ast.BinaryExpr) if !ok || binop.Op != token.LAND { - return true + return } assertIdent, ok := assert.X.(*ast.Ident) if !ok { - return true + return } assignIdent, ok := assign.Lhs[1].(*ast.Ident) if !ok { - return true + return } if !(isNilCheck(assertIdent, binop.X) && isOKCheck(assignIdent, binop.Y)) && !(isNilCheck(assertIdent, binop.Y) && isOKCheck(assignIdent, binop.X)) { - return true + return } j.Errorf(ifstmt, "when %s is true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) - return 
true } - fn2 := func(node ast.Node) bool { + fn2 := func(node ast.Node) { // Check that outer ifstmt is an 'if x != nil {}' - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } + ifstmt := node.(*ast.IfStmt) if ifstmt.Init != nil { - return true + return } if ifstmt.Else != nil { - return true + return } if len(ifstmt.Body.List) != 1 { - return true + return } binop, ok := ifstmt.Cond.(*ast.BinaryExpr) if !ok { - return true + return } if binop.Op != token.NEQ { - return true + return } lhs, ok := binop.X.(*ast.Ident) if !ok { - return true + return } if !IsNil(j, binop.Y) { - return true + return } // Check that inner ifstmt is an `if _, ok := x.(T); ok {}` ifstmt, ok = ifstmt.Body.List[0].(*ast.IfStmt) if !ok { - return true + return } assign, ok := ifstmt.Init.(*ast.AssignStmt) if !ok || len(assign.Lhs) != 2 || len(assign.Rhs) != 1 || !IsBlank(assign.Lhs[0]) { - return true + return } assert, ok := assign.Rhs[0].(*ast.TypeAssertExpr) if !ok { - return true + return } assertIdent, ok := assert.X.(*ast.Ident) if !ok { - return true + return } if lhs.Obj != assertIdent.Obj { - return true + return } assignIdent, ok := assign.Lhs[1].(*ast.Ident) if !ok { - return true + return } if !isOKCheck(assignIdent, ifstmt.Cond) { - return true + return } j.Errorf(ifstmt, "when %s is true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) - return true - } - fn := func(node ast.Node) bool { - b1 := fn1(node) - b2 := fn2(node) - return b1 || b2 - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn1) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn2) } func (c *Checker) LintDeclareAssign(j *lint.Job) { - fn := func(node ast.Node) bool { - block, ok := node.(*ast.BlockStmt) - if !ok { + hasMultipleAssignments := func(root ast.Node, ident *ast.Ident) bool { + num := 0 + ast.Inspect(root, func(node ast.Node) bool { + if num >= 2 { + return false + } + assign, ok := node.(*ast.AssignStmt) + if !ok { + return true + } + for _, lhs := range assign.Lhs { + if oident, ok := lhs.(*ast.Ident); ok { + if oident.Obj == ident.Obj { + num++ + } + } + } + return true - } + }) + return num >= 2 + } + fn := func(node ast.Node) { + block := node.(*ast.BlockStmt) if len(block.List) < 2 { - return true + return } for i, stmt := range block.List[:len(block.List)-1] { _ = i @@ -1549,21 +1443,19 @@ func (c *Checker) LintDeclareAssign(j *lint.Job) { if refersTo(j, assign.Rhs[0], ident) { continue } + if hasMultipleAssignments(block, ident) { + continue + } + j.Errorf(decl, "should merge variable declaration with assignment on next line") } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) } func (c *Checker) LintRedundantBreak(j *lint.Job) { fn1 := func(node ast.Node) { - clause, ok := node.(*ast.CaseClause) - if !ok { - return - } + clause := node.(*ast.CaseClause) if len(clause.Body) < 2 { return } @@ -1584,7 +1476,7 @@ func (c *Checker) LintRedundantBreak(j *lint.Job) { ret = x.Type.Results body = x.Body default: - return + panic(fmt.Sprintf("unreachable: %T", node)) } // if the func has results, a return can't be redundant. // similarly, if there are no statements, there can be @@ -1600,66 +1492,52 @@ func (c *Checker) LintRedundantBreak(j *lint.Job) { // checked x.Type.Results to be nil. 
j.Errorf(rst, "redundant return statement") } - fn := func(node ast.Node) bool { - fn1(node) - fn2(node) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CaseClause)(nil)}, fn1) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn2) } -func (c *Checker) Implements(j *lint.Job, typ types.Type, iface string) bool { - // OPT(dh): we can cache the type lookup - idx := strings.IndexRune(iface, '.') - var scope *types.Scope - var ifaceName string - if idx == -1 { - scope = types.Universe - ifaceName = iface - } else { - pkgName := iface[:idx] - pkg := j.Program.Package(pkgName) - if pkg == nil { - return false - } - scope = pkg.Types.Scope() - ifaceName = iface[idx+1:] - } - - obj := scope.Lookup(ifaceName) - if obj == nil { +func isStringer(T types.Type) bool { + ms := types.NewMethodSet(T) + sel := ms.Lookup(nil, "String") + if sel == nil { return false } - i, ok := obj.Type().Underlying().(*types.Interface) + fn, ok := sel.Obj().(*types.Func) if !ok { + // should be unreachable return false } - return types.Implements(typ, i) + sig := fn.Type().(*types.Signature) + if sig.Params().Len() != 0 { + return false + } + if sig.Results().Len() != 1 { + return false + } + if !IsType(sig.Results().At(0).Type(), "string") { + return false + } + return true } func (c *Checker) LintRedundantSprintf(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if !IsCallToAST(j, call, "fmt.Sprintf") { - return true + return } if len(call.Args) != 2 { - return true + return } if s, ok := ExprToString(j, call.Args[Arg("fmt.Sprintf.format")]); !ok || s != "%s" { - return true + return } arg := call.Args[Arg("fmt.Sprintf.a[0]")] - typ := TypeOf(j, arg) + typ := j.Pkg.TypesInfo.TypeOf(arg) - if c.Implements(j, typ, "fmt.Stringer") { + if isStringer(typ) { j.Errorf(call, "should use String() instead of fmt.Sprintf") - return true + return } if typ.Underlying() == types.Universe.Lookup("string").Type() { @@ -1669,28 +1547,22 @@ func (c *Checker) LintRedundantSprintf(j *lint.Job) { j.Errorf(call, "the argument's underlying type is a string, should use a simple conversion instead of fmt.Sprintf") } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintErrorsNewSprintf(j *lint.Job) { - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { if !IsCallToAST(j, node, "errors.New") { - return true + return } call := node.(*ast.CallExpr) if !IsCallToAST(j, call.Args[Arg("errors.New.text")], "fmt.Sprintf") { - return true + return } j.Errorf(node, "should use fmt.Errorf(...) 
instead of errors.New(fmt.Sprintf(...))") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintRangeStringRunes(j *lint.Job) { @@ -1698,45 +1570,38 @@ func (c *Checker) LintRangeStringRunes(j *lint.Job) { } func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { - fn := func(node ast.Node) bool { - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } - + fn := func(node ast.Node) { + ifstmt := node.(*ast.IfStmt) cond, ok := ifstmt.Cond.(*ast.BinaryExpr) if !ok { - return true + return } if cond.Op != token.NEQ || !IsNil(j, cond.Y) || len(ifstmt.Body.List) != 1 { - return true + return } loop, ok := ifstmt.Body.List[0].(*ast.RangeStmt) if !ok { - return true + return } ifXIdent, ok := cond.X.(*ast.Ident) if !ok { - return true + return } rangeXIdent, ok := loop.X.(*ast.Ident) if !ok { - return true + return } if ifXIdent.Obj != rangeXIdent.Obj { - return true + return } - switch TypeOf(j, rangeXIdent).(type) { + switch j.Pkg.TypesInfo.TypeOf(rangeXIdent).(type) { case *types.Slice, *types.Map: j.Errorf(node, "unnecessary nil check around range") } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) } func isPermissibleSort(j *lint.Job, node ast.Node) bool { @@ -1761,7 +1626,7 @@ func isPermissibleSort(j *lint.Job, node ast.Node) bool { } func (c *Checker) LintSortHelpers(j *lint.Job) { - fnFuncs := func(node ast.Node) bool { + fn := func(node ast.Node) { var body *ast.BlockStmt switch node := node.(type) { case *ast.FuncLit: @@ -1769,10 +1634,10 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { case *ast.FuncDecl: body = node.Body default: - return true + panic(fmt.Sprintf("unreachable: %T", node)) } if body == nil { - return true + return } type Error struct { @@ -1810,17 +1675,14 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { ast.Inspect(body, fnSorts) if permissible { - return false + return } for _, err := range errors { j.Errorf(err.node, "%s", err.msg) } - return false - } - - for _, f := range j.Program.Files { - ast.Inspect(f, fnFuncs) + return } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn) } func (c *Checker) LintGuardedDelete(j *lint.Job) { @@ -1845,73 +1707,64 @@ func (c *Checker) LintGuardedDelete(j *lint.Job) { if !ok { return nil, nil, nil, false } - if _, ok := TypeOf(j, index.X).(*types.Map); !ok { + if _, ok := j.Pkg.TypesInfo.TypeOf(index.X).(*types.Map); !ok { return nil, nil, nil, false } key = index.Index return ident, index.X, key, true } - fn := func(node ast.Node) bool { - stmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + stmt := node.(*ast.IfStmt) if len(stmt.Body.List) != 1 { - return true + return } if stmt.Else != nil { - return true + return } expr, ok := stmt.Body.List[0].(*ast.ExprStmt) if !ok { - return true + return } call, ok := expr.X.(*ast.CallExpr) if !ok { - return true + return } if !IsCallToAST(j, call, "delete") { - return true + return } b, m, key, ok := isCommaOkMapIndex(stmt.Init) if !ok { - return true + return } - if cond, ok := stmt.Cond.(*ast.Ident); !ok || ObjectOf(j, cond) != ObjectOf(j, b) { - return true + if cond, ok := stmt.Cond.(*ast.Ident); !ok || j.Pkg.TypesInfo.ObjectOf(cond) != j.Pkg.TypesInfo.ObjectOf(b) { + return } if Render(j, call.Args[0]) != Render(j, m) || Render(j, call.Args[1]) != Render(j, key) { - return true + return } 
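The shape recognized above is, roughly (m and k stand for any map expression and key):

    if _, ok := m[k]; ok {
        delete(m, k)
    }

Deleting a key that is not present in the map is a no-op, so the guard adds nothing and the body can be reduced to a plain delete(m, k).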
j.Errorf(stmt, "unnecessary guard around call to delete") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) } func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { - fn := func(node ast.Node) bool { - stmt, ok := node.(*ast.TypeSwitchStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + stmt := node.(*ast.TypeSwitchStmt) if stmt.Init != nil { // bailing out for now, can't anticipate how type switches with initializers are being used - return true + return } expr, ok := stmt.Assign.(*ast.ExprStmt) if !ok { // the user is in fact assigning the result - return true + return } assert := expr.X.(*ast.TypeAssertExpr) ident, ok := assert.X.(*ast.Ident) if !ok { - return true + return } - x := ObjectOf(j, ident) + x := j.Pkg.TypesInfo.ObjectOf(ident) var allOffenders []ast.Node for _, clause := range stmt.Body.List { clause := clause.(*ast.CaseClause) @@ -1930,12 +1783,12 @@ func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { hasUnrelatedAssertion = true return false } - if ObjectOf(j, ident) != x { + if j.Pkg.TypesInfo.ObjectOf(ident) != x { hasUnrelatedAssertion = true return false } - if !types.Identical(TypeOf(j, clause.List[0]), TypeOf(j, assert2.Type)) { + if !types.Identical(j.Pkg.TypesInfo.TypeOf(clause.List[0]), j.Pkg.TypesInfo.TypeOf(assert2.Type)) { hasUnrelatedAssertion = true return false } @@ -1954,14 +1807,11 @@ func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { if len(allOffenders) != 0 { at := "" for _, offender := range allOffenders { - pos := j.Program.DisplayPosition(offender.Pos()) + pos := lint.DisplayPosition(j.Pkg.Fset, offender.Pos()) at += "\n\t" + pos.String() } j.Errorf(expr, "assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate the following type assertions:%s", Render(j, ident), Render(j, ident), at) } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) } diff --git a/vendor/honnef.co/go/tools/ssautil/ssautil.go b/vendor/honnef.co/go/tools/ssautil/ssautil.go index a18f849e..72c3c919 100644 --- a/vendor/honnef.co/go/tools/ssautil/ssautil.go +++ b/vendor/honnef.co/go/tools/ssautil/ssautil.go @@ -39,3 +39,20 @@ func Walk(b *ssa.BasicBlock, fn func(*ssa.BasicBlock) bool) { wl = append(wl, b.Succs...) } } + +func Vararg(x *ssa.Slice) ([]ssa.Value, bool) { + var out []ssa.Value + slice, ok := x.X.(*ssa.Alloc) + if !ok || slice.Comment != "varargs" { + return nil, false + } + for _, ref := range *slice.Referrers() { + idx, ok := ref.(*ssa.IndexAddr) + if !ok { + continue + } + v := (*idx.Referrers())[0].(*ssa.Store).Val + out = append(out, v) + } + return out, true +} diff --git a/vendor/honnef.co/go/tools/staticcheck/doc.go b/vendor/honnef.co/go/tools/staticcheck/doc.go new file mode 100644 index 00000000..07a39ef4 --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/doc.go @@ -0,0 +1,797 @@ +package staticcheck + +var docSA1000 = `Invalid regular expression + +Available since + 2017.1 +` + +var docSA1001 = `Invalid template + +Available since + 2017.1 +` + +var docSA1002 = `Invalid format in time.Parse + +Available since + 2017.1 +` + +var docSA1003 = `Unsupported argument to functions in encoding/binary + +The encoding/binary package can only serialize types with known sizes. +This precludes the use of the 'int' and 'uint' types, as their sizes +differ on different architectures. 
Furthermore, it doesn't support +serializing maps, channels, strings, or functions. + +Before Go 1.8, bool wasn't supported, either. + +Available since + 2017.1 +` + +var docSA1004 = `Suspiciously small untyped constant in time.Sleep + +The time.Sleep function takes a time.Duration as its only argument. +Durations are expressed in nanoseconds. Thus, calling time.Sleep(1) +will sleep for 1 nanosecond. This is a common source of bugs, as sleep +functions in other languages often accept seconds or milliseconds. + +The time package provides constants such as time.Second to express +large durations. These can be combined with arithmetic to express +arbitrary durations, for example '5 * time.Second' for 5 seconds. + +If you truly meant to sleep for a tiny amount of time, use +'n * time.Nanosecond" to signal to staticcheck that you did mean to sleep +for some amount of nanoseconds. + +Available since + 2017.1 +` + +var docSA1005 = `Invalid first argument to exec.Command + +os/exec runs programs directly (using variants of the fork and exec +system calls on Unix systems). This shouldn't be confused with running +a command in a shell. The shell will allow for features such as input +redirection, pipes, and general scripting. The shell is also +responsible for splitting the user's input into a program name and its +arguments. For example, the equivalent to + + ls / /tmp + +would be + + exec.Command("ls", "/", "/tmp") + +If you want to run a command in a shell, consider using something like +the following – but be aware that not all systems, particularly +Windows, will have a /bin/sh program: + + exec.Command("/bin/sh", "-c", "ls | grep Awesome") + +Available since + 2017.1 +` + +var docSA1006 = `Printf with dynamic first argument and no further arguments + +Using fmt.Printf with a dynamic first argument can lead to unexpected +output. The first argument is a format string, where certain character +combinations have special meaning. If, for example, a user were to +enter a string such as + + Interest rate: 5% + +and you printed it with + +fmt.Printf(s) + +it would lead to the following output: + + Interest rate: 5%!(NOVERB). + +Similarly, forming the first parameter via string concatenation with +user input should be avoided for the same reason. When printing user +input, either use a variant of fmt.Print, or use the %s Printf verb +and pass the string as an argument. + +Available since + 2017.1 +` + +var docSA1007 = `Invalid URL in net/url.Parse + +Available since + 2017.1 +` + +var docSA1008 = `Non-canonical key in http.Header map + +Available since + 2017.1 +` + +var docSA1010 = `(*regexp.Regexp).FindAll called with n == 0, which will always return zero results + +If n >= 0, the function returns at most n matches/submatches. To +return all results, specify a negative number. + +Available since + 2017.1 +` + +var docSA1011 = `Various methods in the strings package expect valid UTF-8, but invalid input is provided + +Available since + 2017.1 +` + +var docSA1012 = `A nil context.Context is being passed to a function, consider using context.TODO instead + +Available since + 2017.1 +` + +var docSA1013 = `io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second + +Available since + 2017.1 +` + +var docSA1014 = `Non-pointer value passed to Unmarshal or Decode + +Available since + 2017.1 +` + +var docSA1015 = `Using time.Tick in a way that will leak. 
Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions
+
+Available since
+ 2017.1
+`
+
+var docSA1016 = `Trapping a signal that cannot be trapped
+
+Not all signals can be intercepted by a process. Specifically, on
+UNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are
+never passed to the process, but instead handled directly by the
+kernel. It is therefore pointless to try and handle these signals.
+
+Available since
+ 2017.1
+`
+
+var docSA1017 = `Channels used with os/signal.Notify should be buffered
+
+The os/signal package uses non-blocking channel sends when delivering
+signals. If the receiving end of the channel isn't ready and the
+channel is either unbuffered or full, the signal will be dropped. To
+avoid missing signals, the channel should be buffered and of the
+appropriate size. For a channel used for notification of just one
+signal value, a buffer of size 1 is sufficient.
+
+
+Available since
+ 2017.1
+`
+
+var docSA1018 = `strings.Replace called with n == 0, which does nothing
+
+With n == 0, zero instances will be replaced. To replace all
+instances, use a negative number, or use strings.ReplaceAll.
+
+Available since
+ 2017.1
+`
+
+var docSA1019 = `Using a deprecated function, variable, constant or field
+
+Available since
+ 2017.1
+`
+
+var docSA1020 = `Using an invalid host:port pair with a net.Listen-related function
+
+Available since
+ 2017.1
+`
+
+var docSA1021 = `Using bytes.Equal to compare two net.IP
+
+A net.IP stores an IPv4 or IPv6 address as a slice of bytes. The
+length of the slice for an IPv4 address, however, can be either 4 or
+16 bytes long, using different ways of representing IPv4 addresses. In
+order to correctly compare two net.IPs, the net.IP.Equal method should
+be used, as it takes both representations into account.
+
+Available since
+ 2017.1
+`
+
+var docSA1023 = `Modifying the buffer in an io.Writer implementation
+
+Write must not modify the slice data, even temporarily.
+
+Available since
+ 2017.1
+`
+
+var docSA1024 = `A string cutset contains duplicate characters, suggesting TrimPrefix or TrimSuffix should be used instead of TrimLeft or TrimRight
+
+Available since
+ 2017.1
+`
+
+var docSA1025 = `It is not possible to use Reset's return value correctly
+
+Available since
+ 2019.1
+`
+
+var docSA1026 = `Cannot marshal channels or functions
+
+Available since
+ Unreleased
+`
+
+var docSA1027 = `Atomic access to 64-bit variable must be 64-bit aligned
+
+On ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to
+arrange for 64-bit alignment of 64-bit words accessed atomically. The
+first word in a variable or in an allocated struct, array, or slice
+can be relied upon to be 64-bit aligned.
+
+You can use the structlayout tool to inspect the alignment of fields
+in a struct.
+
+Available since
+ Unreleased
+`
+
+var docSA2000 = `sync.WaitGroup.Add called inside the goroutine, leading to a race condition
+
+Available since
+ 2017.1
+`
+
+var docSA2001 = `Empty critical section, did you mean to defer the unlock?
+
+Available since
+ 2017.1
+`
+
+var docSA2002 = `Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed
+
+Available since
+ 2017.1
+`
+
+var docSA2003 = `Deferred Lock right after locking, likely meant to defer Unlock instead
+
+Available since
+ 2017.1
+`
+
+var docSA3000 = `TestMain doesn't call os.Exit, hiding test failures
+
+Test executables (and in turn 'go test') exit with a non-zero status
+code if any tests failed.
When specifying your own TestMain function, +it is your responsibility to arrange for this, by calling os.Exit with +the correct code. The correct code is returned by (*testing.M).Run, so +the usual way of implementing TestMain is to end it with +os.Exit(m.Run()). + +Available since + 2017.1 +` + +var docSA3001 = `Assigning to b.N in benchmarks distorts the results + +The testing package dynamically sets b.N to improve the reliability of +benchmarks and uses it in computations to determine the duration of a +single operation. Benchmark code must not alter b.N as this would +falsify results. + +Available since + 2017.1 +` + +var docSA4000 = `Boolean expression has identical expressions on both sides + +Available since + 2017.1 +` + +var docSA4001 = `&*x gets simplified to x, it does not copy x + +Available since + 2017.1 +` + +var docSA4002 = `Comparing strings with known different sizes has predictable results + +Available since + 2017.1 +` + +var docSA4003 = `Comparing unsigned values against negative values is pointless + +Available since + 2017.1 +` + +var docSA4004 = `The loop exits unconditionally after one iteration + +Available since + 2017.1 +` + +var docSA4005 = `Field assignment that will never be observed. Did you mean to use a pointer receiver? + +Available since + 2017.1 +` + +var docSA4006 = `A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code? + +Available since + 2017.1 +` + +var docSA4008 = `The variable in the loop condition never changes, are you incrementing the wrong variable? + +Available since + 2017.1 +` + +var docSA4009 = `A function argument is overwritten before its first use + +Available since + 2017.1 +` + +var docSA4010 = `The result of append will never be observed anywhere + +Available since + 2017.1 +` + +var docSA4011 = `Break statement with no effect. Did you mean to break out of an outer loop? + +Available since + 2017.1 +` + +var docSA4012 = `Comparing a value against NaN even though no value is equal to NaN + +Available since + 2017.1 +` + +var docSA4013 = `Negating a boolean twice (!!b) is the same as writing b. This is either redundant, or a typo. + +Available since + 2017.1 +` + +var docSA4014 = `An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either + +Available since + 2017.1 +` + +var docSA4015 = `Calling functions like math.Ceil on floats converted from integers doesn't do anything useful + +Available since + 2017.1 +` + +var docSA4016 = `Certain bitwise operations, such as x ^ 0, do not do anything useful + +Available since + 2017.1 +` + +var docSA4017 = `A pure function's return value is discarded, making the call pointless + +Available since + 2017.1 +` + +var docSA4018 = `Self-assignment of variables + +Available since + 2017.1 +` + +var docSA4019 = `Multiple, identical build constraints in the same file + +Available since + 2017.1 +` + +var docSA4020 = `Unreachable case clause in a type switch + +In a type switch like the following + + type T struct{} + func (T) Read(b []byte) (int, error) { return 0, nil } + + var v interface{} = T{} + + switch v.(type) { + case io.Reader: + // ... + case T: + // unreachable + } + +the second case clause can never be reached because T implements +io.Reader and case clauses are evaluated in source order. 
+
+Another example:
+
+ type T struct{}
+ func (T) Read(b []byte) (int, error) { return 0, nil }
+ func (T) Close() error { return nil }
+
+ var v interface{} = T{}
+
+ switch v.(type) {
+ case io.Reader:
+ // ...
+ case io.ReadCloser:
+ // unreachable
+ }
+
+Even though T has a Close method and thus implements io.ReadCloser,
+io.Reader will always match first. The method set of io.Reader is a
+subset of io.ReadCloser. Thus it is impossible to match the second
+case without matching the first case.
+
+
+Structurally equivalent interfaces
+
+A special case of the previous example is structurally identical
+interfaces. Given these declarations
+
+ type T error
+ type V error
+
+ func doSomething() error {
+ err, ok := doAnotherThing()
+ if ok {
+ return T(err)
+ }
+
+ return V(err)
+ }
+
+the following type switch will have an unreachable case clause:
+
+ switch doSomething().(type) {
+ case T:
+ // ...
+ case V:
+ // unreachable
+ }
+
+T will always match before V because they are structurally equivalent
+and therefore doSomething()'s return value implements both.
+
+Available since
+ Unreleased
+`
+
+var docSA4021 = `x = append(y) is equivalent to x = y
+
+Available since
+ Unreleased
+`
+
+var docSA5000 = `Assignment to nil map
+
+Available since
+ 2017.1
+`
+
+var docSA5001 = `Deferring Close before checking for a possible error
+
+Available since
+ 2017.1
+`
+
+var docSA5002 = `The empty for loop (for {}) spins and can block the scheduler
+
+Available since
+ 2017.1
+`
+
+var docSA5003 = `Defers in infinite loops will never execute
+
+Defers are scoped to the surrounding function, not the surrounding
+block. In a function that never returns, i.e. one containing an
+infinite loop, defers will never execute.
+
+Available since
+ 2017.1
+`
+
+var docSA5004 = `for { select { ... with an empty default branch spins
+
+Available since
+ 2017.1
+`
+
+var docSA5005 = `The finalizer references the finalized object, preventing garbage collection
+
+A finalizer is a function associated with an object that runs when the
+garbage collector is ready to collect said object, that is when the
+object is no longer referenced by anything.
+
+If the finalizer references the object, however, it will always remain
+as the final reference to that object, preventing the garbage
+collector from collecting the object. The finalizer will never run,
+and the object will never be collected, leading to a memory leak. That
+is why the finalizer should instead use its first argument to operate
+on the object. That way, the number of references can temporarily go
+to zero before the object is being passed to the finalizer.
+
+Available since
+ 2017.1
+`
+
+var docSA5006 = `Slice index out of bounds
+
+Available since
+ 2017.1
+`
+
+var docSA5007 = `Infinite recursive call
+
+A function that calls itself recursively needs to have an exit
+condition. Otherwise it will recurse forever, until the system runs
+out of memory.
+
+This issue can be caused by simple bugs such as forgetting to add an
+exit condition. It can also happen "on purpose". Some languages have
+tail call optimization which makes certain infinite recursive calls
+safe to use. Go, however, does not implement TCO, and as such a loop
+should be used instead.
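As a rough illustration (hypothetical functions, not taken from any checked package):

    func spin() {
        spin() // no exit condition: recurses until the stack limit is hit
    }

    func countdown(n int) {
        if n == 0 { // exit condition
            return
        }
        countdown(n - 1)
    }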
+ +Available since + 2017.1 +` + +var docSA6000 = `Using regexp.Match or related in a loop, should use regexp.Compile + +Available since + 2017.1 +` + +var docSA6001 = `Missing an optimization opportunity when indexing maps by byte slices + +Map keys must be comparable, which precludes the use of byte slices. +This usually leads to using string keys and converting byte slices to +strings. + +Normally, a conversion of a byte slice to a string needs to copy the data and +causes allocations. The compiler, however, recognizes m[string(b)] and +uses the data of b directly, without copying it, because it knows that +the data can't change during the map lookup. This leads to the +counter-intuitive situation that + + k := string(b) + println(m[k]) + println(m[k]) + +will be less efficient than + + println(m[string(b)]) + println(m[string(b)]) + +because the first version needs to copy and allocate, while the second +one does not. + +For some history on this optimization, check out commit +f5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository. + +Available since + 2017.1 +` + +var docSA6002 = `Storing non-pointer values in sync.Pool allocates memory + +A sync.Pool is used to avoid unnecessary allocations and reduce the +amount of work the garbage collector has to do. + +When passing a value that is not a pointer to a function that accepts +an interface, the value needs to be placed on the heap, which means an +additional allocation. Slices are a common thing to put in sync.Pools, +and they're structs with 3 fields (length, capacity, and a pointer to +an array). In order to avoid the extra allocation, one should store a +pointer to the slice instead. + +See the comments on https://go-review.googlesource.com/c/go/+/24371 +that discuss this problem. + +Available since + 2017.1 +` + +var docSA6003 = `Converting a string to a slice of runes before ranging over it + +You may want to loop over the runes in a string. Instead of converting +the string to a slice of runes and looping over that, you can loop +over the string itself. That is, + + for _, r := range s {} + +and + + for _, r := range []rune(s) {} + +will yield the same values. The first version, however, will be faster +and avoid unnecessary memory allocations. + +Do note that if you are interested in the indices, ranging over a +string and over a slice of runes will yield different indices. The +first one yields byte offsets, while the second one yields indices in +the slice of runes. + +Available since + 2017.1 +` + +var docSA6005 = `Inefficient string comparison with strings.ToLower or strings.ToUpper + +Converting two strings to the same case and comparing them like so + + if strings.ToLower(s1) == strings.ToLower(s2) { + ... + } + +is significantly more expensive than comparing them with +strings.EqualFold(s1, s2). This is due to memory usage as well as +computational complexity. + +strings.ToLower will have to allocate memory for the new strings, as +well as convert both strings fully, even if they differ on the very +first byte. strings.EqualFold, on the other hand, compares the strings +one character at a time. It doesn't need to create two intermediate +strings and can return as soon as the first non-matching character has +been found. 
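A minimal sketch of the preferred form:

    if strings.EqualFold(s1, s2) {
        // ...
    }

strings.EqualFold reports whether the two strings are equal under Unicode case-folding; it allocates no intermediate strings and stops at the first mismatching character.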
+ +For a more in-depth explanation of this issue, see +https://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/ + +Available since + Unreleased +` + +var docSA9001 = `Defers in 'for range' loops may not run when you expect them to + +Available since + 2017.1 +` + +var docSA9002 = `Using a non-octal os.FileMode that looks like it was meant to be in octal. + +Available since + 2017.1 +` + +var docSA9003 = `Empty body in an if or else branch + +Available since + 2017.1 +` + +var docSA9004 = `Only the first constant has an explicit type + +In a constant declaration such as the following: + + const ( + First byte = 1 + Second = 2 + ) + +the constant Second does not have the same type as the constant First. +This construct shouldn't be confused with + + const ( + First byte = iota + Second + ) + +where First and Second do indeed have the same type. The type is only +passed on when no explicit value is assigned to the constant. + +When declaring enumerations with explicit values it is therefore +important not to write + + const ( + EnumFirst EnumType = 1 + EnumSecond = 2 + EnumThird = 3 + ) + +This discrepancy in types can cause various confusing behaviors and +bugs. + + +Wrong type in variable declarations + +The most obvious issue with such incorrect enumerations expresses +itself as a compile error: + +package pkg + + const ( + EnumFirst uint8 = 1 + EnumSecond = 2 + ) + + func fn(useFirst bool) { + x := EnumSecond + if useFirst { + x = EnumFirst + } + } + +fails to compile with + + ./const.go:11:5: cannot use EnumFirst (type uint8) as type int in assignment + + +Losing method sets + +A more subtle issue occurs with types that have methods and optional +interfaces. Consider the following: + + package main + + import "fmt" + + type Enum int + + func (e Enum) String() string { + return "an enum" + } + + const ( + EnumFirst Enum = 1 + EnumSecond = 2 + ) + + func main() { + fmt.Println(EnumFirst) + fmt.Println(EnumSecond) + } + +This code will output + + an enum + 2 + +as EnumSecond has no explicit type, and thus defaults to int. + +Available since + 2019.1 +` + +var docSA9005 = `Trying to marshal a struct with no public fields nor custom marshaling + +The encoding/json and encoding/xml packages only operate on exported +fields in structs, not unexported ones. It is usually an error to try +to (un)marshal structs that only consist of unexported fields. + +This check will not flag calls involving types that define custom +marshaling behavior, e.g. via MarshalJSON methods. It will also not +flag empty structs. + +Available since + Unreleased +` diff --git a/vendor/honnef.co/go/tools/staticcheck/lint.go b/vendor/honnef.co/go/tools/staticcheck/lint.go index 69389844..1300eff8 100644 --- a/vendor/honnef.co/go/tools/staticcheck/lint.go +++ b/vendor/honnef.co/go/tools/staticcheck/lint.go @@ -17,6 +17,7 @@ import ( "strings" "sync" texttemplate "text/template" + "unicode" . "honnef.co/go/tools/arg" "honnef.co/go/tools/deprecated" @@ -24,6 +25,7 @@ import ( "honnef.co/go/tools/internal/sharedcheck" "honnef.co/go/tools/lint" . 
"honnef.co/go/tools/lint/lintdsl" + "honnef.co/go/tools/printf" "honnef.co/go/tools/ssa" "honnef.co/go/tools/ssautil" "honnef.co/go/tools/staticcheck/vrp" @@ -229,8 +231,423 @@ var ( "(*encoding/json.Encoder).Encode": checkUnsupportedMarshalImpl(Arg("(*encoding/json.Encoder).Encode.v"), "json", "MarshalJSON", "MarshalText"), "(*encoding/xml.Encoder).Encode": checkUnsupportedMarshalImpl(Arg("(*encoding/xml.Encoder).Encode.v"), "xml", "MarshalXML", "MarshalText"), } + + checkAtomicAlignment = map[string]CallCheck{ + "sync/atomic.AddInt64": checkAtomicAlignmentImpl, + "sync/atomic.AddUint64": checkAtomicAlignmentImpl, + "sync/atomic.CompareAndSwapInt64": checkAtomicAlignmentImpl, + "sync/atomic.CompareAndSwapUint64": checkAtomicAlignmentImpl, + "sync/atomic.LoadInt64": checkAtomicAlignmentImpl, + "sync/atomic.LoadUint64": checkAtomicAlignmentImpl, + "sync/atomic.StoreInt64": checkAtomicAlignmentImpl, + "sync/atomic.StoreUint64": checkAtomicAlignmentImpl, + "sync/atomic.SwapInt64": checkAtomicAlignmentImpl, + "sync/atomic.SwapUint64": checkAtomicAlignmentImpl, + } + + // TODO(dh): detect printf wrappers + checkPrintfRules = map[string]CallCheck{ + "fmt.Errorf": func(call *Call) { checkPrintfCall(call, 0, 1) }, + "fmt.Printf": func(call *Call) { checkPrintfCall(call, 0, 1) }, + "fmt.Sprintf": func(call *Call) { checkPrintfCall(call, 0, 1) }, + "fmt.Fprintf": func(call *Call) { checkPrintfCall(call, 1, 2) }, + } ) +func checkPrintfCall(call *Call, fIdx, vIdx int) { + f := call.Args[fIdx] + var args []ssa.Value + switch v := call.Args[vIdx].Value.Value.(type) { + case *ssa.Slice: + var ok bool + args, ok = ssautil.Vararg(v) + if !ok { + // We don't know what the actual arguments to the function are + return + } + case *ssa.Const: + // nil, i.e. no arguments + default: + // We don't know what the actual arguments to the function are + return + } + checkPrintfCallImpl(call, f.Value.Value, args) +} + +type verbFlag int + +const ( + isInt verbFlag = 1 << iota + isBool + isFP + isString + isPointer + isPseudoPointer + isSlice + isAny + noRecurse +) + +var verbs = [...]verbFlag{ + 'b': isPseudoPointer | isInt | isFP, + 'c': isInt, + 'd': isPseudoPointer | isInt, + 'e': isFP, + 'E': isFP, + 'f': isFP, + 'F': isFP, + 'g': isFP, + 'G': isFP, + 'o': isPseudoPointer | isInt, + 'p': isSlice | isPointer | noRecurse, + 'q': isInt | isString, + 's': isString, + 't': isBool, + 'T': isAny, + 'U': isInt, + 'v': isAny, + 'X': isPseudoPointer | isInt | isString, + 'x': isPseudoPointer | isInt | isString, +} + +func checkPrintfCallImpl(call *Call, f ssa.Value, args []ssa.Value) { + var elem func(T types.Type, verb rune) ([]types.Type, bool) + elem = func(T types.Type, verb rune) ([]types.Type, bool) { + if verbs[verb]&noRecurse != 0 { + return []types.Type{T}, false + } + switch T := T.(type) { + case *types.Slice: + if verbs[verb]&isSlice != 0 { + return []types.Type{T}, false + } + if verbs[verb]&isString != 0 && IsType(T.Elem().Underlying(), "byte") { + return []types.Type{T}, false + } + return []types.Type{T.Elem()}, true + case *types.Map: + key := T.Key() + val := T.Elem() + return []types.Type{key, val}, true + case *types.Struct: + out := make([]types.Type, 0, T.NumFields()) + for i := 0; i < T.NumFields(); i++ { + out = append(out, T.Field(i).Type()) + } + return out, true + case *types.Array: + return []types.Type{T.Elem()}, true + default: + return []types.Type{T}, false + } + } + isInfo := func(T types.Type, info types.BasicInfo) bool { + basic, ok := T.Underlying().(*types.Basic) + return ok && 
basic.Info()&info != 0 + } + + isStringer := func(T types.Type, ms *types.MethodSet) bool { + sel := ms.Lookup(nil, "String") + if sel == nil { + return false + } + fn, ok := sel.Obj().(*types.Func) + if !ok { + // should be unreachable + return false + } + sig := fn.Type().(*types.Signature) + if sig.Params().Len() != 0 { + return false + } + if sig.Results().Len() != 1 { + return false + } + if !IsType(sig.Results().At(0).Type(), "string") { + return false + } + return true + } + isError := func(T types.Type, ms *types.MethodSet) bool { + sel := ms.Lookup(nil, "Error") + if sel == nil { + return false + } + fn, ok := sel.Obj().(*types.Func) + if !ok { + // should be unreachable + return false + } + sig := fn.Type().(*types.Signature) + if sig.Params().Len() != 0 { + return false + } + if sig.Results().Len() != 1 { + return false + } + if !IsType(sig.Results().At(0).Type(), "string") { + return false + } + return true + } + + isFormatter := func(T types.Type, ms *types.MethodSet) bool { + sel := ms.Lookup(nil, "Format") + if sel == nil { + return false + } + fn, ok := sel.Obj().(*types.Func) + if !ok { + // should be unreachable + return false + } + sig := fn.Type().(*types.Signature) + if sig.Params().Len() != 2 { + return false + } + // TODO(dh): check the types of the arguments for more + // precision + if sig.Results().Len() != 0 { + return false + } + return true + } + + seen := map[types.Type]bool{} + var checkType func(verb rune, T types.Type, top bool) bool + checkType = func(verb rune, T types.Type, top bool) bool { + if top { + for k := range seen { + delete(seen, k) + } + } + if seen[T] { + return true + } + seen[T] = true + if int(verb) >= len(verbs) { + // Unknown verb + return true + } + + flags := verbs[verb] + if flags == 0 { + // Unknown verb + return true + } + + ms := types.NewMethodSet(T) + if isFormatter(T, ms) { + // the value is responsible for formatting itself + return true + } + + if flags&isString != 0 && (isStringer(T, ms) || isError(T, ms)) { + // Check for stringer early because we're about to dereference + return true + } + + T = T.Underlying() + if flags&(isPointer|isPseudoPointer) == 0 && top { + T = Dereference(T) + } + if flags&isPseudoPointer != 0 && top { + t := Dereference(T) + if _, ok := t.Underlying().(*types.Struct); ok { + T = t + } + } + + if _, ok := T.(*types.Interface); ok { + // We don't know what's in the interface + return true + } + + var info types.BasicInfo + if flags&isInt != 0 { + info |= types.IsInteger + } + if flags&isBool != 0 { + info |= types.IsBoolean + } + if flags&isFP != 0 { + info |= types.IsFloat | types.IsComplex + } + if flags&isString != 0 { + info |= types.IsString + } + + if info != 0 && isInfo(T, info) { + return true + } + + if flags&isString != 0 && (IsType(T, "[]byte") || isStringer(T, ms) || isError(T, ms)) { + return true + } + + if flags&isPointer != 0 && IsPointerLike(T) { + return true + } + if flags&isPseudoPointer != 0 { + switch U := T.Underlying().(type) { + case *types.Pointer: + if !top { + return true + } + + if _, ok := U.Elem().Underlying().(*types.Struct); !ok { + return true + } + case *types.Chan, *types.Signature: + return true + } + } + + if flags&isSlice != 0 { + if _, ok := T.(*types.Slice); ok { + return true + } + } + + if flags&isAny != 0 { + return true + } + + elems, ok := elem(T.Underlying(), verb) + if !ok { + return false + } + for _, elem := range elems { + if !checkType(verb, elem, false) { + return false + } + } + + return true + } + + k, ok := f.(*ssa.Const) + if !ok { + return + } 
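For orientation, the kind of call this logic ends up rejecting looks like the following (an illustrative sketch; the concrete messages are emitted further down in this function):

    fmt.Printf("%d\n", "five")   // verb and argument type don't match
    fmt.Printf("%s %s\n", "one") // the format reads two arguments, the call passes one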
+ actions, err := printf.Parse(constant.StringVal(k.Value)) + if err != nil { + call.Invalid("couldn't parse format string") + return + } + + ptr := 1 + hasExplicit := false + + checkStar := func(verb printf.Verb, star printf.Argument) bool { + if star, ok := star.(printf.Star); ok { + idx := 0 + if star.Index == -1 { + idx = ptr + ptr++ + } else { + hasExplicit = true + idx = star.Index + ptr = star.Index + 1 + } + if idx == 0 { + call.Invalid(fmt.Sprintf("Printf format %s reads invalid arg 0; indices are 1-based", verb.Raw)) + return false + } + if idx > len(args) { + call.Invalid( + fmt.Sprintf("Printf format %s reads arg #%d, but call has only %d args", + verb.Raw, idx, len(args))) + return false + } + if arg, ok := args[idx-1].(*ssa.MakeInterface); ok { + if !isInfo(arg.X.Type(), types.IsInteger) { + call.Invalid(fmt.Sprintf("Printf format %s reads non-int arg #%d as argument of *", verb.Raw, idx)) + } + } + } + return true + } + + // We only report one problem per format string. Making a + // mistake with an index tends to invalidate all future + // implicit indices. + for _, action := range actions { + verb, ok := action.(printf.Verb) + if !ok { + continue + } + + if !checkStar(verb, verb.Width) || !checkStar(verb, verb.Precision) { + return + } + + off := ptr + if verb.Value != -1 { + hasExplicit = true + off = verb.Value + } + if off > len(args) { + call.Invalid( + fmt.Sprintf("Printf format %s reads arg #%d, but call has only %d args", + verb.Raw, off, len(args))) + return + } else if verb.Value == 0 && verb.Letter != '%' { + call.Invalid(fmt.Sprintf("Printf format %s reads invalid arg 0; indices are 1-based", verb.Raw)) + return + } else if off != 0 { + arg, ok := args[off-1].(*ssa.MakeInterface) + if ok { + if !checkType(verb.Letter, arg.X.Type(), true) { + call.Invalid(fmt.Sprintf("Printf format %s has arg #%d of wrong type %s", + verb.Raw, ptr, args[ptr-1].(*ssa.MakeInterface).X.Type())) + return + } + } + } + + switch verb.Value { + case -1: + // Consume next argument + ptr++ + case 0: + // Don't consume any arguments + default: + ptr = verb.Value + 1 + } + } + + if !hasExplicit && ptr <= len(args) { + call.Invalid(fmt.Sprintf("Printf call needs %d args but has %d args", ptr-1, len(args))) + } +} + +func checkAtomicAlignmentImpl(call *Call) { + sizes := call.Job.Pkg.TypesSizes + if sizes.Sizeof(types.Typ[types.Uintptr]) != 4 { + // Not running on a 32-bit platform + return + } + v, ok := call.Args[0].Value.Value.(*ssa.FieldAddr) + if !ok { + // TODO(dh): also check indexing into arrays and slices + return + } + T := v.X.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Struct) + fields := make([]*types.Var, 0, T.NumFields()) + for i := 0; i < T.NumFields() && i <= v.Field; i++ { + fields = append(fields, T.Field(i)) + } + + off := sizes.Offsetsof(fields)[v.Field] + if off%8 != 0 { + msg := fmt.Sprintf("address of non 64-bit aligned field %s passed to %s", + T.Field(v.Field).Name(), + CallName(call.Instr.Common())) + call.Invalid(msg) + } +} + func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { return func(call *Call) { arg := call.Args[argN] @@ -330,81 +747,85 @@ func (*Checker) Prefix() string { return "SA" } func (c *Checker) Checks() []lint.Check { return []lint.Check{ - {ID: "SA1000", FilterGenerated: false, Fn: c.callChecker(checkRegexpRules)}, - {ID: "SA1001", FilterGenerated: false, Fn: c.CheckTemplate}, - {ID: "SA1002", FilterGenerated: false, Fn: c.callChecker(checkTimeParseRules)}, - {ID: "SA1003", FilterGenerated: false, Fn: 
c.callChecker(checkEncodingBinaryRules)}, - {ID: "SA1004", FilterGenerated: false, Fn: c.CheckTimeSleepConstant}, - {ID: "SA1005", FilterGenerated: false, Fn: c.CheckExec}, - {ID: "SA1006", FilterGenerated: false, Fn: c.CheckUnsafePrintf}, - {ID: "SA1007", FilterGenerated: false, Fn: c.callChecker(checkURLsRules)}, - {ID: "SA1008", FilterGenerated: false, Fn: c.CheckCanonicalHeaderKey}, - {ID: "SA1010", FilterGenerated: false, Fn: c.callChecker(checkRegexpFindAllRules)}, - {ID: "SA1011", FilterGenerated: false, Fn: c.callChecker(checkUTF8CutsetRules)}, - {ID: "SA1012", FilterGenerated: false, Fn: c.CheckNilContext}, - {ID: "SA1013", FilterGenerated: false, Fn: c.CheckSeeker}, - {ID: "SA1014", FilterGenerated: false, Fn: c.callChecker(checkUnmarshalPointerRules)}, - {ID: "SA1015", FilterGenerated: false, Fn: c.CheckLeakyTimeTick}, - {ID: "SA1016", FilterGenerated: false, Fn: c.CheckUntrappableSignal}, - {ID: "SA1017", FilterGenerated: false, Fn: c.callChecker(checkUnbufferedSignalChanRules)}, - {ID: "SA1018", FilterGenerated: false, Fn: c.callChecker(checkStringsReplaceZeroRules)}, - {ID: "SA1019", FilterGenerated: false, Fn: c.CheckDeprecated}, - {ID: "SA1020", FilterGenerated: false, Fn: c.callChecker(checkListenAddressRules)}, - {ID: "SA1021", FilterGenerated: false, Fn: c.callChecker(checkBytesEqualIPRules)}, - {ID: "SA1023", FilterGenerated: false, Fn: c.CheckWriterBufferModified}, - {ID: "SA1024", FilterGenerated: false, Fn: c.callChecker(checkUniqueCutsetRules)}, - {ID: "SA1025", FilterGenerated: false, Fn: c.CheckTimerResetReturnValue}, - {ID: "SA1026", FilterGenerated: false, Fn: c.callChecker(checkUnsupportedMarshal)}, + {ID: "SA1000", FilterGenerated: false, Fn: c.callChecker(checkRegexpRules), Doc: docSA1000}, + {ID: "SA1001", FilterGenerated: false, Fn: c.CheckTemplate, Doc: docSA1001}, + {ID: "SA1002", FilterGenerated: false, Fn: c.callChecker(checkTimeParseRules), Doc: docSA1002}, + {ID: "SA1003", FilterGenerated: false, Fn: c.callChecker(checkEncodingBinaryRules), Doc: docSA1003}, + {ID: "SA1004", FilterGenerated: false, Fn: c.CheckTimeSleepConstant, Doc: docSA1004}, + {ID: "SA1005", FilterGenerated: false, Fn: c.CheckExec, Doc: docSA1005}, + {ID: "SA1006", FilterGenerated: false, Fn: c.CheckUnsafePrintf, Doc: docSA1006}, + {ID: "SA1007", FilterGenerated: false, Fn: c.callChecker(checkURLsRules), Doc: docSA1007}, + {ID: "SA1008", FilterGenerated: false, Fn: c.CheckCanonicalHeaderKey, Doc: docSA1008}, + {ID: "SA1010", FilterGenerated: false, Fn: c.callChecker(checkRegexpFindAllRules), Doc: docSA1010}, + {ID: "SA1011", FilterGenerated: false, Fn: c.callChecker(checkUTF8CutsetRules), Doc: docSA1011}, + {ID: "SA1012", FilterGenerated: false, Fn: c.CheckNilContext, Doc: docSA1012}, + {ID: "SA1013", FilterGenerated: false, Fn: c.CheckSeeker, Doc: docSA1013}, + {ID: "SA1014", FilterGenerated: false, Fn: c.callChecker(checkUnmarshalPointerRules), Doc: docSA1014}, + {ID: "SA1015", FilterGenerated: false, Fn: c.CheckLeakyTimeTick, Doc: docSA1015}, + {ID: "SA1016", FilterGenerated: false, Fn: c.CheckUntrappableSignal, Doc: docSA1016}, + {ID: "SA1017", FilterGenerated: false, Fn: c.callChecker(checkUnbufferedSignalChanRules), Doc: docSA1017}, + {ID: "SA1018", FilterGenerated: false, Fn: c.callChecker(checkStringsReplaceZeroRules), Doc: docSA1018}, + {ID: "SA1019", FilterGenerated: false, Fn: c.CheckDeprecated, Doc: docSA1019}, + {ID: "SA1020", FilterGenerated: false, Fn: c.callChecker(checkListenAddressRules), Doc: docSA1020}, + {ID: "SA1021", FilterGenerated: false, Fn: 
c.callChecker(checkBytesEqualIPRules), Doc: docSA1021}, + {ID: "SA1023", FilterGenerated: false, Fn: c.CheckWriterBufferModified, Doc: docSA1023}, + {ID: "SA1024", FilterGenerated: false, Fn: c.callChecker(checkUniqueCutsetRules), Doc: docSA1024}, + {ID: "SA1025", FilterGenerated: false, Fn: c.CheckTimerResetReturnValue, Doc: docSA1025}, + {ID: "SA1026", FilterGenerated: false, Fn: c.callChecker(checkUnsupportedMarshal), Doc: docSA1026}, + {ID: "SA1027", FilterGenerated: false, Fn: c.callChecker(checkAtomicAlignment), Doc: docSA1027}, - {ID: "SA2000", FilterGenerated: false, Fn: c.CheckWaitgroupAdd}, - {ID: "SA2001", FilterGenerated: false, Fn: c.CheckEmptyCriticalSection}, - {ID: "SA2002", FilterGenerated: false, Fn: c.CheckConcurrentTesting}, - {ID: "SA2003", FilterGenerated: false, Fn: c.CheckDeferLock}, + {ID: "SA2000", FilterGenerated: false, Fn: c.CheckWaitgroupAdd, Doc: docSA2000}, + {ID: "SA2001", FilterGenerated: false, Fn: c.CheckEmptyCriticalSection, Doc: docSA2001}, + {ID: "SA2002", FilterGenerated: false, Fn: c.CheckConcurrentTesting, Doc: docSA2002}, + {ID: "SA2003", FilterGenerated: false, Fn: c.CheckDeferLock, Doc: docSA2003}, - {ID: "SA3000", FilterGenerated: false, Fn: c.CheckTestMainExit}, - {ID: "SA3001", FilterGenerated: false, Fn: c.CheckBenchmarkN}, + {ID: "SA3000", FilterGenerated: false, Fn: c.CheckTestMainExit, Doc: docSA3000}, + {ID: "SA3001", FilterGenerated: false, Fn: c.CheckBenchmarkN, Doc: docSA3001}, - {ID: "SA4000", FilterGenerated: false, Fn: c.CheckLhsRhsIdentical}, - {ID: "SA4001", FilterGenerated: false, Fn: c.CheckIneffectiveCopy}, - {ID: "SA4002", FilterGenerated: false, Fn: c.CheckDiffSizeComparison}, - {ID: "SA4003", FilterGenerated: false, Fn: c.CheckExtremeComparison}, - {ID: "SA4004", FilterGenerated: false, Fn: c.CheckIneffectiveLoop}, - {ID: "SA4006", FilterGenerated: false, Fn: c.CheckUnreadVariableValues}, - {ID: "SA4008", FilterGenerated: false, Fn: c.CheckLoopCondition}, - {ID: "SA4009", FilterGenerated: false, Fn: c.CheckArgOverwritten}, - {ID: "SA4010", FilterGenerated: false, Fn: c.CheckIneffectiveAppend}, - {ID: "SA4011", FilterGenerated: false, Fn: c.CheckScopedBreak}, - {ID: "SA4012", FilterGenerated: false, Fn: c.CheckNaNComparison}, - {ID: "SA4013", FilterGenerated: false, Fn: c.CheckDoubleNegation}, - {ID: "SA4014", FilterGenerated: false, Fn: c.CheckRepeatedIfElse}, - {ID: "SA4015", FilterGenerated: false, Fn: c.callChecker(checkMathIntRules)}, - {ID: "SA4016", FilterGenerated: false, Fn: c.CheckSillyBitwiseOps}, - {ID: "SA4017", FilterGenerated: false, Fn: c.CheckPureFunctions}, - {ID: "SA4018", FilterGenerated: true, Fn: c.CheckSelfAssignment}, - {ID: "SA4019", FilterGenerated: true, Fn: c.CheckDuplicateBuildConstraints}, - {ID: "SA4020", FilterGenerated: false, Fn: c.CheckUnreachableTypeCases}, + {ID: "SA4000", FilterGenerated: false, Fn: c.CheckLhsRhsIdentical, Doc: docSA4000}, + {ID: "SA4001", FilterGenerated: false, Fn: c.CheckIneffectiveCopy, Doc: docSA4001}, + {ID: "SA4002", FilterGenerated: false, Fn: c.CheckDiffSizeComparison, Doc: docSA4002}, + {ID: "SA4003", FilterGenerated: false, Fn: c.CheckExtremeComparison, Doc: docSA4003}, + {ID: "SA4004", FilterGenerated: false, Fn: c.CheckIneffectiveLoop, Doc: docSA4004}, + {ID: "SA4006", FilterGenerated: false, Fn: c.CheckUnreadVariableValues, Doc: docSA4006}, + {ID: "SA4008", FilterGenerated: false, Fn: c.CheckLoopCondition, Doc: docSA4008}, + {ID: "SA4009", FilterGenerated: false, Fn: c.CheckArgOverwritten, Doc: docSA4009}, + {ID: "SA4010", FilterGenerated: false, Fn: 
c.CheckIneffectiveAppend, Doc: docSA4010}, + {ID: "SA4011", FilterGenerated: false, Fn: c.CheckScopedBreak, Doc: docSA4011}, + {ID: "SA4012", FilterGenerated: false, Fn: c.CheckNaNComparison, Doc: docSA4012}, + {ID: "SA4013", FilterGenerated: false, Fn: c.CheckDoubleNegation, Doc: docSA4013}, + {ID: "SA4014", FilterGenerated: false, Fn: c.CheckRepeatedIfElse, Doc: docSA4014}, + {ID: "SA4015", FilterGenerated: false, Fn: c.callChecker(checkMathIntRules), Doc: docSA4015}, + {ID: "SA4016", FilterGenerated: false, Fn: c.CheckSillyBitwiseOps, Doc: docSA4016}, + {ID: "SA4017", FilterGenerated: false, Fn: c.CheckPureFunctions, Doc: docSA4017}, + {ID: "SA4018", FilterGenerated: true, Fn: c.CheckSelfAssignment, Doc: docSA4018}, + {ID: "SA4019", FilterGenerated: true, Fn: c.CheckDuplicateBuildConstraints, Doc: docSA4019}, + {ID: "SA4020", FilterGenerated: false, Fn: c.CheckUnreachableTypeCases, Doc: docSA4020}, + {ID: "SA4021", FilterGenerated: true, Fn: c.CheckSingleArgAppend, Doc: docSA4021}, - {ID: "SA5000", FilterGenerated: false, Fn: c.CheckNilMaps}, - {ID: "SA5001", FilterGenerated: false, Fn: c.CheckEarlyDefer}, - {ID: "SA5002", FilterGenerated: false, Fn: c.CheckInfiniteEmptyLoop}, - {ID: "SA5003", FilterGenerated: false, Fn: c.CheckDeferInInfiniteLoop}, - {ID: "SA5004", FilterGenerated: false, Fn: c.CheckLoopEmptyDefault}, - {ID: "SA5005", FilterGenerated: false, Fn: c.CheckCyclicFinalizer}, - {ID: "SA5007", FilterGenerated: false, Fn: c.CheckInfiniteRecursion}, + {ID: "SA5000", FilterGenerated: false, Fn: c.CheckNilMaps, Doc: docSA5000}, + {ID: "SA5001", FilterGenerated: false, Fn: c.CheckEarlyDefer, Doc: docSA5001}, + {ID: "SA5002", FilterGenerated: false, Fn: c.CheckInfiniteEmptyLoop, Doc: docSA5002}, + {ID: "SA5003", FilterGenerated: false, Fn: c.CheckDeferInInfiniteLoop, Doc: docSA5003}, + {ID: "SA5004", FilterGenerated: false, Fn: c.CheckLoopEmptyDefault, Doc: docSA5004}, + {ID: "SA5005", FilterGenerated: false, Fn: c.CheckCyclicFinalizer, Doc: docSA5005}, + {ID: "SA5007", FilterGenerated: false, Fn: c.CheckInfiniteRecursion, Doc: docSA5007}, + {ID: "SA5008", FilterGenerated: false, Fn: c.CheckStructTags, Doc: ``}, + {ID: "SA5009", FilterGenerated: false, Fn: c.callChecker(checkPrintfRules), Doc: ``}, - {ID: "SA6000", FilterGenerated: false, Fn: c.callChecker(checkRegexpMatchLoopRules)}, - {ID: "SA6001", FilterGenerated: false, Fn: c.CheckMapBytesKey}, - {ID: "SA6002", FilterGenerated: false, Fn: c.callChecker(checkSyncPoolValueRules)}, - {ID: "SA6003", FilterGenerated: false, Fn: c.CheckRangeStringRunes}, - // {ID: "SA6004", FilterGenerated: false, Fn: c.CheckSillyRegexp}, - {ID: "SA6005", FilterGenerated: false, Fn: c.CheckToLowerToUpperComparison}, + {ID: "SA6000", FilterGenerated: false, Fn: c.callChecker(checkRegexpMatchLoopRules), Doc: docSA6000}, + {ID: "SA6001", FilterGenerated: false, Fn: c.CheckMapBytesKey, Doc: docSA6001}, + {ID: "SA6002", FilterGenerated: false, Fn: c.callChecker(checkSyncPoolValueRules), Doc: docSA6002}, + {ID: "SA6003", FilterGenerated: false, Fn: c.CheckRangeStringRunes, Doc: docSA6003}, + // {ID: "SA6004", FilterGenerated: false, Fn: c.CheckSillyRegexp, Doc: docSA6004}, + {ID: "SA6005", FilterGenerated: false, Fn: c.CheckToLowerToUpperComparison, Doc: docSA6005}, - {ID: "SA9001", FilterGenerated: false, Fn: c.CheckDubiousDeferInChannelRangeLoop}, - {ID: "SA9002", FilterGenerated: false, Fn: c.CheckNonOctalFileMode}, - {ID: "SA9003", FilterGenerated: false, Fn: c.CheckEmptyBranch}, - {ID: "SA9004", FilterGenerated: false, Fn: 
c.CheckMissingEnumTypesInDeclaration}, + {ID: "SA9001", FilterGenerated: false, Fn: c.CheckDubiousDeferInChannelRangeLoop, Doc: docSA9001}, + {ID: "SA9002", FilterGenerated: false, Fn: c.CheckNonOctalFileMode, Doc: docSA9002}, + {ID: "SA9003", FilterGenerated: false, Fn: c.CheckEmptyBranch, Doc: docSA9003}, + {ID: "SA9004", FilterGenerated: false, Fn: c.CheckMissingEnumTypesInDeclaration, Doc: docSA9004}, // Filtering generated code because it may include empty structs generated from data models. - {ID: "SA9005", FilterGenerated: true, Fn: c.callChecker(checkNoopMarshal)}, + {ID: "SA9005", FilterGenerated: true, Fn: c.callChecker(checkNoopMarshal), Doc: docSA9005}, } // "SA5006": c.CheckSliceOutOfBounds, @@ -603,20 +1024,12 @@ func applyStdlibKnowledge(fn *ssa.Function) { } } -func hasType(j *lint.Job, expr ast.Expr, name string) bool { - T := TypeOf(j, expr) - return IsType(T, name) -} - func (c *Checker) CheckUntrappableSignal(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if !IsCallToAnyAST(j, call, "os/signal.Ignore", "os/signal.Notify", "os/signal.Reset") { - return true + return } for _, arg := range call.Args { if conv, ok := arg.(*ast.CallExpr); ok && isName(j, conv.Fun, "os.Signal") { @@ -630,26 +1043,20 @@ func (c *Checker) CheckUntrappableSignal(j *lint.Job) { j.Errorf(arg, "%s signal cannot be trapped", Render(j, arg)) } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckTemplate(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) var kind string if IsCallToAST(j, call, "(*text/template.Template).Parse") { kind = "text" } else if IsCallToAST(j, call, "(*html/template.Template).Parse") { kind = "html" } else { - return true + return } sel := call.Fun.(*ast.SelectorExpr) if !IsCallToAST(j, sel.X, "text/template.New") && @@ -658,11 +1065,11 @@ func (c *Checker) CheckTemplate(j *lint.Job) { // different delims. A better solution with less false // negatives would use data flow analysis to see where the // template comes from and where it has been - return true + return } s, ok := ExprToString(j, call.Args[Arg("(*text/template.Template).Parse.text")]) if !ok { - return true + return } var err error switch kind { @@ -677,35 +1084,29 @@ func (c *Checker) CheckTemplate(j *lint.Job) { j.Errorf(call.Args[Arg("(*text/template.Template).Parse.text")], "%s", err) } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if !IsCallToAST(j, call, "time.Sleep") { - return true + return } lit, ok := call.Args[Arg("time.Sleep.d")].(*ast.BasicLit) if !ok { - return true + return } n, err := strconv.Atoi(lit.Value) if err != nil { - return true + return } if n == 0 || n > 120 { // time.Sleep(0) is a seldom used pattern in concurrency // tests. >120 might be intentional. 120 was chosen // because the user could've meant 2 minutes. 
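For orientation, a sketch of the call being flagged and the likely intent:

    time.Sleep(1)               // sleeps for one nanosecond
    time.Sleep(1 * time.Second) // almost certainly what was meant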
- return true + return } recommendation := "time.Sleep(time.Nanosecond)" if n != 1 { @@ -713,64 +1114,55 @@ func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { } j.Errorf(call.Args[Arg("time.Sleep.d")], "sleeping for %d nanoseconds is probably a bug. Be explicit if it isn't: %s", n, recommendation) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { - fn := func(node ast.Node) bool { - g, ok := node.(*ast.GoStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + g := node.(*ast.GoStmt) fun, ok := g.Call.Fun.(*ast.FuncLit) if !ok { - return true + return } if len(fun.Body.List) == 0 { - return true + return } stmt, ok := fun.Body.List[0].(*ast.ExprStmt) if !ok { - return true + return } call, ok := stmt.X.(*ast.CallExpr) if !ok { - return true + return } sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { - return true + return } - fn, ok := ObjectOf(j, sel.Sel).(*types.Func) + fn, ok := j.Pkg.TypesInfo.ObjectOf(sel.Sel).(*types.Func) if !ok { - return true + return } - if fn.FullName() == "(*sync.WaitGroup).Add" { + if lint.FuncName(fn) == "(*sync.WaitGroup).Add" { j.Errorf(sel, "should call %s before starting the goroutine to avoid a race", Render(j, stmt)) } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.GoStmt)(nil)}, fn) } func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.ForStmt) - if !ok || len(loop.Body.List) != 0 || loop.Post != nil { - return true + fn := func(node ast.Node) { + loop := node.(*ast.ForStmt) + if len(loop.Body.List) != 0 || loop.Post != nil { + return } if loop.Init != nil { // TODO(dh): this isn't strictly necessary, it just makes // the check easier. - return true + return } // An empty loop is bad news in two cases: 1) The loop has no // condition. In that case, it's just a loop that spins @@ -786,34 +1178,30 @@ func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { if loop.Cond != nil { if hasSideEffects(loop.Cond) { - return true + return } if ident, ok := loop.Cond.(*ast.Ident); ok { - if k, ok := ObjectOf(j, ident).(*types.Const); ok { + if k, ok := j.Pkg.TypesInfo.ObjectOf(ident).(*types.Const); ok { if !constant.BoolVal(k.Val()) { // don't flag `for false {}` loops. They're a debug aid. 
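A sketch of the empty loop this check reports:

    for {
    }

Such a loop spins at 100% CPU; a goroutine that genuinely needs to block forever can use select {} instead, which parks it rather than spinning.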
- return true + return } } } j.Errorf(loop, "loop condition never changes or has a race condition") } j.Errorf(loop, "this loop will spin, using 100%% CPU") - - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { mightExit := false var defers []ast.Stmt - loop, ok := node.(*ast.ForStmt) - if !ok || loop.Cond != nil { - return true + loop := node.(*ast.ForStmt) + if loop.Cond != nil { + return } fn2 := func(node ast.Node) bool { switch stmt := node.(type) { @@ -837,28 +1225,22 @@ func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { } ast.Inspect(loop.Body, fn2) if mightExit { - return true + return } for _, stmt := range defers { j.Errorf(stmt, "defers in this infinite loop will never run") } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.RangeStmt) + fn := func(node ast.Node) { + loop := node.(*ast.RangeStmt) + typ := j.Pkg.TypesInfo.TypeOf(loop.X) + _, ok := typ.Underlying().(*types.Chan) if !ok { - return true - } - typ := TypeOf(j, loop.X) - _, ok = typ.Underlying().(*types.Chan) - if !ok { - return true + return } fn2 := func(node ast.Node) bool { switch stmt := node.(type) { @@ -871,20 +1253,17 @@ func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { return true } ast.Inspect(loop.Body, fn2) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) } func (c *Checker) CheckTestMainExit(j *lint.Job) { - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { if !isTestMain(j, node) { - return true + return } - arg := ObjectOf(j, node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) + arg := j.Pkg.TypesInfo.ObjectOf(node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) callsRun := false fn2 := func(node ast.Node) bool { call, ok := node.(*ast.CallExpr) @@ -899,7 +1278,7 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { if !ok { return true } - if arg != ObjectOf(j, ident) { + if arg != j.Pkg.TypesInfo.ObjectOf(ident) { return true } if sel.Sel.Name == "Run" { @@ -922,11 +1301,8 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { if !callsExit && callsRun { j.Errorf(node, "TestMain should call os.Exit to set exit code") } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder(nil, fn) } func isTestMain(j *lint.Job, node ast.Node) bool { @@ -948,64 +1324,52 @@ func isTestMain(j *lint.Job, node ast.Node) bool { } func (c *Checker) CheckExec(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if !IsCallToAST(j, call, "os/exec.Command") { - return true + return } val, ok := ExprToString(j, call.Args[Arg("os/exec.Command.name")]) if !ok { - return true + return } if !strings.Contains(val, " ") || strings.Contains(val, `\`) || strings.Contains(val, "/") { - return true + return } j.Errorf(call.Args[Arg("os/exec.Command.name")], "first argument to exec.Command looks like a shell command, but a program name or path are expected") - return true - } - for _, f := range 
j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckLoopEmptyDefault(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.ForStmt) - if !ok || len(loop.Body.List) != 1 || loop.Cond != nil || loop.Init != nil { - return true + fn := func(node ast.Node) { + loop := node.(*ast.ForStmt) + if len(loop.Body.List) != 1 || loop.Cond != nil || loop.Init != nil { + return } sel, ok := loop.Body.List[0].(*ast.SelectStmt) if !ok { - return true + return } for _, c := range sel.Body.List { if comm, ok := c.(*ast.CommClause); ok && comm.Comm == nil && len(comm.Body) == 0 { j.Errorf(comm, "should not have an empty default case in a for+select loop. The loop will spin.") } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { - fn := func(node ast.Node) bool { - op, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + op := node.(*ast.BinaryExpr) switch op.Op { case token.EQL, token.NEQ: - if basic, ok := TypeOf(j, op.X).Underlying().(*types.Basic); ok { + if basic, ok := j.Pkg.TypesInfo.TypeOf(op.X).Underlying().(*types.Basic); ok { if kind := basic.Kind(); kind == types.Float32 || kind == types.Float64 { // f == f and f != f might be used to check for NaN - return true + return } } case token.SUB, token.QUO, token.AND, token.REM, token.OR, token.XOR, token.AND_NOT, @@ -1013,22 +1377,34 @@ func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { default: // For some ops, such as + and *, it can make sense to // have identical operands - return true + return } if Render(j, op.X) != Render(j, op.Y) { - return true + return + } + l1, ok1 := op.X.(*ast.BasicLit) + l2, ok2 := op.Y.(*ast.BasicLit) + if ok1 && ok2 && l1.Kind == token.INT && l2.Kind == l1.Kind && l1.Value == "0" && l2.Value == l1.Value && IsGenerated(j.File(l1)) { + // cgo generates the following function call: + // _cgoCheckPointer(_cgoBase0, 0 == 0) – it uses 0 == 0 + // instead of true in case the user shadowed the + // identifier. Ideally we'd restrict this exception to + // calls of _cgoCheckPointer, but it's not worth the + // hassle of keeping track of the stack. + // are very rare to begin with, and we're mostly checking + // for them to catch typos such as 1 == 1 where the user + // meant to type i == 1. The odds of a false negative for + // 0 == 0 are slim. 
+ return } j.Errorf(op, "identical expressions on the left and right side of the '%s' operator", op.Op) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) CheckScopedBreak(j *lint.Job) { - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { var body *ast.BlockStmt switch node := node.(type) { case *ast.ForStmt: @@ -1036,7 +1412,7 @@ func (c *Checker) CheckScopedBreak(j *lint.Job) { case *ast.RangeStmt: body = node.Body default: - return true + panic(fmt.Sprintf("unreachable: %T", node)) } for _, stmt := range body.List { var blocks [][]ast.Stmt @@ -1081,52 +1457,40 @@ func (c *Checker) CheckScopedBreak(j *lint.Job) { } } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)}, fn) } func (c *Checker) CheckUnsafePrintf(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) var arg int if IsCallToAnyAST(j, call, "fmt.Printf", "fmt.Sprintf", "log.Printf") { arg = Arg("fmt.Printf.format") } else if IsCallToAnyAST(j, call, "fmt.Fprintf") { arg = Arg("fmt.Fprintf.format") } else { - return true + return } if len(call.Args) != arg+1 { - return true + return } switch call.Args[arg].(type) { case *ast.CallExpr, *ast.Ident: default: - return true + return } j.Errorf(call.Args[arg], "printf-style function with dynamic format string and no further arguments should use print-style function instead") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckEarlyDefer(j *lint.Job) { - fn := func(node ast.Node) bool { - block, ok := node.(*ast.BlockStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + block := node.(*ast.BlockStmt) if len(block.List) < 2 { - return true + return } for i, stmt := range block.List { if i == len(block.List)-1 { @@ -1149,7 +1513,7 @@ func (c *Checker) CheckEarlyDefer(j *lint.Job) { if !ok { continue } - sig, ok := TypeOf(j, call.Fun).(*types.Signature) + sig, ok := j.Pkg.TypesInfo.TypeOf(call.Fun).(*types.Signature) if !ok { continue } @@ -1186,11 +1550,8 @@ func (c *Checker) CheckEarlyDefer(j *lint.Job) { } j.Errorf(def, "should check returned error before deferring %s", Render(j, def.Call)) } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) } func selectorX(sel *ast.SelectorExpr) ast.Node { @@ -1228,7 +1589,7 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { return nil, "", false } - fn, ok := ObjectOf(j, sel.Sel).(*types.Func) + fn, ok := j.Pkg.TypesInfo.ObjectOf(sel.Sel).(*types.Func) if !ok { return nil, "", false } @@ -1240,13 +1601,10 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { return sel.X, fn.Name(), true } - fn := func(node ast.Node) bool { - block, ok := node.(*ast.BlockStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + block := node.(*ast.BlockStmt) if len(block.List) < 2 { - return true + return } for i := range block.List[:len(block.List)-1] { sel1, method1, ok1 := mutexParams(block.List[i]) @@ -1260,11 +1618,8 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { j.Errorf(block.List[i+1], "empty critical section") } } - return true - } - for _, 
f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) } // cgo produces code like fn(&*_Cvar_kSomeCallbacks) which we don't @@ -1272,7 +1627,7 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { var cgoIdent = regexp.MustCompile(`^_C(func|var)_.+$`) func (c *Checker) CheckIneffectiveCopy(j *lint.Job) { - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { if unary, ok := node.(*ast.UnaryExpr); ok { if star, ok := unary.X.(*ast.StarExpr); ok && unary.Op == token.AND { ident, ok := star.X.(*ast.Ident) @@ -1287,15 +1642,12 @@ func (c *Checker) CheckIneffectiveCopy(j *lint.Job) { j.Errorf(star, "*&x will be simplified to x. It will not copy x.") } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.StarExpr)(nil)}, fn) } func (c *Checker) CheckDiffSizeComparison(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, b := range ssafn.Blocks { for _, ins := range b.Instrs { binop, ok := ins.(*ssa.BinOp) @@ -1325,7 +1677,7 @@ func (c *Checker) CheckDiffSizeComparison(j *lint.Job) { } func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { - fn := func(node ast.Node) bool { + fn := func(node ast.Node, _ bool) bool { assign, ok := node.(*ast.AssignStmt) if ok { // TODO(dh): This risks missing some Header reads, for @@ -1336,7 +1688,7 @@ func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { if !ok { continue } - if hasType(j, op.X, "net/http.Header") { + if IsOfType(j, op.X, "net/http.Header") { return false } } @@ -1346,7 +1698,7 @@ func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { if !ok { return true } - if !hasType(j, op.X, "net/http.Header") { + if !IsOfType(j, op.X, "net/http.Header") { return true } s, ok := ExprToString(j, op.Index) @@ -1359,40 +1711,32 @@ func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { j.Errorf(op, "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s) return true } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } + j.Pkg.Inspector.Nodes([]ast.Node{(*ast.AssignStmt)(nil), (*ast.IndexExpr)(nil)}, fn) } func (c *Checker) CheckBenchmarkN(j *lint.Job) { - fn := func(node ast.Node) bool { - assign, ok := node.(*ast.AssignStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + assign := node.(*ast.AssignStmt) if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { - return true + return } sel, ok := assign.Lhs[0].(*ast.SelectorExpr) if !ok { - return true + return } if sel.Sel.Name != "N" { - return true + return } - if !hasType(j, sel.X, "*testing.B") { - return true + if !IsOfType(j, sel.X, "*testing.B") { + return } j.Errorf(assign, "should not assign to %s", Render(j, sel)) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) } func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { if IsExample(ssafn) { continue } @@ -1462,7 +1806,7 @@ func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { } func (c *Checker) CheckPredeterminedBooleanExprs(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ssabinop, 
ok := ins.(*ssa.BinOp) @@ -1506,7 +1850,7 @@ func (c *Checker) CheckPredeterminedBooleanExprs(j *lint.Job) { } func (c *Checker) CheckNilMaps(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { mu, ok := ins.(*ssa.MapUpdate) @@ -1532,18 +1876,15 @@ func (c *Checker) CheckExtremeComparison(j *lint.Job) { if !ok { return false } - return IsObject(ObjectOf(j, sel.Sel), name) + return IsObject(j.Pkg.TypesInfo.ObjectOf(sel.Sel), name) } - fn := func(node ast.Node) bool { - expr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } - tx := TypeOf(j, expr.X) + fn := func(node ast.Node) { + expr := node.(*ast.BinaryExpr) + tx := j.Pkg.TypesInfo.TypeOf(expr.X) basic, ok := tx.Underlying().(*types.Basic) if !ok { - return true + return } var max string @@ -1607,11 +1948,8 @@ func (c *Checker) CheckExtremeComparison(j *lint.Job) { } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ssa.Const, bool) { @@ -1656,7 +1994,7 @@ func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ss } func (c *Checker) CheckLoopCondition(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { fn := func(node ast.Node) bool { loop, ok := node.(*ast.ForStmt) if !ok { @@ -1721,7 +2059,7 @@ func (c *Checker) CheckLoopCondition(j *lint.Job) { } func (c *Checker) CheckArgOverwritten(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { fn := func(node ast.Node) bool { var typ *ast.FuncType var body *ast.BlockStmt @@ -1741,7 +2079,7 @@ func (c *Checker) CheckArgOverwritten(j *lint.Job) { } for _, field := range typ.Params.List { for _, arg := range field.Names { - obj := ObjectOf(j, arg) + obj := j.Pkg.TypesInfo.ObjectOf(arg) var ssaobj *ssa.Parameter for _, param := range ssafn.Params { if param.Object() == obj { @@ -1771,7 +2109,7 @@ func (c *Checker) CheckArgOverwritten(j *lint.Job) { if !ok { continue } - if ObjectOf(j, ident) == obj { + if j.Pkg.TypesInfo.ObjectOf(ident) == obj { assigned = true return false } @@ -1798,7 +2136,7 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { // // - any nested, unlabelled continue, even if it is in another // loop or closure. - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { var body *ast.BlockStmt switch fn := node.(type) { case *ast.FuncDecl: @@ -1806,10 +2144,10 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { case *ast.FuncLit: body = fn.Body default: - return true + panic(fmt.Sprintf("unreachable: %T", node)) } if body == nil { - return true + return } labels := map[*ast.Object]ast.Stmt{} ast.Inspect(body, func(node ast.Node) bool { @@ -1829,7 +2167,7 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { body = node.Body loop = node case *ast.RangeStmt: - typ := TypeOf(j, node.X) + typ := j.Pkg.TypesInfo.TypeOf(node.X) if _, ok := typ.Underlying().(*types.Map); ok { // looping once over a map is a valid pattern for // getting an arbitrary element. 
@@ -1893,82 +2231,67 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { } return true }) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn) } func (c *Checker) CheckNilContext(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if len(call.Args) == 0 { - return true + return } - if typ, ok := TypeOf(j, call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil { - return true + if typ, ok := j.Pkg.TypesInfo.TypeOf(call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil { + return } - sig, ok := TypeOf(j, call.Fun).(*types.Signature) + sig, ok := j.Pkg.TypesInfo.TypeOf(call.Fun).(*types.Signature) if !ok { - return true + return } if sig.Params().Len() == 0 { - return true + return } if !IsType(sig.Params().At(0).Type(), "context.Context") { - return true + return } j.Errorf(call.Args[0], "do not pass a nil Context, even if a function permits it; pass context.TODO if you are unsure about which Context to use") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckSeeker(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { - return true + return } if sel.Sel.Name != "Seek" { - return true + return } if len(call.Args) != 2 { - return true + return } arg0, ok := call.Args[Arg("(io.Seeker).Seek.offset")].(*ast.SelectorExpr) if !ok { - return true + return } switch arg0.Sel.Name { case "SeekStart", "SeekCurrent", "SeekEnd": default: - return true + return } pkg, ok := arg0.X.(*ast.Ident) if !ok { - return true + return } if pkg.Name != "io" { - return true + return } j.Errorf(call, "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { @@ -1986,7 +2309,7 @@ func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { return true } - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { val, ok := ins.(ssa.Value) @@ -2038,7 +2361,7 @@ func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { } func (c *Checker) CheckConcurrentTesting(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { gostmt, ok := ins.(*ssa.Go) @@ -2096,7 +2419,7 @@ func (c *Checker) CheckConcurrentTesting(j *lint.Job) { } func (c *Checker) CheckCyclicFinalizer(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { node := c.funcDescs.CallGraph.CreateNode(ssafn) for _, edge := range node.Out { if edge.Callee.Func.RelString(nil) != "runtime.SetFinalizer" { @@ -2124,7 +2447,7 @@ func (c *Checker) CheckCyclicFinalizer(j *lint.Job) { } for _, b := range mc.Bindings { if b == v { - pos := j.Program.DisplayPosition(mc.Fn.Pos()) + pos := 
lint.DisplayPosition(j.Pkg.Fset, mc.Fn.Pos()) j.Errorf(edge.Site, "the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos) } } @@ -2133,7 +2456,7 @@ func (c *Checker) CheckCyclicFinalizer(j *lint.Job) { } func (c *Checker) CheckSliceOutOfBounds(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ia, ok := ins.(*ssa.IndexAddr) @@ -2157,7 +2480,7 @@ func (c *Checker) CheckSliceOutOfBounds(j *lint.Job) { } func (c *Checker) CheckDeferLock(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { instrs := FilterDebug(block.Instrs) if len(instrs) < 2 { @@ -2203,7 +2526,7 @@ func (c *Checker) CheckNaNComparison(j *lint.Job) { } return IsCallTo(call.Common(), "math.NaN") } - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ins, ok := ins.(*ssa.BinOp) @@ -2219,7 +2542,7 @@ func (c *Checker) CheckNaNComparison(j *lint.Job) { } func (c *Checker) CheckInfiniteRecursion(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { node := c.funcDescs.CallGraph.CreateNode(ssafn) for _, edge := range node.Out { if edge.Callee != node { @@ -2272,15 +2595,15 @@ func isName(j *lint.Job, expr ast.Expr, name string) bool { var obj types.Object switch expr := expr.(type) { case *ast.Ident: - obj = ObjectOf(j, expr) + obj = j.Pkg.TypesInfo.ObjectOf(expr) case *ast.SelectorExpr: - obj = ObjectOf(j, expr.Sel) + obj = j.Pkg.TypesInfo.ObjectOf(expr.Sel) } return objectName(obj) == name } func (c *Checker) CheckLeakyTimeTick(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { if IsInMain(j, ssafn) || IsInTest(j, ssafn) { continue } @@ -2300,24 +2623,18 @@ func (c *Checker) CheckLeakyTimeTick(j *lint.Job) { } func (c *Checker) CheckDoubleNegation(j *lint.Job) { - fn := func(node ast.Node) bool { - unary1, ok := node.(*ast.UnaryExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + unary1 := node.(*ast.UnaryExpr) unary2, ok := unary1.X.(*ast.UnaryExpr) if !ok { - return true + return } if unary1.Op != token.NOT || unary2.Op != token.NOT { - return true + return } j.Errorf(unary1, "negating a boolean twice has no effect; is this a typo?") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil)}, fn) } func hasSideEffects(node ast.Node) bool { @@ -2353,21 +2670,18 @@ func (c *Checker) CheckRepeatedIfElse(j *lint.Job) { } return inits, conds } - fn := func(node ast.Node) bool { - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + ifstmt := node.(*ast.IfStmt) if seen[ifstmt] { - return true + return } inits, conds := collectConds(ifstmt, nil, nil) if len(inits) > 0 { - return true + return } for _, cond := range conds { if hasSideEffects(cond) { - return true + return } } counts := map[string]int{} @@ -2378,15 +2692,12 @@ func (c *Checker) CheckRepeatedIfElse(j *lint.Job) { j.Errorf(cond, "this condition occurs multiple times in this if/else if chain") } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + 
j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) } func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ins, ok := ins.(*ssa.BinOp) @@ -2424,14 +2735,11 @@ func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { } func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) + sig, ok := j.Pkg.TypesInfo.TypeOf(call.Fun).(*types.Signature) if !ok { - return true - } - sig, ok := TypeOf(j, call.Fun).(*types.Signature) - if !ok { - return true + return } n := sig.Params().Len() var args []int @@ -2459,16 +2767,13 @@ func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { j.Errorf(call.Args[i], "file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value) } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckPureFunctions(j *lint.Job) { fnLoop: - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { if IsInTest(j, ssafn) { params := ssafn.Signature.Params() for i := 0; i < params.Len(); i++ { @@ -2510,7 +2815,7 @@ fnLoop: } func (c *Checker) isDeprecated(j *lint.Job, ident *ast.Ident) (bool, string) { - obj := ObjectOf(j, ident) + obj := j.Pkg.TypesInfo.ObjectOf(ident) if obj.Pkg() == nil { return false, "" } @@ -2524,8 +2829,8 @@ func (c *Checker) CheckDeprecated(j *lint.Job) { var ssafn *ssa.Function stack := 0 - fn := func(node ast.Node) bool { - if node == nil { + fn := func(node ast.Node, push bool) bool { + if !push { stack-- } else { stack++ @@ -2534,18 +2839,18 @@ func (c *Checker) CheckDeprecated(j *lint.Job) { ssafn = nil } if fn, ok := node.(*ast.FuncDecl); ok { - ssafn = j.Program.SSA.FuncValue(ObjectOf(j, fn.Name).(*types.Func)) + ssafn = j.Pkg.SSA.Prog.FuncValue(j.Pkg.TypesInfo.ObjectOf(fn.Name).(*types.Func)) } sel, ok := node.(*ast.SelectorExpr) if !ok { return true } - obj := ObjectOf(j, sel.Sel) + obj := j.Pkg.TypesInfo.ObjectOf(sel.Sel) if obj.Pkg() == nil { return true } - nodePkg := j.NodePackage(node).Types + nodePkg := j.Pkg.Types if nodePkg == obj.Pkg() || obj.Pkg().Path()+"_test" == nodePkg.Path() { // Don't flag stuff in our own package return true @@ -2574,24 +2879,20 @@ func (c *Checker) CheckDeprecated(j *lint.Job) { } return true } - for _, pkg := range j.Program.InitialPackages { - for _, f := range pkg.Syntax { - ast.Inspect(f, func(node ast.Node) bool { - if node, ok := node.(*ast.ImportSpec); ok { - p := node.Path.Value - path := p[1 : len(p)-1] - imp := pkg.Imports[path] - if alt := c.deprecatedPkgs[imp.Types]; alt != "" { - j.Errorf(node, "Package %s is deprecated: %s", path, alt) - } + for _, f := range j.Pkg.Syntax { + ast.Inspect(f, func(node ast.Node) bool { + if node, ok := node.(*ast.ImportSpec); ok { + p := node.Path.Value + path := p[1 : len(p)-1] + imp := j.Pkg.Imports[path] + if alt := c.deprecatedPkgs[imp.Types]; alt != "" { + j.Errorf(node, "Package %s is deprecated: %s", path, alt) } - return true - }) - } - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) + } + return true + }) } + j.Pkg.Inspector.Nodes(nil, fn) } func (c *Checker) callChecker(rules map[string]CallCheck) func(j *lint.Job) { @@ -2601,7 +2902,7 @@ func (c *Checker) callChecker(rules 
map[string]CallCheck) func(j *lint.Job) { } func (c *Checker) checkCalls(j *lint.Job, rules map[string]CallCheck) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { node := c.funcDescs.CallGraph.CreateNode(ssafn) for _, edge := range node.Out { callee := edge.Callee.Func @@ -2610,7 +2911,7 @@ func (c *Checker) checkCalls(j *lint.Job, rules map[string]CallCheck) { continue } - r, ok := rules[obj.FullName()] + r, ok := rules[lint.FuncName(obj)] if !ok { continue } @@ -2679,7 +2980,7 @@ func (c *Checker) CheckWriterBufferModified(j *lint.Job) { // Taint the argument as MUST_NOT_MODIFY, then propagate that // through functions like bytes.Split - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { sig := ssafn.Signature if ssafn.Name() != "Write" || sig.Recv() == nil || sig.Params().Len() != 1 || sig.Results().Len() != 2 { continue @@ -2737,7 +3038,7 @@ func loopedRegexp(name string) CallCheck { } func (c *Checker) CheckEmptyBranch(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { if ssafn.Syntax() == nil { continue } @@ -2770,7 +3071,7 @@ func (c *Checker) CheckEmptyBranch(j *lint.Job) { } func (c *Checker) CheckMapBytesKey(j *lint.Job) { - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { for _, b := range fn.Blocks { insLoop: for _, ins := range b.Instrs { @@ -2825,13 +3126,10 @@ func (c *Checker) CheckRangeStringRunes(j *lint.Job) { } func (c *Checker) CheckSelfAssignment(j *lint.Job) { - fn := func(node ast.Node) bool { - assign, ok := node.(*ast.AssignStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + assign := node.(*ast.AssignStmt) if assign.Tok != token.ASSIGN || len(assign.Lhs) != len(assign.Rhs) { - return true + return } for i, stmt := range assign.Lhs { rlh := Render(j, stmt) @@ -2840,11 +3138,8 @@ func (c *Checker) CheckSelfAssignment(j *lint.Job) { j.Errorf(assign, "self-assignment of %s to %s", rrh, rlh) } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) } func buildTagsIdentical(s1, s2 []string) bool { @@ -2866,7 +3161,7 @@ func buildTagsIdentical(s1, s2 []string) bool { } func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) { - for _, f := range job.Program.Files { + for _, f := range job.Pkg.Syntax { constraints := buildTags(f) for i, constraint1 := range constraints { for j, constraint2 := range constraints { @@ -2886,7 +3181,7 @@ func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) { func (c *Checker) CheckSillyRegexp(j *lint.Job) { // We could use the rule checking engine for this, but the // arguments aren't really invalid. 
- for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { for _, b := range fn.Blocks { for _, ins := range b.Instrs { call, ok := ins.(*ssa.Call) @@ -2917,19 +3212,16 @@ func (c *Checker) CheckSillyRegexp(j *lint.Job) { } func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { - fn := func(node ast.Node) bool { - decl, ok := node.(*ast.GenDecl) - if !ok { - return true - } + fn := func(node ast.Node) { + decl := node.(*ast.GenDecl) if !decl.Lparen.IsValid() { - return true + return } if decl.Tok != token.CONST { - return true + return } - groups := GroupSpecs(j, decl.Specs) + groups := GroupSpecs(j.Pkg.Fset, decl.Specs) groupLoop: for _, group := range groups { if len(group) < 2 { @@ -2964,15 +3256,12 @@ func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { } j.Errorf(group[0], "only the first constant in this group has an explicit type") } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.GenDecl)(nil)}, fn) } func (c *Checker) CheckTimerResetReturnValue(j *lint.Job) { - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { for _, block := range fn.Blocks { for _, ins := range block.Instrs { call, ok := ins.(*ssa.Call) @@ -3034,11 +3323,8 @@ func (c *Checker) CheckTimerResetReturnValue(j *lint.Job) { } func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { - fn := func(node ast.Node) bool { - binExpr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + binExpr := node.(*ast.BinaryExpr) var negative bool switch binExpr.Op { @@ -3047,7 +3333,7 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { case token.NEQ: negative = true default: - return true + return } const ( @@ -3061,7 +3347,7 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { } else if IsCallToAST(j, binExpr.X, up) && IsCallToAST(j, binExpr.Y, up) { call = up } else { - return true + return } bang := "" @@ -3070,12 +3356,9 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { } j.Errorf(binExpr, "should use %sstrings.EqualFold(a, b) instead of %s(a) %s %s(b)", bang, call, binExpr.Op, call) - return true } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { @@ -3101,11 +3384,8 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { return nil, nil, false } - fn := func(node ast.Node) bool { - tsStmt, ok := node.(*ast.TypeSwitchStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + tsStmt := node.(*ast.TypeSwitchStmt) type ccAndTypes struct { cc *ast.CaseClause @@ -3124,7 +3404,7 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { Ts := make([]types.Type, len(cc.List)) for i, expr := range cc.List { - Ts[i] = TypeOf(j, expr) + Ts[i] = j.Pkg.TypesInfo.TypeOf(expr) } ccs = append(ccs, ccAndTypes{cc: cc, types: Ts}) @@ -3132,7 +3412,7 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { if len(ccs) <= 1 { // Zero or one case clauses, nothing to check. - return true + return } // Check if case clauses following cc have types that are subsumed by cc. 
@@ -3143,11 +3423,123 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { } } } - - return true } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) +} + +func (c *Checker) CheckSingleArgAppend(j *lint.Job) { + fn := func(node ast.Node) { + if !IsCallToAST(j, node, "append") { + return + } + call := node.(*ast.CallExpr) + if len(call.Args) != 1 { + return + } + j.Errorf(call, "x = append(y) is equivalent to x = y") + } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) +} + +func (c *Checker) CheckStructTags(j *lint.Job) { + fn := func(node ast.Node) { + for _, field := range node.(*ast.StructType).Fields.List { + if field.Tag == nil { + continue + } + tags, err := parseStructTag(field.Tag.Value[1 : len(field.Tag.Value)-1]) + if err != nil { + j.Errorf(field.Tag, "unparseable struct tag: %s", err) + continue + } + for k, v := range tags { + if len(v) > 1 { + j.Errorf(field.Tag, "duplicate struct tag %q", k) + continue + } + + switch k { + case "json": + checkJSONTag(j, field, v[0]) + case "xml": + checkXMLTag(j, field, v[0]) + } + } + } + } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.StructType)(nil)}, fn) +} + +func checkJSONTag(j *lint.Job, field *ast.Field, tag string) { + if len(tag) == 0 { + // TODO(dh): should we flag empty tags? + } + fields := strings.Split(tag, ",") + for _, r := range fields[0] { + if !unicode.IsLetter(r) && !unicode.IsDigit(r) && !strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", r) { + j.Errorf(field.Tag, "invalid JSON field name %q", fields[0]) + } + } + var co, cs, ci int + for _, s := range fields[1:] { + switch s { + case "omitempty": + co++ + case "": + // allow stuff like "-," + case "string": + cs++ + // only for string, floating point, integer and bool + T := Dereference(j.Pkg.TypesInfo.TypeOf(field.Type).Underlying()).Underlying() + basic, ok := T.(*types.Basic) + if !ok || (basic.Info()&(types.IsBoolean|types.IsInteger|types.IsFloat|types.IsString)) == 0 { + j.Errorf(field.Tag, "the JSON string option only applies to fields of type string, floating point, integer or bool, or pointers to those") + } + case "inline": + ci++ + default: + j.Errorf(field.Tag, "unknown JSON option %q", s) + } + } + if co > 1 { + j.Errorf(field.Tag, `duplicate JSON option "omitempty"`) + } + if cs > 1 { + j.Errorf(field.Tag, `duplicate JSON option "string"`) + } + if ci > 1 { + j.Errorf(field.Tag, `duplicate JSON option "inline"`) + } +} + +func checkXMLTag(j *lint.Job, field *ast.Field, tag string) { + if len(tag) == 0 { + // TODO(dh): should we flag empty tags? + } + fields := strings.Split(tag, ",") + counts := map[string]int{} + var exclusives []string + for _, s := range fields[1:] { + switch s { + case "attr", "chardata", "cdata", "innerxml", "comment": + counts[s]++ + if counts[s] == 1 { + exclusives = append(exclusives, s) + } + case "omitempty", "any": + counts[s]++ + case "": + default: + j.Errorf(field.Tag, "unknown XML option %q", s) + } + } + for k, v := range counts { + if v > 1 { + j.Errorf(field.Tag, "duplicate XML option %q", k) + } + } + if len(exclusives) > 1 { + j.Errorf(field.Tag, "XML options %s are mutually exclusive", strings.Join(exclusives, " and ")) } } diff --git a/vendor/honnef.co/go/tools/staticcheck/structtag.go b/vendor/honnef.co/go/tools/staticcheck/structtag.go new file mode 100644 index 00000000..38830a22 --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/structtag.go @@ -0,0 +1,58 @@ +// Copyright 2009 The Go Authors. 
All rights reserved. +// Copyright 2019 Dominik Honnef. All rights reserved. + +package staticcheck + +import "strconv" + +func parseStructTag(tag string) (map[string][]string, error) { + // FIXME(dh): detect missing closing quote + out := map[string][]string{} + + for tag != "" { + // Skip leading space. + i := 0 + for i < len(tag) && tag[i] == ' ' { + i++ + } + tag = tag[i:] + if tag == "" { + break + } + + // Scan to colon. A space, a quote or a control character is a syntax error. + // Strictly speaking, control chars include the range [0x7f, 0x9f], not just + // [0x00, 0x1f], but in practice, we ignore the multi-byte control characters + // as it is simpler to inspect the tag's bytes than the tag's runes. + i = 0 + for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f { + i++ + } + if i == 0 || i+1 >= len(tag) || tag[i] != ':' || tag[i+1] != '"' { + break + } + name := string(tag[:i]) + tag = tag[i+1:] + + // Scan quoted string to find value. + i = 1 + for i < len(tag) && tag[i] != '"' { + if tag[i] == '\\' { + i++ + } + i++ + } + if i >= len(tag) { + break + } + qvalue := string(tag[:i+1]) + tag = tag[i+1:] + + value, err := strconv.Unquote(qvalue) + if err != nil { + return nil, err + } + out[name] = append(out[name], value) + } + return out, nil +} diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go b/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go index cb17f042..a0011ed0 100644 --- a/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go +++ b/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go @@ -12,6 +12,7 @@ import ( "sort" "strings" + "honnef.co/go/tools/lint" "honnef.co/go/tools/ssa" ) @@ -291,7 +292,7 @@ func BuildGraph(f *ssa.Function) *Graph { case *ssa.Call: if static := ins.Common().StaticCallee(); static != nil { if fn, ok := static.Object().(*types.Func); ok { - switch fn.FullName() { + switch lint.FuncName(fn) { case "bytes.Index", "bytes.IndexAny", "bytes.IndexByte", "bytes.IndexFunc", "bytes.IndexRune", "bytes.LastIndex", "bytes.LastIndexAny", "bytes.LastIndexByte", "bytes.LastIndexFunc", diff --git a/vendor/honnef.co/go/tools/stylecheck/doc.go b/vendor/honnef.co/go/tools/stylecheck/doc.go new file mode 100644 index 00000000..efc65092 --- /dev/null +++ b/vendor/honnef.co/go/tools/stylecheck/doc.go @@ -0,0 +1,170 @@ +package stylecheck + +var docST1000 = `Incorrect or missing package comment + +Packages must have a package comment that is formatted according to +the guidelines laid out in +https://github.com/golang/go/wiki/CodeReviewComments#package-comments. + +Available since + 2019.1, non-default +` + +var docST1001 = `Dot imports are discouraged + +Dot imports that aren't in external test packages are discouraged. + +The dot_import_whitelist option can be used to whitelist certain +imports. + +Quoting Go Code Review Comments: + + The import . form can be useful in tests that, due to circular + dependencies, cannot be made part of the package being tested: + + package foo_test + + import ( + "bar/testutil" // also imports "foo" + . "foo" + ) + + In this case, the test file cannot be in package foo because it + uses bar/testutil, which imports foo. So we use the 'import .' + form to let the file pretend to be part of package foo even though + it is not. Except for this one case, do not use import . in your + programs. It makes the programs much harder to read because it is + unclear whether a name like Quux is a top-level identifier in the + current package or in an imported package. 
+ +Available since + 2019.1 + +Options + dot_import_whitelist +` + +var docST1003 = `Poorly chosen identifier + +Identifiers, such as variable and package names, follow certain rules. + +See the following links for details: + + http://golang.org/doc/effective_go.html#package-names + http://golang.org/doc/effective_go.html#mixed-caps + https://github.com/golang/go/wiki/CodeReviewComments#initialisms + https://github.com/golang/go/wiki/CodeReviewComments#variable-names + +Available since + 2019.1, non-default + +Options + initialisms +` + +var docST1005 = `Incorrectly formatted error string + +Error strings follow a set of guidelines to ensure uniformity and good +composability. + +Quoting Go Code Review Comments: + + Error strings should not be capitalized (unless beginning with + proper nouns or acronyms) or end with punctuation, since they are + usually printed following other context. That is, use + fmt.Errorf("something bad") not fmt.Errorf("Something bad"), so + that log.Printf("Reading %s: %v", filename, err) formats without a + spurious capital letter mid-message. + +Available since + 2019.1 +` + +var docST1006 = `Poorly chosen receiver name + +Quoting Go Code Review Comments: + + The name of a method's receiver should be a reflection of its + identity; often a one or two letter abbreviation of its type + suffices (such as "c" or "cl" for "Client"). Don't use generic + names such as "me", "this" or "self", identifiers typical of + object-oriented languages that place more emphasis on methods as + opposed to functions. The name need not be as descriptive as that + of a method argument, as its role is obvious and serves no + documentary purpose. It can be very short as it will appear on + almost every line of every method of the type; familiarity admits + brevity. Be consistent, too: if you call the receiver "c" in one + method, don't call it "cl" in another. + +Available since + 2019.1 +` + +var docST1008 = `A function's error value should be its last return value + +A function's error value should be its last return value. + +Available since + 2019.1 +` + +var docST1011 = `Poorly chosen name for variable of type time.Duration + +time.Duration values represent an amount of time, which is represented +as a count of nanoseconds. An expression like 5 * time.Microsecond +yields the value 5000. It is therefore not appropriate to suffix a +variable of type time.Duration with any time unit, such as Msec or +Milli. + +Available since + 2019.1 +` + +var docST1012 = `Poorly chosen name for error variable + +Error variables that are part of an API should be called errFoo or +ErrFoo. + +Available since + 2019.1 +` + +var docST1013 = `Should use constants for HTTP error codes, not magic numbers + +HTTP has a tremendous number of status codes. While some of those are +well known (200, 400, 404, 500), most of them are not. The net/http +package provides constants for all status codes that are part of the +various specifications. It is recommended to use these constants +instead of hard-coding magic numbers, to vastly improve the +readability of your code. 
+ +Available since + 2019.1 + +Options + http_status_code_whitelist +` + +var docST1015 = `A switch's default case should be the first or last case + +Available since + 2019.1 +` + +var docST1016 = `Use consistent method receiver names + +Available since + 2019.1, non-default +` + +var docST1017 = `Don't use Yoda conditions + +Available since + Unreleased +` + +var docST1018 = `Avoid zero-width and control characters in string literals + +Available since + Unreleased +` diff --git a/vendor/honnef.co/go/tools/stylecheck/lint.go b/vendor/honnef.co/go/tools/stylecheck/lint.go index ee7efa45..120d97f0 100644 --- a/vendor/honnef.co/go/tools/stylecheck/lint.go +++ b/vendor/honnef.co/go/tools/stylecheck/lint.go @@ -32,23 +32,24 @@ func (c *Checker) Init(prog *lint.Program) {} func (c *Checker) Checks() []lint.Check { return []lint.Check{ - {ID: "ST1000", FilterGenerated: false, Fn: c.CheckPackageComment}, - {ID: "ST1001", FilterGenerated: true, Fn: c.CheckDotImports}, - // {ID: "ST1002", FilterGenerated: true, Fn: c.CheckBlankImports}, - {ID: "ST1003", FilterGenerated: true, Fn: c.CheckNames}, - // {ID: "ST1004", FilterGenerated: false, Fn: nil, }, - {ID: "ST1005", FilterGenerated: false, Fn: c.CheckErrorStrings}, - {ID: "ST1006", FilterGenerated: false, Fn: c.CheckReceiverNames}, - // {ID: "ST1007", FilterGenerated: true, Fn: c.CheckIncDec}, - {ID: "ST1008", FilterGenerated: false, Fn: c.CheckErrorReturn}, - // {ID: "ST1009", FilterGenerated: false, Fn: c.CheckUnexportedReturn}, - // {ID: "ST1010", FilterGenerated: false, Fn: c.CheckContextFirstArg}, - {ID: "ST1011", FilterGenerated: false, Fn: c.CheckTimeNames}, - {ID: "ST1012", FilterGenerated: false, Fn: c.CheckErrorVarNames}, - {ID: "ST1013", FilterGenerated: true, Fn: c.CheckHTTPStatusCodes}, - {ID: "ST1015", FilterGenerated: true, Fn: c.CheckDefaultCaseOrder}, - {ID: "ST1016", FilterGenerated: false, Fn: c.CheckReceiverNamesIdentical}, - {ID: "ST1017", FilterGenerated: true, Fn: c.CheckYodaConditions}, + {ID: "ST1000", FilterGenerated: false, Fn: c.CheckPackageComment, Doc: docST1000}, + {ID: "ST1001", FilterGenerated: true, Fn: c.CheckDotImports, Doc: docST1001}, + // {ID: "ST1002", FilterGenerated: true, Fn: c.CheckBlankImports, Doc: docST1002}, + {ID: "ST1003", FilterGenerated: true, Fn: c.CheckNames, Doc: docST1003}, + // {ID: "ST1004", FilterGenerated: false, Fn: nil, , Doc: docST1004}, + {ID: "ST1005", FilterGenerated: false, Fn: c.CheckErrorStrings, Doc: docST1005}, + {ID: "ST1006", FilterGenerated: false, Fn: c.CheckReceiverNames, Doc: docST1006}, + // {ID: "ST1007", FilterGenerated: true, Fn: c.CheckIncDec, Doc: docST1007}, + {ID: "ST1008", FilterGenerated: false, Fn: c.CheckErrorReturn, Doc: docST1008}, + // {ID: "ST1009", FilterGenerated: false, Fn: c.CheckUnexportedReturn, Doc: docST1009}, + // {ID: "ST1010", FilterGenerated: false, Fn: c.CheckContextFirstArg, Doc: docST1010}, + {ID: "ST1011", FilterGenerated: false, Fn: c.CheckTimeNames, Doc: docST1011}, + {ID: "ST1012", FilterGenerated: false, Fn: c.CheckErrorVarNames, Doc: docST1012}, + {ID: "ST1013", FilterGenerated: true, Fn: c.CheckHTTPStatusCodes, Doc: docST1013}, + {ID: "ST1015", FilterGenerated: true, Fn: c.CheckDefaultCaseOrder, Doc: docST1015}, + {ID: "ST1016", FilterGenerated: false, Fn: c.CheckReceiverNamesIdentical, Doc: docST1016}, + {ID: "ST1017", FilterGenerated: true, Fn: c.CheckYodaConditions, Doc: docST1017}, + {ID: "ST1018", FilterGenerated: false, Fn: c.CheckInvisibleCharacters, Doc: docST1018}, } } @@ -61,60 +62,56 @@ func (c *Checker) 
CheckPackageComment(j *lint.Job) { // which case they get appended. But that doesn't happen a lot in // the real world. - for _, pkg := range j.Program.InitialPackages { - if pkg.Name == "main" { + if j.Pkg.Name == "main" { + return + } + hasDocs := false + for _, f := range j.Pkg.Syntax { + if IsInTest(j, f) { continue } - hasDocs := false - for _, f := range pkg.Syntax { + if f.Doc != nil && len(f.Doc.List) > 0 { + hasDocs = true + prefix := "Package " + f.Name.Name + " " + if !strings.HasPrefix(strings.TrimSpace(f.Doc.Text()), prefix) { + j.Errorf(f.Doc, `package comment should be of the form "%s..."`, prefix) + } + f.Doc.Text() + } + } + + if !hasDocs { + for _, f := range j.Pkg.Syntax { if IsInTest(j, f) { continue } - if f.Doc != nil && len(f.Doc.List) > 0 { - hasDocs = true - prefix := "Package " + f.Name.Name + " " - if !strings.HasPrefix(strings.TrimSpace(f.Doc.Text()), prefix) { - j.Errorf(f.Doc, `package comment should be of the form "%s..."`, prefix) - } - f.Doc.Text() - } - } - - if !hasDocs { - for _, f := range pkg.Syntax { - if IsInTest(j, f) { - continue - } - j.Errorf(f, "at least one file in a package should have a package comment") - } + j.Errorf(f, "at least one file in a package should have a package comment") } } } func (c *Checker) CheckDotImports(j *lint.Job) { - for _, pkg := range j.Program.InitialPackages { - for _, f := range pkg.Syntax { - imports: - for _, imp := range f.Imports { - path := imp.Path.Value - path = path[1 : len(path)-1] - for _, w := range pkg.Config.DotImportWhitelist { - if w == path { - continue imports - } + for _, f := range j.Pkg.Syntax { + imports: + for _, imp := range f.Imports { + path := imp.Path.Value + path = path[1 : len(path)-1] + for _, w := range j.Pkg.Config.DotImportWhitelist { + if w == path { + continue imports } + } - if imp.Name != nil && imp.Name.Name == "." && !IsInTest(j, f) { - j.Errorf(imp, "should not use dot imports") - } + if imp.Name != nil && imp.Name.Name == "." && !IsInTest(j, f) { + j.Errorf(imp, "should not use dot imports") } } } } func (c *Checker) CheckBlankImports(j *lint.Job) { - fset := j.Program.Fset() - for _, f := range j.Program.Files { + fset := j.Pkg.Fset + for _, f := range j.Pkg.Syntax { if IsInMain(j, f) || IsInTest(j, f) { continue } @@ -177,14 +174,14 @@ func (c *Checker) CheckIncDec(j *lint.Job) { // x += 2 // ... // x += 1 - fn := func(node ast.Node) bool { - assign, ok := node.(*ast.AssignStmt) - if !ok || (assign.Tok != token.ADD_ASSIGN && assign.Tok != token.SUB_ASSIGN) { - return true + fn := func(node ast.Node) { + assign := node.(*ast.AssignStmt) + if assign.Tok != token.ADD_ASSIGN && assign.Tok != token.SUB_ASSIGN { + return } if (len(assign.Lhs) != 1 || len(assign.Rhs) != 1) || !IsIntLiteral(assign.Rhs[0], "1") { - return true + return } suffix := "" @@ -196,16 +193,13 @@ func (c *Checker) CheckIncDec(j *lint.Job) { } j.Errorf(assign, "should replace %s with %s%s", Render(j, assign), Render(j, assign.Lhs[0]), suffix) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) } func (c *Checker) CheckErrorReturn(j *lint.Job) { fnLoop: - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { sig := fn.Type().(*types.Signature) rets := sig.Results() if rets == nil || rets.Len() < 2 { @@ -229,7 +223,7 @@ fnLoop: // CheckUnexportedReturn checks that exported functions on exported // types do not return unexported types. 
func (c *Checker) CheckUnexportedReturn(j *lint.Job) { - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { if fn.Synthetic != "" || fn.Parent() != nil { continue } @@ -252,23 +246,21 @@ func (c *Checker) CheckUnexportedReturn(j *lint.Job) { } func (c *Checker) CheckReceiverNames(j *lint.Job) { - for _, pkg := range j.Program.InitialPackages { - for _, m := range pkg.SSA.Members { - if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { - ms := typeutil.IntuitiveMethodSet(T.Type(), nil) - for _, sel := range ms { - fn := sel.Obj().(*types.Func) - recv := fn.Type().(*types.Signature).Recv() - if Dereference(recv.Type()) != T.Type() { - // skip embedded methods - continue - } - if recv.Name() == "self" || recv.Name() == "this" { - j.Errorf(recv, `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`) - } - if recv.Name() == "_" { - j.Errorf(recv, "receiver name should not be an underscore, omit the name if it is unused") - } + for _, m := range j.Pkg.SSA.Members { + if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { + ms := typeutil.IntuitiveMethodSet(T.Type(), nil) + for _, sel := range ms { + fn := sel.Obj().(*types.Func) + recv := fn.Type().(*types.Signature).Recv() + if Dereference(recv.Type()) != T.Type() { + // skip embedded methods + continue + } + if recv.Name() == "self" || recv.Name() == "this" { + j.Errorf(recv, `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`) + } + if recv.Name() == "_" { + j.Errorf(recv, "receiver name should not be an underscore, omit the name if it is unused") } } } @@ -276,37 +268,35 @@ func (c *Checker) CheckReceiverNames(j *lint.Job) { } func (c *Checker) CheckReceiverNamesIdentical(j *lint.Job) { - for _, pkg := range j.Program.InitialPackages { - for _, m := range pkg.SSA.Members { - names := map[string]int{} + for _, m := range j.Pkg.SSA.Members { + names := map[string]int{} - var firstFn *types.Func - if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { - ms := typeutil.IntuitiveMethodSet(T.Type(), nil) - for _, sel := range ms { - fn := sel.Obj().(*types.Func) - recv := fn.Type().(*types.Signature).Recv() - if Dereference(recv.Type()) != T.Type() { - // skip embedded methods - continue - } - if firstFn == nil { - firstFn = fn - } - if recv.Name() != "" && recv.Name() != "_" { - names[recv.Name()]++ - } + var firstFn *types.Func + if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { + ms := typeutil.IntuitiveMethodSet(T.Type(), nil) + for _, sel := range ms { + fn := sel.Obj().(*types.Func) + recv := fn.Type().(*types.Signature).Recv() + if Dereference(recv.Type()) != T.Type() { + // skip embedded methods + continue + } + if firstFn == nil { + firstFn = fn + } + if recv.Name() != "" && recv.Name() != "_" { + names[recv.Name()]++ } } + } - if len(names) > 1 { - var seen []string - for name, count := range names { - seen = append(seen, fmt.Sprintf("%dx %q", count, name)) - } - - j.Errorf(firstFn, "methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", ")) + if len(names) > 1 { + var seen []string + for name, count := range names { + seen = append(seen, fmt.Sprintf("%dx %q", count, name)) } + + j.Errorf(firstFn, "methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", ")) } } } @@ -315,7 +305,7 @@ func (c *Checker) CheckContextFirstArg(j *lint.Job) { // TODO(dh): this check doesn't apply to 
test helpers. Example from the stdlib: // func helperCommandContext(t *testing.T, ctx context.Context, s ...string) (cmd *exec.Cmd) { fnLoop: - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { if fn.Synthetic != "" || fn.Parent() != nil { continue } @@ -337,17 +327,19 @@ fnLoop: } func (c *Checker) CheckErrorStrings(j *lint.Job) { - fnNames := map[*ssa.Package]map[string]bool{} - for _, fn := range j.Program.InitialFunctions { - m := fnNames[fn.Package()] - if m == nil { - m = map[string]bool{} - fnNames[fn.Package()] = m + objNames := map[*ssa.Package]map[string]bool{} + ssapkg := j.Pkg.SSA + objNames[ssapkg] = map[string]bool{} + for _, m := range ssapkg.Members { + if typ, ok := m.(*ssa.Type); ok { + objNames[ssapkg][typ.Name()] = true } - m[fn.Name()] = true + } + for _, fn := range j.Pkg.InitialFunctions { + objNames[fn.Package()][fn.Name()] = true } - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { if IsInTest(j, fn) { // We don't care about malformed error messages in tests; // they're usually for direct human consumption, not part @@ -399,8 +391,8 @@ func (c *Checker) CheckErrorStrings(j *lint.Job) { } word = strings.TrimRightFunc(word, func(r rune) bool { return unicode.IsPunct(r) }) - if fnNames[fn.Package()][word] { - // Word is probably the name of a function in this package + if objNames[fn.Package()][word] { + // Word is probably the name of a function or type in this package continue } // First word in error starts with a capital @@ -437,15 +429,15 @@ func (c *Checker) CheckTimeNames(j *lint.Job) { } } } - for _, f := range j.Program.Files { + for _, f := range j.Pkg.Syntax { ast.Inspect(f, func(node ast.Node) bool { switch node := node.(type) { case *ast.ValueSpec: - T := TypeOf(j, node.Type) + T := j.Pkg.TypesInfo.TypeOf(node.Type) fn(T, node.Names) case *ast.FieldList: for _, field := range node.List { - T := TypeOf(j, field.Type) + T := j.Pkg.TypesInfo.TypeOf(field.Type) fn(T, field.Names) } } @@ -455,7 +447,7 @@ func (c *Checker) CheckTimeNames(j *lint.Job) { } func (c *Checker) CheckErrorVarNames(j *lint.Job) { - for _, f := range j.Program.Files { + for _, f := range j.Pkg.Syntax { for _, decl := range f.Decls { gen, ok := decl.(*ast.GenDecl) if !ok || gen.Tok != token.VAR { @@ -549,61 +541,56 @@ var httpStatusCodes = map[int]string{ } func (c *Checker) CheckHTTPStatusCodes(j *lint.Job) { - for _, pkg := range j.Program.InitialPackages { - whitelist := map[string]bool{} - for _, code := range pkg.Config.HTTPStatusCodeWhitelist { - whitelist[code] = true - } - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - - var arg int - switch CallNameAST(j, call) { - case "net/http.Error": - arg = 2 - case "net/http.Redirect": - arg = 3 - case "net/http.StatusText": - arg = 0 - case "net/http.RedirectHandler": - arg = 1 - default: - return true - } - lit, ok := call.Args[arg].(*ast.BasicLit) - if !ok { - return true - } - if whitelist[lit.Value] { - return true - } - - n, err := strconv.Atoi(lit.Value) - if err != nil { - return true - } - s, ok := httpStatusCodes[n] - if !ok { - return true - } - j.Errorf(lit, "should use constant http.%s instead of numeric literal %d", s, n) + whitelist := map[string]bool{} + for _, code := range j.Pkg.Config.HTTPStatusCodeWhitelist { + whitelist[code] = true + } + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { return true } - for _, f := range pkg.Syntax { - ast.Inspect(f, 
fn) + + var arg int + switch CallNameAST(j, call) { + case "net/http.Error": + arg = 2 + case "net/http.Redirect": + arg = 3 + case "net/http.StatusText": + arg = 0 + case "net/http.RedirectHandler": + arg = 1 + default: + return true } + lit, ok := call.Args[arg].(*ast.BasicLit) + if !ok { + return true + } + if whitelist[lit.Value] { + return true + } + + n, err := strconv.Atoi(lit.Value) + if err != nil { + return true + } + s, ok := httpStatusCodes[n] + if !ok { + return true + } + j.Errorf(lit, "should use constant http.%s instead of numeric literal %d", s, n) + return true + } + for _, f := range j.Pkg.Syntax { + ast.Inspect(f, fn) } } func (c *Checker) CheckDefaultCaseOrder(j *lint.Job) { - fn := func(node ast.Node) bool { - stmt, ok := node.(*ast.SwitchStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + stmt := node.(*ast.SwitchStmt) list := stmt.Body.List for i, c := range list { if c.(*ast.CaseClause).List == nil && i != 0 && i != len(list)-1 { @@ -611,33 +598,41 @@ func (c *Checker) CheckDefaultCaseOrder(j *lint.Job) { break } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.SwitchStmt)(nil)}, fn) } func (c *Checker) CheckYodaConditions(j *lint.Job) { - fn := func(node ast.Node) bool { - cond, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + cond := node.(*ast.BinaryExpr) if cond.Op != token.EQL && cond.Op != token.NEQ { - return true + return } if _, ok := cond.X.(*ast.BasicLit); !ok { - return true + return } if _, ok := cond.Y.(*ast.BasicLit); ok { // Don't flag lit == lit conditions, just in case - return true + return } j.Errorf(cond, "don't use Yoda conditions") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) +} + +func (c *Checker) CheckInvisibleCharacters(j *lint.Job) { + fn := func(node ast.Node) { + lit := node.(*ast.BasicLit) + if lit.Kind != token.STRING { + return + } + for _, r := range lit.Value { + if unicode.Is(unicode.Cf, r) { + j.Errorf(lit, "string literal contains the Unicode format character %U, consider using the %q escape sequence", r, r) + } else if unicode.Is(unicode.Cc, r) && r != '\n' && r != '\t' && r != '\r' { + j.Errorf(lit, "string literal contains the Unicode control character %U, consider using the %q escape sequence", r, r) + } + } + } + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BasicLit)(nil)}, fn) } diff --git a/vendor/honnef.co/go/tools/stylecheck/names.go b/vendor/honnef.co/go/tools/stylecheck/names.go index e855590f..1c0718fd 100644 --- a/vendor/honnef.co/go/tools/stylecheck/names.go +++ b/vendor/honnef.co/go/tools/stylecheck/names.go @@ -71,109 +71,107 @@ func (c *Checker) CheckNames(j *lint.Job) { } } - for _, pkg := range j.Program.InitialPackages { - initialisms := make(map[string]bool, len(pkg.Config.Initialisms)) - for _, word := range pkg.Config.Initialisms { - initialisms[word] = true + initialisms := make(map[string]bool, len(j.Pkg.Config.Initialisms)) + for _, word := range j.Pkg.Config.Initialisms { + initialisms[word] = true + } + for _, f := range j.Pkg.Syntax { + // Package names need slightly different handling than other names. 
+ if !strings.HasSuffix(f.Name.Name, "_test") && strings.Contains(f.Name.Name, "_") { + j.Errorf(f, "should not use underscores in package names") + } + if strings.IndexFunc(f.Name.Name, unicode.IsUpper) != -1 { + j.Errorf(f, "should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name)) } - for _, f := range pkg.Syntax { - // Package names need slightly different handling than other names. - if !strings.HasSuffix(f.Name.Name, "_test") && strings.Contains(f.Name.Name, "_") { - j.Errorf(f, "should not use underscores in package names") - } - if strings.IndexFunc(f.Name.Name, unicode.IsUpper) != -1 { - j.Errorf(f, "should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name)) - } - ast.Inspect(f, func(node ast.Node) bool { - switch v := node.(type) { - case *ast.AssignStmt: - if v.Tok != token.DEFINE { - return true - } - for _, exp := range v.Lhs { - if id, ok := exp.(*ast.Ident); ok { - check(id, "var", initialisms) - } - } - case *ast.FuncDecl: - // Functions with no body are defined elsewhere (in - // assembly, or via go:linkname). These are likely to - // be something very low level (such as the runtime), - // where our rules don't apply. - if v.Body == nil { - return true + ast.Inspect(f, func(node ast.Node) bool { + switch v := node.(type) { + case *ast.AssignStmt: + if v.Tok != token.DEFINE { + return true + } + for _, exp := range v.Lhs { + if id, ok := exp.(*ast.Ident); ok { + check(id, "var", initialisms) } + } + case *ast.FuncDecl: + // Functions with no body are defined elsewhere (in + // assembly, or via go:linkname). These are likely to + // be something very low level (such as the runtime), + // where our rules don't apply. + if v.Body == nil { + return true + } - if IsInTest(j, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { - return true - } + if IsInTest(j, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { + return true + } - thing := "func" - if v.Recv != nil { - thing = "method" - } + thing := "func" + if v.Recv != nil { + thing = "method" + } - if !isTechnicallyExported(v) { - check(v.Name, thing, initialisms) - } + if !isTechnicallyExported(v) { + check(v.Name, thing, initialisms) + } - checkList(v.Type.Params, thing+" parameter", initialisms) - checkList(v.Type.Results, thing+" result", initialisms) - case *ast.GenDecl: - if v.Tok == token.IMPORT { - return true - } - var thing string - switch v.Tok { - case token.CONST: - thing = "const" - case token.TYPE: - thing = "type" - case token.VAR: - thing = "var" - } - for _, spec := range v.Specs { - switch s := spec.(type) { - case *ast.TypeSpec: - check(s.Name, thing, initialisms) - case *ast.ValueSpec: - for _, id := range s.Names { - check(id, thing, initialisms) - } - } - } - case *ast.InterfaceType: - // Do not check interface method names. - // They are often constrainted by the method names of concrete types. 
- for _, x := range v.Methods.List { - ft, ok := x.Type.(*ast.FuncType) - if !ok { // might be an embedded interface name - continue - } - checkList(ft.Params, "interface method parameter", initialisms) - checkList(ft.Results, "interface method result", initialisms) - } - case *ast.RangeStmt: - if v.Tok == token.ASSIGN { - return true - } - if id, ok := v.Key.(*ast.Ident); ok { - check(id, "range var", initialisms) - } - if id, ok := v.Value.(*ast.Ident); ok { - check(id, "range var", initialisms) - } - case *ast.StructType: - for _, f := range v.Fields.List { - for _, id := range f.Names { - check(id, "struct field", initialisms) + checkList(v.Type.Params, thing+" parameter", initialisms) + checkList(v.Type.Results, thing+" result", initialisms) + case *ast.GenDecl: + if v.Tok == token.IMPORT { + return true + } + var thing string + switch v.Tok { + case token.CONST: + thing = "const" + case token.TYPE: + thing = "type" + case token.VAR: + thing = "var" + } + for _, spec := range v.Specs { + switch s := spec.(type) { + case *ast.TypeSpec: + check(s.Name, thing, initialisms) + case *ast.ValueSpec: + for _, id := range s.Names { + check(id, thing, initialisms) } } } - return true - }) - } + case *ast.InterfaceType: + // Do not check interface method names. + // They are often constrainted by the method names of concrete types. + for _, x := range v.Methods.List { + ft, ok := x.Type.(*ast.FuncType) + if !ok { // might be an embedded interface name + continue + } + checkList(ft.Params, "interface method parameter", initialisms) + checkList(ft.Results, "interface method result", initialisms) + } + case *ast.RangeStmt: + if v.Tok == token.ASSIGN { + return true + } + if id, ok := v.Key.(*ast.Ident); ok { + check(id, "range var", initialisms) + } + if id, ok := v.Value.(*ast.Ident); ok { + check(id, "range var", initialisms) + } + case *ast.StructType: + for _, f := range v.Fields.List { + for _, id := range f.Names { + check(id, "struct field", initialisms) + } + } + } + return true + }) } } diff --git a/vendor/honnef.co/go/tools/unused/implements.go b/vendor/honnef.co/go/tools/unused/implements.go index 78a54563..835baac6 100644 --- a/vendor/honnef.co/go/tools/unused/implements.go +++ b/vendor/honnef.co/go/tools/unused/implements.go @@ -37,43 +37,46 @@ func sameId(obj types.Object, pkg *types.Package, name string) bool { return pkg.Path() == obj.Pkg().Path() } -func (c *Checker) implements(V types.Type, T *types.Interface) bool { +func (g *Graph) implements(V types.Type, T *types.Interface, msV *types.MethodSet) ([]*types.Selection, bool) { // fast path for common case if T.Empty() { - return true + return nil, true } if ityp, _ := V.Underlying().(*types.Interface); ityp != nil { + // TODO(dh): is this code reachable? for i := 0; i < T.NumMethods(); i++ { m := T.Method(i) _, obj := lookupMethod(ityp, m.Pkg(), m.Name()) switch { case obj == nil: - return false + return nil, false case !types.Identical(obj.Type(), m.Type()): - return false + return nil, false } } - return true + return nil, true } // A concrete type implements T if it implements all methods of T. 
- ms := c.msCache.MethodSet(V) + var sels []*types.Selection for i := 0; i < T.NumMethods(); i++ { m := T.Method(i) - sel := ms.Lookup(m.Pkg(), m.Name()) + sel := msV.Lookup(m.Pkg(), m.Name()) if sel == nil { - return false + return nil, false } f, _ := sel.Obj().(*types.Func) if f == nil { - return false + return nil, false } if !types.Identical(f.Type(), m.Type()) { - return false + return nil, false } + + sels = append(sels, sel) } - return true + return sels, true } diff --git a/vendor/honnef.co/go/tools/unused/unused.go b/vendor/honnef.co/go/tools/unused/unused.go index b1dbd6f5..f69bddae 100644 --- a/vendor/honnef.co/go/tools/unused/unused.go +++ b/vendor/honnef.co/go/tools/unused/unused.go @@ -1,4 +1,4 @@ -package unused // import "honnef.co/go/tools/unused" +package unused import ( "fmt" @@ -6,32 +6,148 @@ import ( "go/token" "go/types" "io" - "path/filepath" "strings" + "sync" + "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/lint" - . "honnef.co/go/tools/lint/lintdsl" - - "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/types/typeutil" + "honnef.co/go/tools/lint/lintdsl" + "honnef.co/go/tools/ssa" ) -func NewLintChecker(c *Checker) *LintChecker { - l := &LintChecker{ - c: c, +// TODO(dh): conversions between structs mark fields as used, but the +// conversion itself isn't part of that subgraph. even if the function +// containing the conversion is unused, the fields will be marked as +// used. + +// TODO(dh): we cannot observe function calls in assembly files. + +/* + +- packages use: + - (1.1) exported named types (unless in package main) + - (1.2) exported functions (unless in package main) + - (1.3) exported variables (unless in package main) + - (1.4) exported constants (unless in package main) + - (1.5) init functions + - (1.6) functions exported to cgo + - (1.7) the main function iff in the main package + - (1.8) symbols linked via go:linkname + +- named types use: + - (2.1) exported methods + - (2.2) the type they're based on + - (2.3) all their aliases. we can't easily track uses of aliases + because go/types turns them into uses of the aliased types. assume + that if a type is used, so are all of its aliases. + +- variables and constants use: + - their types + +- functions use: + - (4.1) all their arguments, return parameters and receivers + - (4.2) anonymous functions defined beneath them + - (4.3) closures and bound methods. + this implements a simplified model where a function is used merely by being referenced, even if it is never called. + that way we don't have to keep track of closures escaping functions. + - (4.4) functions they return. we assume that someone else will call the returned function + - (4.5) functions/interface methods they call + - types they instantiate or convert to + - (4.7) fields they access + - (4.8) types of all instructions + - (4.9) package-level variables they assign to iff in tests (sinks for benchmarks) + +- conversions use: + - (5.1) when converting between two equivalent structs, the fields in + either struct use each other. the fields are relevant for the + conversion, but only if the fields are also accessed outside the + conversion. + - (5.2) when converting to or from unsafe.Pointer, mark all fields as used. 
+ +- structs use: + - (6.1) fields of type NoCopy sentinel + - (6.2) exported fields + - (6.3) embedded fields that help implement interfaces (either fully implements it, or contributes required methods) (recursively) + - (6.4) embedded fields that have exported methods (recursively) + - (6.5) embedded structs that have exported fields (recursively) + +- (7.1) field accesses use fields +- (7.2) fields use their types + +- (8.0) How we handle interfaces: + - (8.1) We do not technically care about interfaces that only consist of + exported methods. Exported methods on concrete types are always + marked as used. + - Any concrete type implements all known interfaces. Even if it isn't + assigned to any interfaces in our code, the user may receive a value + of the type and expect to pass it back to us through an interface. + + Concrete types use their methods that implement interfaces. If the + type is used, it uses those methods. Otherwise, it doesn't. This + way, types aren't incorrectly marked reachable through the edge + from method to type. + + - (8.3) All interface methods are marked as used, even if they never get + called. This is to accomodate sum types (unexported interface + method that must exist but never gets called.) + + - (8.4) All embedded interfaces are marked as used. This is an + extension of 8.3, but we have to explicitly track embedded + interfaces because in a chain C->B->A, B wouldn't be marked as + used by 8.3 just because it contributes A's methods to C. + +- Inherent uses: + - thunks and other generated wrappers call the real function + - (9.2) variables use their types + - (9.3) types use their underlying and element types + - (9.4) conversions use the type they convert to + - (9.5) instructions use their operands + - (9.6) instructions use their operands' types + - (9.7) variable _reads_ use variables, writes do not, except in tests + - (9.8) runtime functions that may be called from user code via the compiler + + +- const groups: + (10.1) if one constant out of a block of constants is used, mark all + of them used. a lot of the time, unused constants exist for the sake + of completeness. See also + https://github.com/dominikh/go-tools/issues/365 + + + +- Differences in whole program mode: + - (e1) all packages share a single graph + - (e2) types aim to implement all exported interfaces from all packages + - (e3) exported identifiers aren't automatically used. for fields and + methods this poses extra issues due to reflection. We assume + that all exported fields are used. We also maintain a list of + known reflection-based method callers. 
+ +*/ + +func assert(b bool) { + if !b { + panic("failed assertion") } - return l } -type LintChecker struct { - c *Checker +type Checker struct { + WholeProgram bool + Debug io.Writer + + interfaces []*types.Interface + initialPackages []*lint.Pkg + scopes map[*types.Scope]*ssa.Function + + seenMu sync.Mutex + seen map[token.Position]struct{} + out []types.Object } -func (*LintChecker) Name() string { return "unused" } -func (*LintChecker) Prefix() string { return "U" } +func (*Checker) Name() string { return "unused" } +func (*Checker) Prefix() string { return "U" } -func (l *LintChecker) Init(*lint.Program) {} -func (l *LintChecker) Checks() []lint.Check { +func (l *Checker) Checks() []lint.Check { return []lint.Check{ {ID: "U1000", FilterGenerated: true, Fn: l.Lint}, } @@ -51,222 +167,1527 @@ func typString(obj types.Object) string { case *types.TypeName: return "type" default: - // log.Printf("%T", obj) return "identifier" } } -func (l *LintChecker) Lint(j *lint.Job) { - unused := l.c.Check(j.Program) +// /usr/lib/go/src/runtime/proc.go:433:6: func badmorestackg0 is unused (U1000) + +// Functions defined in the Go runtime that may be called through +// compiler magic or via assembly. +var runtimeFuncs = map[string]bool{ + // The first part of the list is copied from + // cmd/compile/internal/gc/builtin.go, var runtimeDecls + "newobject": true, + "panicindex": true, + "panicslice": true, + "panicdivide": true, + "panicmakeslicelen": true, + "throwinit": true, + "panicwrap": true, + "gopanic": true, + "gorecover": true, + "goschedguarded": true, + "printbool": true, + "printfloat": true, + "printint": true, + "printhex": true, + "printuint": true, + "printcomplex": true, + "printstring": true, + "printpointer": true, + "printiface": true, + "printeface": true, + "printslice": true, + "printnl": true, + "printsp": true, + "printlock": true, + "printunlock": true, + "concatstring2": true, + "concatstring3": true, + "concatstring4": true, + "concatstring5": true, + "concatstrings": true, + "cmpstring": true, + "intstring": true, + "slicebytetostring": true, + "slicebytetostringtmp": true, + "slicerunetostring": true, + "stringtoslicebyte": true, + "stringtoslicerune": true, + "slicecopy": true, + "slicestringcopy": true, + "decoderune": true, + "countrunes": true, + "convI2I": true, + "convT16": true, + "convT32": true, + "convT64": true, + "convTstring": true, + "convTslice": true, + "convT2E": true, + "convT2Enoptr": true, + "convT2I": true, + "convT2Inoptr": true, + "assertE2I": true, + "assertE2I2": true, + "assertI2I": true, + "assertI2I2": true, + "panicdottypeE": true, + "panicdottypeI": true, + "panicnildottype": true, + "ifaceeq": true, + "efaceeq": true, + "fastrand": true, + "makemap64": true, + "makemap": true, + "makemap_small": true, + "mapaccess1": true, + "mapaccess1_fast32": true, + "mapaccess1_fast64": true, + "mapaccess1_faststr": true, + "mapaccess1_fat": true, + "mapaccess2": true, + "mapaccess2_fast32": true, + "mapaccess2_fast64": true, + "mapaccess2_faststr": true, + "mapaccess2_fat": true, + "mapassign": true, + "mapassign_fast32": true, + "mapassign_fast32ptr": true, + "mapassign_fast64": true, + "mapassign_fast64ptr": true, + "mapassign_faststr": true, + "mapiterinit": true, + "mapdelete": true, + "mapdelete_fast32": true, + "mapdelete_fast64": true, + "mapdelete_faststr": true, + "mapiternext": true, + "mapclear": true, + "makechan64": true, + "makechan": true, + "chanrecv1": true, + "chanrecv2": true, + "chansend1": true, + "closechan": true, + "writeBarrier": 
true, + "typedmemmove": true, + "typedmemclr": true, + "typedslicecopy": true, + "selectnbsend": true, + "selectnbrecv": true, + "selectnbrecv2": true, + "selectsetpc": true, + "selectgo": true, + "block": true, + "makeslice": true, + "makeslice64": true, + "growslice": true, + "memmove": true, + "memclrNoHeapPointers": true, + "memclrHasPointers": true, + "memequal": true, + "memequal8": true, + "memequal16": true, + "memequal32": true, + "memequal64": true, + "memequal128": true, + "int64div": true, + "uint64div": true, + "int64mod": true, + "uint64mod": true, + "float64toint64": true, + "float64touint64": true, + "float64touint32": true, + "int64tofloat64": true, + "uint64tofloat64": true, + "uint32tofloat64": true, + "complex128div": true, + "racefuncenter": true, + "racefuncenterfp": true, + "racefuncexit": true, + "raceread": true, + "racewrite": true, + "racereadrange": true, + "racewriterange": true, + "msanread": true, + "msanwrite": true, + "x86HasPOPCNT": true, + "x86HasSSE41": true, + "arm64HasATOMICS": true, + + // The second part of the list is extracted from assembly code in + // the standard library, with the exception of the runtime package itself + "abort": true, + "aeshashbody": true, + "args": true, + "asminit": true, + "badctxt": true, + "badmcall2": true, + "badmcall": true, + "badmorestackg0": true, + "badmorestackgsignal": true, + "badsignal2": true, + "callbackasm1": true, + "callCfunction": true, + "cgocallback_gofunc": true, + "cgocallbackg": true, + "checkgoarm": true, + "check": true, + "debugCallCheck": true, + "debugCallWrap": true, + "emptyfunc": true, + "entersyscall": true, + "exit": true, + "exits": true, + "exitsyscall": true, + "externalthreadhandler": true, + "findnull": true, + "goexit1": true, + "gostring": true, + "i386_set_ldt": true, + "_initcgo": true, + "init_thread_tls": true, + "ldt0setup": true, + "libpreinit": true, + "load_g": true, + "morestack": true, + "mstart": true, + "nacl_sysinfo": true, + "nanotimeQPC": true, + "nanotime": true, + "newosproc0": true, + "newproc": true, + "newstack": true, + "noted": true, + "nowQPC": true, + "osinit": true, + "printf": true, + "racecallback": true, + "reflectcallmove": true, + "reginit": true, + "rt0_go": true, + "save_g": true, + "schedinit": true, + "setldt": true, + "settls": true, + "sighandler": true, + "sigprofNonGo": true, + "sigtrampgo": true, + "_sigtramp": true, + "sigtramp": true, + "stackcheck": true, + "syscall_chdir": true, + "syscall_chroot": true, + "syscall_close": true, + "syscall_dup2": true, + "syscall_execve": true, + "syscall_exit": true, + "syscall_fcntl": true, + "syscall_forkx": true, + "syscall_gethostname": true, + "syscall_getpid": true, + "syscall_ioctl": true, + "syscall_pipe": true, + "syscall_rawsyscall6": true, + "syscall_rawSyscall6": true, + "syscall_rawsyscall": true, + "syscall_RawSyscall": true, + "syscall_rawsysvicall6": true, + "syscall_setgid": true, + "syscall_setgroups": true, + "syscall_setpgid": true, + "syscall_setsid": true, + "syscall_setuid": true, + "syscall_syscall6": true, + "syscall_syscall": true, + "syscall_Syscall": true, + "syscall_sysvicall6": true, + "syscall_wait4": true, + "syscall_write": true, + "traceback": true, + "tstart": true, + "usplitR0": true, + "wbBufFlush": true, + "write": true, +} + +func (c *Checker) Init(prog *lint.Program) { + for _, pkg := range prog.AllPackages { + c.interfaces = append(c.interfaces, interfacesFromExportData(pkg.Types)...) 
+ } + c.initialPackages = prog.InitialPackages + c.seen = map[token.Position]struct{}{} + + c.scopes = map[*types.Scope]*ssa.Function{} + for _, pkg := range prog.InitialPackages { + for _, fn := range pkg.InitialFunctions { + if fn.Object() != nil { + scope := fn.Object().(*types.Func).Scope() + c.scopes[scope] = fn + } + } + } + + // This is a hack to work in the confines of "one package per + // job". We do all the actual work in the Init function, and only + // report results in the actual checker function. + var out []types.Object + if c.WholeProgram { + // (e1) all packages share a single graph + out = c.processPkgs(prog.InitialPackages...) + } else { + var wg sync.WaitGroup + var mu sync.Mutex + for _, pkg := range prog.InitialPackages { + pkg := pkg + wg.Add(1) + go func() { + res := c.processPkgs(pkg) + mu.Lock() + out = append(out, res...) + mu.Unlock() + wg.Done() + }() + } + wg.Wait() + } + out2 := make([]types.Object, 0, len(out)) + for _, v := range out { + if _, ok := c.seen[prog.Fset().Position(v.Pos())]; !ok { + out2 = append(out2, v) + } + } + c.out = out2 +} + +func (c *Checker) Lint(j *lint.Job) { + // The actual work is being done in Init. We only report existing + // results here. + unused := c.out for _, u := range unused { - name := u.Obj.Name() - if sig, ok := u.Obj.Type().(*types.Signature); ok && sig.Recv() != nil { + if u.Pkg() != j.Pkg.Types { + continue + } + name := u.Name() + if sig, ok := u.Type().(*types.Signature); ok && sig.Recv() != nil { switch sig.Recv().Type().(type) { case *types.Named, *types.Pointer: typ := types.TypeString(sig.Recv().Type(), func(*types.Package) string { return "" }) if len(typ) > 0 && typ[0] == '*' { - name = fmt.Sprintf("(%s).%s", typ, u.Obj.Name()) + name = fmt.Sprintf("(%s).%s", typ, u.Name()) } else if len(typ) > 0 { - name = fmt.Sprintf("%s.%s", typ, u.Obj.Name()) + name = fmt.Sprintf("%s.%s", typ, u.Name()) } } } - j.Errorf(u.Obj, "%s %s is unused", typString(u.Obj), name) + j.Errorf(u, "%s %s is unused", typString(u), name) } } -type graph struct { - roots []*graphNode - nodes map[interface{}]*graphNode +func (c *Checker) debugf(f string, v ...interface{}) { + if c.Debug != nil { + fmt.Fprintf(c.Debug, f, v...) 
+ } } -func (g *graph) markUsedBy(obj, usedBy interface{}) { - objNode := g.getNode(obj) - usedByNode := g.getNode(usedBy) - if objNode.obj == usedByNode.obj { +func (graph *Graph) quieten(node *Node) { + if node.seen { return } - usedByNode.uses[objNode] = struct{}{} -} - -var labelCounter = 1 - -func (g *graph) getNode(obj interface{}) *graphNode { - for { - if pt, ok := obj.(*types.Pointer); ok { - obj = pt.Elem() - } else { - break + switch obj := node.obj.(type) { + case *ssa.Function: + sig := obj.Type().(*types.Signature) + if sig.Recv() != nil { + if node, ok := graph.nodeMaybe(sig.Recv()); ok { + node.quiet = true + } } - } - _, ok := g.nodes[obj] - if !ok { - g.addObj(obj) - } - - return g.nodes[obj] -} - -func (g *graph) addObj(obj interface{}) { - if pt, ok := obj.(*types.Pointer); ok { - obj = pt.Elem() - } - node := &graphNode{obj: obj, uses: make(map[*graphNode]struct{}), n: labelCounter} - g.nodes[obj] = node - labelCounter++ - - if obj, ok := obj.(*types.Struct); ok { - n := obj.NumFields() - for i := 0; i < n; i++ { - field := obj.Field(i) - g.markUsedBy(obj, field) + for i := 0; i < sig.Params().Len(); i++ { + if node, ok := graph.nodeMaybe(sig.Params().At(i)); ok { + node.quiet = true + } + } + for i := 0; i < sig.Results().Len(); i++ { + if node, ok := graph.nodeMaybe(sig.Results().At(i)); ok { + node.quiet = true + } + } + case *types.Named: + for i := 0; i < obj.NumMethods(); i++ { + m := graph.pkg.Prog.FuncValue(obj.Method(i)) + if node, ok := graph.nodeMaybe(m); ok { + node.quiet = true + } + } + case *types.Struct: + for i := 0; i < obj.NumFields(); i++ { + if node, ok := graph.nodeMaybe(obj.Field(i)); ok { + node.quiet = true + } + } + case *types.Interface: + for i := 0; i < obj.NumExplicitMethods(); i++ { + m := obj.ExplicitMethod(i) + if node, ok := graph.nodeMaybe(m); ok { + node.quiet = true + } } } } -type graphNode struct { - obj interface{} - uses map[*graphNode]struct{} - used bool - quiet bool - n int -} +func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { + graph := NewGraph() + graph.wholeProgram = c.WholeProgram + graph.scopes = c.scopes + graph.initialPackages = c.initialPackages -type CheckMode int + var out []types.Object -const ( - CheckConstants CheckMode = 1 << iota - CheckFields - CheckFunctions - CheckTypes - CheckVariables - - CheckAll = CheckConstants | CheckFields | CheckFunctions | CheckTypes | CheckVariables -) - -type Unused struct { - Obj types.Object - Position token.Position -} - -type Checker struct { - Mode CheckMode - WholeProgram bool - ConsiderReflection bool - Debug io.Writer - - graph *graph - - msCache typeutil.MethodSetCache - prog *lint.Program - topmostCache map[*types.Scope]*types.Scope - interfaces []*types.Interface -} - -func NewChecker(mode CheckMode) *Checker { - return &Checker{ - Mode: mode, - graph: &graph{ - nodes: make(map[interface{}]*graphNode), - }, - topmostCache: make(map[*types.Scope]*types.Scope), + for _, pkg := range pkgs { + if pkg.PkgPath == "unsafe" { + continue + } + graph.entry(pkg) } -} -func (c *Checker) checkConstants() bool { return (c.Mode & CheckConstants) > 0 } -func (c *Checker) checkFields() bool { return (c.Mode & CheckFields) > 0 } -func (c *Checker) checkFunctions() bool { return (c.Mode & CheckFunctions) > 0 } -func (c *Checker) checkTypes() bool { return (c.Mode & CheckTypes) > 0 } -func (c *Checker) checkVariables() bool { return (c.Mode & CheckVariables) > 0 } - -func (c *Checker) markFields(typ types.Type) { - structType, ok := typ.Underlying().(*types.Struct) - if 
!ok { - return - } - n := structType.NumFields() - for i := 0; i < n; i++ { - field := structType.Field(i) - c.graph.markUsedBy(field, typ) - } -} - -type Error struct { - Errors map[string][]error -} - -func (e Error) Error() string { - return fmt.Sprintf("errors in %d packages", len(e.Errors)) -} - -func (c *Checker) Check(prog *lint.Program) []Unused { - var unused []Unused - c.prog = prog if c.WholeProgram { - c.findExportedInterfaces() - } - for _, pkg := range prog.InitialPackages { - c.processDefs(pkg) - c.processUses(pkg) - c.processTypes(pkg) - c.processSelections(pkg) - c.processAST(pkg) - } + var ifaces []*types.Interface + var notIfaces []types.Type - for _, node := range c.graph.nodes { - obj, ok := node.obj.(types.Object) - if !ok { - continue - } - typNode, ok := c.graph.nodes[obj.Type()] - if !ok { - continue - } - node.uses[typNode] = struct{}{} - } + // implement as many interfaces as possible + graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { + switch t := t.(type) { + case *types.Interface: + ifaces = append(ifaces, t) + default: + if _, ok := t.Underlying().(*types.Interface); !ok { + notIfaces = append(notIfaces, t) + } + } + }) - roots := map[*graphNode]struct{}{} - for _, root := range c.graph.roots { - roots[root] = struct{}{} + // OPT(dh): this is not terribly efficient + ifaces = append(ifaces, c.interfaces...) + + // (8.0) handle interfaces + // (e2) types aim to implement all exported interfaces from all packages + for _, t := range notIfaces { + ms := graph.msCache.MethodSet(t) + for _, iface := range ifaces { + if sels, ok := graph.implements(t, iface, ms); ok { + for _, sel := range sels { + graph.useMethod(t, sel, t, "implements") + } + } + } + } } - markNodesUsed(roots) - c.markNodesQuiet() - c.deduplicate() if c.Debug != nil { - c.printDebugGraph(c.Debug) - } - - for _, node := range c.graph.nodes { - if node.used || node.quiet { - continue - } - obj, ok := node.obj.(types.Object) - if !ok { - continue - } - found := false - if !false { - for _, pkg := range prog.InitialPackages { - if pkg.Types == obj.Pkg() { - found = true - break + debugNode := func(node *Node) { + if node.obj == nil { + c.debugf("n%d [label=\"Root\"];\n", node.id) + } else { + c.debugf("n%d [label=%q];\n", node.id, node.obj) + } + for used, reasons := range node.used { + for _, reason := range reasons { + c.debugf("n%d -> n%d [label=%q];\n", node.id, used.id, reason) } } } - if !found { - continue - } - pos := c.prog.Fset().Position(obj.Pos()) - if pos.Filename == "" || filepath.Base(pos.Filename) == "C" { - continue + c.debugf("digraph{\n") + debugNode(graph.Root) + for _, node := range graph.Nodes { + debugNode(node) } - - unused = append(unused, Unused{Obj: obj, Position: pos}) + graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { + debugNode(value.(*Node)) + }) + c.debugf("}\n") } - return unused + graph.color(graph.Root) + // if a node is unused, don't report any of the node's + // children as unused. for example, if a function is unused, + // don't flag its receiver. if a named type is unused, don't + // flag its methods. 
+ + for _, node := range graph.Nodes { + graph.quieten(node) + } + graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + graph.quieten(value.(*Node)) + }) + + report := func(node *Node) { + if node.seen { + var pos token.Pos + switch obj := node.obj.(type) { + case types.Object: + pos = obj.Pos() + case *ssa.Function: + pos = obj.Pos() + } + + if pos != 0 { + c.seenMu.Lock() + c.seen[pkgs[0].Fset.Position(pos)] = struct{}{} + c.seenMu.Unlock() + } + return + } + if node.quiet { + c.debugf("n%d [color=purple];\n", node.id) + return + } + + type packager1 interface { + Pkg() *types.Package + } + type packager2 interface { + Package() *ssa.Package + } + + // do not report objects from packages we aren't checking. + checkPkg: + switch obj := node.obj.(type) { + case packager1: + for _, pkg := range pkgs { + if pkg.Types == obj.Pkg() { + break checkPkg + } + } + c.debugf("n%d [color=yellow];\n", node.id) + return + case packager2: + // This happens to filter $bound and $thunk, which + // should be fine, since we wouldn't want to report + // them, anyway. Remember that this filtering is only + // for the output, it doesn't affect the reachability + // of nodes in the graph. + for _, pkg := range pkgs { + if pkg.SSA == obj.Package() { + break checkPkg + } + } + c.debugf("n%d [color=yellow];\n", node.id) + return + } + + c.debugf("n%d [color=red];\n", node.id) + switch obj := node.obj.(type) { + case *types.Var: + // don't report unnamed variables (receivers, interface embedding) + if obj.Name() != "" || obj.IsField() { + out = append(out, obj) + } + case types.Object: + if obj.Name() != "_" { + out = append(out, obj) + } + case *ssa.Function: + if obj == nil { + // TODO(dh): how does this happen? + return + } + if obj.Object() == nil { + // Closures + return + } + out = append(out, obj.Object()) + default: + c.debugf("n%d [color=gray];\n", node.id) + } + } + for _, node := range graph.Nodes { + report(node) + } + graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + report(value.(*Node)) + }) + + return out +} + +type Graph struct { + pkg *ssa.Package + msCache typeutil.MethodSetCache + scopes map[*types.Scope]*ssa.Function + + wholeProgram bool + + nodeCounter int + + Root *Node + TypeNodes typeutil.Map + Nodes map[interface{}]*Node + + seenTypes typeutil.Map + seenFns map[*ssa.Function]struct{} + + initialPackages []*lint.Pkg +} + +func NewGraph() *Graph { + g := &Graph{ + Nodes: map[interface{}]*Node{}, + seenFns: map[*ssa.Function]struct{}{}, + } + g.Root = g.newNode(nil) + return g +} + +func (g *Graph) color(root *Node) { + if root.seen { + return + } + root.seen = true + for other := range root.used { + g.color(other) + } +} + +type ConstGroup struct { + // give the struct a size to get unique pointers + _ byte +} + +func (ConstGroup) String() string { return "const group" } + +type Node struct { + obj interface{} + id int + used map[*Node][]string + + seen bool + quiet bool +} + +func (g *Graph) nodeMaybe(obj interface{}) (*Node, bool) { + if t, ok := obj.(types.Type); ok { + if v := g.TypeNodes.At(t); v != nil { + return v.(*Node), true + } + return nil, false + } + if node, ok := g.Nodes[obj]; ok { + return node, true + } + return nil, false +} + +func (g *Graph) node(obj interface{}) (node *Node, new bool) { + if t, ok := obj.(types.Type); ok { + if v := g.TypeNodes.At(t); v != nil { + return v.(*Node), false + } + node := g.newNode(obj) + g.TypeNodes.Set(t, node) + return node, true + } + if node, ok := g.Nodes[obj]; ok { + return node, false + } + node = 
g.newNode(obj) + g.Nodes[obj] = node + return node, true +} + +func (g *Graph) newNode(obj interface{}) *Node { + g.nodeCounter++ + return &Node{ + obj: obj, + id: g.nodeCounter, + used: map[*Node][]string{}, + } +} + +func (n *Node) use(node *Node, reason string) { + assert(node != nil) + n.used[node] = append(n.used[node], reason) +} + +// isIrrelevant reports whether an object's presence in the graph is +// of any relevance. A lot of objects will never have outgoing edges, +// nor meaningful incoming ones. Examples are basic types and empty +// signatures, among many others. +// +// Dropping these objects should have no effect on correctness, but +// may improve performance. It also helps with debugging, as it +// greatly reduces the size of the graph. +func isIrrelevant(obj interface{}) bool { + if obj, ok := obj.(types.Object); ok { + switch obj := obj.(type) { + case *types.Var: + if obj.IsField() { + // We need to track package fields + return false + } + if obj.Pkg() != nil && obj.Parent() == obj.Pkg().Scope() { + // We need to track package-level variables + return false + } + return isIrrelevant(obj.Type()) + default: + return false + } + } + if T, ok := obj.(types.Type); ok { + switch T := T.(type) { + case *types.Array: + return isIrrelevant(T.Elem()) + case *types.Slice: + return isIrrelevant(T.Elem()) + case *types.Basic: + return true + case *types.Tuple: + for i := 0; i < T.Len(); i++ { + if !isIrrelevant(T.At(i).Type()) { + return false + } + } + return true + case *types.Signature: + if T.Recv() != nil { + return false + } + for i := 0; i < T.Params().Len(); i++ { + if !isIrrelevant(T.Params().At(i)) { + return false + } + } + for i := 0; i < T.Results().Len(); i++ { + if !isIrrelevant(T.Results().At(i)) { + return false + } + } + return true + case *types.Interface: + return T.NumMethods() == 0 + default: + return false + } + } + return false +} + +func (g *Graph) isInterestingPackage(pkg *types.Package) bool { + if g.wholeProgram { + for _, opkg := range g.initialPackages { + if opkg.Types == pkg { + return true + } + } + return false + } + return pkg == g.pkg.Pkg +} + +func (g *Graph) see(obj interface{}) { + if isIrrelevant(obj) { + return + } + + assert(obj != nil) + if obj, ok := obj.(types.Object); ok && obj.Pkg() != nil { + if !g.isInterestingPackage(obj.Pkg()) { + return + } + } + + // add new node to graph + g.node(obj) +} + +func (g *Graph) use(used, by interface{}, reason string) { + if isIrrelevant(used) { + return + } + + assert(used != nil) + if _, ok := used.(*types.Func); ok { + assert(g.pkg.Prog.FuncValue(used.(*types.Func)) == nil) + } + if _, ok := by.(*types.Func); ok { + assert(g.pkg.Prog.FuncValue(by.(*types.Func)) == nil) + } + if obj, ok := used.(types.Object); ok && obj.Pkg() != nil { + if !g.isInterestingPackage(obj.Pkg()) { + return + } + } + if obj, ok := by.(types.Object); ok && obj.Pkg() != nil { + if !g.isInterestingPackage(obj.Pkg()) { + return + } + } + usedNode, new := g.node(used) + assert(!new) + if by == nil { + g.Root.use(usedNode, reason) + } else { + byNode, new := g.node(by) + assert(!new) + byNode.use(usedNode, reason) + } +} + +func (g *Graph) seeAndUse(used, by interface{}, reason string) { + g.see(used) + g.use(used, by, reason) +} + +func (g *Graph) trackExportedIdentifier(obj types.Object) bool { + if !obj.Exported() { + // object isn't exported, the question is moot + return false + } + if g.wholeProgram { + // whole program mode tracks exported identifiers accurately + return false + } + + path := 
g.pkg.Prog.Fset.Position(obj.Pos()).Filename + if g.pkg.Pkg.Name() == "main" && !strings.HasSuffix(path, "_test.go") { + // exported identifiers in package main can't be imported. + // However, test functions can be called, and xtest packages + // even have access to exported identifiers. + return false + } + + // at one point we only considered exported identifiers in + // *_test.go files if they were Benchmark, Example or Test + // functions. However, this doesn't work when we look at one + // package at a time, because objects exported in a test variant + // of a package may be used by the xtest package. The only + // solution would be to look at multiple packages at once + return true +} + +func (g *Graph) entry(pkg *lint.Pkg) { + // TODO rename Entry + g.pkg = pkg.SSA + + for _, f := range pkg.Syntax { + for _, cg := range f.Comments { + for _, c := range cg.List { + if strings.HasPrefix(c.Text, "//go:linkname ") { + // FIXME(dh): we're looking at all comments. The + // compiler only looks at comments in the + // left-most column. The intention probably is to + // only look at top-level comments. + + // (1.8) packages use symbols linked via go:linkname + fields := strings.Fields(c.Text) + if len(fields) == 3 { + if m, ok := pkg.SSA.Members[fields[1]]; ok { + var obj interface{} + switch m := m.(type) { + case *ssa.Global: + obj = m.Object() + case *ssa.Function: + obj = m + default: + panic(fmt.Sprintf("unhandled type: %T", m)) + } + assert(obj != nil) + g.seeAndUse(obj, nil, "go:linkname") + } + } + } + } + } + } + + surroundingFunc := func(obj types.Object) *ssa.Function { + scope := obj.Parent() + for scope != nil { + if fn := g.scopes[scope]; fn != nil { + return fn + } + scope = scope.Parent() + } + return nil + } + + // SSA form won't tell us about locally scoped types that aren't + // being used. Walk the list of Defs to get all named types. + // + // SSA form also won't tell us about constants; use Defs and Uses + // to determine which constants exist and which are being used. 
+ for _, obj := range pkg.TypesInfo.Defs { + switch obj := obj.(type) { + case *types.TypeName: + // types are being handled by walking the AST + case *types.Const: + g.see(obj) + fn := surroundingFunc(obj) + if fn == nil && g.trackExportedIdentifier(obj) { + // (1.4) packages use exported constants (unless in package main) + g.use(obj, nil, "exported constant") + } + g.typ(obj.Type()) + g.seeAndUse(obj.Type(), obj, "constant type") + } + } + + // Find constants being used inside functions, find sinks in tests + handledConsts := map[*ast.Ident]struct{}{} + for _, fn := range pkg.InitialFunctions { + g.see(fn) + node := fn.Syntax() + if node == nil { + continue + } + ast.Inspect(node, func(node ast.Node) bool { + switch node := node.(type) { + case *ast.Ident: + obj, ok := pkg.TypesInfo.Uses[node] + if !ok { + return true + } + switch obj := obj.(type) { + case *types.Const: + g.seeAndUse(obj, fn, "used constant") + } + case *ast.AssignStmt: + for _, expr := range node.Lhs { + ident, ok := expr.(*ast.Ident) + if !ok { + continue + } + obj := pkg.TypesInfo.ObjectOf(ident) + if obj == nil { + continue + } + path := g.pkg.Prog.Fset.File(obj.Pos()).Name() + if strings.HasSuffix(path, "_test.go") { + if obj.Parent() != nil && obj.Parent().Parent() != nil && obj.Parent().Parent().Parent() == nil { + // object's scope is the package, whose + // parent is the file, whose parent is nil + + // (4.9) functions use package-level variables they assign to iff in tests (sinks for benchmarks) + // (9.7) variable _reads_ use variables, writes do not, except in tests + g.seeAndUse(obj, fn, "test sink") + } + } + } + } + + return true + }) + } + // Find constants being used in non-function contexts + for ident, obj := range pkg.TypesInfo.Uses { + _, ok := obj.(*types.Const) + if !ok { + continue + } + if _, ok := handledConsts[ident]; ok { + continue + } + g.seeAndUse(obj, nil, "used constant") + } + + var fn *ssa.Function + pkg.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)}, func(n ast.Node) { + switch n := n.(type) { + case *ast.FuncDecl: + fn = pkg.SSA.Prog.FuncValue(pkg.TypesInfo.ObjectOf(n.Name).(*types.Func)) + if fn != nil { + g.see(fn) + } + case *ast.GenDecl: + switch n.Tok { + case token.CONST: + groups := lintdsl.GroupSpecs(pkg.Fset, n.Specs) + for _, specs := range groups { + if len(specs) > 1 { + cg := &ConstGroup{} + g.see(cg) + for _, spec := range specs { + for _, name := range spec.(*ast.ValueSpec).Names { + obj := pkg.TypesInfo.ObjectOf(name) + // (10.1) const groups + g.seeAndUse(obj, cg, "const group") + g.use(cg, obj, "const group") + } + } + } + } + case token.VAR: + for _, spec := range n.Specs { + v := spec.(*ast.ValueSpec) + for _, name := range v.Names { + T := pkg.TypesInfo.TypeOf(name) + if fn != nil { + g.seeAndUse(T, fn, "var decl") + } else { + g.seeAndUse(T, nil, "var decl") + } + g.typ(T) + } + } + case token.TYPE: + for _, spec := range n.Specs { + // go/types doesn't provide a way to go from a + // types.Named to the named type it was based on + // (the t1 in type t2 t1). Therefore we walk the + // AST and process GenDecls. + // + // (2.2) named types use the type they're based on + v := spec.(*ast.TypeSpec) + T := pkg.TypesInfo.TypeOf(v.Type) + obj := pkg.TypesInfo.ObjectOf(v.Name) + g.see(obj) + g.see(T) + g.use(T, obj, "type") + g.typ(obj.Type()) + g.typ(T) + + if v.Assign != 0 { + aliasFor := obj.(*types.TypeName).Type() + // (2.3) named types use all their aliases. 
we can't easily track uses of aliases + if isIrrelevant(aliasFor) { + // We do not track the type this is an + // alias for (for example builtins), so + // just mark the alias used. + // + // FIXME(dh): what about aliases declared inside functions? + g.use(obj, nil, "alias") + } else { + g.see(aliasFor) + g.seeAndUse(obj, aliasFor, "alias") + } + } + } + } + default: + panic(fmt.Sprintf("unreachable: %T", n)) + } + }) + + for _, m := range g.pkg.Members { + switch m := m.(type) { + case *ssa.NamedConst: + // nothing to do, we collect all constants from Defs + case *ssa.Global: + if m.Object() != nil { + g.see(m.Object()) + if g.trackExportedIdentifier(m.Object()) { + // (1.3) packages use exported variables (unless in package main) + g.use(m.Object(), nil, "exported top-level variable") + } + } + case *ssa.Function: + g.see(m) + if m.Name() == "init" { + // (1.5) packages use init functions + g.use(m, nil, "init function") + } + // This branch catches top-level functions, not methods. + if m.Object() != nil && g.trackExportedIdentifier(m.Object()) { + // (1.2) packages use exported functions (unless in package main) + g.use(m, nil, "exported top-level function") + } + if m.Name() == "main" && g.pkg.Pkg.Name() == "main" { + // (1.7) packages use the main function iff in the main package + g.use(m, nil, "main function") + } + if g.pkg.Pkg.Path() == "runtime" && runtimeFuncs[m.Name()] { + // (9.8) runtime functions that may be called from user code via the compiler + g.use(m, nil, "runtime function") + } + if m.Syntax() != nil { + doc := m.Syntax().(*ast.FuncDecl).Doc + if doc != nil { + for _, cmt := range doc.List { + if strings.HasPrefix(cmt.Text, "//go:cgo_export_") { + // (1.6) packages use functions exported to cgo + g.use(m, nil, "cgo exported") + } + } + } + } + g.function(m) + case *ssa.Type: + if m.Object() != nil { + g.see(m.Object()) + if g.trackExportedIdentifier(m.Object()) { + // (1.1) packages use exported named types (unless in package main) + g.use(m.Object(), nil, "exported top-level type") + } + } + g.typ(m.Type()) + default: + panic(fmt.Sprintf("unreachable: %T", m)) + } + } + + if !g.wholeProgram { + // When not in whole program mode we process one package per + // graph, which means g.seenTypes only contains types of + // interest to us. In whole program mode, we're better off + // processing all interfaces at once, globally, both for + // performance reasons and because in whole program mode we + // actually care about all interfaces, not just the subset + // that has unexported methods. 
+ + var ifaces []*types.Interface + var notIfaces []types.Type + + g.seenTypes.Iterate(func(t types.Type, _ interface{}) { + switch t := t.(type) { + case *types.Interface: + // OPT(dh): (8.1) we only need interfaces that have unexported methods + ifaces = append(ifaces, t) + default: + if _, ok := t.Underlying().(*types.Interface); !ok { + notIfaces = append(notIfaces, t) + } + } + }) + + // (8.0) handle interfaces + for _, t := range notIfaces { + ms := g.msCache.MethodSet(t) + for _, iface := range ifaces { + if sels, ok := g.implements(t, iface, ms); ok { + for _, sel := range sels { + g.useMethod(t, sel, t, "implements") + } + } + } + } + } +} + +func (g *Graph) useMethod(t types.Type, sel *types.Selection, by interface{}, reason string) { + obj := sel.Obj() + path := sel.Index() + assert(obj != nil) + if len(path) > 1 { + base := lintdsl.Dereference(t).Underlying().(*types.Struct) + for _, idx := range path[:len(path)-1] { + next := base.Field(idx) + // (6.3) structs use embedded fields that help implement interfaces + g.seeAndUse(next, base, "provides method") + base, _ = lintdsl.Dereference(next.Type()).Underlying().(*types.Struct) + } + } + if fn := g.pkg.Prog.FuncValue(obj.(*types.Func)); fn != nil { + // actual function + g.seeAndUse(fn, by, reason) + } else { + // interface method + g.seeAndUse(obj, by, reason) + } +} + +func (g *Graph) function(fn *ssa.Function) { + if fn.Package() != nil && fn.Package() != g.pkg { + return + } + if _, ok := g.seenFns[fn]; ok { + return + } + g.seenFns[fn] = struct{}{} + + // (4.1) functions use all their arguments, return parameters and receivers + g.seeAndUse(fn.Signature, fn, "function signature") + g.signature(fn.Signature) + g.instructions(fn) + for _, anon := range fn.AnonFuncs { + // (4.2) functions use anonymous functions defined beneath them + g.seeAndUse(anon, fn, "anonymous function") + g.function(anon) + } +} + +func (g *Graph) typ(t types.Type) { + if g.seenTypes.At(t) != nil { + return + } + if t, ok := t.(*types.Named); ok && t.Obj().Pkg() != nil { + if t.Obj().Pkg() != g.pkg.Pkg { + return + } + } + g.seenTypes.Set(t, struct{}{}) + if isIrrelevant(t) { + return + } + + g.see(t) + switch t := t.(type) { + case *types.Struct: + for i := 0; i < t.NumFields(); i++ { + g.see(t.Field(i)) + if t.Field(i).Exported() { + // (6.2) structs use exported fields + g.use(t.Field(i), t, "exported struct field") + } else if t.Field(i).Name() == "_" { + g.use(t.Field(i), t, "blank field") + } else if isNoCopyType(t.Field(i).Type()) { + // (6.1) structs use fields of type NoCopy sentinel + g.use(t.Field(i), t, "NoCopy sentinel") + } + if t.Field(i).Anonymous() { + // (e3) exported identifiers aren't automatically used. + if !g.wholeProgram { + // does the embedded field contribute exported methods to the method set? 
+ T := t.Field(i).Type() + if _, ok := T.Underlying().(*types.Pointer); !ok { + // An embedded field is addressable, so check + // the pointer type to get the full method set + T = types.NewPointer(T) + } + ms := g.msCache.MethodSet(T) + for j := 0; j < ms.Len(); j++ { + if ms.At(j).Obj().Exported() { + // (6.4) structs use embedded fields that have exported methods (recursively) + g.use(t.Field(i), t, "extends exported method set") + break + } + } + } + + seen := map[*types.Struct]struct{}{} + var hasExportedField func(t types.Type) bool + hasExportedField = func(T types.Type) bool { + t, ok := lintdsl.Dereference(T).Underlying().(*types.Struct) + if !ok { + return false + } + if _, ok := seen[t]; ok { + return false + } + seen[t] = struct{}{} + for i := 0; i < t.NumFields(); i++ { + field := t.Field(i) + if field.Exported() { + return true + } + if field.Embedded() && hasExportedField(field.Type()) { + return true + } + } + return false + } + // does the embedded field contribute exported fields? + if hasExportedField(t.Field(i).Type()) { + // (6.5) structs use embedded structs that have exported fields (recursively) + g.use(t.Field(i), t, "extends exported fields") + } + + } + g.variable(t.Field(i)) + } + case *types.Basic: + // Nothing to do + case *types.Named: + // (9.3) types use their underlying and element types + g.seeAndUse(t.Underlying(), t, "underlying type") + g.seeAndUse(t.Obj(), t, "type name") + g.seeAndUse(t, t.Obj(), "named type") + + for i := 0; i < t.NumMethods(); i++ { + meth := g.pkg.Prog.FuncValue(t.Method(i)) + g.see(meth) + // don't use trackExportedIdentifier here, we care about + // all exported methods, even in package main or in tests. + if meth.Object() != nil && meth.Object().Exported() && !g.wholeProgram { + // (2.1) named types use exported methods + g.use(meth, t, "exported method") + } + g.function(meth) + } + + g.typ(t.Underlying()) + case *types.Slice: + // (9.3) types use their underlying and element types + g.seeAndUse(t.Elem(), t, "element type") + g.typ(t.Elem()) + case *types.Map: + // (9.3) types use their underlying and element types + g.seeAndUse(t.Elem(), t, "element type") + // (9.3) types use their underlying and element types + g.seeAndUse(t.Key(), t, "key type") + g.typ(t.Elem()) + g.typ(t.Key()) + case *types.Signature: + g.signature(t) + case *types.Interface: + for i := 0; i < t.NumMethods(); i++ { + m := t.Method(i) + // (8.3) All interface methods are marked as used + g.seeAndUse(m, t, "interface method") + g.seeAndUse(m.Type().(*types.Signature), m, "signature") + g.signature(m.Type().(*types.Signature)) + } + for i := 0; i < t.NumEmbeddeds(); i++ { + tt := t.EmbeddedType(i) + // (8.4) All embedded interfaces are marked as used + g.seeAndUse(tt, t, "embedded interface") + } + case *types.Array: + // (9.3) types use their underlying and element types + g.seeAndUse(t.Elem(), t, "element type") + g.typ(t.Elem()) + case *types.Pointer: + // (9.3) types use their underlying and element types + g.seeAndUse(t.Elem(), t, "element type") + g.typ(t.Elem()) + case *types.Chan: + // (9.3) types use their underlying and element types + g.seeAndUse(t.Elem(), t, "element type") + g.typ(t.Elem()) + case *types.Tuple: + for i := 0; i < t.Len(); i++ { + // (9.3) types use their underlying and element types + g.seeAndUse(t.At(i), t, "tuple element") + g.variable(t.At(i)) + } + default: + panic(fmt.Sprintf("unreachable: %T", t)) + } +} + +func (g *Graph) variable(v *types.Var) { + // (9.2) variables use their types + g.seeAndUse(v.Type(), v, "variable 
type") + g.typ(v.Type()) +} + +func (g *Graph) signature(sig *types.Signature) { + if sig.Recv() != nil { + g.seeAndUse(sig.Recv(), sig, "receiver") + g.variable(sig.Recv()) + } + for i := 0; i < sig.Params().Len(); i++ { + param := sig.Params().At(i) + g.seeAndUse(param, sig, "function argument") + g.variable(param) + } + for i := 0; i < sig.Results().Len(); i++ { + param := sig.Results().At(i) + g.seeAndUse(param, sig, "function result") + g.variable(param) + } +} + +func (g *Graph) instructions(fn *ssa.Function) { + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + ops := instr.Operands(nil) + switch instr.(type) { + case *ssa.Store: + // (9.7) variable _reads_ use variables, writes do not + ops = ops[1:] + case *ssa.DebugRef: + ops = nil + } + for _, arg := range ops { + walkPhi(*arg, func(v ssa.Value) { + switch v := v.(type) { + case *ssa.Function: + // (4.3) functions use closures and bound methods. + // (4.5) functions use functions they call + // (9.5) instructions use their operands + // (4.4) functions use functions they return. we assume that someone else will call the returned function + g.seeAndUse(v, fn, "instruction operand") + g.function(v) + case *ssa.Const: + // (9.6) instructions use their operands' types + g.seeAndUse(v.Type(), fn, "constant's type") + g.typ(v.Type()) + case *ssa.Global: + if v.Object() != nil { + // (9.5) instructions use their operands + g.seeAndUse(v.Object(), fn, "instruction operand") + } + } + }) + } + if v, ok := instr.(ssa.Value); ok { + if _, ok := v.(*ssa.Range); !ok { + // See https://github.com/golang/go/issues/19670 + + // (4.8) instructions use their types + // (9.4) conversions use the type they convert to + g.seeAndUse(v.Type(), fn, "instruction") + g.typ(v.Type()) + } + } + switch instr := instr.(type) { + case *ssa.Field: + st := instr.X.Type().Underlying().(*types.Struct) + field := st.Field(instr.Field) + // (4.7) functions use fields they access + g.seeAndUse(field, fn, "field access") + case *ssa.FieldAddr: + st := lintdsl.Dereference(instr.X.Type()).Underlying().(*types.Struct) + field := st.Field(instr.Field) + // (4.7) functions use fields they access + g.seeAndUse(field, fn, "field access") + case *ssa.Store: + // nothing to do, handled generically by operands + case *ssa.Call: + c := instr.Common() + if !c.IsInvoke() { + // handled generically as an instruction operand + + if g.wholeProgram { + // (e3) special case known reflection-based method callers + switch lintdsl.CallName(c) { + case "net/rpc.Register", "net/rpc.RegisterName", "(*net/rpc.Server).Register", "(*net/rpc.Server).RegisterName": + var arg ssa.Value + switch lintdsl.CallName(c) { + case "net/rpc.Register": + arg = c.Args[0] + case "net/rpc.RegisterName": + arg = c.Args[1] + case "(*net/rpc.Server).Register": + arg = c.Args[1] + case "(*net/rpc.Server).RegisterName": + arg = c.Args[2] + } + walkPhi(arg, func(v ssa.Value) { + if v, ok := v.(*ssa.MakeInterface); ok { + walkPhi(v.X, func(vv ssa.Value) { + ms := g.msCache.MethodSet(vv.Type()) + for i := 0; i < ms.Len(); i++ { + if ms.At(i).Obj().Exported() { + g.useMethod(vv.Type(), ms.At(i), fn, "net/rpc.Register") + } + } + }) + } + }) + } + } + } else { + // (4.5) functions use functions/interface methods they call + g.seeAndUse(c.Method, fn, "interface call") + } + case *ssa.Return: + // nothing to do, handled generically by operands + case *ssa.ChangeType: + // conversion type handled generically + + s1, ok1 := lintdsl.Dereference(instr.Type()).Underlying().(*types.Struct) + s2, ok2 := 
lintdsl.Dereference(instr.X.Type()).Underlying().(*types.Struct) + if ok1 && ok2 { + // Converting between two structs. The fields are + // relevant for the conversion, but only if the + // fields are also used outside of the conversion. + // Mark fields as used by each other. + + assert(s1.NumFields() == s2.NumFields()) + for i := 0; i < s1.NumFields(); i++ { + g.see(s1.Field(i)) + g.see(s2.Field(i)) + // (5.1) when converting between two equivalent structs, the fields in + // either struct use each other. the fields are relevant for the + // conversion, but only if the fields are also accessed outside the + // conversion. + g.seeAndUse(s1.Field(i), s2.Field(i), "struct conversion") + g.seeAndUse(s2.Field(i), s1.Field(i), "struct conversion") + } + } + case *ssa.MakeInterface: + // nothing to do, handled generically by operands + case *ssa.Slice: + // nothing to do, handled generically by operands + case *ssa.RunDefers: + // nothing to do, the deferred functions are already marked use by defering them. + case *ssa.Convert: + // to unsafe.Pointer + if typ, ok := instr.Type().(*types.Basic); ok && typ.Kind() == types.UnsafePointer { + if ptr, ok := instr.X.Type().Underlying().(*types.Pointer); ok { + if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { + for i := 0; i < st.NumFields(); i++ { + // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. + g.seeAndUse(st.Field(i), fn, "unsafe conversion") + } + } + } + } + // from unsafe.Pointer + if typ, ok := instr.X.Type().(*types.Basic); ok && typ.Kind() == types.UnsafePointer { + if ptr, ok := instr.Type().Underlying().(*types.Pointer); ok { + if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { + for i := 0; i < st.NumFields(); i++ { + // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. + g.seeAndUse(st.Field(i), fn, "unsafe conversion") + } + } + } + } + case *ssa.TypeAssert: + // nothing to do, handled generically by instruction + // type (possibly a tuple, which contains the asserted + // to type). 
redundantly handled by the type of + // ssa.Extract, too + case *ssa.MakeClosure: + // nothing to do, handled generically by operands + case *ssa.Alloc: + // nothing to do + case *ssa.UnOp: + // nothing to do + case *ssa.BinOp: + // nothing to do + case *ssa.If: + // nothing to do + case *ssa.Jump: + // nothing to do + case *ssa.IndexAddr: + // nothing to do + case *ssa.Extract: + // nothing to do + case *ssa.Panic: + // nothing to do + case *ssa.DebugRef: + // nothing to do + case *ssa.BlankStore: + // nothing to do + case *ssa.Phi: + // nothing to do + case *ssa.MakeMap: + // nothing to do + case *ssa.MapUpdate: + // nothing to do + case *ssa.Lookup: + // nothing to do + case *ssa.MakeSlice: + // nothing to do + case *ssa.Send: + // nothing to do + case *ssa.MakeChan: + // nothing to do + case *ssa.Range: + // nothing to do + case *ssa.Next: + // nothing to do + case *ssa.Index: + // nothing to do + case *ssa.Select: + // nothing to do + case *ssa.ChangeInterface: + // nothing to do + case *ssa.Go: + // nothing to do, handled generically by operands + case *ssa.Defer: + // nothing to do, handled generically by operands + default: + panic(fmt.Sprintf("unreachable: %T", instr)) + } + } + } } // isNoCopyType reports whether a type represents the NoCopy sentinel @@ -302,799 +1723,73 @@ func isNoCopyType(typ types.Type) bool { return true } -func (c *Checker) useNoCopyFields(typ types.Type) { - if st, ok := typ.Underlying().(*types.Struct); ok { - n := st.NumFields() - for i := 0; i < n; i++ { - field := st.Field(i) - if isNoCopyType(field.Type()) { - c.graph.markUsedBy(field, typ) - c.graph.markUsedBy(field.Type().(*types.Named).Method(0), field.Type()) - } - } - } -} - -func (c *Checker) useExportedFields(typ types.Type, by types.Type) bool { - any := false - if st, ok := typ.Underlying().(*types.Struct); ok { - n := st.NumFields() - for i := 0; i < n; i++ { - field := st.Field(i) - if field.Anonymous() { - if c.useExportedFields(field.Type(), typ) { - c.graph.markUsedBy(field, typ) - } - } - if field.Exported() { - c.graph.markUsedBy(field, by) - any = true - } - } - } - return any -} - -func (c *Checker) useExportedMethods(typ types.Type) { - named, ok := typ.(*types.Named) +func walkPhi(v ssa.Value, fn func(v ssa.Value)) { + phi, ok := v.(*ssa.Phi) if !ok { + fn(v) return } - ms := typeutil.IntuitiveMethodSet(named, &c.msCache) - for i := 0; i < len(ms); i++ { - meth := ms[i].Obj() - if meth.Exported() { - c.graph.markUsedBy(meth, typ) + + seen := map[ssa.Value]struct{}{} + var impl func(v *ssa.Phi) + impl = func(v *ssa.Phi) { + if _, ok := seen[v]; ok { + return } - } - - st, ok := named.Underlying().(*types.Struct) - if !ok { - return - } - n := st.NumFields() - for i := 0; i < n; i++ { - field := st.Field(i) - if !field.Anonymous() { - continue - } - ms := typeutil.IntuitiveMethodSet(field.Type(), &c.msCache) - for j := 0; j < len(ms); j++ { - if ms[j].Obj().Exported() { - c.graph.markUsedBy(field, typ) - break - } - } - } -} - -func (c *Checker) processDefs(pkg *lint.Pkg) { - for _, obj := range pkg.TypesInfo.Defs { - if obj == nil { - continue - } - c.graph.getNode(obj) - - if obj, ok := obj.(*types.TypeName); ok { - c.graph.markUsedBy(obj.Type().Underlying(), obj.Type()) - c.graph.markUsedBy(obj.Type(), obj) // TODO is this needed? - c.graph.markUsedBy(obj, obj.Type()) - - // We mark all exported fields as used. For normal - // operation, we have to. The user may use these fields - // without us knowing. 
- // - // TODO(dh): In whole-program mode, however, we mark them - // as used because of reflection (such as JSON - // marshaling). Strictly speaking, we would only need to - // mark them used if an instance of the type was - // accessible via an interface value. - if !c.WholeProgram || c.ConsiderReflection { - c.useExportedFields(obj.Type(), obj.Type()) - } - - // TODO(dh): Traditionally we have not marked all exported - // methods as exported, even though they're strictly - // speaking accessible through reflection. We've done that - // because using methods just via reflection is rare, and - // not worth the false negatives. With the new -reflect - // flag, however, we should reconsider that choice. - if !c.WholeProgram { - c.useExportedMethods(obj.Type()) - } - } - - switch obj := obj.(type) { - case *types.Var, *types.Const, *types.Func, *types.TypeName: - if obj.Exported() { - // Exported variables and constants use their types, - // even if there's no expression using them in the - // checked program. - // - // Also operates on funcs and type names, but that's - // irrelevant/redundant. - c.graph.markUsedBy(obj.Type(), obj) - } - if obj.Name() == "_" { - node := c.graph.getNode(obj) - node.quiet = true - scope := c.topmostScope(pkg.Types.Scope().Innermost(obj.Pos()), pkg.Types) - if scope == pkg.Types.Scope() { - c.graph.roots = append(c.graph.roots, node) - } else { - c.graph.markUsedBy(obj, scope) - } + seen[v] = struct{}{} + for _, e := range v.Edges { + if ev, ok := e.(*ssa.Phi); ok { + impl(ev) } else { - // Variables declared in functions are used. This is - // done so that arguments and return parameters are - // always marked as used. - if _, ok := obj.(*types.Var); ok { - if obj.Parent() != obj.Pkg().Scope() && obj.Parent() != nil { - c.graph.markUsedBy(obj, c.topmostScope(obj.Parent(), obj.Pkg())) - c.graph.markUsedBy(obj.Type(), obj) - } - } - } - } - - if fn, ok := obj.(*types.Func); ok { - // A function uses its signature - c.graph.markUsedBy(fn, fn.Type()) - - // A function uses its return types - sig := fn.Type().(*types.Signature) - res := sig.Results() - n := res.Len() - for i := 0; i < n; i++ { - c.graph.markUsedBy(res.At(i).Type(), fn) - } - } - - if obj, ok := obj.(interface { - Scope() *types.Scope - Pkg() *types.Package - }); ok { - scope := obj.Scope() - c.graph.markUsedBy(c.topmostScope(scope, obj.Pkg()), obj) - } - - if c.isRoot(obj) { - node := c.graph.getNode(obj) - c.graph.roots = append(c.graph.roots, node) - if obj, ok := obj.(*types.PkgName); ok { - scope := obj.Pkg().Scope() - c.graph.markUsedBy(scope, obj) + fn(e) } } } + impl(phi) } -func (c *Checker) processUses(pkg *lint.Pkg) { - for ident, usedObj := range pkg.TypesInfo.Uses { - if _, ok := usedObj.(*types.PkgName); ok { - continue - } - pos := ident.Pos() - scope := pkg.Types.Scope().Innermost(pos) - scope = c.topmostScope(scope, pkg.Types) - if scope != pkg.Types.Scope() { - c.graph.markUsedBy(usedObj, scope) - } - - switch usedObj.(type) { - case *types.Var, *types.Const: - c.graph.markUsedBy(usedObj.Type(), usedObj) - } +func interfacesFromExportData(pkg *types.Package) []*types.Interface { + var out []*types.Interface + scope := pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + out = append(out, interfacesFromObject(obj)...) 
} + return out } -func (c *Checker) findExportedInterfaces() { - c.interfaces = []*types.Interface{types.Universe.Lookup("error").Type().(*types.Named).Underlying().(*types.Interface)} - var pkgs []*packages.Package - if c.WholeProgram { - pkgs = append(pkgs, c.prog.AllPackages...) - } else { - for _, pkg := range c.prog.InitialPackages { - pkgs = append(pkgs, pkg.Package) - } - } - - for _, pkg := range pkgs { - for _, tv := range pkg.TypesInfo.Types { - iface, ok := tv.Type.(*types.Interface) - if !ok { - continue - } - if iface.NumMethods() == 0 { - continue - } - c.interfaces = append(c.interfaces, iface) - } - } -} - -func (c *Checker) processTypes(pkg *lint.Pkg) { - named := map[*types.Named]*types.Pointer{} - var interfaces []*types.Interface - for _, tv := range pkg.TypesInfo.Types { - if typ, ok := tv.Type.(interface { - Elem() types.Type - }); ok { - c.graph.markUsedBy(typ.Elem(), typ) - } - - switch obj := tv.Type.(type) { - case *types.Named: - named[obj] = types.NewPointer(obj) - c.graph.markUsedBy(obj, obj.Underlying()) - c.graph.markUsedBy(obj.Underlying(), obj) - case *types.Interface: - if obj.NumMethods() > 0 { - interfaces = append(interfaces, obj) - } - case *types.Struct: - c.useNoCopyFields(obj) - if pkg.Types.Name() != "main" && !c.WholeProgram { - c.useExportedFields(obj, obj) - } - } - } - - // Pretend that all types are meant to implement as many - // interfaces as possible. - // - // TODO(dh): For normal operations, that's the best we can do, as - // we have no idea what external users will do with our types. In - // whole-program mode, we could be more precise, in two ways: - // 1) Only consider interfaces if a type has been assigned to one - // 2) Use SSA and flow analysis and determine the exact set of - // interfaces that is relevant. 
- fn := func(iface *types.Interface) { - for i := 0; i < iface.NumEmbeddeds(); i++ { - c.graph.markUsedBy(iface.Embedded(i), iface) - } - namedLoop: - for obj, objPtr := range named { - switch obj.Underlying().(type) { - case *types.Interface: - // pointers to interfaces have no methods, only checking non-pointer - if !c.implements(obj, iface) { - continue namedLoop - } - default: - // pointer receivers include the method set of non-pointer receivers, - // only checking pointer - if !c.implements(objPtr, iface) { - continue namedLoop - } - } - - ifaceMethods := make(map[string]struct{}, iface.NumMethods()) - n := iface.NumMethods() - for i := 0; i < n; i++ { - meth := iface.Method(i) - ifaceMethods[meth.Name()] = struct{}{} - } - for _, obj := range []types.Type{obj, objPtr} { - ms := c.msCache.MethodSet(obj) - n := ms.Len() - for i := 0; i < n; i++ { - sel := ms.At(i) - meth := sel.Obj().(*types.Func) - _, found := ifaceMethods[meth.Name()] - if !found { - continue - } - c.graph.markUsedBy(meth.Type().(*types.Signature).Recv().Type(), obj) // embedded receiver - if len(sel.Index()) > 1 { - f := getField(obj, sel.Index()[0]) - c.graph.markUsedBy(f, obj) // embedded receiver - } - c.graph.markUsedBy(meth, obj) - } - } - } - } - - for _, iface := range interfaces { - fn(iface) - } - for _, iface := range c.interfaces { - fn(iface) - } -} - -func (c *Checker) processSelections(pkg *lint.Pkg) { - fn := func(expr *ast.SelectorExpr, sel *types.Selection, offset int) { - scope := pkg.Types.Scope().Innermost(expr.Pos()) - c.graph.markUsedBy(sel, c.topmostScope(scope, pkg.Types)) - c.graph.markUsedBy(sel.Obj(), sel) - if len(sel.Index()) > 1 { - typ := sel.Recv() - indices := sel.Index() - for _, idx := range indices[:len(indices)-offset] { - obj := getField(typ, idx) - typ = obj.Type() - c.graph.markUsedBy(obj, sel) - } - } - } - - for expr, sel := range pkg.TypesInfo.Selections { - switch sel.Kind() { - case types.FieldVal: - fn(expr, sel, 0) - case types.MethodVal: - fn(expr, sel, 1) - } - } -} - -func dereferenceType(typ types.Type) types.Type { - if typ, ok := typ.(*types.Pointer); ok { - return typ.Elem() - } - return typ -} - -// processConversion marks fields as used if they're part of a type conversion. -func (c *Checker) processConversion(pkg *lint.Pkg, node ast.Node) { - if node, ok := node.(*ast.CallExpr); ok { - callTyp := pkg.TypesInfo.TypeOf(node.Fun) - var typDst *types.Struct - var ok bool - switch typ := callTyp.(type) { - case *types.Named: - typDst, ok = typ.Underlying().(*types.Struct) - case *types.Pointer: - typDst, ok = typ.Elem().Underlying().(*types.Struct) - default: - return - } - if !ok { - return - } - - if typ, ok := pkg.TypesInfo.TypeOf(node.Args[0]).(*types.Basic); ok && typ.Kind() == types.UnsafePointer { - // This is an unsafe conversion. Assume that all the - // fields are relevant (they are, because of memory - // layout) - n := typDst.NumFields() - for i := 0; i < n; i++ { - c.graph.markUsedBy(typDst.Field(i), typDst) - } - return - } - - typSrc, ok := dereferenceType(pkg.TypesInfo.TypeOf(node.Args[0])).Underlying().(*types.Struct) - if !ok { - return - } - - // When we convert from type t1 to t2, were t1 and t2 are - // structs, all fields are relevant, as otherwise the - // conversion would fail. - // - // We mark t2's fields as used by t1's fields, and vice - // versa. That way, if no code actually refers to a field - // in either type, it's still correctly marked as unused. 
- // If a field is used in either struct, it's implicitly - // relevant in the other one, too. - // - // It works in a similar way for conversions between types - // of two packages, only that the extra information in the - // graph is redundant unless we're in whole program mode. - n := typDst.NumFields() - for i := 0; i < n; i++ { - fDst := typDst.Field(i) - fSrc := typSrc.Field(i) - c.graph.markUsedBy(fDst, fSrc) - c.graph.markUsedBy(fSrc, fDst) - } - } -} - -// processCompositeLiteral marks fields as used if the struct is used -// in a composite literal. -func (c *Checker) processCompositeLiteral(pkg *lint.Pkg, node ast.Node) { - // XXX how does this actually work? wouldn't it match t{}? - if node, ok := node.(*ast.CompositeLit); ok { - typ := pkg.TypesInfo.TypeOf(node) - if _, ok := typ.(*types.Named); ok { - typ = typ.Underlying() - } - if _, ok := typ.(*types.Struct); !ok { - return - } - - if isBasicStruct(node.Elts) { - c.markFields(typ) - } - } -} - -// processCgoExported marks functions as used if they're being -// exported to cgo. -func (c *Checker) processCgoExported(pkg *lint.Pkg, node ast.Node) { - if node, ok := node.(*ast.FuncDecl); ok { - if node.Doc == nil { - return - } - for _, cmt := range node.Doc.List { - if !strings.HasPrefix(cmt.Text, "//go:cgo_export_") { - return - } - obj := pkg.TypesInfo.ObjectOf(node.Name) - c.graph.roots = append(c.graph.roots, c.graph.getNode(obj)) - } - } -} - -func (c *Checker) processVariableDeclaration(pkg *lint.Pkg, node ast.Node) { - if decl, ok := node.(*ast.GenDecl); ok { - for _, spec := range decl.Specs { - spec, ok := spec.(*ast.ValueSpec) - if !ok { - continue - } - for i, name := range spec.Names { - if i >= len(spec.Values) { - break - } - value := spec.Values[i] - fn := func(node ast.Node) bool { - if node3, ok := node.(*ast.Ident); ok { - obj := pkg.TypesInfo.ObjectOf(node3) - if _, ok := obj.(*types.PkgName); ok { - return true - } - c.graph.markUsedBy(obj, pkg.TypesInfo.ObjectOf(name)) - } - return true - } - ast.Inspect(value, fn) - } - } - } -} - -func (c *Checker) processArrayConstants(pkg *lint.Pkg, node ast.Node) { - if decl, ok := node.(*ast.ArrayType); ok { - ident, ok := decl.Len.(*ast.Ident) - if !ok { - return - } - c.graph.markUsedBy(pkg.TypesInfo.ObjectOf(ident), pkg.TypesInfo.TypeOf(decl)) - } -} - -func (c *Checker) processKnownReflectMethodCallers(pkg *lint.Pkg, node ast.Node) { - call, ok := node.(*ast.CallExpr) - if !ok { - return - } - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return - } - if !IsType(pkg.TypesInfo.TypeOf(sel.X), "*net/rpc.Server") { - x, ok := sel.X.(*ast.Ident) - if !ok { - return - } - pkgname, ok := pkg.TypesInfo.ObjectOf(x).(*types.PkgName) - if !ok { - return - } - if pkgname.Imported().Path() != "net/rpc" { - return - } - } - - var arg ast.Expr - switch sel.Sel.Name { - case "Register": - if len(call.Args) != 1 { - return - } - arg = call.Args[0] - case "RegisterName": - if len(call.Args) != 2 { - return - } - arg = call.Args[1] - } - typ := pkg.TypesInfo.TypeOf(arg) - ms := types.NewMethodSet(typ) - for i := 0; i < ms.Len(); i++ { - c.graph.markUsedBy(ms.At(i).Obj(), typ) - } -} - -func (c *Checker) processAST(pkg *lint.Pkg) { - fn := func(node ast.Node) bool { - c.processConversion(pkg, node) - c.processKnownReflectMethodCallers(pkg, node) - c.processCompositeLiteral(pkg, node) - c.processCgoExported(pkg, node) - c.processVariableDeclaration(pkg, node) - c.processArrayConstants(pkg, node) - return true - } - for _, file := range pkg.Syntax { - ast.Inspect(file, fn) - } 
-} - -func isBasicStruct(elts []ast.Expr) bool { - for _, elt := range elts { - if _, ok := elt.(*ast.KeyValueExpr); !ok { - return true - } - } - return false -} - -func isPkgScope(obj types.Object) bool { - return obj.Parent() == obj.Pkg().Scope() -} - -func isMain(obj types.Object) bool { - if obj.Pkg().Name() != "main" { - return false - } - if obj.Name() != "main" { - return false - } - if !isPkgScope(obj) { - return false - } - if !isFunction(obj) { - return false - } - if isMethod(obj) { - return false - } - return true -} - -func isFunction(obj types.Object) bool { - _, ok := obj.(*types.Func) - return ok -} - -func isMethod(obj types.Object) bool { - if !isFunction(obj) { - return false - } - return obj.(*types.Func).Type().(*types.Signature).Recv() != nil -} - -func isVariable(obj types.Object) bool { - _, ok := obj.(*types.Var) - return ok -} - -func isConstant(obj types.Object) bool { - _, ok := obj.(*types.Const) - return ok -} - -func isType(obj types.Object) bool { - _, ok := obj.(*types.TypeName) - return ok -} - -func isField(obj types.Object) bool { - if obj, ok := obj.(*types.Var); ok && obj.IsField() { - return true - } - return false -} - -func (c *Checker) checkFlags(v interface{}) bool { - obj, ok := v.(types.Object) - if !ok { - return false - } - if isFunction(obj) && !c.checkFunctions() { - return false - } - if isVariable(obj) && !c.checkVariables() { - return false - } - if isConstant(obj) && !c.checkConstants() { - return false - } - if isType(obj) && !c.checkTypes() { - return false - } - if isField(obj) && !c.checkFields() { - return false - } - return true -} - -func (c *Checker) isRoot(obj types.Object) bool { - // - in local mode, main, init, tests, and non-test, non-main exported are roots - // - in global mode (not yet implemented), main, init and tests are roots - - if _, ok := obj.(*types.PkgName); ok { - return true - } - - if isMain(obj) || (isFunction(obj) && !isMethod(obj) && obj.Name() == "init") { - return true - } - if obj.Exported() { - f := c.prog.Fset().Position(obj.Pos()).Filename - if strings.HasSuffix(f, "_test.go") { - return strings.HasPrefix(obj.Name(), "Test") || - strings.HasPrefix(obj.Name(), "Benchmark") || - strings.HasPrefix(obj.Name(), "Example") - } - - // Package-level are used, except in package main - if isPkgScope(obj) && obj.Pkg().Name() != "main" && !c.WholeProgram { - return true - } - } - return false -} - -func markNodesUsed(nodes map[*graphNode]struct{}) { - for node := range nodes { - wasUsed := node.used - node.used = true - if !wasUsed { - markNodesUsed(node.uses) - } - } -} - -// deduplicate merges objects based on their positions. This is done -// to work around packages existing multiple times in go/packages. 
-func (c *Checker) deduplicate() { - m := map[token.Position]struct{ used, quiet bool }{} - for _, node := range c.graph.nodes { - obj, ok := node.obj.(types.Object) - if !ok { - continue - } - pos := c.prog.Fset().Position(obj.Pos()) - m[pos] = struct{ used, quiet bool }{ - m[pos].used || node.used, - m[pos].quiet || node.quiet, - } - } - - for _, node := range c.graph.nodes { - obj, ok := node.obj.(types.Object) - if !ok { - continue - } - pos := c.prog.Fset().Position(obj.Pos()) - node.used = m[pos].used - node.quiet = m[pos].quiet - } -} - -func (c *Checker) markNodesQuiet() { - for _, node := range c.graph.nodes { - if node.used { - continue - } - if obj, ok := node.obj.(types.Object); ok && !c.checkFlags(obj) { - node.quiet = true - continue - } - c.markObjQuiet(node.obj) - } -} - -func (c *Checker) markObjQuiet(obj interface{}) { +func interfacesFromObject(obj types.Object) []*types.Interface { + var out []*types.Interface switch obj := obj.(type) { - case *types.Named: - n := obj.NumMethods() - for i := 0; i < n; i++ { - meth := obj.Method(i) - node := c.graph.getNode(meth) - node.quiet = true - c.markObjQuiet(meth.Scope()) - } - case *types.Struct: - n := obj.NumFields() - for i := 0; i < n; i++ { - field := obj.Field(i) - c.graph.nodes[field].quiet = true - } case *types.Func: - c.markObjQuiet(obj.Scope()) - case *types.Scope: - if obj == nil { - return + sig := obj.Type().(*types.Signature) + for i := 0; i < sig.Results().Len(); i++ { + out = append(out, interfacesFromObject(sig.Results().At(i))...) } - if obj.Parent() == types.Universe { - return + for i := 0; i < sig.Params().Len(); i++ { + out = append(out, interfacesFromObject(sig.Params().At(i))...) } - for _, name := range obj.Names() { - v := obj.Lookup(name) - if n, ok := c.graph.nodes[v]; ok { - n.quiet = true + case *types.TypeName: + if named, ok := obj.Type().(*types.Named); ok { + for i := 0; i < named.NumMethods(); i++ { + out = append(out, interfacesFromObject(named.Method(i))...) + } + + if iface, ok := named.Underlying().(*types.Interface); ok { + out = append(out, iface) } } - n := obj.NumChildren() - for i := 0; i < n; i++ { - c.markObjQuiet(obj.Child(i)) + case *types.Var: + // No call to Underlying here. We want unnamed interfaces + // only. Named interfaces are gotten directly from the + // package's scope. 
+ if iface, ok := obj.Type().(*types.Interface); ok { + out = append(out, iface) } + case *types.Const: + case *types.Builtin: + default: + panic(fmt.Sprintf("unhandled type: %T", obj)) } -} - -func getField(typ types.Type, idx int) *types.Var { - switch obj := typ.(type) { - case *types.Pointer: - return getField(obj.Elem(), idx) - case *types.Named: - switch v := obj.Underlying().(type) { - case *types.Struct: - return v.Field(idx) - case *types.Pointer: - return getField(v.Elem(), idx) - default: - panic(fmt.Sprintf("unexpected type %s", typ)) - } - case *types.Struct: - return obj.Field(idx) - } - return nil -} - -func (c *Checker) topmostScope(scope *types.Scope, pkg *types.Package) (ret *types.Scope) { - if top, ok := c.topmostCache[scope]; ok { - return top - } - defer func() { - c.topmostCache[scope] = ret - }() - if scope == pkg.Scope() { - return scope - } - if scope.Parent().Parent() == pkg.Scope() { - return scope - } - return c.topmostScope(scope.Parent(), pkg) -} - -func (c *Checker) printDebugGraph(w io.Writer) { - fmt.Fprintln(w, "digraph {") - fmt.Fprintln(w, "n0 [label = roots]") - for _, node := range c.graph.nodes { - s := fmt.Sprintf("%s (%T)", node.obj, node.obj) - s = strings.Replace(s, "\n", "", -1) - s = strings.Replace(s, `"`, "", -1) - fmt.Fprintf(w, `n%d [label = %q]`, node.n, s) - color := "black" - switch { - case node.used: - color = "green" - case node.quiet: - color = "orange" - case !c.checkFlags(node.obj): - color = "purple" - default: - color = "red" - } - fmt.Fprintf(w, "[color = %s]", color) - fmt.Fprintln(w) - } - - for _, node1 := range c.graph.nodes { - for node2 := range node1.uses { - fmt.Fprintf(w, "n%d -> n%d\n", node1.n, node2.n) - } - } - for _, root := range c.graph.roots { - fmt.Fprintf(w, "n0 -> n%d\n", root.n) - } - fmt.Fprintln(w, "}") + return out } diff --git a/vendor/modules.txt b/vendor/modules.txt index 01586ca7..c119bb18 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -180,6 +180,7 @@ golang.org/x/text/unicode/bidi golang.org/x/tools/go/loader golang.org/x/tools/go/ast/astutil golang.org/x/tools/go/gcexportdata +golang.org/x/tools/go/ast/inspector golang.org/x/tools/go/packages golang.org/x/tools/go/types/typeutil golang.org/x/tools/go/buildutil @@ -200,7 +201,7 @@ gopkg.in/gomail.v2 gopkg.in/testfixtures.v2 # gopkg.in/yaml.v2 v2.2.2 gopkg.in/yaml.v2 -# honnef.co/go/tools v0.0.0-20190215041234-466a0476246c +# honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a honnef.co/go/tools/cmd/staticcheck honnef.co/go/tools/lint honnef.co/go/tools/lint/lintutil @@ -218,8 +219,10 @@ honnef.co/go/tools/internal/sharedcheck honnef.co/go/tools/lint/lintdsl honnef.co/go/tools/deprecated honnef.co/go/tools/functions +honnef.co/go/tools/printf honnef.co/go/tools/ssautil honnef.co/go/tools/staticcheck/vrp +honnef.co/go/tools/go/types/typeutil honnef.co/go/tools/callgraph honnef.co/go/tools/callgraph/static # src.techknowlogick.com/xormigrate v0.0.0-20190321151057-24497c23c09c
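
The new unused-checker code vendored above (rules 5.1 and 5.2) pairs the fields of two structs whenever one is converted to the other, so a field stays alive only if it is also accessed outside the conversion. The following is a minimal, self-contained sketch of that pairing idea, not the vendored implementation; markUsedBy is a made-up placeholder for the edge the checker's graph records via g.seeAndUse.

package main

import (
	"fmt"
	"go/types"
)

// markUsedBy is a placeholder for the checker's dependency-graph edge
// (the vendored code records this via g.seeAndUse instead).
func markUsedBy(used, by *types.Var) {
	fmt.Printf("%s used by %s\n", used.Name(), by.Name())
}

// pairConvertedFields mirrors rule (5.1): in a conversion between two
// structurally identical structs, field i of each struct keeps field i
// of the other alive.
func pairConvertedFields(s1, s2 *types.Struct) {
	if s1.NumFields() != s2.NumFields() {
		return // such a conversion would not type-check
	}
	for i := 0; i < s1.NumFields(); i++ {
		markUsedBy(s1.Field(i), s2.Field(i))
		markUsedBy(s2.Field(i), s1.Field(i))
	}
}

func main() {
	field := func(name string) *types.Var {
		return types.NewField(0, nil, name, types.Typ[types.Int], false)
	}
	a := types.NewStruct([]*types.Var{field("X"), field("Y")}, nil)
	b := types.NewStruct([]*types.Var{field("X"), field("Y")}, nil)
	pairConvertedFields(a, b)
}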
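The update also replaces findExportedInterfaces with interfacesFromExportData/interfacesFromObject, which gather interface types from a package's scope rather than from TypesInfo. Below is a rough stand-alone sketch of that scope-walking idea under simplifying assumptions: the "demo" source snippet is invented, and Underlying() is used for brevity, whereas the vendored helper distinguishes named interfaces (found in the scope) from unnamed ones (found on variables and signatures).

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

// A tiny package to type-check; the "demo" package and its contents are
// made up for this example.
const src = `package demo

type Reader interface{ Read(p []byte) (n int, err error) }

type point struct{ X, Y int }
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}
	conf := types.Config{Importer: importer.Default()}
	pkg, err := conf.Check("demo", fset, []*ast.File{file}, nil)
	if err != nil {
		panic(err)
	}
	// Walk the package scope and report every object whose underlying
	// type is an interface, analogous in spirit to interfacesFromExportData.
	scope := pkg.Scope()
	for _, name := range scope.Names() {
		obj := scope.Lookup(name)
		if iface, ok := obj.Type().Underlying().(*types.Interface); ok {
			fmt.Printf("%s: interface with %d method(s)\n", name, iface.NumMethods())
		}
	}
}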