Update module spf13/viper to v1.7.0 (#494)

Reviewed-on: https://kolaente.dev/vikunja/api/pulls/494

parent 18f6e31b54
commit def2362682

89 changed files with 9018 additions and 4502 deletions
go.mod (4 changed lines)
@@ -62,7 +62,7 @@ require (
 github.com/spf13/afero v1.2.2
 github.com/spf13/cobra v0.0.7
 github.com/spf13/jwalterweatherman v1.1.0 // indirect
-github.com/spf13/viper v1.6.3
+github.com/spf13/viper v1.7.0
 github.com/stretchr/testify v1.5.1
 github.com/swaggo/swag v1.6.3
 github.com/ulule/limiter/v3 v3.5.0
@@ -75,7 +75,7 @@ require (
 gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
 gopkg.in/d4l3k/messagediff.v1 v1.2.1
 gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
-honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a
+honnef.co/go/tools v0.0.1-2019.2.3
 src.techknowlogick.com/xgo v0.0.0-20200408234745-bb0faa361273
 src.techknowlogick.com/xormigrate v1.2.0
 xorm.io/builder v0.3.7
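A bump like this can be reproduced locally with the standard Go module commands (a sketch; module path and version are taken from the diff above):

    go get github.com/spf13/viper@v1.7.0
    go mod tidy

The toolchain then records checksums for every module version reachable in the new module graph, which is what produces the large go.sum diff below.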
go.sum (116 changed lines)
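For orientation, each module version in go.sum is recorded as a pair of checksum lines: an h1: hash of the module's content and a second h1: hash of just that version's go.mod file. That is why the new viper release shows up below as two added lines:

    github.com/spf13/viper v1.7.0 h1:xVKxvI7ouOI5I+U9s2eeiUfMaWBVoXA3AWskkrqK0VM=
    github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg=

In the hunks that follow, lines prefixed with + were added by this update; unprefixed lines are unchanged context.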
@@ -9,8 +9,19 @@ cloud.google.com/go v0.34.0 h1:eOI3/cP2VTU6uZLDYAoic+eyzzB9YyGmJ7eIjl8rOPg=
 cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 cloud.google.com/go v0.37.4 h1:glPeL3BQJsbF6aIIYfZizMwc5LTYz250bDMjttbBGAU=
 cloud.google.com/go v0.37.4/go.mod h1:NHPJ89PdicEuT9hdPXMROBD91xc5uRDxsMtSB16k7hw=
+cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
+cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
+cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
+cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
+cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
+cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
+cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
+cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk=
+cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
+cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
 code.vikunja.io/web v0.0.0-20200208214421-c90649369427 h1:6ps5r0OxZNRdmCavh1k/xMwftN27hHauo+EtdTGxLug=
 code.vikunja.io/web v0.0.0-20200208214421-c90649369427/go.mod h1:cuP1/ieGWAZzgQGw+QPt6Y5F0fVb/8Ol5NV4QSezGdo=
+dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
 gitea.com/xorm/sqlfiddle v0.0.0-20180821085327-62ce714f951a h1:lSA0F4e9A2NcQSqGqTOXqu2aRi/XEQxDCBwM8yJtE6s=
 gitea.com/xorm/sqlfiddle v0.0.0-20180821085327-62ce714f951a/go.mod h1:EXuID2Zs0pAQhH8yz+DNjUbjppKQzKFAn28TMYPB6IU=
 gitea.com/xorm/tests v0.7.0 h1:pFcaxTGGAWw3rDuVfhBdyr+mX1uzdTtncyAKxkCQ/IE=
@@ -19,6 +30,7 @@ gitea.com/xorm/xorm-redis-cache v0.2.0 h1:qglRHt6/7vJmDeld6j+n10M9PmruAh+Le2lgNr
 gitea.com/xorm/xorm-redis-cache v0.2.0/go.mod h1:juYdjkmIKvLbPkdfBVKGVJ2daFQIJAgKsn4mL4ZK8Zk=
 github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
 github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
 github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
 github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
@@ -36,7 +48,10 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuy
 github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
 github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
 github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
+github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
 github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
+github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
+github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
 github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a h1:idn718Q4B6AGu/h5Sxe66HYVdqdGu2l9Iebqhi/AEoA=
 github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
 github.com/beevik/etree v1.1.0 h1:T0xke/WvNtMoCqgzPhkX2r4rjY3GDZFi+FjpRZY2Jbs=
@@ -47,6 +62,8 @@ github.com/beorn7/perks v1.0.0 h1:HWo1m869IqiPhD389kmkxeTalrjNbbJTC8LXupb+sl0=
 github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
 github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
 github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
+github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
+github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
 github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc h1:biVzkmvwrH8WK8raXaxBx6fRVTlJILwEwQGL1I/ByEI=
 github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
 github.com/c2h5oh/datasize v0.0.0-20200112174442-28bbd4740fee h1:BnPxIde0gjtTnc9Er7cxvBk8DHLWhEux0SxayC8dP6I=
@@ -62,6 +79,7 @@ github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMe
 github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
 github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
 github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
+github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
 github.com/coreos/go-systemd/v22 v22.0.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk=
 github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
 github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
@@ -86,6 +104,7 @@ github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8
 github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
 github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
 github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
+github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
 github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
 github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
 github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
@@ -103,6 +122,7 @@ github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm
 github.com/gin-gonic/gin v1.3.0/go.mod h1:7cKuhb5qV2ggCFctp2fJQ+ErvciLZrIeoOSOm6mUr7Y=
 github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM=
 github.com/gin-gonic/gin v1.6.2/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M=
+github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
 github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
 github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
 github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
@@ -153,6 +173,7 @@ github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfU
 github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
 github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
 github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
 github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
 github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg=
@@ -182,7 +203,10 @@ github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
 github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
+github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
+github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
 github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
+github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
 github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
 github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
 github.com/gordonklaus/ineffassign v0.0.0-20200309095847-7953dde2c7bf h1:vc7Dmrk4JwS0ZPS6WZvWlwDflgDTA26jItmbSj83nug=
@@ -190,12 +214,31 @@ github.com/gordonklaus/ineffassign v0.0.0-20200309095847-7953dde2c7bf/go.mod h1:
 github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
 github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
 github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
+github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
 github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
 github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
 github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
+github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
+github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
+github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
+github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
+github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
+github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
+github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
+github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
+github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
+github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
+github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
+github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
+github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
 github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
+github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
 github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
 github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
+github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
+github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
+github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
+github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
 github.com/iancoleman/strcase v0.0.0-20191112232945-16388991a334 h1:VHgatEHNcBFEB7inlalqfNqw65aNkM1lGX2yt3NmbS8=
 github.com/iancoleman/strcase v0.0.0-20191112232945-16388991a334/go.mod h1:SK73tn/9oHe+/Y0h39VT4UCxmurVJkR5NA7kMEAOgSE=
 github.com/imdario/mergo v0.3.9 h1:UauaLniWCFHWd+Jp9oCEkTBj8VO/9DKg3PV3VCNMDIg=
@@ -271,10 +314,12 @@ github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN
 github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
 github.com/mailru/easyjson v0.7.0 h1:aizVhC/NAAcKWb+5QsU1iNOZb4Yws5UO2I+aIprQITM=
 github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=
+github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
 github.com/mattn/go-colorable v0.1.2 h1:/bC9yWikZXAL9uJdulbSfyVNIR3n3trXl+v8+1sx8mU=
 github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
 github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=
 github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
+github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
 github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs=
 github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
 github.com/mattn/go-isatty v0.0.7 h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc=
@@ -294,7 +339,14 @@ github.com/mattn/go-sqlite3 v2.0.3+incompatible h1:gXHsfypPkaMZrKbD5209QV9jbUTJK
 github.com/mattn/go-sqlite3 v2.0.3+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
 github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
 github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
+github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
+github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
+github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
 github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
+github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
+github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
+github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
+github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
 github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
 github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@@ -325,6 +377,7 @@ github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoT
 github.com/op/go-logging v0.0.0-20160315200505-970db520ece7 h1:lDH9UUVJtmYCjyT0CI4q8xvlXPxeZ0gYCVvWbmPlp88=
 github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
 github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
+github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
 github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
 github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
 github.com/pelletier/go-toml v1.4.0 h1:u3Z1r+oOXJIkxqw34zVhyPgjBsm6X2wn21NWs/HfSeg=
@@ -338,6 +391,7 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
 github.com/pquerna/otp v1.2.0 h1:/A3+Jn+cagqayeR3iHs/L62m5ue7710D35zl1zJ1kok=
 github.com/pquerna/otp v1.2.0/go.mod h1:dkJfzwRKNiegxyNb54X/3fLwhCynbMspSyWKnvi1AEg=
 github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
@@ -378,8 +432,11 @@ github.com/prometheus/procfs v0.0.11/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4
 github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
 github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
 github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
+github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
 github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
 github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
+github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
 github.com/shopspring/decimal v0.0.0-20191009025716-f1972eb1d1f5/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
 github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749 h1:bUGsEnyNbVPw06Bs80sCeARAlK8lhwqGyi6UT8ymuGk=
 github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
@@ -414,6 +471,8 @@ github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An
 github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
 github.com/spf13/viper v1.6.3 h1:pDDu1OyEDTKzpJwdq4TiuLyMsUgRa/BT5cn5O62NoHs=
 github.com/spf13/viper v1.6.3/go.mod h1:jUMtyi0/lB5yZH/FjyGAoH7IMNrIhlBf6pXZmbMDvzw=
+github.com/spf13/viper v1.7.0 h1:xVKxvI7ouOI5I+U9s2eeiUfMaWBVoXA3AWskkrqK0VM=
+github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
@@ -463,12 +522,16 @@ github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wK
 go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
 go.etcd.io/bbolt v1.3.4/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
 go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
+go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
+go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
 go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
 go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
 go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
 golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
+golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20190621222207-cc06ce4a13d4 h1:ydJNl0ENAG67pFbB+9tfhiL2pYqLhfoaZFw/cjLhY4A=
@@ -492,12 +555,25 @@ golang.org/x/crypto v0.0.0-20200427165652-729f1e841bcc/go.mod h1:LzIPMQfyMNhhGPh
 golang.org/x/crypto v0.0.0-20200429183012-4b2356b1ed79 h1:IaQbIIB2X/Mp/DKctl6ROxz1KyMlKp4uyvL6+kQ7C88=
 golang.org/x/crypto v0.0.0-20200429183012-4b2356b1ed79/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
+golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
+golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
+golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
+golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
 golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
 golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
 golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
 golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
 golang.org/x/lint v0.0.0-20200302205851-738671d3881b h1:Wh+f8QHJXR411sJR8/vRBTZ7YapZaRvUcLFFJhusH0k=
 golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
+golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
+golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
+golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
 golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
 golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ=
 golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
@@ -506,13 +582,16 @@ golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73r
 golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20181005035420-146acd28ed58 h1:otZG8yDCO4LVps5+9bxOeNiCvgmOyt96J3roHTYs7oE=
 golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
 golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c h1:uOCk1iQW6Vc18bnC13MfzScl+wdKBmM9Y9kU7Z83/lw=
 golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
 golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
@@ -530,6 +609,7 @@ golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e h1:3G+cUijn7XD+S4eJFddp53Pv7
 golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20181108010431-42b317875d0f h1:Bl/8QSvNqXvPGPGXa2z5xUTmV7VDcZyvRZ+QQXkXTZQ=
 golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -537,22 +617,28 @@ golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20181228144115-9a3f9b0469bb/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190602015325-4c4f7f33c9ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190609082536-301114b31cce/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190610200419-93c9922d18ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb h1:fgwFCsaw9buMuxNd6+DQfAuSFqbNiQZpcgJQAgJsK6k=
 golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -584,14 +670,24 @@ golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGm
 golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
 golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
 golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
 golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
 golang.org/x/tools v0.0.0-20190606050223-4d9ae51c2468/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
 golang.org/x/tools v0.0.0-20190608022120-eacb66d2a7c3/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
 golang.org/x/tools v0.0.0-20190611222205-d73e1c7e250b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
 golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
 golang.org/x/tools v0.0.0-20190628034336-212fb13d595e h1:ZlQjfVdpDxeqxRfmO30CdqWWzTvgRCj0MxaUVfxEG1k=
 golang.org/x/tools v0.0.0-20190628034336-212fb13d595e/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7 h1:EBZoQjiKKPaLbPrbpssUfuHtwM6KV/vb4U85g/cigFY=
 golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
@@ -602,18 +698,34 @@ golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8T
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
+google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
+google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
+google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
+google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
+google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
 google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
 google.golang.org/appengine v1.3.0 h1:FBSsiFRMz3LBeXIomRnVzrQwSDj4ibvcRexLG0LZGQk=
 google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
 google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
 google.golang.org/appengine v1.6.0 h1:Tfd7cKwKbFRsI8RMAD3oqqw7JPFRrvFlOsfbgVkjOOw=
 google.golang.org/appengine v1.6.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
 google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
 google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
 google.golang.org/genproto v0.0.0-20190404172233-64821d5d2107/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
+google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
 google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
 google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
+google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
 google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
+google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
 google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
 google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
+google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
||||||
|
@ -632,6 +744,7 @@ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8X
|
||||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/d4l3k/messagediff.v1 v1.2.1 h1:70AthpjunwzUiarMHyED52mj9UwtAnE89l1Gmrt3EU0=
|
gopkg.in/d4l3k/messagediff.v1 v1.2.1 h1:70AthpjunwzUiarMHyED52mj9UwtAnE89l1Gmrt3EU0=
|
||||||
gopkg.in/d4l3k/messagediff.v1 v1.2.1/go.mod h1:EUzikiKadqXWcD1AzJLagx0j/BeeWGtn++04Xniyg44=
|
gopkg.in/d4l3k/messagediff.v1 v1.2.1/go.mod h1:EUzikiKadqXWcD1AzJLagx0j/BeeWGtn++04Xniyg44=
|
||||||
|
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||||
gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE=
|
gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE=
|
||||||
gopkg.in/go-playground/validator.v8 v8.18.2/go.mod h1:RX2a/7Ha8BgOhfk7j780h4/u/RRjR0eouCJSH80/M2Y=
|
gopkg.in/go-playground/validator.v8 v8.18.2/go.mod h1:RX2a/7Ha8BgOhfk7j780h4/u/RRjR0eouCJSH80/M2Y=
|
||||||
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df h1:n7WqCuqOuCbNr617RXOY0AWRXxgwEyPp2z+p0+hgMuE=
|
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df h1:n7WqCuqOuCbNr617RXOY0AWRXxgwEyPp2z+p0+hgMuE=
|
||||||
|
@ -657,6 +770,9 @@ honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWh
|
||||||
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||||
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a h1:LJwr7TCTghdatWv40WobzlKXc9c4s8oGa7QKJUtHhWA=
|
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a h1:LJwr7TCTghdatWv40WobzlKXc9c4s8oGa7QKJUtHhWA=
|
||||||
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||||
|
honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM=
|
||||||
|
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||||
|
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
|
||||||
src.techknowlogick.com/xgo v0.0.0-20200408234745-bb0faa361273 h1:dE6ry9rVwDn3soD4wPCXqEG60AZTuhniZzHdnj3c+74=
|
src.techknowlogick.com/xgo v0.0.0-20200408234745-bb0faa361273 h1:dE6ry9rVwDn3soD4wPCXqEG60AZTuhniZzHdnj3c+74=
|
||||||
src.techknowlogick.com/xgo v0.0.0-20200408234745-bb0faa361273/go.mod h1:31CE1YKtDOrKTk9PSnjTpe6YbO6W/0LTYZ1VskL09oU=
|
src.techknowlogick.com/xgo v0.0.0-20200408234745-bb0faa361273/go.mod h1:31CE1YKtDOrKTk9PSnjTpe6YbO6W/0LTYZ1VskL09oU=
|
||||||
src.techknowlogick.com/xormigrate v1.2.0 h1:bq9JaI48bxB+OddMghicjmV7sGmBUogJq4HmTN0DOcw=
|
src.techknowlogick.com/xormigrate v1.2.0 h1:bq9JaI48bxB+OddMghicjmV7sGmBUogJq4HmTN0DOcw=
|
||||||
|
|
15 vendor/github.com/spf13/viper/.editorconfig generated vendored Normal file
@@ -0,0 +1,15 @@
root = true

[*]
charset = utf-8
end_of_line = lf
indent_size = 4
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true

[*.go]
indent_style = tab

[{Makefile, *.mk}]
indent_style = tab
17 vendor/github.com/spf13/viper/.gitignore generated vendored
@@ -1,20 +1,5 @@
/.idea/
/bin/
/build/
/var/
/vendor/

# IDE integration
/.vscode/*
!/.vscode/launch.json
!/.vscode/tasks.json
/.idea/*
!/.idea/codeStyles/
!/.idea/copyright/
!/.idea/dataSources.xml
!/.idea/*.iml
!/.idea/externalDependencies.xml
!/.idea/go.imports.xml
!/.idea/modules.xml
!/.idea/runConfigurations/
!/.idea/scopes/
!/.idea/sqldialects.xml
5 vendor/github.com/spf13/viper/.golangci.yml generated vendored
@@ -21,4 +21,7 @@ linters:
    - scopelint
    - gocyclo
    - gocognit
    - gocritic

service:
  golangci-lint-version: 1.21.x
32 vendor/github.com/spf13/viper/.travis.yml generated vendored
@@ -1,32 +0,0 @@
go_import_path: github.com/spf13/viper

language: go

env:
  global:
    - GO111MODULE="on"
    - GOFLAGS="-mod=readonly"

go:
  - 1.11.x
  - 1.12.x
  - 1.13.x
  - tip

os:
  - linux
  - osx

matrix:
  allow_failures:
    - go: tip
  fast_finish: true

script:
  - go install ./...
  - diff -u <(echo -n) <(gofmt -d .)
  - go test -v ./...

after_success:
  - go get -u -d github.com/spf13/hugo
  - cd $GOPATH/src/github.com/spf13/hugo && make && ./hugo -s docs && cd -
31 vendor/github.com/spf13/viper/README.md generated vendored
@@ -1,10 +1,13 @@
![viper logo](https://cloud.githubusercontent.com/assets/173412/10886745/998df88a-8151-11e5-9448-4736db51020d.png)
![Viper](.github/logo.png?raw=true)

Go configuration with fangs!
[![Mentioned in Awesome Go](https://awesome.re/mentioned-badge-flat.svg)](https://github.com/avelino/awesome-go#configuration)

[![Actions](https://github.com/spf13/viper/workflows/CI/badge.svg)](https://github.com/spf13/viper)
[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/spf13/viper/CI?style=flat-square)](https://github.com/spf13/viper/actions?query=workflow%3ACI)
[![Join the chat at https://gitter.im/spf13/viper](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/spf13/viper?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![GoDoc](https://godoc.org/github.com/spf13/viper?status.svg)](https://godoc.org/github.com/spf13/viper)
[![Go Report Card](https://goreportcard.com/badge/github.com/spf13/viper?style=flat-square)](https://goreportcard.com/report/github.com/spf13/viper)
[![go.dev reference](https://img.shields.io/badge/go.dev-reference-007d9c?logo=go&logoColor=white&style=flat-square)](https://pkg.go.dev/mod/github.com/spf13/viper)

**Go configuration with fangs!**

Many Go projects are built using Viper including:

@@ -400,7 +403,7 @@ in a Key/Value store such as etcd or Consul. These values take precedence over
default values, but are overridden by configuration values retrieved from disk,
flags, or environment variables.

Viper uses [crypt](https://github.com/xordataexchange/crypt) to retrieve
Viper uses [crypt](https://github.com/bketelsen/crypt) to retrieve
configuration from the K/V store, which means that you can store your
configuration values encrypted and have them automatically decrypted if you have
the correct gpg keyring. Encryption is optional.

@@ -412,7 +415,7 @@ independently of it.
K/V store. `crypt` defaults to etcd on http://127.0.0.1:4001.

```bash
$ go get github.com/xordataexchange/crypt/bin/crypt
$ go get github.com/bketelsen/crypt/bin/crypt
$ crypt set -plaintext /config/hugo.json /Users/hugo/settings/config.json
```

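For reference (this is not part of the upstream diff), a minimal sketch of how the encrypted remote-config flow described in the hunk above is typically wired up in application code. The etcd endpoint, key path, and keyring location are placeholders; the calls themselves (`AddSecureRemoteProvider`, `SetConfigType`, `ReadRemoteConfig`) are viper's documented remote API, enabled by blank-importing `viper/remote`.

```go
package main

import (
	"log"

	"github.com/spf13/viper"
	// Blank import registers the remote config providers (etcd, consul, firestore) with viper.
	_ "github.com/spf13/viper/remote"
)

func main() {
	// Placeholder endpoint, config path and gpg keyring; adjust for your own setup.
	err := viper.AddSecureRemoteProvider("etcd", "http://127.0.0.1:4001", "/config/hugo.json", "/etc/secrets/mykeyring.gpg")
	if err != nil {
		log.Fatalf("adding secure remote provider: %v", err)
	}
	viper.SetConfigType("json") // the remote payload carries no file extension, so the format must be set explicitly
	if err := viper.ReadRemoteConfig(); err != nil {
		log.Fatalf("reading remote config: %v", err)
	}
	log.Println(viper.Get("port"))
}
```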
@@ -435,7 +438,7 @@ err := viper.ReadRemoteConfig()
```

#### Consul
You need to set a key to Consul key/value storage with JSON value containing your desired config.
For example, create a Consul key/value store key `MY_CONSUL_KEY` with value:

```json
@@ -454,6 +457,16 @@ fmt.Println(viper.Get("port")) // 8080
fmt.Println(viper.Get("hostname")) // myhostname.com
```

#### Firestore

```go
viper.AddRemoteProvider("firestore", "google-cloud-project-id", "collection/document")
viper.SetConfigType("json") // Config's format: "json", "toml", "yaml", "yml"
err := viper.ReadRemoteConfig()
```

Of course, you're allowed to use `SecureRemoteProvider` also

### Remote Key/Value Store Example - Encrypted

```go
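As a companion to the Consul and Firestore snippets in the hunk above, here is a self-contained sketch (not part of the upstream diff) of the unencrypted remote-read flow end to end. The Consul agent address is a placeholder; the key name and the expected values mirror the README's `MY_CONSUL_KEY` example.

```go
package main

import (
	"fmt"
	"log"

	"github.com/spf13/viper"
	_ "github.com/spf13/viper/remote" // registers the remote config providers
)

func main() {
	// Placeholder Consul address; "MY_CONSUL_KEY" is the example key from the README.
	if err := viper.AddRemoteProvider("consul", "localhost:8500", "MY_CONSUL_KEY"); err != nil {
		log.Fatalf("adding remote provider: %v", err)
	}
	viper.SetConfigType("json") // the stored value is a JSON document
	if err := viper.ReadRemoteConfig(); err != nil {
		log.Fatalf("reading remote config: %v", err)
	}
	fmt.Println(viper.Get("port"))     // 8080
	fmt.Println(viper.Get("hostname")) // myhostname.com
}
```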
@@ -728,14 +741,14 @@ Viper uses [github.com/mitchellh/mapstructure](https://github.com/mitchellh/maps

### Marshalling to string

You may need to marshal all the settings held in viper into a string rather than write them to a file.
You can use your favorite format's marshaller with the config returned by `AllSettings()`.

```go
import (
	yaml "gopkg.in/yaml.v2"
	// ...
)

func yamlStringSettings() string {
	c := viper.AllSettings()
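The hunk above is cut off mid-function by the diff boundary. For reference only, a sketch (assuming the usual `yaml.Marshal` approach, not necessarily the exact upstream wording) of how such a helper can be completed and exercised:

```go
package main

import (
	"fmt"
	"log"

	"github.com/spf13/viper"
	yaml "gopkg.in/yaml.v2"
)

// yamlStringSettings renders viper's current settings as a YAML document.
func yamlStringSettings() string {
	c := viper.AllSettings()
	bs, err := yaml.Marshal(c)
	if err != nil {
		log.Fatalf("unable to marshal config to YAML: %v", err)
	}
	return string(bs)
}

func main() {
	viper.Set("port", 8080) // a sample value so the output is non-empty
	fmt.Println(yamlStringSettings())
}
```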
13 vendor/github.com/spf13/viper/go.mod generated vendored
@@ -3,27 +3,22 @@ module github.com/spf13/viper
go 1.12

require (
	github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6 // indirect
	github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c
	github.com/coreos/bbolt v1.3.2 // indirect
	github.com/coreos/etcd v3.3.13+incompatible // indirect
	github.com/coreos/go-semver v0.2.0 // indirect
	github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e // indirect
	github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f // indirect
	github.com/dgrijalva/jwt-go v3.2.0+incompatible // indirect
	github.com/fsnotify/fsnotify v1.4.7
	github.com/gogo/protobuf v1.2.1 // indirect
	github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef // indirect
	github.com/google/btree v1.0.0 // indirect
	github.com/gorilla/websocket v1.4.2 // indirect
	github.com/gorilla/websocket v1.4.0 // indirect
	github.com/grpc-ecosystem/go-grpc-middleware v1.0.0 // indirect
	github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 // indirect
	github.com/grpc-ecosystem/grpc-gateway v1.9.0 // indirect
	github.com/hashicorp/hcl v1.0.0
	github.com/jonboulle/clockwork v0.1.0 // indirect
	github.com/json-iterator/go v1.1.9 // indirect
	github.com/magiconair/properties v1.8.1
	github.com/mitchellh/mapstructure v1.1.2
	github.com/modern-go/reflect2 v1.0.1 // indirect
	github.com/pelletier/go-toml v1.2.0
	github.com/prometheus/client_golang v0.9.3 // indirect
	github.com/smartystreets/goconvey v1.6.4 // indirect
@@ -36,14 +31,10 @@ require (
	github.com/subosito/gotenv v1.2.0
	github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5 // indirect
	github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2 // indirect
	github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77
	go.etcd.io/bbolt v1.3.2 // indirect
	go.uber.org/atomic v1.4.0 // indirect
	go.uber.org/multierr v1.1.0 // indirect
	go.uber.org/zap v1.10.0 // indirect
	golang.org/x/net v0.0.0-20190522155817-f3200d17e092 // indirect
	golang.org/x/time v0.0.0-20190308202827-9d24e82272b4 // indirect
	google.golang.org/grpc v1.21.0 // indirect
	gopkg.in/ini.v1 v1.51.0
	gopkg.in/yaml.v2 v2.2.4
)
222 vendor/github.com/spf13/viper/go.sum generated vendored
@@ -1,22 +1,46 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
cloud.google.com/go v0.46.3 h1:AVXDdKsrtX33oR9fbCMu/+c1o8Ofjq6Ku/MInaLVg5Y=
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
cloud.google.com/go/bigquery v1.0.1 h1:hL+ycaJpVE9M7nLoiXb/Pn10ENE2u+oddxbD8uu0ZVU=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/datastore v1.0.0 h1:Kt+gOPPp2LEPWp8CSfxhsM8ik9CcyE/gYu+0r+RnZvM=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/firestore v1.1.0 h1:9x7Bx0A9R5/M9jibeJeZWqjeVEIxYW9fZYqB9a70/bY=
cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk=
cloud.google.com/go/pubsub v1.0.1 h1:W9tAK3E57P75u0XLLR82LZyw8VpAnhmyTOxW9qzmyj8=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/storage v1.0.0 h1:VV2nUM3wwLLGh9lSABFgZMjInyUbJeaRSE64WuAIQ+4=
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6 h1:G1bPvciwNyF7IUmKXNt9Ak3m6u9DE1rF+RmtIkBpVdA=
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da h1:8GUt8eRujhVEGZFFEjBj46YV4rDjvGrNxb0KMWYkL2I=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0 h1:HWo1m869IqiPhD389kmkxeTalrjNbbJTC8LXupb+sl0=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c h1:+0HFd5KSZ/mm3JmhmrDukiId5iR6w4+BdFtfSy4yWIc=
github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/coreos/bbolt v1.3.2 h1:wZwiHHUieZCquLkDL0B8UhzreNWsPHooDAG3q34zk0s=
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
github.com/coreos/etcd v3.3.13+incompatible h1:8F3hqu9fGYLBifCmRCJsicFqDx/D68Rt3q1JMazcgBQ=
github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/go-semver v0.2.0 h1:3Jm3tLmsgAYcjC+4Up7hJrFBPr+n7rAqYeSw/SZazuY=
github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM=
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e h1:Wf6HqHfScWJN9/ZjdUKyjop4mf3Qdd+1TvvltAvM3m8=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f h1:lBNOc5arjvs8E5mO2tbpBpLoyyu8B6e44T7hJy6potg=
@@ -27,10 +51,12 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
@@ -43,29 +69,77 @@ github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfU
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef h1:veQD95Isof8w9/WXiA+pa3tz3fJXkt5B7QaRBrM62gk=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0 h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0 h1:Iju5GlWwrvL6UBg4zJJt3btmonfrMlCDdsejg4CZE7c=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v1.9.0 h1:bM6ZAFZmc/wPFaRDi0d5L7hGEZEx/2u+Tmr2evNHDiI=
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/hashicorp/consul/api v1.1.0 h1:BNQPM9ytxj6jbjjdRPioQ94T6YXriSopn0i8COv6SRA=
github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
github.com/hashicorp/consul/sdk v0.1.1 h1:LnuDWGNsoajlhGyHJvuWW6FVqRl8JOTPqS6CPTsYjhY=
github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-immutable-radix v1.0.0 h1:AKDB1HM5PWEA7i4nhcpwOrO2byshxBjXVn/J/3+z5/0=
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
github.com/hashicorp/go-msgpack v0.5.3 h1:zKjpN5BK/P5lMYrLmBHdBULWbJ0XpYR+7NGzqkZzoD4=
github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-multierror v1.0.0 h1:iVjPR7a6H0tWELX5NxNe7bYopibicUzc7uPribsnS6o=
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-rootcerts v1.0.0 h1:Rqb66Oo1X/eSV1x66xbDccZjhJigjg0+e82kpwzSwCI=
github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
github.com/hashicorp/go-sockaddr v1.0.0 h1:GeH6tui99pF4NJgfnhp+L6+FfobzVW3Ah46sLo0ICXs=
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE=
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
github.com/hashicorp/memberlist v0.1.3 h1:EmmoJme1matNzb+hMpDuR/0sbJSUisxyqBGG676r31M=
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
github.com/hashicorp/serf v0.8.2 h1:YZ7UKsJv+hKjqGVUUbtE3HNj79Eln2oQ75tniF6iPt0=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
github.com/jonboulle/clockwork v0.1.0 h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns=
github.com/json-iterator/go v1.1.6 h1:MrUvLMLTMxbqFJ9kzlvat/rYZqZnW3u4wkLzWTaFwKs=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024 h1:rBMNdlhTLzJjJSDIjNEXX1Pz3Hmwmz91v+zycvx9PJc=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
@@ -81,23 +155,39 @@ github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4=
github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/miekg/dns v1.0.14 h1:9jZdLNd/P4+SfEJ0TNyxYpsK8N4GtfylBLqtbYN1sbA=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
github.com/mitchellh/go-homedir v1.0.0 h1:vKb8ShqSby24Yrqr/yDYkuFz8d0WUjys40rvnGC8aR0=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c h1:Lgl0gzECD8GnQ5QCWA8o6BtfL6mDH5rQgM4/fX3avOs=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pkg/errors v0.8.0 h1:WdK/asTD0HN+q6hsWO3/vpuAkAr+tw6aNJNDFFf0+qw=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.9.3 h1:9iH4JKXLzFbOAdtqv/a+j8aewx2Y8lAjAydhbaScPF8=
github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso=
@@ -112,6 +202,10 @@ github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084 h1:sofwID9zm4tzr
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/sirupsen/logrus v1.2.0 h1:juTguoYk5qI21pwyTXY3B3Y5cOTH3ZUyZCg1v/mihuo=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
@@ -141,10 +235,11 @@ github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5 h1:LnC5Kc
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2 h1:eY9dn8+vbi4tKz5Qo6v2eYzo7kUS51QINcR5jNpbZS8=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77 h1:ESFSdwYZvkeru3RtdrYueztKhOBCSAAzS4Gf+k0tEow=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
go.etcd.io/bbolt v1.3.2 h1:Z/90sZLPOeCy2PwprqkFa25PdkusRzaj9P8zm/KNyvk=
go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0 h1:C9hSCOW830chIVkdja34wa6Ky+IzWllkUinR+BtRZd4=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.uber.org/atomic v1.4.0 h1:cxzIVoETapQEqDhQu3QfnvXAV4AlzcvUCxkVUFw3+EU=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/multierr v1.1.0 h1:HoEmRHQPVSqub6w2z2d2EOVs2fjyFRGyofhKuyDq0QI=
@@ -152,46 +247,132 @@ go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/
go.uber.org/zap v1.10.0 h1:ORx85nbTijNz8ljznvCMR1ZBIPKFn3jQrag10X2AsuM=
go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5 h1:58fnuSXlxZmFdJyvtTFVmVhcMLU6v5fEb/ok4wyqtNU=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136 h1:A1gGSx58LAGVHUUsOf7IiR0u8Xb6W51gRwfDBhkdcaw=
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de h1:5hukYrvBGR8/eNkX5mdUezrA6JiaEZDtJb9Ei+1LlBs=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092 h1:4QSRKanuywn15aTZvI/mIDEgPQpswuFndXpOj3rKEco=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 h1:SVwTIAaPC2U/AvvLNZ2a7OVsmBpC8L5BlwK1whH3hm0=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0 h1:HyfiK1WMnHj5FXFXatD+Qs1A/xC2Run6RzeW1SyHxpc=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
|
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
|
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
|
||||||
|
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||||
|
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4 h1:SvFZT6jyqRaOeXpc5h/JSfZenJ2O330aBsf7JfSUXmQ=
|
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4 h1:SvFZT6jyqRaOeXpc5h/JSfZenJ2O330aBsf7JfSUXmQ=
|
||||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
|
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
|
||||||
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||||
|
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||||
|
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||||
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384 h1:TFlARGu6Czu1z7q93HTxcP1P+/ZFC/IKythI5RzrnRg=
|
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384 h1:TFlARGu6Czu1z7q93HTxcP1P+/ZFC/IKythI5RzrnRg=
|
||||||
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||||
|
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||||
|
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||||
|
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||||
|
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||||
|
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||||
|
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc h1:NCy3Ohtk6Iny5V/reW2Ktypo4zIpWBdRJ1uFMjBxdg8=
|
||||||
|
golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
|
||||||
|
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
|
||||||
|
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
|
||||||
|
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
|
||||||
|
google.golang.org/api v0.13.0 h1:Q3Ui3V3/CVinFWFiW39Iw0kMuVrRzYX0wN6OPFp0lTA=
|
||||||
|
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
|
||||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
||||||
|
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||||
|
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||||
|
google.golang.org/appengine v1.6.1 h1:QzqyMA1tlu6CgqCDUtU9V+ZKhLFT2dkJuANu5QaxI3I=
|
||||||
|
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
|
||||||
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc=
|
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc=
|
||||||
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
||||||
|
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||||
|
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||||
|
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||||
|
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||||
|
google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
|
||||||
|
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
|
||||||
|
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
|
||||||
|
google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a h1:Ob5/580gVHBJZgXnff1cZDbG+xLtMVE5mDRTe+nIsX4=
|
||||||
|
google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||||
google.golang.org/grpc v1.21.0 h1:G+97AoqBnmZIT91cLG/EkCoK9NSelj64P8bOHHNmGn0=
|
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
|
||||||
google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
|
google.golang.org/grpc v1.21.1 h1:j6XxA85m/6txkUCHvzlV5f+HBNl/1r5cZ2A/3IEFOO8=
|
||||||
|
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
|
||||||
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||||
gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno=
|
gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno=
|
||||||
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||||
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
|
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
|
||||||
|
@ -200,3 +381,8 @@ gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
|
gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
|
||||||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||||
|
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||||
|
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||||
|
honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM=
|
||||||
|
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||||
|
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
|
||||||
|
|
58 vendor/github.com/spf13/viper/viper.go generated vendored
@@ -287,7 +287,7 @@ func NewWithOptions(opts ...Option) *Viper {
 func Reset() {
 	v = New()
 	SupportedExts = []string{"json", "toml", "yaml", "yml", "properties", "props", "prop", "hcl", "dotenv", "env", "ini"}
-	SupportedRemoteProviders = []string{"etcd", "consul"}
+	SupportedRemoteProviders = []string{"etcd", "consul", "firestore"}
 }
 
 type defaultRemoteProvider struct {
@@ -328,7 +328,7 @@ type RemoteProvider interface {
 var SupportedExts = []string{"json", "toml", "yaml", "yml", "properties", "props", "prop", "hcl", "dotenv", "env", "ini"}
 
 // SupportedRemoteProviders are universally supported remote providers.
-var SupportedRemoteProviders = []string{"etcd", "consul"}
+var SupportedRemoteProviders = []string{"etcd", "consul", "firestore"}
 
 func OnConfigChange(run func(in fsnotify.Event)) { v.OnConfigChange(run) }
 func (v *Viper) OnConfigChange(run func(in fsnotify.Event)) {
@@ -477,7 +477,7 @@ func (v *Viper) AddConfigPath(in string) {
 
 // AddRemoteProvider adds a remote configuration source.
 // Remote Providers are searched in the order they are added.
-// provider is a string value, "etcd" or "consul" are currently supported.
+// provider is a string value: "etcd", "consul" or "firestore" are currently supported.
 // endpoint is the url.  etcd requires http://ip:port  consul requires ip:port
 // path is the path in the k/v store to retrieve configuration
 // To retrieve a config file called myapp.json from /configs/myapp.json
@@ -506,14 +506,14 @@ func (v *Viper) AddRemoteProvider(provider, endpoint, path string) error {
 
 // AddSecureRemoteProvider adds a remote configuration source.
 // Secure Remote Providers are searched in the order they are added.
-// provider is a string value, "etcd" or "consul" are currently supported.
+// provider is a string value: "etcd", "consul" or "firestore" are currently supported.
 // endpoint is the url.  etcd requires http://ip:port  consul requires ip:port
 // secretkeyring is the filepath to your openpgp secret keyring.  e.g. /etc/secrets/myring.gpg
 // path is the path in the k/v store to retrieve configuration
 // To retrieve a config file called myapp.json from /configs/myapp.json
 // you should set path to /configs and set config name (SetConfigName()) to
 // "myapp"
-// Secure Remote Providers are implemented with github.com/xordataexchange/crypt
+// Secure Remote Providers are implemented with github.com/bketelsen/crypt
 func AddSecureRemoteProvider(provider, endpoint, path, secretkeyring string) error {
 	return v.AddSecureRemoteProvider(provider, endpoint, path, secretkeyring)
 }
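
As a usage note on the provider list extended above: the following is a minimal sketch (not part of this commit) of how the newly supported "firestore" backend could be wired up. The project id, document path and config type are placeholder assumptions; the blank import of github.com/spf13/viper/remote is what registers the remote providers.

	package config

	import (
		"github.com/spf13/viper"
		_ "github.com/spf13/viper/remote" // registers etcd/consul/firestore support
	)

	// loadRemoteConfig is a hypothetical helper; endpoint is the GCP project id,
	// path is a "collection/document" pair holding JSON configuration.
	func loadRemoteConfig() (*viper.Viper, error) {
		v := viper.New()
		if err := v.AddRemoteProvider("firestore", "my-gcp-project", "configs/myapp"); err != nil {
			return nil, err
		}
		v.SetConfigType("json") // remote sources need an explicit config type
		if err := v.ReadRemoteConfig(); err != nil {
			return nil, err
		}
		return v, nil
	}
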
@@ -996,11 +996,6 @@ func (v *Viper) BindFlagValues(flags FlagValueSet) (err error) {
 }
 
 // BindFlagValue binds a specific key to a FlagValue.
-// Example (where serverCmd is a Cobra instance):
-//
-//	serverCmd.Flags().Int("port", 1138, "Port to run Application server on")
-//	Viper.BindFlagValue("port", serverCmd.Flags().Lookup("port"))
-//
 func BindFlagValue(key string, flag FlagValue) error { return v.BindFlagValue(key, flag) }
 func (v *Viper) BindFlagValue(key string, flag FlagValue) error {
 	if flag == nil {
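
For reference, the cobra example removed from the comment above corresponds roughly to the sketch below. It uses BindPFlag rather than BindFlagValue, since a raw *pflag.Flag does not itself implement viper's FlagValue interface; the serverCmd command and the "port" key are illustrative only.

	package main

	import (
		"fmt"

		"github.com/spf13/cobra"
		"github.com/spf13/viper"
	)

	func main() {
		serverCmd := &cobra.Command{Use: "serve"} // hypothetical command
		serverCmd.Flags().Int("port", 1138, "Port to run Application server on")
		// BindPFlag wraps the flag in viper's FlagValue and binds it to the key.
		if err := viper.BindPFlag("port", serverCmd.Flags().Lookup("port")); err != nil {
			panic(err)
		}
		fmt.Println(viper.GetInt("port")) // 1138 unless the flag is set
	}
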
@@ -1088,6 +1083,8 @@ func (v *Viper) find(lcaseKey string, flagDefault bool) interface{} {
 			s = strings.TrimSuffix(s, "]")
 			res, _ := readAsCSV(s)
 			return cast.ToIntSlice(res)
+		case "stringToString":
+			return stringToStringConv(flag.ValueString())
 		default:
 			return flag.ValueString()
 		}
@@ -1163,6 +1160,8 @@ func (v *Viper) find(lcaseKey string, flagDefault bool) interface{} {
 			s = strings.TrimSuffix(s, "]")
 			res, _ := readAsCSV(s)
 			return cast.ToIntSlice(res)
+		case "stringToString":
+			return stringToStringConv(flag.ValueString())
 		default:
 			return flag.ValueString()
 		}
@@ -1182,6 +1181,30 @@ func readAsCSV(val string) ([]string, error) {
 	return csvReader.Read()
 }
 
+// mostly copied from pflag's implementation of this operation here https://github.com/spf13/pflag/blob/master/string_to_string.go#L79
+// alterations are: errors are swallowed, map[string]interface{} is returned in order to enable cast.ToStringMap
+func stringToStringConv(val string) interface{} {
+	val = strings.Trim(val, "[]")
+	// An empty string would cause an empty map
+	if len(val) == 0 {
+		return map[string]interface{}{}
+	}
+	r := csv.NewReader(strings.NewReader(val))
+	ss, err := r.Read()
+	if err != nil {
+		return nil
+	}
+	out := make(map[string]interface{}, len(ss))
+	for _, pair := range ss {
+		kv := strings.SplitN(pair, "=", 2)
+		if len(kv) != 2 {
+			return nil
+		}
+		out[kv[0]] = kv[1]
+	}
+	return out
+}
+
 // IsSet checks to see if the key has been set in any of the data locations.
 // IsSet is case-insensitive for a key.
 func IsSet(key string) bool { return v.IsSet(key) }
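
A short sketch (not from this commit) of what the stringToString handling added above enables: a map-valued pflag flag bound through viper now comes back as a map instead of the raw "[k=v,...]" string. The flag set, flag name and values below are made up for illustration.

	package main

	import (
		"fmt"

		"github.com/spf13/pflag"
		"github.com/spf13/viper"
	)

	func main() {
		fs := pflag.NewFlagSet("example", pflag.ContinueOnError)
		fs.StringToString("labels", nil, "key=value pairs")
		_ = fs.Parse([]string{"--labels", "env=prod,region=eu"})

		v := viper.New()
		_ = v.BindPFlag("labels", fs.Lookup("labels"))
		// with viper v1.7.0 the stringToString flag type is decoded into a map
		fmt.Println(v.GetStringMapString("labels")) // map[env:prod region:eu]
	}
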
@@ -1418,11 +1441,18 @@ func (v *Viper) SafeWriteConfigAs(filename string) error {
 
 func (v *Viper) writeConfig(filename string, force bool) error {
 	jww.INFO.Println("Attempting to write configuration to file.")
+	var configType string
+
 	ext := filepath.Ext(filename)
-	if len(ext) <= 1 {
-		return fmt.Errorf("filename: %s requires valid extension", filename)
+	if ext != "" {
+		configType = ext[1:]
+	} else {
+		configType = v.configType
 	}
-	configType := ext[1:]
+	if configType == "" {
+		return fmt.Errorf("config type could not be determined for %s", filename)
+	}
 
 	if !stringInSlice(configType, SupportedExts) {
 		return UnsupportedConfigError(configType)
 	}
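
The net effect of the writeConfig change above, as a small sketch (paths and keys are illustrative): a filename without an extension no longer fails outright but falls back to the type set via SetConfigType, and an error is only returned when neither source yields a config type.

	package main

	import "github.com/spf13/viper"

	func main() {
		v := viper.New()
		v.Set("greeting", "hello")

		// extension present: the type is inferred from ".yaml", as before
		_ = v.WriteConfigAs("/tmp/config.yaml")

		// no extension: v1.7.0 falls back to SetConfigType; pre-1.7.0 this was always an error
		v.SetConfigType("yaml")
		_ = v.WriteConfigAs("/tmp/config")
	}
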
@@ -1619,7 +1649,7 @@ func (v *Viper) marshalWriter(f afero.File, configType string) error {
 			if sectionName == "default" {
 				sectionName = ""
 			}
-			cfg.Section(sectionName).Key(keyName).SetValue(Get(key).(string))
+			cfg.Section(sectionName).Key(keyName).SetValue(v.Get(key).(string))
 		}
 		cfg.WriteTo(f)
 	}
237 vendor/golang.org/x/tools/go/analysis/analysis.go generated vendored Normal file
@@ -0,0 +1,237 @@
package analysis
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"reflect"
|
||||||
|
|
||||||
|
"golang.org/x/tools/internal/analysisinternal"
|
||||||
|
)
|
||||||
|
|
||||||
|
// An Analyzer describes an analysis function and its options.
|
||||||
|
type Analyzer struct {
|
||||||
|
// The Name of the analyzer must be a valid Go identifier
|
||||||
|
// as it may appear in command-line flags, URLs, and so on.
|
||||||
|
Name string
|
||||||
|
|
||||||
|
// Doc is the documentation for the analyzer.
|
||||||
|
// The part before the first "\n\n" is the title
|
||||||
|
// (no capital or period, max ~60 letters).
|
||||||
|
Doc string
|
||||||
|
|
||||||
|
// Flags defines any flags accepted by the analyzer.
|
||||||
|
// The manner in which these flags are exposed to the user
|
||||||
|
// depends on the driver which runs the analyzer.
|
||||||
|
Flags flag.FlagSet
|
||||||
|
|
||||||
|
// Run applies the analyzer to a package.
|
||||||
|
// It returns an error if the analyzer failed.
|
||||||
|
//
|
||||||
|
// On success, the Run function may return a result
|
||||||
|
// computed by the Analyzer; its type must match ResultType.
|
||||||
|
// The driver makes this result available as an input to
|
||||||
|
// another Analyzer that depends directly on this one (see
|
||||||
|
// Requires) when it analyzes the same package.
|
||||||
|
//
|
||||||
|
// To pass analysis results between packages (and thus
|
||||||
|
// potentially between address spaces), use Facts, which are
|
||||||
|
// serializable.
|
||||||
|
Run func(*Pass) (interface{}, error)
|
||||||
|
|
||||||
|
// RunDespiteErrors allows the driver to invoke
|
||||||
|
// the Run method of this analyzer even on a
|
||||||
|
// package that contains parse or type errors.
|
||||||
|
RunDespiteErrors bool
|
||||||
|
|
||||||
|
// Requires is a set of analyzers that must run successfully
|
||||||
|
// before this one on a given package. This analyzer may inspect
|
||||||
|
// the outputs produced by each analyzer in Requires.
|
||||||
|
// The graph over analyzers implied by Requires edges must be acyclic.
|
||||||
|
//
|
||||||
|
// Requires establishes a "horizontal" dependency between
|
||||||
|
// analysis passes (different analyzers, same package).
|
||||||
|
Requires []*Analyzer
|
||||||
|
|
||||||
|
// ResultType is the type of the optional result of the Run function.
|
||||||
|
ResultType reflect.Type
|
||||||
|
|
||||||
|
// FactTypes indicates that this analyzer imports and exports
|
||||||
|
// Facts of the specified concrete types.
|
||||||
|
// An analyzer that uses facts may assume that its import
|
||||||
|
// dependencies have been similarly analyzed before it runs.
|
||||||
|
// Facts must be pointers.
|
||||||
|
//
|
||||||
|
// FactTypes establishes a "vertical" dependency between
|
||||||
|
// analysis passes (same analyzer, different packages).
|
||||||
|
FactTypes []Fact
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Analyzer) String() string { return a.Name }
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
// Set the analysisinternal functions to be able to pass type errors
|
||||||
|
// to the Pass type without modifying the go/analysis API.
|
||||||
|
analysisinternal.SetTypeErrors = func(p interface{}, errors []types.Error) {
|
||||||
|
p.(*Pass).typeErrors = errors
|
||||||
|
}
|
||||||
|
analysisinternal.GetTypeErrors = func(p interface{}) []types.Error {
|
||||||
|
return p.(*Pass).typeErrors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// A Pass provides information to the Run function that
|
||||||
|
// applies a specific analyzer to a single Go package.
|
||||||
|
//
|
||||||
|
// It forms the interface between the analysis logic and the driver
|
||||||
|
// program, and has both input and an output components.
|
||||||
|
//
|
||||||
|
// As in a compiler, one pass may depend on the result computed by another.
|
||||||
|
//
|
||||||
|
// The Run function should not call any of the Pass functions concurrently.
|
||||||
|
type Pass struct {
|
||||||
|
Analyzer *Analyzer // the identity of the current analyzer
|
||||||
|
|
||||||
|
// syntax and type information
|
||||||
|
Fset *token.FileSet // file position information
|
||||||
|
Files []*ast.File // the abstract syntax tree of each file
|
||||||
|
OtherFiles []string // names of non-Go files of this package
|
||||||
|
Pkg *types.Package // type information about the package
|
||||||
|
TypesInfo *types.Info // type information about the syntax trees
|
||||||
|
TypesSizes types.Sizes // function for computing sizes of types
|
||||||
|
|
||||||
|
// Report reports a Diagnostic, a finding about a specific location
|
||||||
|
// in the analyzed source code such as a potential mistake.
|
||||||
|
// It may be called by the Run function.
|
||||||
|
Report func(Diagnostic)
|
||||||
|
|
||||||
|
// ResultOf provides the inputs to this analysis pass, which are
|
||||||
|
// the corresponding results of its prerequisite analyzers.
|
||||||
|
// The map keys are the elements of Analysis.Required,
|
||||||
|
// and the type of each corresponding value is the required
|
||||||
|
// analysis's ResultType.
|
||||||
|
ResultOf map[*Analyzer]interface{}
|
||||||
|
|
||||||
|
// -- facts --
|
||||||
|
|
||||||
|
// ImportObjectFact retrieves a fact associated with obj.
|
||||||
|
// Given a value ptr of type *T, where *T satisfies Fact,
|
||||||
|
// ImportObjectFact copies the value to *ptr.
|
||||||
|
//
|
||||||
|
// ImportObjectFact panics if called after the pass is complete.
|
||||||
|
// ImportObjectFact is not concurrency-safe.
|
||||||
|
ImportObjectFact func(obj types.Object, fact Fact) bool
|
||||||
|
|
||||||
|
// ImportPackageFact retrieves a fact associated with package pkg,
|
||||||
|
// which must be this package or one of its dependencies.
|
||||||
|
// See comments for ImportObjectFact.
|
||||||
|
ImportPackageFact func(pkg *types.Package, fact Fact) bool
|
||||||
|
|
||||||
|
// ExportObjectFact associates a fact of type *T with the obj,
|
||||||
|
// replacing any previous fact of that type.
|
||||||
|
//
|
||||||
|
// ExportObjectFact panics if it is called after the pass is
|
||||||
|
// complete, or if obj does not belong to the package being analyzed.
|
||||||
|
// ExportObjectFact is not concurrency-safe.
|
||||||
|
ExportObjectFact func(obj types.Object, fact Fact)
|
||||||
|
|
||||||
|
// ExportPackageFact associates a fact with the current package.
|
||||||
|
// See comments for ExportObjectFact.
|
||||||
|
ExportPackageFact func(fact Fact)
|
||||||
|
|
||||||
|
// AllPackageFacts returns a new slice containing all package facts of the analysis's FactTypes
|
||||||
|
// in unspecified order.
|
||||||
|
// WARNING: This is an experimental API and may change in the future.
|
||||||
|
AllPackageFacts func() []PackageFact
|
||||||
|
|
||||||
|
// AllObjectFacts returns a new slice containing all object facts of the analysis's FactTypes
|
||||||
|
// in unspecified order.
|
||||||
|
// WARNING: This is an experimental API and may change in the future.
|
||||||
|
AllObjectFacts func() []ObjectFact
|
||||||
|
|
||||||
|
// typeErrors contains types.Errors that are associated with the pkg.
|
||||||
|
typeErrors []types.Error
|
||||||
|
|
||||||
|
/* Further fields may be added in future. */
|
||||||
|
// For example, suggested or applied refactorings.
|
||||||
|
}
|
||||||
|
|
||||||
|
// PackageFact is a package together with an associated fact.
|
||||||
|
// WARNING: This is an experimental API and may change in the future.
|
||||||
|
type PackageFact struct {
|
||||||
|
Package *types.Package
|
||||||
|
Fact Fact
|
||||||
|
}
|
||||||
|
|
||||||
|
// ObjectFact is an object together with an associated fact.
|
||||||
|
// WARNING: This is an experimental API and may change in the future.
|
||||||
|
type ObjectFact struct {
|
||||||
|
Object types.Object
|
||||||
|
Fact Fact
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reportf is a helper function that reports a Diagnostic using the
|
||||||
|
// specified position and formatted error message.
|
||||||
|
func (pass *Pass) Reportf(pos token.Pos, format string, args ...interface{}) {
|
||||||
|
msg := fmt.Sprintf(format, args...)
|
||||||
|
pass.Report(Diagnostic{Pos: pos, Message: msg})
|
||||||
|
}
|
||||||
|
|
||||||
|
// The Range interface provides a range. It's equivalent to and satisfied by
|
||||||
|
// ast.Node.
|
||||||
|
type Range interface {
|
||||||
|
Pos() token.Pos // position of first character belonging to the node
|
||||||
|
End() token.Pos // position of first character immediately after the node
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReportRangef is a helper function that reports a Diagnostic using the
|
||||||
|
// range provided. ast.Node values can be passed in as the range because
|
||||||
|
// they satisfy the Range interface.
|
||||||
|
func (pass *Pass) ReportRangef(rng Range, format string, args ...interface{}) {
|
||||||
|
msg := fmt.Sprintf(format, args...)
|
||||||
|
pass.Report(Diagnostic{Pos: rng.Pos(), End: rng.End(), Message: msg})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pass *Pass) String() string {
|
||||||
|
return fmt.Sprintf("%s@%s", pass.Analyzer.Name, pass.Pkg.Path())
|
||||||
|
}
|
||||||
|
|
||||||
|
// A Fact is an intermediate fact produced during analysis.
|
||||||
|
//
|
||||||
|
// Each fact is associated with a named declaration (a types.Object) or
|
||||||
|
// with a package as a whole. A single object or package may have
|
||||||
|
// multiple associated facts, but only one of any particular fact type.
|
||||||
|
//
|
||||||
|
// A Fact represents a predicate such as "never returns", but does not
|
||||||
|
// represent the subject of the predicate such as "function F" or "package P".
|
||||||
|
//
|
||||||
|
// Facts may be produced in one analysis pass and consumed by another
|
||||||
|
// analysis pass even if these are in different address spaces.
|
||||||
|
// If package P imports Q, all facts about Q produced during
|
||||||
|
// analysis of that package will be available during later analysis of P.
|
||||||
|
// Facts are analogous to type export data in a build system:
|
||||||
|
// just as export data enables separate compilation of several passes,
|
||||||
|
// facts enable "separate analysis".
|
||||||
|
//
|
||||||
|
// Each pass (a, p) starts with the set of facts produced by the
|
||||||
|
// same analyzer a applied to the packages directly imported by p.
|
||||||
|
// The analysis may add facts to the set, and they may be exported in turn.
|
||||||
|
// An analysis's Run function may retrieve facts by calling
|
||||||
|
// Pass.Import{Object,Package}Fact and update them using
|
||||||
|
// Pass.Export{Object,Package}Fact.
|
||||||
|
//
|
||||||
|
// A fact is logically private to its Analysis. To pass values
|
||||||
|
// between different analyzers, use the results mechanism;
|
||||||
|
// see Analyzer.Requires, Analyzer.ResultType, and Pass.ResultOf.
|
||||||
|
//
|
||||||
|
// A Fact type must be a pointer.
|
||||||
|
// Facts are encoded and decoded using encoding/gob.
|
||||||
|
// A Fact may implement the GobEncoder/GobDecoder interfaces
|
||||||
|
// to customize its encoding. Fact encoding should not fail.
|
||||||
|
//
|
||||||
|
// A Fact should not be modified once exported.
|
||||||
|
type Fact interface {
|
||||||
|
AFact() // dummy method to avoid type errors
|
||||||
|
}
|
61 vendor/golang.org/x/tools/go/analysis/diagnostic.go generated vendored Normal file
@@ -0,0 +1,61 @@
package analysis
|
||||||
|
|
||||||
|
import "go/token"
|
||||||
|
|
||||||
|
// A Diagnostic is a message associated with a source location or range.
|
||||||
|
//
|
||||||
|
// An Analyzer may return a variety of diagnostics; the optional Category,
|
||||||
|
// which should be a constant, may be used to classify them.
|
||||||
|
// It is primarily intended to make it easy to look up documentation.
|
||||||
|
//
|
||||||
|
// If End is provided, the diagnostic is specified to apply to the range between
|
||||||
|
// Pos and End.
|
||||||
|
type Diagnostic struct {
|
||||||
|
Pos token.Pos
|
||||||
|
End token.Pos // optional
|
||||||
|
Category string // optional
|
||||||
|
Message string
|
||||||
|
|
||||||
|
// SuggestedFixes contains suggested fixes for a diagnostic which can be used to perform
|
||||||
|
// edits to a file that address the diagnostic.
|
||||||
|
// TODO(matloob): Should multiple SuggestedFixes be allowed for a diagnostic?
|
||||||
|
// Diagnostics should not contain SuggestedFixes that overlap.
|
||||||
|
// Experimental: This API is experimental and may change in the future.
|
||||||
|
SuggestedFixes []SuggestedFix // optional
|
||||||
|
|
||||||
|
// Experimental: This API is experimental and may change in the future.
|
||||||
|
Related []RelatedInformation // optional
|
||||||
|
}
|
||||||
|
|
||||||
|
// RelatedInformation contains information related to a diagnostic.
|
||||||
|
// For example, a diagnostic that flags duplicated declarations of a
|
||||||
|
// variable may include one RelatedInformation per existing
|
||||||
|
// declaration.
|
||||||
|
type RelatedInformation struct {
|
||||||
|
Pos token.Pos
|
||||||
|
End token.Pos
|
||||||
|
Message string
|
||||||
|
}
|
||||||
|
|
||||||
|
// A SuggestedFix is a code change associated with a Diagnostic that a user can choose
|
||||||
|
// to apply to their code. Usually the SuggestedFix is meant to fix the issue flagged
|
||||||
|
// by the diagnostic.
|
||||||
|
// TextEdits for a SuggestedFix should not overlap. TextEdits for a SuggestedFix
|
||||||
|
// should not contain edits for other packages.
|
||||||
|
// Experimental: This API is experimental and may change in the future.
|
||||||
|
type SuggestedFix struct {
|
||||||
|
// A description for this suggested fix to be shown to a user deciding
|
||||||
|
// whether to accept it.
|
||||||
|
Message string
|
||||||
|
TextEdits []TextEdit
|
||||||
|
}
|
||||||
|
|
||||||
|
// A TextEdit represents the replacement of the code between Pos and End with the new text.
|
||||||
|
// Each TextEdit should apply to a single file. End should not be earlier in the file than Pos.
|
||||||
|
// Experimental: This API is experimental and may change in the future.
|
||||||
|
type TextEdit struct {
|
||||||
|
// For a pure insertion, End can either be set to Pos or token.NoPos.
|
||||||
|
Pos token.Pos
|
||||||
|
End token.Pos
|
||||||
|
NewText []byte
|
||||||
|
}
|
301 vendor/golang.org/x/tools/go/analysis/doc.go generated vendored Normal file
@@ -0,0 +1,301 @@
/*
|
||||||
|
|
||||||
|
Package analysis defines the interface between a modular static
|
||||||
|
analysis and an analysis driver program.
|
||||||
|
|
||||||
|
|
||||||
|
Background
|
||||||
|
|
||||||
|
A static analysis is a function that inspects a package of Go code and
|
||||||
|
reports a set of diagnostics (typically mistakes in the code), and
|
||||||
|
perhaps produces other results as well, such as suggested refactorings
|
||||||
|
or other facts. An analysis that reports mistakes is informally called a
|
||||||
|
"checker". For example, the printf checker reports mistakes in
|
||||||
|
fmt.Printf format strings.
|
||||||
|
|
||||||
|
A "modular" analysis is one that inspects one package at a time but can
|
||||||
|
save information from a lower-level package and use it when inspecting a
|
||||||
|
higher-level package, analogous to separate compilation in a toolchain.
|
||||||
|
The printf checker is modular: when it discovers that a function such as
|
||||||
|
log.Fatalf delegates to fmt.Printf, it records this fact, and checks
|
||||||
|
calls to that function too, including calls made from another package.
|
||||||
|
|
||||||
|
By implementing a common interface, checkers from a variety of sources
|
||||||
|
can be easily selected, incorporated, and reused in a wide range of
|
||||||
|
driver programs including command-line tools (such as vet), text editors and
|
||||||
|
IDEs, build and test systems (such as go build, Bazel, or Buck), test
|
||||||
|
frameworks, code review tools, code-base indexers (such as SourceGraph),
|
||||||
|
documentation viewers (such as godoc), batch pipelines for large code
|
||||||
|
bases, and so on.
|
||||||
|
|
||||||
|
|
||||||
|
Analyzer
|
||||||
|
|
||||||
|
The primary type in the API is Analyzer. An Analyzer statically
|
||||||
|
describes an analysis function: its name, documentation, flags,
|
||||||
|
relationship to other analyzers, and of course, its logic.
|
||||||
|
|
||||||
|
To define an analysis, a user declares a (logically constant) variable
|
||||||
|
of type Analyzer. Here is a typical example from one of the analyzers in
|
||||||
|
the go/analysis/passes/ subdirectory:
|
||||||
|
|
||||||
|
package unusedresult
|
||||||
|
|
||||||
|
var Analyzer = &analysis.Analyzer{
|
||||||
|
Name: "unusedresult",
|
||||||
|
Doc: "check for unused results of calls to some functions",
|
||||||
|
Run: run,
|
||||||
|
...
|
||||||
|
}
|
||||||
|
|
||||||
|
func run(pass *analysis.Pass) (interface{}, error) {
|
||||||
|
...
|
||||||
|
}
|
||||||
|
|
||||||
|
An analysis driver is a program such as vet that runs a set of
|
||||||
|
analyses and prints the diagnostics that they report.
|
||||||
|
The driver program must import the list of Analyzers it needs.
|
||||||
|
Typically each Analyzer resides in a separate package.
|
||||||
|
To add a new Analyzer to an existing driver, add another item to the list:
|
||||||
|
|
||||||
|
import ( "unusedresult"; "nilness"; "printf" )
|
||||||
|
|
||||||
|
var analyses = []*analysis.Analyzer{
|
||||||
|
unusedresult.Analyzer,
|
||||||
|
nilness.Analyzer,
|
||||||
|
printf.Analyzer,
|
||||||
|
}
|
||||||
|
|
||||||
|
A driver may use the name, flags, and documentation to provide on-line
|
||||||
|
help that describes the analyses it performs.
|
||||||
|
The doc comment contains a brief one-line summary,
|
||||||
|
optionally followed by paragraphs of explanation.
|
||||||
|
|
||||||
|
The Analyzer type has more fields besides those shown above:
|
||||||
|
|
||||||
|
type Analyzer struct {
|
||||||
|
Name string
|
||||||
|
Doc string
|
||||||
|
Flags flag.FlagSet
|
||||||
|
Run func(*Pass) (interface{}, error)
|
||||||
|
RunDespiteErrors bool
|
||||||
|
ResultType reflect.Type
|
||||||
|
Requires []*Analyzer
|
||||||
|
FactTypes []Fact
|
||||||
|
}
|
||||||
|
|
||||||
|
The Flags field declares a set of named (global) flag variables that
|
||||||
|
control analysis behavior. Unlike vet, analysis flags are not declared
|
||||||
|
directly in the command line FlagSet; it is up to the driver to set the
|
||||||
|
flag variables. A driver for a single analysis, a, might expose its flag
|
||||||
|
f directly on the command line as -f, whereas a driver for multiple
|
||||||
|
analyses might prefix the flag name by the analysis name (-a.f) to avoid
|
||||||
|
ambiguity. An IDE might expose the flags through a graphical interface,
|
||||||
|
and a batch pipeline might configure them from a config file.
|
||||||
|
See the "findcall" analyzer for an example of flags in action.
|
||||||
|
|
||||||
|
The RunDespiteErrors flag indicates whether the analysis is equipped to
|
||||||
|
handle ill-typed code. If not, the driver will skip the analysis if
|
||||||
|
there were parse or type errors.
|
||||||
|
The optional ResultType field specifies the type of the result value
|
||||||
|
computed by this analysis and made available to other analyses.
|
||||||
|
The Requires field specifies a list of analyses upon which
|
||||||
|
this one depends and whose results it may access, and it constrains the
|
||||||
|
order in which a driver may run analyses.
|
||||||
|
The FactTypes field is discussed in the section on Modularity.
|
||||||
|
The analysis package provides a Validate function to perform basic
|
||||||
|
sanity checks on an Analyzer, such as that its Requires graph is
|
||||||
|
acyclic, its fact and result types are unique, and so on.
|
||||||
|
|
||||||
|
Finally, the Run field contains a function to be called by the driver to
|
||||||
|
execute the analysis on a single package. The driver passes it an
|
||||||
|
instance of the Pass type.
|
||||||
|
|
||||||
|
|
||||||
|
Pass
|
||||||
|
|
||||||
|
A Pass describes a single unit of work: the application of a particular
|
||||||
|
Analyzer to a particular package of Go code.
|
||||||
|
The Pass provides information to the Analyzer's Run function about the
|
||||||
|
package being analyzed, and provides operations to the Run function for
|
||||||
|
reporting diagnostics and other information back to the driver.
|
||||||
|
|
||||||
|
type Pass struct {
|
||||||
|
Fset *token.FileSet
|
||||||
|
Files []*ast.File
|
||||||
|
OtherFiles []string
|
||||||
|
Pkg *types.Package
|
||||||
|
TypesInfo *types.Info
|
||||||
|
ResultOf map[*Analyzer]interface{}
|
||||||
|
Report func(Diagnostic)
|
||||||
|
...
|
||||||
|
}
|
||||||
|
|
||||||
|
The Fset, Files, Pkg, and TypesInfo fields provide the syntax trees,
|
||||||
|
type information, and source positions for a single package of Go code.
|
||||||
|
|
||||||
|
The OtherFiles field provides the names, but not the contents, of non-Go
|
||||||
|
files such as assembly that are part of this package. See the "asmdecl"
|
||||||
|
or "buildtags" analyzers for examples of loading non-Go files and reporting
|
||||||
|
diagnostics against them.
|
||||||
|
|
||||||
|
The ResultOf field provides the results computed by the analyzers
|
||||||
|
required by this one, as expressed in its Analyzer.Requires field. The
|
||||||
|
driver runs the required analyzers first and makes their results
|
||||||
|
available in this map. Each Analyzer must return a value of the type
|
||||||
|
described in its Analyzer.ResultType field.
|
||||||
|
For example, the "ctrlflow" analyzer returns a *ctrlflow.CFGs, which
|
||||||
|
provides a control-flow graph for each function in the package (see
|
||||||
|
golang.org/x/tools/go/cfg); the "inspect" analyzer returns a value that
|
||||||
|
enables other Analyzers to traverse the syntax trees of the package more
|
||||||
|
efficiently; and the "buildssa" analyzer constructs an SSA-form
|
||||||
|
intermediate representation.
|
||||||
|
Each of these Analyzers extends the capabilities of later Analyzers
|
||||||
|
without adding a dependency to the core API, so an analysis tool pays
|
||||||
|
only for the extensions it needs.
|
||||||
|
|
||||||
|
The Report function emits a diagnostic, a message associated with a
|
||||||
|
source position. For most analyses, diagnostics are their primary
|
||||||
|
result.
|
||||||
|
For convenience, Pass provides a helper method, Reportf, to report a new
|
||||||
|
diagnostic by formatting a string.
|
||||||
|
Diagnostic is defined as:
|
||||||
|
|
||||||
|
type Diagnostic struct {
|
||||||
|
Pos token.Pos
|
||||||
|
Category string // optional
|
||||||
|
Message string
|
||||||
|
}
|
||||||
|
|
||||||
|
The optional Category field is a short identifier that classifies the
|
||||||
|
kind of message when an analysis produces several kinds of diagnostic.
|
||||||
|
|
||||||
|
Most Analyzers inspect typed Go syntax trees, but a few, such as asmdecl
|
||||||
|
and buildtag, inspect the raw text of Go source files or even non-Go
|
||||||
|
files such as assembly. To report a diagnostic against a line of a
|
||||||
|
raw text file, use the following sequence:
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filename)
|
||||||
|
if err != nil { ... }
|
||||||
|
tf := fset.AddFile(filename, -1, len(content))
|
||||||
|
tf.SetLinesForContent(content)
|
||||||
|
...
|
||||||
|
pass.Reportf(tf.LineStart(line), "oops")
|
||||||
|
|
||||||
|
|
||||||
|
Modular analysis with Facts
|
||||||
|
|
||||||
|
To improve efficiency and scalability, large programs are routinely
|
||||||
|
built using separate compilation: units of the program are compiled
|
||||||
|
separately, and recompiled only when one of their dependencies changes;
|
||||||
|
independent modules may be compiled in parallel. The same technique may
|
||||||
|
be applied to static analyses, for the same benefits. Such analyses are
|
||||||
|
described as "modular".
|
||||||
|
|
||||||
|
A compiler’s type checker is an example of a modular static analysis.
|
||||||
|
Many other checkers we would like to apply to Go programs can be
|
||||||
|
understood as alternative or non-standard type systems. For example,
|
||||||
|
vet's printf checker infers whether a function has the "printf wrapper"
|
||||||
|
type, and it applies stricter checks to calls of such functions. In
|
||||||
|
addition, it records which functions are printf wrappers for use by
|
||||||
|
later analysis passes to identify other printf wrappers by induction.
|
||||||
|
A result such as “f is a printf wrapper” that is not interesting by
|
||||||
|
itself but serves as a stepping stone to an interesting result (such as
|
||||||
|
a diagnostic) is called a "fact".
|
||||||
|
|
||||||
|
The analysis API allows an analysis to define new types of facts, to
|
||||||
|
associate facts of these types with objects (named entities) declared
|
||||||
|
within the current package, or with the package as a whole, and to query
|
||||||
|
for an existing fact of a given type associated with an object or
|
||||||
|
package.
|
||||||
|
|
||||||
|
An Analyzer that uses facts must declare their types:
|
||||||
|
|
||||||
|
var Analyzer = &analysis.Analyzer{
|
||||||
|
Name: "printf",
|
||||||
|
FactTypes: []analysis.Fact{new(isWrapper)},
|
||||||
|
...
|
||||||
|
}
|
||||||
|
|
||||||
|
type isWrapper struct{} // => *types.Func f “is a printf wrapper”
|
||||||
|
|
||||||
|
The driver program ensures that facts for a pass’s dependencies are
|
||||||
|
generated before analyzing the package and is responsible for propagating
|
||||||
|
facts from one package to another, possibly across address spaces.
|
||||||
|
Consequently, Facts must be serializable. The API requires that drivers
|
||||||
|
use the gob encoding, an efficient, robust, self-describing binary
|
||||||
|
protocol. A fact type may implement the GobEncoder/GobDecoder interfaces
|
||||||
|
if the default encoding is unsuitable. Facts should be stateless.
|
||||||
|
|
||||||
|
The Pass type has functions to import and export facts,
|
||||||
|
associated either with an object or with a package:
|
||||||
|
|
||||||
|
type Pass struct {
|
||||||
|
...
|
||||||
|
ExportObjectFact func(types.Object, Fact)
|
||||||
|
ImportObjectFact func(types.Object, Fact) bool
|
||||||
|
|
||||||
|
ExportPackageFact func(fact Fact)
|
||||||
|
ImportPackageFact func(*types.Package, Fact) bool
|
||||||
|
}
|
||||||
|
|
||||||
|
An Analyzer may only export facts associated with the current package or
|
||||||
|
its objects, though it may import facts from any package or object that
|
||||||
|
is an import dependency of the current package.
|
||||||
|
|
||||||
|
Conceptually, ExportObjectFact(obj, fact) inserts fact into a hidden map keyed by
|
||||||
|
the pair (obj, TypeOf(fact)), and the ImportObjectFact function
|
||||||
|
retrieves the entry from this map and copies its value into the variable
|
||||||
|
pointed to by fact. This scheme assumes that the concrete type of fact
|
||||||
|
is a pointer; this assumption is checked by the Validate function.
|
||||||
|
See the "printf" analyzer for an example of object facts in action.
|
||||||
|
|
||||||
|
Some driver implementations (such as those based on Bazel and Blaze) do
|
||||||
|
not currently apply analyzers to packages of the standard library.
|
||||||
|
Therefore, for best results, analyzer authors should not rely on
|
||||||
|
analysis facts being available for standard packages.
|
||||||
|
For example, although the printf checker is capable of deducing during
|
||||||
|
analysis of the log package that log.Printf is a printf wrapper,
|
||||||
|
this fact is built in to the analyzer so that it correctly checks
|
||||||
|
calls to log.Printf even when run in a driver that does not apply
|
||||||
|
it to standard packages. We would like to remove this limitation in future.
|
||||||
|
|
||||||
|
|
||||||
|
Testing an Analyzer
|
||||||
|
|
||||||
|
The analysistest subpackage provides utilities for testing an Analyzer.
|
||||||
|
In a few lines of code, it is possible to run an analyzer on a package
|
||||||
|
of testdata files and check that it reported all the expected
|
||||||
|
diagnostics and facts (and no more). Expectations are expressed using
|
||||||
|
"// want ..." comments in the input code.
|
||||||
|
|
||||||
|
|
||||||
|
Standalone commands
|
||||||
|
|
||||||
|
Analyzers are provided in the form of packages that a driver program is
|
||||||
|
expected to import. The vet command imports a set of several analyzers,
|
||||||
|
but users may wish to define their own analysis commands that perform
|
||||||
|
additional checks. To simplify the task of creating an analysis command,
|
||||||
|
either for a single analyzer or for a whole suite, we provide the
|
||||||
|
singlechecker and multichecker subpackages.
|
||||||
|
|
||||||
|
The singlechecker package provides the main function for a command that
|
||||||
|
runs one analyzer. By convention, each analyzer such as
|
||||||
|
go/passes/findcall should be accompanied by a singlechecker-based
|
||||||
|
command such as go/analysis/passes/findcall/cmd/findcall, defined in its
|
||||||
|
entirety as:
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"golang.org/x/tools/go/analysis/passes/findcall"
|
||||||
|
"golang.org/x/tools/go/analysis/singlechecker"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() { singlechecker.Main(findcall.Analyzer) }
|
||||||
|
|
||||||
|
A tool that provides multiple analyzers can use multichecker in a
|
||||||
|
similar way, giving it the list of Analyzers.
|
||||||
|
|
||||||
|
*/
|
||||||
|
package analysis
|
49 vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go generated vendored Normal file
@@ -0,0 +1,49 @@
// Copyright 2018 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// Package inspect defines an Analyzer that provides an AST inspector
|
||||||
|
// (golang.org/x/tools/go/ast/inspect.Inspect) for the syntax trees of a
|
||||||
|
// package. It is only a building block for other analyzers.
|
||||||
|
//
|
||||||
|
// Example of use in another analysis:
|
||||||
|
//
|
||||||
|
// import (
|
||||||
|
// "golang.org/x/tools/go/analysis"
|
||||||
|
// "golang.org/x/tools/go/analysis/passes/inspect"
|
||||||
|
// "golang.org/x/tools/go/ast/inspector"
|
||||||
|
// )
|
||||||
|
//
|
||||||
|
// var Analyzer = &analysis.Analyzer{
|
||||||
|
// ...
|
||||||
|
// Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// func run(pass *analysis.Pass) (interface{}, error) {
|
||||||
|
// inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
|
||||||
|
// inspect.Preorder(nil, func(n ast.Node) {
|
||||||
|
// ...
|
||||||
|
// })
|
||||||
|
// return nil
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
package inspect
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/analysis"
|
||||||
|
"golang.org/x/tools/go/ast/inspector"
|
||||||
|
)
|
||||||
|
|
||||||
|
var Analyzer = &analysis.Analyzer{
|
||||||
|
Name: "inspect",
|
||||||
|
Doc: "optimize AST traversal for later passes",
|
||||||
|
Run: run,
|
||||||
|
RunDespiteErrors: true,
|
||||||
|
ResultType: reflect.TypeOf(new(inspector.Inspector)),
|
||||||
|
}
|
||||||
|
|
||||||
|
func run(pass *analysis.Pass) (interface{}, error) {
|
||||||
|
return inspector.New(pass.Files), nil
|
||||||
|
}
|
97 vendor/golang.org/x/tools/go/analysis/validate.go generated vendored Normal file
@@ -0,0 +1,97 @@
package analysis
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Validate reports an error if any of the analyzers are misconfigured.
|
||||||
|
// Checks include:
|
||||||
|
// that the name is a valid identifier;
|
||||||
|
// that the Requires graph is acyclic;
|
||||||
|
// that analyzer fact types are unique;
|
||||||
|
// that each fact type is a pointer.
|
||||||
|
func Validate(analyzers []*Analyzer) error {
|
||||||
|
// Map each fact type to its sole generating analyzer.
|
||||||
|
factTypes := make(map[reflect.Type]*Analyzer)
|
||||||
|
|
||||||
|
// Traverse the Requires graph, depth first.
|
||||||
|
const (
|
||||||
|
white = iota
|
||||||
|
grey
|
||||||
|
black
|
||||||
|
finished
|
||||||
|
)
|
||||||
|
color := make(map[*Analyzer]uint8)
|
||||||
|
var visit func(a *Analyzer) error
|
||||||
|
visit = func(a *Analyzer) error {
|
||||||
|
if a == nil {
|
||||||
|
return fmt.Errorf("nil *Analyzer")
|
||||||
|
}
|
||||||
|
if color[a] == white {
|
||||||
|
color[a] = grey
|
||||||
|
|
||||||
|
// names
|
||||||
|
if !validIdent(a.Name) {
|
||||||
|
return fmt.Errorf("invalid analyzer name %q", a)
|
||||||
|
}
|
||||||
|
|
||||||
|
if a.Doc == "" {
|
||||||
|
return fmt.Errorf("analyzer %q is undocumented", a)
|
||||||
|
}
|
||||||
|
|
||||||
|
// fact types
|
||||||
|
for _, f := range a.FactTypes {
|
||||||
|
if f == nil {
|
||||||
|
return fmt.Errorf("analyzer %s has nil FactType", a)
|
||||||
|
}
|
||||||
|
t := reflect.TypeOf(f)
|
||||||
|
if prev := factTypes[t]; prev != nil {
|
||||||
|
return fmt.Errorf("fact type %s registered by two analyzers: %v, %v",
|
||||||
|
t, a, prev)
|
||||||
|
}
|
||||||
|
if t.Kind() != reflect.Ptr {
|
||||||
|
return fmt.Errorf("%s: fact type %s is not a pointer", a, t)
|
||||||
|
}
|
||||||
|
factTypes[t] = a
|
||||||
|
}
|
||||||
|
|
||||||
|
// recursion
|
||||||
|
for i, req := range a.Requires {
|
||||||
|
if err := visit(req); err != nil {
|
||||||
|
return fmt.Errorf("%s.Requires[%d]: %v", a.Name, i, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
color[a] = black
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
for _, a := range analyzers {
|
||||||
|
if err := visit(a); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reject duplicates among analyzers.
|
||||||
|
// Precondition: color[a] == black.
|
||||||
|
// Postcondition: color[a] == finished.
|
||||||
|
for _, a := range analyzers {
|
||||||
|
if color[a] == finished {
|
||||||
|
return fmt.Errorf("duplicate analyzer: %s", a.Name)
|
||||||
|
}
|
||||||
|
color[a] = finished
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func validIdent(name string) bool {
|
||||||
|
for i, r := range name {
|
||||||
|
if !(r == '_' || unicode.IsLetter(r) || i > 0 && unicode.IsDigit(r)) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return name != ""
|
||||||
|
}
|
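A minimal sketch (not part of this commit) of how Validate is typically called before running a set of analyzers; the choice of findcall.Analyzer is only an example:

	package main

	import (
		"log"

		"golang.org/x/tools/go/analysis"
		"golang.org/x/tools/go/analysis/passes/findcall"
	)

	func main() {
		analyzers := []*analysis.Analyzer{findcall.Analyzer}
		// Validate rejects bad names, cyclic Requires graphs,
		// duplicate or non-pointer fact types, and duplicate analyzers.
		if err := analysis.Validate(analyzers); err != nil {
			log.Fatal(err)
		}
		log.Println("analyzers are well-formed")
	}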
523 vendor/golang.org/x/tools/go/types/objectpath/objectpath.go generated vendored Normal file
@@ -0,0 +1,523 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package objectpath defines a naming scheme for types.Objects
// (that is, named entities in Go programs) relative to their enclosing
// package.
//
// Type-checker objects are canonical, so they are usually identified by
// their address in memory (a pointer), but a pointer has meaning only
// within one address space. By contrast, objectpath names allow the
// identity of an object to be sent from one program to another,
// establishing a correspondence between types.Object variables that are
// distinct but logically equivalent.
//
// A single object may have multiple paths. In this example,
//	type A struct{ X int }
//	type B A
// the field X has two paths due to its membership of both A and B.
// The For(obj) function always returns one of these paths, arbitrarily
// but consistently.
package objectpath

import (
	"fmt"
	"strconv"
	"strings"

	"go/types"
)

// A Path is an opaque name that identifies a types.Object
// relative to its package. Conceptually, the name consists of a
// sequence of destructuring operations applied to the package scope
// to obtain the original object.
// The name does not include the package itself.
type Path string

// Encoding
//
// An object path is a textual and (with training) human-readable encoding
// of a sequence of destructuring operators, starting from a types.Package.
// The sequences represent a path through the package/object/type graph.
// We classify these operators by their type:
//
//	PO package->object	Package.Scope.Lookup
//	OT object->type		Object.Type
//	TT type->type		Type.{Elem,Key,Params,Results,Underlying} [EKPRU]
//	TO type->object		Type.{At,Field,Method,Obj} [AFMO]
//
// All valid paths start with a package and end at an object
// and thus may be defined by the regular language:
//
//	objectpath = PO (OT TT* TO)*
//
// The concrete encoding follows directly:
// - The only PO operator is Package.Scope.Lookup, which requires an identifier.
// - The only OT operator is Object.Type,
//   which we encode as '.' because dot cannot appear in an identifier.
// - The TT operators are encoded as [EKPRU].
// - The OT operators are encoded as [AFMO];
//   three of these (At,Field,Method) require an integer operand,
//   which is encoded as a string of decimal digits.
// These indices are stable across different representations
// of the same package, even source and export data.
//
// In the example below,
//
//	package p
//
//	type T interface {
//		f() (a string, b struct{ X int })
//	}
//
// field X has the path "T.UM0.RA1.F0",
// representing the following sequence of operations:
//
//	p.Lookup("T")				T
//	.Type().Underlying().Method(0).		f
//	.Type().Results().At(1)			b
//	.Type().Field(0)			X
//
// The encoding is not maximally compact---every R or P is
// followed by an A, for example---but this simplifies the
// encoder and decoder.
//
const (
	// object->type operators
	opType = '.' // .Type()		  (Object)

	// type->type operators
	opElem       = 'E' // .Elem()		(Pointer, Slice, Array, Chan, Map)
	opKey        = 'K' // .Key()		(Map)
	opParams     = 'P' // .Params()		(Signature)
	opResults    = 'R' // .Results()	(Signature)
	opUnderlying = 'U' // .Underlying()	(Named)

	// type->object operators
	opAt     = 'A' // .At(i)	(Tuple)
	opField  = 'F' // .Field(i)	(Struct)
	opMethod = 'M' // .Method(i)	(Named or Interface; not Struct: "promoted" names are ignored)
	opObj    = 'O' // .Obj()	(Named)
)

// The For function returns the path to an object relative to its package,
// or an error if the object is not accessible from the package's Scope.
//
// The For function guarantees to return a path only for the following objects:
// - package-level types
// - exported package-level non-types
// - methods
// - parameter and result variables
// - struct fields
// These objects are sufficient to define the API of their package.
// The objects described by a package's export data are drawn from this set.
//
// For does not return a path for predeclared names, imported package
// names, local names, and unexported package-level names (except
// types).
//
// Example: given this definition,
//
//	package p
//
//	type T interface {
//		f() (a string, b struct{ X int })
//	}
//
// For(X) would return a path that denotes the following sequence of operations:
//
//	p.Scope().Lookup("T")			(TypeName T)
//	.Type().Underlying().Method(0).		(method Func f)
//	.Type().Results().At(1)			(field Var b)
//	.Type().Field(0)			(field Var X)
//
// where p is the package (*types.Package) to which X belongs.
func For(obj types.Object) (Path, error) {
	pkg := obj.Pkg()

	// This table lists the cases of interest.
	//
	// Object			Action
	// ------			------
	// nil				reject
	// builtin			reject
	// pkgname			reject
	// label			reject
	// var
	//    package-level		accept
	//    func param/result		accept
	//    local			reject
	//    struct field		accept
	// const
	//    package-level		accept
	//    local			reject
	// func
	//    package-level		accept
	//    init functions		reject
	//    concrete method		accept
	//    interface method		accept
	// type
	//    package-level		accept
	//    local			reject
	//
	// The only accessible package-level objects are members of pkg itself.
	//
	// The cases are handled in four steps:
	//
	// 1. reject nil and builtin
	// 2. accept package-level objects
	// 3. reject obviously invalid objects
	// 4. search the API for the path to the param/result/field/method.

	// 1. reference to nil or builtin?
	if pkg == nil {
		return "", fmt.Errorf("predeclared %s has no path", obj)
	}
	scope := pkg.Scope()

	// 2. package-level object?
	if scope.Lookup(obj.Name()) == obj {
		// Only exported objects (and non-exported types) have a path.
		// Non-exported types may be referenced by other objects.
		if _, ok := obj.(*types.TypeName); !ok && !obj.Exported() {
			return "", fmt.Errorf("no path for non-exported %v", obj)
		}
		return Path(obj.Name()), nil
	}

	// 3. Not a package-level object.
	//    Reject obviously non-viable cases.
	switch obj := obj.(type) {
	case *types.Const, // Only package-level constants have a path.
		*types.TypeName, // Only package-level types have a path.
		*types.Label,    // Labels are function-local.
		*types.PkgName:  // PkgNames are file-local.
		return "", fmt.Errorf("no path for %v", obj)

	case *types.Var:
		// Could be:
		// - a field (obj.IsField())
		// - a func parameter or result
		// - a local var.
		// Sadly there is no way to distinguish
		// a param/result from a local
		// so we must proceed to the find.

	case *types.Func:
		// A func, if not package-level, must be a method.
		if recv := obj.Type().(*types.Signature).Recv(); recv == nil {
			return "", fmt.Errorf("func is not a method: %v", obj)
		}
		// TODO(adonovan): opt: if the method is concrete,
		// do a specialized version of the rest of this function so
		// that it's O(1) not O(|scope|). Basically 'find' is needed
		// only for struct fields and interface methods.

	default:
		panic(obj)
	}

	// 4. Search the API for the path to the var (field/param/result) or method.

	// First inspect package-level named types.
	// In the presence of path aliases, these give
	// the best paths because non-types may
	// refer to types, but not the reverse.
	empty := make([]byte, 0, 48) // initial space
	for _, name := range scope.Names() {
		o := scope.Lookup(name)
		tname, ok := o.(*types.TypeName)
		if !ok {
			continue // handle non-types in second pass
		}

		path := append(empty, name...)
		path = append(path, opType)

		T := o.Type()

		if tname.IsAlias() {
			// type alias
			if r := find(obj, T, path); r != nil {
				return Path(r), nil
			}
		} else {
			// defined (named) type
			if r := find(obj, T.Underlying(), append(path, opUnderlying)); r != nil {
				return Path(r), nil
			}
		}
	}

	// Then inspect everything else:
	// non-types, and declared methods of defined types.
	for _, name := range scope.Names() {
		o := scope.Lookup(name)
		path := append(empty, name...)
		if _, ok := o.(*types.TypeName); !ok {
			if o.Exported() {
				// exported non-type (const, var, func)
				if r := find(obj, o.Type(), append(path, opType)); r != nil {
					return Path(r), nil
				}
			}
			continue
		}

		// Inspect declared methods of defined types.
		if T, ok := o.Type().(*types.Named); ok {
			path = append(path, opType)
			for i := 0; i < T.NumMethods(); i++ {
				m := T.Method(i)
				path2 := appendOpArg(path, opMethod, i)
				if m == obj {
					return Path(path2), nil // found declared method
				}
				if r := find(obj, m.Type(), append(path2, opType)); r != nil {
					return Path(r), nil
				}
			}
		}
	}

	return "", fmt.Errorf("can't find path for %v in %s", obj, pkg.Path())
}

func appendOpArg(path []byte, op byte, arg int) []byte {
	path = append(path, op)
	path = strconv.AppendInt(path, int64(arg), 10)
	return path
}

// find finds obj within type T, returning the path to it, or nil if not found.
func find(obj types.Object, T types.Type, path []byte) []byte {
	switch T := T.(type) {
	case *types.Basic, *types.Named:
		// Named types belonging to pkg were handled already,
		// so T must belong to another package. No path.
		return nil
	case *types.Pointer:
		return find(obj, T.Elem(), append(path, opElem))
	case *types.Slice:
		return find(obj, T.Elem(), append(path, opElem))
	case *types.Array:
		return find(obj, T.Elem(), append(path, opElem))
	case *types.Chan:
		return find(obj, T.Elem(), append(path, opElem))
	case *types.Map:
		if r := find(obj, T.Key(), append(path, opKey)); r != nil {
			return r
		}
		return find(obj, T.Elem(), append(path, opElem))
	case *types.Signature:
		if r := find(obj, T.Params(), append(path, opParams)); r != nil {
			return r
		}
		return find(obj, T.Results(), append(path, opResults))
	case *types.Struct:
		for i := 0; i < T.NumFields(); i++ {
			f := T.Field(i)
			path2 := appendOpArg(path, opField, i)
			if f == obj {
				return path2 // found field var
			}
			if r := find(obj, f.Type(), append(path2, opType)); r != nil {
				return r
			}
		}
		return nil
	case *types.Tuple:
		for i := 0; i < T.Len(); i++ {
			v := T.At(i)
			path2 := appendOpArg(path, opAt, i)
			if v == obj {
				return path2 // found param/result var
			}
			if r := find(obj, v.Type(), append(path2, opType)); r != nil {
				return r
			}
		}
		return nil
	case *types.Interface:
		for i := 0; i < T.NumMethods(); i++ {
			m := T.Method(i)
			path2 := appendOpArg(path, opMethod, i)
			if m == obj {
				return path2 // found interface method
			}
			if r := find(obj, m.Type(), append(path2, opType)); r != nil {
				return r
			}
		}
		return nil
	}
	panic(T)
}

// Object returns the object denoted by path p within the package pkg.
func Object(pkg *types.Package, p Path) (types.Object, error) {
	if p == "" {
		return nil, fmt.Errorf("empty path")
	}

	pathstr := string(p)
	var pkgobj, suffix string
	if dot := strings.IndexByte(pathstr, opType); dot < 0 {
		pkgobj = pathstr
	} else {
		pkgobj = pathstr[:dot]
		suffix = pathstr[dot:] // suffix starts with "."
	}

	obj := pkg.Scope().Lookup(pkgobj)
	if obj == nil {
		return nil, fmt.Errorf("package %s does not contain %q", pkg.Path(), pkgobj)
	}

	// abstraction of *types.{Pointer,Slice,Array,Chan,Map}
	type hasElem interface {
		Elem() types.Type
	}
	// abstraction of *types.{Interface,Named}
	type hasMethods interface {
		Method(int) *types.Func
		NumMethods() int
	}

	// The loop state is the pair (t, obj),
	// exactly one of which is non-nil, initially obj.
	// All suffixes start with '.' (the only object->type operation),
	// followed by optional type->type operations,
	// then a type->object operation.
	// The cycle then repeats.
	var t types.Type
	for suffix != "" {
		code := suffix[0]
		suffix = suffix[1:]

		// Codes [AFM] have an integer operand.
		var index int
		switch code {
		case opAt, opField, opMethod:
			rest := strings.TrimLeft(suffix, "0123456789")
			numerals := suffix[:len(suffix)-len(rest)]
			suffix = rest
			i, err := strconv.Atoi(numerals)
			if err != nil {
				return nil, fmt.Errorf("invalid path: bad numeric operand %q for code %q", numerals, code)
			}
			index = int(i)
		case opObj:
			// no operand
		default:
			// The suffix must end with a type->object operation.
			if suffix == "" {
				return nil, fmt.Errorf("invalid path: ends with %q, want [AFMO]", code)
			}
		}

		if code == opType {
			if t != nil {
				return nil, fmt.Errorf("invalid path: unexpected %q in type context", opType)
			}
			t = obj.Type()
			obj = nil
			continue
		}

		if t == nil {
			return nil, fmt.Errorf("invalid path: code %q in object context", code)
		}

		// Inv: t != nil, obj == nil

		switch code {
		case opElem:
			hasElem, ok := t.(hasElem) // Pointer, Slice, Array, Chan, Map
			if !ok {
				return nil, fmt.Errorf("cannot apply %q to %s (got %T, want pointer, slice, array, chan or map)", code, t, t)
			}
			t = hasElem.Elem()

		case opKey:
			mapType, ok := t.(*types.Map)
			if !ok {
				return nil, fmt.Errorf("cannot apply %q to %s (got %T, want map)", code, t, t)
			}
			t = mapType.Key()

		case opParams:
			sig, ok := t.(*types.Signature)
			if !ok {
				return nil, fmt.Errorf("cannot apply %q to %s (got %T, want signature)", code, t, t)
			}
			t = sig.Params()

		case opResults:
			sig, ok := t.(*types.Signature)
			if !ok {
				return nil, fmt.Errorf("cannot apply %q to %s (got %T, want signature)", code, t, t)
			}
			t = sig.Results()

		case opUnderlying:
			named, ok := t.(*types.Named)
			if !ok {
				return nil, fmt.Errorf("cannot apply %q to %s (got %s, want named)", code, t, t)
			}
			t = named.Underlying()

		case opAt:
			tuple, ok := t.(*types.Tuple)
			if !ok {
				return nil, fmt.Errorf("cannot apply %q to %s (got %s, want tuple)", code, t, t)
			}
			if n := tuple.Len(); index >= n {
				return nil, fmt.Errorf("tuple index %d out of range [0-%d)", index, n)
			}
			obj = tuple.At(index)
			t = nil

		case opField:
			structType, ok := t.(*types.Struct)
			if !ok {
				return nil, fmt.Errorf("cannot apply %q to %s (got %T, want struct)", code, t, t)
			}
			if n := structType.NumFields(); index >= n {
				return nil, fmt.Errorf("field index %d out of range [0-%d)", index, n)
			}
			obj = structType.Field(index)
			t = nil

		case opMethod:
			hasMethods, ok := t.(hasMethods) // Interface or Named
			if !ok {
				return nil, fmt.Errorf("cannot apply %q to %s (got %s, want interface or named)", code, t, t)
			}
			if n := hasMethods.NumMethods(); index >= n {
				return nil, fmt.Errorf("method index %d out of range [0-%d)", index, n)
			}
			obj = hasMethods.Method(index)
			t = nil

		case opObj:
			named, ok := t.(*types.Named)
			if !ok {
				return nil, fmt.Errorf("cannot apply %q to %s (got %s, want named)", code, t, t)
			}
			obj = named.Obj()
			t = nil

		default:
			return nil, fmt.Errorf("invalid path: unknown code %q", code)
		}
	}

	if obj.Pkg() != pkg {
		return nil, fmt.Errorf("path denotes %s, which belongs to a different package", obj)
	}

	return obj, nil // success
}
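A minimal sketch (not part of this commit) of the For/Object round trip: encode a struct field's path relative to its package and resolve it again. The tiny source snippet and package name "p" are only example inputs:

	package main

	import (
		"fmt"
		"go/ast"
		"go/importer"
		"go/parser"
		"go/token"
		"go/types"
		"log"

		"golang.org/x/tools/go/types/objectpath"
	)

	const src = `package p

	type T struct{ X int }
	`

	func main() {
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "p.go", src, 0)
		if err != nil {
			log.Fatal(err)
		}
		conf := types.Config{Importer: importer.Default()}
		pkg, err := conf.Check("p", fset, []*ast.File{f}, nil)
		if err != nil {
			log.Fatal(err)
		}

		// Look up field X and encode its path relative to package p.
		obj := pkg.Scope().Lookup("T").Type().Underlying().(*types.Struct).Field(0)
		path, err := objectpath.For(obj)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(path) // e.g. "T.UF0"

		// Decoding the same path yields a logically equivalent object.
		back, err := objectpath.Object(pkg, path)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(back == obj) // true
	}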
118 vendor/golang.org/x/tools/internal/analysisinternal/analysis.go generated vendored Normal file
@@ -0,0 +1,118 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package analysisinternal exposes internal-only fields from go/analysis.
package analysisinternal

import (
	"bytes"
	"fmt"
	"go/ast"
	"go/token"
	"go/types"
	"strings"

	"golang.org/x/tools/go/ast/astutil"
)

func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos {
	// Get the end position for the type error.
	offset, end := fset.PositionFor(start, false).Offset, start
	if offset >= len(src) {
		return end
	}
	if width := bytes.IndexAny(src[offset:], " \n,():;[]+-*"); width > 0 {
		end = start + token.Pos(width)
	}
	return end
}

func ZeroValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
	under := typ
	if n, ok := typ.(*types.Named); ok {
		under = n.Underlying()
	}
	switch u := under.(type) {
	case *types.Basic:
		switch {
		case u.Info()&types.IsNumeric != 0:
			return &ast.BasicLit{Kind: token.INT, Value: "0"}
		case u.Info()&types.IsBoolean != 0:
			return &ast.Ident{Name: "false"}
		case u.Info()&types.IsString != 0:
			return &ast.BasicLit{Kind: token.STRING, Value: `""`}
		default:
			panic("unknown basic type")
		}
	case *types.Chan, *types.Interface, *types.Map, *types.Pointer, *types.Signature, *types.Slice:
		return ast.NewIdent("nil")
	case *types.Struct:
		texpr := typeExpr(fset, f, pkg, typ) // typ because we want the name here.
		if texpr == nil {
			return nil
		}
		return &ast.CompositeLit{
			Type: texpr,
		}
	case *types.Array:
		texpr := typeExpr(fset, f, pkg, u.Elem())
		if texpr == nil {
			return nil
		}
		return &ast.CompositeLit{
			Type: &ast.ArrayType{
				Elt: texpr,
				Len: &ast.BasicLit{Kind: token.INT, Value: fmt.Sprintf("%v", u.Len())},
			},
		}
	}
	return nil
}

func typeExpr(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
	switch t := typ.(type) {
	case *types.Basic:
		switch t.Kind() {
		case types.UnsafePointer:
			return &ast.SelectorExpr{X: ast.NewIdent("unsafe"), Sel: ast.NewIdent("Pointer")}
		default:
			return ast.NewIdent(t.Name())
		}
	case *types.Named:
		if t.Obj().Pkg() == pkg {
			return ast.NewIdent(t.Obj().Name())
		}
		pkgName := t.Obj().Pkg().Name()
		// If the file already imports the package under another name, use that.
		for _, group := range astutil.Imports(fset, f) {
			for _, cand := range group {
				if strings.Trim(cand.Path.Value, `"`) == t.Obj().Pkg().Path() {
					if cand.Name != nil && cand.Name.Name != "" {
						pkgName = cand.Name.Name
					}
				}
			}
		}
		if pkgName == "." {
			return ast.NewIdent(t.Obj().Name())
		}
		return &ast.SelectorExpr{
			X:   ast.NewIdent(pkgName),
			Sel: ast.NewIdent(t.Obj().Name()),
		}
	default:
		return nil // TODO: anonymous structs, but who does that
	}
}

var GetTypeErrors = func(p interface{}) []types.Error { return nil }
var SetTypeErrors = func(p interface{}, errors []types.Error) {}

type TypeErrorPass string

const (
	NoNewVars      TypeErrorPass = "nonewvars"
	NoResultValues TypeErrorPass = "noresultvalues"
	UndeclaredName TypeErrorPass = "undeclaredname"
)
226 vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY vendored Normal file
@@ -0,0 +1,226 @@
Staticcheck and its related tools make use of third party projects,
either by reusing their code, or by statically linking them into
resulting binaries. These projects are:

* The Go Programming Language - https://golang.org/

Copyright (c) 2009 The Go Authors. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


* github.com/BurntSushi/toml - https://github.com/BurntSushi/toml

The MIT License (MIT)

Copyright (c) 2013 TOML authors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.


* github.com/google/renameio - https://github.com/google/renameio

Copyright 2018 Google Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


* github.com/kisielk/gotool – https://github.com/kisielk/gotool

Copyright (c) 2013 Kamil Kisiel <kamil@kamilkisiel.net>

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

All the files in this distribution are covered under either the MIT
license (see the file LICENSE) except some files mentioned below.

match.go, match_test.go:

Copyright (c) 2009 The Go Authors. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


* github.com/rogpeppe/go-internal - https://github.com/rogpeppe/go-internal

Copyright (c) 2018 The Go Authors. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


* golang.org/x/mod/module - https://github.com/golang/mod

Copyright (c) 2009 The Go Authors. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


* golang.org/x/tools/go/analysis - https://github.com/golang/tools

Copyright (c) 2009 The Go Authors. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
129 vendor/honnef.co/go/tools/callgraph/callgraph.go vendored
@@ -1,129 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

/*

Package callgraph defines the call graph and various algorithms
and utilities to operate on it.

A call graph is a labelled directed graph whose nodes represent
functions and whose edge labels represent syntactic function call
sites. The presence of a labelled edge (caller, site, callee)
indicates that caller may call callee at the specified call site.

A call graph is a multigraph: it may contain multiple edges (caller,
*, callee) connecting the same pair of nodes, so long as the edges
differ by label; this occurs when one function calls another function
from multiple call sites. Also, it may contain multiple edges
(caller, site, *) that differ only by callee; this indicates a
polymorphic call.

A SOUND call graph is one that overapproximates the dynamic calling
behaviors of the program in all possible executions. One call graph
is more PRECISE than another if it is a smaller overapproximation of
the dynamic behavior.

All call graphs have a synthetic root node which is responsible for
calling main() and init().

Calls to built-in functions (e.g. panic, println) are not represented
in the call graph; they are treated like built-in operators of the
language.

*/
package callgraph // import "honnef.co/go/tools/callgraph"

// TODO(adonovan): add a function to eliminate wrappers from the
// callgraph, preserving topology.
// More generally, we could eliminate "uninteresting" nodes such as
// nodes from packages we don't care about.

import (
	"fmt"
	"go/token"

	"honnef.co/go/tools/ssa"
)

// A Graph represents a call graph.
//
// A graph may contain nodes that are not reachable from the root.
// If the call graph is sound, such nodes indicate unreachable
// functions.
//
type Graph struct {
	Root  *Node                   // the distinguished root node
	Nodes map[*ssa.Function]*Node // all nodes by function
}

// New returns a new Graph with the specified root node.
func New(root *ssa.Function) *Graph {
	g := &Graph{Nodes: make(map[*ssa.Function]*Node)}
	g.Root = g.CreateNode(root)
	return g
}

// CreateNode returns the Node for fn, creating it if not present.
func (g *Graph) CreateNode(fn *ssa.Function) *Node {
	n, ok := g.Nodes[fn]
	if !ok {
		n = &Node{Func: fn, ID: len(g.Nodes)}
		g.Nodes[fn] = n
	}
	return n
}

// A Node represents a node in a call graph.
type Node struct {
	Func *ssa.Function // the function this node represents
	ID   int           // 0-based sequence number
	In   []*Edge       // unordered set of incoming call edges (n.In[*].Callee == n)
	Out  []*Edge       // unordered set of outgoing call edges (n.Out[*].Caller == n)
}

func (n *Node) String() string {
	return fmt.Sprintf("n%d:%s", n.ID, n.Func)
}

// A Edge represents an edge in the call graph.
//
// Site is nil for edges originating in synthetic or intrinsic
// functions, e.g. reflect.Call or the root of the call graph.
type Edge struct {
	Caller *Node
	Site   ssa.CallInstruction
	Callee *Node
}

func (e Edge) String() string {
	return fmt.Sprintf("%s --> %s", e.Caller, e.Callee)
}

func (e Edge) Description() string {
	var prefix string
	switch e.Site.(type) {
	case nil:
		return "synthetic call"
	case *ssa.Go:
		prefix = "concurrent "
	case *ssa.Defer:
		prefix = "deferred "
	}
	return prefix + e.Site.Common().Description()
}

func (e Edge) Pos() token.Pos {
	if e.Site == nil {
		return token.NoPos
	}
	return e.Site.Pos()
}

// AddEdge adds the edge (caller, site, callee) to the call graph.
// Elimination of duplicate edges is the caller's responsibility.
func AddEdge(caller *Node, site ssa.CallInstruction, callee *Node) {
	e := &Edge{caller, site, callee}
	callee.In = append(callee.In, e)
	caller.Out = append(caller.Out, e)
}
@@ -1,35 +0,0 @@
// Package static computes the call graph of a Go program containing
// only static call edges.
package static // import "honnef.co/go/tools/callgraph/static"

import (
	"honnef.co/go/tools/callgraph"
	"honnef.co/go/tools/ssa"
	"honnef.co/go/tools/ssa/ssautil"
)

// CallGraph computes the call graph of the specified program
// considering only static calls.
//
func CallGraph(prog *ssa.Program) *callgraph.Graph {
	cg := callgraph.New(nil) // TODO(adonovan) eliminate concept of rooted callgraph

	// TODO(adonovan): opt: use only a single pass over the ssa.Program.
	// TODO(adonovan): opt: this is slower than RTA (perhaps because
	// the lower precision means so many edges are allocated)!
	for f := range ssautil.AllFunctions(prog) {
		fnode := cg.CreateNode(f)
		for _, b := range f.Blocks {
			for _, instr := range b.Instrs {
				if site, ok := instr.(ssa.CallInstruction); ok {
					if g := site.Common().StaticCallee(); g != nil {
						gnode := cg.CreateNode(g)
						callgraph.AddEdge(fnode, site, gnode)
					}
				}
			}
		}
	}

	return cg
}
181 vendor/honnef.co/go/tools/callgraph/util.go vendored
@@ -1,181 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package callgraph

import "honnef.co/go/tools/ssa"

// This file provides various utilities over call graphs, such as
// visitation and path search.

// CalleesOf returns a new set containing all direct callees of the
// caller node.
//
func CalleesOf(caller *Node) map[*Node]bool {
	callees := make(map[*Node]bool)
	for _, e := range caller.Out {
		callees[e.Callee] = true
	}
	return callees
}

// GraphVisitEdges visits all the edges in graph g in depth-first order.
// The edge function is called for each edge in postorder. If it
// returns non-nil, visitation stops and GraphVisitEdges returns that
// value.
//
func GraphVisitEdges(g *Graph, edge func(*Edge) error) error {
	seen := make(map[*Node]bool)
	var visit func(n *Node) error
	visit = func(n *Node) error {
		if !seen[n] {
			seen[n] = true
			for _, e := range n.Out {
				if err := visit(e.Callee); err != nil {
					return err
				}
				if err := edge(e); err != nil {
					return err
				}
			}
		}
		return nil
	}
	for _, n := range g.Nodes {
		if err := visit(n); err != nil {
			return err
		}
	}
	return nil
}

// PathSearch finds an arbitrary path starting at node start and
// ending at some node for which isEnd() returns true. On success,
// PathSearch returns the path as an ordered list of edges; on
// failure, it returns nil.
//
func PathSearch(start *Node, isEnd func(*Node) bool) []*Edge {
	stack := make([]*Edge, 0, 32)
	seen := make(map[*Node]bool)
	var search func(n *Node) []*Edge
	search = func(n *Node) []*Edge {
		if !seen[n] {
			seen[n] = true
			if isEnd(n) {
				return stack
			}
			for _, e := range n.Out {
				stack = append(stack, e) // push
				if found := search(e.Callee); found != nil {
					return found
				}
				stack = stack[:len(stack)-1] // pop
			}
		}
		return nil
	}
	return search(start)
}

// DeleteSyntheticNodes removes from call graph g all nodes for
// synthetic functions (except g.Root and package initializers),
// preserving the topology. In effect, calls to synthetic wrappers
// are "inlined".
//
func (g *Graph) DeleteSyntheticNodes() {
	// Measurements on the standard library and go.tools show that
	// resulting graph has ~15% fewer nodes and 4-8% fewer edges
	// than the input.
	//
	// Inlining a wrapper of in-degree m, out-degree n adds m*n
	// and removes m+n edges. Since most wrappers are monomorphic
	// (n=1) this results in a slight reduction. Polymorphic
	// wrappers (n>1), e.g. from embedding an interface value
	// inside a struct to satisfy some interface, cause an
	// increase in the graph, but they seem to be uncommon.

	// Hash all existing edges to avoid creating duplicates.
	edges := make(map[Edge]bool)
	for _, cgn := range g.Nodes {
		for _, e := range cgn.Out {
			edges[*e] = true
		}
	}
	for fn, cgn := range g.Nodes {
		if cgn == g.Root || fn.Synthetic == "" || isInit(cgn.Func) {
			continue // keep
		}
		for _, eIn := range cgn.In {
			for _, eOut := range cgn.Out {
				newEdge := Edge{eIn.Caller, eIn.Site, eOut.Callee}
				if edges[newEdge] {
					continue // don't add duplicate
				}
				AddEdge(eIn.Caller, eIn.Site, eOut.Callee)
				edges[newEdge] = true
			}
		}
		g.DeleteNode(cgn)
	}
}

func isInit(fn *ssa.Function) bool {
	return fn.Pkg != nil && fn.Pkg.Func("init") == fn
}

// DeleteNode removes node n and its edges from the graph g.
// (NB: not efficient for batch deletion.)
func (g *Graph) DeleteNode(n *Node) {
	n.deleteIns()
	n.deleteOuts()
	delete(g.Nodes, n.Func)
}

// deleteIns deletes all incoming edges to n.
func (n *Node) deleteIns() {
	for _, e := range n.In {
		removeOutEdge(e)
	}
	n.In = nil
}

// deleteOuts deletes all outgoing edges from n.
func (n *Node) deleteOuts() {
	for _, e := range n.Out {
		removeInEdge(e)
	}
	n.Out = nil
}

// removeOutEdge removes edge.Caller's outgoing edge 'edge'.
func removeOutEdge(edge *Edge) {
	caller := edge.Caller
	n := len(caller.Out)
	for i, e := range caller.Out {
		if e == edge {
			// Replace it with the final element and shrink the slice.
			caller.Out[i] = caller.Out[n-1]
			caller.Out[n-1] = nil // aid GC
			caller.Out = caller.Out[:n-1]
			return
		}
	}
	panic("edge not found: " + edge.String())
}

// removeInEdge removes edge.Callee's incoming edge 'edge'.
func removeInEdge(edge *Edge) {
	caller := edge.Callee
	n := len(caller.In)
	for i, e := range caller.In {
		if e == edge {
			// Replace it with the final element and shrink the slice.
			caller.In[i] = caller.In[n-1]
			caller.In[n-1] = nil // aid GC
			caller.In = caller.In[:n-1]
			return
		}
	}
	panic("edge not found: " + edge.String())
}
@@ -2,8 +2,10 @@
 package main // import "honnef.co/go/tools/cmd/staticcheck"

 import (
+	"log"
 	"os"

+	"golang.org/x/tools/go/analysis"
 	"honnef.co/go/tools/lint"
 	"honnef.co/go/tools/lint/lintutil"
 	"honnef.co/go/tools/simple"
@@ -14,14 +16,29 @@ import (

 func main() {
 	fs := lintutil.FlagSet("staticcheck")
+	wholeProgram := fs.Bool("unused.whole-program", false, "Run unused in whole program mode")
+	debug := fs.String("debug.unused-graph", "", "Write unused's object graph to `file`")
 	fs.Parse(os.Args[1:])

-	checkers := []lint.Checker{
-		simple.NewChecker(),
-		staticcheck.NewChecker(),
-		stylecheck.NewChecker(),
-		&unused.Checker{},
+	var cs []*analysis.Analyzer
+	for _, v := range simple.Analyzers {
+		cs = append(cs, v)
+	}
+	for _, v := range staticcheck.Analyzers {
+		cs = append(cs, v)
+	}
+	for _, v := range stylecheck.Analyzers {
+		cs = append(cs, v)
 	}

-	lintutil.ProcessFlagSet(checkers, fs)
+	u := unused.NewChecker(*wholeProgram)
+	if *debug != "" {
+		f, err := os.OpenFile(*debug, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
+		if err != nil {
+			log.Fatal(err)
+		}
+		u.Debug = f
+	}
+	cums := []lint.CumulativeChecker{u}
+	lintutil.ProcessFlagSet(cs, cums, fs)
 }
66 vendor/honnef.co/go/tools/config/config.go (vendored)
@@ -1,12 +1,63 @@
 package config
 
 import (
+	"bytes"
+	"fmt"
 	"os"
 	"path/filepath"
+	"reflect"
+	"strings"
 
 	"github.com/BurntSushi/toml"
+	"golang.org/x/tools/go/analysis"
 )
 
+var Analyzer = &analysis.Analyzer{
+	Name: "config",
+	Doc:  "loads configuration for the current package tree",
+	Run: func(pass *analysis.Pass) (interface{}, error) {
+		if len(pass.Files) == 0 {
+			cfg := DefaultConfig
+			return &cfg, nil
+		}
+		cache, err := os.UserCacheDir()
+		if err != nil {
+			cache = ""
+		}
+		var path string
+		for _, f := range pass.Files {
+			p := pass.Fset.PositionFor(f.Pos(), true).Filename
+			// FIXME(dh): using strings.HasPrefix isn't technically
+			// correct, but it should be good enough for now.
+			if cache != "" && strings.HasPrefix(p, cache) {
+				// File in the build cache of the standard Go build system
+				continue
+			}
+			path = p
+			break
+		}
+
+		if path == "" {
+			// The package only consists of generated files.
+			cfg := DefaultConfig
+			return &cfg, nil
+		}
+
+		dir := filepath.Dir(path)
+		cfg, err := Load(dir)
+		if err != nil {
+			return nil, fmt.Errorf("error loading staticcheck.conf: %s", err)
+		}
+		return &cfg, nil
+	},
+	RunDespiteErrors: true,
+	ResultType:       reflect.TypeOf((*Config)(nil)),
+}
+
+func For(pass *analysis.Pass) *Config {
+	return pass.ResultOf[Analyzer].(*Config)
+}
+
 func mergeLists(a, b []string) []string {
 	out := make([]string, 0, len(a)+len(b))
 	for _, el := range b {
@@ -73,7 +124,18 @@ type Config struct {
 	HTTPStatusCodeWhitelist []string `toml:"http_status_code_whitelist"`
 }
 
-var defaultConfig = Config{
+func (c Config) String() string {
+	buf := &bytes.Buffer{}
+
+	fmt.Fprintf(buf, "Checks: %#v\n", c.Checks)
+	fmt.Fprintf(buf, "Initialisms: %#v\n", c.Initialisms)
+	fmt.Fprintf(buf, "DotImportWhitelist: %#v\n", c.DotImportWhitelist)
+	fmt.Fprintf(buf, "HTTPStatusCodeWhitelist: %#v", c.HTTPStatusCodeWhitelist)
+
+	return buf.String()
+}
+
+var DefaultConfig = Config{
 	Checks: []string{"all", "-ST1000", "-ST1003", "-ST1016"},
 	Initialisms: []string{
 		"ACL", "API", "ASCII", "CPU", "CSS", "DNS",
@@ -120,7 +182,7 @@ func parseConfigs(dir string) ([]Config, error) {
 		}
 		dir = ndir
 	}
-	out = append(out, defaultConfig)
+	out = append(out, DefaultConfig)
 	if len(out) < 2 {
 		return out, nil
 	}
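
For reference, a minimal sketch (not part of this diff) of how a check consumes the new configuration analyzer, assuming only the names shown above (config.Analyzer, config.For, Config.Checks) and the standard golang.org/x/tools/go/analysis API; the "example" analyzer itself is hypothetical:

    package example

    import (
        "golang.org/x/tools/go/analysis"

        "honnef.co/go/tools/config"
    )

    // exampleAnalyzer depends on config.Analyzer and reads the merged
    // per-package configuration through config.For.
    var exampleAnalyzer = &analysis.Analyzer{
        Name:     "example",
        Doc:      "illustrates reading staticcheck.conf settings",
        Requires: []*analysis.Analyzer{config.Analyzer},
        Run: func(pass *analysis.Pass) (interface{}, error) {
            cfg := config.For(pass) // *config.Config loaded for this package tree
            _ = cfg.Checks          // e.g. []string{"all", "-ST1000", "-ST1003", "-ST1016"}
            return nil, nil
        },
    }
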
144 vendor/honnef.co/go/tools/facts/deprecated.go (vendored, new file)
@@ -0,0 +1,144 @@
package facts
|
||||||
|
|
||||||
|
import (
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/analysis"
|
||||||
|
)
|
||||||
|
|
||||||
|
type IsDeprecated struct{ Msg string }
|
||||||
|
|
||||||
|
func (*IsDeprecated) AFact() {}
|
||||||
|
func (d *IsDeprecated) String() string { return "Deprecated: " + d.Msg }
|
||||||
|
|
||||||
|
type DeprecatedResult struct {
|
||||||
|
Objects map[types.Object]*IsDeprecated
|
||||||
|
Packages map[*types.Package]*IsDeprecated
|
||||||
|
}
|
||||||
|
|
||||||
|
var Deprecated = &analysis.Analyzer{
|
||||||
|
Name: "fact_deprecated",
|
||||||
|
Doc: "Mark deprecated objects",
|
||||||
|
Run: deprecated,
|
||||||
|
FactTypes: []analysis.Fact{(*IsDeprecated)(nil)},
|
||||||
|
ResultType: reflect.TypeOf(DeprecatedResult{}),
|
||||||
|
}
|
||||||
|
|
||||||
|
func deprecated(pass *analysis.Pass) (interface{}, error) {
|
||||||
|
var names []*ast.Ident
|
||||||
|
|
||||||
|
extractDeprecatedMessage := func(docs []*ast.CommentGroup) string {
|
||||||
|
for _, doc := range docs {
|
||||||
|
if doc == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
parts := strings.Split(doc.Text(), "\n\n")
|
||||||
|
last := parts[len(parts)-1]
|
||||||
|
if !strings.HasPrefix(last, "Deprecated: ") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
alt := last[len("Deprecated: "):]
|
||||||
|
alt = strings.Replace(alt, "\n", " ", -1)
|
||||||
|
return alt
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
doDocs := func(names []*ast.Ident, docs []*ast.CommentGroup) {
|
||||||
|
alt := extractDeprecatedMessage(docs)
|
||||||
|
if alt == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, name := range names {
|
||||||
|
obj := pass.TypesInfo.ObjectOf(name)
|
||||||
|
pass.ExportObjectFact(obj, &IsDeprecated{alt})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var docs []*ast.CommentGroup
|
||||||
|
for _, f := range pass.Files {
|
||||||
|
docs = append(docs, f.Doc)
|
||||||
|
}
|
||||||
|
if alt := extractDeprecatedMessage(docs); alt != "" {
|
||||||
|
// Don't mark package syscall as deprecated, even though
|
||||||
|
// it is. A lot of people still use it for simple
|
||||||
|
// constants like SIGKILL, and I am not comfortable
|
||||||
|
// telling them to use x/sys for that.
|
||||||
|
if pass.Pkg.Path() != "syscall" {
|
||||||
|
pass.ExportPackageFact(&IsDeprecated{alt})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
docs = docs[:0]
|
||||||
|
for _, f := range pass.Files {
|
||||||
|
fn := func(node ast.Node) bool {
|
||||||
|
if node == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
var ret bool
|
||||||
|
switch node := node.(type) {
|
||||||
|
case *ast.GenDecl:
|
||||||
|
switch node.Tok {
|
||||||
|
case token.TYPE, token.CONST, token.VAR:
|
||||||
|
docs = append(docs, node.Doc)
|
||||||
|
return true
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
docs = append(docs, node.Doc)
|
||||||
|
names = []*ast.Ident{node.Name}
|
||||||
|
ret = false
|
||||||
|
case *ast.TypeSpec:
|
||||||
|
docs = append(docs, node.Doc)
|
||||||
|
names = []*ast.Ident{node.Name}
|
||||||
|
ret = true
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
docs = append(docs, node.Doc)
|
||||||
|
names = node.Names
|
||||||
|
ret = false
|
||||||
|
case *ast.File:
|
||||||
|
return true
|
||||||
|
case *ast.StructType:
|
||||||
|
for _, field := range node.Fields.List {
|
||||||
|
doDocs(field.Names, []*ast.CommentGroup{field.Doc})
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
case *ast.InterfaceType:
|
||||||
|
for _, field := range node.Methods.List {
|
||||||
|
doDocs(field.Names, []*ast.CommentGroup{field.Doc})
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if len(names) == 0 || len(docs) == 0 {
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
doDocs(names, docs)
|
||||||
|
|
||||||
|
docs = docs[:0]
|
||||||
|
names = nil
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
ast.Inspect(f, fn)
|
||||||
|
}
|
||||||
|
|
||||||
|
out := DeprecatedResult{
|
||||||
|
Objects: map[types.Object]*IsDeprecated{},
|
||||||
|
Packages: map[*types.Package]*IsDeprecated{},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, fact := range pass.AllObjectFacts() {
|
||||||
|
out.Objects[fact.Object] = fact.Fact.(*IsDeprecated)
|
||||||
|
}
|
||||||
|
for _, fact := range pass.AllPackageFacts() {
|
||||||
|
out.Packages[fact.Package] = fact.Fact.(*IsDeprecated)
|
||||||
|
}
|
||||||
|
|
||||||
|
return out, nil
|
||||||
|
}
|
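
To make the detection rule above concrete (an illustration, not part of the vendored file): extractDeprecatedMessage only fires when the last paragraph of a doc comment starts with "Deprecated: ", and the text after that prefix becomes the IsDeprecated fact's message.

    package example

    // OldHelper frobs the widget.
    //
    // Deprecated: Use NewHelper instead.
    func OldHelper() {}

    // NewHelper is the hypothetical replacement; it carries no fact.
    func NewHelper() {}
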
86 vendor/honnef.co/go/tools/facts/generated.go (vendored, new file)
@@ -0,0 +1,86 @@
package facts
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/analysis"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Generator int
|
||||||
|
|
||||||
|
// A list of known generators we can detect
|
||||||
|
const (
|
||||||
|
Unknown Generator = iota
|
||||||
|
Goyacc
|
||||||
|
Cgo
|
||||||
|
Stringer
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
// used by cgo before Go 1.11
|
||||||
|
oldCgo = []byte("// Created by cgo - DO NOT EDIT")
|
||||||
|
prefix = []byte("// Code generated ")
|
||||||
|
suffix = []byte(" DO NOT EDIT.")
|
||||||
|
nl = []byte("\n")
|
||||||
|
crnl = []byte("\r\n")
|
||||||
|
)
|
||||||
|
|
||||||
|
func isGenerated(path string) (Generator, bool) {
|
||||||
|
f, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
return 0, false
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
br := bufio.NewReader(f)
|
||||||
|
for {
|
||||||
|
s, err := br.ReadBytes('\n')
|
||||||
|
if err != nil && err != io.EOF {
|
||||||
|
return 0, false
|
||||||
|
}
|
||||||
|
s = bytes.TrimSuffix(s, crnl)
|
||||||
|
s = bytes.TrimSuffix(s, nl)
|
||||||
|
if bytes.HasPrefix(s, prefix) && bytes.HasSuffix(s, suffix) {
|
||||||
|
text := string(s[len(prefix) : len(s)-len(suffix)])
|
||||||
|
switch text {
|
||||||
|
case "by goyacc.":
|
||||||
|
return Goyacc, true
|
||||||
|
case "by cmd/cgo;":
|
||||||
|
return Cgo, true
|
||||||
|
}
|
||||||
|
if strings.HasPrefix(text, `by "stringer `) {
|
||||||
|
return Stringer, true
|
||||||
|
}
|
||||||
|
return Unknown, true
|
||||||
|
}
|
||||||
|
if bytes.Equal(s, oldCgo) {
|
||||||
|
return Cgo, true
|
||||||
|
}
|
||||||
|
if err == io.EOF {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0, false
|
||||||
|
}
|
||||||
|
|
||||||
|
var Generated = &analysis.Analyzer{
|
||||||
|
Name: "isgenerated",
|
||||||
|
Doc: "annotate file names that have been code generated",
|
||||||
|
Run: func(pass *analysis.Pass) (interface{}, error) {
|
||||||
|
m := map[string]Generator{}
|
||||||
|
for _, f := range pass.Files {
|
||||||
|
path := pass.Fset.PositionFor(f.Pos(), false).Filename
|
||||||
|
g, ok := isGenerated(path)
|
||||||
|
if ok {
|
||||||
|
m[path] = g
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return m, nil
|
||||||
|
},
|
||||||
|
RunDespiteErrors: true,
|
||||||
|
ResultType: reflect.TypeOf(map[string]Generator{}),
|
||||||
|
}
|
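
As an illustration (not from the diff): isGenerated accepts any line of the form "// Code generated ... DO NOT EDIT." as well as the pre-Go 1.11 cgo header. A file containing the following first line would be reported as generated with the Unknown generator; "protoc-gen-go" here is just an arbitrary example of a tool name.

    // Code generated by protoc-gen-go. DO NOT EDIT.

    package example
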
175 vendor/honnef.co/go/tools/facts/purity.go (vendored, new file)
@@ -0,0 +1,175 @@
package facts
|
||||||
|
|
||||||
|
import (
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"reflect"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/analysis"
|
||||||
|
"honnef.co/go/tools/functions"
|
||||||
|
"honnef.co/go/tools/internal/passes/buildssa"
|
||||||
|
"honnef.co/go/tools/ssa"
|
||||||
|
)
|
||||||
|
|
||||||
|
type IsPure struct{}
|
||||||
|
|
||||||
|
func (*IsPure) AFact() {}
|
||||||
|
func (d *IsPure) String() string { return "is pure" }
|
||||||
|
|
||||||
|
type PurityResult map[*types.Func]*IsPure
|
||||||
|
|
||||||
|
var Purity = &analysis.Analyzer{
|
||||||
|
Name: "fact_purity",
|
||||||
|
Doc: "Mark pure functions",
|
||||||
|
Run: purity,
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
FactTypes: []analysis.Fact{(*IsPure)(nil)},
|
||||||
|
ResultType: reflect.TypeOf(PurityResult{}),
|
||||||
|
}
|
||||||
|
|
||||||
|
var pureStdlib = map[string]struct{}{
|
||||||
|
"errors.New": {},
|
||||||
|
"fmt.Errorf": {},
|
||||||
|
"fmt.Sprintf": {},
|
||||||
|
"fmt.Sprint": {},
|
||||||
|
"sort.Reverse": {},
|
||||||
|
"strings.Map": {},
|
||||||
|
"strings.Repeat": {},
|
||||||
|
"strings.Replace": {},
|
||||||
|
"strings.Title": {},
|
||||||
|
"strings.ToLower": {},
|
||||||
|
"strings.ToLowerSpecial": {},
|
||||||
|
"strings.ToTitle": {},
|
||||||
|
"strings.ToTitleSpecial": {},
|
||||||
|
"strings.ToUpper": {},
|
||||||
|
"strings.ToUpperSpecial": {},
|
||||||
|
"strings.Trim": {},
|
||||||
|
"strings.TrimFunc": {},
|
||||||
|
"strings.TrimLeft": {},
|
||||||
|
"strings.TrimLeftFunc": {},
|
||||||
|
"strings.TrimPrefix": {},
|
||||||
|
"strings.TrimRight": {},
|
||||||
|
"strings.TrimRightFunc": {},
|
||||||
|
"strings.TrimSpace": {},
|
||||||
|
"strings.TrimSuffix": {},
|
||||||
|
"(*net/http.Request).WithContext": {},
|
||||||
|
}
|
||||||
|
|
||||||
|
func purity(pass *analysis.Pass) (interface{}, error) {
|
||||||
|
seen := map[*ssa.Function]struct{}{}
|
||||||
|
ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg
|
||||||
|
var check func(ssafn *ssa.Function) (ret bool)
|
||||||
|
check = func(ssafn *ssa.Function) (ret bool) {
|
||||||
|
if ssafn.Object() == nil {
|
||||||
|
// TODO(dh): support closures
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if pass.ImportObjectFact(ssafn.Object(), new(IsPure)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if ssafn.Pkg != ssapkg {
|
||||||
|
// Function is in another package but wasn't marked as
|
||||||
|
// pure, ergo it isn't pure
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
// Break recursion
|
||||||
|
if _, ok := seen[ssafn]; ok {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
seen[ssafn] = struct{}{}
|
||||||
|
defer func() {
|
||||||
|
if ret {
|
||||||
|
pass.ExportObjectFact(ssafn.Object(), &IsPure{})
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
if functions.IsStub(ssafn) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := pureStdlib[ssafn.Object().(*types.Func).FullName()]; ok {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if ssafn.Signature.Results().Len() == 0 {
|
||||||
|
// A function with no return values is empty or is doing some
|
||||||
|
// work we cannot see (for example because of build tags);
|
||||||
|
// don't consider it pure.
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, param := range ssafn.Params {
|
||||||
|
if _, ok := param.Type().Underlying().(*types.Basic); !ok {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if ssafn.Blocks == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
checkCall := func(common *ssa.CallCommon) bool {
|
||||||
|
if common.IsInvoke() {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
builtin, ok := common.Value.(*ssa.Builtin)
|
||||||
|
if !ok {
|
||||||
|
if common.StaticCallee() != ssafn {
|
||||||
|
if common.StaticCallee() == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if !check(common.StaticCallee()) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
switch builtin.Name() {
|
||||||
|
case "len", "cap", "make", "new":
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
for _, b := range ssafn.Blocks {
|
||||||
|
for _, ins := range b.Instrs {
|
||||||
|
switch ins := ins.(type) {
|
||||||
|
case *ssa.Call:
|
||||||
|
if !checkCall(ins.Common()) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
case *ssa.Defer:
|
||||||
|
if !checkCall(&ins.Call) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
case *ssa.Select:
|
||||||
|
return false
|
||||||
|
case *ssa.Send:
|
||||||
|
return false
|
||||||
|
case *ssa.Go:
|
||||||
|
return false
|
||||||
|
case *ssa.Panic:
|
||||||
|
return false
|
||||||
|
case *ssa.Store:
|
||||||
|
return false
|
||||||
|
case *ssa.FieldAddr:
|
||||||
|
return false
|
||||||
|
case *ssa.UnOp:
|
||||||
|
if ins.Op == token.MUL || ins.Op == token.AND {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
|
||||||
|
check(ssafn)
|
||||||
|
}
|
||||||
|
|
||||||
|
out := PurityResult{}
|
||||||
|
for _, fact := range pass.AllObjectFacts() {
|
||||||
|
out[fact.Object.(*types.Func)] = fact.Fact.(*IsPure)
|
||||||
|
}
|
||||||
|
return out, nil
|
||||||
|
}
|
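
A hypothetical pair of functions, only to illustrate the rules encoded above: parameters must have basic underlying types, the function must return something, and its blocks may not contain stores, sends, goroutines, panics, field-address operations, pointer dereferences, or calls to anything impure (the builtins len, cap, make and new are exempt).

    package example

    // double satisfies the purity checks sketched above: a basic
    // parameter, a result, and nothing but arithmetic on a copy.
    func double(x int) int {
        return x * 2
    }

    // setTo does not: its first parameter is a pointer rather than a
    // basic type, it has no results, and it stores through the pointer.
    func setTo(p *int, v int) {
        *p = v
    }
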
24 vendor/honnef.co/go/tools/facts/token.go (vendored, new file)
@@ -0,0 +1,24 @@
package facts
|
||||||
|
|
||||||
|
import (
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"reflect"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/analysis"
|
||||||
|
)
|
||||||
|
|
||||||
|
var TokenFile = &analysis.Analyzer{
|
||||||
|
Name: "tokenfileanalyzer",
|
||||||
|
Doc: "creates a mapping of *token.File to *ast.File",
|
||||||
|
Run: func(pass *analysis.Pass) (interface{}, error) {
|
||||||
|
m := map[*token.File]*ast.File{}
|
||||||
|
for _, af := range pass.Files {
|
||||||
|
tf := pass.Fset.File(af.Pos())
|
||||||
|
m[tf] = af
|
||||||
|
}
|
||||||
|
return m, nil
|
||||||
|
},
|
||||||
|
RunDespiteErrors: true,
|
||||||
|
ResultType: reflect.TypeOf(map[*token.File]*ast.File{}),
|
||||||
|
}
|
56 vendor/honnef.co/go/tools/functions/concrete.go (vendored, deleted)
@@ -1,56 +0,0 @@
package functions
|
|
||||||
|
|
||||||
import (
|
|
||||||
"go/token"
|
|
||||||
"go/types"
|
|
||||||
|
|
||||||
"honnef.co/go/tools/ssa"
|
|
||||||
)
|
|
||||||
|
|
||||||
func concreteReturnTypes(fn *ssa.Function) []*types.Tuple {
|
|
||||||
res := fn.Signature.Results()
|
|
||||||
if res == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
ifaces := make([]bool, res.Len())
|
|
||||||
any := false
|
|
||||||
for i := 0; i < res.Len(); i++ {
|
|
||||||
_, ifaces[i] = res.At(i).Type().Underlying().(*types.Interface)
|
|
||||||
any = any || ifaces[i]
|
|
||||||
}
|
|
||||||
if !any {
|
|
||||||
return []*types.Tuple{res}
|
|
||||||
}
|
|
||||||
var out []*types.Tuple
|
|
||||||
for _, block := range fn.Blocks {
|
|
||||||
if len(block.Instrs) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
ret, ok := block.Instrs[len(block.Instrs)-1].(*ssa.Return)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
vars := make([]*types.Var, res.Len())
|
|
||||||
for i, v := range ret.Results {
|
|
||||||
var typ types.Type
|
|
||||||
if !ifaces[i] {
|
|
||||||
typ = res.At(i).Type()
|
|
||||||
} else if mi, ok := v.(*ssa.MakeInterface); ok {
|
|
||||||
// TODO(dh): if mi.X is a function call that returns
|
|
||||||
// an interface, call concreteReturnTypes on that
|
|
||||||
// function (or, really, go through Descriptions,
|
|
||||||
// avoid infinite recursion etc, just like nil error
|
|
||||||
// detection)
|
|
||||||
|
|
||||||
// TODO(dh): support Phi nodes
|
|
||||||
typ = mi.X.Type()
|
|
||||||
} else {
|
|
||||||
typ = res.At(i).Type()
|
|
||||||
}
|
|
||||||
vars[i] = types.NewParam(token.NoPos, nil, "", typ)
|
|
||||||
}
|
|
||||||
out = append(out, types.NewTuple(vars...))
|
|
||||||
}
|
|
||||||
// TODO(dh): deduplicate out
|
|
||||||
return out
|
|
||||||
}
|
|
150 vendor/honnef.co/go/tools/functions/functions.go (vendored, deleted)
@@ -1,150 +0,0 @@
package functions
|
|
||||||
|
|
||||||
import (
|
|
||||||
"go/types"
|
|
||||||
"sync"
|
|
||||||
|
|
||||||
"honnef.co/go/tools/callgraph"
|
|
||||||
"honnef.co/go/tools/callgraph/static"
|
|
||||||
"honnef.co/go/tools/ssa"
|
|
||||||
"honnef.co/go/tools/staticcheck/vrp"
|
|
||||||
)
|
|
||||||
|
|
||||||
var stdlibDescs = map[string]Description{
|
|
||||||
"errors.New": {Pure: true},
|
|
||||||
|
|
||||||
"fmt.Errorf": {Pure: true},
|
|
||||||
"fmt.Sprintf": {Pure: true},
|
|
||||||
"fmt.Sprint": {Pure: true},
|
|
||||||
|
|
||||||
"sort.Reverse": {Pure: true},
|
|
||||||
|
|
||||||
"strings.Map": {Pure: true},
|
|
||||||
"strings.Repeat": {Pure: true},
|
|
||||||
"strings.Replace": {Pure: true},
|
|
||||||
"strings.Title": {Pure: true},
|
|
||||||
"strings.ToLower": {Pure: true},
|
|
||||||
"strings.ToLowerSpecial": {Pure: true},
|
|
||||||
"strings.ToTitle": {Pure: true},
|
|
||||||
"strings.ToTitleSpecial": {Pure: true},
|
|
||||||
"strings.ToUpper": {Pure: true},
|
|
||||||
"strings.ToUpperSpecial": {Pure: true},
|
|
||||||
"strings.Trim": {Pure: true},
|
|
||||||
"strings.TrimFunc": {Pure: true},
|
|
||||||
"strings.TrimLeft": {Pure: true},
|
|
||||||
"strings.TrimLeftFunc": {Pure: true},
|
|
||||||
"strings.TrimPrefix": {Pure: true},
|
|
||||||
"strings.TrimRight": {Pure: true},
|
|
||||||
"strings.TrimRightFunc": {Pure: true},
|
|
||||||
"strings.TrimSpace": {Pure: true},
|
|
||||||
"strings.TrimSuffix": {Pure: true},
|
|
||||||
|
|
||||||
"(*net/http.Request).WithContext": {Pure: true},
|
|
||||||
|
|
||||||
"math/rand.Read": {NilError: true},
|
|
||||||
"(*math/rand.Rand).Read": {NilError: true},
|
|
||||||
}
|
|
||||||
|
|
||||||
type Description struct {
|
|
||||||
// The function is known to be pure
|
|
||||||
Pure bool
|
|
||||||
// The function is known to be a stub
|
|
||||||
Stub bool
|
|
||||||
// The function is known to never return (panics notwithstanding)
|
|
||||||
Infinite bool
|
|
||||||
// Variable ranges
|
|
||||||
Ranges vrp.Ranges
|
|
||||||
Loops []Loop
|
|
||||||
// Function returns an error as its last argument, but it is
|
|
||||||
// always nil
|
|
||||||
NilError bool
|
|
||||||
ConcreteReturnTypes []*types.Tuple
|
|
||||||
}
|
|
||||||
|
|
||||||
type descriptionEntry struct {
|
|
||||||
ready chan struct{}
|
|
||||||
result Description
|
|
||||||
}
|
|
||||||
|
|
||||||
type Descriptions struct {
|
|
||||||
CallGraph *callgraph.Graph
|
|
||||||
mu sync.Mutex
|
|
||||||
cache map[*ssa.Function]*descriptionEntry
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewDescriptions(prog *ssa.Program) *Descriptions {
|
|
||||||
return &Descriptions{
|
|
||||||
CallGraph: static.CallGraph(prog),
|
|
||||||
cache: map[*ssa.Function]*descriptionEntry{},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Descriptions) Get(fn *ssa.Function) Description {
|
|
||||||
d.mu.Lock()
|
|
||||||
fd := d.cache[fn]
|
|
||||||
if fd == nil {
|
|
||||||
fd = &descriptionEntry{
|
|
||||||
ready: make(chan struct{}),
|
|
||||||
}
|
|
||||||
d.cache[fn] = fd
|
|
||||||
d.mu.Unlock()
|
|
||||||
|
|
||||||
{
|
|
||||||
fd.result = stdlibDescs[fn.RelString(nil)]
|
|
||||||
fd.result.Pure = fd.result.Pure || d.IsPure(fn)
|
|
||||||
fd.result.Stub = fd.result.Stub || d.IsStub(fn)
|
|
||||||
fd.result.Infinite = fd.result.Infinite || !terminates(fn)
|
|
||||||
fd.result.Ranges = vrp.BuildGraph(fn).Solve()
|
|
||||||
fd.result.Loops = findLoops(fn)
|
|
||||||
fd.result.NilError = fd.result.NilError || IsNilError(fn)
|
|
||||||
fd.result.ConcreteReturnTypes = concreteReturnTypes(fn)
|
|
||||||
}
|
|
||||||
|
|
||||||
close(fd.ready)
|
|
||||||
} else {
|
|
||||||
d.mu.Unlock()
|
|
||||||
<-fd.ready
|
|
||||||
}
|
|
||||||
return fd.result
|
|
||||||
}
|
|
||||||
|
|
||||||
func IsNilError(fn *ssa.Function) bool {
|
|
||||||
// TODO(dh): This is very simplistic, as we only look for constant
|
|
||||||
// nil returns. A more advanced approach would work transitively.
|
|
||||||
// An even more advanced approach would be context-aware and
|
|
||||||
// determine nil errors based on inputs (e.g. io.WriteString to a
|
|
||||||
// bytes.Buffer will always return nil, but an io.WriteString to
|
|
||||||
// an os.File might not). Similarly, an os.File opened for reading
|
|
||||||
// won't error on Close, but other files will.
|
|
||||||
res := fn.Signature.Results()
|
|
||||||
if res.Len() == 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
last := res.At(res.Len() - 1)
|
|
||||||
if types.TypeString(last.Type(), nil) != "error" {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
if fn.Blocks == nil {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
for _, block := range fn.Blocks {
|
|
||||||
if len(block.Instrs) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
ins := block.Instrs[len(block.Instrs)-1]
|
|
||||||
ret, ok := ins.(*ssa.Return)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
v := ret.Results[len(ret.Results)-1]
|
|
||||||
c, ok := v.(*ssa.Const)
|
|
||||||
if !ok {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if !c.IsNil() {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
14 vendor/honnef.co/go/tools/functions/loops.go (vendored)
@@ -2,9 +2,9 @@ package functions
 
 import "honnef.co/go/tools/ssa"
 
-type Loop map[*ssa.BasicBlock]bool
+type Loop struct{ ssa.BlockSet }
 
-func findLoops(fn *ssa.Function) []Loop {
+func FindLoops(fn *ssa.Function) []Loop {
 	if fn.Blocks == nil {
 		return nil
 	}
@@ -18,12 +18,16 @@ func findLoops(fn *ssa.Function) []Loop {
 		// n is a back-edge to h
 		// h is the loop header
 		if n == h {
-			sets = append(sets, Loop{n: true})
+			set := Loop{}
+			set.Add(n)
+			sets = append(sets, set)
 			continue
 		}
-		set := Loop{h: true, n: true}
+		set := Loop{}
+		set.Add(h)
+		set.Add(n)
 		for _, b := range allPredsBut(n, h, nil) {
-			set[b] = true
+			set.Add(b)
 		}
 		sets = append(sets, set)
 	}
101 vendor/honnef.co/go/tools/functions/pure.go (vendored)
@@ -1,26 +1,31 @@
 package functions
 
 import (
-	"go/token"
-	"go/types"
-
-	"honnef.co/go/tools/callgraph"
-	"honnef.co/go/tools/lint/lintdsl"
 	"honnef.co/go/tools/ssa"
 )
 
+func filterDebug(instr []ssa.Instruction) []ssa.Instruction {
+	var out []ssa.Instruction
+	for _, ins := range instr {
+		if _, ok := ins.(*ssa.DebugRef); !ok {
+			out = append(out, ins)
+		}
+	}
+	return out
+}
+
 // IsStub reports whether a function is a stub. A function is
 // considered a stub if it has no instructions or exactly one
 // instruction, which must be either returning only constant values or
 // a panic.
-func (d *Descriptions) IsStub(fn *ssa.Function) bool {
+func IsStub(fn *ssa.Function) bool {
 	if len(fn.Blocks) == 0 {
 		return true
 	}
 	if len(fn.Blocks) > 1 {
 		return false
 	}
-	instrs := lintdsl.FilterDebug(fn.Blocks[0].Instrs)
+	instrs := filterDebug(fn.Blocks[0].Instrs)
 	if len(instrs) != 1 {
 		return false
 	}
@@ -39,85 +44,3 @@ func (d *Descriptions) IsStub(fn *ssa.Function) bool {
 		return false
 	}
 }
-
-func (d *Descriptions) IsPure(fn *ssa.Function) bool {
-	if fn.Signature.Results().Len() == 0 {
-		// A function with no return values is empty or is doing some
-		// work we cannot see (for example because of build tags);
-		// don't consider it pure.
-		return false
-	}
-
-	for _, param := range fn.Params {
-		if _, ok := param.Type().Underlying().(*types.Basic); !ok {
-			return false
-		}
-	}
-
-	if fn.Blocks == nil {
-		return false
-	}
-	checkCall := func(common *ssa.CallCommon) bool {
-		if common.IsInvoke() {
-			return false
-		}
-		builtin, ok := common.Value.(*ssa.Builtin)
-		if !ok {
-			if common.StaticCallee() != fn {
-				if common.StaticCallee() == nil {
-					return false
-				}
-				// TODO(dh): ideally, IsPure wouldn't be responsible
-				// for avoiding infinite recursion, but
-				// FunctionDescriptions would be.
-				node := d.CallGraph.CreateNode(common.StaticCallee())
-				if callgraph.PathSearch(node, func(other *callgraph.Node) bool {
-					return other.Func == fn
-				}) != nil {
-					return false
-				}
-				if !d.Get(common.StaticCallee()).Pure {
-					return false
-				}
-			}
-		} else {
-			switch builtin.Name() {
-			case "len", "cap", "make", "new":
-			default:
-				return false
-			}
-		}
-		return true
-	}
-	for _, b := range fn.Blocks {
-		for _, ins := range b.Instrs {
-			switch ins := ins.(type) {
-			case *ssa.Call:
-				if !checkCall(ins.Common()) {
-					return false
-				}
-			case *ssa.Defer:
-				if !checkCall(&ins.Call) {
-					return false
-				}
-			case *ssa.Select:
-				return false
-			case *ssa.Send:
-				return false
-			case *ssa.Go:
-				return false
-			case *ssa.Panic:
-				return false
-			case *ssa.Store:
-				return false
-			case *ssa.FieldAddr:
-				return false
-			case *ssa.UnOp:
-				if ins.Op == token.MUL || ins.Op == token.AND {
-					return false
-				}
-			}
-		}
-	}
-	return true
-}
@@ -2,10 +2,10 @@ package functions
 
 import "honnef.co/go/tools/ssa"
 
-// terminates reports whether fn is supposed to return, that is if it
+// Terminates reports whether fn is supposed to return, that is if it
 // has at least one theoretic path that returns from the function.
 // Explicit panics do not count as terminating.
-func terminates(fn *ssa.Function) bool {
+func Terminates(fn *ssa.Function) bool {
 	if fn.Blocks == nil {
 		// assuming that a function terminates is the conservative
 		// choice
@@ -6,24 +6,70 @@ import (
 
 // Identical reports whether x and y are identical types.
 // Unlike types.Identical, receivers of Signature types are not ignored.
+// Unlike types.Identical, interfaces are compared via pointer equality (except for the empty interface, which gets deduplicated).
+// Unlike types.Identical, structs are compared via pointer equality.
 func Identical(x, y types.Type) (ret bool) {
 	if !types.Identical(x, y) {
 		return false
 	}
-	sigX, ok := x.(*types.Signature)
-	if !ok {
+	switch x := x.(type) {
+	case *types.Struct:
+		y, ok := y.(*types.Struct)
+		if !ok {
+			// should be impossible
+			return true
+		}
+		return x == y
+	case *types.Interface:
+		// The issue with interfaces, typeutil.Map and types.Identical
+		//
+		// types.Identical, when comparing two interfaces, only looks at the set
+		// of all methods, not differentiating between implicit (embedded) and
+		// explicit methods.
+		//
+		// When we see the following two types, in source order
+		//
+		// type I1 interface { foo() }
+		// type I2 interface { I1 }
+		//
+		// then we will first correctly process I1 and its underlying type. When
+		// we get to I2, we will see that its underlying type is identical to
+		// that of I1 and not process it again. This, however, means that we will
+		// not record the fact that I2 embeds I1. If only I2 is reachable via the
+		// graph root, then I1 will not be considered used.
+		//
+		// We choose to be lazy and compare interfaces by their
+		// pointers. This will obviously miss identical interfaces,
+		// but this only has a runtime cost, it doesn't affect
+		// correctness.
+		y, ok := y.(*types.Interface)
+		if !ok {
+			// should be impossible
+			return true
+		}
+		if x.NumEmbeddeds() == 0 &&
+			y.NumEmbeddeds() == 0 &&
+			x.NumMethods() == 0 &&
+			y.NumMethods() == 0 {
+			// all truly empty interfaces are the same
+			return true
+		}
+		return x == y
+	case *types.Signature:
+		y, ok := y.(*types.Signature)
+		if !ok {
+			// should be impossible
+			return true
+		}
+		if x.Recv() == y.Recv() {
+			return true
+		}
+		if x.Recv() == nil || y.Recv() == nil {
+			return false
+		}
+		return Identical(x.Recv().Type(), y.Recv().Type())
+	default:
 		return true
 	}
-	sigY, ok := y.(*types.Signature)
-	if !ok {
-		// should be impossible
-		return true
-	}
-	if sigX.Recv() == sigY.Recv() {
-		return true
-	}
-	if sigX.Recv() == nil || sigY.Recv() == nil {
-		return false
-	}
-	return Identical(sigX.Recv().Type(), sigY.Recv().Type())
 }
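
A small go/types sketch (not part of the diff) of the behavioural difference this fork introduces: structurally equal struct types remain identical for types.Identical, while the forked Identical, comparing structs by pointer, would treat two separately constructed values as distinct.

    package main

    import (
        "fmt"
        "go/token"
        "go/types"
    )

    func main() {
        mk := func() *types.Struct {
            f := types.NewField(token.NoPos, nil, "X", types.Typ[types.Int], false)
            return types.NewStruct([]*types.Var{f}, nil)
        }
        a, b := mk(), mk()

        fmt.Println(types.Identical(a, b)) // true: same underlying structure
        // The forked Identical above would report false here, because a and b
        // are distinct *types.Struct values.
    }
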
@@ -23,7 +23,11 @@ import (
 //
 // Not thread-safe.
 //
-// This fork handles Signatures correctly, respecting method receivers.
+// This fork handles Signatures correctly, respecting method
+// receivers. Furthermore, it doesn't deduplicate interfaces or
+// structs. Interfaces aren't deduplicated as not to conflate implicit
+// and explicit methods. Structs aren't deduplicated because we track
+// fields of each type separately.
 //
 type Map struct {
 	hasher Hasher // shared by many Maps
474 vendor/honnef.co/go/tools/internal/cache/cache.go (vendored, new file)
@@ -0,0 +1,474 @@
// Copyright 2017 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// Package cache implements a build artifact cache.
|
||||||
|
//
|
||||||
|
// This package is a slightly modified fork of Go's
|
||||||
|
// cmd/go/internal/cache package.
|
||||||
|
package cache
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto/sha256"
|
||||||
|
"encoding/hex"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"honnef.co/go/tools/internal/renameio"
|
||||||
|
)
|
||||||
|
|
||||||
|
// An ActionID is a cache action key, the hash of a complete description of a
|
||||||
|
// repeatable computation (command line, environment variables,
|
||||||
|
// input file contents, executable contents).
|
||||||
|
type ActionID [HashSize]byte
|
||||||
|
|
||||||
|
// An OutputID is a cache output key, the hash of an output of a computation.
|
||||||
|
type OutputID [HashSize]byte
|
||||||
|
|
||||||
|
// A Cache is a package cache, backed by a file system directory tree.
|
||||||
|
type Cache struct {
|
||||||
|
dir string
|
||||||
|
now func() time.Time
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open opens and returns the cache in the given directory.
|
||||||
|
//
|
||||||
|
// It is safe for multiple processes on a single machine to use the
|
||||||
|
// same cache directory in a local file system simultaneously.
|
||||||
|
// They will coordinate using operating system file locks and may
|
||||||
|
// duplicate effort but will not corrupt the cache.
|
||||||
|
//
|
||||||
|
// However, it is NOT safe for multiple processes on different machines
|
||||||
|
// to share a cache directory (for example, if the directory were stored
|
||||||
|
// in a network file system). File locking is notoriously unreliable in
|
||||||
|
// network file systems and may not suffice to protect the cache.
|
||||||
|
//
|
||||||
|
func Open(dir string) (*Cache, error) {
|
||||||
|
info, err := os.Stat(dir)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if !info.IsDir() {
|
||||||
|
return nil, &os.PathError{Op: "open", Path: dir, Err: fmt.Errorf("not a directory")}
|
||||||
|
}
|
||||||
|
for i := 0; i < 256; i++ {
|
||||||
|
name := filepath.Join(dir, fmt.Sprintf("%02x", i))
|
||||||
|
if err := os.MkdirAll(name, 0777); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
c := &Cache{
|
||||||
|
dir: dir,
|
||||||
|
now: time.Now,
|
||||||
|
}
|
||||||
|
return c, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// fileName returns the name of the file corresponding to the given id.
|
||||||
|
func (c *Cache) fileName(id [HashSize]byte, key string) string {
|
||||||
|
return filepath.Join(c.dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key)
|
||||||
|
}
|
||||||
|
|
||||||
|
var errMissing = errors.New("cache entry not found")
|
||||||
|
|
||||||
|
const (
|
||||||
|
// action entry file is "v1 <hex id> <hex out> <decimal size space-padded to 20 bytes> <unixnano space-padded to 20 bytes>\n"
|
||||||
|
hexSize = HashSize * 2
|
||||||
|
entrySize = 2 + 1 + hexSize + 1 + hexSize + 1 + 20 + 1 + 20 + 1
|
||||||
|
)
|
||||||
|
|
||||||
|
// verify controls whether to run the cache in verify mode.
|
||||||
|
// In verify mode, the cache always returns errMissing from Get
|
||||||
|
// but then double-checks in Put that the data being written
|
||||||
|
// exactly matches any existing entry. This provides an easy
|
||||||
|
// way to detect program behavior that would have been different
|
||||||
|
// had the cache entry been returned from Get.
|
||||||
|
//
|
||||||
|
// verify is enabled by setting the environment variable
|
||||||
|
// GODEBUG=gocacheverify=1.
|
||||||
|
var verify = false
|
||||||
|
|
||||||
|
// DebugTest is set when GODEBUG=gocachetest=1 is in the environment.
|
||||||
|
var DebugTest = false
|
||||||
|
|
||||||
|
func init() { initEnv() }
|
||||||
|
|
||||||
|
func initEnv() {
|
||||||
|
verify = false
|
||||||
|
debugHash = false
|
||||||
|
debug := strings.Split(os.Getenv("GODEBUG"), ",")
|
||||||
|
for _, f := range debug {
|
||||||
|
if f == "gocacheverify=1" {
|
||||||
|
verify = true
|
||||||
|
}
|
||||||
|
if f == "gocachehash=1" {
|
||||||
|
debugHash = true
|
||||||
|
}
|
||||||
|
if f == "gocachetest=1" {
|
||||||
|
DebugTest = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get looks up the action ID in the cache,
|
||||||
|
// returning the corresponding output ID and file size, if any.
|
||||||
|
// Note that finding an output ID does not guarantee that the
|
||||||
|
// saved file for that output ID is still available.
|
||||||
|
func (c *Cache) Get(id ActionID) (Entry, error) {
|
||||||
|
if verify {
|
||||||
|
return Entry{}, errMissing
|
||||||
|
}
|
||||||
|
return c.get(id)
|
||||||
|
}
|
||||||
|
|
||||||
|
type Entry struct {
|
||||||
|
OutputID OutputID
|
||||||
|
Size int64
|
||||||
|
Time time.Time
|
||||||
|
}
|
||||||
|
|
||||||
|
// get is Get but does not respect verify mode, so that Put can use it.
|
||||||
|
func (c *Cache) get(id ActionID) (Entry, error) {
|
||||||
|
missing := func() (Entry, error) {
|
||||||
|
return Entry{}, errMissing
|
||||||
|
}
|
||||||
|
f, err := os.Open(c.fileName(id, "a"))
|
||||||
|
if err != nil {
|
||||||
|
return missing()
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
entry := make([]byte, entrySize+1) // +1 to detect whether f is too long
|
||||||
|
if n, err := io.ReadFull(f, entry); n != entrySize || err != io.ErrUnexpectedEOF {
|
||||||
|
return missing()
|
||||||
|
}
|
||||||
|
if entry[0] != 'v' || entry[1] != '1' || entry[2] != ' ' || entry[3+hexSize] != ' ' || entry[3+hexSize+1+hexSize] != ' ' || entry[3+hexSize+1+hexSize+1+20] != ' ' || entry[entrySize-1] != '\n' {
|
||||||
|
return missing()
|
||||||
|
}
|
||||||
|
eid, entry := entry[3:3+hexSize], entry[3+hexSize:]
|
||||||
|
eout, entry := entry[1:1+hexSize], entry[1+hexSize:]
|
||||||
|
esize, entry := entry[1:1+20], entry[1+20:]
|
||||||
|
//lint:ignore SA4006 See https://github.com/dominikh/go-tools/issues/465
|
||||||
|
etime, entry := entry[1:1+20], entry[1+20:]
|
||||||
|
var buf [HashSize]byte
|
||||||
|
if _, err := hex.Decode(buf[:], eid); err != nil || buf != id {
|
||||||
|
return missing()
|
||||||
|
}
|
||||||
|
if _, err := hex.Decode(buf[:], eout); err != nil {
|
||||||
|
return missing()
|
||||||
|
}
|
||||||
|
i := 0
|
||||||
|
for i < len(esize) && esize[i] == ' ' {
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
size, err := strconv.ParseInt(string(esize[i:]), 10, 64)
|
||||||
|
if err != nil || size < 0 {
|
||||||
|
return missing()
|
||||||
|
}
|
||||||
|
i = 0
|
||||||
|
for i < len(etime) && etime[i] == ' ' {
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
tm, err := strconv.ParseInt(string(etime[i:]), 10, 64)
|
||||||
|
if err != nil || size < 0 {
|
||||||
|
return missing()
|
||||||
|
}
|
||||||
|
|
||||||
|
c.used(c.fileName(id, "a"))
|
||||||
|
|
||||||
|
return Entry{buf, size, time.Unix(0, tm)}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetFile looks up the action ID in the cache and returns
|
||||||
|
// the name of the corresponding data file.
|
||||||
|
func (c *Cache) GetFile(id ActionID) (file string, entry Entry, err error) {
|
||||||
|
entry, err = c.Get(id)
|
||||||
|
if err != nil {
|
||||||
|
return "", Entry{}, err
|
||||||
|
}
|
||||||
|
file = c.OutputFile(entry.OutputID)
|
||||||
|
info, err := os.Stat(file)
|
||||||
|
if err != nil || info.Size() != entry.Size {
|
||||||
|
return "", Entry{}, errMissing
|
||||||
|
}
|
||||||
|
return file, entry, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetBytes looks up the action ID in the cache and returns
|
||||||
|
// the corresponding output bytes.
|
||||||
|
// GetBytes should only be used for data that can be expected to fit in memory.
|
||||||
|
func (c *Cache) GetBytes(id ActionID) ([]byte, Entry, error) {
|
||||||
|
entry, err := c.Get(id)
|
||||||
|
if err != nil {
|
||||||
|
return nil, entry, err
|
||||||
|
}
|
||||||
|
data, _ := ioutil.ReadFile(c.OutputFile(entry.OutputID))
|
||||||
|
if sha256.Sum256(data) != entry.OutputID {
|
||||||
|
return nil, entry, errMissing
|
||||||
|
}
|
||||||
|
return data, entry, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// OutputFile returns the name of the cache file storing output with the given OutputID.
|
||||||
|
func (c *Cache) OutputFile(out OutputID) string {
|
||||||
|
file := c.fileName(out, "d")
|
||||||
|
c.used(file)
|
||||||
|
return file
|
||||||
|
}
|
||||||
|
|
||||||
|
// Time constants for cache expiration.
|
||||||
|
//
|
||||||
|
// We set the mtime on a cache file on each use, but at most one per mtimeInterval (1 hour),
|
||||||
|
// to avoid causing many unnecessary inode updates. The mtimes therefore
|
||||||
|
// roughly reflect "time of last use" but may in fact be older by at most an hour.
|
||||||
|
//
|
||||||
|
// We scan the cache for entries to delete at most once per trimInterval (1 day).
|
||||||
|
//
|
||||||
|
// When we do scan the cache, we delete entries that have not been used for
|
||||||
|
// at least trimLimit (5 days). Statistics gathered from a month of usage by
|
||||||
|
// Go developers found that essentially all reuse of cached entries happened
|
||||||
|
// within 5 days of the previous reuse. See golang.org/issue/22990.
|
||||||
|
const (
|
||||||
|
mtimeInterval = 1 * time.Hour
|
||||||
|
trimInterval = 24 * time.Hour
|
||||||
|
trimLimit = 5 * 24 * time.Hour
|
||||||
|
)
|
||||||
|
|
||||||
|
// used makes a best-effort attempt to update mtime on file,
|
||||||
|
// so that mtime reflects cache access time.
|
||||||
|
//
|
||||||
|
// Because the reflection only needs to be approximate,
|
||||||
|
// and to reduce the amount of disk activity caused by using
|
||||||
|
// cache entries, used only updates the mtime if the current
|
||||||
|
// mtime is more than an hour old. This heuristic eliminates
|
||||||
|
// nearly all of the mtime updates that would otherwise happen,
|
||||||
|
// while still keeping the mtimes useful for cache trimming.
|
||||||
|
func (c *Cache) used(file string) {
|
||||||
|
info, err := os.Stat(file)
|
||||||
|
if err == nil && c.now().Sub(info.ModTime()) < mtimeInterval {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
os.Chtimes(file, c.now(), c.now())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Trim removes old cache entries that are likely not to be reused.
|
||||||
|
func (c *Cache) Trim() {
|
||||||
|
now := c.now()
|
||||||
|
|
||||||
|
// We maintain in dir/trim.txt the time of the last completed cache trim.
|
||||||
|
// If the cache has been trimmed recently enough, do nothing.
|
||||||
|
// This is the common case.
|
||||||
|
data, _ := ioutil.ReadFile(filepath.Join(c.dir, "trim.txt"))
|
||||||
|
t, err := strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64)
|
||||||
|
if err == nil && now.Sub(time.Unix(t, 0)) < trimInterval {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Trim each of the 256 subdirectories.
|
||||||
|
// We subtract an additional mtimeInterval
|
||||||
|
// to account for the imprecision of our "last used" mtimes.
|
||||||
|
cutoff := now.Add(-trimLimit - mtimeInterval)
|
||||||
|
for i := 0; i < 256; i++ {
|
||||||
|
subdir := filepath.Join(c.dir, fmt.Sprintf("%02x", i))
|
||||||
|
c.trimSubdir(subdir, cutoff)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ignore errors from here: if we don't write the complete timestamp, the
|
||||||
|
// cache will appear older than it is, and we'll trim it again next time.
|
||||||
|
renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix())))
|
||||||
|
}
|
||||||
|
|
||||||
|
// trimSubdir trims a single cache subdirectory.
|
||||||
|
func (c *Cache) trimSubdir(subdir string, cutoff time.Time) {
|
||||||
|
// Read all directory entries from subdir before removing
|
||||||
|
// any files, in case removing files invalidates the file offset
|
||||||
|
// in the directory scan. Also, ignore error from f.Readdirnames,
|
||||||
|
// because we don't care about reporting the error and we still
|
||||||
|
// want to process any entries found before the error.
|
||||||
|
f, err := os.Open(subdir)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
names, _ := f.Readdirnames(-1)
|
||||||
|
f.Close()
|
||||||
|
|
||||||
|
for _, name := range names {
|
||||||
|
// Remove only cache entries (xxxx-a and xxxx-d).
|
||||||
|
if !strings.HasSuffix(name, "-a") && !strings.HasSuffix(name, "-d") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
entry := filepath.Join(subdir, name)
|
||||||
|
info, err := os.Stat(entry)
|
||||||
|
if err == nil && info.ModTime().Before(cutoff) {
|
||||||
|
os.Remove(entry)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// putIndexEntry adds an entry to the cache recording that executing the action
|
||||||
|
// with the given id produces an output with the given output id (hash) and size.
|
||||||
|
func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify bool) error {
|
||||||
|
// Note: We expect that for one reason or another it may happen
|
||||||
|
// that repeating an action produces a different output hash
|
||||||
|
// (for example, if the output contains a time stamp or temp dir name).
|
||||||
|
// While not ideal, this is also not a correctness problem, so we
|
||||||
|
// don't make a big deal about it. In particular, we leave the action
|
||||||
|
// cache entries writable specifically so that they can be overwritten.
|
||||||
|
//
|
||||||
|
// Setting GODEBUG=gocacheverify=1 does make a big deal:
|
||||||
|
// in verify mode we are double-checking that the cache entries
|
||||||
|
// are entirely reproducible. As just noted, this may be unrealistic
|
||||||
|
// in some cases but the check is also useful for shaking out real bugs.
|
||||||
|
entry := []byte(fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano()))
|
||||||
|
if verify && allowVerify {
|
||||||
|
old, err := c.get(id)
|
||||||
|
if err == nil && (old.OutputID != out || old.Size != size) {
|
||||||
|
// panic to show stack trace, so we can see what code is generating this cache entry.
|
||||||
|
msg := fmt.Sprintf("go: internal cache error: cache verify failed: id=%x changed:<<<\n%s\n>>>\nold: %x %d\nnew: %x %d", id, reverseHash(id), out, size, old.OutputID, old.Size)
|
||||||
|
panic(msg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
file := c.fileName(id, "a")
|
||||||
|
if err := ioutil.WriteFile(file, entry, 0666); err != nil {
|
||||||
|
// TODO(bcmills): This Remove potentially races with another go command writing to file.
|
||||||
|
// Can we eliminate it?
|
||||||
|
os.Remove(file)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
os.Chtimes(file, c.now(), c.now()) // mainly for tests
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Put stores the given output in the cache as the output for the action ID.
|
||||||
|
// It may read file twice. The content of file must not change between the two passes.
|
||||||
|
func (c *Cache) Put(id ActionID, file io.ReadSeeker) (OutputID, int64, error) {
|
||||||
|
return c.put(id, file, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// PutNoVerify is like Put but disables the verify check
|
||||||
|
// when GODEBUG=goverifycache=1 is set.
|
||||||
|
// It is meant for data that is OK to cache but that we expect to vary slightly from run to run,
|
||||||
|
// like test output containing times and the like.
|
||||||
|
func (c *Cache) PutNoVerify(id ActionID, file io.ReadSeeker) (OutputID, int64, error) {
|
||||||
|
return c.put(id, file, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *Cache) put(id ActionID, file io.ReadSeeker, allowVerify bool) (OutputID, int64, error) {
|
||||||
|
// Compute output ID.
|
||||||
|
h := sha256.New()
|
||||||
|
if _, err := file.Seek(0, 0); err != nil {
|
||||||
|
return OutputID{}, 0, err
|
||||||
|
}
|
||||||
|
size, err := io.Copy(h, file)
|
||||||
|
if err != nil {
|
||||||
|
return OutputID{}, 0, err
|
||||||
|
}
|
||||||
|
var out OutputID
|
||||||
|
h.Sum(out[:0])
|
||||||
|
|
||||||
|
// Copy to cached output file (if not already present).
|
||||||
|
if err := c.copyFile(file, out, size); err != nil {
|
||||||
|
return out, size, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add to cache index.
|
||||||
|
return out, size, c.putIndexEntry(id, out, size, allowVerify)
|
||||||
|
}
|
||||||
|
|
||||||
|
// PutBytes stores the given bytes in the cache as the output for the action ID.
|
||||||
|
func (c *Cache) PutBytes(id ActionID, data []byte) error {
|
||||||
|
_, _, err := c.Put(id, bytes.NewReader(data))
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// copyFile copies file into the cache, expecting it to have the given
|
||||||
|
// output ID and size, if that file is not present already.
|
||||||
|
func (c *Cache) copyFile(file io.ReadSeeker, out OutputID, size int64) error {
|
||||||
|
name := c.fileName(out, "d")
|
||||||
|
info, err := os.Stat(name)
|
||||||
|
if err == nil && info.Size() == size {
|
||||||
|
// Check hash.
|
||||||
|
if f, err := os.Open(name); err == nil {
|
||||||
|
h := sha256.New()
|
||||||
|
io.Copy(h, f)
|
||||||
|
f.Close()
|
||||||
|
var out2 OutputID
|
||||||
|
h.Sum(out2[:0])
|
||||||
|
if out == out2 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Hash did not match. Fall through and rewrite file.
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy file to cache directory.
|
||||||
|
mode := os.O_RDWR | os.O_CREATE
|
||||||
|
if err == nil && info.Size() > size { // shouldn't happen but fix in case
|
||||||
|
mode |= os.O_TRUNC
|
||||||
|
}
|
||||||
|
f, err := os.OpenFile(name, mode, 0666)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
if size == 0 {
|
||||||
|
// File now exists with correct size.
|
||||||
|
// Only one possible zero-length file, so contents are OK too.
|
||||||
|
// Early return here makes sure there's a "last byte" for code below.
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// From here on, if any of the I/O writing the file fails,
|
||||||
|
// we make a best-effort attempt to truncate the file f
|
||||||
|
// before returning, to avoid leaving bad bytes in the file.
|
||||||
|
|
||||||
|
// Copy file to f, but also into h to double-check hash.
|
||||||
|
if _, err := file.Seek(0, 0); err != nil {
|
||||||
|
f.Truncate(0)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
h := sha256.New()
|
||||||
|
w := io.MultiWriter(f, h)
|
||||||
|
if _, err := io.CopyN(w, file, size-1); err != nil {
|
||||||
|
f.Truncate(0)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
// Check last byte before writing it; writing it will make the size match
|
||||||
|
// what other processes expect to find and might cause them to start
|
||||||
|
// using the file.
|
||||||
|
buf := make([]byte, 1)
|
||||||
|
if _, err := file.Read(buf); err != nil {
|
||||||
|
f.Truncate(0)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
h.Write(buf)
|
||||||
|
sum := h.Sum(nil)
|
||||||
|
if !bytes.Equal(sum, out[:]) {
|
||||||
|
f.Truncate(0)
|
||||||
|
return fmt.Errorf("file content changed underfoot")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit cache file entry.
|
||||||
|
if _, err := f.Write(buf); err != nil {
|
||||||
|
f.Truncate(0)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := f.Close(); err != nil {
|
||||||
|
// Data might not have been written,
|
||||||
|
// but file may look like it is the right size.
|
||||||
|
// To be extra careful, remove cached file.
|
||||||
|
os.Remove(name)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
os.Chtimes(name, c.now(), c.now()) // mainly for tests
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
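
A rough usage sketch of the cache API added above (illustrative only: the package is internal to honnef.co/go/tools, so outside code cannot actually import it). ActionID and OutputID are 32-byte hashes, so any sha256 digest converts directly:

    package main

    import (
        "crypto/sha256"
        "fmt"
        "log"

        "honnef.co/go/tools/internal/cache" // internal; shown for illustration only
    )

    func main() {
        c, err := cache.Default() // STATICCHECK_CACHE or the user cache dir
        if err != nil {
            log.Fatal(err)
        }

        // Derive an ActionID from an arbitrary key and round-trip some bytes.
        id := cache.ActionID(sha256.Sum256([]byte("example-action")))
        if err := c.PutBytes(id, []byte("cached result")); err != nil {
            log.Fatal(err)
        }
        data, _, err := c.GetBytes(id)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Printf("%s\n", data)
    }
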
85 vendor/honnef.co/go/tools/internal/cache/default.go (vendored, new file)
@@ -0,0 +1,85 @@
// Copyright 2017 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package cache
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"sync"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Default returns the default cache to use.
|
||||||
|
func Default() (*Cache, error) {
|
||||||
|
defaultOnce.Do(initDefaultCache)
|
||||||
|
return defaultCache, defaultDirErr
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
defaultOnce sync.Once
|
||||||
|
defaultCache *Cache
|
||||||
|
)
|
||||||
|
|
||||||
|
// cacheREADME is a message stored in a README in the cache directory.
|
||||||
|
// Because the cache lives outside the normal Go trees, we leave the
|
||||||
|
// README as a courtesy to explain where it came from.
|
||||||
|
const cacheREADME = `This directory holds cached build artifacts from staticcheck.
|
||||||
|
`
|
||||||
|
|
||||||
|
// initDefaultCache does the work of finding the default cache
|
||||||
|
// the first time Default is called.
|
||||||
|
func initDefaultCache() {
|
||||||
|
dir := DefaultDir()
|
||||||
|
if err := os.MkdirAll(dir, 0777); err != nil {
|
||||||
|
log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err)
|
||||||
|
}
|
||||||
|
if _, err := os.Stat(filepath.Join(dir, "README")); err != nil {
|
||||||
|
// Best effort.
|
||||||
|
ioutil.WriteFile(filepath.Join(dir, "README"), []byte(cacheREADME), 0666)
|
||||||
|
}
|
||||||
|
|
||||||
|
c, err := Open(dir)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err)
|
||||||
|
}
|
||||||
|
defaultCache = c
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
defaultDirOnce sync.Once
|
||||||
|
defaultDir string
|
||||||
|
defaultDirErr error
|
||||||
|
)
|
||||||
|
|
||||||
|
// DefaultDir returns the effective STATICCHECK_CACHE setting.
|
||||||
|
func DefaultDir() string {
|
||||||
|
// Save the result of the first call to DefaultDir for later use in
|
||||||
|
// initDefaultCache. cmd/go/main.go explicitly sets GOCACHE so that
|
||||||
|
// subprocesses will inherit it, but that means initDefaultCache can't
|
||||||
|
// otherwise distinguish between an explicit "off" and a UserCacheDir error.
|
||||||
|
|
||||||
|
defaultDirOnce.Do(func() {
|
||||||
|
defaultDir = os.Getenv("STATICCHECK_CACHE")
|
||||||
|
if filepath.IsAbs(defaultDir) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if defaultDir != "" {
|
||||||
|
defaultDirErr = fmt.Errorf("STATICCHECK_CACHE is not an absolute path")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compute default location.
|
||||||
|
dir, err := os.UserCacheDir()
|
||||||
|
if err != nil {
|
||||||
|
defaultDirErr = fmt.Errorf("STATICCHECK_CACHE is not defined and %v", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defaultDir = filepath.Join(dir, "staticcheck")
|
||||||
|
})
|
||||||
|
|
||||||
|
return defaultDir
|
||||||
|
}
|
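A minimal usage sketch of the cache-directory resolution above. The package is internal to honnef.co/go/tools, so an outside import like this would not actually compile; the snippet only illustrates the two exported entry points (Default and DefaultDir) shown in the file, with the import path and main function assumed for the example.

package main

import (
	"fmt"
	"log"

	"honnef.co/go/tools/internal/cache" // illustrative only; internal package
)

func main() {
	// DefaultDir honours STATICCHECK_CACHE and otherwise falls back to
	// os.UserCacheDir()/staticcheck, as implemented above.
	fmt.Println("cache dir:", cache.DefaultDir())

	c, err := cache.Default()
	if err != nil {
		log.Fatal(err)
	}
	_ = c
}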
176 vendor/honnef.co/go/tools/internal/cache/hash.go vendored Normal file
@@ -0,0 +1,176 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package cache

import (
	"bytes"
	"crypto/sha256"
	"fmt"
	"hash"
	"io"
	"os"
	"sync"
)

var debugHash = false // set when GODEBUG=gocachehash=1

// HashSize is the number of bytes in a hash.
const HashSize = 32

// A Hash provides access to the canonical hash function used to index the cache.
// The current implementation uses salted SHA256, but clients must not assume this.
type Hash struct {
	h    hash.Hash
	name string        // for debugging
	buf  *bytes.Buffer // for verify
}

// hashSalt is a salt string added to the beginning of every hash
// created by NewHash. Using the Staticcheck version makes sure that different
// versions of the command do not address the same cache
// entries, so that a bug in one version does not affect the execution
// of other versions. This salt will result in additional ActionID files
// in the cache, but not additional copies of the large output files,
// which are still addressed by unsalted SHA256.
var hashSalt []byte

func SetSalt(b []byte) {
	hashSalt = b
}

// Subkey returns an action ID corresponding to mixing a parent
// action ID with a string description of the subkey.
func Subkey(parent ActionID, desc string) ActionID {
	h := sha256.New()
	h.Write([]byte("subkey:"))
	h.Write(parent[:])
	h.Write([]byte(desc))
	var out ActionID
	h.Sum(out[:0])
	if debugHash {
		fmt.Fprintf(os.Stderr, "HASH subkey %x %q = %x\n", parent, desc, out)
	}
	if verify {
		hashDebug.Lock()
		hashDebug.m[out] = fmt.Sprintf("subkey %x %q", parent, desc)
		hashDebug.Unlock()
	}
	return out
}

// NewHash returns a new Hash.
// The caller is expected to Write data to it and then call Sum.
func NewHash(name string) *Hash {
	h := &Hash{h: sha256.New(), name: name}
	if debugHash {
		fmt.Fprintf(os.Stderr, "HASH[%s]\n", h.name)
	}
	h.Write(hashSalt)
	if verify {
		h.buf = new(bytes.Buffer)
	}
	return h
}

// Write writes data to the running hash.
func (h *Hash) Write(b []byte) (int, error) {
	if debugHash {
		fmt.Fprintf(os.Stderr, "HASH[%s]: %q\n", h.name, b)
	}
	if h.buf != nil {
		h.buf.Write(b)
	}
	return h.h.Write(b)
}

// Sum returns the hash of the data written previously.
func (h *Hash) Sum() [HashSize]byte {
	var out [HashSize]byte
	h.h.Sum(out[:0])
	if debugHash {
		fmt.Fprintf(os.Stderr, "HASH[%s]: %x\n", h.name, out)
	}
	if h.buf != nil {
		hashDebug.Lock()
		if hashDebug.m == nil {
			hashDebug.m = make(map[[HashSize]byte]string)
		}
		hashDebug.m[out] = h.buf.String()
		hashDebug.Unlock()
	}
	return out
}

// In GODEBUG=gocacheverify=1 mode,
// hashDebug holds the input to every computed hash ID,
// so that we can work backward from the ID involved in a
// cache entry mismatch to a description of what should be there.
var hashDebug struct {
	sync.Mutex
	m map[[HashSize]byte]string
}

// reverseHash returns the input used to compute the hash id.
func reverseHash(id [HashSize]byte) string {
	hashDebug.Lock()
	s := hashDebug.m[id]
	hashDebug.Unlock()
	return s
}

var hashFileCache struct {
	sync.Mutex
	m map[string][HashSize]byte
}

// FileHash returns the hash of the named file.
// It caches repeated lookups for a given file,
// and the cache entry for a file can be initialized
// using SetFileHash.
// The hash used by FileHash is not the same as
// the hash used by NewHash.
func FileHash(file string) ([HashSize]byte, error) {
	hashFileCache.Lock()
	out, ok := hashFileCache.m[file]
	hashFileCache.Unlock()

	if ok {
		return out, nil
	}

	h := sha256.New()
	f, err := os.Open(file)
	if err != nil {
		if debugHash {
			fmt.Fprintf(os.Stderr, "HASH %s: %v\n", file, err)
		}
		return [HashSize]byte{}, err
	}
	_, err = io.Copy(h, f)
	f.Close()
	if err != nil {
		if debugHash {
			fmt.Fprintf(os.Stderr, "HASH %s: %v\n", file, err)
		}
		return [HashSize]byte{}, err
	}
	h.Sum(out[:0])
	if debugHash {
		fmt.Fprintf(os.Stderr, "HASH %s: %x\n", file, out)
	}

	SetFileHash(file, out)
	return out, nil
}

// SetFileHash sets the hash returned by FileHash for file.
func SetFileHash(file string, sum [HashSize]byte) {
	hashFileCache.Lock()
	if hashFileCache.m == nil {
		hashFileCache.m = make(map[string][HashSize]byte)
	}
	hashFileCache.m[file] = sum
	hashFileCache.Unlock()
}
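A short sketch of how the two hashing paths above differ, written as a helper that could live inside the cache package (the function name and the string being hashed are invented for illustration; only NewHash, Sum and FileHash from the file above are used).

package cache

import "fmt"

// exampleHashing contrasts the salted action hash with the per-file hash.
func exampleHashing(path string) error {
	h := NewHash("example")
	h.Write([]byte("some input"))
	sum := h.Sum() // salted SHA256, a [HashSize]byte action key

	fileSum, err := FileHash(path) // unsalted SHA256, cached per file path
	if err != nil {
		return err
	}
	fmt.Printf("action %x, file %x\n", sum, fileSum)
	return nil
}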
116 vendor/honnef.co/go/tools/internal/passes/buildssa/buildssa.go vendored Normal file
@@ -0,0 +1,116 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package buildssa defines an Analyzer that constructs the SSA
// representation of an error-free package and returns the set of all
// functions within it. It does not report any diagnostics itself but
// may be used as an input to other analyzers.
//
// THIS INTERFACE IS EXPERIMENTAL AND MAY BE SUBJECT TO INCOMPATIBLE CHANGE.
package buildssa

import (
	"go/ast"
	"go/types"
	"reflect"

	"golang.org/x/tools/go/analysis"
	"honnef.co/go/tools/ssa"
)

var Analyzer = &analysis.Analyzer{
	Name:       "buildssa",
	Doc:        "build SSA-form IR for later passes",
	Run:        run,
	ResultType: reflect.TypeOf(new(SSA)),
}

// SSA provides SSA-form intermediate representation for all the
// non-blank source functions in the current package.
type SSA struct {
	Pkg      *ssa.Package
	SrcFuncs []*ssa.Function
}

func run(pass *analysis.Pass) (interface{}, error) {
	// Plundered from ssautil.BuildPackage.

	// We must create a new Program for each Package because the
	// analysis API provides no place to hang a Program shared by
	// all Packages. Consequently, SSA Packages and Functions do not
	// have a canonical representation across an analysis session of
	// multiple packages. This is unlikely to be a problem in
	// practice because the analysis API essentially forces all
	// packages to be analysed independently, so any given call to
	// Analysis.Run on a package will see only SSA objects belonging
	// to a single Program.

	mode := ssa.GlobalDebug

	prog := ssa.NewProgram(pass.Fset, mode)

	// Create SSA packages for all imports.
	// Order is not significant.
	created := make(map[*types.Package]bool)
	var createAll func(pkgs []*types.Package)
	createAll = func(pkgs []*types.Package) {
		for _, p := range pkgs {
			if !created[p] {
				created[p] = true
				prog.CreatePackage(p, nil, nil, true)
				createAll(p.Imports())
			}
		}
	}
	createAll(pass.Pkg.Imports())

	// Create and build the primary package.
	ssapkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false)
	ssapkg.Build()

	// Compute list of source functions, including literals,
	// in source order.
	var funcs []*ssa.Function
	var addAnons func(f *ssa.Function)
	addAnons = func(f *ssa.Function) {
		funcs = append(funcs, f)
		for _, anon := range f.AnonFuncs {
			addAnons(anon)
		}
	}
	addAnons(ssapkg.Members["init"].(*ssa.Function))
	for _, f := range pass.Files {
		for _, decl := range f.Decls {
			if fdecl, ok := decl.(*ast.FuncDecl); ok {

				// SSA will not build a Function
				// for a FuncDecl named blank.
				// That's arguably too strict but
				// relaxing it would break uniqueness of
				// names of package members.
				if fdecl.Name.Name == "_" {
					continue
				}

				// (init functions have distinct Func
				// objects named "init" and distinct
				// ssa.Functions named "init#1", ...)

				fn := pass.TypesInfo.Defs[fdecl.Name].(*types.Func)
				if fn == nil {
					panic(fn)
				}

				f := ssapkg.Prog.FuncValue(fn)
				if f == nil {
					panic(fn)
				}

				addAnons(f)
			}
		}
	}

	return &SSA{Pkg: ssapkg, SrcFuncs: funcs}, nil
}
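For context, this is how a check consumes the result above: it declares buildssa.Analyzer in Requires and reads the *SSA value out of pass.ResultOf, the same pattern the CheckRangeStringRunes change further down uses. The analyzer name, doc string and package name here are invented for the sketch.

package example

import (
	"golang.org/x/tools/go/analysis"
	"honnef.co/go/tools/internal/passes/buildssa"
)

// Analyzer is a hypothetical check that consumes the SSA built above.
var Analyzer = &analysis.Analyzer{
	Name:     "example",
	Doc:      "illustrative consumer of buildssa",
	Requires: []*analysis.Analyzer{buildssa.Analyzer},
	Run: func(pass *analysis.Pass) (interface{}, error) {
		ssaResult := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
		for _, fn := range ssaResult.SrcFuncs {
			_ = fn // inspect each source function here
		}
		return nil, nil
	},
}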
83 vendor/honnef.co/go/tools/internal/renameio/renameio.go vendored Normal file
@@ -0,0 +1,83 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package renameio writes files atomically by renaming temporary files.
package renameio

import (
	"bytes"
	"io"
	"io/ioutil"
	"os"
	"path/filepath"
	"runtime"
	"strings"
	"time"
)

const patternSuffix = "*.tmp"

// Pattern returns a glob pattern that matches the unrenamed temporary files
// created when writing to filename.
func Pattern(filename string) string {
	return filepath.Join(filepath.Dir(filename), filepath.Base(filename)+patternSuffix)
}

// WriteFile is like ioutil.WriteFile, but first writes data to an arbitrary
// file in the same directory as filename, then renames it atomically to the
// final name.
//
// That ensures that the final location, if it exists, is always a complete file.
func WriteFile(filename string, data []byte) (err error) {
	return WriteToFile(filename, bytes.NewReader(data))
}

// WriteToFile is a variant of WriteFile that accepts the data as an io.Reader
// instead of a slice.
func WriteToFile(filename string, data io.Reader) (err error) {
	f, err := ioutil.TempFile(filepath.Dir(filename), filepath.Base(filename)+patternSuffix)
	if err != nil {
		return err
	}
	defer func() {
		// Only call os.Remove on f.Name() if we failed to rename it: otherwise,
		// some other process may have created a new file with the same name after
		// that.
		if err != nil {
			f.Close()
			os.Remove(f.Name())
		}
	}()

	if _, err := io.Copy(f, data); err != nil {
		return err
	}
	// Sync the file before renaming it: otherwise, after a crash the reader may
	// observe a 0-length file instead of the actual contents.
	// See https://golang.org/issue/22397#issuecomment-380831736.
	if err := f.Sync(); err != nil {
		return err
	}
	if err := f.Close(); err != nil {
		return err
	}

	var start time.Time
	for {
		err := os.Rename(f.Name(), filename)
		if err == nil || runtime.GOOS != "windows" || !strings.HasSuffix(err.Error(), "Access is denied.") {
			return err
		}

		// Windows seems to occasionally trigger spurious "Access is denied" errors
		// here (see golang.org/issue/31247). We're not sure why. It's probably
		// worth a little extra latency to avoid propagating the spurious errors.
		if start.IsZero() {
			start = time.Now()
		} else if time.Since(start) >= 500*time.Millisecond {
			return err
		}
		time.Sleep(5 * time.Millisecond)
	}
}
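A usage sketch of the atomic write above. The package is internal to honnef.co/go/tools, so an external import like this is illustrative only, and the file name and payload are invented for the example.

package main

import (
	"log"

	"honnef.co/go/tools/internal/renameio" // illustrative only; internal package
)

func main() {
	// Because the data is written to a temp file, synced, and then renamed,
	// concurrent readers see either the old contents or the new contents,
	// never a partially written file.
	if err := renameio.WriteFile("config.json", []byte(`{"ok":true}`)); err != nil {
		log.Fatal(err)
	}
}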
@@ -4,13 +4,14 @@ import (
 	"go/ast"
 	"go/types"
 
-	"honnef.co/go/tools/lint"
+	"golang.org/x/tools/go/analysis"
+	"honnef.co/go/tools/internal/passes/buildssa"
 	. "honnef.co/go/tools/lint/lintdsl"
 	"honnef.co/go/tools/ssa"
 )
 
-func CheckRangeStringRunes(j *lint.Job) {
-	for _, ssafn := range j.Pkg.InitialFunctions {
+func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) {
+	for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
 		fn := func(node ast.Node) bool {
 			rng, ok := node.(*ast.RangeStmt)
 			if !ok || !IsBlank(rng.Key) {
@@ -59,10 +60,11 @@ func CheckRangeStringRunes(j *lint.Job) {
 				return true
 			}
 
-			j.Errorf(rng, "should range over string, not []rune(string)")
+			pass.Reportf(rng.Pos(), "should range over string, not []rune(string)")
 
 			return true
 		}
 		Inspect(ssafn.Syntax(), fn)
 	}
+	return nil, nil
 }
44 vendor/honnef.co/go/tools/lint/generated.go vendored
@@ -1,44 +0,0 @@
-package lint
-
-import (
-	"bufio"
-	"bytes"
-	"io"
-	"os"
-)
-
-var (
-	// used by cgo before Go 1.11
-	oldCgo = []byte("// Created by cgo - DO NOT EDIT")
-	prefix = []byte("// Code generated ")
-	suffix = []byte(" DO NOT EDIT.")
-	nl     = []byte("\n")
-	crnl   = []byte("\r\n")
-)
-
-func isGenerated(path string) bool {
-	f, err := os.Open(path)
-	if err != nil {
-		return false
-	}
-	defer f.Close()
-	br := bufio.NewReader(f)
-	for {
-		s, err := br.ReadBytes('\n')
-		if err != nil && err != io.EOF {
-			return false
-		}
-		s = bytes.TrimSuffix(s, crnl)
-		s = bytes.TrimSuffix(s, nl)
-		if bytes.HasPrefix(s, prefix) && bytes.HasSuffix(s, suffix) {
-			return true
-		}
-		if bytes.Equal(s, oldCgo) {
-			return true
-		}
-		if err == io.EOF {
-			break
-		}
-	}
-	return false
-}
712 vendor/honnef.co/go/tools/lint/lint.go vendored
@@ -4,34 +4,53 @@ package lint // import "honnef.co/go/tools/lint"
 import (
 	"bytes"
 	"fmt"
-	"go/ast"
+	"go/scanner"
 	"go/token"
 	"go/types"
-	"io"
-	"os"
 	"path/filepath"
-	"runtime"
 	"sort"
 	"strings"
 	"sync"
-	"time"
+	"sync/atomic"
 	"unicode"
 
-	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/packages"
 	"honnef.co/go/tools/config"
-	"honnef.co/go/tools/ssa"
-	"honnef.co/go/tools/ssa/ssautil"
 )
 
-type Job struct {
-	Pkg       *Pkg
-	GoVersion int
-
-	check    Check
-	problems []Problem
-
-	duration time.Duration
+type Documentation struct {
+	Title      string
+	Text       string
+	Since      string
+	NonDefault bool
+	Options    []string
+}
+
+func (doc *Documentation) String() string {
+	b := &strings.Builder{}
+	fmt.Fprintf(b, "%s\n\n", doc.Title)
+	if doc.Text != "" {
+		fmt.Fprintf(b, "%s\n\n", doc.Text)
+	}
+	fmt.Fprint(b, "Available since\n ")
+	if doc.Since == "" {
+		fmt.Fprint(b, "unreleased")
+	} else {
+		fmt.Fprintf(b, "%s", doc.Since)
+	}
+	if doc.NonDefault {
+		fmt.Fprint(b, ", non-default")
+	}
+	fmt.Fprint(b, "\n")
+	if len(doc.Options) > 0 {
+		fmt.Fprintf(b, "\nOptions\n")
+		for _, opt := range doc.Options {
+			fmt.Fprintf(b, " %s", opt)
+		}
+		fmt.Fprint(b, "\n")
+	}
+	return b.String()
 }
 
 type Ignore interface {
@@ -42,17 +61,18 @@ type LineIgnore struct {
 	File   string
 	Line   int
 	Checks []string
-	matched bool
-	pos    token.Pos
+	Matched bool
+	Pos    token.Pos
 }
 
 func (li *LineIgnore) Match(p Problem) bool {
-	if p.Position.Filename != li.File || p.Position.Line != li.Line {
+	pos := p.Pos
+	if pos.Filename != li.File || pos.Line != li.Line {
 		return false
 	}
 	for _, c := range li.Checks {
 		if m, _ := filepath.Match(c, p.Check); m {
-			li.matched = true
+			li.Matched = true
 			return true
 		}
 	}
@@ -61,7 +81,7 @@ func (li *LineIgnore) Match(p Problem) bool {
 
 func (li *LineIgnore) String() string {
 	matched := "not matched"
-	if li.matched {
+	if li.Matched {
 		matched = "matched"
 	}
 	return fmt.Sprintf("%s:%d %s (%s)", li.File, li.Line, strings.Join(li.Checks, ", "), matched)
@@ -73,7 +93,7 @@ type FileIgnore struct {
 }
 
 func (fi *FileIgnore) Match(p Problem) bool {
-	if p.Position.Filename != fi.File {
+	if p.Pos.Filename != fi.File {
 		return false
 	}
 	for _, c := range fi.Checks {
@@ -84,43 +104,6 @@ func (fi *FileIgnore) Match(p Problem) bool {
 	return false
 }
 
-type GlobIgnore struct {
-	Pattern string
-	Checks  []string
-}
-
-func (gi *GlobIgnore) Match(p Problem) bool {
-	if gi.Pattern != "*" {
-		pkgpath := p.Package.Types.Path()
-		if strings.HasSuffix(pkgpath, "_test") {
-			pkgpath = pkgpath[:len(pkgpath)-len("_test")]
-		}
-		name := filepath.Join(pkgpath, filepath.Base(p.Position.Filename))
-		if m, _ := filepath.Match(gi.Pattern, name); !m {
-			return false
-		}
-	}
-	for _, c := range gi.Checks {
-		if m, _ := filepath.Match(c, p.Check); m {
-			return true
-		}
-	}
-	return false
-}
-
-type Program struct {
-	SSA             *ssa.Program
-	InitialPackages []*Pkg
-	AllPackages     []*packages.Package
-	AllFunctions    []*ssa.Function
-}
-
-func (prog *Program) Fset() *token.FileSet {
-	return prog.InitialPackages[0].Fset
-}
-
-type Func func(*Job)
-
 type Severity uint8
 
 const (
@@ -131,367 +114,245 @@ const (
 
 // Problem represents a problem in some source code.
 type Problem struct {
-	Position token.Position // position in source file
-	Text     string         // the prose that describes the problem
+	Pos      token.Position
+	End      token.Position
+	Message  string
 	Check    string
-	Package  *Pkg
 	Severity Severity
 }
 
 func (p *Problem) String() string {
-	if p.Check == "" {
-		return p.Text
-	}
-	return fmt.Sprintf("%s (%s)", p.Text, p.Check)
-}
-
-type Checker interface {
-	Name() string
-	Prefix() string
-	Init(*Program)
-	Checks() []Check
-}
-
-type Check struct {
-	Fn              Func
-	ID              string
-	FilterGenerated bool
-	Doc             string
+	return fmt.Sprintf("%s (%s)", p.Message, p.Check)
 }
 
 // A Linter lints Go source code.
 type Linter struct {
-	Checkers      []Checker
-	Ignores       []Ignore
+	Checkers           []*analysis.Analyzer
+	CumulativeCheckers []CumulativeChecker
 	GoVersion     int
-	ReturnIgnored bool
-	Config        config.Config
-
-	MaxConcurrentJobs int
-	PrintStats        bool
-
-	automaticIgnores []Ignore
+	Config        config.Config
+	Stats         Stats
 }
 
-func (l *Linter) ignore(p Problem) bool {
-	ignored := false
-	for _, ig := range l.automaticIgnores {
-		// We cannot short-circuit these, as we want to record, for
-		// each ignore, whether it matched or not.
-		if ig.Match(p) {
-			ignored = true
+type CumulativeChecker interface {
+	Analyzer() *analysis.Analyzer
+	Result() []types.Object
+	ProblemObject(*token.FileSet, types.Object) Problem
+}
+
+func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error) {
+	var allAnalyzers []*analysis.Analyzer
+	allAnalyzers = append(allAnalyzers, l.Checkers...)
+	for _, cum := range l.CumulativeCheckers {
+		allAnalyzers = append(allAnalyzers, cum.Analyzer())
 	}
-	if ignored {
-		// no need to execute other ignores if we've already had a
-		// match.
-		return true
-	}
-	for _, ig := range l.Ignores {
-		// We can short-circuit here, as we aren't tracking any
-		// information.
-		if ig.Match(p) {
-			return true
+
+	// The -checks command line flag overrules all configuration
+	// files, which means that for `-checks="foo"`, no check other
+	// than foo can ever be reported to the user. Make use of this
+	// fact to cull the list of analyses we need to run.
+
+	// replace "inherit" with "all", as we don't want to base the
+	// list of all checks on the default configuration, which
+	// disables certain checks.
+	checks := make([]string, len(l.Config.Checks))
+	copy(checks, l.Config.Checks)
+	for i, c := range checks {
+		if c == "inherit" {
+			checks[i] = "all"
 		}
 	}
 
-	return false
-}
-
-func (j *Job) File(node Positioner) *ast.File {
-	return j.Pkg.tokenFileMap[j.Pkg.Fset.File(node.Pos())]
-}
-
-func parseDirective(s string) (cmd string, args []string) {
-	if !strings.HasPrefix(s, "//lint:") {
-		return "", nil
-	}
-	s = strings.TrimPrefix(s, "//lint:")
-	fields := strings.Split(s, " ")
-	return fields[0], fields[1:]
-}
-
-type PerfStats struct {
-	PackageLoading time.Duration
-	SSABuild       time.Duration
-	OtherInitWork  time.Duration
-	CheckerInits   map[string]time.Duration
-	Jobs           []JobStat
-}
-
-type JobStat struct {
-	Job      string
-	Duration time.Duration
-}
-
-func (stats *PerfStats) Print(w io.Writer) {
-	fmt.Fprintln(w, "Package loading:", stats.PackageLoading)
-	fmt.Fprintln(w, "SSA build:", stats.SSABuild)
-	fmt.Fprintln(w, "Other init work:", stats.OtherInitWork)
-
-	fmt.Fprintln(w, "Checker inits:")
-	for checker, d := range stats.CheckerInits {
-		fmt.Fprintf(w, "\t%s: %s\n", checker, d)
-	}
-	fmt.Fprintln(w)
-
-	fmt.Fprintln(w, "Jobs:")
-	sort.Slice(stats.Jobs, func(i, j int) bool {
-		return stats.Jobs[i].Duration < stats.Jobs[j].Duration
-	})
-	var total time.Duration
-	for _, job := range stats.Jobs {
-		fmt.Fprintf(w, "\t%s: %s\n", job.Job, job.Duration)
-		total += job.Duration
-	}
-	fmt.Fprintf(w, "\tTotal: %s\n", total)
-}
-
-func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem {
-	allPkgs := allPackages(initial)
-	t := time.Now()
-	ssaprog, _ := ssautil.Packages(allPkgs, ssa.GlobalDebug)
-	ssaprog.Build()
-	if stats != nil {
-		stats.SSABuild = time.Since(t)
-	}
-	runtime.GC()
-
-	t = time.Now()
-	pkgMap := map[*ssa.Package]*Pkg{}
-	var pkgs []*Pkg
-	for _, pkg := range initial {
-		ssapkg := ssaprog.Package(pkg.Types)
-		var cfg config.Config
-		if len(pkg.GoFiles) != 0 {
-			path := pkg.GoFiles[0]
-			dir := filepath.Dir(path)
-			var err error
-			// OPT(dh): we're rebuilding the entire config tree for
-			// each package. for example, if we check a/b/c and
-			// a/b/c/d, we'll process a, a/b, a/b/c, a, a/b, a/b/c,
-			// a/b/c/d – we should cache configs per package and only
-			// load the new levels.
-			cfg, err = config.Load(dir)
-			if err != nil {
-				// FIXME(dh): we couldn't load the config, what are we
-				// supposed to do? probably tell the user somehow
-			}
-			cfg = cfg.Merge(l.Config)
-		}
-
-		pkg := &Pkg{
-			SSA:          ssapkg,
-			Package:      pkg,
-			Config:       cfg,
-			Generated:    map[string]bool{},
-			tokenFileMap: map[*token.File]*ast.File{},
-		}
-		pkg.Inspector = inspector.New(pkg.Syntax)
-		for _, f := range pkg.Syntax {
-			tf := pkg.Fset.File(f.Pos())
-			pkg.tokenFileMap[tf] = f
-
-			path := DisplayPosition(pkg.Fset, f.Pos()).Filename
-			pkg.Generated[path] = isGenerated(path)
-		}
-		pkgMap[ssapkg] = pkg
-		pkgs = append(pkgs, pkg)
-	}
-
-	prog := &Program{
-		SSA:             ssaprog,
-		InitialPackages: pkgs,
-		AllPackages:     allPkgs,
-	}
-
-	for fn := range ssautil.AllFunctions(ssaprog) {
-		prog.AllFunctions = append(prog.AllFunctions, fn)
-		if fn.Pkg == nil {
-			continue
-		}
-		if pkg, ok := pkgMap[fn.Pkg]; ok {
-			pkg.InitialFunctions = append(pkg.InitialFunctions, fn)
+	allowed := FilterChecks(allAnalyzers, checks)
+	var allowedAnalyzers []*analysis.Analyzer
+	for _, c := range l.Checkers {
+		if allowed[c.Name] {
+			allowedAnalyzers = append(allowedAnalyzers, c)
 		}
 	}
-
-	var out []Problem
-	l.automaticIgnores = nil
-	for _, pkg := range initial {
-		for _, f := range pkg.Syntax {
-			found := false
-		commentLoop:
-			for _, cg := range f.Comments {
-				for _, c := range cg.List {
-					if strings.Contains(c.Text, "//lint:") {
-						found = true
-						break commentLoop
+	hasCumulative := false
+	for _, cum := range l.CumulativeCheckers {
+		a := cum.Analyzer()
+		if allowed[a.Name] {
+			hasCumulative = true
+			allowedAnalyzers = append(allowedAnalyzers, a)
+		}
+	}
+
+	r, err := NewRunner(&l.Stats)
+	if err != nil {
+		return nil, err
+	}
+	r.goVersion = l.GoVersion
+
+	pkgs, err := r.Run(cfg, patterns, allowedAnalyzers, hasCumulative)
+	if err != nil {
+		return nil, err
+	}
+
+	tpkgToPkg := map[*types.Package]*Package{}
+	for _, pkg := range pkgs {
+		tpkgToPkg[pkg.Types] = pkg
+
+		for _, e := range pkg.errs {
+			switch e := e.(type) {
+			case types.Error:
+				p := Problem{
+					Pos:      e.Fset.PositionFor(e.Pos, false),
+					Message:  e.Msg,
+					Severity: Error,
+					Check:    "compile",
+				}
+				pkg.problems = append(pkg.problems, p)
+			case packages.Error:
+				msg := e.Msg
+				if len(msg) != 0 && msg[0] == '\n' {
+					// TODO(dh): See https://github.com/golang/go/issues/32363
+					msg = msg[1:]
+				}
+
+				var pos token.Position
+				if e.Pos == "" {
+					// Under certain conditions (malformed package
+					// declarations, multiple packages in the same
+					// directory), go list emits an error on stderr
+					// instead of JSON. Those errors do not have
+					// associated position information in
+					// go/packages.Error, even though the output on
+					// stderr may contain it.
+					if p, n, err := parsePos(msg); err == nil {
+						if abs, err := filepath.Abs(p.Filename); err == nil {
+							p.Filename = abs
+						}
+						pos = p
+						msg = msg[n+2:]
+					}
+				} else {
+					var err error
+					pos, _, err = parsePos(e.Pos)
+					if err != nil {
+						panic(fmt.Sprintf("internal error: %s", e))
 					}
 				}
-			}
-			if !found {
-				continue
-			}
-			cm := ast.NewCommentMap(pkg.Fset, f, f.Comments)
-			for node, cgs := range cm {
-				for _, cg := range cgs {
-					for _, c := range cg.List {
-						if !strings.HasPrefix(c.Text, "//lint:") {
-							continue
-						}
-						cmd, args := parseDirective(c.Text)
-						switch cmd {
-						case "ignore", "file-ignore":
-							if len(args) < 2 {
-								// FIXME(dh): this causes duplicated warnings when using megacheck
-								p := Problem{
-									Position: DisplayPosition(prog.Fset(), c.Pos()),
-									Text:     "malformed linter directive; missing the required reason field?",
-									Check:    "",
-									Package:  nil,
-								}
-								out = append(out, p)
-								continue
-							}
-						default:
-							// unknown directive, ignore
-							continue
-						}
-						checks := strings.Split(args[0], ",")
-						pos := DisplayPosition(prog.Fset(), node.Pos())
-						var ig Ignore
-						switch cmd {
-						case "ignore":
-							ig = &LineIgnore{
-								File:   pos.Filename,
-								Line:   pos.Line,
-								Checks: checks,
-								pos:    c.Pos(),
-							}
-						case "file-ignore":
-							ig = &FileIgnore{
-								File:   pos.Filename,
-								Checks: checks,
-							}
-						}
-						l.automaticIgnores = append(l.automaticIgnores, ig)
+				p := Problem{
+					Pos:      pos,
+					Message:  msg,
+					Severity: Error,
+					Check:    "compile",
+				}
+				pkg.problems = append(pkg.problems, p)
+			case scanner.ErrorList:
+				for _, e := range e {
+					p := Problem{
+						Pos:      e.Pos,
+						Message:  e.Msg,
+						Severity: Error,
+						Check:    "compile",
 					}
+					pkg.problems = append(pkg.problems, p)
 				}
+			case error:
+				p := Problem{
+					Pos:      token.Position{},
+					Message:  e.Error(),
+					Severity: Error,
+					Check:    "compile",
+				}
+				pkg.problems = append(pkg.problems, p)
 			}
 		}
 	}
 
-	if stats != nil {
-		stats.OtherInitWork = time.Since(t)
-	}
-
-	for _, checker := range l.Checkers {
-		t := time.Now()
-		checker.Init(prog)
-		if stats != nil {
-			stats.CheckerInits[checker.Name()] = time.Since(t)
-		}
-	}
-
-	var jobs []*Job
-	var allChecks []string
-
-	var wg sync.WaitGroup
-	for _, checker := range l.Checkers {
-		for _, check := range checker.Checks() {
-			allChecks = append(allChecks, check.ID)
-			if check.Fn == nil {
-				continue
-			}
-			for _, pkg := range pkgs {
-				j := &Job{
-					Pkg:       pkg,
-					check:     check,
-					GoVersion: l.GoVersion,
+	atomic.StoreUint32(&r.stats.State, StateCumulative)
+	var problems []Problem
+	for _, cum := range l.CumulativeCheckers {
+		for _, res := range cum.Result() {
+			pkg := tpkgToPkg[res.Pkg()]
+			allowedChecks := FilterChecks(allowedAnalyzers, pkg.cfg.Merge(l.Config).Checks)
+			if allowedChecks[cum.Analyzer().Name] {
+				pos := DisplayPosition(pkg.Fset, res.Pos())
+				// FIXME(dh): why are we ignoring generated files
+				// here? Surely this is specific to 'unused', not all
+				// cumulative checkers
+				if _, ok := pkg.gen[pos.Filename]; ok {
+					continue
 				}
-				jobs = append(jobs, j)
-				wg.Add(1)
-				go func(check Check, j *Job) {
-					t := time.Now()
-					check.Fn(j)
-					j.duration = time.Since(t)
-					wg.Done()
-				}(check, j)
+				p := cum.ProblemObject(pkg.Fset, res)
+				problems = append(problems, p)
 			}
 		}
 	}
 
-	wg.Wait()
-	for _, j := range jobs {
-		if stats != nil {
-			stats.Jobs = append(stats.Jobs, JobStat{j.check.ID, j.duration})
-		}
-		for _, p := range j.problems {
-			if p.Package == nil {
-				panic(fmt.Sprintf("internal error: problem at position %s has nil package", p.Position))
+	for _, pkg := range pkgs {
+		for _, ig := range pkg.ignores {
+			for i := range pkg.problems {
+				p := &pkg.problems[i]
+				if ig.Match(*p) {
+					p.Severity = Ignored
+				}
 			}
-			allowedChecks := FilterChecks(allChecks, p.Package.Config.Checks)
-
-			if l.ignore(p) {
-				p.Severity = Ignored
-			}
-			// TODO(dh): support globs in check white/blacklist
-			// OPT(dh): this approach doesn't actually disable checks,
-			// it just discards their results. For the moment, that's
-			// fine. None of our checks are super expensive. In the
-			// future, we may want to provide opt-in expensive
-			// analysis, which shouldn't run at all. It may be easiest
-			// to implement this in the individual checks.
-			if (l.ReturnIgnored || p.Severity != Ignored) && allowedChecks[p.Check] {
-				out = append(out, p)
+			for i := range problems {
+				p := &problems[i]
+				if ig.Match(*p) {
+					p.Severity = Ignored
+				}
 			}
 		}
-	}
 
-	for _, ig := range l.automaticIgnores {
-		ig, ok := ig.(*LineIgnore)
-		if !ok {
-			continue
-		}
-		if ig.matched {
-			continue
+		if pkg.cfg == nil {
+			// The package failed to load, otherwise we would have a
+			// valid config. Pass through all errors.
+			problems = append(problems, pkg.problems...)
+		} else {
+			for _, p := range pkg.problems {
+				allowedChecks := FilterChecks(allowedAnalyzers, pkg.cfg.Merge(l.Config).Checks)
+				allowedChecks["compile"] = true
+				if allowedChecks[p.Check] {
+					problems = append(problems, p)
+				}
+			}
 		}
 
-		couldveMatched := false
-		for _, pkg := range pkgs {
-			for _, f := range pkg.tokenFileMap {
-				if prog.Fset().Position(f.Pos()).Filename != ig.File {
-					continue
-				}
-				allowedChecks := FilterChecks(allChecks, pkg.Config.Checks)
-				for _, c := range ig.Checks {
-					if !allowedChecks[c] {
-						continue
-					}
-					couldveMatched = true
-					break
+		for _, ig := range pkg.ignores {
+			ig, ok := ig.(*LineIgnore)
+			if !ok {
+				continue
+			}
+			if ig.Matched {
+				continue
+			}
+
+			couldveMatched := false
+			allowedChecks := FilterChecks(allowedAnalyzers, pkg.cfg.Merge(l.Config).Checks)
+			for _, c := range ig.Checks {
+				if !allowedChecks[c] {
+					continue
 				}
+				couldveMatched = true
 				break
 			}
-		}
 
-		if !couldveMatched {
-			// The ignored checks were disabled for the containing package.
-			// Don't flag the ignore for not having matched.
-			continue
-		}
-		p := Problem{
-			Position: DisplayPosition(prog.Fset(), ig.pos),
-			Text:     "this linter directive didn't match anything; should it be removed?",
-			Check:    "",
-			Package:  nil,
+			if !couldveMatched {
+				// The ignored checks were disabled for the containing package.
+				// Don't flag the ignore for not having matched.
+				continue
+			}
+			p := Problem{
+				Pos:     DisplayPosition(pkg.Fset, ig.Pos),
+				Message: "this linter directive didn't match anything; should it be removed?",
+				Check:   "",
+			}
+			problems = append(problems, p)
 		}
-		out = append(out, p)
 	}
 
-	sort.Slice(out, func(i int, j int) bool {
-		pi, pj := out[i].Position, out[j].Position
+	if len(problems) == 0 {
+		return nil, nil
+	}
+
+	sort.Slice(problems, func(i, j int) bool {
+		pi := problems[i].Pos
+		pj := problems[j].Pos
+
 		if pi.Filename != pj.Filename {
 			return pi.Filename < pj.Filename
@@ -503,32 +364,22 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem {
 		return pi.Column < pj.Column
 	}
 
-		return out[i].Text < out[j].Text
+		return problems[i].Message < problems[j].Message
 	})
 
-	if l.PrintStats && stats != nil {
-		stats.Print(os.Stderr)
-	}
-
-	if len(out) < 2 {
-		return out
-	}
-
-	uniq := make([]Problem, 0, len(out))
-	uniq = append(uniq, out[0])
-	prev := out[0]
-	for _, p := range out[1:] {
-		if prev.Position == p.Position && prev.Text == p.Text {
-			continue
+	var out []Problem
+	out = append(out, problems[0])
+	for i, p := range problems[1:] {
+		// We may encounter duplicate problems because one file
+		// can be part of many packages.
+		if problems[i] != p {
+			out = append(out, p)
 		}
-		prev = p
-		uniq = append(uniq, p)
 	}
-
-	return uniq
+	return out, nil
 }
 
-func FilterChecks(allChecks []string, checks []string) map[string]bool {
+func FilterChecks(allChecks []*analysis.Analyzer, checks []string) map[string]bool {
 	// OPT(dh): this entire computation could be cached per package
 	allowedChecks := map[string]bool{}
 
@@ -541,7 +392,7 @@ func FilterChecks(allChecks []string, checks []string) map[string]bool {
 		if check == "*" || check == "all" {
 			// Match all
 			for _, c := range allChecks {
-				allowedChecks[c] = b
+				allowedChecks[c.Name] = b
 			}
 		} else if strings.HasSuffix(check, "*") {
 			// Glob
@@ -549,17 +400,17 @@ func FilterChecks(allChecks []string, checks []string) map[string]bool {
 			isCat := strings.IndexFunc(prefix, func(r rune) bool { return unicode.IsNumber(r) }) == -1
 
 			for _, c := range allChecks {
-				idx := strings.IndexFunc(c, func(r rune) bool { return unicode.IsNumber(r) })
+				idx := strings.IndexFunc(c.Name, func(r rune) bool { return unicode.IsNumber(r) })
 				if isCat {
 					// Glob is S*, which should match S1000 but not SA1000
-					cat := c[:idx]
+					cat := c.Name[:idx]
 					if prefix == cat {
-						allowedChecks[c] = b
+						allowedChecks[c.Name] = b
 					}
 				} else {
 					// Glob is S1*
-					if strings.HasPrefix(c, prefix) {
+					if strings.HasPrefix(c.Name, prefix) {
-						allowedChecks[c] = b
+						allowedChecks[c.Name] = b
 					}
 				}
 			}
@@ -571,28 +422,18 @@ func FilterChecks(allChecks []string, checks []string) map[string]bool {
 	return allowedChecks
 }
 
-// Pkg represents a package being linted.
-type Pkg struct {
-	SSA              *ssa.Package
-	InitialFunctions []*ssa.Function
-	*packages.Package
-	Config    config.Config
-	Inspector *inspector.Inspector
-	// TODO(dh): this map should probably map from *ast.File, not string
-	Generated map[string]bool
-
-	tokenFileMap map[*token.File]*ast.File
-}
-
 type Positioner interface {
 	Pos() token.Pos
 }
 
 func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position {
+	if p == token.NoPos {
+		return token.Position{}
+	}
+
 	// Only use the adjusted position if it points to another Go file.
 	// This means we'll point to the original file for cgo files, but
 	// we won't point to a YACC grammar file.
 
 	pos := fset.PositionFor(p, false)
 	adjPos := fset.PositionFor(p, true)
 
@@ -602,34 +443,6 @@ func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position {
 	return pos
 }
 
-func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem {
-	pos := DisplayPosition(j.Pkg.Fset, n.Pos())
-	if j.Pkg.Generated[pos.Filename] && j.check.FilterGenerated {
-		return nil
-	}
-	problem := Problem{
-		Position: pos,
-		Text:     fmt.Sprintf(format, args...),
-		Check:    j.check.ID,
-		Package:  j.Pkg,
-	}
-	j.problems = append(j.problems, problem)
-	return &j.problems[len(j.problems)-1]
-}
-
-func allPackages(pkgs []*packages.Package) []*packages.Package {
-	var out []*packages.Package
-	packages.Visit(
-		pkgs,
-		func(pkg *packages.Package) bool {
-			out = append(out, pkg)
-			return true
-		},
-		nil,
-	)
-	return out
-}
-
 var bufferPool = &sync.Pool{
 	New: func() interface{} {
 		buf := bytes.NewBuffer(nil)
@@ -670,8 +483,7 @@ func writePackage(buf *bytes.Buffer, pkg *types.Package) {
 	if pkg == nil {
 		return
 	}
-	var s string
-	s = pkg.Path()
+	s := pkg.Path()
 	if s != "" {
 		buf.WriteString(s)
 		buf.WriteByte('.')
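For reference, a small sketch of how the glob handling in FilterChecks above behaves. The helper function and the analyzer names are invented for the example; only FilterChecks from this file and the analysis.Analyzer type are assumed.

package lint

import (
	"fmt"

	"golang.org/x/tools/go/analysis"
)

// exampleFilterChecks is illustrative only and not part of the vendored file.
func exampleFilterChecks() {
	analyzers := []*analysis.Analyzer{{Name: "S1000"}, {Name: "SA1000"}}
	allowed := FilterChecks(analyzers, []string{"S*"})
	// Per the comment in FilterChecks, the category glob "S*" matches
	// S1000 but not SA1000.
	fmt.Println(allowed["S1000"], allowed["SA1000"]) // true false
}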
121 vendor/honnef.co/go/tools/lint/lintdsl/lintdsl.go vendored
@ -4,6 +4,7 @@ package lintdsl
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"flag"
|
||||||
"fmt"
|
"fmt"
|
||||||
"go/ast"
|
"go/ast"
|
||||||
"go/constant"
|
"go/constant"
|
||||||
|
@ -12,6 +13,8 @@ import (
|
||||||
"go/types"
|
"go/types"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/analysis"
|
||||||
|
"honnef.co/go/tools/facts"
|
||||||
"honnef.co/go/tools/lint"
|
"honnef.co/go/tools/lint"
|
||||||
"honnef.co/go/tools/ssa"
|
"honnef.co/go/tools/ssa"
|
||||||
)
|
)
|
||||||
|
@ -71,16 +74,6 @@ func IsPointerLike(T types.Type) bool {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
func IsGenerated(f *ast.File) bool {
|
|
||||||
comments := f.Comments
|
|
||||||
if len(comments) > 0 {
|
|
||||||
comment := comments[0].Text()
|
|
||||||
return strings.Contains(comment, "Code generated by") ||
|
|
||||||
strings.Contains(comment, "DO NOT EDIT")
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func IsIdent(expr ast.Expr, ident string) bool {
|
func IsIdent(expr ast.Expr, ident string) bool {
|
||||||
id, ok := expr.(*ast.Ident)
|
id, ok := expr.(*ast.Ident)
|
||||||
return ok && id.Name == ident
|
return ok && id.Name == ident
|
||||||
|
@ -103,26 +96,26 @@ func IsZero(expr ast.Expr) bool {
|
||||||
return IsIntLiteral(expr, "0")
|
return IsIntLiteral(expr, "0")
|
||||||
}
|
}
|
||||||
|
|
||||||
func IsOfType(j *lint.Job, expr ast.Expr, name string) bool {
|
func IsOfType(pass *analysis.Pass, expr ast.Expr, name string) bool {
|
||||||
return IsType(j.Pkg.TypesInfo.TypeOf(expr), name)
|
return IsType(pass.TypesInfo.TypeOf(expr), name)
|
||||||
}
|
}
|
||||||
|
|
||||||
func IsInTest(j *lint.Job, node lint.Positioner) bool {
|
func IsInTest(pass *analysis.Pass, node lint.Positioner) bool {
|
||||||
// FIXME(dh): this doesn't work for global variables with
|
// FIXME(dh): this doesn't work for global variables with
|
||||||
// initializers
|
// initializers
|
||||||
f := j.Pkg.Fset.File(node.Pos())
|
f := pass.Fset.File(node.Pos())
|
||||||
return f != nil && strings.HasSuffix(f.Name(), "_test.go")
|
return f != nil && strings.HasSuffix(f.Name(), "_test.go")
|
||||||
}
|
}
|
||||||
|
|
||||||
func IsInMain(j *lint.Job, node lint.Positioner) bool {
|
func IsInMain(pass *analysis.Pass, node lint.Positioner) bool {
|
||||||
if node, ok := node.(packager); ok {
|
if node, ok := node.(packager); ok {
|
||||||
return node.Package().Pkg.Name() == "main"
|
return node.Package().Pkg.Name() == "main"
|
||||||
}
|
}
|
||||||
return j.Pkg.Types.Name() == "main"
|
return pass.Pkg.Name() == "main"
|
||||||
}
|
}
|
||||||
|
|
||||||
func SelectorName(j *lint.Job, expr *ast.SelectorExpr) string {
|
func SelectorName(pass *analysis.Pass, expr *ast.SelectorExpr) string {
|
||||||
info := j.Pkg.TypesInfo
|
info := pass.TypesInfo
|
||||||
sel := info.Selections[expr]
|
sel := info.Selections[expr]
|
||||||
if sel == nil {
|
if sel == nil {
|
||||||
if x, ok := expr.X.(*ast.Ident); ok {
|
if x, ok := expr.X.(*ast.Ident); ok {
|
||||||
|
@ -138,16 +131,16 @@ func SelectorName(j *lint.Job, expr *ast.SelectorExpr) string {
|
||||||
return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name())
|
return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name())
|
||||||
}
|
}
|
||||||
|
|
||||||
func IsNil(j *lint.Job, expr ast.Expr) bool {
|
func IsNil(pass *analysis.Pass, expr ast.Expr) bool {
|
||||||
return j.Pkg.TypesInfo.Types[expr].IsNil()
|
return pass.TypesInfo.Types[expr].IsNil()
|
||||||
}
|
}
|
||||||
|
|
||||||
func BoolConst(j *lint.Job, expr ast.Expr) bool {
|
func BoolConst(pass *analysis.Pass, expr ast.Expr) bool {
|
||||||
val := j.Pkg.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val()
|
val := pass.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val()
|
||||||
return constant.BoolVal(val)
|
return constant.BoolVal(val)
|
||||||
}
|
}
|
||||||
|
|
||||||
func IsBoolConst(j *lint.Job, expr ast.Expr) bool {
|
func IsBoolConst(pass *analysis.Pass, expr ast.Expr) bool {
|
||||||
// We explicitly don't support typed bools because more often than
|
// We explicitly don't support typed bools because more often than
|
||||||
// not, custom bool types are used as binary enums and the
|
// not, custom bool types are used as binary enums and the
|
||||||
// explicit comparison is desired.
|
// explicit comparison is desired.
|
||||||
|
@ -156,7 +149,7 @@ func IsBoolConst(j *lint.Job, expr ast.Expr) bool {
|
||||||
if !ok {
|
if !ok {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
obj := j.Pkg.TypesInfo.ObjectOf(ident)
|
obj := pass.TypesInfo.ObjectOf(ident)
|
||||||
c, ok := obj.(*types.Const)
|
c, ok := obj.(*types.Const)
|
||||||
if !ok {
|
if !ok {
|
||||||
return false
|
return false
|
||||||
|
@ -171,8 +164,8 @@ func IsBoolConst(j *lint.Job, expr ast.Expr) bool {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
func ExprToInt(j *lint.Job, expr ast.Expr) (int64, bool) {
|
func ExprToInt(pass *analysis.Pass, expr ast.Expr) (int64, bool) {
|
||||||
tv := j.Pkg.TypesInfo.Types[expr]
|
tv := pass.TypesInfo.Types[expr]
|
||||||
if tv.Value == nil {
|
if tv.Value == nil {
|
||||||
return 0, false
|
return 0, false
|
||||||
}
|
}
|
||||||
|
@ -182,8 +175,8 @@ func ExprToInt(j *lint.Job, expr ast.Expr) (int64, bool) {
|
||||||
return constant.Int64Val(tv.Value)
|
return constant.Int64Val(tv.Value)
|
||||||
}
|
}
|
||||||
|
|
||||||
func ExprToString(j *lint.Job, expr ast.Expr) (string, bool) {
|
func ExprToString(pass *analysis.Pass, expr ast.Expr) (string, bool) {
|
||||||
val := j.Pkg.TypesInfo.Types[expr].Value
|
val := pass.TypesInfo.Types[expr].Value
|
||||||
if val == nil {
|
if val == nil {
|
||||||
return "", false
|
return "", false
|
||||||
}
|
}
|
||||||
|
@ -212,20 +205,21 @@ func DereferenceR(T types.Type) types.Type {
|
||||||
return T
|
return T
|
||||||
}
|
}
|
||||||
|
|
||||||
func IsGoVersion(j *lint.Job, minor int) bool {
|
func IsGoVersion(pass *analysis.Pass, minor int) bool {
|
||||||
return j.GoVersion >= minor
|
version := pass.Analyzer.Flags.Lookup("go").Value.(flag.Getter).Get().(int)
|
||||||
|
return version >= minor
|
||||||
}
|
}
|
||||||
|
|
||||||
func CallNameAST(j *lint.Job, call *ast.CallExpr) string {
|
func CallNameAST(pass *analysis.Pass, call *ast.CallExpr) string {
|
||||||
switch fun := call.Fun.(type) {
|
switch fun := call.Fun.(type) {
|
||||||
case *ast.SelectorExpr:
|
case *ast.SelectorExpr:
|
||||||
fn, ok := j.Pkg.TypesInfo.ObjectOf(fun.Sel).(*types.Func)
|
fn, ok := pass.TypesInfo.ObjectOf(fun.Sel).(*types.Func)
|
||||||
if !ok {
|
if !ok {
|
||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
return lint.FuncName(fn)
|
return lint.FuncName(fn)
|
||||||
case *ast.Ident:
|
case *ast.Ident:
|
||||||
obj := j.Pkg.TypesInfo.ObjectOf(fun)
|
obj := pass.TypesInfo.ObjectOf(fun)
|
||||||
switch obj := obj.(type) {
|
switch obj := obj.(type) {
|
||||||
case *types.Func:
|
case *types.Func:
|
||||||
return lint.FuncName(obj)
|
return lint.FuncName(obj)
|
||||||
|
@ -239,35 +233,35 @@ func CallNameAST(j *lint.Job, call *ast.CallExpr) string {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func IsCallToAST(j *lint.Job, node ast.Node, name string) bool {
|
func IsCallToAST(pass *analysis.Pass, node ast.Node, name string) bool {
|
||||||
call, ok := node.(*ast.CallExpr)
|
call, ok := node.(*ast.CallExpr)
|
||||||
if !ok {
|
if !ok {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
return CallNameAST(j, call) == name
|
return CallNameAST(pass, call) == name
|
||||||
}
|
}
|
||||||
|
|
||||||
func IsCallToAnyAST(j *lint.Job, node ast.Node, names ...string) bool {
|
func IsCallToAnyAST(pass *analysis.Pass, node ast.Node, names ...string) bool {
|
||||||
for _, name := range names {
|
for _, name := range names {
|
||||||
if IsCallToAST(j, node, name) {
|
if IsCallToAST(pass, node, name) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
func Render(j *lint.Job, x interface{}) string {
|
func Render(pass *analysis.Pass, x interface{}) string {
|
||||||
var buf bytes.Buffer
|
var buf bytes.Buffer
|
||||||
if err := printer.Fprint(&buf, j.Pkg.Fset, x); err != nil {
|
if err := printer.Fprint(&buf, pass.Fset, x); err != nil {
|
||||||
panic(err)
|
panic(err)
|
||||||
}
|
}
|
||||||
return buf.String()
|
return buf.String()
|
||||||
}
|
}
|
||||||
|
|
||||||
func RenderArgs(j *lint.Job, args []ast.Expr) string {
|
func RenderArgs(pass *analysis.Pass, args []ast.Expr) string {
|
||||||
var ss []string
|
var ss []string
|
||||||
for _, arg := range args {
|
for _, arg := range args {
|
||||||
ss = append(ss, Render(j, arg))
|
ss = append(ss, Render(pass, arg))
|
||||||
}
|
}
|
||||||
return strings.Join(ss, ", ")
|
return strings.Join(ss, ", ")
|
||||||
}
|
}
|
||||||
@@ -359,3 +353,48 @@ func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Fiel
 	}
 	return out
 }
+
+func File(pass *analysis.Pass, node lint.Positioner) *ast.File {
+	pass.Fset.PositionFor(node.Pos(), true)
+	m := pass.ResultOf[facts.TokenFile].(map[*token.File]*ast.File)
+	return m[pass.Fset.File(node.Pos())]
+}
+
+// IsGenerated reports whether pos is in a generated file, It ignores
+// //line directives.
+func IsGenerated(pass *analysis.Pass, pos token.Pos) bool {
+	_, ok := Generator(pass, pos)
+	return ok
+}
+
+// Generator returns the generator that generated the file containing
+// pos. It ignores //line directives.
+func Generator(pass *analysis.Pass, pos token.Pos) (facts.Generator, bool) {
+	file := pass.Fset.PositionFor(pos, false).Filename
+	m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
+	g, ok := m[file]
+	return g, ok
+}
+
+func ReportfFG(pass *analysis.Pass, pos token.Pos, f string, args ...interface{}) {
+	file := lint.DisplayPosition(pass.Fset, pos).Filename
+	m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
+	if _, ok := m[file]; ok {
+		return
+	}
+	pass.Reportf(pos, f, args...)
+}
+
+func ReportNodef(pass *analysis.Pass, node ast.Node, format string, args ...interface{}) {
+	msg := fmt.Sprintf(format, args...)
+	pass.Report(analysis.Diagnostic{Pos: node.Pos(), End: node.End(), Message: msg})
+}
+
+func ReportNodefFG(pass *analysis.Pass, node ast.Node, format string, args ...interface{}) {
+	file := lint.DisplayPosition(pass.Fset, node.Pos()).Filename
+	m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
+	if _, ok := m[file]; ok {
+		return
+	}
+	ReportNodef(pass, node, format, args...)
+}
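The new helpers above give go/analysis-based checks a way to render nodes and to suppress diagnostics in generated files. A rough sketch of how they combine, not part of this commit: the package name, the check on fmt.Sprint and the message are illustrative, and an analyzer using this run function would have to list facts.Generated among its Requires.

package lintdsl

import (
	"go/ast"

	"golang.org/x/tools/go/analysis"
)

// run flags calls to fmt.Sprint, but ReportNodefFG keeps the diagnostic
// out of generated files (it consults the facts.Generated result).
func run(pass *analysis.Pass) (interface{}, error) {
	for _, f := range pass.Files {
		ast.Inspect(f, func(n ast.Node) bool {
			call, ok := n.(*ast.CallExpr)
			if !ok {
				return true
			}
			if IsCallToAST(pass, call, "fmt.Sprint") {
				ReportNodefFG(pass, call, "unnecessary use of %s", Render(pass, call.Fun))
			}
			return true
		})
	}
	return nil, nil
}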
@@ -51,7 +51,7 @@ type Text struct {
 }

 func (o Text) Format(p lint.Problem) {
-	fmt.Fprintf(o.W, "%v: %s\n", relativePositionString(p.Position), p.String())
+	fmt.Fprintf(o.W, "%v: %s\n", relativePositionString(p.Pos), p.String())
 }

 type JSON struct {
@@ -80,16 +80,22 @@ func (o JSON) Format(p lint.Problem) {
 		Code     string   `json:"code"`
 		Severity string   `json:"severity,omitempty"`
 		Location location `json:"location"`
+		End      location `json:"end"`
 		Message  string   `json:"message"`
 	}{
 		Code:     p.Check,
 		Severity: severity(p.Severity),
 		Location: location{
-			File:   p.Position.Filename,
-			Line:   p.Position.Line,
-			Column: p.Position.Column,
+			File:   p.Pos.Filename,
+			Line:   p.Pos.Line,
+			Column: p.Pos.Column,
 		},
-		Message: p.Text,
+		End: location{
+			File:   p.End.Filename,
+			Line:   p.End.Line,
+			Column: p.End.Column,
+		},
+		Message: p.Message,
 	}
 	_ = json.NewEncoder(o.W).Encode(jp)
 }
@@ -102,20 +108,21 @@ type Stylish struct {
 }

 func (o *Stylish) Format(p lint.Problem) {
-	if p.Position.Filename == "" {
-		p.Position.Filename = "-"
+	pos := p.Pos
+	if pos.Filename == "" {
+		pos.Filename = "-"
 	}

-	if p.Position.Filename != o.prevFile {
+	if pos.Filename != o.prevFile {
 		if o.prevFile != "" {
 			o.tw.Flush()
 			fmt.Fprintln(o.W)
 		}
-		fmt.Fprintln(o.W, p.Position.Filename)
-		o.prevFile = p.Position.Filename
+		fmt.Fprintln(o.W, pos.Filename)
+		o.prevFile = pos.Filename
 		o.tw = tabwriter.NewWriter(o.W, 0, 4, 2, ' ', 0)
 	}
-	fmt.Fprintf(o.tw, " (%d, %d)\t%s\t%s\n", p.Position.Line, p.Position.Column, p.Check, p.Text)
+	fmt.Fprintf(o.tw, " (%d, %d)\t%s\t%s\n", pos.Line, pos.Column, p.Check, p.Message)
 }

 func (o *Stylish) Stats(total, errors, warnings int) {
7 vendor/honnef.co/go/tools/lint/lintutil/stats.go vendored Normal file
@@ -0,0 +1,7 @@
+// +build !aix,!android,!darwin,!dragonfly,!freebsd,!linux,!netbsd,!openbsd,!solaris
+
+package lintutil
+
+import "os"
+
+var infoSignals = []os.Signal{}

10 vendor/honnef.co/go/tools/lint/lintutil/stats_bsd.go vendored Normal file
@@ -0,0 +1,10 @@
+// +build darwin dragonfly freebsd netbsd openbsd
+
+package lintutil
+
+import (
+	"os"
+	"syscall"
+)
+
+var infoSignals = []os.Signal{syscall.SIGINFO}

10 vendor/honnef.co/go/tools/lint/lintutil/stats_posix.go vendored Normal file
@@ -0,0 +1,10 @@
+// +build aix android linux solaris
+
+package lintutil
+
+import (
+	"os"
+	"syscall"
+)
+
+var infoSignals = []os.Signal{syscall.SIGUSR1}
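The three files above select a per-platform list of "print progress" signals with build tags: SIGINFO on Darwin and the BSDs, SIGUSR1 on aix/android/linux/solaris, and an empty list elsewhere. A minimal sketch of the consuming side, along the lines of what util.go does further down in this commit (the function name is invented):

package lintutil

import (
	"os"
	"os/signal"
)

// watchInfoSignals calls printStats whenever one of the platform's
// info signals arrives; on platforms with an empty infoSignals list it
// simply does nothing.
func watchInfoSignals(printStats func()) {
	if len(infoSignals) == 0 {
		return
	}
	ch := make(chan os.Signal, 1)
	signal.Notify(ch, infoSignals...)
	go func() {
		for range ch {
			printStats()
		}
	}()
}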
332 vendor/honnef.co/go/tools/lint/lintutil/util.go vendored
@@ -8,29 +8,70 @@
 package lintutil // import "honnef.co/go/tools/lint/lintutil"

 import (
+	"crypto/sha256"
 	"errors"
 	"flag"
 	"fmt"
 	"go/build"
 	"go/token"
+	"io"
 	"log"
 	"os"
+	"os/signal"
 	"regexp"
 	"runtime"
-	"runtime/debug"
 	"runtime/pprof"
 	"strconv"
 	"strings"
-	"time"
+	"sync/atomic"

 	"honnef.co/go/tools/config"
+	"honnef.co/go/tools/internal/cache"
 	"honnef.co/go/tools/lint"
 	"honnef.co/go/tools/lint/lintutil/format"
 	"honnef.co/go/tools/version"

+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/buildutil"
 	"golang.org/x/tools/go/packages"
 )

+func NewVersionFlag() flag.Getter {
+	tags := build.Default.ReleaseTags
+	v := tags[len(tags)-1][2:]
+	version := new(VersionFlag)
+	if err := version.Set(v); err != nil {
+		panic(fmt.Sprintf("internal error: %s", err))
+	}
+	return version
+}
+
+type VersionFlag int
+
+func (v *VersionFlag) String() string {
+	return fmt.Sprintf("1.%d", *v)
+
+}
+
+func (v *VersionFlag) Set(s string) error {
+	if len(s) < 3 {
+		return errors.New("invalid Go version")
+	}
+	if s[0] != '1' {
+		return errors.New("invalid Go version")
+	}
+	if s[1] != '.' {
+		return errors.New("invalid Go version")
+	}
+	i, err := strconv.Atoi(s[2:])
+	*v = VersionFlag(i)
+	return err
+}
+
+func (v *VersionFlag) Get() interface{} {
+	return int(*v)
+}
+
 func usage(name string, flags *flag.FlagSet) func() {
 	return func() {
 		fmt.Fprintf(os.Stderr, "Usage of %s:\n", name)
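For reference, the exported VersionFlag added above stores a Go minor version and round-trips the usual "1.N" spelling; an illustration of Set, String and Get:

//	var v lintutil.VersionFlag
//	_ = v.Set("1.13")    // stores 13; "2.0" or "13" would return an error
//	_ = v.String()       // "1.13"
//	_ = v.Get().(int)    // 13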
@ -43,48 +84,6 @@ func usage(name string, flags *flag.FlagSet) func() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func parseIgnore(s string) ([]lint.Ignore, error) {
|
|
||||||
var out []lint.Ignore
|
|
||||||
if len(s) == 0 {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
for _, part := range strings.Fields(s) {
|
|
||||||
p := strings.Split(part, ":")
|
|
||||||
if len(p) != 2 {
|
|
||||||
return nil, errors.New("malformed ignore string")
|
|
||||||
}
|
|
||||||
path := p[0]
|
|
||||||
checks := strings.Split(p[1], ",")
|
|
||||||
out = append(out, &lint.GlobIgnore{Pattern: path, Checks: checks})
|
|
||||||
}
|
|
||||||
return out, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type versionFlag int
|
|
||||||
|
|
||||||
func (v *versionFlag) String() string {
|
|
||||||
return fmt.Sprintf("1.%d", *v)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v *versionFlag) Set(s string) error {
|
|
||||||
if len(s) < 3 {
|
|
||||||
return errors.New("invalid Go version")
|
|
||||||
}
|
|
||||||
if s[0] != '1' {
|
|
||||||
return errors.New("invalid Go version")
|
|
||||||
}
|
|
||||||
if s[1] != '.' {
|
|
||||||
return errors.New("invalid Go version")
|
|
||||||
}
|
|
||||||
i, err := strconv.Atoi(s[2:])
|
|
||||||
*v = versionFlag(i)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v *versionFlag) Get() interface{} {
|
|
||||||
return int(*v)
|
|
||||||
}
|
|
||||||
|
|
||||||
type list []string
|
type list []string
|
||||||
|
|
||||||
func (list *list) String() string {
|
func (list *list) String() string {
|
||||||
|
@ -105,17 +104,16 @@ func FlagSet(name string) *flag.FlagSet {
|
||||||
flags := flag.NewFlagSet("", flag.ExitOnError)
|
flags := flag.NewFlagSet("", flag.ExitOnError)
|
||||||
flags.Usage = usage(name, flags)
|
flags.Usage = usage(name, flags)
|
||||||
flags.String("tags", "", "List of `build tags`")
|
flags.String("tags", "", "List of `build tags`")
|
||||||
flags.String("ignore", "", "Deprecated: use linter directives instead")
|
|
||||||
flags.Bool("tests", true, "Include tests")
|
flags.Bool("tests", true, "Include tests")
|
||||||
flags.Bool("version", false, "Print version and exit")
|
flags.Bool("version", false, "Print version and exit")
|
||||||
flags.Bool("show-ignored", false, "Don't filter ignored problems")
|
flags.Bool("show-ignored", false, "Don't filter ignored problems")
|
||||||
flags.String("f", "text", "Output `format` (valid choices are 'stylish', 'text' and 'json')")
|
flags.String("f", "text", "Output `format` (valid choices are 'stylish', 'text' and 'json')")
|
||||||
flags.String("explain", "", "Print description of `check`")
|
flags.String("explain", "", "Print description of `check`")
|
||||||
|
|
||||||
flags.Int("debug.max-concurrent-jobs", 0, "Number of jobs to run concurrently")
|
|
||||||
flags.Bool("debug.print-stats", false, "Print debug statistics")
|
|
||||||
flags.String("debug.cpuprofile", "", "Write CPU profile to `file`")
|
flags.String("debug.cpuprofile", "", "Write CPU profile to `file`")
|
||||||
flags.String("debug.memprofile", "", "Write memory profile to `file`")
|
flags.String("debug.memprofile", "", "Write memory profile to `file`")
|
||||||
|
flags.Bool("debug.version", false, "Print detailed version information about this program")
|
||||||
|
flags.Bool("debug.no-compile-errors", false, "Don't print compile errors")
|
||||||
|
|
||||||
checks := list{"inherit"}
|
checks := list{"inherit"}
|
||||||
fail := list{"all"}
|
fail := list{"all"}
|
||||||
|
@ -124,7 +122,7 @@ func FlagSet(name string) *flag.FlagSet {
|
||||||
|
|
||||||
tags := build.Default.ReleaseTags
|
tags := build.Default.ReleaseTags
|
||||||
v := tags[len(tags)-1][2:]
|
v := tags[len(tags)-1][2:]
|
||||||
version := new(versionFlag)
|
version := new(VersionFlag)
|
||||||
if err := version.Set(v); err != nil {
|
if err := version.Set(v); err != nil {
|
||||||
panic(fmt.Sprintf("internal error: %s", err))
|
panic(fmt.Sprintf("internal error: %s", err))
|
||||||
}
|
}
|
||||||
|
@ -133,24 +131,17 @@ func FlagSet(name string) *flag.FlagSet {
|
||||||
return flags
|
return flags
|
||||||
}
|
}
|
||||||
|
|
||||||
func findCheck(cs []lint.Checker, check string) (lint.Check, bool) {
|
func findCheck(cs []*analysis.Analyzer, check string) (*analysis.Analyzer, bool) {
|
||||||
for _, c := range cs {
|
for _, c := range cs {
|
||||||
for _, cc := range c.Checks() {
|
if c.Name == check {
|
||||||
if cc.ID == check {
|
return c, true
|
||||||
return cc, true
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return lint.Check{}, false
|
return nil, false
|
||||||
}
|
}
|
||||||
|
|
||||||
func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) {
|
func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *flag.FlagSet) {
|
||||||
if _, ok := os.LookupEnv("GOGC"); !ok {
|
|
||||||
debug.SetGCPercent(50)
|
|
||||||
}
|
|
||||||
|
|
||||||
tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string)
|
tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string)
|
||||||
ignore := fs.Lookup("ignore").Value.(flag.Getter).Get().(string)
|
|
||||||
tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool)
|
tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool)
|
||||||
goVersion := fs.Lookup("go").Value.(flag.Getter).Get().(int)
|
goVersion := fs.Lookup("go").Value.(flag.Getter).Get().(int)
|
||||||
formatter := fs.Lookup("f").Value.(flag.Getter).Get().(string)
|
formatter := fs.Lookup("f").Value.(flag.Getter).Get().(string)
|
||||||
|
@ -158,10 +149,10 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) {
|
||||||
showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool)
|
showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool)
|
||||||
explain := fs.Lookup("explain").Value.(flag.Getter).Get().(string)
|
explain := fs.Lookup("explain").Value.(flag.Getter).Get().(string)
|
||||||
|
|
||||||
maxConcurrentJobs := fs.Lookup("debug.max-concurrent-jobs").Value.(flag.Getter).Get().(int)
|
|
||||||
printStats := fs.Lookup("debug.print-stats").Value.(flag.Getter).Get().(bool)
|
|
||||||
cpuProfile := fs.Lookup("debug.cpuprofile").Value.(flag.Getter).Get().(string)
|
cpuProfile := fs.Lookup("debug.cpuprofile").Value.(flag.Getter).Get().(string)
|
||||||
memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string)
|
memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string)
|
||||||
|
debugVersion := fs.Lookup("debug.version").Value.(flag.Getter).Get().(bool)
|
||||||
|
debugNoCompile := fs.Lookup("debug.no-compile-errors").Value.(flag.Getter).Get().(bool)
|
||||||
|
|
||||||
cfg := config.Config{}
|
cfg := config.Config{}
|
||||||
cfg.Checks = *fs.Lookup("checks").Value.(*list)
|
cfg.Checks = *fs.Lookup("checks").Value.(*list)
|
||||||
|
@ -188,13 +179,32 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) {
|
||||||
pprof.StartCPUProfile(f)
|
pprof.StartCPUProfile(f)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if debugVersion {
|
||||||
|
version.Verbose()
|
||||||
|
exit(0)
|
||||||
|
}
|
||||||
|
|
||||||
if printVersion {
|
if printVersion {
|
||||||
version.Print()
|
version.Print()
|
||||||
exit(0)
|
exit(0)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Validate that the tags argument is well-formed. go/packages
|
||||||
|
// doesn't detect malformed build flags and returns unhelpful
|
||||||
|
// errors.
|
||||||
|
tf := buildutil.TagsFlag{}
|
||||||
|
if err := tf.Set(tags); err != nil {
|
||||||
|
fmt.Fprintln(os.Stderr, fmt.Errorf("invalid value %q for flag -tags: %s", tags, err))
|
||||||
|
exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
if explain != "" {
|
if explain != "" {
|
||||||
check, ok := findCheck(cs, explain)
|
var haystack []*analysis.Analyzer
|
||||||
|
haystack = append(haystack, cs...)
|
||||||
|
for _, cum := range cums {
|
||||||
|
haystack = append(haystack, cum.Analyzer())
|
||||||
|
}
|
||||||
|
check, ok := findCheck(haystack, explain)
|
||||||
if !ok {
|
if !ok {
|
||||||
fmt.Fprintln(os.Stderr, "Couldn't find check", explain)
|
fmt.Fprintln(os.Stderr, "Couldn't find check", explain)
|
||||||
exit(1)
|
exit(1)
|
||||||
|
@ -207,16 +217,11 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) {
|
||||||
exit(0)
|
exit(0)
|
||||||
}
|
}
|
||||||
|
|
||||||
ps, err := Lint(cs, fs.Args(), &Options{
|
ps, err := Lint(cs, cums, fs.Args(), &Options{
|
||||||
Tags: strings.Fields(tags),
|
Tags: tags,
|
||||||
LintTests: tests,
|
LintTests: tests,
|
||||||
Ignores: ignore,
|
GoVersion: goVersion,
|
||||||
GoVersion: goVersion,
|
Config: cfg,
|
||||||
ReturnIgnored: showIgnored,
|
|
||||||
Config: cfg,
|
|
||||||
|
|
||||||
MaxConcurrentJobs: maxConcurrentJobs,
|
|
||||||
PrintStats: printStats,
|
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Fprintln(os.Stderr, err)
|
fmt.Fprintln(os.Stderr, err)
|
||||||
|
@ -243,15 +248,22 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) {
|
||||||
)
|
)
|
||||||
|
|
||||||
fail := *fs.Lookup("fail").Value.(*list)
|
fail := *fs.Lookup("fail").Value.(*list)
|
||||||
var allChecks []string
|
analyzers := make([]*analysis.Analyzer, len(cs), len(cs)+len(cums))
|
||||||
for _, p := range ps {
|
copy(analyzers, cs)
|
||||||
allChecks = append(allChecks, p.Check)
|
for _, cum := range cums {
|
||||||
|
analyzers = append(analyzers, cum.Analyzer())
|
||||||
}
|
}
|
||||||
|
shouldExit := lint.FilterChecks(analyzers, fail)
|
||||||
shouldExit := lint.FilterChecks(allChecks, fail)
|
shouldExit["compile"] = true
|
||||||
|
|
||||||
total = len(ps)
|
total = len(ps)
|
||||||
for _, p := range ps {
|
for _, p := range ps {
|
||||||
|
if p.Check == "compile" && debugNoCompile {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if p.Severity == lint.Ignored && !showIgnored {
|
||||||
|
continue
|
||||||
|
}
|
||||||
if shouldExit[p.Check] {
|
if shouldExit[p.Check] {
|
||||||
errors++
|
errors++
|
||||||
} else {
|
} else {
|
||||||
|
@ -266,80 +278,97 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) {
|
||||||
if errors > 0 {
|
if errors > 0 {
|
||||||
exit(1)
|
exit(1)
|
||||||
}
|
}
|
||||||
|
exit(0)
|
||||||
}
|
}
|
||||||
|
|
||||||
type Options struct {
|
type Options struct {
|
||||||
Config config.Config
|
Config config.Config
|
||||||
|
|
||||||
Tags []string
|
Tags string
|
||||||
LintTests bool
|
LintTests bool
|
||||||
Ignores string
|
GoVersion int
|
||||||
GoVersion int
|
|
||||||
ReturnIgnored bool
|
|
||||||
|
|
||||||
MaxConcurrentJobs int
|
|
||||||
PrintStats bool
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func Lint(cs []lint.Checker, paths []string, opt *Options) ([]lint.Problem, error) {
|
func computeSalt() ([]byte, error) {
|
||||||
stats := lint.PerfStats{
|
if version.Version != "devel" {
|
||||||
CheckerInits: map[string]time.Duration{},
|
return []byte(version.Version), nil
|
||||||
}
|
}
|
||||||
|
p, err := os.Executable()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
f, err := os.Open(p)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
h := sha256.New()
|
||||||
|
if _, err := io.Copy(h, f); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return h.Sum(nil), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func Lint(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, paths []string, opt *Options) ([]lint.Problem, error) {
|
||||||
|
salt, err := computeSalt()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("could not compute salt for cache: %s", err)
|
||||||
|
}
|
||||||
|
cache.SetSalt(salt)
|
||||||
|
|
||||||
if opt == nil {
|
if opt == nil {
|
||||||
opt = &Options{}
|
opt = &Options{}
|
||||||
}
|
}
|
||||||
ignores, err := parseIgnore(opt.Ignores)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
conf := &packages.Config{
|
|
||||||
Mode: packages.LoadAllSyntax,
|
|
||||||
Tests: opt.LintTests,
|
|
||||||
BuildFlags: []string{
|
|
||||||
"-tags=" + strings.Join(opt.Tags, " "),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
t := time.Now()
|
|
||||||
if len(paths) == 0 {
|
|
||||||
paths = []string{"."}
|
|
||||||
}
|
|
||||||
pkgs, err := packages.Load(conf, paths...)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
stats.PackageLoading = time.Since(t)
|
|
||||||
runtime.GC()
|
|
||||||
|
|
||||||
var problems []lint.Problem
|
|
||||||
workingPkgs := make([]*packages.Package, 0, len(pkgs))
|
|
||||||
for _, pkg := range pkgs {
|
|
||||||
if pkg.IllTyped {
|
|
||||||
problems = append(problems, compileErrors(pkg)...)
|
|
||||||
} else {
|
|
||||||
workingPkgs = append(workingPkgs, pkg)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(workingPkgs) == 0 {
|
|
||||||
return problems, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
l := &lint.Linter{
|
l := &lint.Linter{
|
||||||
Checkers: cs,
|
Checkers: cs,
|
||||||
Ignores: ignores,
|
CumulativeCheckers: cums,
|
||||||
GoVersion: opt.GoVersion,
|
GoVersion: opt.GoVersion,
|
||||||
ReturnIgnored: opt.ReturnIgnored,
|
Config: opt.Config,
|
||||||
Config: opt.Config,
|
}
|
||||||
|
cfg := &packages.Config{}
|
||||||
MaxConcurrentJobs: opt.MaxConcurrentJobs,
|
if opt.LintTests {
|
||||||
PrintStats: opt.PrintStats,
|
cfg.Tests = true
|
||||||
|
}
|
||||||
|
if opt.Tags != "" {
|
||||||
|
cfg.BuildFlags = append(cfg.BuildFlags, "-tags", opt.Tags)
|
||||||
}
|
}
|
||||||
problems = append(problems, l.Lint(workingPkgs, &stats)...)
|
|
||||||
|
|
||||||
return problems, nil
|
printStats := func() {
|
||||||
|
// Individual stats are read atomically, but overall there
|
||||||
|
// is no synchronisation. For printing rough progress
|
||||||
|
// information, this doesn't matter.
|
||||||
|
switch atomic.LoadUint32(&l.Stats.State) {
|
||||||
|
case lint.StateInitializing:
|
||||||
|
fmt.Fprintln(os.Stderr, "Status: initializing")
|
||||||
|
case lint.StateGraph:
|
||||||
|
fmt.Fprintln(os.Stderr, "Status: loading package graph")
|
||||||
|
case lint.StateProcessing:
|
||||||
|
fmt.Fprintf(os.Stderr, "Packages: %d/%d initial, %d/%d total; Workers: %d/%d; Problems: %d\n",
|
||||||
|
atomic.LoadUint32(&l.Stats.ProcessedInitialPackages),
|
||||||
|
atomic.LoadUint32(&l.Stats.InitialPackages),
|
||||||
|
atomic.LoadUint32(&l.Stats.ProcessedPackages),
|
||||||
|
atomic.LoadUint32(&l.Stats.TotalPackages),
|
||||||
|
atomic.LoadUint32(&l.Stats.ActiveWorkers),
|
||||||
|
atomic.LoadUint32(&l.Stats.TotalWorkers),
|
||||||
|
atomic.LoadUint32(&l.Stats.Problems),
|
||||||
|
)
|
||||||
|
case lint.StateCumulative:
|
||||||
|
fmt.Fprintln(os.Stderr, "Status: processing cumulative checkers")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(infoSignals) > 0 {
|
||||||
|
ch := make(chan os.Signal, 1)
|
||||||
|
signal.Notify(ch, infoSignals...)
|
||||||
|
defer signal.Stop(ch)
|
||||||
|
go func() {
|
||||||
|
for range ch {
|
||||||
|
printStats()
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
|
||||||
|
return l.Lint(cfg, paths)
|
||||||
}
|
}
|
||||||
|
|
||||||
var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?$`)
|
var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?$`)
|
||||||
|
@ -361,34 +390,3 @@ func parsePos(pos string) token.Position {
|
||||||
Column: col,
|
Column: col,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func compileErrors(pkg *packages.Package) []lint.Problem {
|
|
||||||
if !pkg.IllTyped {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
if len(pkg.Errors) == 0 {
|
|
||||||
// transitively ill-typed
|
|
||||||
var ps []lint.Problem
|
|
||||||
for _, imp := range pkg.Imports {
|
|
||||||
ps = append(ps, compileErrors(imp)...)
|
|
||||||
}
|
|
||||||
return ps
|
|
||||||
}
|
|
||||||
var ps []lint.Problem
|
|
||||||
for _, err := range pkg.Errors {
|
|
||||||
p := lint.Problem{
|
|
||||||
Position: parsePos(err.Pos),
|
|
||||||
Text: err.Msg,
|
|
||||||
Check: "compile",
|
|
||||||
}
|
|
||||||
ps = append(ps, p)
|
|
||||||
}
|
|
||||||
return ps
|
|
||||||
}
|
|
||||||
|
|
||||||
func ProcessArgs(name string, cs []lint.Checker, args []string) {
|
|
||||||
flags := FlagSet(name)
|
|
||||||
flags.Parse(args)
|
|
||||||
|
|
||||||
ProcessFlagSet(cs, flags)
|
|
||||||
}
|
|
||||||
|
|
970 vendor/honnef.co/go/tools/lint/runner.go vendored Normal file
@@ -0,0 +1,970 @@
|
package lint
|
||||||
|
|
||||||
|
/*
|
||||||
|
Parallelism
|
||||||
|
|
||||||
|
Runner implements parallel processing of packages by spawning one
|
||||||
|
goroutine per package in the dependency graph, without any semaphores.
|
||||||
|
Each goroutine initially waits on the completion of all of its
|
||||||
|
dependencies, thus establishing correct order of processing. Once all
|
||||||
|
dependencies finish processing, the goroutine will load the package
|
||||||
|
from export data or source – this loading is guarded by a semaphore,
|
||||||
|
sized according to the number of CPU cores. This way, we only have as
|
||||||
|
many packages occupying memory and CPU resources as there are actual
|
||||||
|
cores to process them.
|
||||||
|
|
||||||
|
This combination of unbounded goroutines but bounded package loading
|
||||||
|
means that if we have many parallel, independent subgraphs, they will
|
||||||
|
all execute in parallel, while not wasting resources for long linear
|
||||||
|
chains or trying to process more subgraphs in parallel than the system
|
||||||
|
can handle.
|
||||||
|
|
||||||
|
*/
|
||||||
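The comment above is the core of the new runner design: one goroutine per package in the dependency graph, ordering enforced by waiting on dependencies, and a load semaphore sized to the CPU count. A small self-contained sketch of that pattern, not part of this commit, with an invented node type standing in for the runner's per-package state:

package runnersketch

import (
	"runtime"
	"sync"
)

// node is an illustrative stand-in for the runner's per-package state.
type node struct {
	deps []*node
	done chan struct{}
}

// process spawns one goroutine per node without bounding the number of
// goroutines, while the semaphore bounds how many nodes do real work at
// once. nodes must contain every node in the graph, dependencies included.
func process(nodes []*node, work func(*node)) {
	for _, n := range nodes {
		n.done = make(chan struct{})
	}
	sem := make(chan struct{}, runtime.GOMAXPROCS(-1))
	var wg sync.WaitGroup
	wg.Add(len(nodes))
	for _, n := range nodes {
		n := n
		go func() {
			defer wg.Done()
			defer close(n.done)
			for _, d := range n.deps {
				<-d.done // establish correct processing order
			}
			sem <- struct{}{} // only as many active nodes as CPU cores
			defer func() { <-sem }()
			work(n)
		}()
	}
	wg.Wait()
}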
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/gob"
|
||||||
|
"encoding/hex"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"reflect"
|
||||||
|
"regexp"
|
||||||
|
"runtime"
|
||||||
|
"sort"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
"sync/atomic"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/analysis"
|
||||||
|
"golang.org/x/tools/go/packages"
|
||||||
|
"golang.org/x/tools/go/types/objectpath"
|
||||||
|
"honnef.co/go/tools/config"
|
||||||
|
"honnef.co/go/tools/facts"
|
||||||
|
"honnef.co/go/tools/internal/cache"
|
||||||
|
"honnef.co/go/tools/loader"
|
||||||
|
)
|
||||||
|
|
||||||
|
// If enabled, abuse of the go/analysis API will lead to panics
|
||||||
|
const sanityCheck = true
|
||||||
|
|
||||||
|
// OPT(dh): for a dependency tree A->B->C->D, if we have cached data
|
||||||
|
// for B, there should be no need to load C and D individually. Go's
|
||||||
|
// export data for B contains all the data we need on types, and our
|
||||||
|
// fact cache could store the union of B, C and D in B.
|
||||||
|
//
|
||||||
|
// This may change unused's behavior, however, as it may observe fewer
|
||||||
|
// interfaces from transitive dependencies.
|
||||||
|
|
||||||
|
type Package struct {
|
||||||
|
dependents uint64
|
||||||
|
|
||||||
|
*packages.Package
|
||||||
|
Imports []*Package
|
||||||
|
initial bool
|
||||||
|
fromSource bool
|
||||||
|
hash string
|
||||||
|
done chan struct{}
|
||||||
|
|
||||||
|
resultsMu sync.Mutex
|
||||||
|
// results maps analyzer IDs to analyzer results
|
||||||
|
results []*result
|
||||||
|
|
||||||
|
cfg *config.Config
|
||||||
|
gen map[string]facts.Generator
|
||||||
|
problems []Problem
|
||||||
|
ignores []Ignore
|
||||||
|
errs []error
|
||||||
|
|
||||||
|
// these slices are indexed by analysis
|
||||||
|
facts []map[types.Object][]analysis.Fact
|
||||||
|
pkgFacts [][]analysis.Fact
|
||||||
|
|
||||||
|
canClearTypes bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pkg *Package) decUse() {
|
||||||
|
atomic.AddUint64(&pkg.dependents, ^uint64(0))
|
||||||
|
if atomic.LoadUint64(&pkg.dependents) == 0 {
|
||||||
|
// nobody depends on this package anymore
|
||||||
|
if pkg.canClearTypes {
|
||||||
|
pkg.Types = nil
|
||||||
|
}
|
||||||
|
pkg.facts = nil
|
||||||
|
pkg.pkgFacts = nil
|
||||||
|
|
||||||
|
for _, imp := range pkg.Imports {
|
||||||
|
imp.decUse()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type result struct {
|
||||||
|
v interface{}
|
||||||
|
err error
|
||||||
|
ready chan struct{}
|
||||||
|
}
|
||||||
|
|
||||||
|
type Runner struct {
|
||||||
|
ld loader.Loader
|
||||||
|
cache *cache.Cache
|
||||||
|
|
||||||
|
analyzerIDs analyzerIDs
|
||||||
|
|
||||||
|
// limits parallelism of loading packages
|
||||||
|
loadSem chan struct{}
|
||||||
|
|
||||||
|
goVersion int
|
||||||
|
stats *Stats
|
||||||
|
}
|
||||||
|
|
||||||
|
type analyzerIDs struct {
|
||||||
|
m map[*analysis.Analyzer]int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ids analyzerIDs) get(a *analysis.Analyzer) int {
|
||||||
|
id, ok := ids.m[a]
|
||||||
|
if !ok {
|
||||||
|
panic(fmt.Sprintf("no analyzer ID for %s", a.Name))
|
||||||
|
}
|
||||||
|
return id
|
||||||
|
}
|
||||||
|
|
||||||
|
type Fact struct {
|
||||||
|
Path string
|
||||||
|
Fact analysis.Fact
|
||||||
|
}
|
||||||
|
|
||||||
|
type analysisAction struct {
|
||||||
|
analyzer *analysis.Analyzer
|
||||||
|
analyzerID int
|
||||||
|
pkg *Package
|
||||||
|
newPackageFacts []analysis.Fact
|
||||||
|
problems []Problem
|
||||||
|
|
||||||
|
pkgFacts map[*types.Package][]analysis.Fact
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ac *analysisAction) String() string {
|
||||||
|
return fmt.Sprintf("%s @ %s", ac.analyzer, ac.pkg)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ac *analysisAction) allObjectFacts() []analysis.ObjectFact {
|
||||||
|
out := make([]analysis.ObjectFact, 0, len(ac.pkg.facts[ac.analyzerID]))
|
||||||
|
for obj, facts := range ac.pkg.facts[ac.analyzerID] {
|
||||||
|
for _, fact := range facts {
|
||||||
|
out = append(out, analysis.ObjectFact{
|
||||||
|
Object: obj,
|
||||||
|
Fact: fact,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ac *analysisAction) allPackageFacts() []analysis.PackageFact {
|
||||||
|
out := make([]analysis.PackageFact, 0, len(ac.pkgFacts))
|
||||||
|
for pkg, facts := range ac.pkgFacts {
|
||||||
|
for _, fact := range facts {
|
||||||
|
out = append(out, analysis.PackageFact{
|
||||||
|
Package: pkg,
|
||||||
|
Fact: fact,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ac *analysisAction) importObjectFact(obj types.Object, fact analysis.Fact) bool {
|
||||||
|
if sanityCheck && len(ac.analyzer.FactTypes) == 0 {
|
||||||
|
panic("analysis doesn't export any facts")
|
||||||
|
}
|
||||||
|
for _, f := range ac.pkg.facts[ac.analyzerID][obj] {
|
||||||
|
if reflect.TypeOf(f) == reflect.TypeOf(fact) {
|
||||||
|
reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem())
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ac *analysisAction) importPackageFact(pkg *types.Package, fact analysis.Fact) bool {
|
||||||
|
if sanityCheck && len(ac.analyzer.FactTypes) == 0 {
|
||||||
|
panic("analysis doesn't export any facts")
|
||||||
|
}
|
||||||
|
for _, f := range ac.pkgFacts[pkg] {
|
||||||
|
if reflect.TypeOf(f) == reflect.TypeOf(fact) {
|
||||||
|
reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem())
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ac *analysisAction) exportObjectFact(obj types.Object, fact analysis.Fact) {
|
||||||
|
if sanityCheck && len(ac.analyzer.FactTypes) == 0 {
|
||||||
|
panic("analysis doesn't export any facts")
|
||||||
|
}
|
||||||
|
ac.pkg.facts[ac.analyzerID][obj] = append(ac.pkg.facts[ac.analyzerID][obj], fact)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ac *analysisAction) exportPackageFact(fact analysis.Fact) {
|
||||||
|
if sanityCheck && len(ac.analyzer.FactTypes) == 0 {
|
||||||
|
panic("analysis doesn't export any facts")
|
||||||
|
}
|
||||||
|
ac.pkgFacts[ac.pkg.Types] = append(ac.pkgFacts[ac.pkg.Types], fact)
|
||||||
|
ac.newPackageFacts = append(ac.newPackageFacts, fact)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ac *analysisAction) report(pass *analysis.Pass, d analysis.Diagnostic) {
|
||||||
|
p := Problem{
|
||||||
|
Pos: DisplayPosition(pass.Fset, d.Pos),
|
||||||
|
End: DisplayPosition(pass.Fset, d.End),
|
||||||
|
Message: d.Message,
|
||||||
|
Check: pass.Analyzer.Name,
|
||||||
|
}
|
||||||
|
ac.problems = append(ac.problems, p)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Runner) runAnalysis(ac *analysisAction) (ret interface{}, err error) {
|
||||||
|
ac.pkg.resultsMu.Lock()
|
||||||
|
res := ac.pkg.results[r.analyzerIDs.get(ac.analyzer)]
|
||||||
|
if res != nil {
|
||||||
|
ac.pkg.resultsMu.Unlock()
|
||||||
|
<-res.ready
|
||||||
|
return res.v, res.err
|
||||||
|
} else {
|
||||||
|
res = &result{
|
||||||
|
ready: make(chan struct{}),
|
||||||
|
}
|
||||||
|
ac.pkg.results[r.analyzerIDs.get(ac.analyzer)] = res
|
||||||
|
ac.pkg.resultsMu.Unlock()
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
res.v = ret
|
||||||
|
res.err = err
|
||||||
|
close(res.ready)
|
||||||
|
}()
|
||||||
|
|
||||||
|
pass := new(analysis.Pass)
|
||||||
|
*pass = analysis.Pass{
|
||||||
|
Analyzer: ac.analyzer,
|
||||||
|
Fset: ac.pkg.Fset,
|
||||||
|
Files: ac.pkg.Syntax,
|
||||||
|
// type information may be nil or may be populated. if it is
|
||||||
|
// nil, it will get populated later.
|
||||||
|
Pkg: ac.pkg.Types,
|
||||||
|
TypesInfo: ac.pkg.TypesInfo,
|
||||||
|
TypesSizes: ac.pkg.TypesSizes,
|
||||||
|
ResultOf: map[*analysis.Analyzer]interface{}{},
|
||||||
|
ImportObjectFact: ac.importObjectFact,
|
||||||
|
ImportPackageFact: ac.importPackageFact,
|
||||||
|
ExportObjectFact: ac.exportObjectFact,
|
||||||
|
ExportPackageFact: ac.exportPackageFact,
|
||||||
|
Report: func(d analysis.Diagnostic) {
|
||||||
|
ac.report(pass, d)
|
||||||
|
},
|
||||||
|
AllObjectFacts: ac.allObjectFacts,
|
||||||
|
AllPackageFacts: ac.allPackageFacts,
|
||||||
|
}
|
||||||
|
|
||||||
|
if !ac.pkg.initial {
|
||||||
|
// Don't report problems in dependencies
|
||||||
|
pass.Report = func(analysis.Diagnostic) {}
|
||||||
|
}
|
||||||
|
return r.runAnalysisUser(pass, ac)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Runner) loadCachedFacts(a *analysis.Analyzer, pkg *Package) ([]Fact, bool) {
|
||||||
|
if len(a.FactTypes) == 0 {
|
||||||
|
return nil, true
|
||||||
|
}
|
||||||
|
|
||||||
|
var facts []Fact
|
||||||
|
// Look in the cache for facts
|
||||||
|
aID, err := passActionID(pkg, a)
|
||||||
|
if err != nil {
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
aID = cache.Subkey(aID, "facts")
|
||||||
|
b, _, err := r.cache.GetBytes(aID)
|
||||||
|
if err != nil {
|
||||||
|
// No cached facts, analyse this package like a user-provided one, but ignore diagnostics
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := gob.NewDecoder(bytes.NewReader(b)).Decode(&facts); err != nil {
|
||||||
|
// Cached facts are broken, analyse this package like a user-provided one, but ignore diagnostics
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
return facts, true
|
||||||
|
}
|
||||||
|
|
||||||
|
type dependencyError struct {
|
||||||
|
dep string
|
||||||
|
err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func (err dependencyError) nested() dependencyError {
|
||||||
|
if o, ok := err.err.(dependencyError); ok {
|
||||||
|
return o.nested()
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (err dependencyError) Error() string {
|
||||||
|
if o, ok := err.err.(dependencyError); ok {
|
||||||
|
return o.Error()
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("error running dependency %s: %s", err.dep, err.err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Runner) makeAnalysisAction(a *analysis.Analyzer, pkg *Package) *analysisAction {
|
||||||
|
aid := r.analyzerIDs.get(a)
|
||||||
|
ac := &analysisAction{
|
||||||
|
analyzer: a,
|
||||||
|
analyzerID: aid,
|
||||||
|
pkg: pkg,
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(a.FactTypes) == 0 {
|
||||||
|
return ac
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge all package facts of dependencies
|
||||||
|
ac.pkgFacts = map[*types.Package][]analysis.Fact{}
|
||||||
|
seen := map[*Package]struct{}{}
|
||||||
|
var dfs func(*Package)
|
||||||
|
dfs = func(pkg *Package) {
|
||||||
|
if _, ok := seen[pkg]; ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
seen[pkg] = struct{}{}
|
||||||
|
s := pkg.pkgFacts[aid]
|
||||||
|
ac.pkgFacts[pkg.Types] = s[0:len(s):len(s)]
|
||||||
|
for _, imp := range pkg.Imports {
|
||||||
|
dfs(imp)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
dfs(pkg)
|
||||||
|
|
||||||
|
return ac
|
||||||
|
}
|
||||||
|
|
||||||
|
// analyzes that we always want to run, even if they're not being run
|
||||||
|
// explicitly or as dependencies. these are necessary for the inner
|
||||||
|
// workings of the runner.
|
||||||
|
var injectedAnalyses = []*analysis.Analyzer{facts.Generated, config.Analyzer}
|
||||||
|
|
||||||
|
func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (interface{}, error) {
|
||||||
|
if !ac.pkg.fromSource {
|
||||||
|
panic(fmt.Sprintf("internal error: %s was not loaded from source", ac.pkg))
|
||||||
|
}
|
||||||
|
|
||||||
|
// User-provided package, analyse it
|
||||||
|
// First analyze it with dependencies
|
||||||
|
for _, req := range ac.analyzer.Requires {
|
||||||
|
acReq := r.makeAnalysisAction(req, ac.pkg)
|
||||||
|
ret, err := r.runAnalysis(acReq)
|
||||||
|
if err != nil {
|
||||||
|
// We couldn't run a dependency, no point in going on
|
||||||
|
return nil, dependencyError{req.Name, err}
|
||||||
|
}
|
||||||
|
|
||||||
|
pass.ResultOf[req] = ret
|
||||||
|
}
|
||||||
|
|
||||||
|
// Then with this analyzer
|
||||||
|
ret, err := ac.analyzer.Run(pass)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(ac.analyzer.FactTypes) > 0 {
|
||||||
|
// Merge new facts into the package and persist them.
|
||||||
|
var facts []Fact
|
||||||
|
for _, fact := range ac.newPackageFacts {
|
||||||
|
id := r.analyzerIDs.get(ac.analyzer)
|
||||||
|
ac.pkg.pkgFacts[id] = append(ac.pkg.pkgFacts[id], fact)
|
||||||
|
facts = append(facts, Fact{"", fact})
|
||||||
|
}
|
||||||
|
for obj, afacts := range ac.pkg.facts[ac.analyzerID] {
|
||||||
|
if obj.Pkg() != ac.pkg.Package.Types {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
path, err := objectpath.For(obj)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for _, fact := range afacts {
|
||||||
|
facts = append(facts, Fact{string(path), fact})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
buf := &bytes.Buffer{}
|
||||||
|
if err := gob.NewEncoder(buf).Encode(facts); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
aID, err := passActionID(ac.pkg, ac.analyzer)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
aID = cache.Subkey(aID, "facts")
|
||||||
|
if err := r.cache.PutBytes(aID, buf.Bytes()); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewRunner(stats *Stats) (*Runner, error) {
|
||||||
|
cache, err := cache.Default()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &Runner{
|
||||||
|
cache: cache,
|
||||||
|
stats: stats,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run loads packages corresponding to patterns and analyses them with
|
||||||
|
// analyzers. It returns the loaded packages, which contain reported
|
||||||
|
// diagnostics as well as extracted ignore directives.
|
||||||
|
//
|
||||||
|
// Note that diagnostics have not been filtered at this point yet, to
|
||||||
|
// accomodate cumulative analyzes that require additional steps to
|
||||||
|
// produce diagnostics.
|
||||||
|
func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analysis.Analyzer, hasCumulative bool) ([]*Package, error) {
|
||||||
|
r.analyzerIDs = analyzerIDs{m: map[*analysis.Analyzer]int{}}
|
||||||
|
id := 0
|
||||||
|
seen := map[*analysis.Analyzer]struct{}{}
|
||||||
|
var dfs func(a *analysis.Analyzer)
|
||||||
|
dfs = func(a *analysis.Analyzer) {
|
||||||
|
if _, ok := seen[a]; ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
seen[a] = struct{}{}
|
||||||
|
r.analyzerIDs.m[a] = id
|
||||||
|
id++
|
||||||
|
for _, f := range a.FactTypes {
|
||||||
|
gob.Register(f)
|
||||||
|
}
|
||||||
|
for _, req := range a.Requires {
|
||||||
|
dfs(req)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, a := range analyzers {
|
||||||
|
if v := a.Flags.Lookup("go"); v != nil {
|
||||||
|
v.Value.Set(fmt.Sprintf("1.%d", r.goVersion))
|
||||||
|
}
|
||||||
|
dfs(a)
|
||||||
|
}
|
||||||
|
for _, a := range injectedAnalyses {
|
||||||
|
dfs(a)
|
||||||
|
}
|
||||||
|
|
||||||
|
var dcfg packages.Config
|
||||||
|
if cfg != nil {
|
||||||
|
dcfg = *cfg
|
||||||
|
}
|
||||||
|
|
||||||
|
atomic.StoreUint32(&r.stats.State, StateGraph)
|
||||||
|
initialPkgs, err := r.ld.Graph(dcfg, patterns...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
defer r.cache.Trim()
|
||||||
|
|
||||||
|
var allPkgs []*Package
|
||||||
|
m := map[*packages.Package]*Package{}
|
||||||
|
packages.Visit(initialPkgs, nil, func(l *packages.Package) {
|
||||||
|
m[l] = &Package{
|
||||||
|
Package: l,
|
||||||
|
results: make([]*result, len(r.analyzerIDs.m)),
|
||||||
|
facts: make([]map[types.Object][]analysis.Fact, len(r.analyzerIDs.m)),
|
||||||
|
pkgFacts: make([][]analysis.Fact, len(r.analyzerIDs.m)),
|
||||||
|
done: make(chan struct{}),
|
||||||
|
// every package needs itself
|
||||||
|
dependents: 1,
|
||||||
|
canClearTypes: !hasCumulative,
|
||||||
|
}
|
||||||
|
allPkgs = append(allPkgs, m[l])
|
||||||
|
for i := range m[l].facts {
|
||||||
|
m[l].facts[i] = map[types.Object][]analysis.Fact{}
|
||||||
|
}
|
||||||
|
for _, err := range l.Errors {
|
||||||
|
m[l].errs = append(m[l].errs, err)
|
||||||
|
}
|
||||||
|
for _, v := range l.Imports {
|
||||||
|
m[v].dependents++
|
||||||
|
m[l].Imports = append(m[l].Imports, m[v])
|
||||||
|
}
|
||||||
|
|
||||||
|
m[l].hash, err = packageHash(m[l])
|
||||||
|
if err != nil {
|
||||||
|
m[l].errs = append(m[l].errs, err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
pkgs := make([]*Package, len(initialPkgs))
|
||||||
|
for i, l := range initialPkgs {
|
||||||
|
pkgs[i] = m[l]
|
||||||
|
pkgs[i].initial = true
|
||||||
|
}
|
||||||
|
|
||||||
|
atomic.StoreUint32(&r.stats.InitialPackages, uint32(len(initialPkgs)))
|
||||||
|
atomic.StoreUint32(&r.stats.TotalPackages, uint32(len(allPkgs)))
|
||||||
|
atomic.StoreUint32(&r.stats.State, StateProcessing)
|
||||||
|
|
||||||
|
var wg sync.WaitGroup
|
||||||
|
wg.Add(len(allPkgs))
|
||||||
|
r.loadSem = make(chan struct{}, runtime.GOMAXPROCS(-1))
|
||||||
|
atomic.StoreUint32(&r.stats.TotalWorkers, uint32(cap(r.loadSem)))
|
||||||
|
for _, pkg := range allPkgs {
|
||||||
|
pkg := pkg
|
||||||
|
go func() {
|
||||||
|
r.processPkg(pkg, analyzers)
|
||||||
|
|
||||||
|
if pkg.initial {
|
||||||
|
atomic.AddUint32(&r.stats.ProcessedInitialPackages, 1)
|
||||||
|
}
|
||||||
|
atomic.AddUint32(&r.stats.Problems, uint32(len(pkg.problems)))
|
||||||
|
wg.Done()
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
wg.Wait()
|
||||||
|
|
||||||
|
return pkgs, nil
|
||||||
|
}
|
||||||
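For orientation, a hedged sketch of a minimal driver for the NewRunner/Run API shown above; the package and function names, the analyzer list and the patterns are placeholders, not part of this commit, and error handling is kept minimal.

package driver

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/packages"

	"honnef.co/go/tools/lint"
)

// runAnalyzers loads the packages matched by patterns and runs the
// given analyzers over them, as described in the Run documentation above.
func runAnalyzers(analyzers []*analysis.Analyzer, patterns []string) {
	var stats lint.Stats
	r, err := lint.NewRunner(&stats)
	if err != nil {
		log.Fatal(err)
	}
	cfg := &packages.Config{Tests: true}
	// hasCumulative is false here: this sketch runs no cumulative checkers.
	pkgs, err := r.Run(cfg, patterns, analyzers, false)
	if err != nil {
		log.Fatal(err)
	}
	for _, pkg := range pkgs {
		fmt.Println(pkg.PkgPath) // diagnostics still need filtering at this point
	}
}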
|
|
||||||
|
var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?`)
|
||||||
|
|
||||||
|
func parsePos(pos string) (token.Position, int, error) {
|
||||||
|
if pos == "-" || pos == "" {
|
||||||
|
return token.Position{}, 0, nil
|
||||||
|
}
|
||||||
|
parts := posRe.FindStringSubmatch(pos)
|
||||||
|
if parts == nil {
|
||||||
|
return token.Position{}, 0, fmt.Errorf("malformed position %q", pos)
|
||||||
|
}
|
||||||
|
file := parts[1]
|
||||||
|
line, _ := strconv.Atoi(parts[2])
|
||||||
|
col, _ := strconv.Atoi(parts[3])
|
||||||
|
return token.Position{
|
||||||
|
Filename: file,
|
||||||
|
Line: line,
|
||||||
|
Column: col,
|
||||||
|
}, len(parts[0]), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// loadPkg loads a Go package. If the package is in the set of initial
|
||||||
|
// packages, it will be loaded from source, otherwise it will be
|
||||||
|
// loaded from export data. In the case that the package was loaded
|
||||||
|
// from export data, cached facts will also be loaded.
|
||||||
|
//
|
||||||
|
// Currently, only cached facts for this package will be loaded, not
|
||||||
|
// for any of its dependencies.
|
||||||
|
func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error {
|
||||||
|
if pkg.Types != nil {
|
||||||
|
panic(fmt.Sprintf("internal error: %s has already been loaded", pkg.Package))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load type information
|
||||||
|
if pkg.initial {
|
||||||
|
// Load package from source
|
||||||
|
pkg.fromSource = true
|
||||||
|
return r.ld.LoadFromSource(pkg.Package)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load package from export data
|
||||||
|
if err := r.ld.LoadFromExport(pkg.Package); err != nil {
|
||||||
|
// We asked Go to give us up to date export data, yet
|
||||||
|
// we can't load it. There must be something wrong.
|
||||||
|
//
|
||||||
|
// Attempt loading from source. This should fail (because
|
||||||
|
// otherwise there would be export data); we just want to
|
||||||
|
// get the compile errors. If loading from source succeeds
|
||||||
|
// we discard the result, anyway. Otherwise we'll fail
|
||||||
|
// when trying to reload from export data later.
|
||||||
|
//
|
||||||
|
// FIXME(dh): we no longer reload from export data, so
|
||||||
|
// theoretically we should be able to continue
|
||||||
|
pkg.fromSource = true
|
||||||
|
if err := r.ld.LoadFromSource(pkg.Package); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
// Make sure this package can't be imported successfully
|
||||||
|
pkg.Package.Errors = append(pkg.Package.Errors, packages.Error{
|
||||||
|
Pos: "-",
|
||||||
|
Msg: fmt.Sprintf("could not load export data: %s", err),
|
||||||
|
Kind: packages.ParseError,
|
||||||
|
})
|
||||||
|
return fmt.Errorf("could not load export data: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
failed := false
|
||||||
|
seen := make([]bool, len(r.analyzerIDs.m))
|
||||||
|
var dfs func(*analysis.Analyzer)
|
||||||
|
dfs = func(a *analysis.Analyzer) {
|
||||||
|
if seen[r.analyzerIDs.get(a)] {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
seen[r.analyzerIDs.get(a)] = true
|
||||||
|
|
||||||
|
if len(a.FactTypes) > 0 {
|
||||||
|
facts, ok := r.loadCachedFacts(a, pkg)
|
||||||
|
if !ok {
|
||||||
|
failed = true
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, f := range facts {
|
||||||
|
if f.Path == "" {
|
||||||
|
// This is a package fact
|
||||||
|
pkg.pkgFacts[r.analyzerIDs.get(a)] = append(pkg.pkgFacts[r.analyzerIDs.get(a)], f.Fact)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
obj, err := objectpath.Object(pkg.Types, objectpath.Path(f.Path))
|
||||||
|
if err != nil {
|
||||||
|
// Be lenient about these errors. For example, when
|
||||||
|
// analysing io/ioutil from source, we may get a fact
|
||||||
|
// for methods on the devNull type, and objectpath
|
||||||
|
// will happily create a path for them. However, when
|
||||||
|
// we later load io/ioutil from export data, the path
|
||||||
|
// no longer resolves.
|
||||||
|
//
|
||||||
|
// If an exported type embeds the unexported type,
|
||||||
|
// then (part of) the unexported type will become part
|
||||||
|
// of the type information and our path will resolve
|
||||||
|
// again.
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
pkg.facts[r.analyzerIDs.get(a)][obj] = append(pkg.facts[r.analyzerIDs.get(a)][obj], f.Fact)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, req := range a.Requires {
|
||||||
|
dfs(req)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, a := range analyzers {
|
||||||
|
dfs(a)
|
||||||
|
}
|
||||||
|
|
||||||
|
if failed {
|
||||||
|
pkg.fromSource = true
|
||||||
|
// XXX we added facts to the maps, we need to get rid of those
|
||||||
|
return r.ld.LoadFromSource(pkg.Package)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type analysisError struct {
|
||||||
|
analyzer *analysis.Analyzer
|
||||||
|
pkg *Package
|
||||||
|
err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func (err analysisError) Error() string {
|
||||||
|
return fmt.Sprintf("error running analyzer %s on %s: %s", err.analyzer, err.pkg, err.err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// processPkg processes a package. This involves loading the package,
|
||||||
|
// either from export data or from source. For packages loaded from
|
||||||
|
// source, the provides analyzers will be run on the package.
|
||||||
|
func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) {
|
||||||
|
defer func() {
|
||||||
|
// Clear information we no longer need. Make sure to do this
|
||||||
|
// when returning from processPkg so that we clear
|
||||||
|
// dependencies, not just initial packages.
|
||||||
|
pkg.TypesInfo = nil
|
||||||
|
pkg.Syntax = nil
|
||||||
|
pkg.results = nil
|
||||||
|
|
||||||
|
atomic.AddUint32(&r.stats.ProcessedPackages, 1)
|
||||||
|
pkg.decUse()
|
||||||
|
close(pkg.done)
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Ensure all packages have the generated map and config. This is
|
||||||
|
// required by interna of the runner. Analyses that themselves
|
||||||
|
// make use of either have an explicit dependency so that other
|
||||||
|
// runners work correctly, too.
|
||||||
|
analyzers = append(analyzers[0:len(analyzers):len(analyzers)], injectedAnalyses...)
|
||||||
|
|
||||||
|
if len(pkg.errs) != 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, imp := range pkg.Imports {
|
||||||
|
<-imp.done
|
||||||
|
if len(imp.errs) > 0 {
|
||||||
|
if imp.initial {
|
||||||
|
// Don't print the error of the dependency since it's
|
||||||
|
// an initial package and we're already printing the
|
||||||
|
// error.
|
||||||
|
pkg.errs = append(pkg.errs, fmt.Errorf("could not analyze dependency %s of %s", imp, pkg))
|
||||||
|
} else {
|
||||||
|
var s string
|
||||||
|
for _, err := range imp.errs {
|
||||||
|
s += "\n\t" + err.Error()
|
||||||
|
}
|
||||||
|
pkg.errs = append(pkg.errs, fmt.Errorf("could not analyze dependency %s of %s: %s", imp, pkg, s))
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if pkg.PkgPath == "unsafe" {
|
||||||
|
pkg.Types = types.Unsafe
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
r.loadSem <- struct{}{}
|
||||||
|
atomic.AddUint32(&r.stats.ActiveWorkers, 1)
|
||||||
|
defer func() {
|
||||||
|
<-r.loadSem
|
||||||
|
atomic.AddUint32(&r.stats.ActiveWorkers, ^uint32(0))
|
||||||
|
}()
|
||||||
|
if err := r.loadPkg(pkg, analyzers); err != nil {
|
||||||
|
pkg.errs = append(pkg.errs, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// A package's object facts is the union of all of its dependencies.
|
||||||
|
for _, imp := range pkg.Imports {
|
||||||
|
for ai, m := range imp.facts {
|
||||||
|
for obj, facts := range m {
|
||||||
|
pkg.facts[ai][obj] = facts[0:len(facts):len(facts)]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !pkg.fromSource {
|
||||||
|
// Nothing left to do for the package.
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run analyses on initial packages and those missing facts
|
||||||
|
var wg sync.WaitGroup
|
||||||
|
wg.Add(len(analyzers))
|
||||||
|
errs := make([]error, len(analyzers))
|
||||||
|
var acs []*analysisAction
|
||||||
|
for i, a := range analyzers {
|
||||||
|
i := i
|
||||||
|
a := a
|
||||||
|
ac := r.makeAnalysisAction(a, pkg)
|
||||||
|
acs = append(acs, ac)
|
||||||
|
go func() {
|
||||||
|
defer wg.Done()
|
||||||
|
// Only initial packages and packages with missing
|
||||||
|
// facts will have been loaded from source.
|
||||||
|
if pkg.initial || r.hasFacts(a) {
|
||||||
|
if _, err := r.runAnalysis(ac); err != nil {
|
||||||
|
errs[i] = analysisError{a, pkg, err}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
wg.Wait()
|
||||||
|
|
||||||
|
depErrors := map[dependencyError]int{}
|
||||||
|
for _, err := range errs {
|
||||||
|
if err == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
switch err := err.(type) {
|
||||||
|
case analysisError:
|
||||||
|
switch err := err.err.(type) {
|
||||||
|
case dependencyError:
|
||||||
|
depErrors[err.nested()]++
|
||||||
|
default:
|
||||||
|
pkg.errs = append(pkg.errs, err)
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
pkg.errs = append(pkg.errs, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for err, count := range depErrors {
|
||||||
|
pkg.errs = append(pkg.errs,
|
||||||
|
fmt.Errorf("could not run %s@%s, preventing %d analyzers from running: %s", err.dep, pkg, count, err.err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// We can't process ignores at this point because `unused` needs
|
||||||
|
// to see more than one package to make its decision.
|
||||||
|
ignores, problems := parseDirectives(pkg.Package)
|
||||||
|
pkg.ignores = append(pkg.ignores, ignores...)
|
||||||
|
pkg.problems = append(pkg.problems, problems...)
|
||||||
|
for _, ac := range acs {
|
||||||
|
pkg.problems = append(pkg.problems, ac.problems...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if pkg.initial {
|
||||||
|
// Only initial packages have these analyzers run, and only
|
||||||
|
// initial packages need these.
|
||||||
|
if pkg.results[r.analyzerIDs.get(config.Analyzer)].v != nil {
|
||||||
|
pkg.cfg = pkg.results[r.analyzerIDs.get(config.Analyzer)].v.(*config.Config)
|
||||||
|
}
|
||||||
|
pkg.gen = pkg.results[r.analyzerIDs.get(facts.Generated)].v.(map[string]facts.Generator)
|
||||||
|
}
|
||||||
|
|
||||||
|
// In a previous version of the code, we would throw away all type
|
||||||
|
// information and reload it from export data. That was
|
||||||
|
// nonsensical. The *types.Package doesn't keep any information
|
||||||
|
// live that export data wouldn't also. We only need to discard
|
||||||
|
// the AST and the TypesInfo maps; that happens after we return
|
||||||
|
// from processPkg.
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasFacts reports whether an analysis exports any facts. An analysis
|
||||||
|
// that has a transitive dependency that exports facts is considered
|
||||||
|
// to be exporting facts.
|
||||||
|
func (r *Runner) hasFacts(a *analysis.Analyzer) bool {
|
||||||
|
ret := false
|
||||||
|
seen := make([]bool, len(r.analyzerIDs.m))
|
||||||
|
var dfs func(*analysis.Analyzer)
|
||||||
|
dfs = func(a *analysis.Analyzer) {
|
||||||
|
if seen[r.analyzerIDs.get(a)] {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
seen[r.analyzerIDs.get(a)] = true
|
||||||
|
if len(a.FactTypes) > 0 {
|
||||||
|
ret = true
|
||||||
|
}
|
||||||
|
for _, req := range a.Requires {
|
||||||
|
if ret {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
dfs(req)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
dfs(a)
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseDirective(s string) (cmd string, args []string) {
|
||||||
|
if !strings.HasPrefix(s, "//lint:") {
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
s = strings.TrimPrefix(s, "//lint:")
|
||||||
|
fields := strings.Split(s, " ")
|
||||||
|
return fields[0], fields[1:]
|
||||||
|
}
|
||||||
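For reference, an illustration of what parseDirective above returns for a typical directive; the check code and reason are made up:

// For the comment
//	//lint:ignore SA1000 this regexp is intentionally invalid
// parseDirective returns
//	cmd  = "ignore"
//	args = []string{"SA1000", "this", "regexp", "is", "intentionally", "invalid"}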
|
|
||||||
|
// parseDirectives extracts all linter directives from the source
|
||||||
|
// files of the package. Malformed directives are returned as problems.
|
||||||
|
func parseDirectives(pkg *packages.Package) ([]Ignore, []Problem) {
|
||||||
|
var ignores []Ignore
|
||||||
|
var problems []Problem
|
||||||
|
|
||||||
|
for _, f := range pkg.Syntax {
|
||||||
|
found := false
|
||||||
|
commentLoop:
|
||||||
|
for _, cg := range f.Comments {
|
||||||
|
for _, c := range cg.List {
|
||||||
|
if strings.Contains(c.Text, "//lint:") {
|
||||||
|
found = true
|
||||||
|
break commentLoop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !found {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
cm := ast.NewCommentMap(pkg.Fset, f, f.Comments)
|
||||||
|
for node, cgs := range cm {
|
||||||
|
for _, cg := range cgs {
|
||||||
|
for _, c := range cg.List {
|
||||||
|
if !strings.HasPrefix(c.Text, "//lint:") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
cmd, args := parseDirective(c.Text)
|
||||||
|
switch cmd {
|
||||||
|
case "ignore", "file-ignore":
|
||||||
|
if len(args) < 2 {
|
||||||
|
p := Problem{
|
||||||
|
Pos: DisplayPosition(pkg.Fset, c.Pos()),
|
||||||
|
Message: "malformed linter directive; missing the required reason field?",
|
||||||
|
Severity: Error,
|
||||||
|
Check: "compile",
|
||||||
|
}
|
||||||
|
problems = append(problems, p)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
// unknown directive, ignore
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
checks := strings.Split(args[0], ",")
|
||||||
|
pos := DisplayPosition(pkg.Fset, node.Pos())
|
||||||
|
var ig Ignore
|
||||||
|
switch cmd {
|
||||||
|
case "ignore":
|
||||||
|
ig = &LineIgnore{
|
||||||
|
File: pos.Filename,
|
||||||
|
Line: pos.Line,
|
||||||
|
Checks: checks,
|
||||||
|
Pos: c.Pos(),
|
||||||
|
}
|
||||||
|
case "file-ignore":
|
||||||
|
ig = &FileIgnore{
|
||||||
|
File: pos.Filename,
|
||||||
|
Checks: checks,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ignores = append(ignores, ig)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ignores, problems
|
||||||
|
}
|
||||||
|
|
||||||
|
// packageHash computes a package's hash. The hash is based on all Go
|
||||||
|
// files that make up the package, as well as the hashes of imported
|
||||||
|
// packages.
|
||||||
|
func packageHash(pkg *Package) (string, error) {
|
||||||
|
key := cache.NewHash("package hash")
|
||||||
|
fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
|
||||||
|
for _, f := range pkg.CompiledGoFiles {
|
||||||
|
h, err := cache.FileHash(f)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
fmt.Fprintf(key, "file %s %x\n", f, h)
|
||||||
|
}
|
||||||
|
|
||||||
|
imps := make([]*Package, len(pkg.Imports))
|
||||||
|
copy(imps, pkg.Imports)
|
||||||
|
sort.Slice(imps, func(i, j int) bool {
|
||||||
|
return imps[i].PkgPath < imps[j].PkgPath
|
||||||
|
})
|
||||||
|
for _, dep := range imps {
|
||||||
|
if dep.PkgPath == "unsafe" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, dep.hash)
|
||||||
|
}
|
||||||
|
h := key.Sum()
|
||||||
|
return hex.EncodeToString(h[:]), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// passActionID computes an ActionID for an analysis pass.
|
||||||
|
func passActionID(pkg *Package, analyzer *analysis.Analyzer) (cache.ActionID, error) {
|
||||||
|
key := cache.NewHash("action ID")
|
||||||
|
fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
|
||||||
|
fmt.Fprintf(key, "pkghash %s\n", pkg.hash)
|
||||||
|
fmt.Fprintf(key, "analyzer %s\n", analyzer.Name)
|
||||||
|
|
||||||
|
return key.Sum(), nil
|
||||||
|
}
|
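For reference, the directives consumed by parseDirective take the form //lint:&lt;command&gt; &lt;checks&gt; &lt;reason&gt;, and the reason is mandatory. A minimal, hypothetical example of a line ignore as it would appear in user code (the check name and function are made up for illustration):

package example

import "fmt"

// The check list is comma-separated and the trailing reason is required;
// without it, parseDirectives records a "malformed linter directive" problem.
//lint:ignore S1000 keeping the single-case select for symmetry with later cases
func demo(ch chan int) {
	select {
	case x := <-ch:
		fmt.Println(x)
	}
}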
20 vendor/honnef.co/go/tools/lint/stats.go vendored Normal file
@@ -0,0 +1,20 @@
package lint

const (
	StateInitializing = 0
	StateGraph        = 1
	StateProcessing   = 2
	StateCumulative   = 3
)

type Stats struct {
	State uint32

	InitialPackages          uint32
	TotalPackages            uint32
	ProcessedPackages        uint32
	ProcessedInitialPackages uint32
	Problems                 uint32
	ActiveWorkers            uint32
	TotalWorkers             uint32
}
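The fields above are plain uint32 counters; the runner is assumed to update them atomically while packages are processed, so a progress reporter would read them the same way. A rough sketch under that assumption:

package progress

import (
	"fmt"
	"sync/atomic"
	"time"

	"honnef.co/go/tools/lint"
)

// PollStats prints coarse progress until all packages have been processed.
func PollStats(stats *lint.Stats) {
	for {
		done := atomic.LoadUint32(&stats.ProcessedPackages)
		total := atomic.LoadUint32(&stats.TotalPackages)
		fmt.Printf("processed %d/%d packages\n", done, total)
		if total > 0 && done == total {
			return
		}
		time.Sleep(time.Second)
	}
}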
197 vendor/honnef.co/go/tools/loader/loader.go vendored Normal file
@@ -0,0 +1,197 @@
package loader

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/scanner"
	"go/token"
	"go/types"
	"log"
	"os"
	"sync"

	"golang.org/x/tools/go/gcexportdata"
	"golang.org/x/tools/go/packages"
)

type Loader struct {
	exportMu sync.RWMutex
}

// Graph resolves patterns and returns packages with all the
// information required to later load type information, and optionally
// syntax trees.
//
// The provided config can set any setting with the exception of Mode.
func (ld *Loader) Graph(cfg packages.Config, patterns ...string) ([]*packages.Package, error) {
	cfg.Mode = packages.NeedName | packages.NeedImports | packages.NeedDeps | packages.NeedExportsFile | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedTypesSizes
	pkgs, err := packages.Load(&cfg, patterns...)
	if err != nil {
		return nil, err
	}
	fset := token.NewFileSet()
	packages.Visit(pkgs, nil, func(pkg *packages.Package) {
		pkg.Fset = fset
	})
	return pkgs, nil
}

// LoadFromExport loads a package from export data. All of its
// dependencies must have been loaded already.
func (ld *Loader) LoadFromExport(pkg *packages.Package) error {
	ld.exportMu.Lock()
	defer ld.exportMu.Unlock()

	pkg.IllTyped = true
	for path, pkg := range pkg.Imports {
		if pkg.Types == nil {
			return fmt.Errorf("dependency %q hasn't been loaded yet", path)
		}
	}
	if pkg.ExportFile == "" {
		return fmt.Errorf("no export data for %q", pkg.ID)
	}
	f, err := os.Open(pkg.ExportFile)
	if err != nil {
		return err
	}
	defer f.Close()

	r, err := gcexportdata.NewReader(f)
	if err != nil {
		return err
	}

	view := make(map[string]*types.Package)  // view seen by gcexportdata
	seen := make(map[*packages.Package]bool) // all visited packages
	var visit func(pkgs map[string]*packages.Package)
	visit = func(pkgs map[string]*packages.Package) {
		for _, pkg := range pkgs {
			if !seen[pkg] {
				seen[pkg] = true
				view[pkg.PkgPath] = pkg.Types
				visit(pkg.Imports)
			}
		}
	}
	visit(pkg.Imports)
	tpkg, err := gcexportdata.Read(r, pkg.Fset, view, pkg.PkgPath)
	if err != nil {
		return err
	}
	pkg.Types = tpkg
	pkg.IllTyped = false
	return nil
}

// LoadFromSource loads a package from source. All of its dependencies
// must have been loaded already.
func (ld *Loader) LoadFromSource(pkg *packages.Package) error {
	ld.exportMu.RLock()
	defer ld.exportMu.RUnlock()

	pkg.IllTyped = true
	pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name)

	// OPT(dh): many packages have few files, much fewer than there
	// are CPU cores. Additionally, parsing each individual file is
	// very fast. A naive parallel implementation of this loop won't
	// be faster, and tends to be slower due to extra scheduling,
	// bookkeeping and potentially false sharing of cache lines.
	pkg.Syntax = make([]*ast.File, len(pkg.CompiledGoFiles))
	for i, file := range pkg.CompiledGoFiles {
		f, err := parser.ParseFile(pkg.Fset, file, nil, parser.ParseComments)
		if err != nil {
			pkg.Errors = append(pkg.Errors, convertError(err)...)
			return err
		}
		pkg.Syntax[i] = f
	}
	pkg.TypesInfo = &types.Info{
		Types:      make(map[ast.Expr]types.TypeAndValue),
		Defs:       make(map[*ast.Ident]types.Object),
		Uses:       make(map[*ast.Ident]types.Object),
		Implicits:  make(map[ast.Node]types.Object),
		Scopes:     make(map[ast.Node]*types.Scope),
		Selections: make(map[*ast.SelectorExpr]*types.Selection),
	}

	importer := func(path string) (*types.Package, error) {
		if path == "unsafe" {
			return types.Unsafe, nil
		}
		imp := pkg.Imports[path]
		if imp == nil {
			return nil, nil
		}
		if len(imp.Errors) > 0 {
			return nil, imp.Errors[0]
		}
		return imp.Types, nil
	}
	tc := &types.Config{
		Importer: importerFunc(importer),
		Error: func(err error) {
			pkg.Errors = append(pkg.Errors, convertError(err)...)
		},
	}
	err := types.NewChecker(tc, pkg.Fset, pkg.Types, pkg.TypesInfo).Files(pkg.Syntax)
	if err != nil {
		return err
	}
	pkg.IllTyped = false
	return nil
}

func convertError(err error) []packages.Error {
	var errs []packages.Error
	// taken from go/packages
	switch err := err.(type) {
	case packages.Error:
		// from driver
		errs = append(errs, err)

	case *os.PathError:
		// from parser
		errs = append(errs, packages.Error{
			Pos:  err.Path + ":1",
			Msg:  err.Err.Error(),
			Kind: packages.ParseError,
		})

	case scanner.ErrorList:
		// from parser
		for _, err := range err {
			errs = append(errs, packages.Error{
				Pos:  err.Pos.String(),
				Msg:  err.Msg,
				Kind: packages.ParseError,
			})
		}

	case types.Error:
		// from type checker
		errs = append(errs, packages.Error{
			Pos:  err.Fset.Position(err.Pos).String(),
			Msg:  err.Msg,
			Kind: packages.TypeError,
		})

	default:
		// unexpected impoverished error from parser?
		errs = append(errs, packages.Error{
			Pos:  "-",
			Msg:  err.Error(),
			Kind: packages.UnknownError,
		})

		// If you see this error message, please file a bug.
		log.Printf("internal error: error %q (%T) without position", err, err)
	}
	return errs
}

type importerFunc func(path string) (*types.Package, error)

func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) }
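One plausible way to drive this Loader, sketched here rather than taken from the vendored code: resolve the graph once, then load each package bottom-up, preferring export data and falling back to source. packages.Visit runs the post callback after a package's imports have been visited, which gives the required ordering.

package main

import (
	"log"

	"golang.org/x/tools/go/packages"
	"honnef.co/go/tools/loader"
)

func main() {
	var ld loader.Loader
	pkgs, err := ld.Graph(packages.Config{}, "./...")
	if err != nil {
		log.Fatal(err)
	}
	packages.Visit(pkgs, nil, func(pkg *packages.Package) {
		// Dependencies have already been visited (and loaded) at this point.
		if err := ld.LoadFromExport(pkg); err != nil {
			if err := ld.LoadFromSource(pkg); err != nil {
				log.Printf("loading %s: %v", pkg.ID, err)
			}
		}
	})
}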
223 vendor/honnef.co/go/tools/simple/analysis.go vendored Normal file
@@ -0,0 +1,223 @@
|
||||||
|
package simple
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/analysis"
|
||||||
|
"golang.org/x/tools/go/analysis/passes/inspect"
|
||||||
|
"honnef.co/go/tools/facts"
|
||||||
|
"honnef.co/go/tools/internal/passes/buildssa"
|
||||||
|
"honnef.co/go/tools/lint/lintutil"
|
||||||
|
)
|
||||||
|
|
||||||
|
func newFlagSet() flag.FlagSet {
|
||||||
|
fs := flag.NewFlagSet("", flag.PanicOnError)
|
||||||
|
fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version")
|
||||||
|
return *fs
|
||||||
|
}
|
||||||
|
|
||||||
|
var Analyzers = map[string]*analysis.Analyzer{
|
||||||
|
"S1000": {
|
||||||
|
Name: "S1000",
|
||||||
|
Run: LintSingleCaseSelect,
|
||||||
|
Doc: Docs["S1000"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1001": {
|
||||||
|
Name: "S1001",
|
||||||
|
Run: LintLoopCopy,
|
||||||
|
Doc: Docs["S1001"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1002": {
|
||||||
|
Name: "S1002",
|
||||||
|
Run: LintIfBoolCmp,
|
||||||
|
Doc: Docs["S1002"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1003": {
|
||||||
|
Name: "S1003",
|
||||||
|
Run: LintStringsContains,
|
||||||
|
Doc: Docs["S1003"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1004": {
|
||||||
|
Name: "S1004",
|
||||||
|
Run: LintBytesCompare,
|
||||||
|
Doc: Docs["S1004"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1005": {
|
||||||
|
Name: "S1005",
|
||||||
|
Run: LintUnnecessaryBlank,
|
||||||
|
Doc: Docs["S1005"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1006": {
|
||||||
|
Name: "S1006",
|
||||||
|
Run: LintForTrue,
|
||||||
|
Doc: Docs["S1006"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1007": {
|
||||||
|
Name: "S1007",
|
||||||
|
Run: LintRegexpRaw,
|
||||||
|
Doc: Docs["S1007"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1008": {
|
||||||
|
Name: "S1008",
|
||||||
|
Run: LintIfReturn,
|
||||||
|
Doc: Docs["S1008"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1009": {
|
||||||
|
Name: "S1009",
|
||||||
|
Run: LintRedundantNilCheckWithLen,
|
||||||
|
Doc: Docs["S1009"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1010": {
|
||||||
|
Name: "S1010",
|
||||||
|
Run: LintSlicing,
|
||||||
|
Doc: Docs["S1010"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1011": {
|
||||||
|
Name: "S1011",
|
||||||
|
Run: LintLoopAppend,
|
||||||
|
Doc: Docs["S1011"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1012": {
|
||||||
|
Name: "S1012",
|
||||||
|
Run: LintTimeSince,
|
||||||
|
Doc: Docs["S1012"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1016": {
|
||||||
|
Name: "S1016",
|
||||||
|
Run: LintSimplerStructConversion,
|
||||||
|
Doc: Docs["S1016"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1017": {
|
||||||
|
Name: "S1017",
|
||||||
|
Run: LintTrim,
|
||||||
|
Doc: Docs["S1017"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1018": {
|
||||||
|
Name: "S1018",
|
||||||
|
Run: LintLoopSlide,
|
||||||
|
Doc: Docs["S1018"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1019": {
|
||||||
|
Name: "S1019",
|
||||||
|
Run: LintMakeLenCap,
|
||||||
|
Doc: Docs["S1019"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1020": {
|
||||||
|
Name: "S1020",
|
||||||
|
Run: LintAssertNotNil,
|
||||||
|
Doc: Docs["S1020"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1021": {
|
||||||
|
Name: "S1021",
|
||||||
|
Run: LintDeclareAssign,
|
||||||
|
Doc: Docs["S1021"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1023": {
|
||||||
|
Name: "S1023",
|
||||||
|
Run: LintRedundantBreak,
|
||||||
|
Doc: Docs["S1023"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1024": {
|
||||||
|
Name: "S1024",
|
||||||
|
Run: LintTimeUntil,
|
||||||
|
Doc: Docs["S1024"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1025": {
|
||||||
|
Name: "S1025",
|
||||||
|
Run: LintRedundantSprintf,
|
||||||
|
Doc: Docs["S1025"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1028": {
|
||||||
|
Name: "S1028",
|
||||||
|
Run: LintErrorsNewSprintf,
|
||||||
|
Doc: Docs["S1028"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1029": {
|
||||||
|
Name: "S1029",
|
||||||
|
Run: LintRangeStringRunes,
|
||||||
|
Doc: Docs["S1029"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1030": {
|
||||||
|
Name: "S1030",
|
||||||
|
Run: LintBytesBufferConversions,
|
||||||
|
Doc: Docs["S1030"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1031": {
|
||||||
|
Name: "S1031",
|
||||||
|
Run: LintNilCheckAroundRange,
|
||||||
|
Doc: Docs["S1031"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1032": {
|
||||||
|
Name: "S1032",
|
||||||
|
Run: LintSortHelpers,
|
||||||
|
Doc: Docs["S1032"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1033": {
|
||||||
|
Name: "S1033",
|
||||||
|
Run: LintGuardedDelete,
|
||||||
|
Doc: Docs["S1033"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"S1034": {
|
||||||
|
Name: "S1034",
|
||||||
|
Run: LintSimplifyTypeSwitch,
|
||||||
|
Doc: Docs["S1034"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
}
|
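The Analyzers map registers each check as a standard go/analysis Analyzer, so any go/analysis driver can run them. A sketch, assuming the upstream multichecker driver from golang.org/x/tools, of one way a caller could consume the map:

package main

import (
	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/multichecker"

	"honnef.co/go/tools/simple"
)

func main() {
	var as []*analysis.Analyzer
	for _, a := range simple.Analyzers {
		as = append(as, a)
	}
	// multichecker.Main builds a command-line checker from the analyzers.
	multichecker.Main(as...)
}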
629 vendor/honnef.co/go/tools/simple/doc.go vendored
@@ -1,426 +1,425 @@
|
||||||
package simple
|
package simple
|
||||||
|
|
||||||
var docS1000 = `Use plain channel send or receive
|
import "honnef.co/go/tools/lint"
|
||||||
|
|
||||||
Select statements with a single case can be replaced with a simple send or receive.
|
var Docs = map[string]*lint.Documentation{
|
||||||
|
"S1000": &lint.Documentation{
|
||||||
|
Title: `Use plain channel send or receive instead of single-case select`,
|
||||||
|
Text: `Select statements with a single case can be replaced with a simple
|
||||||
|
send or receive.
|
||||||
|
|
||||||
Before:
|
Before:
|
||||||
|
|
||||||
select {
|
select {
|
||||||
case x := <-ch:
|
case x := <-ch:
|
||||||
fmt.Println(x)
|
fmt.Println(x)
|
||||||
}
|
}
|
||||||
|
|
||||||
After:
|
After:
|
||||||
|
|
||||||
x := <-ch
|
x := <-ch
|
||||||
fmt.Println(x)
|
fmt.Println(x)`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"S1001": &lint.Documentation{
|
||||||
2017.1
|
Title: `Replace for loop with call to copy`,
|
||||||
`
|
Text: `Use copy() for copying elements from one slice to another.
|
||||||
|
|
||||||
var docS1001 = `Replace with copy()
|
|
||||||
|
|
||||||
Use copy() for copying elements from one slice to another.
|
|
||||||
|
|
||||||
Before:
|
Before:
|
||||||
|
|
||||||
for i, x := range src {
|
for i, x := range src {
|
||||||
dst[i] = x
|
dst[i] = x
|
||||||
}
|
}
|
||||||
|
|
||||||
After:
|
After:
|
||||||
|
|
||||||
copy(dst, src)
|
copy(dst, src)`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"S1002": &lint.Documentation{
|
||||||
2017.1
|
Title: `Omit comparison with boolean constant`,
|
||||||
`
|
Text: `Before:
|
||||||
|
|
||||||
var docS1002 = `Omit comparison with boolean constant
|
if x == true {}
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
if x {}`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
|
"S1003": &lint.Documentation{
|
||||||
|
Title: `Replace call to strings.Index with strings.Contains`,
|
||||||
|
Text: `Before:
|
||||||
|
|
||||||
|
if strings.Index(x, y) != -1 {}
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
if strings.Contains(x, y) {}`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
|
"S1004": &lint.Documentation{
|
||||||
|
Title: `Replace call to bytes.Compare with bytes.Equal`,
|
||||||
|
Text: `Before:
|
||||||
|
|
||||||
|
if bytes.Compare(x, y) == 0 {}
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
if bytes.Equal(x, y) {}`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
|
"S1005": &lint.Documentation{
|
||||||
|
Title: `Drop unnecessary use of the blank identifier`,
|
||||||
|
Text: `In many cases, assigning to the blank identifier is unnecessary.
|
||||||
|
|
||||||
Before:
|
Before:
|
||||||
|
|
||||||
if x == true {}
|
for _ = range s {}
|
||||||
|
x, _ = someMap[key]
|
||||||
|
_ = <-ch
|
||||||
|
|
||||||
After:
|
After:
|
||||||
|
|
||||||
if x {}
|
for range s{}
|
||||||
|
x = someMap[key]
|
||||||
|
<-ch`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"S1006": &lint.Documentation{
|
||||||
2017.1
|
Title: `Use for { ... } for infinite loops`,
|
||||||
`
|
Text: `For infinite loops, using for { ... } is the most idiomatic choice.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docS1003 = `Replace with strings.Contains
|
"S1007": &lint.Documentation{
|
||||||
|
Title: `Simplify regular expression by using raw string literal`,
|
||||||
|
Text: `Raw string literals use ` + "`" + ` instead of " and do not support
|
||||||
|
any escape sequences. This means that the backslash (\) can be used
|
||||||
|
freely, without the need of escaping.
|
||||||
|
|
||||||
|
Since regular expressions have their own escape sequences, raw strings
|
||||||
|
can improve their readability.
|
||||||
|
|
||||||
Before:
|
Before:
|
||||||
|
|
||||||
if strings.Index(x, y) != -1 {}
|
regexp.Compile("\\A(\\w+) profile: total \\d+\\n\\z")
|
||||||
|
|
||||||
After:
|
After:
|
||||||
|
|
||||||
if strings.Contains(x, y) {}
|
regexp.Compile(` + "`" + `\A(\w+) profile: total \d+\n\z` + "`" + `)`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"S1008": &lint.Documentation{
|
||||||
2017.1
|
Title: `Simplify returning boolean expression`,
|
||||||
`
|
Text: `Before:
|
||||||
|
|
||||||
var docS1004 = `Replace with bytes.Equal
|
if <expr> {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
return <expr>`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
|
"S1009": &lint.Documentation{
|
||||||
|
Title: `Omit redundant nil check on slices`,
|
||||||
|
Text: `The len function is defined for all slices, even nil ones, which have
|
||||||
|
a length of zero. It is not necessary to check if a slice is not nil
|
||||||
|
before checking that its length is not zero.
|
||||||
|
|
||||||
Before:
|
Before:
|
||||||
|
|
||||||
if bytes.Compare(x, y) == 0 {}
|
if x != nil && len(x) != 0 {}
|
||||||
|
|
||||||
After:
|
After:
|
||||||
|
|
||||||
if bytes.Equal(x, y) {}
|
if len(x) != 0 {}`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"S1010": &lint.Documentation{
|
||||||
2017.1
|
Title: `Omit default slice index`,
|
||||||
`
|
Text: `When slicing, the second index defaults to the length of the value,
|
||||||
|
making s[n:len(s)] and s[n:] equivalent.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docS1005 = `Drop unnecessary use of the blank identifier
|
"S1011": &lint.Documentation{
|
||||||
|
Title: `Use a single append to concatenate two slices`,
|
||||||
|
Text: `Before:
|
||||||
|
|
||||||
In many cases, assigning to the blank identifier is unnecessary.
|
for _, e := range y {
|
||||||
|
x = append(x, e)
|
||||||
|
}
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
x = append(x, y...)`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
|
"S1012": &lint.Documentation{
|
||||||
|
Title: `Replace time.Now().Sub(x) with time.Since(x)`,
|
||||||
|
Text: `The time.Since helper has the same effect as using time.Now().Sub(x)
|
||||||
|
but is easier to read.
|
||||||
|
|
||||||
Before:
|
Before:
|
||||||
|
|
||||||
for _ = range s {}
|
time.Now().Sub(x)
|
||||||
x, _ = someMap[key]
|
|
||||||
_ = <-ch
|
|
||||||
|
|
||||||
After:
|
After:
|
||||||
|
|
||||||
for range s{}
|
time.Since(x)`,
|
||||||
x = someMap[key]
|
Since: "2017.1",
|
||||||
<-ch
|
},
|
||||||
|
|
||||||
Available since
|
"S1016": &lint.Documentation{
|
||||||
2017.1
|
Title: `Use a type conversion instead of manually copying struct fields`,
|
||||||
`
|
Text: `Two struct types with identical fields can be converted between each
|
||||||
|
other. In older versions of Go, the fields had to have identical
|
||||||
var docS1006 = `Replace with for { ... }
|
struct tags. Since Go 1.8, however, struct tags are ignored during
|
||||||
|
conversions. It is thus not necessary to manually copy every field
|
||||||
For infinite loops, using for { ... } is the most idiomatic choice.
|
individually.
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docS1007 = `Simplify regular expression by using raw string literal
|
|
||||||
|
|
||||||
Raw string literals use ` + "`" + ` instead of " and do not support any escape sequences. This means that the backslash (\) can be used freely, without the need of escaping.
|
|
||||||
|
|
||||||
Since regular expressions have their own escape sequences, raw strings can improve their readability.
|
|
||||||
|
|
||||||
Before:
|
Before:
|
||||||
|
|
||||||
regexp.Compile("\\A(\\w+) profile: total \\d+\\n\\z")
|
var x T1
|
||||||
|
y := T2{
|
||||||
|
Field1: x.Field1,
|
||||||
|
Field2: x.Field2,
|
||||||
|
}
|
||||||
|
|
||||||
After:
|
After:
|
||||||
|
|
||||||
regexp.Compile(` + "`" + `\A(\w+) profile: total \d+\n\z` + "`" + `)
|
var x T1
|
||||||
|
y := T2(x)`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"S1017": &lint.Documentation{
|
||||||
2017.1
|
Title: `Replace manual trimming with strings.TrimPrefix`,
|
||||||
`
|
Text: `Instead of using strings.HasPrefix and manual slicing, use the
|
||||||
|
strings.TrimPrefix function. If the string doesn't start with the
|
||||||
var docS1008 = `Simplify returning boolean expression
|
prefix, the original string will be returned. Using strings.TrimPrefix
|
||||||
|
reduces complexity, and avoids common bugs, such as off-by-one
|
||||||
|
mistakes.
|
||||||
|
|
||||||
Before:
|
Before:
|
||||||
|
|
||||||
if <expr> {
|
if strings.HasPrefix(str, prefix) {
|
||||||
return true
|
str = str[len(prefix):]
|
||||||
}
|
}
|
||||||
return false
|
|
||||||
|
|
||||||
After:
|
After:
|
||||||
|
|
||||||
return <expr>
|
str = strings.TrimPrefix(str, prefix)`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"S1018": &lint.Documentation{
|
||||||
2017.1
|
Title: `Use copy for sliding elements`,
|
||||||
`
|
Text: `copy() permits using the same source and destination slice, even with
|
||||||
|
overlapping ranges. This makes it ideal for sliding elements in a
|
||||||
var docS1009 = `Omit redundant nil check on slices
|
slice.
|
||||||
|
|
||||||
The len function is defined for all slices, even nil ones, which have a length of zero. It is not necessary to check if a slice is not nil before checking that its length is not zero.
|
|
||||||
|
|
||||||
Before:
|
Before:
|
||||||
|
|
||||||
if x != nil && len(x) != 0 {}
|
for i := 0; i < n; i++ {
|
||||||
|
bs[i] = bs[offset+i]
|
||||||
|
}
|
||||||
|
|
||||||
After:
|
After:
|
||||||
|
|
||||||
if len(x) != 0 {}
|
copy(bs[:n], bs[offset:])`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"S1019": &lint.Documentation{
|
||||||
2017.1
|
Title: `Simplify make call by omitting redundant arguments`,
|
||||||
`
|
Text: `The make function has default values for the length and capacity
|
||||||
|
arguments. For channels and maps, the length defaults to zero.
|
||||||
|
Additionally, for slices the capacity defaults to the length.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docS1010 = `Omit default slice index
|
"S1020": &lint.Documentation{
|
||||||
|
Title: `Omit redundant nil check in type assertion`,
|
||||||
|
Text: `Before:
|
||||||
|
|
||||||
When slicing, the second index defaults to the length of the value, making s[n:len(s)] and s[n:] equivalent.
|
if _, ok := i.(T); ok && i != nil {}
|
||||||
|
|
||||||
Available since
|
After:
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docS1011 = `Use a single append to concatenate two slices
|
if _, ok := i.(T); ok {}`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
|
"S1021": &lint.Documentation{
|
||||||
|
Title: `Merge variable declaration and assignment`,
|
||||||
|
Text: `Before:
|
||||||
|
|
||||||
|
var x uint
|
||||||
|
x = 1
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
var x uint = 1`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
|
"S1023": &lint.Documentation{
|
||||||
|
Title: `Omit redundant control flow`,
|
||||||
|
Text: `Functions that have no return value do not need a return statement as
|
||||||
|
the final statement of the function.
|
||||||
|
|
||||||
|
Switches in Go do not have automatic fallthrough, unlike languages
|
||||||
|
like C. It is not necessary to have a break statement as the final
|
||||||
|
statement in a case block.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
|
"S1024": &lint.Documentation{
|
||||||
|
Title: `Replace x.Sub(time.Now()) with time.Until(x)`,
|
||||||
|
Text: `The time.Until helper has the same effect as using x.Sub(time.Now())
|
||||||
|
but is easier to read.
|
||||||
|
|
||||||
Before:
|
Before:
|
||||||
|
|
||||||
for _, e := range y {
|
x.Sub(time.Now())
|
||||||
x = append(x, e)
|
|
||||||
}
|
|
||||||
|
|
||||||
After:
|
After:
|
||||||
|
|
||||||
x = append(x, y...)
|
time.Until(x)`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"S1025": &lint.Documentation{
|
||||||
2017.1
|
Title: `Don't use fmt.Sprintf("%s", x) unnecessarily`,
|
||||||
`
|
Text: `In many instances, there are easier and more efficient ways of getting
|
||||||
|
a value's string representation. Whenever a value's underlying type is
|
||||||
var docS1012 = `Replace with time.Since(x)
|
a string already, or the type has a String method, they should be used
|
||||||
|
directly.
|
||||||
The time.Since helper has the same effect as using time.Now().Sub(x) but is easier to read.
|
|
||||||
|
|
||||||
Before:
|
|
||||||
|
|
||||||
time.Now().Sub(x)
|
|
||||||
|
|
||||||
After:
|
|
||||||
|
|
||||||
time.Since(x)
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docS1016 = `Use a type conversion
|
|
||||||
|
|
||||||
Two struct types with identical fields can be converted between each other. In older versions of Go, the fields had to have identical struct tags. Since Go 1.8, however, struct tags are ignored during conversions. It is thus not necessary to manually copy every field individually.
|
|
||||||
|
|
||||||
Before:
|
|
||||||
|
|
||||||
var x T1
|
|
||||||
y := T2{
|
|
||||||
Field1: x.Field1,
|
|
||||||
Field2: x.Field2,
|
|
||||||
}
|
|
||||||
|
|
||||||
After:
|
|
||||||
|
|
||||||
var x T1
|
|
||||||
y := T2(x)
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docS1017 = `Replace with strings.TrimPrefix
|
|
||||||
|
|
||||||
Instead of using strings.HasPrefix and manual slicing, use the strings.TrimPrefix function. If the string doesn't start with the prefix, the original string will be returned. Using strings.TrimPrefix reduces complexity, and avoids common bugs, such as off-by-one mistakes.
|
|
||||||
|
|
||||||
Before:
|
|
||||||
|
|
||||||
if strings.HasPrefix(str, prefix) {
|
|
||||||
str = str[len(prefix):]
|
|
||||||
}
|
|
||||||
|
|
||||||
After:
|
|
||||||
|
|
||||||
str = strings.TrimPrefix(str, prefix)
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docS1018 = `Replace with copy()
|
|
||||||
|
|
||||||
copy() permits using the same source and destination slice, even with overlapping ranges. This makes it ideal for sliding elements in a slice.
|
|
||||||
|
|
||||||
Before:
|
|
||||||
|
|
||||||
for i := 0; i < n; i++ {
|
|
||||||
bs[i] = bs[offset+i]
|
|
||||||
}
|
|
||||||
|
|
||||||
After:
|
|
||||||
|
|
||||||
copy(bs[:n], bs[offset:])
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docS1019 = `Simplify make call
|
|
||||||
|
|
||||||
The make function has default values for the length and capacity arguments. For channels and maps, the length defaults to zero. Additionally, for slices the capacity defaults to the length.
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docS1020 = `Omit redundant nil check in type assertion
|
|
||||||
|
|
||||||
Before:
|
|
||||||
|
|
||||||
if _, ok := i.(T); ok && i != nil {}
|
|
||||||
|
|
||||||
After:
|
|
||||||
|
|
||||||
if _, ok := i.(T); ok {}
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docS1021 = `Merge variable declaration and assignment
|
|
||||||
|
|
||||||
Before:
|
|
||||||
|
|
||||||
var x uint
|
|
||||||
x = 1
|
|
||||||
|
|
||||||
After:
|
|
||||||
|
|
||||||
var x uint = 1
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
var docS1023 = `Omit redundant control flow
|
|
||||||
|
|
||||||
Functions that have no return value do not need a return statement as the final statement of the function.
|
|
||||||
|
|
||||||
Switches in Go do not have automatic fallthrough, unlike languages like C. It is not necessary to have a break statement as the final statement in a case block.
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docS1024 = `Replace with time.Until(x)
|
|
||||||
|
|
||||||
The time.Until helper has the same effect as using x.Sub(time.Now()) but is easier to read.
|
|
||||||
|
|
||||||
Before:
|
|
||||||
|
|
||||||
x.Sub(time.Now())
|
|
||||||
|
|
||||||
After:
|
|
||||||
|
|
||||||
time.Until(x)
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docS1025 = `Don't use fmt.Sprintf("%s", x) unnecessarily
|
|
||||||
|
|
||||||
In many instances, there are easier and more efficient ways of getting a value's string representation. Whenever a value's underlying type is a string already, or the type has a String method, they should be used directly.
|
|
||||||
|
|
||||||
Given the following shared definitions
|
Given the following shared definitions
|
||||||
|
|
||||||
type T1 string
|
type T1 string
|
||||||
type T2 int
|
type T2 int
|
||||||
|
|
||||||
func (T2) String() string { return "Hello, world" }
|
func (T2) String() string { return "Hello, world" }
|
||||||
|
|
||||||
var x string
|
var x string
|
||||||
var y T1
|
var y T1
|
||||||
var z T2
|
var z T2
|
||||||
|
|
||||||
we can simplify the following
|
we can simplify the following
|
||||||
|
|
||||||
fmt.Sprintf("%s", x)
|
fmt.Sprintf("%s", x)
|
||||||
fmt.Sprintf("%s", y)
|
fmt.Sprintf("%s", y)
|
||||||
fmt.Sprintf("%s", z)
|
fmt.Sprintf("%s", z)
|
||||||
|
|
||||||
to
|
to
|
||||||
|
|
||||||
x
|
x
|
||||||
string(y)
|
string(y)
|
||||||
z.String()
|
z.String()`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"S1028": &lint.Documentation{
|
||||||
2017.1
|
Title: `Simplify error construction with fmt.Errorf`,
|
||||||
`
|
Text: `Before:
|
||||||
|
|
||||||
var docS1028 = `replace with fmt.Errorf
|
errors.New(fmt.Sprintf(...))
|
||||||
|
|
||||||
Before:
|
|
||||||
|
|
||||||
errors.New(fmt.Sprintf(...))
|
|
||||||
|
|
||||||
After:
|
After:
|
||||||
|
|
||||||
fmt.Errorf(...)
|
fmt.Errorf(...)`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"S1029": &lint.Documentation{
|
||||||
2017.1
|
Title: `Range over the string directly`,
|
||||||
`
|
Text: `Ranging over a string will yield byte offsets and runes. If the offset
|
||||||
|
isn't used, this is functionally equivalent to converting the string
|
||||||
var docS1029 = `Range over the string
|
to a slice of runes and ranging over that. Ranging directly over the
|
||||||
|
string will be more performant, however, as it avoids allocating a new
|
||||||
Ranging over a string will yield byte offsets and runes. If the offset isn't used, this is functionally equivalent to converting the string to a slice of runes and ranging over that. Ranging directly over the string will be more performant, however, as it avoids allocating a new slice, the size of which depends on the length of the string.
|
slice, the size of which depends on the length of the string.
|
||||||
|
|
||||||
Before:
|
Before:
|
||||||
|
|
||||||
for _, r := range []rune(s) {}
|
for _, r := range []rune(s) {}
|
||||||
|
|
||||||
After:
|
After:
|
||||||
|
|
||||||
for _, r := range s {}
|
for _, r := range s {}`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"S1030": &lint.Documentation{
|
||||||
2017.1
|
Title: `Use bytes.Buffer.String or bytes.Buffer.Bytes`,
|
||||||
`
|
Text: `bytes.Buffer has both a String and a Bytes method. It is never
|
||||||
|
necessary to use string(buf.Bytes()) or []byte(buf.String()) – simply
|
||||||
|
use the other method.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docS1030 = `Use bytes.Buffer.String or bytes.Buffer.Bytes
|
"S1031": &lint.Documentation{
|
||||||
|
Title: `Omit redundant nil check around loop`,
|
||||||
bytes.Buffer has both a String and a Bytes method. It is never necessary to use string(buf.Bytes()) or []byte(buf.String()) – simply use the other method.
|
Text: `You can use range on nil slices and maps, the loop will simply never
|
||||||
|
execute. This makes an additional nil check around the loop
|
||||||
Available since
|
unnecessary.
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docS1031 = `Omit redundant nil check around loop
|
|
||||||
|
|
||||||
You can use range on nil slices and maps, the loop will simply never execute. This makes an additional nil check around the loop unnecessary.
|
|
||||||
|
|
||||||
Before:
|
Before:
|
||||||
|
|
||||||
if s != nil {
|
if s != nil {
|
||||||
for _, x := range s {
|
for _, x := range s {
|
||||||
...
|
...
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
for _, x := range s {
|
||||||
|
...
|
||||||
|
}`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
|
"S1032": &lint.Documentation{
|
||||||
|
Title: `Use sort.Ints(x), sort.Float64s(x), and sort.Strings(x)`,
|
||||||
|
Text: `The sort.Ints, sort.Float64s and sort.Strings functions are easier to
|
||||||
|
read than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x))
|
||||||
|
and sort.Sort(sort.StringSlice(x)).
|
||||||
|
|
||||||
|
Before:
|
||||||
|
|
||||||
|
sort.Sort(sort.StringSlice(x))
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
sort.Strings(x)`,
|
||||||
|
Since: "2019.1",
|
||||||
|
},
|
||||||
|
|
||||||
|
"S1033": &lint.Documentation{
|
||||||
|
Title: `Unnecessary guard around call to delete`,
|
||||||
|
Text: `Calling delete on a nil map is a no-op.`,
|
||||||
|
Since: "2019.2",
|
||||||
|
},
|
||||||
|
|
||||||
|
"S1034": &lint.Documentation{
|
||||||
|
Title: `Use result of type assertion to simplify cases`,
|
||||||
|
Since: "2019.2",
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
After:
|
|
||||||
|
|
||||||
for _, x := range s {
|
|
||||||
...
|
|
||||||
}
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docS1032 = `Replace with sort.Ints(x), sort.Float64s(x), sort.Strings(x)
|
|
||||||
|
|
||||||
The sort.Ints, sort.Float64s and sort.Strings functions are easier to read than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x)) and sort.Sort(sort.StringSlice(x)).
|
|
||||||
|
|
||||||
Before:
|
|
||||||
|
|
||||||
sort.Sort(sort.StringSlice(x))
|
|
||||||
|
|
||||||
After:
|
|
||||||
|
|
||||||
sort.Strings(x)
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2019.1
|
|
||||||
`
|
|
||||||
|
|
525 vendor/honnef.co/go/tools/simple/lint.go vendored
File diff suppressed because it is too large
18 vendor/honnef.co/go/tools/ssa/builder.go vendored
@@ -32,7 +32,7 @@ package ssa
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"go/ast"
|
"go/ast"
|
||||||
exact "go/constant"
|
"go/constant"
|
||||||
"go/token"
|
"go/token"
|
||||||
"go/types"
|
"go/types"
|
||||||
"os"
|
"os"
|
||||||
|
@ -58,12 +58,12 @@ var (
|
||||||
tString = types.Typ[types.String]
|
tString = types.Typ[types.String]
|
||||||
tUntypedNil = types.Typ[types.UntypedNil]
|
tUntypedNil = types.Typ[types.UntypedNil]
|
||||||
tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators
|
tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators
|
||||||
tEface = types.NewInterface(nil, nil).Complete()
|
tEface = types.NewInterfaceType(nil, nil).Complete()
|
||||||
|
|
||||||
// SSA Value constants.
|
// SSA Value constants.
|
||||||
vZero = intConst(0)
|
vZero = intConst(0)
|
||||||
vOne = intConst(1)
|
vOne = intConst(1)
|
||||||
vTrue = NewConst(exact.MakeBool(true), tBool)
|
vTrue = NewConst(constant.MakeBool(true), tBool)
|
||||||
)
|
)
|
||||||
|
|
||||||
// builder holds state associated with the package currently being built.
|
// builder holds state associated with the package currently being built.
|
||||||
|
@ -131,11 +131,11 @@ func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
|
||||||
switch e.Op {
|
switch e.Op {
|
||||||
case token.LAND:
|
case token.LAND:
|
||||||
b.cond(fn, e.X, rhs, done)
|
b.cond(fn, e.X, rhs, done)
|
||||||
short = NewConst(exact.MakeBool(false), t)
|
short = NewConst(constant.MakeBool(false), t)
|
||||||
|
|
||||||
case token.LOR:
|
case token.LOR:
|
||||||
b.cond(fn, e.X, done, rhs)
|
b.cond(fn, e.X, done, rhs)
|
||||||
short = NewConst(exact.MakeBool(true), t)
|
short = NewConst(constant.MakeBool(true), t)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Is rhs unreachable?
|
// Is rhs unreachable?
|
||||||
|
@ -969,10 +969,10 @@ func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
|
||||||
c.Args = b.emitCallArgs(fn, sig, e, c.Args)
|
c.Args = b.emitCallArgs(fn, sig, e, c.Args)
|
||||||
}
|
}
|
||||||
|
|
||||||
// assignOp emits to fn code to perform loc += incr or loc -= incr.
|
// assignOp emits to fn code to perform loc <op>= val.
|
||||||
func (b *builder) assignOp(fn *Function, loc lvalue, incr Value, op token.Token, pos token.Pos) {
|
func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) {
|
||||||
oldv := loc.load(fn)
|
oldv := loc.load(fn)
|
||||||
loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, incr, oldv.Type()), loc.typ(), pos))
|
loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type()), loc.typ(), pos))
|
||||||
}
|
}
|
||||||
|
|
||||||
// localValueSpec emits to fn code to define all of the vars in the
|
// localValueSpec emits to fn code to define all of the vars in the
|
||||||
|
@ -1998,7 +1998,7 @@ start:
|
||||||
op = token.SUB
|
op = token.SUB
|
||||||
}
|
}
|
||||||
loc := b.addr(fn, s.X, false)
|
loc := b.addr(fn, s.X, false)
|
||||||
b.assignOp(fn, loc, NewConst(exact.MakeInt64(1), loc.typ()), op, s.Pos())
|
b.assignOp(fn, loc, NewConst(constant.MakeInt64(1), loc.typ()), op, s.Pos())
|
||||||
|
|
||||||
case *ast.AssignStmt:
|
case *ast.AssignStmt:
|
||||||
switch s.Tok {
|
switch s.Tok {
|
||||||
|
|
44 vendor/honnef.co/go/tools/ssa/const.go vendored
@@ -8,7 +8,7 @@ package ssa
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
exact "go/constant"
|
"go/constant"
|
||||||
"go/token"
|
"go/token"
|
||||||
"go/types"
|
"go/types"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
@ -17,14 +17,14 @@ import (
|
||||||
// NewConst returns a new constant of the specified value and type.
|
// NewConst returns a new constant of the specified value and type.
|
||||||
// val must be valid according to the specification of Const.Value.
|
// val must be valid according to the specification of Const.Value.
|
||||||
//
|
//
|
||||||
func NewConst(val exact.Value, typ types.Type) *Const {
|
func NewConst(val constant.Value, typ types.Type) *Const {
|
||||||
return &Const{typ, val}
|
return &Const{typ, val}
|
||||||
}
|
}
|
||||||
|
|
||||||
// intConst returns an 'int' constant that evaluates to i.
|
// intConst returns an 'int' constant that evaluates to i.
|
||||||
// (i is an int64 in case the host is narrower than the target.)
|
// (i is an int64 in case the host is narrower than the target.)
|
||||||
func intConst(i int64) *Const {
|
func intConst(i int64) *Const {
|
||||||
return NewConst(exact.MakeInt64(i), tInt)
|
return NewConst(constant.MakeInt64(i), tInt)
|
||||||
}
|
}
|
||||||
|
|
||||||
// nilConst returns a nil constant of the specified type, which may
|
// nilConst returns a nil constant of the specified type, which may
|
||||||
|
@ -36,7 +36,7 @@ func nilConst(typ types.Type) *Const {
|
||||||
|
|
||||||
// stringConst returns a 'string' constant that evaluates to s.
|
// stringConst returns a 'string' constant that evaluates to s.
|
||||||
func stringConst(s string) *Const {
|
func stringConst(s string) *Const {
|
||||||
return NewConst(exact.MakeString(s), tString)
|
return NewConst(constant.MakeString(s), tString)
|
||||||
}
|
}
|
||||||
|
|
||||||
// zeroConst returns a new "zero" constant of the specified type,
|
// zeroConst returns a new "zero" constant of the specified type,
|
||||||
|
@ -48,11 +48,11 @@ func zeroConst(t types.Type) *Const {
|
||||||
case *types.Basic:
|
case *types.Basic:
|
||||||
switch {
|
switch {
|
||||||
case t.Info()&types.IsBoolean != 0:
|
case t.Info()&types.IsBoolean != 0:
|
||||||
return NewConst(exact.MakeBool(false), t)
|
return NewConst(constant.MakeBool(false), t)
|
||||||
case t.Info()&types.IsNumeric != 0:
|
case t.Info()&types.IsNumeric != 0:
|
||||||
return NewConst(exact.MakeInt64(0), t)
|
return NewConst(constant.MakeInt64(0), t)
|
||||||
case t.Info()&types.IsString != 0:
|
case t.Info()&types.IsString != 0:
|
||||||
return NewConst(exact.MakeString(""), t)
|
return NewConst(constant.MakeString(""), t)
|
||||||
case t.Kind() == types.UnsafePointer:
|
case t.Kind() == types.UnsafePointer:
|
||||||
fallthrough
|
fallthrough
|
||||||
case t.Kind() == types.UntypedNil:
|
case t.Kind() == types.UntypedNil:
|
||||||
|
@ -74,8 +74,8 @@ func (c *Const) RelString(from *types.Package) string {
|
||||||
var s string
|
var s string
|
||||||
if c.Value == nil {
|
if c.Value == nil {
|
||||||
s = "nil"
|
s = "nil"
|
||||||
} else if c.Value.Kind() == exact.String {
|
} else if c.Value.Kind() == constant.String {
|
||||||
s = exact.StringVal(c.Value)
|
s = constant.StringVal(c.Value)
|
||||||
const max = 20
|
const max = 20
|
||||||
// TODO(adonovan): don't cut a rune in half.
|
// TODO(adonovan): don't cut a rune in half.
|
||||||
if len(s) > max {
|
if len(s) > max {
|
||||||
|
@ -121,14 +121,14 @@ func (c *Const) IsNil() bool {
|
||||||
// a signed 64-bit integer.
|
// a signed 64-bit integer.
|
||||||
//
|
//
|
||||||
func (c *Const) Int64() int64 {
|
func (c *Const) Int64() int64 {
|
||||||
switch x := exact.ToInt(c.Value); x.Kind() {
|
switch x := constant.ToInt(c.Value); x.Kind() {
|
||||||
case exact.Int:
|
case constant.Int:
|
||||||
if i, ok := exact.Int64Val(x); ok {
|
if i, ok := constant.Int64Val(x); ok {
|
||||||
return i
|
return i
|
||||||
}
|
}
|
||||||
return 0
|
return 0
|
||||||
case exact.Float:
|
case constant.Float:
|
||||||
f, _ := exact.Float64Val(x)
|
f, _ := constant.Float64Val(x)
|
||||||
return int64(f)
|
return int64(f)
|
||||||
}
|
}
|
||||||
panic(fmt.Sprintf("unexpected constant value: %T", c.Value))
|
panic(fmt.Sprintf("unexpected constant value: %T", c.Value))
|
||||||
|
@ -138,14 +138,14 @@ func (c *Const) Int64() int64 {
|
||||||
// an unsigned 64-bit integer.
|
// an unsigned 64-bit integer.
|
||||||
//
|
//
|
||||||
func (c *Const) Uint64() uint64 {
|
func (c *Const) Uint64() uint64 {
|
||||||
switch x := exact.ToInt(c.Value); x.Kind() {
|
switch x := constant.ToInt(c.Value); x.Kind() {
|
||||||
case exact.Int:
|
case constant.Int:
|
||||||
if u, ok := exact.Uint64Val(x); ok {
|
if u, ok := constant.Uint64Val(x); ok {
|
||||||
return u
|
return u
|
||||||
}
|
}
|
||||||
return 0
|
return 0
|
||||||
case exact.Float:
|
case constant.Float:
|
||||||
f, _ := exact.Float64Val(x)
|
f, _ := constant.Float64Val(x)
|
||||||
return uint64(f)
|
return uint64(f)
|
||||||
}
|
}
|
||||||
panic(fmt.Sprintf("unexpected constant value: %T", c.Value))
|
panic(fmt.Sprintf("unexpected constant value: %T", c.Value))
|
||||||
|
@ -155,7 +155,7 @@ func (c *Const) Uint64() uint64 {
|
||||||
// a float64.
|
// a float64.
|
||||||
//
|
//
|
||||||
func (c *Const) Float64() float64 {
|
func (c *Const) Float64() float64 {
|
||||||
f, _ := exact.Float64Val(c.Value)
|
f, _ := constant.Float64Val(c.Value)
|
||||||
return f
|
return f
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -163,7 +163,7 @@ func (c *Const) Float64() float64 {
|
||||||
// fit a complex128.
|
// fit a complex128.
|
||||||
//
|
//
|
||||||
func (c *Const) Complex128() complex128 {
|
func (c *Const) Complex128() complex128 {
|
||||||
re, _ := exact.Float64Val(exact.Real(c.Value))
|
re, _ := constant.Float64Val(constant.Real(c.Value))
|
||||||
im, _ := exact.Float64Val(exact.Imag(c.Value))
|
im, _ := constant.Float64Val(constant.Imag(c.Value))
|
||||||
return complex(re, im)
|
return complex(re, im)
|
||||||
}
|
}
|
||||||
|
|
17 vendor/honnef.co/go/tools/ssa/create.go vendored
@@ -251,12 +251,19 @@ func (prog *Program) AllPackages() []*Package {
|
||||||
return pkgs
|
return pkgs
|
||||||
}
|
}
|
||||||
|
|
||||||
// ImportedPackage returns the importable SSA Package whose import
|
// ImportedPackage returns the importable Package whose PkgPath
|
||||||
// path is path, or nil if no such SSA package has been created.
|
// is path, or nil if no such Package has been created.
|
||||||
//
|
//
|
||||||
// Not all packages are importable. For example, no import
|
// A parameter to CreatePackage determines whether a package should be
|
||||||
// declaration can resolve to the x_test package created by 'go test'
|
// considered importable. For example, no import declaration can resolve
|
||||||
// or the ad-hoc main package created 'go build foo.go'.
|
// to the ad-hoc main package created by 'go build foo.go'.
|
||||||
|
//
|
||||||
|
// TODO(adonovan): rethink this function and the "importable" concept;
|
||||||
|
// most packages are importable. This function assumes that all
|
||||||
|
// types.Package.Path values are unique within the ssa.Program, which is
|
||||||
|
// false---yet this function remains very convenient.
|
||||||
|
// Clients should use (*Program).Package instead where possible.
|
||||||
|
// SSA doesn't really need a string-keyed map of packages.
|
||||||
//
|
//
|
||||||
func (prog *Program) ImportedPackage(path string) *Package {
|
func (prog *Program) ImportedPackage(path string) *Package {
|
||||||
return prog.imported[path]
|
return prog.imported[path]
|
||||||
|
|
12 vendor/honnef.co/go/tools/ssa/doc.go vendored
@@ -23,11 +23,13 @@
|
||||||
// such as multi-way branch can be reconstructed as needed; see
|
// such as multi-way branch can be reconstructed as needed; see
|
||||||
// ssautil.Switches() for an example.
|
// ssautil.Switches() for an example.
|
||||||
//
|
//
|
||||||
// To construct an SSA-form program, call ssautil.CreateProgram on a
|
// The simplest way to create the SSA representation of a package is
|
||||||
// loader.Program, a set of type-checked packages created from
|
// to load typed syntax trees using golang.org/x/tools/go/packages, then
|
||||||
// parsed Go source files. The resulting ssa.Program contains all the
|
// invoke the ssautil.Packages helper function. See ExampleLoadPackages
|
||||||
// packages and their members, but SSA code is not created for
|
// and ExampleWholeProgram for examples.
|
||||||
// function bodies until a subsequent call to (*Package).Build.
|
// The resulting ssa.Program contains all the packages and their
|
||||||
|
// members, but SSA code is not created for function bodies until a
|
||||||
|
// subsequent call to (*Package).Build or (*Program).Build.
|
||||||
//
|
//
|
||||||
// The builder initially builds a naive SSA form in which all local
|
// The builder initially builds a naive SSA form in which all local
|
||||||
// variables are addresses of stack locations with explicit loads and
|
// variables are addresses of stack locations with explicit loads and
|
||||||
|
|
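The rewritten doc comment describes building SSA from packages loaded with go/packages. A sketch of that flow, using the upstream golang.org/x/tools ssa and ssautil packages; the vendored fork is assumed to mirror their API:

package main

import (
	"log"

	"golang.org/x/tools/go/packages"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	cfg := &packages.Config{Mode: packages.LoadAllSyntax}
	initial, err := packages.Load(cfg, "./...")
	if err != nil {
		log.Fatal(err)
	}
	prog, _ := ssautil.Packages(initial, ssa.BuilderMode(0))
	// SSA code for function bodies is only created by an explicit Build call.
	prog.Build()
}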
6 vendor/honnef.co/go/tools/ssa/dom.go vendored
@@ -53,7 +53,7 @@ func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre
|
||||||
//
|
//
|
||||||
func (f *Function) DomPreorder() []*BasicBlock {
|
func (f *Function) DomPreorder() []*BasicBlock {
|
||||||
n := len(f.Blocks)
|
n := len(f.Blocks)
|
||||||
order := make(byDomPreorder, n, n)
|
order := make(byDomPreorder, n)
|
||||||
copy(order, f.Blocks)
|
copy(order, f.Blocks)
|
||||||
sort.Sort(order)
|
sort.Sort(order)
|
||||||
return order
|
return order
|
||||||
|
@ -123,7 +123,7 @@ func buildDomTree(f *Function) {
|
||||||
n := len(f.Blocks)
|
n := len(f.Blocks)
|
||||||
// Allocate space for 5 contiguous [n]*BasicBlock arrays:
|
// Allocate space for 5 contiguous [n]*BasicBlock arrays:
|
||||||
// sdom, parent, ancestor, preorder, buckets.
|
// sdom, parent, ancestor, preorder, buckets.
|
||||||
space := make([]*BasicBlock, 5*n, 5*n)
|
space := make([]*BasicBlock, 5*n)
|
||||||
lt := ltState{
|
lt := ltState{
|
||||||
sdom: space[0:n],
|
sdom: space[0:n],
|
||||||
parent: space[n : 2*n],
|
parent: space[n : 2*n],
|
||||||
|
@ -310,6 +310,7 @@ func sanityCheckDomTree(f *Function) {
|
||||||
// Printing functions ----------------------------------------
|
// Printing functions ----------------------------------------
|
||||||
|
|
||||||
// printDomTree prints the dominator tree as text, using indentation.
|
// printDomTree prints the dominator tree as text, using indentation.
|
||||||
|
//lint:ignore U1000 used during debugging
|
||||||
func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) {
|
func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) {
|
||||||
fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v)
|
fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v)
|
||||||
for _, child := range v.dom.children {
|
for _, child := range v.dom.children {
|
||||||
|
@ -319,6 +320,7 @@ func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) {
|
||||||
|
|
||||||
// printDomTreeDot prints the dominator tree of f in AT&T GraphViz
|
// printDomTreeDot prints the dominator tree of f in AT&T GraphViz
|
||||||
// (.dot) format.
|
// (.dot) format.
|
||||||
|
//lint:ignore U1000 used during debugging
|
||||||
func printDomTreeDot(buf *bytes.Buffer, f *Function) {
|
func printDomTreeDot(buf *bytes.Buffer, f *Function) {
|
||||||
fmt.Fprintln(buf, "//", f)
|
fmt.Fprintln(buf, "//", f)
|
||||||
fmt.Fprintln(buf, "digraph domtree {")
|
fmt.Fprintln(buf, "digraph domtree {")
|
||||||
|
|
1 vendor/honnef.co/go/tools/ssa/emit.go vendored
@@ -127,6 +127,7 @@ func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value {
|
||||||
x = emitConv(f, x, y.Type())
|
x = emitConv(f, x, y.Type())
|
||||||
} else if _, ok := y.(*Const); ok {
|
} else if _, ok := y.(*Const); ok {
|
||||||
y = emitConv(f, y, x.Type())
|
y = emitConv(f, y, x.Type())
|
||||||
|
//lint:ignore SA9003 no-op
|
||||||
} else {
|
} else {
|
||||||
// other cases, e.g. channels. No-op.
|
// other cases, e.g. channels. No-op.
|
||||||
}
|
}
|
||||||
|
|
64 vendor/honnef.co/go/tools/ssa/func.go vendored
@@ -328,6 +328,70 @@ func (f *Function) finishBody() {
|
||||||
}
|
}
|
||||||
f.Locals = f.Locals[:j]
|
f.Locals = f.Locals[:j]
|
||||||
|
|
||||||
|
// comma-ok receiving from a time.Tick channel will never return
|
||||||
|
// ok == false, so any branching on the value of ok can be
|
||||||
|
// replaced with an unconditional jump. This will primarily match
|
||||||
|
// `for range time.Tick(x)` loops, but it can also match
|
||||||
|
// user-written code.
|
||||||
|
for _, block := range f.Blocks {
|
||||||
|
if len(block.Instrs) < 3 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if len(block.Succs) != 2 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
var instrs []*Instruction
|
||||||
|
for i, ins := range block.Instrs {
|
||||||
|
if _, ok := ins.(*DebugRef); ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
instrs = append(instrs, &block.Instrs[i])
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, ins := range instrs {
|
||||||
|
unop, ok := (*ins).(*UnOp)
|
||||||
|
if !ok || unop.Op != token.ARROW {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
call, ok := unop.X.(*Call)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if call.Common().IsInvoke() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// OPT(dh): surely there is a more efficient way of doing
|
||||||
|
// this, than using FullName. We should already have
|
||||||
|
// resolved time.Tick somewhere?
|
||||||
|
v, ok := call.Common().Value.(*Function)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
t, ok := v.Object().(*types.Func)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if t.FullName() != "time.Tick" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
ex, ok := (*instrs[i+1]).(*Extract)
|
||||||
|
if !ok || ex.Tuple != unop || ex.Index != 1 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
ifstmt, ok := (*instrs[i+2]).(*If)
|
||||||
|
if !ok || ifstmt.Cond != ex {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
*instrs[i+2] = NewJump(block)
|
||||||
|
succ := block.Succs[1]
|
||||||
|
block.Succs = block.Succs[0:1]
|
||||||
|
succ.RemovePred(block)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
optimizeBlocks(f)
|
optimizeBlocks(f)
|
||||||
|
|
||||||
buildReferrers(f)
|
buildReferrers(f)
|
||||||
|
|
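The block added to finishBody above rewrites comma-ok receives from time.Tick channels: time.Tick never closes the channel it returns, so the ok result can never be false and the conditional branch collapses into an unconditional jump. A minimal sketch of the kind of user code this matches; the names here are illustrative and not taken from the diff:

package main

import (
	"fmt"
	"time"
)

func main() {
	ticks := time.Tick(time.Second) // channel is never closed
	for {
		t, ok := <-ticks // comma-ok receive matched by the new rewrite
		if !ok {
			return // dead branch: a time.Tick channel never reports ok == false
		}
		fmt.Println("tick at", t)
	}
}

The same rewrite fires for the implicit receive in a `for range time.Tick(x)` loop, which the comment names as the primary target.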
30 vendor/honnef.co/go/tools/ssa/lift.go (vendored)
@@ -341,10 +341,10 @@ func phiHasDirectReferrer(phi *Phi) bool {
 	return false
 }
 
-type blockSet struct{ big.Int } // (inherit methods from Int)
+type BlockSet struct{ big.Int } // (inherit methods from Int)
 
 // add adds b to the set and returns true if the set changed.
-func (s *blockSet) add(b *BasicBlock) bool {
+func (s *BlockSet) Add(b *BasicBlock) bool {
 	i := b.Index
 	if s.Bit(i) != 0 {
 		return false
@@ -353,9 +353,13 @@ func (s *blockSet) add(b *BasicBlock) bool {
 	return true
 }
 
+func (s *BlockSet) Has(b *BasicBlock) bool {
+	return s.Bit(b.Index) == 1
+}
+
 // take removes an arbitrary element from a set s and
 // returns its index, or returns -1 if empty.
-func (s *blockSet) take() int {
+func (s *BlockSet) Take() int {
 	l := s.BitLen()
 	for i := 0; i < l; i++ {
 		if s.Bit(i) == 1 {
@@ -403,7 +407,7 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool
 
 	// Compute defblocks, the set of blocks containing a
 	// definition of the alloc cell.
-	var defblocks blockSet
+	var defblocks BlockSet
 	for _, instr := range *alloc.Referrers() {
 		// Bail out if we discover the alloc is not liftable;
 		// the only operations permitted to use the alloc are
@@ -416,7 +420,7 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool
 			if instr.Addr != alloc {
 				panic("Alloc.Referrers is inconsistent")
 			}
-			defblocks.add(instr.Block())
+			defblocks.Add(instr.Block())
 		case *UnOp:
 			if instr.Op != token.MUL {
 				return false // not a load
@@ -431,7 +435,7 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool
 		}
 	}
 	// The Alloc itself counts as a (zero) definition of the cell.
-	defblocks.add(alloc.Block())
+	defblocks.Add(alloc.Block())
 
 	if debugLifting {
 		fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name())
@@ -448,18 +452,18 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool
 	//
 	// TODO(adonovan): opt: recycle slice storage for W,
 	// hasAlready, defBlocks across liftAlloc calls.
-	var hasAlready blockSet
+	var hasAlready BlockSet
 
 	// Initialize W and work to defblocks.
-	var work blockSet = defblocks // blocks seen
-	var W blockSet                // blocks to do
+	var work BlockSet = defblocks // blocks seen
+	var W BlockSet                // blocks to do
 	W.Set(&defblocks.Int)
 
 	// Traverse iterated dominance frontier, inserting φ-nodes.
-	for i := W.take(); i != -1; i = W.take() {
+	for i := W.Take(); i != -1; i = W.Take() {
 		u := fn.Blocks[i]
 		for _, v := range df[u.Index] {
-			if hasAlready.add(v) {
+			if hasAlready.Add(v) {
 				// Create φ-node.
 				// It will be prepended to v.Instrs later, if needed.
 				phi := &Phi{
@@ -478,8 +482,8 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool
 			}
 			newPhis[v] = append(newPhis[v], newPhi{phi, alloc})
 
-			if work.add(v) {
-				W.add(v)
+			if work.Add(v) {
+				W.Add(v)
 			}
 		}
 	}
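The rename above exports the package's bit-set type as BlockSet with Add, Has and Take. Below is a self-contained sketch of the same idea, a set of small integer indices stored one bit per element in a math/big.Int; it mirrors only the semantics visible in the diff and is not the vendored implementation itself.

package main

import (
	"fmt"
	"math/big"
)

// intSet is a sketch of the BlockSet idea: one bit per index.
type intSet struct{ big.Int }

// add sets bit i and reports whether the set changed.
func (s *intSet) add(i int) bool {
	if s.Bit(i) != 0 {
		return false
	}
	s.SetBit(&s.Int, i, 1)
	return true
}

// has reports whether bit i is set.
func (s *intSet) has(i int) bool { return s.Bit(i) == 1 }

// take clears and returns an arbitrary set bit, or -1 if the set is empty.
func (s *intSet) take() int {
	for i, l := 0, s.BitLen(); i < l; i++ {
		if s.Bit(i) == 1 {
			s.SetBit(&s.Int, i, 0)
			return i
		}
	}
	return -1
}

func main() {
	var s intSet
	s.add(3)
	s.add(7)
	fmt.Println(s.has(3), s.has(5)) // true false
	for i := s.take(); i != -1; i = s.take() {
		fmt.Println("took", i)
	}
}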
4 vendor/honnef.co/go/tools/ssa/methods.go (vendored)
@@ -23,14 +23,14 @@ import (
 //
 func (prog *Program) MethodValue(sel *types.Selection) *Function {
 	if sel.Kind() != types.MethodVal {
-		panic(fmt.Sprintf("Method(%s) kind != MethodVal", sel))
+		panic(fmt.Sprintf("MethodValue(%s) kind != MethodVal", sel))
 	}
 	T := sel.Recv()
 	if isInterface(T) {
 		return nil // abstract method
 	}
 	if prog.mode&LogSource != 0 {
-		defer logStack("Method %s %v", T, sel)()
+		defer logStack("MethodValue %s %v", T, sel)()
 	}
 
 	prog.methodsMu.Lock()
18 vendor/honnef.co/go/tools/ssa/sanity.go (vendored)
@@ -410,8 +410,8 @@ func (s *sanity) checkFunction(fn *Function) bool {
 		s.errorf("nil Prog")
 	}
 
-	fn.String()            // must not crash
-	fn.RelString(fn.pkg()) // must not crash
+	_ = fn.String()            // must not crash
+	_ = fn.RelString(fn.pkg()) // must not crash
 
 	// All functions have a package, except delegates (which are
 	// shared across packages, or duplicated as weak symbols in a
@@ -448,6 +448,18 @@ func (s *sanity) checkFunction(fn *Function) bool {
 		if p.Parent() != fn {
 			s.errorf("Param %s at index %d has wrong parent", p.Name(), i)
 		}
+		// Check common suffix of Signature and Params match type.
+		if sig := fn.Signature; sig != nil {
+			j := i - len(fn.Params) + sig.Params().Len() // index within sig.Params
+			if j < 0 {
+				continue
+			}
+			if !types.Identical(p.Type(), sig.Params().At(j).Type()) {
+				s.errorf("Param %s at index %d has wrong type (%s, versus %s in Signature)", p.Name(), i, p.Type(), sig.Params().At(j).Type())
+
+			}
+		}
+
 		s.checkReferrerList(p)
 	}
 	for i, fv := range fn.FreeVars {
@@ -490,7 +502,7 @@ func sanityCheckPackage(pkg *Package) {
 	if pkg.Pkg == nil {
 		panic(fmt.Sprintf("Package %s has no Object", pkg))
 	}
-	pkg.String() // must not crash
+	_ = pkg.String() // must not crash
 
 	for name, mem := range pkg.Members {
 		if name != mem.Name() {
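The new check in checkFunction compares each parameter's type against the common suffix of the signature, using the offset j = i - len(fn.Params) + sig.Params().Len(). A worked example of that arithmetic, assuming a method whose receiver appears in fn.Params but not in the signature's parameter tuple:

// Illustrative only: for a method func (r T) M(a int, b string),
//   fn.Params             = [r, a, b]  -> len(fn.Params) = 3
//   fn.Signature.Params() = (a, b)     -> Len() = 2
// so j = i - 3 + 2 gives j = -1, 0, 1 for i = 0, 1, 2:
// the receiver (j < 0) is skipped, a is checked against Params().At(0),
// and b against Params().At(1).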
2 vendor/honnef.co/go/tools/ssa/source.go (vendored)
@@ -150,7 +150,7 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function {
 // (modulo "untyped" bools resulting from comparisons).
 //
 // (Tip: to find the ssa.Value given a source position, use
-// importer.PathEnclosingInterval to locate the ast.Node, then
+// astutil.PathEnclosingInterval to locate the ast.Node, then
 // EnclosingFunction to locate the Function, then ValueForExpr to find
 // the ssa.Value.)
 //
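The corrected tip points at astutil.PathEnclosingInterval from golang.org/x/tools/go/ast/astutil. A hedged sketch of the lookup chain the comment describes; it assumes a built *ssa.Package with debug information, the enclosing *ast.File and a token.Pos supplied by the caller, and it is not code from this commit:

package example

import (
	"go/ast"
	"go/token"

	"golang.org/x/tools/go/ast/astutil"
	"honnef.co/go/tools/ssa"
)

// valueAt locates the ssa.Value for the innermost expression at pos.
func valueAt(pkg *ssa.Package, file *ast.File, pos token.Pos) ssa.Value {
	path, _ := astutil.PathEnclosingInterval(file, pos, pos) // innermost to outermost nodes
	fn := ssa.EnclosingFunction(pkg, path)
	if fn == nil {
		return nil // position is not inside any built function
	}
	expr, ok := path[0].(ast.Expr)
	if !ok {
		return nil
	}
	v, _ := fn.ValueForExpr(expr) // second result reports whether v is an address
	return v
}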
16 vendor/honnef.co/go/tools/ssa/ssa.go (vendored)
@@ -10,7 +10,7 @@ package ssa
 import (
 	"fmt"
 	"go/ast"
-	exact "go/constant"
+	"go/constant"
 	"go/token"
 	"go/types"
 	"sync"
@@ -405,7 +405,7 @@ type Parameter struct {
 // of the same type and value.
 //
 // Value holds the exact value of the constant, independent of its
-// Type(), using the same representation as package go/exact uses for
+// Type(), using the same representation as package go/constant uses for
 // constants, or nil for a typed nil value.
 //
 // Pos() returns token.NoPos.
@@ -417,7 +417,7 @@ type Parameter struct {
 //
 type Const struct {
 	typ   types.Type
-	Value exact.Value
+	Value constant.Value
 }
 
 // A Global is a named Value holding the address of a package-level
@@ -572,8 +572,8 @@ type BinOp struct {
 	register
 	// One of:
 	// ADD SUB MUL QUO REM          + - * / %
-	// AND OR XOR SHL SHR AND_NOT   & | ^ << >> &~
-	// EQL LSS GTR NEQ LEQ GEQ      == != < <= < >=
+	// AND OR XOR SHL SHR AND_NOT   & | ^ << >> &^
+	// EQL NEQ LSS LEQ GTR GEQ      == != < <= < >=
 	Op token.Token
 	X, Y Value
 }
@@ -680,10 +680,10 @@ type ChangeInterface struct {
 // value of a concrete type.
 //
 // Use Program.MethodSets.MethodSet(X.Type()) to find the method-set
-// of X, and Program.Method(m) to find the implementation of a method.
+// of X, and Program.MethodValue(m) to find the implementation of a method.
 //
 // To construct the zero value of an interface type T, use:
-// 	NewConst(exact.MakeNil(), T, pos)
+// 	NewConst(constant.MakeNil(), T, pos)
 //
 // Pos() returns the ast.CallExpr.Lparen, if the instruction arose
 // from an explicit conversion in the source.
@@ -813,7 +813,7 @@ type Slice struct {
 type FieldAddr struct {
 	register
 	X     Value // *struct
-	Field int   // index into X.Type().Deref().(*types.Struct).Fields
+	Field int   // field is X.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Struct).Field(Field)
 }
 
 // The Field instruction yields the Field of struct X.
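With the exact alias gone, Const.Value is declared directly as constant.Value from the standard go/constant package. A small, self-contained illustration of the representation that package provides (not part of the vendored code):

package main

import (
	"fmt"
	"go/constant"
	"go/token"
)

func main() {
	a := constant.MakeInt64(40)
	b := constant.MakeInt64(2)
	sum := constant.BinaryOp(a, token.ADD, b) // arbitrary-precision constant arithmetic
	n, exact := constant.Int64Val(sum)
	fmt.Println(n, exact) // 42 true

	s := constant.MakeString("hi")
	fmt.Println(constant.StringVal(s)) // hi
}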
143 vendor/honnef.co/go/tools/ssa/ssautil/load.go (vendored)
@@ -1,143 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package ssautil
-
-// This file defines utility functions for constructing programs in SSA form.
-
-import (
-	"go/ast"
-	"go/token"
-	"go/types"
-
-	"golang.org/x/tools/go/loader"
-	"golang.org/x/tools/go/packages"
-	"honnef.co/go/tools/ssa"
-)
-
-// Packages creates an SSA program for a set of packages loaded from
-// source syntax using the golang.org/x/tools/go/packages.Load function.
-// It creates and returns an SSA package for each well-typed package in
-// the initial list. The resulting list of packages has the same length
-// as initial, and contains a nil if SSA could not be constructed for
-// the corresponding initial package.
-//
-// Code for bodies of functions is not built until Build is called
-// on the resulting Program.
-//
-// The mode parameter controls diagnostics and checking during SSA construction.
-//
-func Packages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) {
-	var fset *token.FileSet
-	if len(initial) > 0 {
-		fset = initial[0].Fset
-	}
-
-	prog := ssa.NewProgram(fset, mode)
-	seen := make(map[*packages.Package]*ssa.Package)
-	var create func(p *packages.Package) *ssa.Package
-	create = func(p *packages.Package) *ssa.Package {
-		ssapkg, ok := seen[p]
-		if !ok {
-			if p.Types == nil || p.IllTyped {
-				// not well typed
-				seen[p] = nil
-				return nil
-			}
-
-			ssapkg = prog.CreatePackage(p.Types, p.Syntax, p.TypesInfo, true)
-			seen[p] = ssapkg
-
-			for _, imp := range p.Imports {
-				create(imp)
-			}
-		}
-		return ssapkg
-	}
-
-	var ssapkgs []*ssa.Package
-	for _, p := range initial {
-		ssapkgs = append(ssapkgs, create(p))
-	}
-	return prog, ssapkgs
-}
-
-// CreateProgram returns a new program in SSA form, given a program
-// loaded from source. An SSA package is created for each transitively
-// error-free package of lprog.
-//
-// Code for bodies of functions is not built until Build is called
-// on the result.
-//
-// mode controls diagnostics and checking during SSA construction.
-//
-func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program {
-	prog := ssa.NewProgram(lprog.Fset, mode)
-
-	for _, info := range lprog.AllPackages {
-		if info.TransitivelyErrorFree {
-			prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable)
-		}
-	}
-
-	return prog
-}
-
-// BuildPackage builds an SSA program with IR for a single package.
-//
-// It populates pkg by type-checking the specified file ASTs. All
-// dependencies are loaded using the importer specified by tc, which
-// typically loads compiler export data; SSA code cannot be built for
-// those packages. BuildPackage then constructs an ssa.Program with all
-// dependency packages created, and builds and returns the SSA package
-// corresponding to pkg.
-//
-// The caller must have set pkg.Path() to the import path.
-//
-// The operation fails if there were any type-checking or import errors.
-//
-// See ../ssa/example_test.go for an example.
-//
-func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ssa.BuilderMode) (*ssa.Package, *types.Info, error) {
-	if fset == nil {
-		panic("no token.FileSet")
-	}
-	if pkg.Path() == "" {
-		panic("package has no import path")
-	}
-
-	info := &types.Info{
-		Types:      make(map[ast.Expr]types.TypeAndValue),
-		Defs:       make(map[*ast.Ident]types.Object),
-		Uses:       make(map[*ast.Ident]types.Object),
-		Implicits:  make(map[ast.Node]types.Object),
-		Scopes:     make(map[ast.Node]*types.Scope),
-		Selections: make(map[*ast.SelectorExpr]*types.Selection),
-	}
-	if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil {
-		return nil, nil, err
-	}
-
-	prog := ssa.NewProgram(fset, mode)
-
-	// Create SSA packages for all imports.
-	// Order is not significant.
-	created := make(map[*types.Package]bool)
-	var createAll func(pkgs []*types.Package)
-	createAll = func(pkgs []*types.Package) {
-		for _, p := range pkgs {
-			if !created[p] {
-				created[p] = true
-				prog.CreatePackage(p, nil, nil, true)
-				createAll(p.Imports())
-			}
-		}
-	}
-	createAll(pkg.Imports())
-
-	// Create and build the primary package.
-	ssapkg := prog.CreatePackage(pkg, files, info, false)
-	ssapkg.Build()
-	return ssapkg, info, nil
-}
234 vendor/honnef.co/go/tools/ssa/ssautil/switch.go (vendored)
|
@ -1,234 +0,0 @@
|
||||||
// Copyright 2013 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package ssautil
|
|
||||||
|
|
||||||
// This file implements discovery of switch and type-switch constructs
|
|
||||||
// from low-level control flow.
|
|
||||||
//
|
|
||||||
// Many techniques exist for compiling a high-level switch with
|
|
||||||
// constant cases to efficient machine code. The optimal choice will
|
|
||||||
// depend on the data type, the specific case values, the code in the
|
|
||||||
// body of each case, and the hardware.
|
|
||||||
// Some examples:
|
|
||||||
// - a lookup table (for a switch that maps constants to constants)
|
|
||||||
// - a computed goto
|
|
||||||
// - a binary tree
|
|
||||||
// - a perfect hash
|
|
||||||
// - a two-level switch (to partition constant strings by their first byte).
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"fmt"
|
|
||||||
"go/token"
|
|
||||||
"go/types"
|
|
||||||
|
|
||||||
"honnef.co/go/tools/ssa"
|
|
||||||
)
|
|
||||||
|
|
||||||
// A ConstCase represents a single constant comparison.
|
|
||||||
// It is part of a Switch.
|
|
||||||
type ConstCase struct {
|
|
||||||
Block *ssa.BasicBlock // block performing the comparison
|
|
||||||
Body *ssa.BasicBlock // body of the case
|
|
||||||
Value *ssa.Const // case comparand
|
|
||||||
}
|
|
||||||
|
|
||||||
// A TypeCase represents a single type assertion.
|
|
||||||
// It is part of a Switch.
|
|
||||||
type TypeCase struct {
|
|
||||||
Block *ssa.BasicBlock // block performing the type assert
|
|
||||||
Body *ssa.BasicBlock // body of the case
|
|
||||||
Type types.Type // case type
|
|
||||||
Binding ssa.Value // value bound by this case
|
|
||||||
}
|
|
||||||
|
|
||||||
// A Switch is a logical high-level control flow operation
|
|
||||||
// (a multiway branch) discovered by analysis of a CFG containing
|
|
||||||
// only if/else chains. It is not part of the ssa.Instruction set.
|
|
||||||
//
|
|
||||||
// One of ConstCases and TypeCases has length >= 2;
|
|
||||||
// the other is nil.
|
|
||||||
//
|
|
||||||
// In a value switch, the list of cases may contain duplicate constants.
|
|
||||||
// A type switch may contain duplicate types, or types assignable
|
|
||||||
// to an interface type also in the list.
|
|
||||||
// TODO(adonovan): eliminate such duplicates.
|
|
||||||
//
|
|
||||||
type Switch struct {
|
|
||||||
Start *ssa.BasicBlock // block containing start of if/else chain
|
|
||||||
X ssa.Value // the switch operand
|
|
||||||
ConstCases []ConstCase // ordered list of constant comparisons
|
|
||||||
TypeCases []TypeCase // ordered list of type assertions
|
|
||||||
Default *ssa.BasicBlock // successor if all comparisons fail
|
|
||||||
}
|
|
||||||
|
|
||||||
func (sw *Switch) String() string {
|
|
||||||
// We represent each block by the String() of its
|
|
||||||
// first Instruction, e.g. "print(42:int)".
|
|
||||||
var buf bytes.Buffer
|
|
||||||
if sw.ConstCases != nil {
|
|
||||||
fmt.Fprintf(&buf, "switch %s {\n", sw.X.Name())
|
|
||||||
for _, c := range sw.ConstCases {
|
|
||||||
fmt.Fprintf(&buf, "case %s: %s\n", c.Value, c.Body.Instrs[0])
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
fmt.Fprintf(&buf, "switch %s.(type) {\n", sw.X.Name())
|
|
||||||
for _, c := range sw.TypeCases {
|
|
||||||
fmt.Fprintf(&buf, "case %s %s: %s\n",
|
|
||||||
c.Binding.Name(), c.Type, c.Body.Instrs[0])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if sw.Default != nil {
|
|
||||||
fmt.Fprintf(&buf, "default: %s\n", sw.Default.Instrs[0])
|
|
||||||
}
|
|
||||||
fmt.Fprintf(&buf, "}")
|
|
||||||
return buf.String()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Switches examines the control-flow graph of fn and returns the
|
|
||||||
// set of inferred value and type switches. A value switch tests an
|
|
||||||
// ssa.Value for equality against two or more compile-time constant
|
|
||||||
// values. Switches involving link-time constants (addresses) are
|
|
||||||
// ignored. A type switch type-asserts an ssa.Value against two or
|
|
||||||
// more types.
|
|
||||||
//
|
|
||||||
// The switches are returned in dominance order.
|
|
||||||
//
|
|
||||||
// The resulting switches do not necessarily correspond to uses of the
|
|
||||||
// 'switch' keyword in the source: for example, a single source-level
|
|
||||||
// switch statement with non-constant cases may result in zero, one or
|
|
||||||
// many Switches, one per plural sequence of constant cases.
|
|
||||||
// Switches may even be inferred from if/else- or goto-based control flow.
|
|
||||||
// (In general, the control flow constructs of the source program
|
|
||||||
// cannot be faithfully reproduced from the SSA representation.)
|
|
||||||
//
|
|
||||||
func Switches(fn *ssa.Function) []Switch {
|
|
||||||
// Traverse the CFG in dominance order, so we don't
|
|
||||||
// enter an if/else-chain in the middle.
|
|
||||||
var switches []Switch
|
|
||||||
seen := make(map[*ssa.BasicBlock]bool) // TODO(adonovan): opt: use ssa.blockSet
|
|
||||||
for _, b := range fn.DomPreorder() {
|
|
||||||
if x, k := isComparisonBlock(b); x != nil {
|
|
||||||
// Block b starts a switch.
|
|
||||||
sw := Switch{Start: b, X: x}
|
|
||||||
valueSwitch(&sw, k, seen)
|
|
||||||
if len(sw.ConstCases) > 1 {
|
|
||||||
switches = append(switches, sw)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if y, x, T := isTypeAssertBlock(b); y != nil {
|
|
||||||
// Block b starts a type switch.
|
|
||||||
sw := Switch{Start: b, X: x}
|
|
||||||
typeSwitch(&sw, y, T, seen)
|
|
||||||
if len(sw.TypeCases) > 1 {
|
|
||||||
switches = append(switches, sw)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return switches
|
|
||||||
}
|
|
||||||
|
|
||||||
func valueSwitch(sw *Switch, k *ssa.Const, seen map[*ssa.BasicBlock]bool) {
|
|
||||||
b := sw.Start
|
|
||||||
x := sw.X
|
|
||||||
for x == sw.X {
|
|
||||||
if seen[b] {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
seen[b] = true
|
|
||||||
|
|
||||||
sw.ConstCases = append(sw.ConstCases, ConstCase{
|
|
||||||
Block: b,
|
|
||||||
Body: b.Succs[0],
|
|
||||||
Value: k,
|
|
||||||
})
|
|
||||||
b = b.Succs[1]
|
|
||||||
if len(b.Instrs) > 2 {
|
|
||||||
// Block b contains not just 'if x == k',
|
|
||||||
// so it may have side effects that
|
|
||||||
// make it unsafe to elide.
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if len(b.Preds) != 1 {
|
|
||||||
// Block b has multiple predecessors,
|
|
||||||
// so it cannot be treated as a case.
|
|
||||||
break
|
|
||||||
}
|
|
||||||
x, k = isComparisonBlock(b)
|
|
||||||
}
|
|
||||||
sw.Default = b
|
|
||||||
}
|
|
||||||
|
|
||||||
func typeSwitch(sw *Switch, y ssa.Value, T types.Type, seen map[*ssa.BasicBlock]bool) {
|
|
||||||
b := sw.Start
|
|
||||||
x := sw.X
|
|
||||||
for x == sw.X {
|
|
||||||
if seen[b] {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
seen[b] = true
|
|
||||||
|
|
||||||
sw.TypeCases = append(sw.TypeCases, TypeCase{
|
|
||||||
Block: b,
|
|
||||||
Body: b.Succs[0],
|
|
||||||
Type: T,
|
|
||||||
Binding: y,
|
|
||||||
})
|
|
||||||
b = b.Succs[1]
|
|
||||||
if len(b.Instrs) > 4 {
|
|
||||||
// Block b contains not just
|
|
||||||
// {TypeAssert; Extract #0; Extract #1; If}
|
|
||||||
// so it may have side effects that
|
|
||||||
// make it unsafe to elide.
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if len(b.Preds) != 1 {
|
|
||||||
// Block b has multiple predecessors,
|
|
||||||
// so it cannot be treated as a case.
|
|
||||||
break
|
|
||||||
}
|
|
||||||
y, x, T = isTypeAssertBlock(b)
|
|
||||||
}
|
|
||||||
sw.Default = b
|
|
||||||
}
|
|
||||||
|
|
||||||
// isComparisonBlock returns the operands (v, k) if a block ends with
|
|
||||||
// a comparison v==k, where k is a compile-time constant.
|
|
||||||
//
|
|
||||||
func isComparisonBlock(b *ssa.BasicBlock) (v ssa.Value, k *ssa.Const) {
|
|
||||||
if n := len(b.Instrs); n >= 2 {
|
|
||||||
if i, ok := b.Instrs[n-1].(*ssa.If); ok {
|
|
||||||
if binop, ok := i.Cond.(*ssa.BinOp); ok && binop.Block() == b && binop.Op == token.EQL {
|
|
||||||
if k, ok := binop.Y.(*ssa.Const); ok {
|
|
||||||
return binop.X, k
|
|
||||||
}
|
|
||||||
if k, ok := binop.X.(*ssa.Const); ok {
|
|
||||||
return binop.Y, k
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// isTypeAssertBlock returns the operands (y, x, T) if a block ends with
|
|
||||||
// a type assertion "if y, ok := x.(T); ok {".
|
|
||||||
//
|
|
||||||
func isTypeAssertBlock(b *ssa.BasicBlock) (y, x ssa.Value, T types.Type) {
|
|
||||||
if n := len(b.Instrs); n >= 4 {
|
|
||||||
if i, ok := b.Instrs[n-1].(*ssa.If); ok {
|
|
||||||
if ext1, ok := i.Cond.(*ssa.Extract); ok && ext1.Block() == b && ext1.Index == 1 {
|
|
||||||
if ta, ok := ext1.Tuple.(*ssa.TypeAssert); ok && ta.Block() == b {
|
|
||||||
// hack: relies upon instruction ordering.
|
|
||||||
if ext0, ok := b.Instrs[n-3].(*ssa.Extract); ok {
|
|
||||||
return ext0, ta.X, ta.AssertedType
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
79 vendor/honnef.co/go/tools/ssa/ssautil/visit.go (vendored)
@@ -1,79 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package ssautil // import "honnef.co/go/tools/ssa/ssautil"
-
-import "honnef.co/go/tools/ssa"
-
-// This file defines utilities for visiting the SSA representation of
-// a Program.
-//
-// TODO(adonovan): test coverage.
-
-// AllFunctions finds and returns the set of functions potentially
-// needed by program prog, as determined by a simple linker-style
-// reachability algorithm starting from the members and method-sets of
-// each package. The result may include anonymous functions and
-// synthetic wrappers.
-//
-// Precondition: all packages are built.
-//
-func AllFunctions(prog *ssa.Program) map[*ssa.Function]bool {
-	visit := visitor{
-		prog: prog,
-		seen: make(map[*ssa.Function]bool),
-	}
-	visit.program()
-	return visit.seen
-}
-
-type visitor struct {
-	prog *ssa.Program
-	seen map[*ssa.Function]bool
-}
-
-func (visit *visitor) program() {
-	for _, pkg := range visit.prog.AllPackages() {
-		for _, mem := range pkg.Members {
-			if fn, ok := mem.(*ssa.Function); ok {
-				visit.function(fn)
-			}
-		}
-	}
-	for _, T := range visit.prog.RuntimeTypes() {
-		mset := visit.prog.MethodSets.MethodSet(T)
-		for i, n := 0, mset.Len(); i < n; i++ {
-			visit.function(visit.prog.MethodValue(mset.At(i)))
-		}
-	}
-}
-
-func (visit *visitor) function(fn *ssa.Function) {
-	if !visit.seen[fn] {
-		visit.seen[fn] = true
-		var buf [10]*ssa.Value // avoid alloc in common case
-		for _, b := range fn.Blocks {
-			for _, instr := range b.Instrs {
-				for _, op := range instr.Operands(buf[:0]) {
-					if fn, ok := (*op).(*ssa.Function); ok {
-						visit.function(fn)
-					}
-				}
-			}
-		}
-	}
-}
-
-// MainPackages returns the subset of the specified packages
-// named "main" that define a main function.
-// The result may include synthetic "testmain" packages.
-func MainPackages(pkgs []*ssa.Package) []*ssa.Package {
-	var mains []*ssa.Package
-	for _, pkg := range pkgs {
-		if pkg.Pkg.Name() == "main" && pkg.Func("main") != nil {
-			mains = append(mains, pkg)
-		}
-	}
-	return mains
-}
3 vendor/honnef.co/go/tools/ssa/staticcheck.conf (vendored, new file)
@@ -0,0 +1,3 @@
+# ssa/... is mostly imported from upstream and we don't want to
+# deviate from it too much, hence disabling SA1019
+checks = ["inherit", "-SA1019"]
8 vendor/honnef.co/go/tools/ssa/testmain.go (vendored)
@@ -8,8 +8,8 @@ package ssa
 // tests of the supplied packages.
 // It is closely coupled to $GOROOT/src/cmd/go/test.go and $GOROOT/src/testing.
 //
-// TODO(adonovan): this file no longer needs to live in the ssa package.
-// Move it to ssautil.
+// TODO(adonovan): throws this all away now that x/tools/go/packages
+// provides access to the actual synthetic test main files.
 
 import (
 	"bytes"
@@ -26,6 +26,8 @@ import (
 // FindTests returns the Test, Benchmark, and Example functions
 // (as defined by "go test") defined in the specified package,
 // and its TestMain function, if any.
+//
+// Deprecated: use x/tools/go/packages to access synthetic testmain packages.
 func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) {
 	prog := pkg.Prog
 
@@ -109,6 +111,8 @@ func isTest(name, prefix string) bool {
 //
 // Subsequent calls to prog.AllPackages include the new package.
 // The package pkg must belong to the program prog.
+//
+// Deprecated: use x/tools/go/packages to access synthetic testmain packages.
 func (prog *Program) CreateTestMainPackage(pkg *Package) *Package {
 	if pkg.Prog != prog {
 		log.Fatal("Package does not belong to Program")
6 vendor/honnef.co/go/tools/ssa/wrappers.go (vendored)
@@ -141,13 +141,9 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function {
 // start is the index of the first regular parameter to use.
 //
 func createParams(fn *Function, start int) {
-	var last *Parameter
 	tparams := fn.Signature.Params()
 	for i, n := start, tparams.Len(); i < n; i++ {
-		last = fn.addParamObj(tparams.At(i))
+		fn.addParamObj(tparams.At(i))
 	}
-	if fn.Signature.Variadic() {
-		last.typ = types.NewSlice(last.typ)
-	}
 }
525 vendor/honnef.co/go/tools/staticcheck/analysis.go (vendored, new file)
|
@ -0,0 +1,525 @@
|
||||||
|
package staticcheck
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
|
||||||
|
"honnef.co/go/tools/facts"
|
||||||
|
"honnef.co/go/tools/internal/passes/buildssa"
|
||||||
|
"honnef.co/go/tools/lint/lintutil"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/analysis"
|
||||||
|
"golang.org/x/tools/go/analysis/passes/inspect"
|
||||||
|
)
|
||||||
|
|
||||||
|
func newFlagSet() flag.FlagSet {
|
||||||
|
fs := flag.NewFlagSet("", flag.PanicOnError)
|
||||||
|
fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version")
|
||||||
|
return *fs
|
||||||
|
}
|
||||||
|
|
||||||
|
var Analyzers = map[string]*analysis.Analyzer{
|
||||||
|
"SA1000": {
|
||||||
|
Name: "SA1000",
|
||||||
|
Run: callChecker(checkRegexpRules),
|
||||||
|
Doc: Docs["SA1000"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1001": {
|
||||||
|
Name: "SA1001",
|
||||||
|
Run: CheckTemplate,
|
||||||
|
Doc: Docs["SA1001"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1002": {
|
||||||
|
Name: "SA1002",
|
||||||
|
Run: callChecker(checkTimeParseRules),
|
||||||
|
Doc: Docs["SA1002"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1003": {
|
||||||
|
Name: "SA1003",
|
||||||
|
Run: callChecker(checkEncodingBinaryRules),
|
||||||
|
Doc: Docs["SA1003"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1004": {
|
||||||
|
Name: "SA1004",
|
||||||
|
Run: CheckTimeSleepConstant,
|
||||||
|
Doc: Docs["SA1004"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1005": {
|
||||||
|
Name: "SA1005",
|
||||||
|
Run: CheckExec,
|
||||||
|
Doc: Docs["SA1005"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1006": {
|
||||||
|
Name: "SA1006",
|
||||||
|
Run: CheckUnsafePrintf,
|
||||||
|
Doc: Docs["SA1006"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1007": {
|
||||||
|
Name: "SA1007",
|
||||||
|
Run: callChecker(checkURLsRules),
|
||||||
|
Doc: Docs["SA1007"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1008": {
|
||||||
|
Name: "SA1008",
|
||||||
|
Run: CheckCanonicalHeaderKey,
|
||||||
|
Doc: Docs["SA1008"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1010": {
|
||||||
|
Name: "SA1010",
|
||||||
|
Run: callChecker(checkRegexpFindAllRules),
|
||||||
|
Doc: Docs["SA1010"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1011": {
|
||||||
|
Name: "SA1011",
|
||||||
|
Run: callChecker(checkUTF8CutsetRules),
|
||||||
|
Doc: Docs["SA1011"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1012": {
|
||||||
|
Name: "SA1012",
|
||||||
|
Run: CheckNilContext,
|
||||||
|
Doc: Docs["SA1012"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1013": {
|
||||||
|
Name: "SA1013",
|
||||||
|
Run: CheckSeeker,
|
||||||
|
Doc: Docs["SA1013"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1014": {
|
||||||
|
Name: "SA1014",
|
||||||
|
Run: callChecker(checkUnmarshalPointerRules),
|
||||||
|
Doc: Docs["SA1014"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1015": {
|
||||||
|
Name: "SA1015",
|
||||||
|
Run: CheckLeakyTimeTick,
|
||||||
|
Doc: Docs["SA1015"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1016": {
|
||||||
|
Name: "SA1016",
|
||||||
|
Run: CheckUntrappableSignal,
|
||||||
|
Doc: Docs["SA1016"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1017": {
|
||||||
|
Name: "SA1017",
|
||||||
|
Run: callChecker(checkUnbufferedSignalChanRules),
|
||||||
|
Doc: Docs["SA1017"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1018": {
|
||||||
|
Name: "SA1018",
|
||||||
|
Run: callChecker(checkStringsReplaceZeroRules),
|
||||||
|
Doc: Docs["SA1018"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1019": {
|
||||||
|
Name: "SA1019",
|
||||||
|
Run: CheckDeprecated,
|
||||||
|
Doc: Docs["SA1019"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Deprecated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1020": {
|
||||||
|
Name: "SA1020",
|
||||||
|
Run: callChecker(checkListenAddressRules),
|
||||||
|
Doc: Docs["SA1020"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1021": {
|
||||||
|
Name: "SA1021",
|
||||||
|
Run: callChecker(checkBytesEqualIPRules),
|
||||||
|
Doc: Docs["SA1021"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1023": {
|
||||||
|
Name: "SA1023",
|
||||||
|
Run: CheckWriterBufferModified,
|
||||||
|
Doc: Docs["SA1023"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1024": {
|
||||||
|
Name: "SA1024",
|
||||||
|
Run: callChecker(checkUniqueCutsetRules),
|
||||||
|
Doc: Docs["SA1024"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1025": {
|
||||||
|
Name: "SA1025",
|
||||||
|
Run: CheckTimerResetReturnValue,
|
||||||
|
Doc: Docs["SA1025"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1026": {
|
||||||
|
Name: "SA1026",
|
||||||
|
Run: callChecker(checkUnsupportedMarshal),
|
||||||
|
Doc: Docs["SA1026"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA1027": {
|
||||||
|
Name: "SA1027",
|
||||||
|
Run: callChecker(checkAtomicAlignment),
|
||||||
|
Doc: Docs["SA1027"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
|
||||||
|
"SA2000": {
|
||||||
|
Name: "SA2000",
|
||||||
|
Run: CheckWaitgroupAdd,
|
||||||
|
Doc: Docs["SA2000"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA2001": {
|
||||||
|
Name: "SA2001",
|
||||||
|
Run: CheckEmptyCriticalSection,
|
||||||
|
Doc: Docs["SA2001"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA2002": {
|
||||||
|
Name: "SA2002",
|
||||||
|
Run: CheckConcurrentTesting,
|
||||||
|
Doc: Docs["SA2002"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA2003": {
|
||||||
|
Name: "SA2003",
|
||||||
|
Run: CheckDeferLock,
|
||||||
|
Doc: Docs["SA2003"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
|
||||||
|
"SA3000": {
|
||||||
|
Name: "SA3000",
|
||||||
|
Run: CheckTestMainExit,
|
||||||
|
Doc: Docs["SA3000"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA3001": {
|
||||||
|
Name: "SA3001",
|
||||||
|
Run: CheckBenchmarkN,
|
||||||
|
Doc: Docs["SA3001"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
|
||||||
|
"SA4000": {
|
||||||
|
Name: "SA4000",
|
||||||
|
Run: CheckLhsRhsIdentical,
|
||||||
|
Doc: Docs["SA4000"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.TokenFile, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4001": {
|
||||||
|
Name: "SA4001",
|
||||||
|
Run: CheckIneffectiveCopy,
|
||||||
|
Doc: Docs["SA4001"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4002": {
|
||||||
|
Name: "SA4002",
|
||||||
|
Run: CheckDiffSizeComparison,
|
||||||
|
Doc: Docs["SA4002"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4003": {
|
||||||
|
Name: "SA4003",
|
||||||
|
Run: CheckExtremeComparison,
|
||||||
|
Doc: Docs["SA4003"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4004": {
|
||||||
|
Name: "SA4004",
|
||||||
|
Run: CheckIneffectiveLoop,
|
||||||
|
Doc: Docs["SA4004"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4006": {
|
||||||
|
Name: "SA4006",
|
||||||
|
Run: CheckUnreadVariableValues,
|
||||||
|
Doc: Docs["SA4006"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4008": {
|
||||||
|
Name: "SA4008",
|
||||||
|
Run: CheckLoopCondition,
|
||||||
|
Doc: Docs["SA4008"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4009": {
|
||||||
|
Name: "SA4009",
|
||||||
|
Run: CheckArgOverwritten,
|
||||||
|
Doc: Docs["SA4009"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4010": {
|
||||||
|
Name: "SA4010",
|
||||||
|
Run: CheckIneffectiveAppend,
|
||||||
|
Doc: Docs["SA4010"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4011": {
|
||||||
|
Name: "SA4011",
|
||||||
|
Run: CheckScopedBreak,
|
||||||
|
Doc: Docs["SA4011"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4012": {
|
||||||
|
Name: "SA4012",
|
||||||
|
Run: CheckNaNComparison,
|
||||||
|
Doc: Docs["SA4012"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4013": {
|
||||||
|
Name: "SA4013",
|
||||||
|
Run: CheckDoubleNegation,
|
||||||
|
Doc: Docs["SA4013"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4014": {
|
||||||
|
Name: "SA4014",
|
||||||
|
Run: CheckRepeatedIfElse,
|
||||||
|
Doc: Docs["SA4014"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4015": {
|
||||||
|
Name: "SA4015",
|
||||||
|
Run: callChecker(checkMathIntRules),
|
||||||
|
Doc: Docs["SA4015"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4016": {
|
||||||
|
Name: "SA4016",
|
||||||
|
Run: CheckSillyBitwiseOps,
|
||||||
|
Doc: Docs["SA4016"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.TokenFile},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4017": {
|
||||||
|
Name: "SA4017",
|
||||||
|
Run: CheckPureFunctions,
|
||||||
|
Doc: Docs["SA4017"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.Purity},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4018": {
|
||||||
|
Name: "SA4018",
|
||||||
|
Run: CheckSelfAssignment,
|
||||||
|
Doc: Docs["SA4018"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4019": {
|
||||||
|
Name: "SA4019",
|
||||||
|
Run: CheckDuplicateBuildConstraints,
|
||||||
|
Doc: Docs["SA4019"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4020": {
|
||||||
|
Name: "SA4020",
|
||||||
|
Run: CheckUnreachableTypeCases,
|
||||||
|
Doc: Docs["SA4020"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA4021": {
|
||||||
|
Name: "SA4021",
|
||||||
|
Run: CheckSingleArgAppend,
|
||||||
|
Doc: Docs["SA4021"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
|
||||||
|
"SA5000": {
|
||||||
|
Name: "SA5000",
|
||||||
|
Run: CheckNilMaps,
|
||||||
|
Doc: Docs["SA5000"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA5001": {
|
||||||
|
Name: "SA5001",
|
||||||
|
Run: CheckEarlyDefer,
|
||||||
|
Doc: Docs["SA5001"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA5002": {
|
||||||
|
Name: "SA5002",
|
||||||
|
Run: CheckInfiniteEmptyLoop,
|
||||||
|
Doc: Docs["SA5002"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA5003": {
|
||||||
|
Name: "SA5003",
|
||||||
|
Run: CheckDeferInInfiniteLoop,
|
||||||
|
Doc: Docs["SA5003"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA5004": {
|
||||||
|
Name: "SA5004",
|
||||||
|
Run: CheckLoopEmptyDefault,
|
||||||
|
Doc: Docs["SA5004"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA5005": {
|
||||||
|
Name: "SA5005",
|
||||||
|
Run: CheckCyclicFinalizer,
|
||||||
|
Doc: Docs["SA5005"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA5007": {
|
||||||
|
Name: "SA5007",
|
||||||
|
Run: CheckInfiniteRecursion,
|
||||||
|
Doc: Docs["SA5007"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA5008": {
|
||||||
|
Name: "SA5008",
|
||||||
|
Run: CheckStructTags,
|
||||||
|
Doc: Docs["SA5008"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA5009": {
|
||||||
|
Name: "SA5009",
|
||||||
|
Run: callChecker(checkPrintfRules),
|
||||||
|
Doc: Docs["SA5009"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
|
||||||
|
"SA6000": {
|
||||||
|
Name: "SA6000",
|
||||||
|
Run: callChecker(checkRegexpMatchLoopRules),
|
||||||
|
Doc: Docs["SA6000"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA6001": {
|
||||||
|
Name: "SA6001",
|
||||||
|
Run: CheckMapBytesKey,
|
||||||
|
Doc: Docs["SA6001"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA6002": {
|
||||||
|
Name: "SA6002",
|
||||||
|
Run: callChecker(checkSyncPoolValueRules),
|
||||||
|
Doc: Docs["SA6002"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA6003": {
|
||||||
|
Name: "SA6003",
|
||||||
|
Run: CheckRangeStringRunes,
|
||||||
|
Doc: Docs["SA6003"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA6005": {
|
||||||
|
Name: "SA6005",
|
||||||
|
Run: CheckToLowerToUpperComparison,
|
||||||
|
Doc: Docs["SA6005"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
|
||||||
|
"SA9001": {
|
||||||
|
Name: "SA9001",
|
||||||
|
Run: CheckDubiousDeferInChannelRangeLoop,
|
||||||
|
Doc: Docs["SA9001"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA9002": {
|
||||||
|
Name: "SA9002",
|
||||||
|
Run: CheckNonOctalFileMode,
|
||||||
|
Doc: Docs["SA9002"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA9003": {
|
||||||
|
Name: "SA9003",
|
||||||
|
Run: CheckEmptyBranch,
|
||||||
|
Doc: Docs["SA9003"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.TokenFile, facts.Generated},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
"SA9004": {
|
||||||
|
Name: "SA9004",
|
||||||
|
Run: CheckMissingEnumTypesInDeclaration,
|
||||||
|
Doc: Docs["SA9004"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
// Filtering generated code because it may include empty structs generated from data models.
|
||||||
|
"SA9005": {
|
||||||
|
Name: "SA9005",
|
||||||
|
Run: callChecker(checkNoopMarshal),
|
||||||
|
Doc: Docs["SA9005"].String(),
|
||||||
|
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer, facts.Generated, facts.TokenFile},
|
||||||
|
Flags: newFlagSet(),
|
||||||
|
},
|
||||||
|
}
|
795
vendor/honnef.co/go/tools/staticcheck/doc.go
vendored
795
vendor/honnef.co/go/tools/staticcheck/doc.go
vendored
|
@ -1,39 +1,37 @@
|
||||||
package staticcheck
|
package staticcheck
|
||||||
|
|
||||||
var docSA1000 = `Invalid regular expression
|
import "honnef.co/go/tools/lint"
|
||||||
|
|
||||||
Available since
|
var Docs = map[string]*lint.Documentation{
|
||||||
2017.1
|
"SA1000": &lint.Documentation{
|
||||||
`
|
Title: `Invalid regular expression`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA1001 = `Invalid template
|
"SA1001": &lint.Documentation{
|
||||||
|
Title: `Invalid template`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA1002": &lint.Documentation{
|
||||||
2017.1
|
Title: `Invalid format in time.Parse`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA1002 = `Invalid format in time.Parse
|
"SA1003": &lint.Documentation{
|
||||||
|
Title: `Unsupported argument to functions in encoding/binary`,
|
||||||
Available since
|
Text: `The encoding/binary package can only serialize types with known sizes.
|
||||||
2017.1
|
This precludes the use of the int and uint types, as their sizes
|
||||||
`
|
|
||||||
|
|
||||||
var docSA1003 = `Unsupported argument to functions in encoding/binary
|
|
||||||
|
|
||||||
The encoding/binary package can only serialize types with known sizes.
|
|
||||||
This precludes the use of the 'int' and 'uint' types, as their sizes
|
|
||||||
differ on different architectures. Furthermore, it doesn't support
|
differ on different architectures. Furthermore, it doesn't support
|
||||||
serializing maps, channels, strings, or functions.
|
serializing maps, channels, strings, or functions.
|
||||||
|
|
||||||
Before Go 1.8, bool wasn't supported, either.
|
Before Go 1.8, bool wasn't supported, either.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA1004": &lint.Documentation{
|
||||||
2017.1
|
Title: `Suspiciously small untyped constant in time.Sleep`,
|
||||||
`
|
Text: `The time.Sleep function takes a time.Duration as its only argument.
|
||||||
|
|
||||||
var docSA1004 = `Suspiciously small untyped constant in time.Sleep
|
|
||||||
|
|
||||||
The time.Sleep function takes a time.Duration as its only argument.
|
|
||||||
Durations are expressed in nanoseconds. Thus, calling time.Sleep(1)
|
Durations are expressed in nanoseconds. Thus, calling time.Sleep(1)
|
||||||
will sleep for 1 nanosecond. This is a common source of bugs, as sleep
|
will sleep for 1 nanosecond. This is a common source of bugs, as sleep
|
||||||
functions in other languages often accept seconds or milliseconds.
|
functions in other languages often accept seconds or milliseconds.
|
||||||
|
@ -43,16 +41,14 @@ large durations. These can be combined with arithmetic to express
|
||||||
arbitrary durations, for example '5 * time.Second' for 5 seconds.
|
arbitrary durations, for example '5 * time.Second' for 5 seconds.
|
||||||
|
|
||||||
If you truly meant to sleep for a tiny amount of time, use
|
If you truly meant to sleep for a tiny amount of time, use
|
||||||
'n * time.Nanosecond" to signal to staticcheck that you did mean to sleep
|
'n * time.Nanosecond' to signal to staticcheck that you did mean to sleep
|
||||||
for some amount of nanoseconds.
|
for some amount of nanoseconds.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
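A minimal sketch of the trap SA1004 describes; the durations here are illustrative:

    package main

    import "time"

    func main() {
        time.Sleep(1)                    // sleeps 1 nanosecond – almost never what was intended
        time.Sleep(5 * time.Second)      // sleeps 5 seconds
        time.Sleep(10 * time.Nanosecond) // explicit unit signals the tiny sleep is intentional
    }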
|
|
||||||
Available since
|
"SA1005": &lint.Documentation{
|
||||||
2017.1
|
Title: `Invalid first argument to exec.Command`,
|
||||||
`
|
Text: `os/exec runs programs directly (using variants of the fork and exec
|
||||||
|
|
||||||
var docSA1005 = `Invalid first argument to exec.Command
|
|
||||||
|
|
||||||
os/exec runs programs directly (using variants of the fork and exec
|
|
||||||
system calls on Unix systems). This shouldn't be confused with running
|
system calls on Unix systems). This shouldn't be confused with running
|
||||||
a command in a shell. The shell will allow for features such as input
|
a command in a shell. The shell will allow for features such as input
|
||||||
redirection, pipes, and general scripting. The shell is also
|
redirection, pipes, and general scripting. The shell is also
|
||||||
|
@ -69,15 +65,13 @@ If you want to run a command in a shell, consider using something like
|
||||||
the following – but be aware that not all systems, particularly
|
the following – but be aware that not all systems, particularly
|
||||||
Windows, will have a /bin/sh program:
|
Windows, will have a /bin/sh program:
|
||||||
|
|
||||||
exec.Command("/bin/sh", "-c", "ls | grep Awesome")
|
exec.Command("/bin/sh", "-c", "ls | grep Awesome")`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
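A small sketch of the distinction SA1005 draws, assuming an ls program is available on the system:

    package main

    import "os/exec"

    func main() {
        _ = exec.Command("ls", "-lh").Run() // correct: program and arguments are separate parameters
        _ = exec.Command("ls -lh").Run()    // flagged: there is no program named "ls -lh"; no shell is involved
    }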
|
|
||||||
Available since
|
"SA1006": &lint.Documentation{
|
||||||
2017.1
|
Title: `Printf with dynamic first argument and no further arguments`,
|
||||||
`
|
Text: `Using fmt.Printf with a dynamic first argument can lead to unexpected
|
||||||
|
|
||||||
var docSA1006 = `Printf with dynamic first argument and no further arguments
|
|
||||||
|
|
||||||
Using fmt.Printf with a dynamic first argument can lead to unexpected
|
|
||||||
output. The first argument is a format string, where certain character
|
output. The first argument is a format string, where certain character
|
||||||
combinations have special meaning. If, for example, a user were to
|
combinations have special meaning. If, for example, a user were to
|
||||||
enter a string such as
|
enter a string such as
|
||||||
|
@ -86,335 +80,325 @@ enter a string such as
|
||||||
|
|
||||||
and you printed it with
|
and you printed it with
|
||||||
|
|
||||||
fmt.Printf(s)
|
fmt.Printf(s)
|
||||||
|
|
||||||
it would lead to the following output:
|
it would lead to the following output:
|
||||||
|
|
||||||
Interest rate: 5%!(NOVERB).
|
Interest rate: 5%!(NOVERB).
|
||||||
|
|
||||||
Similarly, forming the first parameter via string concatenation with
|
Similarly, forming the first parameter via string concatenation with
|
||||||
user input should be avoided for the same reason. When printing user
|
user input should be avoided for the same reason. When printing user
|
||||||
input, either use a variant of fmt.Print, or use the %s Printf verb
|
input, either use a variant of fmt.Print, or use the %s Printf verb
|
||||||
and pass the string as an argument.
|
and pass the string as an argument.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
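A brief sketch of the safe alternatives mentioned above, where s is assumed to hold untrusted input:

    package main

    import "fmt"

    func main() {
        s := "Interest rate: 5%"
        fmt.Printf(s)         // flagged: s is treated as a format string
        fmt.Print(s)          // safe: nothing is interpreted
        fmt.Printf("%s\n", s) // safe: s is an ordinary argument to a fixed format
    }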
|
|
||||||
Available since
|
"SA1007": &lint.Documentation{
|
||||||
2017.1
|
Title: `Invalid URL in net/url.Parse`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA1007 = `Invalid URL in net/url.Parse
|
"SA1008": &lint.Documentation{
|
||||||
|
Title: `Non-canonical key in http.Header map`,
|
||||||
|
Text: `Keys in http.Header maps are canonical, meaning they follow a specific
|
||||||
|
combination of uppercase and lowercase letters. Methods such as
|
||||||
|
http.Header.Add and http.Header.Del convert inputs into this canonical
|
||||||
|
form before manipulating the map.
|
||||||
|
|
||||||
Available since
|
When manipulating http.Header maps directly, as opposed to using the
|
||||||
2017.1
|
provided methods, care should be taken to stick to canonical form in
|
||||||
`
|
order to avoid inconsistencies. The following piece of code
|
||||||
|
demonstrates one such inconsistency:
|
||||||
|
|
||||||
var docSA1008 = `Non-canonical key in http.Header map
|
h := http.Header{}
|
||||||
|
h["etag"] = []string{"1234"}
|
||||||
|
h.Add("etag", "5678")
|
||||||
|
fmt.Println(h)
|
||||||
|
|
||||||
Available since
|
// Output:
|
||||||
2017.1
|
// map[Etag:[5678] etag:[1234]]
|
||||||
`
|
|
||||||
|
|
||||||
var docSA1010 = `(*regexp.Regexp).FindAll called with n == 0, which will always return zero results
|
The easiest way of obtaining the canonical form of a key is to use
|
||||||
|
http.CanonicalHeaderKey.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
If n >= 0, the function returns at most n matches/submatches. To
|
"SA1010": &lint.Documentation{
|
||||||
return all results, specify a negative number.
|
Title: `(*regexp.Regexp).FindAll called with n == 0, which will always return zero results`,
|
||||||
|
Text: `If n >= 0, the function returns at most n matches/submatches. To
|
||||||
|
return all results, specify a negative number.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA1011": &lint.Documentation{
|
||||||
2017.1
|
Title: `Various methods in the strings package expect valid UTF-8, but invalid input is provided`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA1011 = `Various methods in the strings package expect valid UTF-8, but invalid input is provided
|
"SA1012": &lint.Documentation{
|
||||||
|
Title: `A nil context.Context is being passed to a function, consider using context.TODO instead`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA1013": &lint.Documentation{
|
||||||
2017.1
|
Title: `io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA1012 = `A nil context.Context is being passed to a function, consider using context.TODO instead
|
"SA1014": &lint.Documentation{
|
||||||
|
Title: `Non-pointer value passed to Unmarshal or Decode`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA1015": &lint.Documentation{
|
||||||
2017.1
|
Title: `Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA1013 = `io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second
|
"SA1016": &lint.Documentation{
|
||||||
|
Title: `Trapping a signal that cannot be trapped`,
|
||||||
Available since
|
Text: `Not all signals can be intercepted by a process. Speficially, on
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA1014 = `Non-pointer value passed to Unmarshal or Decode
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA1015 = `Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA1016 = `Trapping a signal that cannot be trapped
|
|
||||||
|
|
||||||
Not all signals can be intercepted by a process. Speficially, on
|
|
||||||
UNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are
|
UNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are
|
||||||
never passed to the process, but instead handled directly by the
|
never passed to the process, but instead handled directly by the
|
||||||
kernel. It is therefore pointless to try and handle these signals.
|
kernel. It is therefore pointless to try and handle these signals.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA1017": &lint.Documentation{
|
||||||
2017.1
|
Title: `Channels used with os/signal.Notify should be buffered`,
|
||||||
`
|
Text: `The os/signal package uses non-blocking channel sends when delivering
|
||||||
|
|
||||||
var docSA1017 = `Channels used with os/signal.Notify should be buffered
|
|
||||||
|
|
||||||
The os/signal package uses non-blocking channel sends when delivering
|
|
||||||
signals. If the receiving end of the channel isn't ready and the
|
signals. If the receiving end of the channel isn't ready and the
|
||||||
channel is either unbuffered or full, the signal will be dropped. To
|
channel is either unbuffered or full, the signal will be dropped. To
|
||||||
avoid missing signals, the channel should be buffered and of the
|
avoid missing signals, the channel should be buffered and of the
|
||||||
appropriate size. For a channel used for notification of just one
|
appropriate size. For a channel used for notification of just one
|
||||||
signal value, a buffer of size 1 is sufficient.
|
signal value, a buffer of size 1 is sufficient.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
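A minimal sketch of the buffered-channel pattern SA1017 asks for:

    package main

    import (
        "os"
        "os/signal"
    )

    func main() {
        // A buffer of 1 ensures a signal arriving before the receive is not dropped.
        c := make(chan os.Signal, 1)
        signal.Notify(c, os.Interrupt)
        <-c
    }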
|
|
||||||
|
"SA1018": &lint.Documentation{
|
||||||
|
Title: `strings.Replace called with n == 0, which does nothing`,
|
||||||
|
Text: `With n == 0, zero instances will be replaced. To replace all
|
||||||
|
instances, use a negative number, or use strings.ReplaceAll.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA1019": &lint.Documentation{
|
||||||
2017.1
|
Title: `Using a deprecated function, variable, constant or field`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA1018 = `strings.Replace called with n == 0, which does nothing
|
"SA1020": &lint.Documentation{
|
||||||
|
Title: `Using an invalid host:port pair with a net.Listen-related function`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
With n == 0, zero instances will be replaced. To replace all
|
"SA1021": &lint.Documentation{
|
||||||
instances, use a negative number, or use strings.ReplaceAll.
|
Title: `Using bytes.Equal to compare two net.IP`,
|
||||||
|
Text: `A net.IP stores an IPv4 or IPv6 address as a slice of bytes. The
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA1019 = `Using a deprecated function, variable, constant or field
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA1020 = `Using an invalid host:port pair with a net.Listen-related function
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA1021 = `Using bytes.Equal to compare two net.IP
|
|
||||||
|
|
||||||
A net.IP stores an IPv4 or IPv6 address as a slice of bytes. The
|
|
||||||
length of the slice for an IPv4 address, however, can be either 4 or
|
length of the slice for an IPv4 address, however, can be either 4 or
|
||||||
16 bytes long, using different ways of representing IPv4 addresses. In
|
16 bytes long, using different ways of representing IPv4 addresses. In
|
||||||
order to correctly compare two net.IPs, the net.IP.Equal method should
|
order to correctly compare two net.IPs, the net.IP.Equal method should
|
||||||
be used, as it takes both representations into account.
|
be used, as it takes both representations into account.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
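A short sketch of why bytes.Equal is the wrong tool here: the two representations of the same IPv4 address differ in length.

    package main

    import (
        "bytes"
        "fmt"
        "net"
    )

    func main() {
        a := net.IPv4(192, 0, 2, 1) // 16-byte (IPv4-in-IPv6) form
        b := net.IP{192, 0, 2, 1}   // 4-byte form of the same address
        fmt.Println(bytes.Equal(a, b)) // false – the raw slices differ
        fmt.Println(a.Equal(b))        // true  – same address
    }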
|
|
||||||
Available since
|
"SA1023": &lint.Documentation{
|
||||||
2017.1
|
Title: `Modifying the buffer in an io.Writer implementation`,
|
||||||
`
|
Text: `Write must not modify the slice data, even temporarily.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA1023 = `Modifying the buffer in an io.Writer implementation
|
"SA1024": &lint.Documentation{
|
||||||
|
Title: `A string cutset contains duplicate characters`,
|
||||||
|
Text: `The strings.TrimLeft and strings.TrimRight functions take cutsets, not
|
||||||
|
prefixes. A cutset is treated as a set of characters to remove from a
|
||||||
|
string. For example,
|
||||||
|
|
||||||
Write must not modify the slice data, even temporarily.
|
strings.TrimLeft("42133word", "1234"))
|
||||||
|
|
||||||
Available since
|
will result in the string "word" – any characters that are 1, 2, 3 or
|
||||||
2017.1
|
4 are cut from the left of the string.
|
||||||
`
|
|
||||||
|
|
||||||
var docSA1024 = `A string cutset contains duplicate characters, suggesting TrimPrefix or TrimSuffix should be used instead of TrimLeft or TrimRight
|
In order to remove one string from another, use strings.TrimPrefix instead.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA1025": &lint.Documentation{
|
||||||
2017.1
|
Title: `It is not possible to use (*time.Timer).Reset's return value correctly`,
|
||||||
`
|
Since: "2019.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA1025 = `It is not possible to use Reset's return value correctly
|
"SA1026": &lint.Documentation{
|
||||||
|
Title: `Cannot marshal channels or functions`,
|
||||||
|
Since: "2019.2",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA1027": &lint.Documentation{
|
||||||
2019.1
|
Title: `Atomic access to 64-bit variable must be 64-bit aligned`,
|
||||||
`
|
Text: `On ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to
|
||||||
|
|
||||||
var docSA1026 = `Cannot marshal channels or functions
|
|
||||||
|
|
||||||
Available since
|
|
||||||
Unreleased
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA1027 = `Atomic access to 64-bit variable must be 64-bit aligned
|
|
||||||
|
|
||||||
On ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to
|
|
||||||
arrange for 64-bit alignment of 64-bit words accessed atomically. The
|
arrange for 64-bit alignment of 64-bit words accessed atomically. The
|
||||||
first word in a variable or in an allocated struct, array, or slice
|
first word in a variable or in an allocated struct, array, or slice
|
||||||
can be relied upon to be 64-bit aligned.
|
can be relied upon to be 64-bit aligned.
|
||||||
|
|
||||||
You can use the structlayout tool to inspect the alignment of fields
|
You can use the structlayout tool to inspect the alignment of fields
|
||||||
in a struct.
|
in a struct.`,
|
||||||
|
Since: "2019.2",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA2000": &lint.Documentation{
|
||||||
Unreleased
|
Title: `sync.WaitGroup.Add called inside the goroutine, leading to a race condition`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA2000 = `sync.WaitGroup.Add called inside the goroutine, leading to a race condition
|
"SA2001": &lint.Documentation{
|
||||||
|
Title: `Empty critical section, did you mean to defer the unlock?`,
|
||||||
|
Text: `Empty critical sections of the kind
|
||||||
|
|
||||||
Available since
|
mu.Lock()
|
||||||
2017.1
|
mu.Unlock()
|
||||||
`
|
|
||||||
|
|
||||||
var docSA2001 = `Empty critical section, did you mean to defer the unlock?
|
are very often a typo, and the following was intended instead:
|
||||||
|
|
||||||
Available since
|
mu.Lock()
|
||||||
2017.1
|
defer mu.Unlock()
|
||||||
`
|
|
||||||
|
|
||||||
var docSA2002 = `Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed
|
Do note that sometimes empty critical sections can be useful, as a
|
||||||
|
form of signaling to wait on another goroutine. Many times, there are
|
||||||
|
simpler ways of achieving the same effect. When that isn't the case,
|
||||||
|
the code should be amply commented to avoid confusion. Combining such
|
||||||
|
comments with a //lint:ignore directive can be used to suppress this
|
||||||
|
rare false positive.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA2002": &lint.Documentation{
|
||||||
2017.1
|
Title: `Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA2003 = `Deferred Lock right after locking, likely meant to defer Unlock instead
|
"SA2003": &lint.Documentation{
|
||||||
|
Title: `Deferred Lock right after locking, likely meant to defer Unlock instead`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA3000": &lint.Documentation{
|
||||||
2017.1
|
Title: `TestMain doesn't call os.Exit, hiding test failures`,
|
||||||
`
|
Text: `Test executables (and in turn 'go test') exit with a non-zero status
|
||||||
|
|
||||||
var docSA3000 = `TestMain doesn't call os.Exit, hiding test failures
|
|
||||||
|
|
||||||
Test executables (and in turn 'go test') exit with a non-zero status
|
|
||||||
code if any tests failed. When specifying your own TestMain function,
|
code if any tests failed. When specifying your own TestMain function,
|
||||||
it is your responsibility to arrange for this, by calling os.Exit with
|
it is your responsibility to arrange for this, by calling os.Exit with
|
||||||
the correct code. The correct code is returned by (*testing.M).Run, so
|
the correct code. The correct code is returned by (*testing.M).Run, so
|
||||||
the usual way of implementing TestMain is to end it with
|
the usual way of implementing TestMain is to end it with
|
||||||
os.Exit(m.Run()).
|
os.Exit(m.Run()).`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA3001": &lint.Documentation{
|
||||||
2017.1
|
Title: `Assigning to b.N in benchmarks distorts the results`,
|
||||||
`
|
Text: `The testing package dynamically sets b.N to improve the reliability of
|
||||||
|
|
||||||
var docSA3001 = `Assigning to b.N in benchmarks distorts the results
|
|
||||||
|
|
||||||
The testing package dynamically sets b.N to improve the reliability of
|
|
||||||
benchmarks and uses it in computations to determine the duration of a
|
benchmarks and uses it in computations to determine the duration of a
|
||||||
single operation. Benchmark code must not alter b.N as this would
|
single operation. Benchmark code must not alter b.N as this would
|
||||||
falsify results.
|
falsify results.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA4000": &lint.Documentation{
|
||||||
2017.1
|
Title: `Boolean expression has identical expressions on both sides`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA4000 = `Boolean expression has identical expressions on both sides
|
"SA4001": &lint.Documentation{
|
||||||
|
Title: `&*x gets simplified to x, it does not copy x`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA4002": &lint.Documentation{
|
||||||
2017.1
|
Title: `Comparing strings with known different sizes has predictable results`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA4001 = `&*x gets simplified to x, it does not copy x
|
"SA4003": &lint.Documentation{
|
||||||
|
Title: `Comparing unsigned values against negative values is pointless`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA4004": &lint.Documentation{
|
||||||
2017.1
|
Title: `The loop exits unconditionally after one iteration`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA4002 = `Comparing strings with known different sizes has predictable results
|
"SA4005": &lint.Documentation{
|
||||||
|
Title: `Field assignment that will never be observed. Did you mean to use a pointer receiver?`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA4006": &lint.Documentation{
|
||||||
2017.1
|
Title: `A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code?`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA4003 = `Comparing unsigned values against negative values is pointless
|
"SA4008": &lint.Documentation{
|
||||||
|
Title: `The variable in the loop condition never changes, are you incrementing the wrong variable?`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA4009": &lint.Documentation{
|
||||||
2017.1
|
Title: `A function argument is overwritten before its first use`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA4004 = `The loop exits unconditionally after one iteration
|
"SA4010": &lint.Documentation{
|
||||||
|
Title: `The result of append will never be observed anywhere`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA4011": &lint.Documentation{
|
||||||
2017.1
|
Title: `Break statement with no effect. Did you mean to break out of an outer loop?`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA4005 = `Field assignment that will never be observed. Did you mean to use a pointer receiver?
|
"SA4012": &lint.Documentation{
|
||||||
|
Title: `Comparing a value against NaN even though no value is equal to NaN`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA4013": &lint.Documentation{
|
||||||
2017.1
|
Title: `Negating a boolean twice (!!b) is the same as writing b. This is either redundant, or a typo.`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA4006 = `A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code?
|
"SA4014": &lint.Documentation{
|
||||||
|
Title: `An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA4015": &lint.Documentation{
|
||||||
2017.1
|
Title: `Calling functions like math.Ceil on floats converted from integers doesn't do anything useful`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA4008 = `The variable in the loop condition never changes, are you incrementing the wrong variable?
|
"SA4016": &lint.Documentation{
|
||||||
|
Title: `Certain bitwise operations, such as x ^ 0, do not do anything useful`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA4017": &lint.Documentation{
|
||||||
2017.1
|
Title: `A pure function's return value is discarded, making the call pointless`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA4009 = `A function argument is overwritten before its first use
|
"SA4018": &lint.Documentation{
|
||||||
|
Title: `Self-assignment of variables`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA4019": &lint.Documentation{
|
||||||
2017.1
|
Title: `Multiple, identical build constraints in the same file`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA4010 = `The result of append will never be observed anywhere
|
"SA4020": &lint.Documentation{
|
||||||
|
Title: `Unreachable case clause in a type switch`,
|
||||||
Available since
|
Text: `In a type switch like the following
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA4011 = `Break statement with no effect. Did you mean to break out of an outer loop?
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA4012 = `Comparing a value against NaN even though no value is equal to NaN
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA4013 = `Negating a boolean twice (!!b) is the same as writing b. This is either redundant, or a typo.
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA4014 = `An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA4015 = `Calling functions like math.Ceil on floats converted from integers doesn't do anything useful
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA4016 = `Certain bitwise operations, such as x ^ 0, do not do anything useful
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA4017 = `A pure function's return value is discarded, making the call pointless
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA4018 = `Self-assignment of variables
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA4019 = `Multiple, identical build constraints in the same file
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA4020 = `Unreachable case clause in a type switch
|
|
||||||
|
|
||||||
In a type switch like the following
|
|
||||||
|
|
||||||
type T struct{}
|
type T struct{}
|
||||||
func (T) Read(b []byte) (int, error) { return 0, nil }
|
func (T) Read(b []byte) (int, error) { return 0, nil }
|
||||||
|
@ -449,7 +433,7 @@ Another example:
|
||||||
Even though T has a Close method and thus implements io.ReadCloser,
|
Even though T has a Close method and thus implements io.ReadCloser,
|
||||||
io.Reader will always match first. The method set of io.Reader is a
|
io.Reader will always match first. The method set of io.Reader is a
|
||||||
subset of io.ReadCloser. Thus it is impossible to match the second
|
subset of io.ReadCloser. Thus it is impossible to match the second
|
||||||
case without mtching the first case.
|
case without matching the first case.
|
||||||
|
|
||||||
|
|
||||||
Structurally equivalent interfaces
|
Structurally equivalent interfaces
|
||||||
|
@ -479,55 +463,46 @@ the following type switch will have an unreachable case clause:
|
||||||
}
|
}
|
||||||
|
|
||||||
T will always match before V because they are structurally equivalent
|
T will always match before V because they are structurally equivalent
|
||||||
and therefore doSomething()'s return value implements both.
|
and therefore doSomething()'s return value implements both.`,
|
||||||
|
Since: "2019.2",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA4021": &lint.Documentation{
|
||||||
Unreleased
|
Title: `x = append(y) is equivalent to x = y`,
|
||||||
`
|
Since: "2019.2",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA4021 = `x = append(y) is equivalent to x = y
|
"SA5000": &lint.Documentation{
|
||||||
|
Title: `Assignment to nil map`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA5001": &lint.Documentation{
|
||||||
Unreleased
|
Title: `Defering Close before checking for a possible error`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA5000 = `Assignment to nil map
|
"SA5002": &lint.Documentation{
|
||||||
|
Title: `The empty for loop (for {}) spins and can block the scheduler`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA5003": &lint.Documentation{
|
||||||
2017.1
|
Title: `Defers in infinite loops will never execute`,
|
||||||
`
|
Text: `Defers are scoped to the surrounding function, not the surrounding
|
||||||
|
|
||||||
var docSA5001 = `Defering Close before checking for a possible error
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA5002 = `The empty for loop (for {}) spins and can block the scheduler
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA5003 = `Defers in infinite loops will never execute
|
|
||||||
|
|
||||||
Defers are scoped to the surrounding function, not the surrounding
|
|
||||||
block. In a function that never returns, i.e. one containing an
|
block. In a function that never returns, i.e. one containing an
|
||||||
infinite loop, defers will never execute.
|
infinite loop, defers will never execute.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA5004": &lint.Documentation{
|
||||||
2017.1
|
Title: `for { select { ... with an empty default branch spins`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA5004 = `for { select { ... with an empty default branch spins
|
"SA5005": &lint.Documentation{
|
||||||
|
Title: `The finalizer references the finalized object, preventing garbage collection`,
|
||||||
Available since
|
Text: `A finalizer is a function associated with an object that runs when the
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA5005 = `The finalizer references the finalized object, preventing garbage collection
|
|
||||||
|
|
||||||
A finalizer is a function associated with an object that runs when the
|
|
||||||
garbage collector is ready to collect said object, that is when the
|
garbage collector is ready to collect said object, that is when the
|
||||||
object is no longer referenced by anything.
|
object is no longer referenced by anything.
|
||||||
|
|
||||||
|
@ -537,21 +512,18 @@ collector from collecting the object. The finalizer will never run,
|
||||||
and the object will never be collected, leading to a memory leak. That
|
and the object will never be collected, leading to a memory leak. That
|
||||||
is why the finalizer should instead use its first argument to operate
|
is why the finalizer should instead use its first argument to operate
|
||||||
on the object. That way, the number of references can temporarily go
|
on the object. That way, the number of references can temporarily go
|
||||||
to zero before the object is being passed to the finalizer.
|
to zero before the object is being passed to the finalizer.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
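A rough sketch of the two variants SA5005 distinguishes, using a hypothetical resource type:

    package main

    import "runtime"

    type resource struct{ fd int }

    func release(fd int) { /* close the descriptor */ }

    func newResource() *resource {
        r := &resource{fd: 42}
        // Wrong (flagged): the closure captures r, so r never becomes unreachable.
        //   runtime.SetFinalizer(r, func(*resource) { release(r.fd) })
        // Right: operate on the finalizer's own argument instead.
        runtime.SetFinalizer(r, func(obj *resource) { release(obj.fd) })
        return r
    }

    func main() { _ = newResource() }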
|
|
||||||
Available since
|
"SA5006": &lint.Documentation{
|
||||||
2017.1
|
Title: `Slice index out of bounds`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA5006 = `Slice index out of bounds
|
"SA5007": &lint.Documentation{
|
||||||
|
Title: `Infinite recursive call`,
|
||||||
Available since
|
Text: `A function that calls itself recursively needs to have an exit
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA5007 = `Infinite recursive call
|
|
||||||
|
|
||||||
A function that calls itself recursively needs to have an exit
|
|
||||||
condition. Otherwise it will recurse forever, until the system runs
|
condition. Otherwise it will recurse forever, until the system runs
|
||||||
out of memory.
|
out of memory.
|
||||||
|
|
||||||
|
@ -559,21 +531,29 @@ This issue can be caused by simple bugs such as forgetting to add an
|
||||||
exit condition. It can also happen "on purpose". Some languages have
|
exit condition. It can also happen "on purpose". Some languages have
|
||||||
tail call optimization which makes certain infinite recursive calls
|
tail call optimization which makes certain infinite recursive calls
|
||||||
safe to use. Go, however, does not implement TCO, and as such a loop
|
safe to use. Go, however, does not implement TCO, and as such a loop
|
||||||
should be used instead.
|
should be used instead.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA5008": &lint.Documentation{
|
||||||
2017.1
|
Title: `Invalid struct tag`,
|
||||||
`
|
Since: "2019.2",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA6000 = `Using regexp.Match or related in a loop, should use regexp.Compile
|
"SA5009": &lint.Documentation{
|
||||||
|
Title: `Invalid Printf call`,
|
||||||
|
Since: "2019.2",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA6000": &lint.Documentation{
|
||||||
2017.1
|
Title: `Using regexp.Match or related in a loop, should use regexp.Compile`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA6001 = `Missing an optimization opportunity when indexing maps by byte slices
|
"SA6001": &lint.Documentation{
|
||||||
|
Title: `Missing an optimization opportunity when indexing maps by byte slices`,
|
||||||
|
|
||||||
Map keys must be comparable, which precludes the use of byte slices.
|
Text: `Map keys must be comparable, which precludes the use of byte slices.
|
||||||
This usually leads to using string keys and converting byte slices to
|
This usually leads to using string keys and converting byte slices to
|
||||||
strings.
|
strings.
|
||||||
|
|
||||||
|
@ -596,15 +576,13 @@ because the first version needs to copy and allocate, while the second
|
||||||
one does not.
|
one does not.
|
||||||
|
|
||||||
For some history on this optimization, check out commit
|
For some history on this optimization, check out commit
|
||||||
f5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository.
|
f5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA6002": &lint.Documentation{
|
||||||
2017.1
|
Title: `Storing non-pointer values in sync.Pool allocates memory`,
|
||||||
`
|
Text: `A sync.Pool is used to avoid unnecessary allocations and reduce the
|
||||||
|
|
||||||
var docSA6002 = `Storing non-pointer values in sync.Pool allocates memory
|
|
||||||
|
|
||||||
A sync.Pool is used to avoid unnecessary allocations and reduce the
|
|
||||||
amount of work the garbage collector has to do.
|
amount of work the garbage collector has to do.
|
||||||
|
|
||||||
When passing a value that is not a pointer to a function that accepts
|
When passing a value that is not a pointer to a function that accepts
|
||||||
|
@ -615,15 +593,13 @@ an array). In order to avoid the extra allocation, one should store a
|
||||||
pointer to the slice instead.
|
pointer to the slice instead.
|
||||||
|
|
||||||
See the comments on https://go-review.googlesource.com/c/go/+/24371
|
See the comments on https://go-review.googlesource.com/c/go/+/24371
|
||||||
that discuss this problem.
|
that discuss this problem.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
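A compact sketch of the pointer-to-slice pattern suggested above:

    package main

    import "sync"

    var bufPool = sync.Pool{
        New: func() interface{} {
            b := make([]byte, 0, 4096)
            return &b // storing a pointer avoids allocating to box the slice header on every Put
        },
    }

    func main() {
        buf := bufPool.Get().(*[]byte)
        *buf = (*buf)[:0]
        // ... use *buf ...
        bufPool.Put(buf)
    }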
|
|
||||||
Available since
|
"SA6003": &lint.Documentation{
|
||||||
2017.1
|
Title: `Converting a string to a slice of runes before ranging over it`,
|
||||||
`
|
Text: `You may want to loop over the runes in a string. Instead of converting
|
||||||
|
|
||||||
var docSA6003 = `Converting a string to a slice of runes before ranging over it
|
|
||||||
|
|
||||||
You may want to loop over the runes in a string. Instead of converting
|
|
||||||
the string to a slice of runes and looping over that, you can loop
|
the string to a slice of runes and looping over that, you can loop
|
||||||
over the string itself. That is,
|
over the string itself. That is,
|
||||||
|
|
||||||
|
@ -639,15 +615,13 @@ and avoid unnecessary memory allocations.
|
||||||
Do note that if you are interested in the indices, ranging over a
|
Do note that if you are interested in the indices, ranging over a
|
||||||
string and over a slice of runes will yield different indices. The
|
string and over a slice of runes will yield different indices. The
|
||||||
first one yields byte offsets, while the second one yields indices in
|
first one yields byte offsets, while the second one yields indices in
|
||||||
the slice of runes.
|
the slice of runes.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
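A minimal sketch of ranging over the string itself instead of a rune slice:

    package main

    func main() {
        for i, r := range "héllo" { // no allocation; i is a byte offset, r the rune at that offset
            _, _ = i, r
        }
        for i, r := range []rune("héllo") { // allocates a rune slice; i indexes runes, not bytes
            _, _ = i, r
        }
    }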
|
|
||||||
Available since
|
"SA6005": &lint.Documentation{
|
||||||
2017.1
|
Title: `Inefficient string comparison with strings.ToLower or strings.ToUpper`,
|
||||||
`
|
Text: `Converting two strings to the same case and comparing them like so
|
||||||
|
|
||||||
var docSA6005 = `Inefficient string comparison with strings.ToLower or strings.ToUpper
|
|
||||||
|
|
||||||
Converting two strings to the same case and comparing them like so
|
|
||||||
|
|
||||||
if strings.ToLower(s1) == strings.ToLower(s2) {
|
if strings.ToLower(s1) == strings.ToLower(s2) {
|
||||||
...
|
...
|
||||||
|
@ -665,33 +639,29 @@ strings and can return as soon as the first non-matching character has
|
||||||
been found.
|
been found.
|
||||||
|
|
||||||
For a more in-depth explanation of this issue, see
|
For a more in-depth explanation of this issue, see
|
||||||
https://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/
|
https://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/`,
|
||||||
|
Since: "2019.2",
|
||||||
|
},
|
||||||
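The usual replacement is a single call to strings.EqualFold, sketched here:

    package main

    import (
        "fmt"
        "strings"
    )

    func main() {
        fmt.Println(strings.EqualFold("Chicken", "chicken")) // true, without allocating lowercased copies
    }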
|
|
||||||
Available since
|
"SA9001": &lint.Documentation{
|
||||||
Unreleased
|
Title: `Defers in range loops may not run when you expect them to`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA9001 = `Defers in 'for range' loops may not run when you expect them to
|
"SA9002": &lint.Documentation{
|
||||||
|
Title: `Using a non-octal os.FileMode that looks like it was meant to be in octal.`,
|
||||||
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA9003": &lint.Documentation{
|
||||||
2017.1
|
Title: `Empty body in an if or else branch`,
|
||||||
`
|
Since: "2017.1",
|
||||||
|
},
|
||||||
|
|
||||||
var docSA9002 = `Using a non-octal os.FileMode that looks like it was meant to be in octal.
|
"SA9004": &lint.Documentation{
|
||||||
|
Title: `Only the first constant has an explicit type`,
|
||||||
|
|
||||||
Available since
|
Text: `In a constant declaration such as the following:
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA9003 = `Empty body in an if or else branch
|
|
||||||
|
|
||||||
Available since
|
|
||||||
2017.1
|
|
||||||
`
|
|
||||||
|
|
||||||
var docSA9004 = `Only the first constant has an explicit type
|
|
||||||
|
|
||||||
In a constant declaration such as the following:
|
|
||||||
|
|
||||||
const (
|
const (
|
||||||
First byte = 1
|
First byte = 1
|
||||||
|
@ -727,7 +697,7 @@ Wrong type in variable declarations
|
||||||
The most obvious issue with such incorrect enumerations expresses
|
The most obvious issue with such incorrect enumerations expresses
|
||||||
itself as a compile error:
|
itself as a compile error:
|
||||||
|
|
||||||
package pkg
|
package pkg
|
||||||
|
|
||||||
const (
|
const (
|
||||||
EnumFirst uint8 = 1
|
EnumFirst uint8 = 1
|
||||||
|
@ -776,22 +746,19 @@ This code will output
|
||||||
an enum
|
an enum
|
||||||
2
|
2
|
||||||
|
|
||||||
as EnumSecond has no explicit type, and thus defaults to int.
|
as EnumSecond has no explicit type, and thus defaults to int.`,
|
||||||
|
Since: "2019.1",
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"SA9005": &lint.Documentation{
|
||||||
2019.1
|
Title: `Trying to marshal a struct with no public fields nor custom marshaling`,
|
||||||
`
|
Text: `The encoding/json and encoding/xml packages only operate on exported
|
||||||
|
|
||||||
var docSA9005 = `Trying to marshal a struct with no public fields nor custom marshaling
|
|
||||||
|
|
||||||
The encoding/json and encoding/xml packages only operate on exported
|
|
||||||
fields in structs, not unexported ones. It is usually an error to try
|
fields in structs, not unexported ones. It is usually an error to try
|
||||||
to (un)marshal structs that only consist of unexported fields.
|
to (un)marshal structs that only consist of unexported fields.
|
||||||
|
|
||||||
This check will not flag calls involving types that define custom
|
This check will not flag calls involving types that define custom
|
||||||
marshaling behavior, e.g. via MarshalJSON methods. It will also not
|
marshaling behavior, e.g. via MarshalJSON methods. It will also not
|
||||||
flag empty structs.
|
flag empty structs.`,
|
||||||
|
Since: "2019.2",
|
||||||
Available since
|
},
|
||||||
Unreleased
|
}
|
||||||
`
|
|
||||||
|
|
25 vendor/honnef.co/go/tools/staticcheck/knowledge.go (vendored, new file)
@ -0,0 +1,25 @@
package staticcheck

import (
	"reflect"

	"golang.org/x/tools/go/analysis"
	"honnef.co/go/tools/internal/passes/buildssa"
	"honnef.co/go/tools/ssa"
	"honnef.co/go/tools/staticcheck/vrp"
)

var valueRangesAnalyzer = &analysis.Analyzer{
	Name: "vrp",
	Doc:  "calculate value ranges of functions",
	Run: func(pass *analysis.Pass) (interface{}, error) {
		m := map[*ssa.Function]vrp.Ranges{}
		for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
			vr := vrp.BuildGraph(ssafn).Solve()
			m[ssafn] = vr
		}
		return m, nil
	},
	Requires:   []*analysis.Analyzer{buildssa.Analyzer},
	ResultType: reflect.TypeOf(map[*ssa.Function]vrp.Ranges{}),
}
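For context, a hedged sketch of how another analyzer in the same package would consume this result through pass.ResultOf; the consuming analyzer below is purely illustrative and not part of the vendored code:

    // Illustrative only: an analyzer that depends on valueRangesAnalyzer.
    var usesValueRanges = &analysis.Analyzer{
        Name:     "usesvrp",
        Doc:      "example consumer of the vrp result",
        Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
        Run: func(pass *analysis.Pass) (interface{}, error) {
            ranges := pass.ResultOf[valueRangesAnalyzer].(map[*ssa.Function]vrp.Ranges)
            _ = ranges // per-function vrp.Ranges are available here
            return nil, nil
        },
    }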
1277 vendor/honnef.co/go/tools/staticcheck/lint.go (vendored)
File diff suppressed because it is too large
19 vendor/honnef.co/go/tools/staticcheck/rules.go (vendored)
@ -13,7 +13,7 @@ import (
 	"time"
 	"unicode/utf8"

-	"honnef.co/go/tools/lint"
+	"golang.org/x/tools/go/analysis"
 	. "honnef.co/go/tools/lint/lintdsl"
 	"honnef.co/go/tools/ssa"
 	"honnef.co/go/tools/staticcheck/vrp"
@ -26,12 +26,11 @@ const (
 )

 type Call struct {
-	Job   *lint.Job
+	Pass  *analysis.Pass
 	Instr ssa.CallInstruction
 	Args  []*Argument

-	Checker *Checker
-	Parent  *ssa.Function
+	Parent *ssa.Function

 	invalids []string
 }
@ -184,7 +183,7 @@ func ConvertedFromInt(v Value) bool {
 	return true
 }

-func validEncodingBinaryType(j *lint.Job, typ types.Type) bool {
+func validEncodingBinaryType(pass *analysis.Pass, typ types.Type) bool {
 	typ = typ.Underlying()
 	switch typ := typ.(type) {
 	case *types.Basic:
@ -194,19 +193,19 @@ func validEncodingBinaryType(j *lint.Job, typ types.Type) bool {
 			types.Float32, types.Float64, types.Complex64, types.Complex128, types.Invalid:
 			return true
 		case types.Bool:
-			return IsGoVersion(j, 8)
+			return IsGoVersion(pass, 8)
 		}
 		return false
 	case *types.Struct:
 		n := typ.NumFields()
 		for i := 0; i < n; i++ {
-			if !validEncodingBinaryType(j, typ.Field(i).Type()) {
+			if !validEncodingBinaryType(pass, typ.Field(i).Type()) {
 				return false
 			}
 		}
 		return true
 	case *types.Array:
-		return validEncodingBinaryType(j, typ.Elem())
+		return validEncodingBinaryType(pass, typ.Elem())
 	case *types.Interface:
 		// we can't determine if it's a valid type or not
 		return true
@ -214,7 +213,7 @@ func validEncodingBinaryType(j *lint.Job, typ types.Type) bool {
 		return false
 	}

-func CanBinaryMarshal(j *lint.Job, v Value) bool {
+func CanBinaryMarshal(pass *analysis.Pass, v Value) bool {
 	typ := v.Value.Type().Underlying()
 	if ttyp, ok := typ.(*types.Pointer); ok {
 		typ = ttyp.Elem().Underlying()
@ -227,7 +226,7 @@ func CanBinaryMarshal(j *lint.Job, v Value) bool {
 		}
 	}

-	return validEncodingBinaryType(j, typ)
+	return validEncodingBinaryType(pass, typ)
 }

 func RepeatZeroTimes(name string, arg int) CallCheck {
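As a quick illustration of the rule these helpers encode, namely that encoding/binary only handles fixed-size types, a small sketch:

    package main

    import (
        "bytes"
        "encoding/binary"
        "fmt"
    )

    func main() {
        var buf bytes.Buffer
        fmt.Println(binary.Write(&buf, binary.LittleEndian, int32(7))) // <nil>: int32 has a known size
        fmt.Println(binary.Write(&buf, binary.LittleEndian, int(7)))   // error: int has no fixed size
    }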
24 vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go (vendored)
@ -722,16 +722,22 @@ func (g *Graph) widen(c Constraint, consts []Z) bool {
 	}
 	nlc := NInfinity
 	nuc := PInfinity
-	for _, co := range consts {
-		if co.Cmp(ni.Lower) <= 0 {
-			nlc = co
-			break
-		}
-	}
-	for _, co := range consts {
-		if co.Cmp(ni.Upper) >= 0 {
-			nuc = co
-			break
-		}
-	}
+
+	// Don't get stuck widening for an absurd amount of time due
+	// to an excess number of constants, as may be present in
+	// table-based scanners.
+	if len(consts) < 1000 {
+		for _, co := range consts {
+			if co.Cmp(ni.Lower) <= 0 {
+				nlc = co
+				break
+			}
+		}
+		for _, co := range consts {
+			if co.Cmp(ni.Upper) >= 0 {
+				nuc = co
+				break
+			}
+		}
+	}

111 vendor/honnef.co/go/tools/stylecheck/analysis.go (vendored, new file)
@ -0,0 +1,111 @@
package stylecheck

import (
	"flag"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"honnef.co/go/tools/config"
	"honnef.co/go/tools/facts"
	"honnef.co/go/tools/internal/passes/buildssa"
	"honnef.co/go/tools/lint/lintutil"
)

func newFlagSet() flag.FlagSet {
	fs := flag.NewFlagSet("", flag.PanicOnError)
	fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version")
	return *fs
}

var Analyzers = map[string]*analysis.Analyzer{
	"ST1000": {
		Name:     "ST1000",
		Run:      CheckPackageComment,
		Doc:      Docs["ST1000"].String(),
		Requires: []*analysis.Analyzer{},
		Flags:    newFlagSet(),
	},
	"ST1001": {
		Name:     "ST1001",
		Run:      CheckDotImports,
		Doc:      Docs["ST1001"].String(),
		Requires: []*analysis.Analyzer{facts.Generated, config.Analyzer},
		Flags:    newFlagSet(),
	},
	"ST1003": {
		Name:     "ST1003",
		Run:      CheckNames,
		Doc:      Docs["ST1003"].String(),
		Requires: []*analysis.Analyzer{facts.Generated, config.Analyzer},
		Flags:    newFlagSet(),
	},
	"ST1005": {
		Name:     "ST1005",
		Run:      CheckErrorStrings,
		Doc:      Docs["ST1005"].String(),
		Requires: []*analysis.Analyzer{buildssa.Analyzer},
		Flags:    newFlagSet(),
	},
	"ST1006": {
		Name:     "ST1006",
		Run:      CheckReceiverNames,
		Doc:      Docs["ST1006"].String(),
		Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.Generated},
		Flags:    newFlagSet(),
	},
	"ST1008": {
		Name:     "ST1008",
		Run:      CheckErrorReturn,
		Doc:      Docs["ST1008"].String(),
		Requires: []*analysis.Analyzer{buildssa.Analyzer},
		Flags:    newFlagSet(),
	},
	"ST1011": {
		Name:  "ST1011",
		Run:   CheckTimeNames,
		Doc:   Docs["ST1011"].String(),
		Flags: newFlagSet(),
	},
	"ST1012": {
		Name:     "ST1012",
		Run:      CheckErrorVarNames,
		Doc:      Docs["ST1012"].String(),
		Requires: []*analysis.Analyzer{config.Analyzer},
		Flags:    newFlagSet(),
	},
	"ST1013": {
		Name:     "ST1013",
		Run:      CheckHTTPStatusCodes,
		Doc:      Docs["ST1013"].String(),
		Requires: []*analysis.Analyzer{facts.Generated, facts.TokenFile, config.Analyzer},
		Flags:    newFlagSet(),
	},
	"ST1015": {
		Name:     "ST1015",
		Run:      CheckDefaultCaseOrder,
		Doc:      Docs["ST1015"].String(),
		Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile},
		Flags:    newFlagSet(),
	},
	"ST1016": {
		Name:     "ST1016",
		Run:      CheckReceiverNamesIdentical,
		Doc:      Docs["ST1016"].String(),
		Requires: []*analysis.Analyzer{buildssa.Analyzer},
		Flags:    newFlagSet(),
	},
	"ST1017": {
		Name:     "ST1017",
		Run:      CheckYodaConditions,
		Doc:      Docs["ST1017"].String(),
		Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile},
		Flags:    newFlagSet(),
	},
	"ST1018": {
		Name:     "ST1018",
		Run:      CheckInvisibleCharacters,
		Doc:      Docs["ST1018"].String(),
		Requires: []*analysis.Analyzer{inspect.Analyzer},
		Flags:    newFlagSet(),
	},
}
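These analyzers follow the standard go/analysis driver model; a hedged sketch of wiring a couple of them into a multichecker binary follows (this main package is illustrative, not part of the vendored code):

    package main

    import (
        "golang.org/x/tools/go/analysis/multichecker"

        "honnef.co/go/tools/stylecheck"
    )

    func main() {
        multichecker.Main(
            stylecheck.Analyzers["ST1003"],
            stylecheck.Analyzers["ST1005"],
        )
    }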
188 vendor/honnef.co/go/tools/stylecheck/doc.go (vendored)
@ -1,18 +1,20 @@
|
||||||
package stylecheck
|
package stylecheck
|
||||||
|
|
||||||
var docST1000 = `Incorrect or missing package comment
|
import "honnef.co/go/tools/lint"
|
||||||
|
|
||||||
Packages must have a package comment that is formatted according to
|
var Docs = map[string]*lint.Documentation{
|
||||||
|
"ST1000": &lint.Documentation{
|
||||||
|
Title: `Incorrect or missing package comment`,
|
||||||
|
Text: `Packages must have a package comment that is formatted according to
|
||||||
the guidelines laid out in
|
the guidelines laid out in
|
||||||
https://github.com/golang/go/wiki/CodeReviewComments#package-comments.
|
https://github.com/golang/go/wiki/CodeReviewComments#package-comments.`,
|
||||||
|
Since: "2019.1",
|
||||||
|
NonDefault: true,
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"ST1001": &lint.Documentation{
|
||||||
2019.1, non-default
|
Title: `Dot imports are discouraged`,
|
||||||
`
|
Text: `Dot imports that aren't in external test packages are discouraged.
|
||||||
|
|
||||||
var docST1001 = `Dot imports are discouraged
|
|
||||||
|
|
||||||
Dot imports that aren't in external test packages are discouraged.
|
|
||||||
|
|
||||||
The dot_import_whitelist option can be used to whitelist certain
|
The dot_import_whitelist option can be used to whitelist certain
|
||||||
imports.
|
imports.
|
||||||
|
@ -35,36 +37,29 @@ Quoting Go Code Review Comments:
|
||||||
it is not. Except for this one case, do not use import . in your
|
it is not. Except for this one case, do not use import . in your
|
||||||
programs. It makes the programs much harder to read because it is
|
programs. It makes the programs much harder to read because it is
|
||||||
unclear whether a name like Quux is a top-level identifier in the
|
unclear whether a name like Quux is a top-level identifier in the
|
||||||
current package or in an imported package.
|
current package or in an imported package.`,
|
||||||
|
Since: "2019.1",
|
||||||
|
Options: []string{"dot_import_whitelist"},
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"ST1003": &lint.Documentation{
|
||||||
2019.1
|
Title: `Poorly chosen identifier`,
|
||||||
|
Text: `Identifiers, such as variable and package names, follow certain rules.
|
||||||
Options
|
|
||||||
dot_import_whitelist
|
|
||||||
`
|
|
||||||
|
|
||||||
var docST1003 = `Poorly chosen identifier
|
|
||||||
|
|
||||||
Identifiers, such as variable and package names, follow certain rules.
|
|
||||||
|
|
||||||
See the following links for details:
|
See the following links for details:
|
||||||
|
|
||||||
http://golang.org/doc/effective_go.html#package-names
|
- https://golang.org/doc/effective_go.html#package-names
|
||||||
http://golang.org/doc/effective_go.html#mixed-caps
|
- https://golang.org/doc/effective_go.html#mixed-caps
|
||||||
https://github.com/golang/go/wiki/CodeReviewComments#initialisms
|
- https://github.com/golang/go/wiki/CodeReviewComments#initialisms
|
||||||
https://github.com/golang/go/wiki/CodeReviewComments#variable-names
|
- https://github.com/golang/go/wiki/CodeReviewComments#variable-names`,
|
||||||
|
Since: "2019.1",
|
||||||
|
NonDefault: true,
|
||||||
|
Options: []string{"initialisms"},
|
||||||
|
},
|
||||||
|
|
||||||
Available since
|
"ST1005": &lint.Documentation{
|
||||||
2019.1, non-default
|
Title: `Incorrectly formatted error string`,
|
||||||
|
Text: `Error strings follow a set of guidelines to ensure uniformity and good
|
||||||
Options
|
|
||||||
initialisms
|
|
||||||
`
|
|
||||||
|
|
||||||
-var docST1005 = `Incorrectly formatted error string
-
-Error strings follow a set of guidelines to ensure uniformity and good
 composability.

 Quoting Go Code Review Comments:
@@ -74,15 +69,13 @@ Quoting Go Code Review Comments:
 usually printed following other context. That is, use
 fmt.Errorf("something bad") not fmt.Errorf("Something bad"), so
 that log.Printf("Reading %s: %v", filename, err) formats without a
-spurious capital letter mid-message.
-
-Available since
-2019.1
-`
-
-var docST1006 = `Poorly chosen receiver name
-
-Quoting Go Code Review Comments:
+spurious capital letter mid-message.`,
+Since: "2019.1",
+},
+
+"ST1006": &lint.Documentation{
+Title: `Poorly chosen receiver name`,
+Text: `Quoting Go Code Review Comments:

 The name of a method's receiver should be a reflection of its
 identity; often a one or two letter abbreviation of its type
@@ -94,77 +87,68 @@ Quoting Go Code Review Comments:
 documentary purpose. It can be very short as it will appear on
 almost every line of every method of the type; familiarity admits
 brevity. Be consistent, too: if you call the receiver "c" in one
-method, don't call it "cl" in another.
-
-Available since
-2019.1
-`
+method, don't call it "cl" in another.`,
+Since: "2019.1",
+},

-var docST1008 = `A function's error value should be its last return value
-
-A function's error value should be its last return value.
-
-Available since
-2019.1
-`
+"ST1008": &lint.Documentation{
+Title: `A function's error value should be its last return value`,
+Text: `A function's error value should be its last return value.`,
+Since: `2019.1`,
+},

-var docST1011 = `Poorly chosen name for variable of type time.Duration
-
-time.Duration values represent an amount of time, which is represented
+"ST1011": &lint.Documentation{
+Title: `Poorly chosen name for variable of type time.Duration`,
+Text: `time.Duration values represent an amount of time, which is represented
 as a count of nanoseconds. An expression like 5 * time.Microsecond
 yields the value 5000. It is therefore not appropriate to suffix a
 variable of type time.Duration with any time unit, such as Msec or
-Milli.
-
-Available since
-2019.1
-`
+Milli.`,
+Since: `2019.1`,
+},

-var docST1012 = `Poorly chosen name for error variable
-
-Error variables that are part of an API should be called errFoo or
-ErrFoo.
-
-Available since
-2019.1
-`
+"ST1012": &lint.Documentation{
+Title: `Poorly chosen name for error variable`,
+Text: `Error variables that are part of an API should be called errFoo or
+ErrFoo.`,
+Since: "2019.1",
+},

-var docST1013 = `Should use constants for HTTP error codes, not magic numbers
-
-HTTP has a tremendous number of status codes. While some of those are
+"ST1013": &lint.Documentation{
+Title: `Should use constants for HTTP error codes, not magic numbers`,
+Text: `HTTP has a tremendous number of status codes. While some of those are
 well known (200, 400, 404, 500), most of them are not. The net/http
 package provides constants for all status codes that are part of the
 various specifications. It is recommended to use these constants
 instead of hard-coding magic numbers, to vastly improve the
-readability of your code.
-
-Available since
-2019.1
-
-Options
-http_status_code_whitelist
-`
+readability of your code.`,
+Since: "2019.1",
+Options: []string{"http_status_code_whitelist"},
+},

-var docST1015 = `A switch's default case should be the first or last case
-
-Available since
-2019.1
-`
+"ST1015": &lint.Documentation{
+Title: `A switch's default case should be the first or last case`,
+Since: "2019.1",
+},

-var docST1016 = `Use consistent method receiver names
-
-Available since
-2019.1, non-default
-`
+"ST1016": &lint.Documentation{
+Title: `Use consistent method receiver names`,
+Since: "2019.1",
+NonDefault: true,
+},

-var docST1017 = `Don't use Yoda conditions
-
-Available since
-Unreleased
-`
+"ST1017": &lint.Documentation{
+Title: `Don't use Yoda conditions`,
+Text: `Yoda conditions are conditions of the kind 'if 42 == x', where the
+literal is on the left side of the comparison. These are a common
+idiom in languages in which assignment is an expression, to avoid bugs
+of the kind 'if (x = 42)'. In Go, which doesn't allow for this kind of
+bug, we prefer the more idiomatic 'if x == 42'.`,
+Since: "2019.2",
+},

-var docST1018 = `Avoid zero-width and control characters in string literals
-
-Available since
-Unreleased
-`
+"ST1018": &lint.Documentation{
+Title: `Avoid zero-width and control characters in string literals`,
+Since: "2019.2",
+},
+}
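The documentation entries above describe what the stylecheck rules flag. As a hedged illustration only (this snippet is not part of the commit; all names in it are made up), a small Go file written the way several of these checks prefer:

package main

import (
	"errors"
	"fmt"
	"net/http"
	"time"
)

// ST1012: exported error values are named ErrFoo (errFoo if unexported).
// ST1005: the message is lower-case and has no trailing punctuation.
var ErrNotFound = errors.New("entity not found")

func main() {
	// ST1011: a time.Duration already carries its unit, so the name has no
	// unit suffix such as Secs or Msec.
	timeout := 5 * time.Second

	x := 42
	// ST1017: the variable goes on the left, not the Yoda form "42 == x".
	if x == 42 {
		fmt.Println("ok", timeout, ErrNotFound)
	}

	// ST1013: use the named constant instead of the magic number 404.
	fmt.Println(http.StatusNotFound)
}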
vendor/honnef.co/go/tools/stylecheck/lint.go (vendored, 203 lines changed)

@@ -11,49 +11,18 @@ import (
 "unicode"
 "unicode/utf8"

-"honnef.co/go/tools/lint"
+"honnef.co/go/tools/config"
+"honnef.co/go/tools/internal/passes/buildssa"
 . "honnef.co/go/tools/lint/lintdsl"
 "honnef.co/go/tools/ssa"

+"golang.org/x/tools/go/analysis"
+"golang.org/x/tools/go/analysis/passes/inspect"
+"golang.org/x/tools/go/ast/inspector"
 "golang.org/x/tools/go/types/typeutil"
 )

-type Checker struct {
-CheckGenerated bool
-}
-
-func NewChecker() *Checker {
-return &Checker{}
-}
-
-func (*Checker) Name() string { return "stylecheck" }
-func (*Checker) Prefix() string { return "ST" }
-func (c *Checker) Init(prog *lint.Program) {}
-
-func (c *Checker) Checks() []lint.Check {
-return []lint.Check{
-{ID: "ST1000", FilterGenerated: false, Fn: c.CheckPackageComment, Doc: docST1000},
-{ID: "ST1001", FilterGenerated: true, Fn: c.CheckDotImports, Doc: docST1001},
-// {ID: "ST1002", FilterGenerated: true, Fn: c.CheckBlankImports, Doc: docST1002},
-{ID: "ST1003", FilterGenerated: true, Fn: c.CheckNames, Doc: docST1003},
-// {ID: "ST1004", FilterGenerated: false, Fn: nil, , Doc: docST1004},
-{ID: "ST1005", FilterGenerated: false, Fn: c.CheckErrorStrings, Doc: docST1005},
-{ID: "ST1006", FilterGenerated: false, Fn: c.CheckReceiverNames, Doc: docST1006},
-// {ID: "ST1007", FilterGenerated: true, Fn: c.CheckIncDec, Doc: docST1007},
-{ID: "ST1008", FilterGenerated: false, Fn: c.CheckErrorReturn, Doc: docST1008},
-// {ID: "ST1009", FilterGenerated: false, Fn: c.CheckUnexportedReturn, Doc: docST1009},
-// {ID: "ST1010", FilterGenerated: false, Fn: c.CheckContextFirstArg, Doc: docST1010},
-{ID: "ST1011", FilterGenerated: false, Fn: c.CheckTimeNames, Doc: docST1011},
-{ID: "ST1012", FilterGenerated: false, Fn: c.CheckErrorVarNames, Doc: docST1012},
-{ID: "ST1013", FilterGenerated: true, Fn: c.CheckHTTPStatusCodes, Doc: docST1013},
-{ID: "ST1015", FilterGenerated: true, Fn: c.CheckDefaultCaseOrder, Doc: docST1015},
-{ID: "ST1016", FilterGenerated: false, Fn: c.CheckReceiverNamesIdentical, Doc: docST1016},
-{ID: "ST1017", FilterGenerated: true, Fn: c.CheckYodaConditions, Doc: docST1017},
-{ID: "ST1018", FilterGenerated: false, Fn: c.CheckInvisibleCharacters, Doc: docST1018},
-}
-}
-
-func (c *Checker) CheckPackageComment(j *lint.Job) {
+func CheckPackageComment(pass *analysis.Pass) (interface{}, error) {
 // - At least one file in a non-main package should have a package comment
 //
 // - The comment should be of the form
@@ -62,57 +31,59 @@ func (c *Checker) CheckPackageComment(j *lint.Job) {
 // which case they get appended. But that doesn't happen a lot in
 // the real world.

-if j.Pkg.Name == "main" {
+if pass.Pkg.Name() == "main" {
-return
+return nil, nil
 }
 hasDocs := false
-for _, f := range j.Pkg.Syntax {
+for _, f := range pass.Files {
-if IsInTest(j, f) {
+if IsInTest(pass, f) {
 continue
 }
 if f.Doc != nil && len(f.Doc.List) > 0 {
 hasDocs = true
 prefix := "Package " + f.Name.Name + " "
 if !strings.HasPrefix(strings.TrimSpace(f.Doc.Text()), prefix) {
-j.Errorf(f.Doc, `package comment should be of the form "%s..."`, prefix)
+ReportNodef(pass, f.Doc, `package comment should be of the form "%s..."`, prefix)
 }
 f.Doc.Text()
 }
 }

 if !hasDocs {
-for _, f := range j.Pkg.Syntax {
+for _, f := range pass.Files {
-if IsInTest(j, f) {
+if IsInTest(pass, f) {
 continue
 }
-j.Errorf(f, "at least one file in a package should have a package comment")
+ReportNodef(pass, f, "at least one file in a package should have a package comment")
 }
 }
+return nil, nil
 }

-func (c *Checker) CheckDotImports(j *lint.Job) {
+func CheckDotImports(pass *analysis.Pass) (interface{}, error) {
-for _, f := range j.Pkg.Syntax {
+for _, f := range pass.Files {
 imports:
 for _, imp := range f.Imports {
 path := imp.Path.Value
 path = path[1 : len(path)-1]
-for _, w := range j.Pkg.Config.DotImportWhitelist {
+for _, w := range config.For(pass).DotImportWhitelist {
 if w == path {
 continue imports
 }
 }

-if imp.Name != nil && imp.Name.Name == "." && !IsInTest(j, f) {
+if imp.Name != nil && imp.Name.Name == "." && !IsInTest(pass, f) {
-j.Errorf(imp, "should not use dot imports")
+ReportNodefFG(pass, imp, "should not use dot imports")
 }
 }
 }
+return nil, nil
 }

-func (c *Checker) CheckBlankImports(j *lint.Job) {
+func CheckBlankImports(pass *analysis.Pass) (interface{}, error) {
-fset := j.Pkg.Fset
+fset := pass.Fset
-for _, f := range j.Pkg.Syntax {
+for _, f := range pass.Files {
-if IsInMain(j, f) || IsInTest(j, f) {
+if IsInMain(pass, f) || IsInTest(pass, f) {
 continue
 }

@@ -161,13 +132,14 @@ func (c *Checker) CheckBlankImports(j *lint.Job) {
 }

 if imp.Doc == nil && imp.Comment == nil && !skip[imp] {
-j.Errorf(imp, "a blank import should be only in a main or test package, or have a comment justifying it")
+ReportNodef(pass, imp, "a blank import should be only in a main or test package, or have a comment justifying it")
 }
 }
 }
+return nil, nil
 }

-func (c *Checker) CheckIncDec(j *lint.Job) {
+func CheckIncDec(pass *analysis.Pass) (interface{}, error) {
 // TODO(dh): this can be noisy for function bodies that look like this:
 // x += 3
 // ...
@@ -192,14 +164,15 @@ func (c *Checker) CheckIncDec(j *lint.Job) {
 suffix = "--"
 }

-j.Errorf(assign, "should replace %s with %s%s", Render(j, assign), Render(j, assign.Lhs[0]), suffix)
+ReportNodef(pass, assign, "should replace %s with %s%s", Render(pass, assign), Render(pass, assign.Lhs[0]), suffix)
 }
-j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn)
+pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn)
+return nil, nil
 }

-func (c *Checker) CheckErrorReturn(j *lint.Job) {
+func CheckErrorReturn(pass *analysis.Pass) (interface{}, error) {
 fnLoop:
-for _, fn := range j.Pkg.InitialFunctions {
+for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
 sig := fn.Type().(*types.Signature)
 rets := sig.Results()
 if rets == nil || rets.Len() < 2 {
@@ -213,21 +186,22 @@ fnLoop:
 }
 for i := rets.Len() - 2; i >= 0; i-- {
 if rets.At(i).Type() == types.Universe.Lookup("error").Type() {
-j.Errorf(rets.At(i), "error should be returned as the last argument")
+pass.Reportf(rets.At(i).Pos(), "error should be returned as the last argument")
 continue fnLoop
 }
 }
 }
+return nil, nil
 }

 // CheckUnexportedReturn checks that exported functions on exported
 // types do not return unexported types.
-func (c *Checker) CheckUnexportedReturn(j *lint.Job) {
+func CheckUnexportedReturn(pass *analysis.Pass) (interface{}, error) {
-for _, fn := range j.Pkg.InitialFunctions {
+for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
 if fn.Synthetic != "" || fn.Parent() != nil {
 continue
 }
-if !ast.IsExported(fn.Name()) || IsInMain(j, fn) || IsInTest(j, fn) {
+if !ast.IsExported(fn.Name()) || IsInMain(pass, fn) || IsInTest(pass, fn) {
 continue
 }
 sig := fn.Type().(*types.Signature)
@@ -239,14 +213,16 @@ func (c *Checker) CheckUnexportedReturn(j *lint.Job) {
 if named, ok := DereferenceR(res.At(i).Type()).(*types.Named); ok &&
 !ast.IsExported(named.Obj().Name()) &&
 named != types.Universe.Lookup("error").Type() {
-j.Errorf(fn, "should not return unexported type")
+pass.Reportf(fn.Pos(), "should not return unexported type")
 }
 }
 }
+return nil, nil
 }

-func (c *Checker) CheckReceiverNames(j *lint.Job) {
+func CheckReceiverNames(pass *analysis.Pass) (interface{}, error) {
-for _, m := range j.Pkg.SSA.Members {
+ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg
+for _, m := range ssapkg.Members {
 if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() {
 ms := typeutil.IntuitiveMethodSet(T.Type(), nil)
 for _, sel := range ms {
@@ -257,18 +233,20 @@ func (c *Checker) CheckReceiverNames(j *lint.Job) {
 continue
 }
 if recv.Name() == "self" || recv.Name() == "this" {
-j.Errorf(recv, `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`)
+ReportfFG(pass, recv.Pos(), `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`)
 }
 if recv.Name() == "_" {
-j.Errorf(recv, "receiver name should not be an underscore, omit the name if it is unused")
+ReportfFG(pass, recv.Pos(), "receiver name should not be an underscore, omit the name if it is unused")
 }
 }
 }
 }
+return nil, nil
 }

-func (c *Checker) CheckReceiverNamesIdentical(j *lint.Job) {
+func CheckReceiverNamesIdentical(pass *analysis.Pass) (interface{}, error) {
-for _, m := range j.Pkg.SSA.Members {
+ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg
+for _, m := range ssapkg.Members {
 names := map[string]int{}

 var firstFn *types.Func
@@ -296,16 +274,17 @@ func (c *Checker) CheckReceiverNamesIdentical(j *lint.Job) {
 seen = append(seen, fmt.Sprintf("%dx %q", count, name))
 }

-j.Errorf(firstFn, "methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", "))
+pass.Reportf(firstFn.Pos(), "methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", "))
 }
 }
+return nil, nil
 }

-func (c *Checker) CheckContextFirstArg(j *lint.Job) {
+func CheckContextFirstArg(pass *analysis.Pass) (interface{}, error) {
 // TODO(dh): this check doesn't apply to test helpers. Example from the stdlib:
 // func helperCommandContext(t *testing.T, ctx context.Context, s ...string) (cmd *exec.Cmd) {
 fnLoop:
-for _, fn := range j.Pkg.InitialFunctions {
+for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
 if fn.Synthetic != "" || fn.Parent() != nil {
 continue
 }
@@ -319,28 +298,29 @@ fnLoop:
 for i := 1; i < params.Len(); i++ {
 param := params.At(i)
 if types.TypeString(param.Type(), nil) == "context.Context" {
-j.Errorf(param, "context.Context should be the first argument of a function")
+pass.Reportf(param.Pos(), "context.Context should be the first argument of a function")
 continue fnLoop
 }
 }
 }
+return nil, nil
 }

-func (c *Checker) CheckErrorStrings(j *lint.Job) {
+func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) {
 objNames := map[*ssa.Package]map[string]bool{}
-ssapkg := j.Pkg.SSA
+ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg
 objNames[ssapkg] = map[string]bool{}
 for _, m := range ssapkg.Members {
 if typ, ok := m.(*ssa.Type); ok {
 objNames[ssapkg][typ.Name()] = true
 }
 }
-for _, fn := range j.Pkg.InitialFunctions {
+for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
 objNames[fn.Package()][fn.Name()] = true
 }

-for _, fn := range j.Pkg.InitialFunctions {
+for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
-if IsInTest(j, fn) {
+if IsInTest(pass, fn) {
 // We don't care about malformed error messages in tests;
 // they're usually for direct human consumption, not part
 // of an API
@@ -368,7 +348,7 @@ func (c *Checker) CheckErrorStrings(j *lint.Job) {
 }
 switch s[len(s)-1] {
 case '.', ':', '!', '\n':
-j.Errorf(call, "error strings should not end with punctuation or a newline")
+pass.Reportf(call.Pos(), "error strings should not end with punctuation or a newline")
 }
 idx := strings.IndexByte(s, ' ')
 if idx == -1 {
@@ -402,13 +382,14 @@ func (c *Checker) CheckErrorStrings(j *lint.Job) {
 //
 // It could still be a proper noun, though.

-j.Errorf(call, "error strings should not be capitalized")
+pass.Reportf(call.Pos(), "error strings should not be capitalized")
 }
 }
 }
+return nil, nil
 }

-func (c *Checker) CheckTimeNames(j *lint.Job) {
+func CheckTimeNames(pass *analysis.Pass) (interface{}, error) {
 suffixes := []string{
 "Sec", "Secs", "Seconds",
 "Msec", "Msecs",
@@ -423,31 +404,32 @@ func (c *Checker) CheckTimeNames(j *lint.Job) {
 for _, name := range names {
 for _, suffix := range suffixes {
 if strings.HasSuffix(name.Name, suffix) {
-j.Errorf(name, "var %s is of type %v; don't use unit-specific suffix %q", name.Name, T, suffix)
+ReportNodef(pass, name, "var %s is of type %v; don't use unit-specific suffix %q", name.Name, T, suffix)
 break
 }
 }
 }
 }
-for _, f := range j.Pkg.Syntax {
+for _, f := range pass.Files {
 ast.Inspect(f, func(node ast.Node) bool {
 switch node := node.(type) {
 case *ast.ValueSpec:
-T := j.Pkg.TypesInfo.TypeOf(node.Type)
+T := pass.TypesInfo.TypeOf(node.Type)
 fn(T, node.Names)
 case *ast.FieldList:
 for _, field := range node.List {
-T := j.Pkg.TypesInfo.TypeOf(field.Type)
+T := pass.TypesInfo.TypeOf(field.Type)
 fn(T, field.Names)
 }
 }
 return true
 })
 }
+return nil, nil
 }

-func (c *Checker) CheckErrorVarNames(j *lint.Job) {
+func CheckErrorVarNames(pass *analysis.Pass) (interface{}, error) {
-for _, f := range j.Pkg.Syntax {
+for _, f := range pass.Files {
 for _, decl := range f.Decls {
 gen, ok := decl.(*ast.GenDecl)
 if !ok || gen.Tok != token.VAR {
@@ -461,7 +443,7 @@ func (c *Checker) CheckErrorVarNames(j *lint.Job) {

 for i, name := range spec.Names {
 val := spec.Values[i]
-if !IsCallToAST(j, val, "errors.New") && !IsCallToAST(j, val, "fmt.Errorf") {
+if !IsCallToAST(pass, val, "errors.New") && !IsCallToAST(pass, val, "fmt.Errorf") {
 continue
 }

@@ -470,12 +452,13 @@ func (c *Checker) CheckErrorVarNames(j *lint.Job) {
 prefix = "Err"
 }
 if !strings.HasPrefix(name.Name, prefix) {
-j.Errorf(name, "error var %s should have name of the form %sFoo", name.Name, prefix)
+ReportNodef(pass, name, "error var %s should have name of the form %sFoo", name.Name, prefix)
 }
 }
 }
 }
 }
+return nil, nil
 }

 var httpStatusCodes = map[int]string{
@@ -540,19 +523,22 @@ var httpStatusCodes = map[int]string{
 511: "StatusNetworkAuthenticationRequired",
 }

-func (c *Checker) CheckHTTPStatusCodes(j *lint.Job) {
+func CheckHTTPStatusCodes(pass *analysis.Pass) (interface{}, error) {
 whitelist := map[string]bool{}
-for _, code := range j.Pkg.Config.HTTPStatusCodeWhitelist {
+for _, code := range config.For(pass).HTTPStatusCodeWhitelist {
 whitelist[code] = true
 }
 fn := func(node ast.Node) bool {
+if node == nil {
+return true
+}
 call, ok := node.(*ast.CallExpr)
 if !ok {
 return true
 }

 var arg int
-switch CallNameAST(j, call) {
+switch CallNameAST(pass, call) {
 case "net/http.Error":
 arg = 2
 case "net/http.Redirect":
@@ -580,29 +566,32 @@ func (c *Checker) CheckHTTPStatusCodes(j *lint.Job) {
 if !ok {
 return true
 }
-j.Errorf(lit, "should use constant http.%s instead of numeric literal %d", s, n)
+ReportNodefFG(pass, lit, "should use constant http.%s instead of numeric literal %d", s, n)
 return true
 }
-for _, f := range j.Pkg.Syntax {
+// OPT(dh): replace with inspector
+for _, f := range pass.Files {
 ast.Inspect(f, fn)
 }
+return nil, nil
 }

-func (c *Checker) CheckDefaultCaseOrder(j *lint.Job) {
+func CheckDefaultCaseOrder(pass *analysis.Pass) (interface{}, error) {
 fn := func(node ast.Node) {
 stmt := node.(*ast.SwitchStmt)
 list := stmt.Body.List
 for i, c := range list {
 if c.(*ast.CaseClause).List == nil && i != 0 && i != len(list)-1 {
-j.Errorf(c, "default case should be first or last in switch statement")
+ReportNodefFG(pass, c, "default case should be first or last in switch statement")
 break
 }
 }
 }
-j.Pkg.Inspector.Preorder([]ast.Node{(*ast.SwitchStmt)(nil)}, fn)
+pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.SwitchStmt)(nil)}, fn)
+return nil, nil
 }

-func (c *Checker) CheckYodaConditions(j *lint.Job) {
+func CheckYodaConditions(pass *analysis.Pass) (interface{}, error) {
 fn := func(node ast.Node) {
 cond := node.(*ast.BinaryExpr)
 if cond.Op != token.EQL && cond.Op != token.NEQ {
@@ -615,12 +604,13 @@ func (c *Checker) CheckYodaConditions(j *lint.Job) {
 // Don't flag lit == lit conditions, just in case
 return
 }
-j.Errorf(cond, "don't use Yoda conditions")
+ReportNodefFG(pass, cond, "don't use Yoda conditions")
 }
-j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn)
+pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn)
+return nil, nil
 }

-func (c *Checker) CheckInvisibleCharacters(j *lint.Job) {
+func CheckInvisibleCharacters(pass *analysis.Pass) (interface{}, error) {
 fn := func(node ast.Node) {
 lit := node.(*ast.BasicLit)
 if lit.Kind != token.STRING {
@@ -628,11 +618,12 @@ func (c *Checker) CheckInvisibleCharacters(j *lint.Job) {
 }
 for _, r := range lit.Value {
 if unicode.Is(unicode.Cf, r) {
-j.Errorf(lit, "string literal contains the Unicode format character %U, consider using the %q escape sequence", r, r)
+ReportNodef(pass, lit, "string literal contains the Unicode format character %U, consider using the %q escape sequence", r, r)
 } else if unicode.Is(unicode.Cc, r) && r != '\n' && r != '\t' && r != '\r' {
-j.Errorf(lit, "string literal contains the Unicode control character %U, consider using the %q escape sequence", r, r)
+ReportNodef(pass, lit, "string literal contains the Unicode control character %U, consider using the %q escape sequence", r, r)
 }
 }
 }
-j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BasicLit)(nil)}, fn)
+pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BasicLit)(nil)}, fn)
+return nil, nil
 }
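The change above ports every stylecheck function from staticcheck's old *lint.Job interface to the standard golang.org/x/tools/go/analysis API: each check is now a plain func(pass *analysis.Pass) (interface{}, error), reports through pass.Reportf or the lintdsl helpers, and fetches shared work (the AST inspector, the SSA form) from pass.ResultOf. As a hedged sketch only, not the vendored implementation and with made-up names, a minimal analyzer in the same shape that flags Yoda conditions:

package main

import (
	"go/ast"
	"go/token"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/analysis/singlechecker"
	"golang.org/x/tools/go/ast/inspector"
)

// Analyzer follows the shape of the ported checks: it declares a dependency on
// inspect.Analyzer and takes the pre-built inspector from pass.ResultOf.
var Analyzer = &analysis.Analyzer{
	Name:     "yoda",
	Doc:      "flags comparisons with the literal on the left-hand side",
	Requires: []*analysis.Analyzer{inspect.Analyzer},
	Run:      run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	ins := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	ins.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, func(n ast.Node) {
		cond := n.(*ast.BinaryExpr)
		if cond.Op != token.EQL && cond.Op != token.NEQ {
			return
		}
		_, lhsLit := cond.X.(*ast.BasicLit)
		_, rhsLit := cond.Y.(*ast.BasicLit)
		// Report only when the literal is on the left and the right side is not.
		if lhsLit && !rhsLit {
			pass.Reportf(cond.Pos(), "don't use Yoda conditions")
		}
	})
	return nil, nil
}

func main() { singlechecker.Main(Analyzer) }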
vendor/honnef.co/go/tools/stylecheck/names.go (vendored, 25 lines changed)

@@ -9,7 +9,8 @@ import (
 "strings"
 "unicode"

-"honnef.co/go/tools/lint"
+"golang.org/x/tools/go/analysis"
+"honnef.co/go/tools/config"
 . "honnef.co/go/tools/lint/lintdsl"
 )

@@ -21,7 +22,7 @@ var knownNameExceptions = map[string]bool{
 "kWh": true,
 }

-func (c *Checker) CheckNames(j *lint.Job) {
+func CheckNames(pass *analysis.Pass) (interface{}, error) {
 // A large part of this function is copied from
 // github.com/golang/lint, Copyright (c) 2013 The Go Authors,
 // licensed under the BSD 3-clause license.
@@ -45,7 +46,7 @@ func (c *Checker) CheckNames(j *lint.Job) {

 // Handle two common styles from other languages that don't belong in Go.
 if len(id.Name) >= 5 && allCaps(id.Name) && strings.Contains(id.Name, "_") {
-j.Errorf(id, "should not use ALL_CAPS in Go names; use CamelCase instead")
+ReportfFG(pass, id.Pos(), "should not use ALL_CAPS in Go names; use CamelCase instead")
 return
 }

@@ -55,10 +56,10 @@ func (c *Checker) CheckNames(j *lint.Job) {
 }

 if len(id.Name) > 2 && strings.Contains(id.Name[1:len(id.Name)-1], "_") {
-j.Errorf(id, "should not use underscores in Go names; %s %s should be %s", thing, id.Name, should)
+ReportfFG(pass, id.Pos(), "should not use underscores in Go names; %s %s should be %s", thing, id.Name, should)
 return
 }
-j.Errorf(id, "%s %s should be %s", thing, id.Name, should)
+ReportfFG(pass, id.Pos(), "%s %s should be %s", thing, id.Name, should)
 }
 checkList := func(fl *ast.FieldList, thing string, initialisms map[string]bool) {
 if fl == nil {
@@ -71,17 +72,18 @@ func (c *Checker) CheckNames(j *lint.Job) {
 }
 }

-initialisms := make(map[string]bool, len(j.Pkg.Config.Initialisms))
-for _, word := range j.Pkg.Config.Initialisms {
+il := config.For(pass).Initialisms
+initialisms := make(map[string]bool, len(il))
+for _, word := range il {
 initialisms[word] = true
 }
-for _, f := range j.Pkg.Syntax {
+for _, f := range pass.Files {
 // Package names need slightly different handling than other names.
 if !strings.HasSuffix(f.Name.Name, "_test") && strings.Contains(f.Name.Name, "_") {
-j.Errorf(f, "should not use underscores in package names")
+ReportfFG(pass, f.Pos(), "should not use underscores in package names")
 }
 if strings.IndexFunc(f.Name.Name, unicode.IsUpper) != -1 {
-j.Errorf(f, "should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name))
+ReportfFG(pass, f.Pos(), "should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name))
 }

 ast.Inspect(f, func(node ast.Node) bool {
@@ -104,7 +106,7 @@ func (c *Checker) CheckNames(j *lint.Job) {
 return true
 }

-if IsInTest(j, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) {
+if IsInTest(pass, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) {
 return true
 }

@@ -173,6 +175,7 @@ func (c *Checker) CheckNames(j *lint.Job) {
 return true
 })
 }
+return nil, nil
 }

 // lintName returns a different name if it should be different.
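CheckNames now reads its initialisms list from config.For(pass) and reports through ReportfFG, but the naming rules themselves are unchanged. As a hypothetical snippet (not from this commit) of the styles it flags and prefers:

package namingdemo

// Flagged by CheckNames: ALL_CAPS and underscore-separated names.
//
//	const MAX_RETRY_COUNT = 3
//	func parse_header() {}
//
// Preferred: CamelCase, with configured initialisms kept upper-case
// (e.g. HTTPServer, userID).

const MaxRetryCount = 3

func parseHeader() {}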
vendor/honnef.co/go/tools/unused/edge.go (vendored, new file, 54 lines)

@@ -0,0 +1,54 @@
+package unused
+
+//go:generate stringer -type edgeKind
+type edgeKind uint64
+
+func (e edgeKind) is(o edgeKind) bool {
+return e&o != 0
+}
+
+const (
+edgeAlias edgeKind = 1 << iota
+edgeBlankField
+edgeAnonymousStruct
+edgeCgoExported
+edgeConstGroup
+edgeElementType
+edgeEmbeddedInterface
+edgeExportedConstant
+edgeExportedField
+edgeExportedFunction
+edgeExportedMethod
+edgeExportedType
+edgeExportedVariable
+edgeExtendsExportedFields
+edgeExtendsExportedMethodSet
+edgeFieldAccess
+edgeFunctionArgument
+edgeFunctionResult
+edgeFunctionSignature
+edgeImplements
+edgeInstructionOperand
+edgeInterfaceCall
+edgeInterfaceMethod
+edgeKeyType
+edgeLinkname
+edgeMainFunction
+edgeNamedType
+edgeNetRPCRegister
+edgeNoCopySentinel
+edgeProvidesMethod
+edgeReceiver
+edgeRuntimeFunction
+edgeSignature
+edgeStructConversion
+edgeTestSink
+edgeTupleElement
+edgeType
+edgeTypeName
+edgeUnderlyingType
+edgePointerType
+edgeUnsafeConversion
+edgeUsedConstant
+edgeVarDecl
+)
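edgeKind is a bit set: each constant occupies one bit via 1 << iota, several edge kinds can be OR-ed into a single value, and is() tests for an individual bit. A trimmed-down, hypothetical sketch of the same pattern (names shortened, not the vendored code):

package main

import "fmt"

// Each constant is a single bit, so a value can carry several kinds at once.
type edgeKind uint64

func (e edgeKind) is(o edgeKind) bool { return e&o != 0 }

const (
	edgeAlias edgeKind = 1 << iota
	edgeBlankField
	edgeAnonymousStruct
)

func main() {
	k := edgeAlias | edgeAnonymousStruct
	fmt.Println(k.is(edgeAlias))      // true
	fmt.Println(k.is(edgeBlankField)) // false
}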
vendor/honnef.co/go/tools/unused/edgekind_string.go (vendored, new file, 109 lines)

@@ -0,0 +1,109 @@
+// Code generated by "stringer -type edgeKind"; DO NOT EDIT.
+
+package unused
+
+import "strconv"
+
+func _() {
+// An "invalid array index" compiler error signifies that the constant values have changed.
+// Re-run the stringer command to generate them again.
+var x [1]struct{}
+_ = x[edgeAlias-1]
+_ = x[edgeBlankField-2]
+_ = x[edgeAnonymousStruct-4]
+_ = x[edgeCgoExported-8]
+_ = x[edgeConstGroup-16]
+_ = x[edgeElementType-32]
+_ = x[edgeEmbeddedInterface-64]
+_ = x[edgeExportedConstant-128]
+_ = x[edgeExportedField-256]
+_ = x[edgeExportedFunction-512]
+_ = x[edgeExportedMethod-1024]
+_ = x[edgeExportedType-2048]
+_ = x[edgeExportedVariable-4096]
+_ = x[edgeExtendsExportedFields-8192]
+_ = x[edgeExtendsExportedMethodSet-16384]
+_ = x[edgeFieldAccess-32768]
+_ = x[edgeFunctionArgument-65536]
+_ = x[edgeFunctionResult-131072]
+_ = x[edgeFunctionSignature-262144]
+_ = x[edgeImplements-524288]
+_ = x[edgeInstructionOperand-1048576]
+_ = x[edgeInterfaceCall-2097152]
+_ = x[edgeInterfaceMethod-4194304]
+_ = x[edgeKeyType-8388608]
+_ = x[edgeLinkname-16777216]
+_ = x[edgeMainFunction-33554432]
+_ = x[edgeNamedType-67108864]
+_ = x[edgeNetRPCRegister-134217728]
+_ = x[edgeNoCopySentinel-268435456]
+_ = x[edgeProvidesMethod-536870912]
+_ = x[edgeReceiver-1073741824]
+_ = x[edgeRuntimeFunction-2147483648]
+_ = x[edgeSignature-4294967296]
+_ = x[edgeStructConversion-8589934592]
+_ = x[edgeTestSink-17179869184]
+_ = x[edgeTupleElement-34359738368]
+_ = x[edgeType-68719476736]
+_ = x[edgeTypeName-137438953472]
+_ = x[edgeUnderlyingType-274877906944]
+_ = x[edgePointerType-549755813888]
+_ = x[edgeUnsafeConversion-1099511627776]
+_ = x[edgeUsedConstant-2199023255552]
+_ = x[edgeVarDecl-4398046511104]
+}
+
+const _edgeKind_name = "edgeAliasedgeBlankFieldedgeAnonymousStructedgeCgoExportededgeConstGroupedgeElementTypeedgeEmbeddedInterfaceedgeExportedConstantedgeExportedFieldedgeExportedFunctionedgeExportedMethodedgeExportedTypeedgeExportedVariableedgeExtendsExportedFieldsedgeExtendsExportedMethodSetedgeFieldAccessedgeFunctionArgumentedgeFunctionResultedgeFunctionSignatureedgeImplementsedgeInstructionOperandedgeInterfaceCalledgeInterfaceMethodedgeKeyTypeedgeLinknameedgeMainFunctionedgeNamedTypeedgeNetRPCRegisteredgeNoCopySentineledgeProvidesMethodedgeReceiveredgeRuntimeFunctionedgeSignatureedgeStructConversionedgeTestSinkedgeTupleElementedgeTypeedgeTypeNameedgeUnderlyingTypeedgePointerTypeedgeUnsafeConversionedgeUsedConstantedgeVarDecl"
+
+var _edgeKind_map = map[edgeKind]string{
+1: _edgeKind_name[0:9],
+2: _edgeKind_name[9:23],
+4: _edgeKind_name[23:42],
+8: _edgeKind_name[42:57],
+16: _edgeKind_name[57:71],
+32: _edgeKind_name[71:86],
+64: _edgeKind_name[86:107],
+128: _edgeKind_name[107:127],
+256: _edgeKind_name[127:144],
+512: _edgeKind_name[144:164],
+1024: _edgeKind_name[164:182],
+2048: _edgeKind_name[182:198],
+4096: _edgeKind_name[198:218],
+8192: _edgeKind_name[218:243],
+16384: _edgeKind_name[243:271],
+32768: _edgeKind_name[271:286],
+65536: _edgeKind_name[286:306],
+131072: _edgeKind_name[306:324],
+262144: _edgeKind_name[324:345],
+524288: _edgeKind_name[345:359],
+1048576: _edgeKind_name[359:381],
+2097152: _edgeKind_name[381:398],
+4194304: _edgeKind_name[398:417],
+8388608: _edgeKind_name[417:428],
+16777216: _edgeKind_name[428:440],
+33554432: _edgeKind_name[440:456],
+67108864: _edgeKind_name[456:469],
+134217728: _edgeKind_name[469:487],
+268435456: _edgeKind_name[487:505],
+536870912: _edgeKind_name[505:523],
+1073741824: _edgeKind_name[523:535],
+2147483648: _edgeKind_name[535:554],
+4294967296: _edgeKind_name[554:567],
+8589934592: _edgeKind_name[567:587],
+17179869184: _edgeKind_name[587:599],
+34359738368: _edgeKind_name[599:615],
+68719476736: _edgeKind_name[615:623],
+137438953472: _edgeKind_name[623:635],
+274877906944: _edgeKind_name[635:653],
+549755813888: _edgeKind_name[653:668],
+1099511627776: _edgeKind_name[668:688],
+2199023255552: _edgeKind_name[688:704],
+4398046511104: _edgeKind_name[704:715],
+}
+
+func (i edgeKind) String() string {
+if str, ok := _edgeKind_map[i]; ok {
+return str
+}
+return "edgeKind(" + strconv.FormatInt(int64(i), 10) + ")"
+}
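The generated file packs every constant name into one backing string and maps each bit value to a sub-slice of it, so no per-name allocation is needed; the func _() block is a compile-time guard that breaks if the constant values ever change, prompting a re-run of stringer. A compressed, hypothetical illustration of the same technique (not the generated code):

package main

import (
	"fmt"
	"strconv"
)

type kind uint64

// All names live in one string; the map stores slices of it.
const kindName = "alphabeta"

var kindMap = map[kind]string{
	1: kindName[0:5], // "alpha"
	2: kindName[5:9], // "beta"
}

func (k kind) String() string {
	if s, ok := kindMap[k]; ok {
		return s
	}
	return "kind(" + strconv.FormatInt(int64(k), 10) + ")"
}

func main() {
	fmt.Println(kind(1), kind(2), kind(3)) // alpha beta kind(3)
}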
vendor/honnef.co/go/tools/unused/unused.go (vendored, 1189 lines changed): file diff suppressed because it is too large.
vendor/honnef.co/go/tools/version/buildinfo.go (vendored, new file, 46 lines)

@@ -0,0 +1,46 @@
+// +build go1.12
+
+package version
+
+import (
+"fmt"
+"runtime/debug"
+)
+
+func printBuildInfo() {
+if info, ok := debug.ReadBuildInfo(); ok {
+fmt.Println("Main module:")
+printModule(&info.Main)
+fmt.Println("Dependencies:")
+for _, dep := range info.Deps {
+printModule(dep)
+}
+} else {
+fmt.Println("Built without Go modules")
+}
+}
+
+func buildInfoVersion() (string, bool) {
+info, ok := debug.ReadBuildInfo()
+if !ok {
+return "", false
+}
+if info.Main.Version == "(devel)" {
+return "", false
+}
+return info.Main.Version, true
+}
+
+func printModule(m *debug.Module) {
+fmt.Printf("\t%s", m.Path)
+if m.Version != "(devel)" {
+fmt.Printf("@%s", m.Version)
+}
+if m.Sum != "" {
+fmt.Printf(" (sum: %s)", m.Sum)
+}
+if m.Replace != nil {
+fmt.Printf(" (replace: %s)", m.Replace.Path)
+}
+fmt.Println()
+}
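buildinfo.go relies on runtime/debug.ReadBuildInfo, which is only available when the binary was built with Go 1.12+ module support. A minimal standalone usage sketch of that same API (not part of the commit):

package main

import (
	"fmt"
	"runtime/debug"
)

func main() {
	// ReadBuildInfo reports the main module and its dependencies when the
	// binary was built from modules; ok is false otherwise.
	info, ok := debug.ReadBuildInfo()
	if !ok {
		fmt.Println("built without Go modules")
		return
	}
	fmt.Println("main module:", info.Main.Path, info.Main.Version)
	for _, dep := range info.Deps {
		fmt.Println("dep:", dep.Path, dep.Version)
	}
}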
vendor/honnef.co/go/tools/version/buildinfo111.go (vendored, new file, 6 lines)

@@ -0,0 +1,6 @@
+// +build !go1.12
+
+package version
+
+func printBuildInfo() {}
+func buildInfoVersion() (string, bool) { return "", false }
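buildinfo.go and buildinfo111.go are selected by mutually exclusive build tags, so toolchains older than Go 1.12 compile no-op stubs instead of the runtime/debug-based implementation. A minimal sketch of the same pattern, with made-up file and package names:

// modern.go, compiled only on Go 1.12 and newer
// +build go1.12

package compat

func hasBuildInfo() bool { return true }

// legacy.go, compiled on anything older
// +build !go1.12

package compat

func hasBuildInfo() bool { return false }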
vendor/honnef.co/go/tools/version/version.go (vendored, 31 lines changed)

@@ -4,14 +4,39 @@ import (
 "fmt"
 "os"
 "path/filepath"
+"runtime"
 )

-const Version = "devel"
+const Version = "2019.2.3"

+// version returns a version descriptor and reports whether the
+// version is a known release.
+func version() (string, bool) {
+if Version != "devel" {
+return Version, true
+}
+v, ok := buildInfoVersion()
+if ok {
+return v, false
+}
+return "devel", false
+}
+
 func Print() {
-if Version == "devel" {
+v, release := version()
+
+if release {
+fmt.Printf("%s %s\n", filepath.Base(os.Args[0]), v)
+} else if v == "devel" {
 fmt.Printf("%s (no version)\n", filepath.Base(os.Args[0]))
 } else {
-fmt.Printf("%s %s\n", filepath.Base(os.Args[0]), Version)
+fmt.Printf("%s (devel, %s)\n", filepath.Base(os.Args[0]), v)
 }
 }
+
+func Verbose() {
+Print()
+fmt.Println()
+fmt.Println("Compiled with Go version:", runtime.Version())
+printBuildInfo()
+}
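The new version() helper distinguishes a tagged release (the hard-coded Version constant), a version recovered from module build info, and a bare devel build, and Print() formats each case differently. A hedged, standalone re-statement of that decision order (not the vendored code; the function name and inputs are made up):

package main

import "fmt"

// describe mirrors the order of precedence: hard-coded release first, then a
// module version from build info, then the "no version" fallback.
func describe(release, buildInfoVersion string) string {
	if release != "devel" {
		return release
	}
	if buildInfoVersion != "" {
		return "devel, " + buildInfoVersion
	}
	return "no version"
}

func main() {
	fmt.Println(describe("2019.2.3", ""))             // 2019.2.3
	fmt.Println(describe("devel", "v0.0.1-2019.2.3")) // devel, v0.0.1-2019.2.3
	fmt.Println(describe("devel", ""))                // no version
}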
vendor/modules.txt (vendored, 16 lines changed)

@@ -199,7 +199,7 @@ github.com/spf13/cobra
 github.com/spf13/jwalterweatherman
 # github.com/spf13/pflag v1.0.5
 github.com/spf13/pflag
-# github.com/spf13/viper v1.6.3
+# github.com/spf13/viper v1.7.0
 github.com/spf13/viper
 # github.com/stretchr/testify v1.5.1
 github.com/stretchr/testify/assert
@@ -260,6 +260,8 @@ golang.org/x/text/unicode/bidi
 golang.org/x/text/unicode/norm
 golang.org/x/text/width
 # golang.org/x/tools v0.0.0-20200410194907-79a7a3126eef
+golang.org/x/tools/go/analysis
+golang.org/x/tools/go/analysis/passes/inspect
 golang.org/x/tools/go/ast/astutil
 golang.org/x/tools/go/ast/inspector
 golang.org/x/tools/go/buildutil
@@ -269,8 +271,10 @@ golang.org/x/tools/go/internal/gcimporter
 golang.org/x/tools/go/internal/packagesdriver
 golang.org/x/tools/go/loader
 golang.org/x/tools/go/packages
+golang.org/x/tools/go/types/objectpath
 golang.org/x/tools/go/types/typeutil
 golang.org/x/tools/imports
+golang.org/x/tools/internal/analysisinternal
 golang.org/x/tools/internal/fastwalk
 golang.org/x/tools/internal/gocommand
 golang.org/x/tools/internal/gopathwalk
@@ -321,24 +325,26 @@ gopkg.in/gomail.v2
 gopkg.in/ini.v1
 # gopkg.in/yaml.v2 v2.2.8
 gopkg.in/yaml.v2
-# honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a
+# honnef.co/go/tools v0.0.1-2019.2.3
 honnef.co/go/tools/arg
-honnef.co/go/tools/callgraph
-honnef.co/go/tools/callgraph/static
 honnef.co/go/tools/cmd/staticcheck
 honnef.co/go/tools/config
 honnef.co/go/tools/deprecated
+honnef.co/go/tools/facts
 honnef.co/go/tools/functions
 honnef.co/go/tools/go/types/typeutil
+honnef.co/go/tools/internal/cache
+honnef.co/go/tools/internal/passes/buildssa
+honnef.co/go/tools/internal/renameio
 honnef.co/go/tools/internal/sharedcheck
 honnef.co/go/tools/lint
 honnef.co/go/tools/lint/lintdsl
 honnef.co/go/tools/lint/lintutil
 honnef.co/go/tools/lint/lintutil/format
+honnef.co/go/tools/loader
 honnef.co/go/tools/printf
 honnef.co/go/tools/simple
 honnef.co/go/tools/ssa
-honnef.co/go/tools/ssa/ssautil
 honnef.co/go/tools/ssautil
 honnef.co/go/tools/staticcheck
 honnef.co/go/tools/staticcheck/vrp